Samoed committed on
Commit
f7e2abb
β€’
1 Parent(s): 64624dd

add rumteb

Browse files
EXTERNAL_MODEL_RESULTS.json CHANGED
The diff for this file is too large to render. See raw diff
 
all_data_tasks/0/default.jsonl CHANGED
@@ -205,7 +205,7 @@
205
  {"index":107,"Rank":238,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Shimin\/LLaMA-embeeding\">LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":84.82,"AmazonPolarityClassification":76.88,"AmazonReviewsClassification (en)":36.72,"Banking77Classification":"","EmotionClassification":41.93,"ImdbClassification":"","MassiveIntentClassification (en)":65.91,"MassiveScenarioClassification (en)":67.62,"MTOPDomainClassification (en)":87.95,"MTOPIntentClassification (en)":78.43,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":56.28}
206
  {"index":108,"Rank":239,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Shimin\/yiyouliao\">yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":74.78,"AmazonPolarityClassification":71.89,"AmazonReviewsClassification (en)":36.7,"Banking77Classification":81.37,"EmotionClassification":42.6,"ImdbClassification":63.96,"MassiveIntentClassification (en)":68.56,"MassiveScenarioClassification (en)":74.15,"MTOPDomainClassification (en)":90.19,"MTOPIntentClassification (en)":69.5,"ToxicConversationsClassification":69.85,"TweetSentimentExtractionClassification":""}
207
  {"index":112,"Rank":240,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Snowflake\/snowflake-arctic-embed-m-v1.5\">snowflake-arctic-embed-m-v1.5<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (en)":68.3,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":46.27,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.69,"MassiveScenarioClassification (en)":73.06,"MTOPDomainClassification (en)":91.36,"MTOPIntentClassification (en)":60.64,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
208
- {"index":121,"Rank":243,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/YanshekWoo\/EminEmbed\">EminEmbed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":62.97,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":48.48,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.09,"MassiveScenarioClassification (en)":72.55,"MTOPDomainClassification (en)":90.15,"MTOPIntentClassification (en)":57.92,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
209
  {"index":140,"Rank":248,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/brahmairesearch\/slx-v0.1\">slx-v0.1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","AmazonCounterfactualClassification (en)":61.46,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":30.3,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":67.94,"MassiveScenarioClassification (en)":73.91,"MTOPDomainClassification (en)":91.97,"MTOPIntentClassification (en)":63.3,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
210
  {"index":142,"Rank":249,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":60.48,"MassiveScenarioClassification (en)":65.43,"MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
211
  {"index":212,"Rank":272,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/mukaj\/fin-mpnet-base\">fin-mpnet-base<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":29.13,"Banking77Classification":80.25,"EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
 
205
  {"index":107,"Rank":238,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Shimin\/LLaMA-embeeding\">LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":84.82,"AmazonPolarityClassification":76.88,"AmazonReviewsClassification (en)":36.72,"Banking77Classification":"","EmotionClassification":41.93,"ImdbClassification":"","MassiveIntentClassification (en)":65.91,"MassiveScenarioClassification (en)":67.62,"MTOPDomainClassification (en)":87.95,"MTOPIntentClassification (en)":78.43,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":56.28}
206
  {"index":108,"Rank":239,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Shimin\/yiyouliao\">yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":74.78,"AmazonPolarityClassification":71.89,"AmazonReviewsClassification (en)":36.7,"Banking77Classification":81.37,"EmotionClassification":42.6,"ImdbClassification":63.96,"MassiveIntentClassification (en)":68.56,"MassiveScenarioClassification (en)":74.15,"MTOPDomainClassification (en)":90.19,"MTOPIntentClassification (en)":69.5,"ToxicConversationsClassification":69.85,"TweetSentimentExtractionClassification":""}
207
  {"index":112,"Rank":240,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Snowflake\/snowflake-arctic-embed-m-v1.5\">snowflake-arctic-embed-m-v1.5<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (en)":68.3,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":46.27,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.69,"MassiveScenarioClassification (en)":73.06,"MTOPDomainClassification (en)":91.36,"MTOPIntentClassification (en)":60.64,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
208
+ {"index":121,"Rank":243,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/YanshekWoo\/EminEmbed\">EminEmbed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":67.79,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":48.48,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.09,"MassiveScenarioClassification (en)":72.55,"MTOPDomainClassification (en)":90.15,"MTOPIntentClassification (en)":57.92,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
209
  {"index":140,"Rank":248,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/brahmairesearch\/slx-v0.1\">slx-v0.1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","AmazonCounterfactualClassification (en)":61.46,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":30.3,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":67.94,"MassiveScenarioClassification (en)":73.91,"MTOPDomainClassification (en)":91.97,"MTOPIntentClassification (en)":63.3,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
210
  {"index":142,"Rank":249,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":60.48,"MassiveScenarioClassification (en)":65.43,"MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
211
  {"index":212,"Rank":272,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/mukaj\/fin-mpnet-base\">fin-mpnet-base<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":29.13,"Banking77Classification":80.25,"EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
all_data_tasks/33/default.jsonl CHANGED
@@ -1,25 +1,25 @@
1
- {"index":13,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":64.57,"GeoreviewClassification (rus-Cyrl)":50.25,"HeadlineClassification (rus-Cyrl)":85.68,"InappropriatenessClassification (rus-Cyrl)":67.19,"KinopoiskClassification (rus-Cyrl)":65.49,"RuReviewsClassification (rus-Cyrl)":67.68,"RuSciBenchGRNTIClassification (rus-Cyrl)":64.59,"RuSciBenchOECDClassification (rus-Cyrl)":51.13}
2
- {"index":4,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":60.84,"GeoreviewClassification (rus-Cyrl)":49.7,"HeadlineClassification (rus-Cyrl)":78.0,"InappropriatenessClassification (rus-Cyrl)":61.32,"KinopoiskClassification (rus-Cyrl)":63.27,"RuReviewsClassification (rus-Cyrl)":67.96,"RuSciBenchGRNTIClassification (rus-Cyrl)":59.33,"RuSciBenchOECDClassification (rus-Cyrl)":46.33}
3
- {"index":11,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":59.36,"GeoreviewClassification (rus-Cyrl)":50.98,"HeadlineClassification (rus-Cyrl)":70.09,"InappropriatenessClassification (rus-Cyrl)":60.76,"KinopoiskClassification (rus-Cyrl)":63.33,"RuReviewsClassification (rus-Cyrl)":68.52,"RuSciBenchGRNTIClassification (rus-Cyrl)":57.67,"RuSciBenchOECDClassification (rus-Cyrl)":44.2}
4
- {"index":15,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":58.92,"GeoreviewClassification (rus-Cyrl)":49.69,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":61.6,"KinopoiskClassification (rus-Cyrl)":56.59,"RuReviewsClassification (rus-Cyrl)":65.28,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.2,"RuSciBenchOECDClassification (rus-Cyrl)":43.91}
5
- {"index":10,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":57.86,"GeoreviewClassification (rus-Cyrl)":47.23,"HeadlineClassification (rus-Cyrl)":74.88,"InappropriatenessClassification (rus-Cyrl)":61.94,"KinopoiskClassification (rus-Cyrl)":55.69,"RuReviewsClassification (rus-Cyrl)":66.44,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.55,"RuSciBenchOECDClassification (rus-Cyrl)":43.28}
6
- {"index":0,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":57.43,"GeoreviewClassification (rus-Cyrl)":48.27,"HeadlineClassification (rus-Cyrl)":70.32,"InappropriatenessClassification (rus-Cyrl)":59.87,"KinopoiskClassification (rus-Cyrl)":58.23,"RuReviewsClassification (rus-Cyrl)":66.91,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.81,"RuSciBenchOECDClassification (rus-Cyrl)":42.57}
7
- {"index":23,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":56.55,"GeoreviewClassification (rus-Cyrl)":46.04,"HeadlineClassification (rus-Cyrl)":69.98,"InappropriatenessClassification (rus-Cyrl)":61.39,"KinopoiskClassification (rus-Cyrl)":53.59,"RuReviewsClassification (rus-Cyrl)":64.58,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.67,"RuSciBenchOECDClassification (rus-Cyrl)":43.58}
8
- {"index":14,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":56.19,"GeoreviewClassification (rus-Cyrl)":46.05,"HeadlineClassification (rus-Cyrl)":75.64,"InappropriatenessClassification (rus-Cyrl)":58.78,"KinopoiskClassification (rus-Cyrl)":50.89,"RuReviewsClassification (rus-Cyrl)":62.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.28,"RuSciBenchOECDClassification (rus-Cyrl)":42.69}
9
- {"index":5,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.44,"GeoreviewClassification (rus-Cyrl)":39.67,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":64.64,"KinopoiskClassification (rus-Cyrl)":50.33,"RuReviewsClassification (rus-Cyrl)":58.29,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.19,"RuSciBenchOECDClassification (rus-Cyrl)":43.8}
10
- {"index":6,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.21,"GeoreviewClassification (rus-Cyrl)":39.97,"HeadlineClassification (rus-Cyrl)":79.26,"InappropriatenessClassification (rus-Cyrl)":62.52,"KinopoiskClassification (rus-Cyrl)":49.51,"RuReviewsClassification (rus-Cyrl)":58.27,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.9,"RuSciBenchOECDClassification (rus-Cyrl)":43.04}
11
- {"index":16,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":55.09,"GeoreviewClassification (rus-Cyrl)":44.66,"HeadlineClassification (rus-Cyrl)":73.94,"InappropriatenessClassification (rus-Cyrl)":59.16,"KinopoiskClassification (rus-Cyrl)":49.96,"RuReviewsClassification (rus-Cyrl)":61.18,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.99,"RuSciBenchOECDClassification (rus-Cyrl)":41.72}
12
- {"index":12,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":54.23,"GeoreviewClassification (rus-Cyrl)":40.19,"HeadlineClassification (rus-Cyrl)":78.75,"InappropriatenessClassification (rus-Cyrl)":61.33,"KinopoiskClassification (rus-Cyrl)":48.78,"RuReviewsClassification (rus-Cyrl)":55.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.53,"RuSciBenchOECDClassification (rus-Cyrl)":41.34}
13
- {"index":22,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.11,"GeoreviewClassification (rus-Cyrl)":42.33,"HeadlineClassification (rus-Cyrl)":70.35,"InappropriatenessClassification (rus-Cyrl)":59.32,"KinopoiskClassification (rus-Cyrl)":44.31,"RuReviewsClassification (rus-Cyrl)":62.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.01,"RuSciBenchOECDClassification (rus-Cyrl)":44.14}
14
- {"index":24,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":53.46,"GeoreviewClassification (rus-Cyrl)":41.36,"HeadlineClassification (rus-Cyrl)":68.9,"InappropriatenessClassification (rus-Cyrl)":59.11,"KinopoiskClassification (rus-Cyrl)":50.47,"RuReviewsClassification (rus-Cyrl)":60.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.93,"RuSciBenchOECDClassification (rus-Cyrl)":40.79}
15
- {"index":7,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":52.73,"GeoreviewClassification (rus-Cyrl)":40.89,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.48,"KinopoiskClassification (rus-Cyrl)":49.85,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.8,"RuSciBenchOECDClassification (rus-Cyrl)":40.36}
16
- {"index":17,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":52.35,"GeoreviewClassification (rus-Cyrl)":40.86,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.52,"KinopoiskClassification (rus-Cyrl)":46.77,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.04,"RuSciBenchOECDClassification (rus-Cyrl)":40.48}
17
- {"index":1,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":52.16,"GeoreviewClassification (rus-Cyrl)":38.95,"HeadlineClassification (rus-Cyrl)":75.59,"InappropriatenessClassification (rus-Cyrl)":60.68,"KinopoiskClassification (rus-Cyrl)":49.67,"RuReviewsClassification (rus-Cyrl)":54.05,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.53,"RuSciBenchOECDClassification (rus-Cyrl)":37.65}
18
- {"index":3,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.49,"GeoreviewClassification (rus-Cyrl)":38.05,"HeadlineClassification (rus-Cyrl)":67.64,"InappropriatenessClassification (rus-Cyrl)":58.27,"KinopoiskClassification (rus-Cyrl)":45.86,"RuReviewsClassification (rus-Cyrl)":58.34,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.18,"RuSciBenchOECDClassification (rus-Cyrl)":40.11}
19
- {"index":21,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.38,"GeoreviewClassification (rus-Cyrl)":38.24,"HeadlineClassification (rus-Cyrl)":68.3,"InappropriatenessClassification (rus-Cyrl)":58.18,"KinopoiskClassification (rus-Cyrl)":41.45,"RuReviewsClassification (rus-Cyrl)":58.88,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.19,"RuSciBenchOECDClassification (rus-Cyrl)":41.41}
20
- {"index":9,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.37,"GeoreviewClassification (rus-Cyrl)":39.64,"HeadlineClassification (rus-Cyrl)":74.19,"InappropriatenessClassification (rus-Cyrl)":58.57,"KinopoiskClassification (rus-Cyrl)":49.06,"RuReviewsClassification (rus-Cyrl)":56.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":45.63,"RuSciBenchOECDClassification (rus-Cyrl)":35.48}
21
- {"index":2,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":50.66,"GeoreviewClassification (rus-Cyrl)":37.22,"HeadlineClassification (rus-Cyrl)":75.23,"InappropriatenessClassification (rus-Cyrl)":57.34,"KinopoiskClassification (rus-Cyrl)":49.91,"RuReviewsClassification (rus-Cyrl)":50.74,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.03,"RuSciBenchOECDClassification (rus-Cyrl)":36.13}
22
- {"index":8,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":42.68,"GeoreviewClassification (rus-Cyrl)":33.45,"HeadlineClassification (rus-Cyrl)":57.65,"InappropriatenessClassification (rus-Cyrl)":54.5,"KinopoiskClassification (rus-Cyrl)":41.36,"RuReviewsClassification (rus-Cyrl)":49.56,"RuSciBenchGRNTIClassification (rus-Cyrl)":35.71,"RuSciBenchOECDClassification (rus-Cyrl)":26.51}
23
- {"index":20,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":29.53,"GeoreviewClassification (rus-Cyrl)":25.93,"HeadlineClassification (rus-Cyrl)":28.53,"InappropriatenessClassification (rus-Cyrl)":51.82,"KinopoiskClassification (rus-Cyrl)":34.18,"RuReviewsClassification (rus-Cyrl)":42.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":13.29,"RuSciBenchOECDClassification (rus-Cyrl)":10.62}
24
- {"index":19,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.67,"GeoreviewClassification (rus-Cyrl)":27.08,"HeadlineClassification (rus-Cyrl)":27.77,"InappropriatenessClassification (rus-Cyrl)":51.73,"KinopoiskClassification (rus-Cyrl)":33.93,"RuReviewsClassification (rus-Cyrl)":41.79,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.08,"RuSciBenchOECDClassification (rus-Cyrl)":8.3}
25
- {"index":18,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":28.33,"GeoreviewClassification (rus-Cyrl)":23.49,"HeadlineClassification (rus-Cyrl)":28.49,"InappropriatenessClassification (rus-Cyrl)":50.85,"KinopoiskClassification (rus-Cyrl)":34.17,"RuReviewsClassification (rus-Cyrl)":42.49,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.49,"RuSciBenchOECDClassification (rus-Cyrl)":8.31}
 
1
+ {"index":13,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":67.52,"GeoreviewClassification (rus-Cyrl)":50.25,"HeadlineClassification (rus-Cyrl)":85.68,"InappropriatenessClassification (rus-Cyrl)":67.19,"KinopoiskClassification (rus-Cyrl)":65.49,"RuReviewsClassification (rus-Cyrl)":67.68,"RuSciBenchGRNTIClassification (rus-Cyrl)":64.59,"RuSciBenchOECDClassification (rus-Cyrl)":51.13,"MassiveIntentClassification (rus-Cyrl)":76.08,"MassiveScenarioClassification (rus-Cyrl)":79.61}
2
+ {"index":11,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":61.92,"GeoreviewClassification (rus-Cyrl)":50.98,"HeadlineClassification (rus-Cyrl)":70.09,"InappropriatenessClassification (rus-Cyrl)":60.76,"KinopoiskClassification (rus-Cyrl)":63.33,"RuReviewsClassification (rus-Cyrl)":68.52,"RuSciBenchGRNTIClassification (rus-Cyrl)":57.67,"RuSciBenchOECDClassification (rus-Cyrl)":44.2,"MassiveIntentClassification (rus-Cyrl)":68.85,"MassiveScenarioClassification (rus-Cyrl)":72.9}
3
+ {"index":15,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":61.01,"GeoreviewClassification (rus-Cyrl)":49.69,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":61.6,"KinopoiskClassification (rus-Cyrl)":56.59,"RuReviewsClassification (rus-Cyrl)":65.28,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.2,"RuSciBenchOECDClassification (rus-Cyrl)":43.91,"MassiveIntentClassification (rus-Cyrl)":65.76,"MassiveScenarioClassification (rus-Cyrl)":70.85}
4
+ {"index":0,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":60.46,"GeoreviewClassification (rus-Cyrl)":48.27,"HeadlineClassification (rus-Cyrl)":70.32,"InappropriatenessClassification (rus-Cyrl)":59.87,"KinopoiskClassification (rus-Cyrl)":58.23,"RuReviewsClassification (rus-Cyrl)":66.91,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.81,"RuSciBenchOECDClassification (rus-Cyrl)":42.57,"MassiveIntentClassification (rus-Cyrl)":68.75,"MassiveScenarioClassification (rus-Cyrl)":73.42}
5
+ {"index":10,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":59.88,"GeoreviewClassification (rus-Cyrl)":47.23,"HeadlineClassification (rus-Cyrl)":74.88,"InappropriatenessClassification (rus-Cyrl)":61.94,"KinopoiskClassification (rus-Cyrl)":55.69,"RuReviewsClassification (rus-Cyrl)":66.44,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.55,"RuSciBenchOECDClassification (rus-Cyrl)":43.28,"MassiveIntentClassification (rus-Cyrl)":65.57,"MassiveScenarioClassification (rus-Cyrl)":68.33}
6
+ {"index":23,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":59.23,"GeoreviewClassification (rus-Cyrl)":46.04,"HeadlineClassification (rus-Cyrl)":69.98,"InappropriatenessClassification (rus-Cyrl)":61.39,"KinopoiskClassification (rus-Cyrl)":53.59,"RuReviewsClassification (rus-Cyrl)":64.58,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.67,"RuSciBenchOECDClassification (rus-Cyrl)":43.58,"MassiveIntentClassification (rus-Cyrl)":66.08,"MassiveScenarioClassification (rus-Cyrl)":71.13}
7
+ {"index":14,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":58.26,"GeoreviewClassification (rus-Cyrl)":46.05,"HeadlineClassification (rus-Cyrl)":75.64,"InappropriatenessClassification (rus-Cyrl)":58.78,"KinopoiskClassification (rus-Cyrl)":50.89,"RuReviewsClassification (rus-Cyrl)":62.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.28,"RuSciBenchOECDClassification (rus-Cyrl)":42.69,"MassiveIntentClassification (rus-Cyrl)":62.78,"MassiveScenarioClassification (rus-Cyrl)":68.21}
8
+ {"index":5,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":57.52,"GeoreviewClassification (rus-Cyrl)":39.67,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":64.64,"KinopoiskClassification (rus-Cyrl)":50.33,"RuReviewsClassification (rus-Cyrl)":58.29,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.19,"RuSciBenchOECDClassification (rus-Cyrl)":43.8,"MassiveIntentClassification (rus-Cyrl)":61.42,"MassiveScenarioClassification (rus-Cyrl)":68.13}
9
+ {"index":6,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":57.24,"GeoreviewClassification (rus-Cyrl)":39.97,"HeadlineClassification (rus-Cyrl)":79.26,"InappropriatenessClassification (rus-Cyrl)":62.52,"KinopoiskClassification (rus-Cyrl)":49.51,"RuReviewsClassification (rus-Cyrl)":58.27,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.9,"RuSciBenchOECDClassification (rus-Cyrl)":43.04,"MassiveIntentClassification (rus-Cyrl)":61.09,"MassiveScenarioClassification (rus-Cyrl)":67.6}
10
+ {"index":22,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":56.88,"GeoreviewClassification (rus-Cyrl)":42.33,"HeadlineClassification (rus-Cyrl)":70.35,"InappropriatenessClassification (rus-Cyrl)":59.32,"KinopoiskClassification (rus-Cyrl)":44.31,"RuReviewsClassification (rus-Cyrl)":62.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.01,"RuSciBenchOECDClassification (rus-Cyrl)":44.14,"MassiveIntentClassification (rus-Cyrl)":63.23,"MassiveScenarioClassification (rus-Cyrl)":69.92}
11
+ {"index":16,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":56.44,"GeoreviewClassification (rus-Cyrl)":44.66,"HeadlineClassification (rus-Cyrl)":73.94,"InappropriatenessClassification (rus-Cyrl)":59.16,"KinopoiskClassification (rus-Cyrl)":49.96,"RuReviewsClassification (rus-Cyrl)":61.18,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.99,"RuSciBenchOECDClassification (rus-Cyrl)":41.72,"MassiveIntentClassification (rus-Cyrl)":58.43,"MassiveScenarioClassification (rus-Cyrl)":63.89}
12
+ {"index":12,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":56.18,"GeoreviewClassification (rus-Cyrl)":40.19,"HeadlineClassification (rus-Cyrl)":78.75,"InappropriatenessClassification (rus-Cyrl)":61.33,"KinopoiskClassification (rus-Cyrl)":48.78,"RuReviewsClassification (rus-Cyrl)":55.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.53,"RuSciBenchOECDClassification (rus-Cyrl)":41.34,"MassiveIntentClassification (rus-Cyrl)":61.32,"MassiveScenarioClassification (rus-Cyrl)":64.71}
13
+ {"index":1,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":55.15,"GeoreviewClassification (rus-Cyrl)":38.95,"HeadlineClassification (rus-Cyrl)":75.59,"InappropriatenessClassification (rus-Cyrl)":60.68,"KinopoiskClassification (rus-Cyrl)":49.67,"RuReviewsClassification (rus-Cyrl)":54.05,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.53,"RuSciBenchOECDClassification (rus-Cyrl)":37.65,"MassiveIntentClassification (rus-Cyrl)":63.12,"MassiveScenarioClassification (rus-Cyrl)":68.08}
14
+ {"index":24,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":55.01,"GeoreviewClassification (rus-Cyrl)":41.36,"HeadlineClassification (rus-Cyrl)":68.9,"InappropriatenessClassification (rus-Cyrl)":59.11,"KinopoiskClassification (rus-Cyrl)":50.47,"RuReviewsClassification (rus-Cyrl)":60.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.93,"RuSciBenchOECDClassification (rus-Cyrl)":40.79,"MassiveIntentClassification (rus-Cyrl)":57.98,"MassiveScenarioClassification (rus-Cyrl)":62.9}
15
+ {"index":7,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":54.98,"GeoreviewClassification (rus-Cyrl)":40.89,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.48,"KinopoiskClassification (rus-Cyrl)":49.85,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.8,"RuSciBenchOECDClassification (rus-Cyrl)":40.36,"MassiveIntentClassification (rus-Cyrl)":60.53,"MassiveScenarioClassification (rus-Cyrl)":65.15}
16
+ {"index":17,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":54.7,"GeoreviewClassification (rus-Cyrl)":40.86,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.52,"KinopoiskClassification (rus-Cyrl)":46.77,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.04,"RuSciBenchOECDClassification (rus-Cyrl)":40.48,"MassiveIntentClassification (rus-Cyrl)":60.64,"MassiveScenarioClassification (rus-Cyrl)":65.23}
17
+ {"index":21,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.77,"GeoreviewClassification (rus-Cyrl)":38.24,"HeadlineClassification (rus-Cyrl)":68.3,"InappropriatenessClassification (rus-Cyrl)":58.18,"KinopoiskClassification (rus-Cyrl)":41.45,"RuReviewsClassification (rus-Cyrl)":58.88,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.19,"RuSciBenchOECDClassification (rus-Cyrl)":41.41,"MassiveIntentClassification (rus-Cyrl)":59.06,"MassiveScenarioClassification (rus-Cyrl)":65.25}
18
+ {"index":9,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":52.17,"GeoreviewClassification (rus-Cyrl)":39.64,"HeadlineClassification (rus-Cyrl)":74.19,"InappropriatenessClassification (rus-Cyrl)":58.57,"KinopoiskClassification (rus-Cyrl)":49.06,"RuReviewsClassification (rus-Cyrl)":56.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":45.63,"RuSciBenchOECDClassification (rus-Cyrl)":35.48,"MassiveIntentClassification (rus-Cyrl)":50.83,"MassiveScenarioClassification (rus-Cyrl)":59.15}
19
+ {"index":2,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.6,"GeoreviewClassification (rus-Cyrl)":37.22,"HeadlineClassification (rus-Cyrl)":75.23,"InappropriatenessClassification (rus-Cyrl)":57.34,"KinopoiskClassification (rus-Cyrl)":49.91,"RuReviewsClassification (rus-Cyrl)":50.74,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.03,"RuSciBenchOECDClassification (rus-Cyrl)":36.13,"MassiveIntentClassification (rus-Cyrl)":53.02,"MassiveScenarioClassification (rus-Cyrl)":56.79}
20
+ {"index":3,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.27,"GeoreviewClassification (rus-Cyrl)":38.05,"HeadlineClassification (rus-Cyrl)":67.64,"InappropriatenessClassification (rus-Cyrl)":58.27,"KinopoiskClassification (rus-Cyrl)":45.86,"RuReviewsClassification (rus-Cyrl)":58.34,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.18,"RuSciBenchOECDClassification (rus-Cyrl)":40.11,"MassiveIntentClassification (rus-Cyrl)":49.1,"MassiveScenarioClassification (rus-Cyrl)":51.91}
21
+ {"index":8,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":44.55,"GeoreviewClassification (rus-Cyrl)":33.45,"HeadlineClassification (rus-Cyrl)":57.65,"InappropriatenessClassification (rus-Cyrl)":54.5,"KinopoiskClassification (rus-Cyrl)":41.36,"RuReviewsClassification (rus-Cyrl)":49.56,"RuSciBenchGRNTIClassification (rus-Cyrl)":35.71,"RuSciBenchOECDClassification (rus-Cyrl)":26.51,"MassiveIntentClassification (rus-Cyrl)":50.1,"MassiveScenarioClassification (rus-Cyrl)":52.15}
22
+ {"index":20,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":28.82,"GeoreviewClassification (rus-Cyrl)":25.93,"HeadlineClassification (rus-Cyrl)":28.53,"InappropriatenessClassification (rus-Cyrl)":51.82,"KinopoiskClassification (rus-Cyrl)":34.18,"RuReviewsClassification (rus-Cyrl)":42.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":13.29,"RuSciBenchOECDClassification (rus-Cyrl)":10.62,"MassiveIntentClassification (rus-Cyrl)":23.98,"MassiveScenarioClassification (rus-Cyrl)":28.71}
23
+ {"index":19,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.75,"GeoreviewClassification (rus-Cyrl)":27.08,"HeadlineClassification (rus-Cyrl)":27.77,"InappropriatenessClassification (rus-Cyrl)":51.73,"KinopoiskClassification (rus-Cyrl)":33.93,"RuReviewsClassification (rus-Cyrl)":41.79,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.08,"RuSciBenchOECDClassification (rus-Cyrl)":8.3,"MassiveIntentClassification (rus-Cyrl)":27.58,"MassiveScenarioClassification (rus-Cyrl)":30.46}
24
+ {"index":18,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":28.15,"GeoreviewClassification (rus-Cyrl)":23.49,"HeadlineClassification (rus-Cyrl)":28.49,"InappropriatenessClassification (rus-Cyrl)":50.85,"KinopoiskClassification (rus-Cyrl)":34.17,"RuReviewsClassification (rus-Cyrl)":42.49,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.49,"RuSciBenchOECDClassification (rus-Cyrl)":8.31,"MassiveIntentClassification (rus-Cyrl)":26.29,"MassiveScenarioClassification (rus-Cyrl)":28.77}
25
+ {"index":4,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","GeoreviewClassification (rus-Cyrl)":49.7,"HeadlineClassification (rus-Cyrl)":78.0,"InappropriatenessClassification (rus-Cyrl)":61.32,"KinopoiskClassification (rus-Cyrl)":63.27,"RuReviewsClassification (rus-Cyrl)":67.96,"RuSciBenchGRNTIClassification (rus-Cyrl)":59.33,"RuSciBenchOECDClassification (rus-Cyrl)":46.33,"MassiveIntentClassification (rus-Cyrl)":"","MassiveScenarioClassification (rus-Cyrl)":""}
all_data_tasks/34/default.jsonl CHANGED
@@ -11,8 +11,8 @@
11
  {"index":14,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":50.27,"GeoreviewClusteringP2P (rus-Cyrl)":54.46,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.56,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.78}
12
  {"index":24,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":49.57,"GeoreviewClusteringP2P (rus-Cyrl)":59.71,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.55,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.44}
13
  {"index":22,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":49.18,"GeoreviewClusteringP2P (rus-Cyrl)":56.18,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.47,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":42.9}
14
- {"index":17,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":52.19,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":49.09,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.97}
15
- {"index":21,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":53.35,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.22,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.68}
16
  {"index":7,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":46.84,"GeoreviewClusteringP2P (rus-Cyrl)":51.89,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.48,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.16}
17
  {"index":3,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":43.13,"GeoreviewClusteringP2P (rus-Cyrl)":41.82,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":46.29,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.28}
18
  {"index":12,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":42.92,"GeoreviewClusteringP2P (rus-Cyrl)":58.79,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":36.66,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":33.31}
 
11
  {"index":14,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":50.27,"GeoreviewClusteringP2P (rus-Cyrl)":54.46,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.56,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.78}
12
  {"index":24,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":49.57,"GeoreviewClusteringP2P (rus-Cyrl)":59.71,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.55,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.44}
13
  {"index":22,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":49.18,"GeoreviewClusteringP2P (rus-Cyrl)":56.18,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.47,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":42.9}
14
+ {"index":21,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":53.35,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.22,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.68}
15
+ {"index":17,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":52.19,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":49.09,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.97}
16
  {"index":7,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":46.84,"GeoreviewClusteringP2P (rus-Cyrl)":51.89,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.48,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.16}
17
  {"index":3,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":43.13,"GeoreviewClusteringP2P (rus-Cyrl)":41.82,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":46.29,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.28}
18
  {"index":12,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":42.92,"GeoreviewClusteringP2P (rus-Cyrl)":58.79,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":36.66,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":33.31}
all_data_tasks/36/default.jsonl CHANGED
@@ -1,25 +1,25 @@
1
- {"index":15,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"RuBQReranking (rus-Cyrl)":75.58}
2
- {"index":13,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"RuBQReranking (rus-Cyrl)":74.61}
3
- {"index":0,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"RuBQReranking (rus-Cyrl)":74.02}
4
- {"index":11,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"RuBQReranking (rus-Cyrl)":73.08}
5
- {"index":14,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":72.01}
6
- {"index":16,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":71.46}
7
- {"index":4,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"RuBQReranking (rus-Cyrl)":70.87}
8
- {"index":23,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":68.65}
9
- {"index":10,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":64.42}
10
- {"index":24,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":62.15}
11
- {"index":22,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":58.77}
12
- {"index":5,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":56.13}
13
- {"index":17,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"RuBQReranking (rus-Cyrl)":55.13}
14
- {"index":7,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":54.83}
15
- {"index":21,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":52.8}
16
- {"index":6,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":46.81}
17
- {"index":9,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":46.09}
18
- {"index":1,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"RuBQReranking (rus-Cyrl)":42.58}
19
- {"index":2,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":41.65}
20
- {"index":3,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":39.89}
21
- {"index":18,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"RuBQReranking (rus-Cyrl)":38.51}
22
- {"index":8,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"RuBQReranking (rus-Cyrl)":35.44}
23
- {"index":12,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":34.01}
24
- {"index":20,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"RuBQReranking (rus-Cyrl)":30.96}
25
- {"index":19,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"RuBQReranking (rus-Cyrl)":27.05}
 
1
+ {"index":0,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":69.7,"RuBQReranking (rus-Cyrl)":74.02,"MIRACLReranking (rus-Cyrl)":65.38}
2
+ {"index":15,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":69.64,"RuBQReranking (rus-Cyrl)":75.58,"MIRACLReranking (rus-Cyrl)":63.71}
3
+ {"index":14,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":66.24,"RuBQReranking (rus-Cyrl)":72.01,"MIRACLReranking (rus-Cyrl)":60.47}
4
+ {"index":16,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.29,"RuBQReranking (rus-Cyrl)":71.46,"MIRACLReranking (rus-Cyrl)":59.12}
5
+ {"index":5,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":40.56,"RuBQReranking (rus-Cyrl)":56.13,"MIRACLReranking (rus-Cyrl)":24.99}
6
+ {"index":6,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":32.8,"RuBQReranking (rus-Cyrl)":46.81,"MIRACLReranking (rus-Cyrl)":18.8}
7
+ {"index":9,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":30.95,"RuBQReranking (rus-Cyrl)":46.09,"MIRACLReranking (rus-Cyrl)":15.81}
8
+ {"index":1,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":"","RuBQReranking (rus-Cyrl)":42.58,"MIRACLReranking (rus-Cyrl)":""}
9
+ {"index":2,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RuBQReranking (rus-Cyrl)":41.65,"MIRACLReranking (rus-Cyrl)":""}
10
+ {"index":3,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RuBQReranking (rus-Cyrl)":39.89,"MIRACLReranking (rus-Cyrl)":""}
11
+ {"index":4,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","RuBQReranking (rus-Cyrl)":70.87,"MIRACLReranking (rus-Cyrl)":""}
12
+ {"index":7,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","RuBQReranking (rus-Cyrl)":54.83,"MIRACLReranking (rus-Cyrl)":""}
13
+ {"index":8,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":"","RuBQReranking (rus-Cyrl)":35.44,"MIRACLReranking (rus-Cyrl)":""}
14
+ {"index":10,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RuBQReranking (rus-Cyrl)":64.42,"MIRACLReranking (rus-Cyrl)":""}
15
+ {"index":11,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":"","RuBQReranking (rus-Cyrl)":73.08,"MIRACLReranking (rus-Cyrl)":""}
16
+ {"index":12,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RuBQReranking (rus-Cyrl)":34.01,"MIRACLReranking (rus-Cyrl)":""}
17
+ {"index":13,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","RuBQReranking (rus-Cyrl)":74.61,"MIRACLReranking (rus-Cyrl)":""}
18
+ {"index":17,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","RuBQReranking (rus-Cyrl)":55.13,"MIRACLReranking (rus-Cyrl)":""}
19
+ {"index":18,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RuBQReranking (rus-Cyrl)":38.51,"MIRACLReranking (rus-Cyrl)":""}
20
+ {"index":19,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","RuBQReranking (rus-Cyrl)":27.05,"MIRACLReranking (rus-Cyrl)":""}
21
+ {"index":20,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RuBQReranking (rus-Cyrl)":30.96,"MIRACLReranking (rus-Cyrl)":""}
22
+ {"index":21,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","RuBQReranking (rus-Cyrl)":52.8,"MIRACLReranking (rus-Cyrl)":""}
23
+ {"index":22,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","RuBQReranking (rus-Cyrl)":58.77,"MIRACLReranking (rus-Cyrl)":""}
24
+ {"index":23,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":"","RuBQReranking (rus-Cyrl)":68.65,"MIRACLReranking (rus-Cyrl)":""}
25
+ {"index":24,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","RuBQReranking (rus-Cyrl)":62.15,"MIRACLReranking (rus-Cyrl)":""}
all_data_tasks/37/default.jsonl CHANGED
@@ -1,25 +1,25 @@
1
- {"index":13,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":77.96,"RiaNewsRetrieval (rus-Cyrl)":81.94,"RuBQRetrieval (rus-Cyrl)":73.98}
2
- {"index":15,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.39,"RiaNewsRetrieval (rus-Cyrl)":80.67,"RuBQRetrieval (rus-Cyrl)":74.11}
3
- {"index":0,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":77.1,"RiaNewsRetrieval (rus-Cyrl)":82.98,"RuBQRetrieval (rus-Cyrl)":71.21}
4
- {"index":11,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":76.78,"RiaNewsRetrieval (rus-Cyrl)":83.53,"RuBQRetrieval (rus-Cyrl)":70.03}
5
- {"index":4,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":72.82,"RiaNewsRetrieval (rus-Cyrl)":78.86,"RuBQRetrieval (rus-Cyrl)":66.77}
6
- {"index":14,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":69.91,"RiaNewsRetrieval (rus-Cyrl)":70.24,"RuBQRetrieval (rus-Cyrl)":69.58}
7
- {"index":16,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":69.27,"RiaNewsRetrieval (rus-Cyrl)":70.01,"RuBQRetrieval (rus-Cyrl)":68.53}
8
- {"index":23,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":67.54,"RiaNewsRetrieval (rus-Cyrl)":69.36,"RuBQRetrieval (rus-Cyrl)":65.71}
9
- {"index":10,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":67.34,"RiaNewsRetrieval (rus-Cyrl)":77.83,"RuBQRetrieval (rus-Cyrl)":56.86}
10
- {"index":24,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.5,"RiaNewsRetrieval (rus-Cyrl)":51.27,"RuBQRetrieval (rus-Cyrl)":51.73}
11
- {"index":22,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.4,"RiaNewsRetrieval (rus-Cyrl)":51.75,"RuBQRetrieval (rus-Cyrl)":37.04}
12
- {"index":21,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.26,"RiaNewsRetrieval (rus-Cyrl)":44.82,"RuBQRetrieval (rus-Cyrl)":29.7}
13
- {"index":17,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":36.38,"RiaNewsRetrieval (rus-Cyrl)":42.75,"RuBQRetrieval (rus-Cyrl)":30.02}
14
- {"index":7,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":31.88,"RiaNewsRetrieval (rus-Cyrl)":34.73,"RuBQRetrieval (rus-Cyrl)":29.03}
15
- {"index":5,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":25.6,"RiaNewsRetrieval (rus-Cyrl)":21.4,"RuBQRetrieval (rus-Cyrl)":29.8}
16
- {"index":9,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":12.4,"RiaNewsRetrieval (rus-Cyrl)":13.92,"RuBQRetrieval (rus-Cyrl)":10.87}
17
- {"index":6,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":11.78,"RiaNewsRetrieval (rus-Cyrl)":11.11,"RuBQRetrieval (rus-Cyrl)":12.45}
18
- {"index":3,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":9.68,"RiaNewsRetrieval (rus-Cyrl)":6.72,"RuBQRetrieval (rus-Cyrl)":12.63}
19
- {"index":2,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":7.55,"RiaNewsRetrieval (rus-Cyrl)":5.58,"RuBQRetrieval (rus-Cyrl)":9.52}
20
- {"index":12,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":7.5,"RiaNewsRetrieval (rus-Cyrl)":4.84,"RuBQRetrieval (rus-Cyrl)":10.15}
21
- {"index":1,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":7.37,"RiaNewsRetrieval (rus-Cyrl)":4.14,"RuBQRetrieval (rus-Cyrl)":10.6}
22
- {"index":8,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":2.02,"RiaNewsRetrieval (rus-Cyrl)":0.79,"RuBQRetrieval (rus-Cyrl)":3.24}
23
- {"index":19,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":1.66,"RiaNewsRetrieval (rus-Cyrl)":0.67,"RuBQRetrieval (rus-Cyrl)":2.64}
24
- {"index":18,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":8.84}
25
- {"index":20,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":4.75}
 
1
+ {"index":0,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":74.77,"RiaNewsRetrieval (rus-Cyrl)":82.98,"RuBQRetrieval (rus-Cyrl)":71.21,"MIRACLRetrieval (rus-Cyrl)":70.11}
2
+ {"index":15,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":74.04,"RiaNewsRetrieval (rus-Cyrl)":80.67,"RuBQRetrieval (rus-Cyrl)":74.11,"MIRACLRetrieval (rus-Cyrl)":67.33}
3
+ {"index":14,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":67.14,"RiaNewsRetrieval (rus-Cyrl)":70.24,"RuBQRetrieval (rus-Cyrl)":69.58,"MIRACLRetrieval (rus-Cyrl)":61.6}
4
+ {"index":16,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.85,"RiaNewsRetrieval (rus-Cyrl)":70.01,"RuBQRetrieval (rus-Cyrl)":68.53,"MIRACLRetrieval (rus-Cyrl)":59.01}
5
+ {"index":5,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":19.13,"RiaNewsRetrieval (rus-Cyrl)":21.4,"RuBQRetrieval (rus-Cyrl)":29.8,"MIRACLRetrieval (rus-Cyrl)":6.2}
6
+ {"index":9,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":8.89,"RiaNewsRetrieval (rus-Cyrl)":13.92,"RuBQRetrieval (rus-Cyrl)":10.87,"MIRACLRetrieval (rus-Cyrl)":1.89}
7
+ {"index":6,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":8.51,"RiaNewsRetrieval (rus-Cyrl)":11.11,"RuBQRetrieval (rus-Cyrl)":12.45,"MIRACLRetrieval (rus-Cyrl)":1.98}
8
+ {"index":19,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":1.23,"RiaNewsRetrieval (rus-Cyrl)":0.67,"RuBQRetrieval (rus-Cyrl)":2.64,"MIRACLRetrieval (rus-Cyrl)":0.39}
9
+ {"index":1,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":"","RiaNewsRetrieval (rus-Cyrl)":4.14,"RuBQRetrieval (rus-Cyrl)":10.6,"MIRACLRetrieval (rus-Cyrl)":""}
10
+ {"index":2,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RiaNewsRetrieval (rus-Cyrl)":5.58,"RuBQRetrieval (rus-Cyrl)":9.52,"MIRACLRetrieval (rus-Cyrl)":""}
11
+ {"index":3,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RiaNewsRetrieval (rus-Cyrl)":6.72,"RuBQRetrieval (rus-Cyrl)":12.63,"MIRACLRetrieval (rus-Cyrl)":""}
12
+ {"index":4,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","RiaNewsRetrieval (rus-Cyrl)":78.86,"RuBQRetrieval (rus-Cyrl)":66.77,"MIRACLRetrieval (rus-Cyrl)":""}
13
+ {"index":7,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","RiaNewsRetrieval (rus-Cyrl)":34.73,"RuBQRetrieval (rus-Cyrl)":29.03,"MIRACLRetrieval (rus-Cyrl)":""}
14
+ {"index":8,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":"","RiaNewsRetrieval (rus-Cyrl)":0.79,"RuBQRetrieval (rus-Cyrl)":3.24,"MIRACLRetrieval (rus-Cyrl)":""}
15
+ {"index":10,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RiaNewsRetrieval (rus-Cyrl)":77.83,"RuBQRetrieval (rus-Cyrl)":56.86,"MIRACLRetrieval (rus-Cyrl)":""}
16
+ {"index":11,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":"","RiaNewsRetrieval (rus-Cyrl)":83.53,"RuBQRetrieval (rus-Cyrl)":70.03,"MIRACLRetrieval (rus-Cyrl)":""}
17
+ {"index":12,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RiaNewsRetrieval (rus-Cyrl)":4.84,"RuBQRetrieval (rus-Cyrl)":10.15,"MIRACLRetrieval (rus-Cyrl)":""}
18
+ {"index":13,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","RiaNewsRetrieval (rus-Cyrl)":81.94,"RuBQRetrieval (rus-Cyrl)":73.98,"MIRACLRetrieval (rus-Cyrl)":""}
19
+ {"index":17,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","RiaNewsRetrieval (rus-Cyrl)":42.75,"RuBQRetrieval (rus-Cyrl)":30.02,"MIRACLRetrieval (rus-Cyrl)":""}
20
+ {"index":18,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":8.84,"MIRACLRetrieval (rus-Cyrl)":""}
21
+ {"index":20,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":4.75,"MIRACLRetrieval (rus-Cyrl)":""}
22
+ {"index":21,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","RiaNewsRetrieval (rus-Cyrl)":44.82,"RuBQRetrieval (rus-Cyrl)":29.7,"MIRACLRetrieval (rus-Cyrl)":""}
23
+ {"index":22,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","RiaNewsRetrieval (rus-Cyrl)":51.75,"RuBQRetrieval (rus-Cyrl)":37.04,"MIRACLRetrieval (rus-Cyrl)":""}
24
+ {"index":23,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":"","RiaNewsRetrieval (rus-Cyrl)":69.36,"RuBQRetrieval (rus-Cyrl)":65.71,"MIRACLRetrieval (rus-Cyrl)":""}
25
+ {"index":24,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","RiaNewsRetrieval (rus-Cyrl)":51.27,"RuBQRetrieval (rus-Cyrl)":51.73,"MIRACLRetrieval (rus-Cyrl)":""}
all_data_tasks/38/default.jsonl CHANGED
@@ -1,25 +1,25 @@
1
- {"index":13,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":80.15,"RUParaPhraserSTS (rus-Cyrl)":76.17,"RuSTSBenchmarkSTS (rus-Cyrl)":84.13}
2
- {"index":11,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":79.85,"RUParaPhraserSTS (rus-Cyrl)":76.36,"RuSTSBenchmarkSTS (rus-Cyrl)":83.35}
3
- {"index":10,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":77.91,"RUParaPhraserSTS (rus-Cyrl)":73.56,"RuSTSBenchmarkSTS (rus-Cyrl)":82.26}
4
- {"index":15,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.48,"RUParaPhraserSTS (rus-Cyrl)":71.82,"RuSTSBenchmarkSTS (rus-Cyrl)":83.15}
5
- {"index":4,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":77.42,"RUParaPhraserSTS (rus-Cyrl)":76.16,"RuSTSBenchmarkSTS (rus-Cyrl)":78.69}
6
- {"index":0,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":77.39,"RUParaPhraserSTS (rus-Cyrl)":74.9,"RuSTSBenchmarkSTS (rus-Cyrl)":79.87}
7
- {"index":23,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":77.37,"RUParaPhraserSTS (rus-Cyrl)":72.97,"RuSTSBenchmarkSTS (rus-Cyrl)":81.77}
8
- {"index":24,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":75.32,"RUParaPhraserSTS (rus-Cyrl)":72.15,"RuSTSBenchmarkSTS (rus-Cyrl)":78.48}
9
- {"index":14,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.9,"RUParaPhraserSTS (rus-Cyrl)":70.17,"RuSTSBenchmarkSTS (rus-Cyrl)":79.64}
10
- {"index":16,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.27,"RUParaPhraserSTS (rus-Cyrl)":70.46,"RuSTSBenchmarkSTS (rus-Cyrl)":78.08}
11
- {"index":22,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.1,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":82.46}
12
- {"index":21,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":70.71,"RUParaPhraserSTS (rus-Cyrl)":61.87,"RuSTSBenchmarkSTS (rus-Cyrl)":79.55}
13
- {"index":7,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":69.6,"RUParaPhraserSTS (rus-Cyrl)":65.87,"RuSTSBenchmarkSTS (rus-Cyrl)":73.32}
14
- {"index":17,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":69.54,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":73.34}
15
- {"index":5,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":68.19,"RUParaPhraserSTS (rus-Cyrl)":65.17,"RuSTSBenchmarkSTS (rus-Cyrl)":71.22}
16
- {"index":9,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":67.28,"RUParaPhraserSTS (rus-Cyrl)":65.14,"RuSTSBenchmarkSTS (rus-Cyrl)":69.43}
17
- {"index":3,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":66.13,"RUParaPhraserSTS (rus-Cyrl)":66.24,"RuSTSBenchmarkSTS (rus-Cyrl)":66.03}
18
- {"index":6,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":60.44,"RUParaPhraserSTS (rus-Cyrl)":62.06,"RuSTSBenchmarkSTS (rus-Cyrl)":58.82}
19
- {"index":1,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":58.36,"RUParaPhraserSTS (rus-Cyrl)":55.01,"RuSTSBenchmarkSTS (rus-Cyrl)":61.72}
20
- {"index":12,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":56.25,"RUParaPhraserSTS (rus-Cyrl)":54.03,"RuSTSBenchmarkSTS (rus-Cyrl)":58.47}
21
- {"index":8,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":55.78,"RUParaPhraserSTS (rus-Cyrl)":53.41,"RuSTSBenchmarkSTS (rus-Cyrl)":58.16}
22
- {"index":2,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.84,"RUParaPhraserSTS (rus-Cyrl)":49.72,"RuSTSBenchmarkSTS (rus-Cyrl)":53.95}
23
- {"index":18,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":50.9,"RUParaPhraserSTS (rus-Cyrl)":45.47,"RuSTSBenchmarkSTS (rus-Cyrl)":56.33}
24
- {"index":19,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":49.74,"RUParaPhraserSTS (rus-Cyrl)":43.93,"RuSTSBenchmarkSTS (rus-Cyrl)":55.56}
25
- {"index":20,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":48.92,"RUParaPhraserSTS (rus-Cyrl)":42.15,"RuSTSBenchmarkSTS (rus-Cyrl)":55.68}
 
1
+ {"index":11,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":75.38,"RUParaPhraserSTS (rus-Cyrl)":76.36,"RuSTSBenchmarkSTS (rus-Cyrl)":83.35,"STS22 (rus-Cyrl)":66.42}
2
+ {"index":0,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":73.68,"RUParaPhraserSTS (rus-Cyrl)":74.9,"RuSTSBenchmarkSTS (rus-Cyrl)":79.87,"STS22 (rus-Cyrl)":66.26}
3
+ {"index":10,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":73.07,"RUParaPhraserSTS (rus-Cyrl)":73.56,"RuSTSBenchmarkSTS (rus-Cyrl)":82.26,"STS22 (rus-Cyrl)":63.39}
4
+ {"index":23,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":72.54,"RUParaPhraserSTS (rus-Cyrl)":72.97,"RuSTSBenchmarkSTS (rus-Cyrl)":81.77,"STS22 (rus-Cyrl)":62.89}
5
+ {"index":15,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":71.62,"RUParaPhraserSTS (rus-Cyrl)":71.82,"RuSTSBenchmarkSTS (rus-Cyrl)":83.15,"STS22 (rus-Cyrl)":59.89}
6
+ {"index":24,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":70.23,"RUParaPhraserSTS (rus-Cyrl)":72.15,"RuSTSBenchmarkSTS (rus-Cyrl)":78.48,"STS22 (rus-Cyrl)":60.06}
7
+ {"index":14,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":70.16,"RUParaPhraserSTS (rus-Cyrl)":70.17,"RuSTSBenchmarkSTS (rus-Cyrl)":79.64,"STS22 (rus-Cyrl)":60.67}
8
+ {"index":16,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":69.48,"RUParaPhraserSTS (rus-Cyrl)":70.46,"RuSTSBenchmarkSTS (rus-Cyrl)":78.08,"STS22 (rus-Cyrl)":59.9}
9
+ {"index":22,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":68.98,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":82.46,"STS22 (rus-Cyrl)":58.74}
10
+ {"index":21,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":66.17,"RUParaPhraserSTS (rus-Cyrl)":61.87,"RuSTSBenchmarkSTS (rus-Cyrl)":79.55,"STS22 (rus-Cyrl)":57.08}
11
+ {"index":7,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":65.91,"RUParaPhraserSTS (rus-Cyrl)":65.87,"RuSTSBenchmarkSTS (rus-Cyrl)":73.32,"STS22 (rus-Cyrl)":58.53}
12
+ {"index":17,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":65.52,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":73.34,"STS22 (rus-Cyrl)":57.49}
13
+ {"index":5,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":64.4,"RUParaPhraserSTS (rus-Cyrl)":65.17,"RuSTSBenchmarkSTS (rus-Cyrl)":71.22,"STS22 (rus-Cyrl)":56.82}
14
+ {"index":9,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":61.6,"RUParaPhraserSTS (rus-Cyrl)":65.14,"RuSTSBenchmarkSTS (rus-Cyrl)":69.43,"STS22 (rus-Cyrl)":50.23}
15
+ {"index":3,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":61.18,"RUParaPhraserSTS (rus-Cyrl)":66.24,"RuSTSBenchmarkSTS (rus-Cyrl)":66.03,"STS22 (rus-Cyrl)":51.27}
16
+ {"index":6,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":57.21,"RUParaPhraserSTS (rus-Cyrl)":62.06,"RuSTSBenchmarkSTS (rus-Cyrl)":58.82,"STS22 (rus-Cyrl)":50.75}
17
+ {"index":1,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":56.2,"RUParaPhraserSTS (rus-Cyrl)":55.01,"RuSTSBenchmarkSTS (rus-Cyrl)":61.72,"STS22 (rus-Cyrl)":51.87}
18
+ {"index":12,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":53.39,"RUParaPhraserSTS (rus-Cyrl)":54.03,"RuSTSBenchmarkSTS (rus-Cyrl)":58.47,"STS22 (rus-Cyrl)":47.67}
19
+ {"index":8,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":53.15,"RUParaPhraserSTS (rus-Cyrl)":53.41,"RuSTSBenchmarkSTS (rus-Cyrl)":58.16,"STS22 (rus-Cyrl)":47.88}
20
+ {"index":2,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":46.22,"RUParaPhraserSTS (rus-Cyrl)":49.72,"RuSTSBenchmarkSTS (rus-Cyrl)":53.95,"STS22 (rus-Cyrl)":34.98}
21
+ {"index":19,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":38.07,"RUParaPhraserSTS (rus-Cyrl)":43.93,"RuSTSBenchmarkSTS (rus-Cyrl)":55.56,"STS22 (rus-Cyrl)":14.72}
22
+ {"index":20,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":37.89,"RUParaPhraserSTS (rus-Cyrl)":42.15,"RuSTSBenchmarkSTS (rus-Cyrl)":55.68,"STS22 (rus-Cyrl)":15.83}
23
+ {"index":18,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":37.66,"RUParaPhraserSTS (rus-Cyrl)":45.47,"RuSTSBenchmarkSTS (rus-Cyrl)":56.33,"STS22 (rus-Cyrl)":11.19}
24
+ {"index":4,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","RUParaPhraserSTS (rus-Cyrl)":76.16,"RuSTSBenchmarkSTS (rus-Cyrl)":78.69,"STS22 (rus-Cyrl)":""}
25
+ {"index":13,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","RUParaPhraserSTS (rus-Cyrl)":76.17,"RuSTSBenchmarkSTS (rus-Cyrl)":84.13,"STS22 (rus-Cyrl)":""}
all_data_tasks/39/default.jsonl CHANGED
@@ -1,52 +1,25 @@
1
- {"index":15,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/NbAiLab\/nb-bert-large\">nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":66.73,"MassiveIntentClassification (nb)":62.68,"MassiveScenarioClassification (nb)":67.44,"NoRecClassification":55.46,"NordicLangClassification":85.27,"NorwegianParliament":62.58,"ScalaNbClassification":66.97}
2
- {"index":14,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/NbAiLab\/nb-bert-base\">nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":63.94,"MassiveIntentClassification (nb)":60.67,"MassiveScenarioClassification (nb)":67.31,"NoRecClassification":51.32,"NordicLangClassification":84.69,"NorwegianParliament":57.41,"ScalaNbClassification":62.25}
3
- {"index":26,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":63.64,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":62.76,"NordicLangClassification":82.29,"NorwegianParliament":60.36,"ScalaNbClassification":50.44}
4
- {"index":32,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ltg\/norbert3-base\">norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":61.75,"MassiveIntentClassification (nb)":54.2,"MassiveScenarioClassification (nb)":60.69,"NoRecClassification":53.4,"NordicLangClassification":82.67,"NorwegianParliament":59.33,"ScalaNbClassification":60.19}
5
- {"index":25,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":61.63,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":57.58,"NordicLangClassification":75.94,"NorwegianParliament":59.94,"ScalaNbClassification":50.32}
6
- {"index":33,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ltg\/norbert3-large\">norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":60.34,"MassiveIntentClassification (nb)":47.42,"MassiveScenarioClassification (nb)":54.25,"NoRecClassification":50.46,"NordicLangClassification":84.25,"NorwegianParliament":58.85,"ScalaNbClassification":66.79}
7
- {"index":28,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":58.86,"MassiveIntentClassification (nb)":53.96,"MassiveScenarioClassification (nb)":59.9,"NoRecClassification":53.96,"NordicLangClassification":75.15,"NorwegianParliament":60.15,"ScalaNbClassification":50.06}
8
- {"index":19,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/chcaa\/dfm-encoder-large-v1\">dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":58.46,"MassiveIntentClassification (nb)":52.49,"MassiveScenarioClassification (nb)":54.59,"NoRecClassification":48.3,"NordicLangClassification":77.68,"NorwegianParliament":58.78,"ScalaNbClassification":58.95}
9
- {"index":50,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/vesteinn\/DanskBERT\">DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":55.0,"MassiveIntentClassification (nb)":45.38,"MassiveScenarioClassification (nb)":47.55,"NoRecClassification":46.06,"NordicLangClassification":74.25,"NorwegianParliament":56.79,"ScalaNbClassification":59.99}
10
- {"index":20,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/facebook\/SONAR\">SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.92,"MassiveIntentClassification (nb)":59.9,"MassiveScenarioClassification (nb)":65.81,"NoRecClassification":48.25,"NordicLangClassification":48.4,"NorwegianParliament":55.99,"ScalaNbClassification":51.18}
11
- {"index":51,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/xlm-roberta-base\">xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":54.34,"MassiveIntentClassification (nb)":40.46,"MassiveScenarioClassification (nb)":44.83,"NoRecClassification":46.28,"NordicLangClassification":79.39,"NorwegianParliament":56.75,"ScalaNbClassification":58.33}
12
- {"index":21,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-base\">e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":50.14,"MassiveIntentClassification (nb)":41.57,"MassiveScenarioClassification (nb)":50.33,"NoRecClassification":42.0,"NordicLangClassification":59.34,"NorwegianParliament":57.42,"ScalaNbClassification":50.18}
13
- {"index":22,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-large\">e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":50.01,"MassiveIntentClassification (nb)":40.63,"MassiveScenarioClassification (nb)":51.91,"NoRecClassification":41.83,"NordicLangClassification":58.3,"NorwegianParliament":57.26,"ScalaNbClassification":50.13}
14
- {"index":8,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/KBLab\/sentence-bert-swedish-cased\">sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.88,"MassiveIntentClassification (nb)":42.74,"MassiveScenarioClassification (nb)":49.49,"NoRecClassification":43.53,"NordicLangClassification":51.45,"NorwegianParliament":55.74,"ScalaNbClassification":50.34}
15
- {"index":24,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-small\">e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":48.46,"MassiveIntentClassification (nb)":40.25,"MassiveScenarioClassification (nb)":48.58,"NoRecClassification":41.84,"NordicLangClassification":53.47,"NorwegianParliament":56.57,"ScalaNbClassification":50.03}
16
- {"index":6,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/KB\/bert-base-swedish-cased\">bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.18,"MassiveIntentClassification (nb)":35.75,"MassiveScenarioClassification (nb)":35.76,"NoRecClassification":43.91,"NordicLangClassification":62.45,"NorwegianParliament":57.56,"ScalaNbClassification":53.63}
17
- {"index":36,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":39.34,"MassiveScenarioClassification (nb)":44.67,"NoRecClassification":40.02,"NordicLangClassification":54.71,"NorwegianParliament":54.8,"ScalaNbClassification":50.17}
18
- {"index":31,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/jonfd\/electra-small-nordic\">electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":24.6,"MassiveScenarioClassification (nb)":27.3,"NoRecClassification":45.44,"NordicLangClassification":57.82,"NorwegianParliament":53.25,"ScalaNbClassification":75.28}
19
- {"index":7,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/KBLab\/electra-small-swedish-cased-discriminator\">electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":34.34,"MassiveIntentClassification (nb)":5.66,"MassiveScenarioClassification (nb)":11.26,"NoRecClassification":39.72,"NordicLangClassification":44.53,"NorwegianParliament":52.44,"ScalaNbClassification":52.41}
20
- {"index":0,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/github.com\/facebookresearch\/LASER\">LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","MassiveIntentClassification (nb)":37.74,"MassiveScenarioClassification (nb)":43.9,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
21
- {"index":1,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Alibaba-NLP\/gte-multilingual-base\">gte-multilingual-base<\/a>","Model Size (Million Parameters)":305,"Memory Usage (GB, fp32)":1.14,"Average":"","MassiveIntentClassification (nb)":63.74,"MassiveScenarioClassification (nb)":71.5,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
22
- {"index":2,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ClayAtlas\/winberta-base\">winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":36.89,"MassiveScenarioClassification (nb)":44.27,"NoRecClassification":43.53,"NordicLangClassification":"","NorwegianParliament":54.9,"ScalaNbClassification":""}
23
- {"index":3,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ClayAtlas\/winberta-large\">winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":28.65,"MassiveScenarioClassification (nb)":35.24,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
24
- {"index":4,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ClayAtlas\/windberta-large\">windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":31.07,"MassiveScenarioClassification (nb)":38.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
25
- {"index":5,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Jechto\/e5-dansk-test-0.1\">e5-dansk-test-0.1<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":61.0,"NorwegianParliament":"","ScalaNbClassification":""}
26
- {"index":9,"Rank":26,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/KeyurRamoliya\/multilingual-e5-large-GGUF\">multilingual-e5-large-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":69.88,"MassiveScenarioClassification (nb)":74.84,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
27
- {"index":10,"Rank":27,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/KeyurRamoliya\/multilingual-e5-large-instruct-GGUF\">multilingual-e5-large-instruct-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":71.66,"MassiveScenarioClassification (nb)":77.21,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
28
- {"index":11,"Rank":28,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Labib11\/MUG-B-1.6\">MUG-B-1.6<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","MassiveIntentClassification (nb)":39.67,"MassiveScenarioClassification (nb)":50.89,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
29
- {"index":12,"Rank":29,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Muennighoff\/SGPT-125M-weightedmean-msmarco-specb-bitfit\">SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","MassiveIntentClassification (nb)":39.82,"MassiveScenarioClassification (nb)":39.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
30
- {"index":13,"Rank":30,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Muennighoff\/SGPT-125M-weightedmean-nli-bitfit\">SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","MassiveIntentClassification (nb)":39.48,"MassiveScenarioClassification (nb)":40.47,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
31
- {"index":16,"Rank":31,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/amazon\/Titan-text-embeddings-v2\">Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":54.41,"MassiveScenarioClassification (nb)":64.64,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
32
- {"index":17,"Rank":32,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/bigscience\/sgpt-bloom-7b1-msmarco\">sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":7068,"Memory Usage (GB, fp32)":26.33,"Average":"","MassiveIntentClassification (nb)":49.41,"MassiveScenarioClassification (nb)":51.8,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
33
- {"index":18,"Rank":33,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/brahmairesearch\/slx-v0.1\">slx-v0.1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","MassiveIntentClassification (nb)":38.18,"MassiveScenarioClassification (nb)":43.39,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
34
- {"index":23,"Rank":34,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","MassiveIntentClassification (nb)":70.93,"MassiveScenarioClassification (nb)":75.7,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
35
- {"index":27,"Rank":35,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large-instruct\">multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","MassiveIntentClassification (nb)":71.66,"MassiveScenarioClassification (nb)":77.21,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
36
- {"index":29,"Rank":36,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/izhx\/udever-bloom-1b1\">udever-bloom-1b1<\/a>","Model Size (Million Parameters)":1065,"Memory Usage (GB, fp32)":3.97,"Average":"","MassiveIntentClassification (nb)":46.18,"MassiveScenarioClassification (nb)":50.32,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
37
- {"index":30,"Rank":37,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/izhx\/udever-bloom-560m\">udever-bloom-560m<\/a>","Model Size (Million Parameters)":559,"Memory Usage (GB, fp32)":2.08,"Average":"","MassiveIntentClassification (nb)":44.12,"MassiveScenarioClassification (nb)":46.79,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
38
- {"index":34,"Rank":38,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","MassiveIntentClassification (nb)":57.91,"MassiveScenarioClassification (nb)":64.29,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
39
- {"index":35,"Rank":39,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":41.91,"MassiveScenarioClassification (nb)":47.36,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
40
- {"index":37,"Rank":40,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
41
- {"index":38,"Rank":41,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/allenai-specter\">allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":34.75,"MassiveScenarioClassification (nb)":35.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
42
- {"index":39,"Rank":42,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/distiluse-base-multilingual-cased-v2\">distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","MassiveIntentClassification (nb)":46.01,"MassiveScenarioClassification (nb)":54.98,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
43
- {"index":40,"Rank":43,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/gtr-t5-large\">gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":43.78,"MassiveScenarioClassification (nb)":52.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
44
- {"index":41,"Rank":44,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/gtr-t5-xl\">gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.87,"MassiveScenarioClassification (nb)":54.44,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
45
- {"index":42,"Rank":45,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
46
- {"index":43,"Rank":46,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
47
- {"index":44,"Rank":47,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/sentence-t5-base\">sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":38.53,"MassiveScenarioClassification (nb)":46.6,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
48
- {"index":45,"Rank":48,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/sentence-t5-large\">sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":41.29,"MassiveScenarioClassification (nb)":49.92,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
49
- {"index":46,"Rank":49,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/sentence-t5-xl\">sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.91,"MassiveScenarioClassification (nb)":53.43,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
50
- {"index":47,"Rank":50,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","MassiveIntentClassification (nb)":31.49,"MassiveScenarioClassification (nb)":38.05,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
51
- {"index":48,"Rank":51,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/shibing624\/text2vec-base-multilingual\">text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","MassiveIntentClassification (nb)":54.64,"MassiveScenarioClassification (nb)":60.26,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
52
- {"index":49,"Rank":52,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/soichisumi\/multilingual-e5-large-Q8_0-GGUF\">multilingual-e5-large-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":69.88,"MassiveScenarioClassification (nb)":74.84,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
 
1
+ {"index":4,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":38.88,"CEDRClassification (rus-Cyrl)":44.69,"SensitiveTopicsClassification (rus-Cyrl)":33.07}
2
+ {"index":10,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":36.98,"CEDRClassification (rus-Cyrl)":46.47,"SensitiveTopicsClassification (rus-Cyrl)":27.5}
3
+ {"index":23,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":36.32,"CEDRClassification (rus-Cyrl)":45.11,"SensitiveTopicsClassification (rus-Cyrl)":27.52}
4
+ {"index":15,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":36.01,"CEDRClassification (rus-Cyrl)":44.84,"SensitiveTopicsClassification (rus-Cyrl)":27.17}
5
+ {"index":11,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":35.88,"CEDRClassification (rus-Cyrl)":45.48,"SensitiveTopicsClassification (rus-Cyrl)":26.29}
6
+ {"index":0,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":34.25,"CEDRClassification (rus-Cyrl)":43.47,"SensitiveTopicsClassification (rus-Cyrl)":25.03}
7
+ {"index":14,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":33.65,"CEDRClassification (rus-Cyrl)":42.32,"SensitiveTopicsClassification (rus-Cyrl)":24.98}
8
+ {"index":13,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":33.37,"CEDRClassification (rus-Cyrl)":40.8,"SensitiveTopicsClassification (rus-Cyrl)":25.94}
9
+ {"index":22,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":32.9,"CEDRClassification (rus-Cyrl)":39.98,"SensitiveTopicsClassification (rus-Cyrl)":25.83}
10
+ {"index":5,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":32.64,"CEDRClassification (rus-Cyrl)":36.81,"SensitiveTopicsClassification (rus-Cyrl)":28.47}
11
+ {"index":16,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.99,"CEDRClassification (rus-Cyrl)":40.07,"SensitiveTopicsClassification (rus-Cyrl)":23.91}
12
+ {"index":6,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":31.9,"CEDRClassification (rus-Cyrl)":35.84,"SensitiveTopicsClassification (rus-Cyrl)":27.97}
13
+ {"index":24,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":31.7,"CEDRClassification (rus-Cyrl)":38.95,"SensitiveTopicsClassification (rus-Cyrl)":24.44}
14
+ {"index":17,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":31.42,"CEDRClassification (rus-Cyrl)":40.61,"SensitiveTopicsClassification (rus-Cyrl)":22.23}
15
+ {"index":21,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.3,"CEDRClassification (rus-Cyrl)":37.76,"SensitiveTopicsClassification (rus-Cyrl)":24.84}
16
+ {"index":7,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":31.27,"CEDRClassification (rus-Cyrl)":40.75,"SensitiveTopicsClassification (rus-Cyrl)":21.79}
17
+ {"index":9,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":29.44,"CEDRClassification (rus-Cyrl)":36.87,"SensitiveTopicsClassification (rus-Cyrl)":22.02}
18
+ {"index":1,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":29.32,"CEDRClassification (rus-Cyrl)":36.19,"SensitiveTopicsClassification (rus-Cyrl)":22.45}
19
+ {"index":12,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":28.9,"CEDRClassification (rus-Cyrl)":34.14,"SensitiveTopicsClassification (rus-Cyrl)":23.67}
20
+ {"index":8,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":27.96,"CEDRClassification (rus-Cyrl)":37.39,"SensitiveTopicsClassification (rus-Cyrl)":18.54}
21
+ {"index":3,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":27.8,"CEDRClassification (rus-Cyrl)":35.55,"SensitiveTopicsClassification (rus-Cyrl)":20.05}
22
+ {"index":20,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.9,"CEDRClassification (rus-Cyrl)":35.98,"SensitiveTopicsClassification (rus-Cyrl)":17.83}
23
+ {"index":2,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":26.2,"CEDRClassification (rus-Cyrl)":33.59,"SensitiveTopicsClassification (rus-Cyrl)":18.8}
24
+ {"index":18,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":25.96,"CEDRClassification (rus-Cyrl)":33.86,"SensitiveTopicsClassification (rus-Cyrl)":18.05}
25
+ {"index":19,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":25.27,"CEDRClassification (rus-Cyrl)":32.72,"SensitiveTopicsClassification (rus-Cyrl)":17.82}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
all_data_tasks/40/default.jsonl CHANGED
The diff for this file is too large to render. See raw diff
 
all_data_tasks/41/default.jsonl CHANGED
The diff for this file is too large to render. See raw diff
 
all_data_tasks/42/default.jsonl CHANGED
The diff for this file is too large to render. See raw diff
 
all_data_tasks/43/default.jsonl CHANGED
@@ -1,14 +1,36 @@
1
- {"index":4,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Alibaba-NLP\/gte-Qwen2-7B-instruct\">gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":22.38,"BrightRetrieval (aops)":15.1,"BrightRetrieval (biology)":32.09,"BrightRetrieval (earth_science)":40.66,"BrightRetrieval (economics)":16.18,"BrightRetrieval (leetcode)":31.07,"BrightRetrieval (pony)":1.25,"BrightRetrieval (psychology)":26.58,"BrightRetrieval (robotics)":12.82,"BrightRetrieval (stackoverflow)":13.95,"BrightRetrieval (sustainable_living)":20.82,"BrightRetrieval (theoremqa_questions)":29.9,"BrightRetrieval (theoremqa_theorems)":28.15}
2
- {"index":3,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Alibaba-NLP\/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":21.75,"BrightRetrieval (aops)":14.36,"BrightRetrieval (biology)":30.92,"BrightRetrieval (earth_science)":36.22,"BrightRetrieval (economics)":17.72,"BrightRetrieval (leetcode)":25.46,"BrightRetrieval (pony)":9.79,"BrightRetrieval (psychology)":24.61,"BrightRetrieval (robotics)":13.47,"BrightRetrieval (stackoverflow)":19.85,"BrightRetrieval (sustainable_living)":14.93,"BrightRetrieval (theoremqa_questions)":26.97,"BrightRetrieval (theoremqa_theorems)":26.66}
3
- {"index":7,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/GritLM\/GritLM-7B\">GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":20.43,"BrightRetrieval (aops)":8.91,"BrightRetrieval (biology)":25.04,"BrightRetrieval (earth_science)":32.77,"BrightRetrieval (economics)":19.0,"BrightRetrieval (leetcode)":29.85,"BrightRetrieval (pony)":21.98,"BrightRetrieval (psychology)":19.92,"BrightRetrieval (robotics)":17.31,"BrightRetrieval (stackoverflow)":11.62,"BrightRetrieval (sustainable_living)":18.04,"BrightRetrieval (theoremqa_questions)":23.34,"BrightRetrieval (theoremqa_theorems)":17.41}
4
- {"index":0,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/cloud.google.com\/vertex-ai\/generative-ai\/docs\/embeddings\/get-text-embeddings#latest_models\">google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":19.73,"BrightRetrieval (aops)":9.33,"BrightRetrieval (biology)":22.98,"BrightRetrieval (earth_science)":34.38,"BrightRetrieval (economics)":19.5,"BrightRetrieval (leetcode)":29.64,"BrightRetrieval (pony)":3.59,"BrightRetrieval (psychology)":27.86,"BrightRetrieval (robotics)":15.98,"BrightRetrieval (stackoverflow)":17.93,"BrightRetrieval (sustainable_living)":17.25,"BrightRetrieval (theoremqa_questions)":21.51,"BrightRetrieval (theoremqa_theorems)":16.77}
5
- {"index":10,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/hkunlp\/instructor-xl\">instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":18.64,"BrightRetrieval (aops)":8.26,"BrightRetrieval (biology)":21.91,"BrightRetrieval (earth_science)":34.35,"BrightRetrieval (economics)":22.81,"BrightRetrieval (leetcode)":27.5,"BrightRetrieval (pony)":5.02,"BrightRetrieval (psychology)":27.43,"BrightRetrieval (robotics)":17.39,"BrightRetrieval (stackoverflow)":19.06,"BrightRetrieval (sustainable_living)":18.82,"BrightRetrieval (theoremqa_questions)":14.59,"BrightRetrieval (theoremqa_theorems)":6.5}
6
- {"index":8,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Salesforce\/SFR-Embedding-Mistral\">SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":18.0,"BrightRetrieval (aops)":7.43,"BrightRetrieval (biology)":19.49,"BrightRetrieval (earth_science)":26.63,"BrightRetrieval (economics)":17.84,"BrightRetrieval (leetcode)":27.35,"BrightRetrieval (pony)":1.97,"BrightRetrieval (psychology)":18.97,"BrightRetrieval (robotics)":16.7,"BrightRetrieval (stackoverflow)":12.72,"BrightRetrieval (sustainable_living)":19.79,"BrightRetrieval (theoremqa_questions)":23.05,"BrightRetrieval (theoremqa_theorems)":24.05}
7
- {"index":1,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/docs.voyageai.com\/embeddings\/\">voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.57,"BrightRetrieval (aops)":7.45,"BrightRetrieval (biology)":23.55,"BrightRetrieval (earth_science)":25.09,"BrightRetrieval (economics)":19.85,"BrightRetrieval (leetcode)":30.6,"BrightRetrieval (pony)":1.48,"BrightRetrieval (psychology)":24.79,"BrightRetrieval (robotics)":11.21,"BrightRetrieval (stackoverflow)":15.03,"BrightRetrieval (sustainable_living)":15.58,"BrightRetrieval (theoremqa_questions)":26.06,"BrightRetrieval (theoremqa_theorems)":10.13}
8
- {"index":13,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.43,"BrightRetrieval (aops)":8.45,"BrightRetrieval (biology)":23.67,"BrightRetrieval (earth_science)":26.27,"BrightRetrieval (economics)":19.98,"BrightRetrieval (leetcode)":23.65,"BrightRetrieval (pony)":2.45,"BrightRetrieval (psychology)":27.52,"BrightRetrieval (robotics)":12.93,"BrightRetrieval (stackoverflow)":12.49,"BrightRetrieval (sustainable_living)":20.32,"BrightRetrieval (theoremqa_questions)":22.22,"BrightRetrieval (theoremqa_theorems)":9.25}
9
- {"index":11,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":17.43,"BrightRetrieval (aops)":7.1,"BrightRetrieval (biology)":18.84,"BrightRetrieval (earth_science)":25.96,"BrightRetrieval (economics)":15.49,"BrightRetrieval (leetcode)":28.72,"BrightRetrieval (pony)":4.81,"BrightRetrieval (psychology)":15.79,"BrightRetrieval (robotics)":16.37,"BrightRetrieval (stackoverflow)":9.83,"BrightRetrieval (sustainable_living)":18.51,"BrightRetrieval (theoremqa_questions)":23.94,"BrightRetrieval (theoremqa_theorems)":23.78}
10
- {"index":6,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Cohere\/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.24,"BrightRetrieval (aops)":6.46,"BrightRetrieval (biology)":18.98,"BrightRetrieval (earth_science)":27.45,"BrightRetrieval (economics)":20.18,"BrightRetrieval (leetcode)":26.78,"BrightRetrieval (pony)":1.77,"BrightRetrieval (psychology)":21.82,"BrightRetrieval (robotics)":16.21,"BrightRetrieval (stackoverflow)":16.47,"BrightRetrieval (sustainable_living)":17.69,"BrightRetrieval (theoremqa_questions)":15.07,"BrightRetrieval (theoremqa_theorems)":6.04}
11
- {"index":12,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":14.8,"BrightRetrieval (aops)":5.32,"BrightRetrieval (biology)":15.52,"BrightRetrieval (earth_science)":20.11,"BrightRetrieval (economics)":16.64,"BrightRetrieval (leetcode)":26.4,"BrightRetrieval (pony)":6.95,"BrightRetrieval (psychology)":22.63,"BrightRetrieval (robotics)":8.36,"BrightRetrieval (stackoverflow)":9.48,"BrightRetrieval (sustainable_living)":15.34,"BrightRetrieval (theoremqa_questions)":18.49,"BrightRetrieval (theoremqa_theorems)":12.38}
12
- {"index":2,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/en.wikipedia.org\/wiki\/Okapi_BM25\">bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":14.29,"BrightRetrieval (aops)":6.2,"BrightRetrieval (biology)":19.19,"BrightRetrieval (earth_science)":27.06,"BrightRetrieval (economics)":14.87,"BrightRetrieval (leetcode)":24.37,"BrightRetrieval (pony)":7.93,"BrightRetrieval (psychology)":12.51,"BrightRetrieval (robotics)":13.53,"BrightRetrieval (stackoverflow)":16.55,"BrightRetrieval (sustainable_living)":15.22,"BrightRetrieval (theoremqa_questions)":9.78,"BrightRetrieval (theoremqa_theorems)":4.25}
13
- {"index":9,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/hkunlp\/instructor-large\">instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":14.12,"BrightRetrieval (aops)":7.94,"BrightRetrieval (biology)":15.61,"BrightRetrieval (earth_science)":21.52,"BrightRetrieval (economics)":15.99,"BrightRetrieval (leetcode)":20.0,"BrightRetrieval (pony)":1.32,"BrightRetrieval (psychology)":21.94,"BrightRetrieval (robotics)":11.45,"BrightRetrieval (stackoverflow)":11.21,"BrightRetrieval (sustainable_living)":13.16,"BrightRetrieval (theoremqa_questions)":20.07,"BrightRetrieval (theoremqa_theorems)":9.29}
14
- {"index":5,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-large-en-v1.5\">bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":13.47,"BrightRetrieval (aops)":6.08,"BrightRetrieval (biology)":11.96,"BrightRetrieval (earth_science)":24.15,"BrightRetrieval (economics)":16.59,"BrightRetrieval (leetcode)":26.68,"BrightRetrieval (pony)":5.64,"BrightRetrieval (psychology)":17.44,"BrightRetrieval (robotics)":12.21,"BrightRetrieval (stackoverflow)":9.51,"BrightRetrieval (sustainable_living)":13.27,"BrightRetrieval (theoremqa_questions)":12.56,"BrightRetrieval (theoremqa_theorems)":5.51}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {"index":13,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/GritLM\/GritLM-7B\">GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":35.2,"ARCChallenge":26.68,"AlphaNLI":34.0,"HellaSwag":39.45,"PIQA":44.35,"Quail":11.69,"RARbCode":84.0,"RARbMath":82.35,"SIQA":7.23,"SpartQA":9.29,"TempReasonL1":7.15,"TempReasonL2Fact":58.38,"TempReasonL2Pure":11.22,"TempReasonL3Fact":44.29,"TempReasonL3Pure":14.15,"WinoGrande":53.74}
2
+ {"index":32,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.13,"ARCChallenge":21.22,"AlphaNLI":34.23,"HellaSwag":31.4,"PIQA":37.52,"Quail":13.6,"RARbCode":89.41,"RARbMath":87.73,"SIQA":4.99,"SpartQA":7.45,"TempReasonL1":2.07,"TempReasonL2Fact":39.77,"TempReasonL2Pure":11.04,"TempReasonL3Fact":37.04,"TempReasonL3Pure":15.51,"WinoGrande":33.92}
3
+ {"index":12,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/GritLM\/GritLM-7B\">GritLM-7B-noinstruct<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":30.57,"ARCChallenge":16.57,"AlphaNLI":29.56,"HellaSwag":36.03,"PIQA":35.8,"Quail":8.68,"RARbCode":83.14,"RARbMath":83.01,"SIQA":5.73,"SpartQA":1.56,"TempReasonL1":2.57,"TempReasonL2Fact":48.25,"TempReasonL2Pure":8.98,"TempReasonL3Fact":34.11,"TempReasonL3Pure":12.44,"WinoGrande":52.12}
4
+ {"index":33,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.95,"ARCChallenge":23.98,"AlphaNLI":37.27,"HellaSwag":34.12,"PIQA":41.96,"Quail":10.15,"RARbCode":89.64,"RARbMath":90.08,"SIQA":3.44,"SpartQA":7.51,"TempReasonL1":2.13,"TempReasonL2Fact":28.65,"TempReasonL2Pure":10.34,"TempReasonL3Fact":25.52,"TempReasonL3Pure":15.28,"WinoGrande":29.11}
5
+ {"index":16,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.41,"ARCChallenge":17.81,"AlphaNLI":26.12,"HellaSwag":34.85,"PIQA":39.37,"Quail":7.01,"RARbCode":78.46,"RARbMath":72.16,"SIQA":5.42,"SpartQA":9.92,"TempReasonL1":3.31,"TempReasonL2Fact":36.9,"TempReasonL2Pure":9.18,"TempReasonL3Fact":30.18,"TempReasonL3Pure":14.31,"WinoGrande":41.21}
6
+ {"index":17,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct-noinstruct\">e5-mistral-7b-instruct-noinstruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.04,"ARCChallenge":20.48,"AlphaNLI":18.88,"HellaSwag":32.25,"PIQA":32.8,"Quail":6.25,"RARbCode":79.84,"RARbMath":76.19,"SIQA":5.08,"SpartQA":10.87,"TempReasonL1":3.04,"TempReasonL2Fact":35.63,"TempReasonL2Pure":9.32,"TempReasonL3Fact":30.41,"TempReasonL3Pure":14.39,"WinoGrande":45.18}
7
+ {"index":10,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Cohere\/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":25.41,"ARCChallenge":10.1,"AlphaNLI":18.75,"HellaSwag":29.02,"PIQA":27.89,"Quail":7.77,"RARbCode":56.56,"RARbMath":72.05,"SIQA":5.03,"SpartQA":3.33,"TempReasonL1":1.43,"TempReasonL2Fact":40.46,"TempReasonL2Pure":2.39,"TempReasonL3Fact":33.87,"TempReasonL3Pure":7.52,"WinoGrande":65.02}
8
+ {"index":19,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":24.69,"ARCChallenge":10.83,"AlphaNLI":13.59,"HellaSwag":27.35,"PIQA":28.82,"Quail":4.85,"RARbCode":58.92,"RARbMath":67.32,"SIQA":5.36,"SpartQA":5.64,"TempReasonL1":1.14,"TempReasonL2Fact":42.97,"TempReasonL2Pure":2.05,"TempReasonL3Fact":38.22,"TempReasonL3Pure":8.31,"WinoGrande":54.99}
9
+ {"index":35,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":24.2,"ARCChallenge":14.63,"AlphaNLI":30.61,"HellaSwag":30.94,"PIQA":33.69,"Quail":6.11,"RARbCode":72.03,"RARbMath":71.07,"SIQA":3.03,"SpartQA":6.63,"TempReasonL1":2.35,"TempReasonL2Fact":25.68,"TempReasonL2Pure":2.76,"TempReasonL3Fact":22.09,"TempReasonL3Pure":9.79,"WinoGrande":31.53}
10
+ {"index":11,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Cohere\/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":23.65,"ARCChallenge":9.89,"AlphaNLI":15.1,"HellaSwag":26.35,"PIQA":28.49,"Quail":4.1,"RARbCode":57.19,"RARbMath":72.26,"SIQA":4.26,"SpartQA":3.75,"TempReasonL1":1.5,"TempReasonL2Fact":35.91,"TempReasonL2Pure":1.89,"TempReasonL3Fact":27.51,"TempReasonL3Pure":8.53,"WinoGrande":58.01}
11
+ {"index":18,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":23.26,"ARCChallenge":9.61,"AlphaNLI":16.44,"HellaSwag":24.79,"PIQA":25.09,"Quail":3.52,"RARbCode":52.16,"RARbMath":65.35,"SIQA":3.72,"SpartQA":7.91,"TempReasonL1":0.72,"TempReasonL2Fact":38.76,"TempReasonL2Pure":1.63,"TempReasonL3Fact":35.85,"TempReasonL3Pure":7.11,"WinoGrande":56.18}
12
+ {"index":31,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-and-improved-embedding-model\">text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.57,"ARCChallenge":13.3,"AlphaNLI":25.65,"HellaSwag":29.29,"PIQA":31.02,"Quail":5.83,"RARbCode":83.39,"RARbMath":73.21,"SIQA":3.14,"SpartQA":4.23,"TempReasonL1":1.68,"TempReasonL2Fact":19.93,"TempReasonL2Pure":2.6,"TempReasonL3Fact":18.02,"TempReasonL3Pure":7.58,"WinoGrande":19.65}
13
+ {"index":34,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-small-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.09,"ARCChallenge":13.76,"AlphaNLI":21.14,"HellaSwag":27.2,"PIQA":29.59,"Quail":6.64,"RARbCode":72.14,"RARbMath":64.31,"SIQA":2.98,"SpartQA":3.58,"TempReasonL1":2.29,"TempReasonL2Fact":26.34,"TempReasonL2Pure":3.17,"TempReasonL3Fact":22.72,"TempReasonL3Pure":9.98,"WinoGrande":25.49}
14
+ {"index":7,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":21.48,"ARCChallenge":9.02,"AlphaNLI":24.73,"HellaSwag":25.67,"PIQA":22.93,"Quail":7.51,"RARbCode":38.8,"RARbMath":69.19,"SIQA":4.89,"SpartQA":7.49,"TempReasonL1":0.99,"TempReasonL2Fact":33.23,"TempReasonL2Pure":0.68,"TempReasonL3Fact":30.05,"TempReasonL3Pure":5.28,"WinoGrande":41.72}
15
+ {"index":6,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3-instruct<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":20.83,"ARCChallenge":9.03,"AlphaNLI":24.69,"HellaSwag":25.55,"PIQA":19.03,"Quail":7.08,"RARbCode":39.58,"RARbMath":64.51,"SIQA":4.77,"SpartQA":7.0,"TempReasonL1":0.8,"TempReasonL2Fact":34.99,"TempReasonL2Pure":0.62,"TempReasonL3Fact":32.47,"TempReasonL3Pure":7.01,"WinoGrande":35.33}
16
+ {"index":20,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":20.04,"ARCChallenge":7.14,"AlphaNLI":13.0,"HellaSwag":23.73,"PIQA":21.08,"Quail":2.38,"RARbCode":46.96,"RARbMath":63.91,"SIQA":2.57,"SpartQA":5.43,"TempReasonL1":0.8,"TempReasonL2Fact":36.76,"TempReasonL2Pure":0.62,"TempReasonL3Fact":32.42,"TempReasonL3Pure":6.36,"WinoGrande":37.46}
17
+ {"index":24,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":19.61,"ARCChallenge":9.48,"AlphaNLI":28.19,"HellaSwag":24.21,"PIQA":25.28,"Quail":3.92,"RARbCode":44.27,"RARbMath":68.19,"SIQA":1.56,"SpartQA":1.65,"TempReasonL1":1.53,"TempReasonL2Fact":17.65,"TempReasonL2Pure":0.46,"TempReasonL3Fact":14.16,"TempReasonL3Pure":6.33,"WinoGrande":47.33}
18
+ {"index":30,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-and-improved-embedding-model\">text-embedding-ada-002-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":19.56,"ARCChallenge":11.85,"AlphaNLI":10.62,"HellaSwag":24.8,"PIQA":23.87,"Quail":5.79,"RARbCode":82.36,"RARbMath":67.26,"SIQA":2.64,"SpartQA":4.75,"TempReasonL1":1.44,"TempReasonL2Fact":19.38,"TempReasonL2Pure":2.43,"TempReasonL3Fact":17.58,"TempReasonL3Pure":7.31,"WinoGrande":11.36}
19
+ {"index":1,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/github.com\/facebookresearch\/dpr-scale\/tree\/main\/dragon\">dragon-plus<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":19.1,"ARCChallenge":8.91,"AlphaNLI":32.1,"HellaSwag":27.69,"PIQA":28.01,"Quail":4.09,"RARbCode":17.58,"RARbMath":45.09,"SIQA":2.0,"SpartQA":10.34,"TempReasonL1":1.82,"TempReasonL2Fact":17.45,"TempReasonL2Pure":0.55,"TempReasonL3Fact":15.71,"TempReasonL3Pure":7.97,"WinoGrande":67.18}
20
+ {"index":26,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":18.03,"ARCChallenge":11.8,"AlphaNLI":22.41,"HellaSwag":26.27,"PIQA":29.03,"Quail":3.41,"RARbCode":53.21,"RARbMath":71.85,"SIQA":2.38,"SpartQA":0.22,"TempReasonL1":1.77,"TempReasonL2Fact":11.2,"TempReasonL2Pure":1.15,"TempReasonL3Fact":9.42,"TempReasonL3Pure":5.59,"WinoGrande":20.8}
21
+ {"index":5,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-large-en-v1.5\">bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":17.7,"ARCChallenge":9.99,"AlphaNLI":13.13,"HellaSwag":28.5,"PIQA":27.99,"Quail":1.83,"RARbCode":48.12,"RARbMath":57.36,"SIQA":1.04,"SpartQA":2.99,"TempReasonL1":1.46,"TempReasonL2Fact":24.25,"TempReasonL2Pure":2.35,"TempReasonL3Fact":20.64,"TempReasonL3Pure":6.67,"WinoGrande":19.18}
22
+ {"index":22,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":17.35,"ARCChallenge":10.23,"AlphaNLI":25.35,"HellaSwag":24.08,"PIQA":26.44,"Quail":3.08,"RARbCode":42.44,"RARbMath":66.36,"SIQA":2.09,"SpartQA":2.67,"TempReasonL1":1.66,"TempReasonL2Fact":10.31,"TempReasonL2Pure":0.63,"TempReasonL3Fact":11.11,"TempReasonL3Pure":6.63,"WinoGrande":27.2}
23
+ {"index":0,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/github.com\/facebookresearch\/dpr-scale\/tree\/main\/dragon\">dragon-plus-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":16.73,"ARCChallenge":8.24,"AlphaNLI":25.18,"HellaSwag":24.06,"PIQA":26.35,"Quail":4.2,"RARbCode":12.84,"RARbMath":36.15,"SIQA":1.75,"SpartQA":10.82,"TempReasonL1":1.54,"TempReasonL2Fact":16.11,"TempReasonL2Pure":0.57,"TempReasonL3Fact":14.81,"TempReasonL3Pure":7.46,"WinoGrande":60.84}
24
+ {"index":15,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/facebook\/contriever\">contriever<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":16.09,"ARCChallenge":8.62,"AlphaNLI":31.77,"HellaSwag":17.73,"PIQA":24.64,"Quail":4.97,"RARbCode":9.28,"RARbMath":30.76,"SIQA":1.27,"SpartQA":10.94,"TempReasonL1":1.93,"TempReasonL2Fact":22.68,"TempReasonL2Pure":1.12,"TempReasonL3Fact":20.62,"TempReasonL3Pure":7.8,"WinoGrande":47.15}
25
+ {"index":23,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2-instruct<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":15.95,"ARCChallenge":9.4,"AlphaNLI":15.09,"HellaSwag":20.51,"PIQA":24.68,"Quail":3.46,"RARbCode":42.47,"RARbMath":62.39,"SIQA":1.53,"SpartQA":0.57,"TempReasonL1":1.05,"TempReasonL2Fact":16.57,"TempReasonL2Pure":0.49,"TempReasonL3Fact":14.01,"TempReasonL3Pure":6.27,"WinoGrande":20.73}
26
+ {"index":3,"Rank":26,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-base-en-v1.5\">bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":14.93,"ARCChallenge":9.66,"AlphaNLI":10.99,"HellaSwag":26.64,"PIQA":25.69,"Quail":1.42,"RARbCode":46.47,"RARbMath":46.86,"SIQA":0.94,"SpartQA":3.37,"TempReasonL1":1.07,"TempReasonL2Fact":17.23,"TempReasonL2Pure":1.29,"TempReasonL3Fact":13.36,"TempReasonL3Pure":5.2,"WinoGrande":13.76}
27
+ {"index":4,"Rank":27,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-large-en-v1.5\">bge-large-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":14.55,"ARCChallenge":8.86,"AlphaNLI":0.86,"HellaSwag":26.24,"PIQA":23.26,"Quail":2.72,"RARbCode":45.25,"RARbMath":49.82,"SIQA":0.59,"SpartQA":2.34,"TempReasonL1":1.17,"TempReasonL2Fact":21.19,"TempReasonL2Pure":2.1,"TempReasonL3Fact":17.59,"TempReasonL3Pure":5.99,"WinoGrande":10.31}
28
+ {"index":9,"Rank":28,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-small-en-v1.5\">bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":14.15,"ARCChallenge":8.95,"AlphaNLI":11.64,"HellaSwag":25.44,"PIQA":23.92,"Quail":1.75,"RARbCode":42.36,"RARbMath":44.98,"SIQA":0.77,"SpartQA":3.55,"TempReasonL1":1.41,"TempReasonL2Fact":17.56,"TempReasonL2Pure":1.05,"TempReasonL3Fact":13.88,"TempReasonL3Pure":4.76,"WinoGrande":10.28}
29
+ {"index":25,"Rank":29,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2-instruct<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":13.84,"ARCChallenge":10.35,"AlphaNLI":1.96,"HellaSwag":13.01,"PIQA":27.18,"Quail":3.02,"RARbCode":48.95,"RARbMath":69.21,"SIQA":1.29,"SpartQA":1.01,"TempReasonL1":1.52,"TempReasonL2Fact":7.28,"TempReasonL2Pure":1.03,"TempReasonL3Fact":7.03,"TempReasonL3Pure":5.16,"WinoGrande":9.66}
30
+ {"index":2,"Rank":30,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-base-en-v1.5\">bge-base-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":13.52,"ARCChallenge":8.85,"AlphaNLI":4.13,"HellaSwag":24.03,"PIQA":23.03,"Quail":1.25,"RARbCode":46.32,"RARbMath":45.62,"SIQA":0.24,"SpartQA":2.67,"TempReasonL1":0.8,"TempReasonL2Fact":16.56,"TempReasonL2Pure":1.33,"TempReasonL3Fact":12.68,"TempReasonL3Pure":5.08,"WinoGrande":10.27}
31
+ {"index":8,"Rank":31,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-small-en-v1.5\">bge-small-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":12.6,"ARCChallenge":7.72,"AlphaNLI":1.26,"HellaSwag":23.41,"PIQA":20.79,"Quail":2.01,"RARbCode":41.52,"RARbMath":46.5,"SIQA":0.98,"SpartQA":2.86,"TempReasonL1":1.27,"TempReasonL2Fact":16.72,"TempReasonL2Pure":1.1,"TempReasonL3Fact":12.81,"TempReasonL3Pure":4.63,"WinoGrande":5.35}
32
+ {"index":28,"Rank":32,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":12.24,"ARCChallenge":7.19,"AlphaNLI":21.87,"HellaSwag":17.53,"PIQA":18.65,"Quail":2.98,"RARbCode":11.02,"RARbMath":30.93,"SIQA":1.21,"SpartQA":5.69,"TempReasonL1":1.94,"TempReasonL2Fact":5.34,"TempReasonL2Pure":0.33,"TempReasonL3Fact":6.79,"TempReasonL3Pure":3.19,"WinoGrande":49.01}
33
+ {"index":27,"Rank":33,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":11.55,"ARCChallenge":6.19,"AlphaNLI":20.89,"HellaSwag":16.98,"PIQA":15.79,"Quail":2.96,"RARbCode":8.48,"RARbMath":30.02,"SIQA":0.88,"SpartQA":4.94,"TempReasonL1":1.43,"TempReasonL2Fact":6.21,"TempReasonL2Pure":0.22,"TempReasonL3Fact":6.77,"TempReasonL3Pure":4.9,"WinoGrande":46.52}
34
+ {"index":21,"Rank":34,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":9.31,"ARCChallenge":3.78,"AlphaNLI":13.11,"HellaSwag":5.59,"PIQA":6.53,"Quail":1.91,"RARbCode":2.31,"RARbMath":27.19,"SIQA":1.07,"SpartQA":1.56,"TempReasonL1":1.56,"TempReasonL2Fact":7.06,"TempReasonL2Pure":0.14,"TempReasonL3Fact":8.74,"TempReasonL3Pure":4.73,"WinoGrande":54.3}
35
+ {"index":14,"Rank":35,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/facebook\/contriever\">contriever-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":"","ARCChallenge":7.63,"AlphaNLI":27.09,"HellaSwag":"","PIQA":21.73,"Quail":4.92,"RARbCode":7.12,"RARbMath":21.83,"SIQA":0.88,"SpartQA":10.56,"TempReasonL1":1.8,"TempReasonL2Fact":22.03,"TempReasonL2Pure":0.94,"TempReasonL3Fact":20.82,"TempReasonL3Pure":7.15,"WinoGrande":26.3}
36
+ {"index":29,"Rank":36,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","ARCChallenge":3.85,"AlphaNLI":14.15,"HellaSwag":"","PIQA":"","Quail":"","RARbCode":"","RARbMath":"","SIQA":"","SpartQA":"","TempReasonL1":"","TempReasonL2Fact":"","TempReasonL2Pure":"","TempReasonL3Fact":"","TempReasonL3Pure":"","WinoGrande":""}
all_data_tasks/44/default.jsonl ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {"index":4,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Alibaba-NLP\/gte-Qwen2-7B-instruct\">gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":22.38,"BrightRetrieval (aops)":15.1,"BrightRetrieval (biology)":32.09,"BrightRetrieval (earth_science)":40.66,"BrightRetrieval (economics)":16.18,"BrightRetrieval (leetcode)":31.07,"BrightRetrieval (pony)":1.25,"BrightRetrieval (psychology)":26.58,"BrightRetrieval (robotics)":12.82,"BrightRetrieval (stackoverflow)":13.95,"BrightRetrieval (sustainable_living)":20.82,"BrightRetrieval (theoremqa_questions)":29.9,"BrightRetrieval (theoremqa_theorems)":28.15}
2
+ {"index":3,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Alibaba-NLP\/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":21.75,"BrightRetrieval (aops)":14.36,"BrightRetrieval (biology)":30.92,"BrightRetrieval (earth_science)":36.22,"BrightRetrieval (economics)":17.72,"BrightRetrieval (leetcode)":25.46,"BrightRetrieval (pony)":9.79,"BrightRetrieval (psychology)":24.61,"BrightRetrieval (robotics)":13.47,"BrightRetrieval (stackoverflow)":19.85,"BrightRetrieval (sustainable_living)":14.93,"BrightRetrieval (theoremqa_questions)":26.97,"BrightRetrieval (theoremqa_theorems)":26.66}
3
+ {"index":7,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/GritLM\/GritLM-7B\">GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":20.43,"BrightRetrieval (aops)":8.91,"BrightRetrieval (biology)":25.04,"BrightRetrieval (earth_science)":32.77,"BrightRetrieval (economics)":19.0,"BrightRetrieval (leetcode)":29.85,"BrightRetrieval (pony)":21.98,"BrightRetrieval (psychology)":19.92,"BrightRetrieval (robotics)":17.31,"BrightRetrieval (stackoverflow)":11.62,"BrightRetrieval (sustainable_living)":18.04,"BrightRetrieval (theoremqa_questions)":23.34,"BrightRetrieval (theoremqa_theorems)":17.41}
4
+ {"index":0,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/cloud.google.com\/vertex-ai\/generative-ai\/docs\/embeddings\/get-text-embeddings#latest_models\">google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":19.73,"BrightRetrieval (aops)":9.33,"BrightRetrieval (biology)":22.98,"BrightRetrieval (earth_science)":34.38,"BrightRetrieval (economics)":19.5,"BrightRetrieval (leetcode)":29.64,"BrightRetrieval (pony)":3.59,"BrightRetrieval (psychology)":27.86,"BrightRetrieval (robotics)":15.98,"BrightRetrieval (stackoverflow)":17.93,"BrightRetrieval (sustainable_living)":17.25,"BrightRetrieval (theoremqa_questions)":21.51,"BrightRetrieval (theoremqa_theorems)":16.77}
5
+ {"index":10,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/hkunlp\/instructor-xl\">instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":18.64,"BrightRetrieval (aops)":8.26,"BrightRetrieval (biology)":21.91,"BrightRetrieval (earth_science)":34.35,"BrightRetrieval (economics)":22.81,"BrightRetrieval (leetcode)":27.5,"BrightRetrieval (pony)":5.02,"BrightRetrieval (psychology)":27.43,"BrightRetrieval (robotics)":17.39,"BrightRetrieval (stackoverflow)":19.06,"BrightRetrieval (sustainable_living)":18.82,"BrightRetrieval (theoremqa_questions)":14.59,"BrightRetrieval (theoremqa_theorems)":6.5}
6
+ {"index":8,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Salesforce\/SFR-Embedding-Mistral\">SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":18.0,"BrightRetrieval (aops)":7.43,"BrightRetrieval (biology)":19.49,"BrightRetrieval (earth_science)":26.63,"BrightRetrieval (economics)":17.84,"BrightRetrieval (leetcode)":27.35,"BrightRetrieval (pony)":1.97,"BrightRetrieval (psychology)":18.97,"BrightRetrieval (robotics)":16.7,"BrightRetrieval (stackoverflow)":12.72,"BrightRetrieval (sustainable_living)":19.79,"BrightRetrieval (theoremqa_questions)":23.05,"BrightRetrieval (theoremqa_theorems)":24.05}
7
+ {"index":1,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/docs.voyageai.com\/embeddings\/\">voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.57,"BrightRetrieval (aops)":7.45,"BrightRetrieval (biology)":23.55,"BrightRetrieval (earth_science)":25.09,"BrightRetrieval (economics)":19.85,"BrightRetrieval (leetcode)":30.6,"BrightRetrieval (pony)":1.48,"BrightRetrieval (psychology)":24.79,"BrightRetrieval (robotics)":11.21,"BrightRetrieval (stackoverflow)":15.03,"BrightRetrieval (sustainable_living)":15.58,"BrightRetrieval (theoremqa_questions)":26.06,"BrightRetrieval (theoremqa_theorems)":10.13}
8
+ {"index":12,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.43,"BrightRetrieval (aops)":8.45,"BrightRetrieval (biology)":23.67,"BrightRetrieval (earth_science)":26.27,"BrightRetrieval (economics)":19.98,"BrightRetrieval (leetcode)":23.65,"BrightRetrieval (pony)":2.45,"BrightRetrieval (psychology)":27.52,"BrightRetrieval (robotics)":12.93,"BrightRetrieval (stackoverflow)":12.49,"BrightRetrieval (sustainable_living)":20.32,"BrightRetrieval (theoremqa_questions)":22.22,"BrightRetrieval (theoremqa_theorems)":9.25}
9
+ {"index":11,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":17.43,"BrightRetrieval (aops)":7.1,"BrightRetrieval (biology)":18.84,"BrightRetrieval (earth_science)":25.96,"BrightRetrieval (economics)":15.49,"BrightRetrieval (leetcode)":28.72,"BrightRetrieval (pony)":4.81,"BrightRetrieval (psychology)":15.79,"BrightRetrieval (robotics)":16.37,"BrightRetrieval (stackoverflow)":9.83,"BrightRetrieval (sustainable_living)":18.51,"BrightRetrieval (theoremqa_questions)":23.94,"BrightRetrieval (theoremqa_theorems)":23.78}
10
+ {"index":6,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Cohere\/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.24,"BrightRetrieval (aops)":6.46,"BrightRetrieval (biology)":18.98,"BrightRetrieval (earth_science)":27.45,"BrightRetrieval (economics)":20.18,"BrightRetrieval (leetcode)":26.78,"BrightRetrieval (pony)":1.77,"BrightRetrieval (psychology)":21.82,"BrightRetrieval (robotics)":16.21,"BrightRetrieval (stackoverflow)":16.47,"BrightRetrieval (sustainable_living)":17.69,"BrightRetrieval (theoremqa_questions)":15.07,"BrightRetrieval (theoremqa_theorems)":6.04}
11
+ {"index":2,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/en.wikipedia.org\/wiki\/Okapi_BM25\">bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":14.29,"BrightRetrieval (aops)":6.2,"BrightRetrieval (biology)":19.19,"BrightRetrieval (earth_science)":27.06,"BrightRetrieval (economics)":14.87,"BrightRetrieval (leetcode)":24.37,"BrightRetrieval (pony)":7.93,"BrightRetrieval (psychology)":12.51,"BrightRetrieval (robotics)":13.53,"BrightRetrieval (stackoverflow)":16.55,"BrightRetrieval (sustainable_living)":15.22,"BrightRetrieval (theoremqa_questions)":9.78,"BrightRetrieval (theoremqa_theorems)":4.25}
12
+ {"index":9,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/hkunlp\/instructor-large\">instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":14.12,"BrightRetrieval (aops)":7.94,"BrightRetrieval (biology)":15.61,"BrightRetrieval (earth_science)":21.52,"BrightRetrieval (economics)":15.99,"BrightRetrieval (leetcode)":20.0,"BrightRetrieval (pony)":1.32,"BrightRetrieval (psychology)":21.94,"BrightRetrieval (robotics)":11.45,"BrightRetrieval (stackoverflow)":11.21,"BrightRetrieval (sustainable_living)":13.16,"BrightRetrieval (theoremqa_questions)":20.07,"BrightRetrieval (theoremqa_theorems)":9.29}
13
+ {"index":5,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-large-en-v1.5\">bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":13.47,"BrightRetrieval (aops)":6.08,"BrightRetrieval (biology)":11.96,"BrightRetrieval (earth_science)":24.15,"BrightRetrieval (economics)":16.59,"BrightRetrieval (leetcode)":26.68,"BrightRetrieval (pony)":5.64,"BrightRetrieval (psychology)":17.44,"BrightRetrieval (robotics)":12.21,"BrightRetrieval (stackoverflow)":9.51,"BrightRetrieval (sustainable_living)":13.27,"BrightRetrieval (theoremqa_questions)":12.56,"BrightRetrieval (theoremqa_theorems)":5.51}
boards_data/bright/data_tasks/Retrieval/default.jsonl CHANGED
@@ -5,10 +5,9 @@
5
  {"index":10,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/hkunlp\/instructor-xl\">instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":18.64,"BrightRetrieval (aops)":8.26,"BrightRetrieval (biology)":21.91,"BrightRetrieval (earth_science)":34.35,"BrightRetrieval (economics)":22.81,"BrightRetrieval (leetcode)":27.5,"BrightRetrieval (pony)":5.02,"BrightRetrieval (psychology)":27.43,"BrightRetrieval (robotics)":17.39,"BrightRetrieval (stackoverflow)":19.06,"BrightRetrieval (sustainable_living)":18.82,"BrightRetrieval (theoremqa_questions)":14.59,"BrightRetrieval (theoremqa_theorems)":6.5}
6
  {"index":8,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Salesforce\/SFR-Embedding-Mistral\">SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":18.0,"BrightRetrieval (aops)":7.43,"BrightRetrieval (biology)":19.49,"BrightRetrieval (earth_science)":26.63,"BrightRetrieval (economics)":17.84,"BrightRetrieval (leetcode)":27.35,"BrightRetrieval (pony)":1.97,"BrightRetrieval (psychology)":18.97,"BrightRetrieval (robotics)":16.7,"BrightRetrieval (stackoverflow)":12.72,"BrightRetrieval (sustainable_living)":19.79,"BrightRetrieval (theoremqa_questions)":23.05,"BrightRetrieval (theoremqa_theorems)":24.05}
7
  {"index":1,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/docs.voyageai.com\/embeddings\/\">voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.57,"BrightRetrieval (aops)":7.45,"BrightRetrieval (biology)":23.55,"BrightRetrieval (earth_science)":25.09,"BrightRetrieval (economics)":19.85,"BrightRetrieval (leetcode)":30.6,"BrightRetrieval (pony)":1.48,"BrightRetrieval (psychology)":24.79,"BrightRetrieval (robotics)":11.21,"BrightRetrieval (stackoverflow)":15.03,"BrightRetrieval (sustainable_living)":15.58,"BrightRetrieval (theoremqa_questions)":26.06,"BrightRetrieval (theoremqa_theorems)":10.13}
8
- {"index":13,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.43,"BrightRetrieval (aops)":8.45,"BrightRetrieval (biology)":23.67,"BrightRetrieval (earth_science)":26.27,"BrightRetrieval (economics)":19.98,"BrightRetrieval (leetcode)":23.65,"BrightRetrieval (pony)":2.45,"BrightRetrieval (psychology)":27.52,"BrightRetrieval (robotics)":12.93,"BrightRetrieval (stackoverflow)":12.49,"BrightRetrieval (sustainable_living)":20.32,"BrightRetrieval (theoremqa_questions)":22.22,"BrightRetrieval (theoremqa_theorems)":9.25}
9
  {"index":11,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":17.43,"BrightRetrieval (aops)":7.1,"BrightRetrieval (biology)":18.84,"BrightRetrieval (earth_science)":25.96,"BrightRetrieval (economics)":15.49,"BrightRetrieval (leetcode)":28.72,"BrightRetrieval (pony)":4.81,"BrightRetrieval (psychology)":15.79,"BrightRetrieval (robotics)":16.37,"BrightRetrieval (stackoverflow)":9.83,"BrightRetrieval (sustainable_living)":18.51,"BrightRetrieval (theoremqa_questions)":23.94,"BrightRetrieval (theoremqa_theorems)":23.78}
10
  {"index":6,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Cohere\/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.24,"BrightRetrieval (aops)":6.46,"BrightRetrieval (biology)":18.98,"BrightRetrieval (earth_science)":27.45,"BrightRetrieval (economics)":20.18,"BrightRetrieval (leetcode)":26.78,"BrightRetrieval (pony)":1.77,"BrightRetrieval (psychology)":21.82,"BrightRetrieval (robotics)":16.21,"BrightRetrieval (stackoverflow)":16.47,"BrightRetrieval (sustainable_living)":17.69,"BrightRetrieval (theoremqa_questions)":15.07,"BrightRetrieval (theoremqa_theorems)":6.04}
11
- {"index":12,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":14.8,"BrightRetrieval (aops)":5.32,"BrightRetrieval (biology)":15.52,"BrightRetrieval (earth_science)":20.11,"BrightRetrieval (economics)":16.64,"BrightRetrieval (leetcode)":26.4,"BrightRetrieval (pony)":6.95,"BrightRetrieval (psychology)":22.63,"BrightRetrieval (robotics)":8.36,"BrightRetrieval (stackoverflow)":9.48,"BrightRetrieval (sustainable_living)":15.34,"BrightRetrieval (theoremqa_questions)":18.49,"BrightRetrieval (theoremqa_theorems)":12.38}
12
- {"index":2,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/en.wikipedia.org\/wiki\/Okapi_BM25\">bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":14.29,"BrightRetrieval (aops)":6.2,"BrightRetrieval (biology)":19.19,"BrightRetrieval (earth_science)":27.06,"BrightRetrieval (economics)":14.87,"BrightRetrieval (leetcode)":24.37,"BrightRetrieval (pony)":7.93,"BrightRetrieval (psychology)":12.51,"BrightRetrieval (robotics)":13.53,"BrightRetrieval (stackoverflow)":16.55,"BrightRetrieval (sustainable_living)":15.22,"BrightRetrieval (theoremqa_questions)":9.78,"BrightRetrieval (theoremqa_theorems)":4.25}
13
- {"index":9,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/hkunlp\/instructor-large\">instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":14.12,"BrightRetrieval (aops)":7.94,"BrightRetrieval (biology)":15.61,"BrightRetrieval (earth_science)":21.52,"BrightRetrieval (economics)":15.99,"BrightRetrieval (leetcode)":20.0,"BrightRetrieval (pony)":1.32,"BrightRetrieval (psychology)":21.94,"BrightRetrieval (robotics)":11.45,"BrightRetrieval (stackoverflow)":11.21,"BrightRetrieval (sustainable_living)":13.16,"BrightRetrieval (theoremqa_questions)":20.07,"BrightRetrieval (theoremqa_theorems)":9.29}
14
- {"index":5,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-large-en-v1.5\">bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":13.47,"BrightRetrieval (aops)":6.08,"BrightRetrieval (biology)":11.96,"BrightRetrieval (earth_science)":24.15,"BrightRetrieval (economics)":16.59,"BrightRetrieval (leetcode)":26.68,"BrightRetrieval (pony)":5.64,"BrightRetrieval (psychology)":17.44,"BrightRetrieval (robotics)":12.21,"BrightRetrieval (stackoverflow)":9.51,"BrightRetrieval (sustainable_living)":13.27,"BrightRetrieval (theoremqa_questions)":12.56,"BrightRetrieval (theoremqa_theorems)":5.51}
 
5
  {"index":10,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/hkunlp\/instructor-xl\">instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":18.64,"BrightRetrieval (aops)":8.26,"BrightRetrieval (biology)":21.91,"BrightRetrieval (earth_science)":34.35,"BrightRetrieval (economics)":22.81,"BrightRetrieval (leetcode)":27.5,"BrightRetrieval (pony)":5.02,"BrightRetrieval (psychology)":27.43,"BrightRetrieval (robotics)":17.39,"BrightRetrieval (stackoverflow)":19.06,"BrightRetrieval (sustainable_living)":18.82,"BrightRetrieval (theoremqa_questions)":14.59,"BrightRetrieval (theoremqa_theorems)":6.5}
6
  {"index":8,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Salesforce\/SFR-Embedding-Mistral\">SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":18.0,"BrightRetrieval (aops)":7.43,"BrightRetrieval (biology)":19.49,"BrightRetrieval (earth_science)":26.63,"BrightRetrieval (economics)":17.84,"BrightRetrieval (leetcode)":27.35,"BrightRetrieval (pony)":1.97,"BrightRetrieval (psychology)":18.97,"BrightRetrieval (robotics)":16.7,"BrightRetrieval (stackoverflow)":12.72,"BrightRetrieval (sustainable_living)":19.79,"BrightRetrieval (theoremqa_questions)":23.05,"BrightRetrieval (theoremqa_theorems)":24.05}
7
  {"index":1,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/docs.voyageai.com\/embeddings\/\">voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.57,"BrightRetrieval (aops)":7.45,"BrightRetrieval (biology)":23.55,"BrightRetrieval (earth_science)":25.09,"BrightRetrieval (economics)":19.85,"BrightRetrieval (leetcode)":30.6,"BrightRetrieval (pony)":1.48,"BrightRetrieval (psychology)":24.79,"BrightRetrieval (robotics)":11.21,"BrightRetrieval (stackoverflow)":15.03,"BrightRetrieval (sustainable_living)":15.58,"BrightRetrieval (theoremqa_questions)":26.06,"BrightRetrieval (theoremqa_theorems)":10.13}
8
+ {"index":12,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.43,"BrightRetrieval (aops)":8.45,"BrightRetrieval (biology)":23.67,"BrightRetrieval (earth_science)":26.27,"BrightRetrieval (economics)":19.98,"BrightRetrieval (leetcode)":23.65,"BrightRetrieval (pony)":2.45,"BrightRetrieval (psychology)":27.52,"BrightRetrieval (robotics)":12.93,"BrightRetrieval (stackoverflow)":12.49,"BrightRetrieval (sustainable_living)":20.32,"BrightRetrieval (theoremqa_questions)":22.22,"BrightRetrieval (theoremqa_theorems)":9.25}
9
  {"index":11,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":17.43,"BrightRetrieval (aops)":7.1,"BrightRetrieval (biology)":18.84,"BrightRetrieval (earth_science)":25.96,"BrightRetrieval (economics)":15.49,"BrightRetrieval (leetcode)":28.72,"BrightRetrieval (pony)":4.81,"BrightRetrieval (psychology)":15.79,"BrightRetrieval (robotics)":16.37,"BrightRetrieval (stackoverflow)":9.83,"BrightRetrieval (sustainable_living)":18.51,"BrightRetrieval (theoremqa_questions)":23.94,"BrightRetrieval (theoremqa_theorems)":23.78}
10
  {"index":6,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Cohere\/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.24,"BrightRetrieval (aops)":6.46,"BrightRetrieval (biology)":18.98,"BrightRetrieval (earth_science)":27.45,"BrightRetrieval (economics)":20.18,"BrightRetrieval (leetcode)":26.78,"BrightRetrieval (pony)":1.77,"BrightRetrieval (psychology)":21.82,"BrightRetrieval (robotics)":16.21,"BrightRetrieval (stackoverflow)":16.47,"BrightRetrieval (sustainable_living)":17.69,"BrightRetrieval (theoremqa_questions)":15.07,"BrightRetrieval (theoremqa_theorems)":6.04}
11
+ {"index":2,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/en.wikipedia.org\/wiki\/Okapi_BM25\">bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":14.29,"BrightRetrieval (aops)":6.2,"BrightRetrieval (biology)":19.19,"BrightRetrieval (earth_science)":27.06,"BrightRetrieval (economics)":14.87,"BrightRetrieval (leetcode)":24.37,"BrightRetrieval (pony)":7.93,"BrightRetrieval (psychology)":12.51,"BrightRetrieval (robotics)":13.53,"BrightRetrieval (stackoverflow)":16.55,"BrightRetrieval (sustainable_living)":15.22,"BrightRetrieval (theoremqa_questions)":9.78,"BrightRetrieval (theoremqa_theorems)":4.25}
12
+ {"index":9,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/hkunlp\/instructor-large\">instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":14.12,"BrightRetrieval (aops)":7.94,"BrightRetrieval (biology)":15.61,"BrightRetrieval (earth_science)":21.52,"BrightRetrieval (economics)":15.99,"BrightRetrieval (leetcode)":20.0,"BrightRetrieval (pony)":1.32,"BrightRetrieval (psychology)":21.94,"BrightRetrieval (robotics)":11.45,"BrightRetrieval (stackoverflow)":11.21,"BrightRetrieval (sustainable_living)":13.16,"BrightRetrieval (theoremqa_questions)":20.07,"BrightRetrieval (theoremqa_theorems)":9.29}
13
+ {"index":5,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-large-en-v1.5\">bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":13.47,"BrightRetrieval (aops)":6.08,"BrightRetrieval (biology)":11.96,"BrightRetrieval (earth_science)":24.15,"BrightRetrieval (economics)":16.59,"BrightRetrieval (leetcode)":26.68,"BrightRetrieval (pony)":5.64,"BrightRetrieval (psychology)":17.44,"BrightRetrieval (robotics)":12.21,"BrightRetrieval (stackoverflow)":9.51,"BrightRetrieval (sustainable_living)":13.27,"BrightRetrieval (theoremqa_questions)":12.56,"BrightRetrieval (theoremqa_theorems)":5.51}
 
boards_data/en/data_tasks/Classification/default.jsonl CHANGED
@@ -205,7 +205,7 @@
205
  {"index":107,"Rank":238,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Shimin\/LLaMA-embeeding\">LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":84.82,"AmazonPolarityClassification":76.88,"AmazonReviewsClassification (en)":36.72,"Banking77Classification":"","EmotionClassification":41.93,"ImdbClassification":"","MassiveIntentClassification (en)":65.91,"MassiveScenarioClassification (en)":67.62,"MTOPDomainClassification (en)":87.95,"MTOPIntentClassification (en)":78.43,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":56.28}
206
  {"index":108,"Rank":239,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Shimin\/yiyouliao\">yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":74.78,"AmazonPolarityClassification":71.89,"AmazonReviewsClassification (en)":36.7,"Banking77Classification":81.37,"EmotionClassification":42.6,"ImdbClassification":63.96,"MassiveIntentClassification (en)":68.56,"MassiveScenarioClassification (en)":74.15,"MTOPDomainClassification (en)":90.19,"MTOPIntentClassification (en)":69.5,"ToxicConversationsClassification":69.85,"TweetSentimentExtractionClassification":""}
207
  {"index":112,"Rank":240,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Snowflake\/snowflake-arctic-embed-m-v1.5\">snowflake-arctic-embed-m-v1.5<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (en)":68.3,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":46.27,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.69,"MassiveScenarioClassification (en)":73.06,"MTOPDomainClassification (en)":91.36,"MTOPIntentClassification (en)":60.64,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
208
- {"index":121,"Rank":243,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/YanshekWoo\/EminEmbed\">EminEmbed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":62.97,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":48.48,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.09,"MassiveScenarioClassification (en)":72.55,"MTOPDomainClassification (en)":90.15,"MTOPIntentClassification (en)":57.92,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
209
  {"index":140,"Rank":248,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/brahmairesearch\/slx-v0.1\">slx-v0.1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","AmazonCounterfactualClassification (en)":61.46,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":30.3,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":67.94,"MassiveScenarioClassification (en)":73.91,"MTOPDomainClassification (en)":91.97,"MTOPIntentClassification (en)":63.3,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
210
  {"index":142,"Rank":249,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":60.48,"MassiveScenarioClassification (en)":65.43,"MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
211
  {"index":212,"Rank":272,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/mukaj\/fin-mpnet-base\">fin-mpnet-base<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":29.13,"Banking77Classification":80.25,"EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
 
205
  {"index":107,"Rank":238,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Shimin\/LLaMA-embeeding\">LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":84.82,"AmazonPolarityClassification":76.88,"AmazonReviewsClassification (en)":36.72,"Banking77Classification":"","EmotionClassification":41.93,"ImdbClassification":"","MassiveIntentClassification (en)":65.91,"MassiveScenarioClassification (en)":67.62,"MTOPDomainClassification (en)":87.95,"MTOPIntentClassification (en)":78.43,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":56.28}
206
  {"index":108,"Rank":239,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Shimin\/yiyouliao\">yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":74.78,"AmazonPolarityClassification":71.89,"AmazonReviewsClassification (en)":36.7,"Banking77Classification":81.37,"EmotionClassification":42.6,"ImdbClassification":63.96,"MassiveIntentClassification (en)":68.56,"MassiveScenarioClassification (en)":74.15,"MTOPDomainClassification (en)":90.19,"MTOPIntentClassification (en)":69.5,"ToxicConversationsClassification":69.85,"TweetSentimentExtractionClassification":""}
207
  {"index":112,"Rank":240,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Snowflake\/snowflake-arctic-embed-m-v1.5\">snowflake-arctic-embed-m-v1.5<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (en)":68.3,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":46.27,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.69,"MassiveScenarioClassification (en)":73.06,"MTOPDomainClassification (en)":91.36,"MTOPIntentClassification (en)":60.64,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
208
+ {"index":121,"Rank":243,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/YanshekWoo\/EminEmbed\">EminEmbed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":67.79,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":48.48,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.09,"MassiveScenarioClassification (en)":72.55,"MTOPDomainClassification (en)":90.15,"MTOPIntentClassification (en)":57.92,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
209
  {"index":140,"Rank":248,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/brahmairesearch\/slx-v0.1\">slx-v0.1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","AmazonCounterfactualClassification (en)":61.46,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":30.3,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":67.94,"MassiveScenarioClassification (en)":73.91,"MTOPDomainClassification (en)":91.97,"MTOPIntentClassification (en)":63.3,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
210
  {"index":142,"Rank":249,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":60.48,"MassiveScenarioClassification (en)":65.43,"MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
211
  {"index":212,"Rank":272,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/mukaj\/fin-mpnet-base\">fin-mpnet-base<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":29.13,"Banking77Classification":80.25,"EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
boards_data/ru/data_overall/default.jsonl CHANGED
@@ -1,25 +1,25 @@
1
- {"index":13,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (16 datasets)":67.64,"Classification Average (7 datasets)":64.57,"Clustering Average (3 datasets)":59.98,"PairClassification Average (1 datasets)":59.38,"Reranking Average (1 datasets)":74.61,"Retrieval Average (2 datasets)":77.96,"STS Average (2 datasets)":80.15}
2
- {"index":11,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (16 datasets)":64.23,"Classification Average (7 datasets)":59.36,"Clustering Average (3 datasets)":53.61,"PairClassification Average (1 datasets)":64.99,"Reranking Average (1 datasets)":73.08,"Retrieval Average (2 datasets)":76.78,"STS Average (2 datasets)":79.85}
3
- {"index":4,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Embedding Dimensions":1024,"Max Tokens":514,"Average (16 datasets)":64.14,"Classification Average (7 datasets)":60.84,"Clustering Average (3 datasets)":56.06,"PairClassification Average (1 datasets)":60.79,"Reranking Average (1 datasets)":70.87,"Retrieval Average (2 datasets)":72.82,"STS Average (2 datasets)":77.42}
4
- {"index":15,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (16 datasets)":63.36,"Classification Average (7 datasets)":58.92,"Clustering Average (3 datasets)":52.55,"PairClassification Average (1 datasets)":58.4,"Reranking Average (1 datasets)":75.58,"Retrieval Average (2 datasets)":77.39,"STS Average (2 datasets)":77.48}
5
- {"index":0,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (16 datasets)":62.69,"Classification Average (7 datasets)":57.43,"Clustering Average (3 datasets)":52.51,"PairClassification Average (1 datasets)":60.6,"Reranking Average (1 datasets)":74.02,"Retrieval Average (2 datasets)":77.1,"STS Average (2 datasets)":77.39}
6
- {"index":10,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":61.26,"Classification Average (7 datasets)":57.86,"Clustering Average (3 datasets)":53.42,"PairClassification Average (1 datasets)":60.02,"Reranking Average (1 datasets)":64.42,"Retrieval Average (2 datasets)":67.34,"STS Average (2 datasets)":77.91}
7
- {"index":23,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":60.74,"Classification Average (7 datasets)":56.55,"Clustering Average (3 datasets)":53.22,"PairClassification Average (1 datasets)":57.81,"Reranking Average (1 datasets)":68.65,"Retrieval Average (2 datasets)":67.54,"STS Average (2 datasets)":77.37}
8
- {"index":14,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (16 datasets)":60.04,"Classification Average (7 datasets)":56.19,"Clustering Average (3 datasets)":50.27,"PairClassification Average (1 datasets)":54.96,"Reranking Average (1 datasets)":72.01,"Retrieval Average (2 datasets)":69.91,"STS Average (2 datasets)":74.9}
9
- {"index":16,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":59.64,"Classification Average (7 datasets)":55.09,"Clustering Average (3 datasets)":51.65,"PairClassification Average (1 datasets)":55.14,"Reranking Average (1 datasets)":71.46,"Retrieval Average (2 datasets)":69.27,"STS Average (2 datasets)":74.27}
10
- {"index":24,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":312,"Max Tokens":2048,"Average (16 datasets)":55.92,"Classification Average (7 datasets)":53.46,"Clustering Average (3 datasets)":49.57,"PairClassification Average (1 datasets)":56.09,"Reranking Average (1 datasets)":62.15,"Retrieval Average (2 datasets)":51.5,"STS Average (2 datasets)":75.32}
11
- {"index":22,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (16 datasets)":55.42,"Classification Average (7 datasets)":54.11,"Clustering Average (3 datasets)":49.18,"PairClassification Average (1 datasets)":64.57,"Reranking Average (1 datasets)":58.77,"Retrieval Average (2 datasets)":44.4,"STS Average (2 datasets)":74.1}
12
- {"index":5,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":514,"Average (16 datasets)":52.61,"Classification Average (7 datasets)":55.44,"Clustering Average (3 datasets)":52.65,"PairClassification Average (1 datasets)":51.97,"Reranking Average (1 datasets)":56.13,"Retrieval Average (2 datasets)":25.6,"STS Average (2 datasets)":68.19}
13
- {"index":17,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":52.02,"Classification Average (7 datasets)":52.35,"Clustering Average (3 datasets)":47.75,"PairClassification Average (1 datasets)":55.71,"Reranking Average (1 datasets)":55.13,"Retrieval Average (2 datasets)":36.38,"STS Average (2 datasets)":69.54}
14
- {"index":21,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":51.89,"Classification Average (7 datasets)":51.38,"Clustering Average (3 datasets)":47.75,"PairClassification Average (1 datasets)":58.56,"Reranking Average (1 datasets)":52.8,"Retrieval Average (2 datasets)":37.26,"STS Average (2 datasets)":70.71}
15
- {"index":7,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":51.44,"Classification Average (7 datasets)":52.73,"Clustering Average (3 datasets)":46.84,"PairClassification Average (1 datasets)":55.61,"Reranking Average (1 datasets)":54.83,"Retrieval Average (2 datasets)":31.88,"STS Average (2 datasets)":69.6}
16
- {"index":6,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":512,"Average (16 datasets)":48.98,"Classification Average (7 datasets)":55.21,"Clustering Average (3 datasets)":51.94,"PairClassification Average (1 datasets)":50.17,"Reranking Average (1 datasets)":46.81,"Retrieval Average (2 datasets)":11.78,"STS Average (2 datasets)":60.44}
17
- {"index":9,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":2048,"Max Tokens":514,"Average (16 datasets)":46.28,"Classification Average (7 datasets)":51.37,"Clustering Average (3 datasets)":41.23,"PairClassification Average (1 datasets)":51.87,"Reranking Average (1 datasets)":46.09,"Retrieval Average (2 datasets)":12.4,"STS Average (2 datasets)":67.28}
18
- {"index":3,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":46.28,"Classification Average (7 datasets)":51.49,"Clustering Average (3 datasets)":43.13,"PairClassification Average (1 datasets)":59.12,"Reranking Average (1 datasets)":39.89,"Retrieval Average (2 datasets)":9.68,"STS Average (2 datasets)":66.13}
19
- {"index":12,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":45.23,"Classification Average (7 datasets)":54.23,"Clustering Average (3 datasets)":42.92,"PairClassification Average (1 datasets)":53.78,"Reranking Average (1 datasets)":34.01,"Retrieval Average (2 datasets)":7.5,"STS Average (2 datasets)":56.25}
20
- {"index":1,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":44.18,"Classification Average (7 datasets)":52.16,"Clustering Average (3 datasets)":38.41,"PairClassification Average (1 datasets)":52.48,"Reranking Average (1 datasets)":42.58,"Retrieval Average (2 datasets)":7.37,"STS Average (2 datasets)":58.36}
21
- {"index":2,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":40.68,"Classification Average (7 datasets)":50.66,"Clustering Average (3 datasets)":27.91,"PairClassification Average (1 datasets)":52.12,"Reranking Average (1 datasets)":41.65,"Retrieval Average (2 datasets)":7.55,"STS Average (2 datasets)":51.84}
22
- {"index":8,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Embedding Dimensions":512,"Max Tokens":514,"Average (16 datasets)":37.07,"Classification Average (7 datasets)":42.68,"Clustering Average (3 datasets)":30.76,"PairClassification Average (1 datasets)":51.06,"Reranking Average (1 datasets)":35.44,"Retrieval Average (2 datasets)":2.02,"STS Average (2 datasets)":55.78}
23
- {"index":19,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":25.97,"Classification Average (7 datasets)":28.67,"Clustering Average (3 datasets)":13.3,"PairClassification Average (1 datasets)":45.03,"Reranking Average (1 datasets)":27.05,"Retrieval Average (2 datasets)":1.66,"STS Average (2 datasets)":49.74}
24
- {"index":18,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":"","Classification Average (7 datasets)":28.33,"Clustering Average (3 datasets)":13.87,"PairClassification Average (1 datasets)":46.4,"Reranking Average (1 datasets)":38.51,"Retrieval Average (2 datasets)":"","STS Average (2 datasets)":50.9}
25
- {"index":20,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (16 datasets)":"","Classification Average (7 datasets)":29.53,"Clustering Average (3 datasets)":15.83,"PairClassification Average (1 datasets)":44.52,"Reranking Average (1 datasets)":30.96,"Retrieval Average (2 datasets)":"","STS Average (2 datasets)":48.92}
 
1
+ {"index":0,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (23 datasets)":61.54,"Classification Average (9 datasets)":60.46,"Clustering Average (3 datasets)":52.51,"PairClassification Average (1 datasets)":60.6,"Reranking Average (2 datasets)":69.7,"Retrieval Average (3 datasets)":74.77,"STS Average (3 datasets)":73.68,"MultilabelClassification Average (2 datasets)":34.25}
2
+ {"index":15,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (23 datasets)":61.45,"Classification Average (9 datasets)":61.01,"Clustering Average (3 datasets)":52.55,"PairClassification Average (1 datasets)":58.4,"Reranking Average (2 datasets)":69.64,"Retrieval Average (3 datasets)":74.04,"STS Average (3 datasets)":71.62,"MultilabelClassification Average (2 datasets)":36.01}
3
+ {"index":14,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (23 datasets)":58.34,"Classification Average (9 datasets)":58.26,"Clustering Average (3 datasets)":50.27,"PairClassification Average (1 datasets)":54.96,"Reranking Average (2 datasets)":66.24,"Retrieval Average (3 datasets)":67.14,"STS Average (3 datasets)":70.16,"MultilabelClassification Average (2 datasets)":33.65}
4
+ {"index":16,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (23 datasets)":57.33,"Classification Average (9 datasets)":56.44,"Clustering Average (3 datasets)":51.65,"PairClassification Average (1 datasets)":55.14,"Reranking Average (2 datasets)":65.29,"Retrieval Average (3 datasets)":65.85,"STS Average (3 datasets)":69.48,"MultilabelClassification Average (2 datasets)":31.99}
5
+ {"index":5,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":514,"Average (23 datasets)":48.89,"Classification Average (9 datasets)":57.52,"Clustering Average (3 datasets)":52.65,"PairClassification Average (1 datasets)":51.97,"Reranking Average (2 datasets)":40.56,"Retrieval Average (3 datasets)":19.13,"STS Average (3 datasets)":64.4,"MultilabelClassification Average (2 datasets)":32.64}
6
+ {"index":6,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":512,"Average (23 datasets)":45.55,"Classification Average (9 datasets)":57.24,"Clustering Average (3 datasets)":51.94,"PairClassification Average (1 datasets)":50.17,"Reranking Average (2 datasets)":32.8,"Retrieval Average (3 datasets)":8.51,"STS Average (3 datasets)":57.21,"MultilabelClassification Average (2 datasets)":31.9}
7
+ {"index":9,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":2048,"Max Tokens":514,"Average (23 datasets)":42.49,"Classification Average (9 datasets)":52.17,"Clustering Average (3 datasets)":41.23,"PairClassification Average (1 datasets)":51.87,"Reranking Average (2 datasets)":30.95,"Retrieval Average (3 datasets)":8.89,"STS Average (3 datasets)":61.6,"MultilabelClassification Average (2 datasets)":29.44}
8
+ {"index":1,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":55.15,"Clustering Average (3 datasets)":38.41,"PairClassification Average (1 datasets)":52.48,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":56.2,"MultilabelClassification Average (2 datasets)":29.32}
9
+ {"index":2,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":51.6,"Clustering Average (3 datasets)":27.91,"PairClassification Average (1 datasets)":52.12,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":46.22,"MultilabelClassification Average (2 datasets)":26.2}
10
+ {"index":3,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":51.27,"Clustering Average (3 datasets)":43.13,"PairClassification Average (1 datasets)":59.12,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":61.18,"MultilabelClassification Average (2 datasets)":27.8}
11
+ {"index":4,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Embedding Dimensions":1024,"Max Tokens":514,"Average (23 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (3 datasets)":56.06,"PairClassification Average (1 datasets)":60.79,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":"","MultilabelClassification Average (2 datasets)":38.88}
12
+ {"index":7,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":54.98,"Clustering Average (3 datasets)":46.84,"PairClassification Average (1 datasets)":55.61,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":65.91,"MultilabelClassification Average (2 datasets)":31.27}
13
+ {"index":8,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Embedding Dimensions":512,"Max Tokens":514,"Average (23 datasets)":"","Classification Average (9 datasets)":44.55,"Clustering Average (3 datasets)":30.76,"PairClassification Average (1 datasets)":51.06,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":53.15,"MultilabelClassification Average (2 datasets)":27.96}
14
+ {"index":10,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":59.88,"Clustering Average (3 datasets)":53.42,"PairClassification Average (1 datasets)":60.02,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":73.07,"MultilabelClassification Average (2 datasets)":36.98}
15
+ {"index":11,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (23 datasets)":"","Classification Average (9 datasets)":61.92,"Clustering Average (3 datasets)":53.61,"PairClassification Average (1 datasets)":64.99,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":75.38,"MultilabelClassification Average (2 datasets)":35.88}
16
+ {"index":12,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":56.18,"Clustering Average (3 datasets)":42.92,"PairClassification Average (1 datasets)":53.78,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":53.39,"MultilabelClassification Average (2 datasets)":28.9}
17
+ {"index":13,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (23 datasets)":"","Classification Average (9 datasets)":67.52,"Clustering Average (3 datasets)":59.98,"PairClassification Average (1 datasets)":59.38,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":"","MultilabelClassification Average (2 datasets)":33.37}
18
+ {"index":17,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":54.7,"Clustering Average (3 datasets)":47.75,"PairClassification Average (1 datasets)":55.71,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":65.52,"MultilabelClassification Average (2 datasets)":31.42}
19
+ {"index":18,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":28.15,"Clustering Average (3 datasets)":13.87,"PairClassification Average (1 datasets)":46.4,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":37.66,"MultilabelClassification Average (2 datasets)":25.96}
20
+ {"index":19,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":28.75,"Clustering Average (3 datasets)":13.3,"PairClassification Average (1 datasets)":45.03,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":1.23,"STS Average (3 datasets)":38.07,"MultilabelClassification Average (2 datasets)":25.27}
21
+ {"index":20,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (23 datasets)":"","Classification Average (9 datasets)":28.82,"Clustering Average (3 datasets)":15.83,"PairClassification Average (1 datasets)":44.52,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":37.89,"MultilabelClassification Average (2 datasets)":26.9}
22
+ {"index":21,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":53.77,"Clustering Average (3 datasets)":47.75,"PairClassification Average (1 datasets)":58.56,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":66.17,"MultilabelClassification Average (2 datasets)":31.3}
23
+ {"index":22,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (23 datasets)":"","Classification Average (9 datasets)":56.88,"Clustering Average (3 datasets)":49.18,"PairClassification Average (1 datasets)":64.57,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":68.98,"MultilabelClassification Average (2 datasets)":32.9}
24
+ {"index":23,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":59.23,"Clustering Average (3 datasets)":53.22,"PairClassification Average (1 datasets)":57.81,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":72.54,"MultilabelClassification Average (2 datasets)":36.32}
25
+ {"index":24,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":312,"Max Tokens":2048,"Average (23 datasets)":"","Classification Average (9 datasets)":55.01,"Clustering Average (3 datasets)":49.57,"PairClassification Average (1 datasets)":56.09,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":70.23,"MultilabelClassification Average (2 datasets)":31.7}
boards_data/ru/data_tasks/Classification/default.jsonl CHANGED
@@ -1,25 +1,25 @@
1
- {"index":13,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":64.57,"GeoreviewClassification (rus-Cyrl)":50.25,"HeadlineClassification (rus-Cyrl)":85.68,"InappropriatenessClassification (rus-Cyrl)":67.19,"KinopoiskClassification (rus-Cyrl)":65.49,"RuReviewsClassification (rus-Cyrl)":67.68,"RuSciBenchGRNTIClassification (rus-Cyrl)":64.59,"RuSciBenchOECDClassification (rus-Cyrl)":51.13}
2
- {"index":4,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":60.84,"GeoreviewClassification (rus-Cyrl)":49.7,"HeadlineClassification (rus-Cyrl)":78.0,"InappropriatenessClassification (rus-Cyrl)":61.32,"KinopoiskClassification (rus-Cyrl)":63.27,"RuReviewsClassification (rus-Cyrl)":67.96,"RuSciBenchGRNTIClassification (rus-Cyrl)":59.33,"RuSciBenchOECDClassification (rus-Cyrl)":46.33}
3
- {"index":11,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":59.36,"GeoreviewClassification (rus-Cyrl)":50.98,"HeadlineClassification (rus-Cyrl)":70.09,"InappropriatenessClassification (rus-Cyrl)":60.76,"KinopoiskClassification (rus-Cyrl)":63.33,"RuReviewsClassification (rus-Cyrl)":68.52,"RuSciBenchGRNTIClassification (rus-Cyrl)":57.67,"RuSciBenchOECDClassification (rus-Cyrl)":44.2}
4
- {"index":15,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":58.92,"GeoreviewClassification (rus-Cyrl)":49.69,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":61.6,"KinopoiskClassification (rus-Cyrl)":56.59,"RuReviewsClassification (rus-Cyrl)":65.28,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.2,"RuSciBenchOECDClassification (rus-Cyrl)":43.91}
5
- {"index":10,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":57.86,"GeoreviewClassification (rus-Cyrl)":47.23,"HeadlineClassification (rus-Cyrl)":74.88,"InappropriatenessClassification (rus-Cyrl)":61.94,"KinopoiskClassification (rus-Cyrl)":55.69,"RuReviewsClassification (rus-Cyrl)":66.44,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.55,"RuSciBenchOECDClassification (rus-Cyrl)":43.28}
6
- {"index":0,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":57.43,"GeoreviewClassification (rus-Cyrl)":48.27,"HeadlineClassification (rus-Cyrl)":70.32,"InappropriatenessClassification (rus-Cyrl)":59.87,"KinopoiskClassification (rus-Cyrl)":58.23,"RuReviewsClassification (rus-Cyrl)":66.91,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.81,"RuSciBenchOECDClassification (rus-Cyrl)":42.57}
7
- {"index":23,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":56.55,"GeoreviewClassification (rus-Cyrl)":46.04,"HeadlineClassification (rus-Cyrl)":69.98,"InappropriatenessClassification (rus-Cyrl)":61.39,"KinopoiskClassification (rus-Cyrl)":53.59,"RuReviewsClassification (rus-Cyrl)":64.58,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.67,"RuSciBenchOECDClassification (rus-Cyrl)":43.58}
8
- {"index":14,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":56.19,"GeoreviewClassification (rus-Cyrl)":46.05,"HeadlineClassification (rus-Cyrl)":75.64,"InappropriatenessClassification (rus-Cyrl)":58.78,"KinopoiskClassification (rus-Cyrl)":50.89,"RuReviewsClassification (rus-Cyrl)":62.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.28,"RuSciBenchOECDClassification (rus-Cyrl)":42.69}
9
- {"index":5,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.44,"GeoreviewClassification (rus-Cyrl)":39.67,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":64.64,"KinopoiskClassification (rus-Cyrl)":50.33,"RuReviewsClassification (rus-Cyrl)":58.29,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.19,"RuSciBenchOECDClassification (rus-Cyrl)":43.8}
10
- {"index":6,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.21,"GeoreviewClassification (rus-Cyrl)":39.97,"HeadlineClassification (rus-Cyrl)":79.26,"InappropriatenessClassification (rus-Cyrl)":62.52,"KinopoiskClassification (rus-Cyrl)":49.51,"RuReviewsClassification (rus-Cyrl)":58.27,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.9,"RuSciBenchOECDClassification (rus-Cyrl)":43.04}
11
- {"index":16,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":55.09,"GeoreviewClassification (rus-Cyrl)":44.66,"HeadlineClassification (rus-Cyrl)":73.94,"InappropriatenessClassification (rus-Cyrl)":59.16,"KinopoiskClassification (rus-Cyrl)":49.96,"RuReviewsClassification (rus-Cyrl)":61.18,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.99,"RuSciBenchOECDClassification (rus-Cyrl)":41.72}
12
- {"index":12,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":54.23,"GeoreviewClassification (rus-Cyrl)":40.19,"HeadlineClassification (rus-Cyrl)":78.75,"InappropriatenessClassification (rus-Cyrl)":61.33,"KinopoiskClassification (rus-Cyrl)":48.78,"RuReviewsClassification (rus-Cyrl)":55.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.53,"RuSciBenchOECDClassification (rus-Cyrl)":41.34}
13
- {"index":22,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.11,"GeoreviewClassification (rus-Cyrl)":42.33,"HeadlineClassification (rus-Cyrl)":70.35,"InappropriatenessClassification (rus-Cyrl)":59.32,"KinopoiskClassification (rus-Cyrl)":44.31,"RuReviewsClassification (rus-Cyrl)":62.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.01,"RuSciBenchOECDClassification (rus-Cyrl)":44.14}
14
- {"index":24,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":53.46,"GeoreviewClassification (rus-Cyrl)":41.36,"HeadlineClassification (rus-Cyrl)":68.9,"InappropriatenessClassification (rus-Cyrl)":59.11,"KinopoiskClassification (rus-Cyrl)":50.47,"RuReviewsClassification (rus-Cyrl)":60.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.93,"RuSciBenchOECDClassification (rus-Cyrl)":40.79}
15
- {"index":7,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":52.73,"GeoreviewClassification (rus-Cyrl)":40.89,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.48,"KinopoiskClassification (rus-Cyrl)":49.85,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.8,"RuSciBenchOECDClassification (rus-Cyrl)":40.36}
16
- {"index":17,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":52.35,"GeoreviewClassification (rus-Cyrl)":40.86,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.52,"KinopoiskClassification (rus-Cyrl)":46.77,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.04,"RuSciBenchOECDClassification (rus-Cyrl)":40.48}
17
- {"index":1,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":52.16,"GeoreviewClassification (rus-Cyrl)":38.95,"HeadlineClassification (rus-Cyrl)":75.59,"InappropriatenessClassification (rus-Cyrl)":60.68,"KinopoiskClassification (rus-Cyrl)":49.67,"RuReviewsClassification (rus-Cyrl)":54.05,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.53,"RuSciBenchOECDClassification (rus-Cyrl)":37.65}
18
- {"index":3,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.49,"GeoreviewClassification (rus-Cyrl)":38.05,"HeadlineClassification (rus-Cyrl)":67.64,"InappropriatenessClassification (rus-Cyrl)":58.27,"KinopoiskClassification (rus-Cyrl)":45.86,"RuReviewsClassification (rus-Cyrl)":58.34,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.18,"RuSciBenchOECDClassification (rus-Cyrl)":40.11}
19
- {"index":21,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.38,"GeoreviewClassification (rus-Cyrl)":38.24,"HeadlineClassification (rus-Cyrl)":68.3,"InappropriatenessClassification (rus-Cyrl)":58.18,"KinopoiskClassification (rus-Cyrl)":41.45,"RuReviewsClassification (rus-Cyrl)":58.88,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.19,"RuSciBenchOECDClassification (rus-Cyrl)":41.41}
20
- {"index":9,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.37,"GeoreviewClassification (rus-Cyrl)":39.64,"HeadlineClassification (rus-Cyrl)":74.19,"InappropriatenessClassification (rus-Cyrl)":58.57,"KinopoiskClassification (rus-Cyrl)":49.06,"RuReviewsClassification (rus-Cyrl)":56.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":45.63,"RuSciBenchOECDClassification (rus-Cyrl)":35.48}
21
- {"index":2,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":50.66,"GeoreviewClassification (rus-Cyrl)":37.22,"HeadlineClassification (rus-Cyrl)":75.23,"InappropriatenessClassification (rus-Cyrl)":57.34,"KinopoiskClassification (rus-Cyrl)":49.91,"RuReviewsClassification (rus-Cyrl)":50.74,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.03,"RuSciBenchOECDClassification (rus-Cyrl)":36.13}
22
- {"index":8,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":42.68,"GeoreviewClassification (rus-Cyrl)":33.45,"HeadlineClassification (rus-Cyrl)":57.65,"InappropriatenessClassification (rus-Cyrl)":54.5,"KinopoiskClassification (rus-Cyrl)":41.36,"RuReviewsClassification (rus-Cyrl)":49.56,"RuSciBenchGRNTIClassification (rus-Cyrl)":35.71,"RuSciBenchOECDClassification (rus-Cyrl)":26.51}
23
- {"index":20,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":29.53,"GeoreviewClassification (rus-Cyrl)":25.93,"HeadlineClassification (rus-Cyrl)":28.53,"InappropriatenessClassification (rus-Cyrl)":51.82,"KinopoiskClassification (rus-Cyrl)":34.18,"RuReviewsClassification (rus-Cyrl)":42.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":13.29,"RuSciBenchOECDClassification (rus-Cyrl)":10.62}
24
- {"index":19,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.67,"GeoreviewClassification (rus-Cyrl)":27.08,"HeadlineClassification (rus-Cyrl)":27.77,"InappropriatenessClassification (rus-Cyrl)":51.73,"KinopoiskClassification (rus-Cyrl)":33.93,"RuReviewsClassification (rus-Cyrl)":41.79,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.08,"RuSciBenchOECDClassification (rus-Cyrl)":8.3}
25
- {"index":18,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":28.33,"GeoreviewClassification (rus-Cyrl)":23.49,"HeadlineClassification (rus-Cyrl)":28.49,"InappropriatenessClassification (rus-Cyrl)":50.85,"KinopoiskClassification (rus-Cyrl)":34.17,"RuReviewsClassification (rus-Cyrl)":42.49,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.49,"RuSciBenchOECDClassification (rus-Cyrl)":8.31}
 
1
+ {"index":13,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":67.52,"GeoreviewClassification (rus-Cyrl)":50.25,"HeadlineClassification (rus-Cyrl)":85.68,"InappropriatenessClassification (rus-Cyrl)":67.19,"KinopoiskClassification (rus-Cyrl)":65.49,"RuReviewsClassification (rus-Cyrl)":67.68,"RuSciBenchGRNTIClassification (rus-Cyrl)":64.59,"RuSciBenchOECDClassification (rus-Cyrl)":51.13,"MassiveIntentClassification (rus-Cyrl)":76.08,"MassiveScenarioClassification (rus-Cyrl)":79.61}
2
+ {"index":11,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":61.92,"GeoreviewClassification (rus-Cyrl)":50.98,"HeadlineClassification (rus-Cyrl)":70.09,"InappropriatenessClassification (rus-Cyrl)":60.76,"KinopoiskClassification (rus-Cyrl)":63.33,"RuReviewsClassification (rus-Cyrl)":68.52,"RuSciBenchGRNTIClassification (rus-Cyrl)":57.67,"RuSciBenchOECDClassification (rus-Cyrl)":44.2,"MassiveIntentClassification (rus-Cyrl)":68.85,"MassiveScenarioClassification (rus-Cyrl)":72.9}
3
+ {"index":15,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":61.01,"GeoreviewClassification (rus-Cyrl)":49.69,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":61.6,"KinopoiskClassification (rus-Cyrl)":56.59,"RuReviewsClassification (rus-Cyrl)":65.28,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.2,"RuSciBenchOECDClassification (rus-Cyrl)":43.91,"MassiveIntentClassification (rus-Cyrl)":65.76,"MassiveScenarioClassification (rus-Cyrl)":70.85}
4
+ {"index":0,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":60.46,"GeoreviewClassification (rus-Cyrl)":48.27,"HeadlineClassification (rus-Cyrl)":70.32,"InappropriatenessClassification (rus-Cyrl)":59.87,"KinopoiskClassification (rus-Cyrl)":58.23,"RuReviewsClassification (rus-Cyrl)":66.91,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.81,"RuSciBenchOECDClassification (rus-Cyrl)":42.57,"MassiveIntentClassification (rus-Cyrl)":68.75,"MassiveScenarioClassification (rus-Cyrl)":73.42}
5
+ {"index":10,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":59.88,"GeoreviewClassification (rus-Cyrl)":47.23,"HeadlineClassification (rus-Cyrl)":74.88,"InappropriatenessClassification (rus-Cyrl)":61.94,"KinopoiskClassification (rus-Cyrl)":55.69,"RuReviewsClassification (rus-Cyrl)":66.44,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.55,"RuSciBenchOECDClassification (rus-Cyrl)":43.28,"MassiveIntentClassification (rus-Cyrl)":65.57,"MassiveScenarioClassification (rus-Cyrl)":68.33}
6
+ {"index":23,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":59.23,"GeoreviewClassification (rus-Cyrl)":46.04,"HeadlineClassification (rus-Cyrl)":69.98,"InappropriatenessClassification (rus-Cyrl)":61.39,"KinopoiskClassification (rus-Cyrl)":53.59,"RuReviewsClassification (rus-Cyrl)":64.58,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.67,"RuSciBenchOECDClassification (rus-Cyrl)":43.58,"MassiveIntentClassification (rus-Cyrl)":66.08,"MassiveScenarioClassification (rus-Cyrl)":71.13}
7
+ {"index":14,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":58.26,"GeoreviewClassification (rus-Cyrl)":46.05,"HeadlineClassification (rus-Cyrl)":75.64,"InappropriatenessClassification (rus-Cyrl)":58.78,"KinopoiskClassification (rus-Cyrl)":50.89,"RuReviewsClassification (rus-Cyrl)":62.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.28,"RuSciBenchOECDClassification (rus-Cyrl)":42.69,"MassiveIntentClassification (rus-Cyrl)":62.78,"MassiveScenarioClassification (rus-Cyrl)":68.21}
8
+ {"index":5,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":57.52,"GeoreviewClassification (rus-Cyrl)":39.67,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":64.64,"KinopoiskClassification (rus-Cyrl)":50.33,"RuReviewsClassification (rus-Cyrl)":58.29,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.19,"RuSciBenchOECDClassification (rus-Cyrl)":43.8,"MassiveIntentClassification (rus-Cyrl)":61.42,"MassiveScenarioClassification (rus-Cyrl)":68.13}
9
+ {"index":6,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":57.24,"GeoreviewClassification (rus-Cyrl)":39.97,"HeadlineClassification (rus-Cyrl)":79.26,"InappropriatenessClassification (rus-Cyrl)":62.52,"KinopoiskClassification (rus-Cyrl)":49.51,"RuReviewsClassification (rus-Cyrl)":58.27,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.9,"RuSciBenchOECDClassification (rus-Cyrl)":43.04,"MassiveIntentClassification (rus-Cyrl)":61.09,"MassiveScenarioClassification (rus-Cyrl)":67.6}
10
+ {"index":22,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":56.88,"GeoreviewClassification (rus-Cyrl)":42.33,"HeadlineClassification (rus-Cyrl)":70.35,"InappropriatenessClassification (rus-Cyrl)":59.32,"KinopoiskClassification (rus-Cyrl)":44.31,"RuReviewsClassification (rus-Cyrl)":62.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.01,"RuSciBenchOECDClassification (rus-Cyrl)":44.14,"MassiveIntentClassification (rus-Cyrl)":63.23,"MassiveScenarioClassification (rus-Cyrl)":69.92}
11
+ {"index":16,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":56.44,"GeoreviewClassification (rus-Cyrl)":44.66,"HeadlineClassification (rus-Cyrl)":73.94,"InappropriatenessClassification (rus-Cyrl)":59.16,"KinopoiskClassification (rus-Cyrl)":49.96,"RuReviewsClassification (rus-Cyrl)":61.18,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.99,"RuSciBenchOECDClassification (rus-Cyrl)":41.72,"MassiveIntentClassification (rus-Cyrl)":58.43,"MassiveScenarioClassification (rus-Cyrl)":63.89}
12
+ {"index":12,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":56.18,"GeoreviewClassification (rus-Cyrl)":40.19,"HeadlineClassification (rus-Cyrl)":78.75,"InappropriatenessClassification (rus-Cyrl)":61.33,"KinopoiskClassification (rus-Cyrl)":48.78,"RuReviewsClassification (rus-Cyrl)":55.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.53,"RuSciBenchOECDClassification (rus-Cyrl)":41.34,"MassiveIntentClassification (rus-Cyrl)":61.32,"MassiveScenarioClassification (rus-Cyrl)":64.71}
13
+ {"index":1,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":55.15,"GeoreviewClassification (rus-Cyrl)":38.95,"HeadlineClassification (rus-Cyrl)":75.59,"InappropriatenessClassification (rus-Cyrl)":60.68,"KinopoiskClassification (rus-Cyrl)":49.67,"RuReviewsClassification (rus-Cyrl)":54.05,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.53,"RuSciBenchOECDClassification (rus-Cyrl)":37.65,"MassiveIntentClassification (rus-Cyrl)":63.12,"MassiveScenarioClassification (rus-Cyrl)":68.08}
14
+ {"index":24,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":55.01,"GeoreviewClassification (rus-Cyrl)":41.36,"HeadlineClassification (rus-Cyrl)":68.9,"InappropriatenessClassification (rus-Cyrl)":59.11,"KinopoiskClassification (rus-Cyrl)":50.47,"RuReviewsClassification (rus-Cyrl)":60.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.93,"RuSciBenchOECDClassification (rus-Cyrl)":40.79,"MassiveIntentClassification (rus-Cyrl)":57.98,"MassiveScenarioClassification (rus-Cyrl)":62.9}
15
+ {"index":7,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":54.98,"GeoreviewClassification (rus-Cyrl)":40.89,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.48,"KinopoiskClassification (rus-Cyrl)":49.85,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.8,"RuSciBenchOECDClassification (rus-Cyrl)":40.36,"MassiveIntentClassification (rus-Cyrl)":60.53,"MassiveScenarioClassification (rus-Cyrl)":65.15}
16
+ {"index":17,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":54.7,"GeoreviewClassification (rus-Cyrl)":40.86,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.52,"KinopoiskClassification (rus-Cyrl)":46.77,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.04,"RuSciBenchOECDClassification (rus-Cyrl)":40.48,"MassiveIntentClassification (rus-Cyrl)":60.64,"MassiveScenarioClassification (rus-Cyrl)":65.23}
17
+ {"index":21,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.77,"GeoreviewClassification (rus-Cyrl)":38.24,"HeadlineClassification (rus-Cyrl)":68.3,"InappropriatenessClassification (rus-Cyrl)":58.18,"KinopoiskClassification (rus-Cyrl)":41.45,"RuReviewsClassification (rus-Cyrl)":58.88,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.19,"RuSciBenchOECDClassification (rus-Cyrl)":41.41,"MassiveIntentClassification (rus-Cyrl)":59.06,"MassiveScenarioClassification (rus-Cyrl)":65.25}
18
+ {"index":9,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":52.17,"GeoreviewClassification (rus-Cyrl)":39.64,"HeadlineClassification (rus-Cyrl)":74.19,"InappropriatenessClassification (rus-Cyrl)":58.57,"KinopoiskClassification (rus-Cyrl)":49.06,"RuReviewsClassification (rus-Cyrl)":56.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":45.63,"RuSciBenchOECDClassification (rus-Cyrl)":35.48,"MassiveIntentClassification (rus-Cyrl)":50.83,"MassiveScenarioClassification (rus-Cyrl)":59.15}
19
+ {"index":2,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.6,"GeoreviewClassification (rus-Cyrl)":37.22,"HeadlineClassification (rus-Cyrl)":75.23,"InappropriatenessClassification (rus-Cyrl)":57.34,"KinopoiskClassification (rus-Cyrl)":49.91,"RuReviewsClassification (rus-Cyrl)":50.74,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.03,"RuSciBenchOECDClassification (rus-Cyrl)":36.13,"MassiveIntentClassification (rus-Cyrl)":53.02,"MassiveScenarioClassification (rus-Cyrl)":56.79}
20
+ {"index":3,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.27,"GeoreviewClassification (rus-Cyrl)":38.05,"HeadlineClassification (rus-Cyrl)":67.64,"InappropriatenessClassification (rus-Cyrl)":58.27,"KinopoiskClassification (rus-Cyrl)":45.86,"RuReviewsClassification (rus-Cyrl)":58.34,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.18,"RuSciBenchOECDClassification (rus-Cyrl)":40.11,"MassiveIntentClassification (rus-Cyrl)":49.1,"MassiveScenarioClassification (rus-Cyrl)":51.91}
21
+ {"index":8,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":44.55,"GeoreviewClassification (rus-Cyrl)":33.45,"HeadlineClassification (rus-Cyrl)":57.65,"InappropriatenessClassification (rus-Cyrl)":54.5,"KinopoiskClassification (rus-Cyrl)":41.36,"RuReviewsClassification (rus-Cyrl)":49.56,"RuSciBenchGRNTIClassification (rus-Cyrl)":35.71,"RuSciBenchOECDClassification (rus-Cyrl)":26.51,"MassiveIntentClassification (rus-Cyrl)":50.1,"MassiveScenarioClassification (rus-Cyrl)":52.15}
22
+ {"index":20,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":28.82,"GeoreviewClassification (rus-Cyrl)":25.93,"HeadlineClassification (rus-Cyrl)":28.53,"InappropriatenessClassification (rus-Cyrl)":51.82,"KinopoiskClassification (rus-Cyrl)":34.18,"RuReviewsClassification (rus-Cyrl)":42.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":13.29,"RuSciBenchOECDClassification (rus-Cyrl)":10.62,"MassiveIntentClassification (rus-Cyrl)":23.98,"MassiveScenarioClassification (rus-Cyrl)":28.71}
23
+ {"index":19,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.75,"GeoreviewClassification (rus-Cyrl)":27.08,"HeadlineClassification (rus-Cyrl)":27.77,"InappropriatenessClassification (rus-Cyrl)":51.73,"KinopoiskClassification (rus-Cyrl)":33.93,"RuReviewsClassification (rus-Cyrl)":41.79,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.08,"RuSciBenchOECDClassification (rus-Cyrl)":8.3,"MassiveIntentClassification (rus-Cyrl)":27.58,"MassiveScenarioClassification (rus-Cyrl)":30.46}
24
+ {"index":18,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":28.15,"GeoreviewClassification (rus-Cyrl)":23.49,"HeadlineClassification (rus-Cyrl)":28.49,"InappropriatenessClassification (rus-Cyrl)":50.85,"KinopoiskClassification (rus-Cyrl)":34.17,"RuReviewsClassification (rus-Cyrl)":42.49,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.49,"RuSciBenchOECDClassification (rus-Cyrl)":8.31,"MassiveIntentClassification (rus-Cyrl)":26.29,"MassiveScenarioClassification (rus-Cyrl)":28.77}
25
+ {"index":4,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","GeoreviewClassification (rus-Cyrl)":49.7,"HeadlineClassification (rus-Cyrl)":78.0,"InappropriatenessClassification (rus-Cyrl)":61.32,"KinopoiskClassification (rus-Cyrl)":63.27,"RuReviewsClassification (rus-Cyrl)":67.96,"RuSciBenchGRNTIClassification (rus-Cyrl)":59.33,"RuSciBenchOECDClassification (rus-Cyrl)":46.33,"MassiveIntentClassification (rus-Cyrl)":"","MassiveScenarioClassification (rus-Cyrl)":""}
boards_data/ru/data_tasks/Clustering/default.jsonl CHANGED
@@ -11,8 +11,8 @@
11
  {"index":14,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":50.27,"GeoreviewClusteringP2P (rus-Cyrl)":54.46,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.56,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.78}
12
  {"index":24,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":49.57,"GeoreviewClusteringP2P (rus-Cyrl)":59.71,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.55,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.44}
13
  {"index":22,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":49.18,"GeoreviewClusteringP2P (rus-Cyrl)":56.18,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.47,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":42.9}
14
- {"index":17,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":52.19,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":49.09,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.97}
15
- {"index":21,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":53.35,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.22,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.68}
16
  {"index":7,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":46.84,"GeoreviewClusteringP2P (rus-Cyrl)":51.89,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.48,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.16}
17
  {"index":3,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":43.13,"GeoreviewClusteringP2P (rus-Cyrl)":41.82,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":46.29,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.28}
18
  {"index":12,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":42.92,"GeoreviewClusteringP2P (rus-Cyrl)":58.79,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":36.66,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":33.31}
 
11
  {"index":14,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":50.27,"GeoreviewClusteringP2P (rus-Cyrl)":54.46,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.56,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.78}
12
  {"index":24,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":49.57,"GeoreviewClusteringP2P (rus-Cyrl)":59.71,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.55,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.44}
13
  {"index":22,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":49.18,"GeoreviewClusteringP2P (rus-Cyrl)":56.18,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.47,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":42.9}
14
+ {"index":21,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":53.35,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.22,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.68}
15
+ {"index":17,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":52.19,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":49.09,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.97}
16
  {"index":7,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":46.84,"GeoreviewClusteringP2P (rus-Cyrl)":51.89,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.48,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.16}
17
  {"index":3,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":43.13,"GeoreviewClusteringP2P (rus-Cyrl)":41.82,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":46.29,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.28}
18
  {"index":12,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":42.92,"GeoreviewClusteringP2P (rus-Cyrl)":58.79,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":36.66,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":33.31}
boards_data/ru/data_tasks/MultilabelClassification/default.jsonl ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {"index":4,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":38.88,"CEDRClassification (rus-Cyrl)":44.69,"SensitiveTopicsClassification (rus-Cyrl)":33.07}
2
+ {"index":10,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":36.98,"CEDRClassification (rus-Cyrl)":46.47,"SensitiveTopicsClassification (rus-Cyrl)":27.5}
3
+ {"index":23,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":36.32,"CEDRClassification (rus-Cyrl)":45.11,"SensitiveTopicsClassification (rus-Cyrl)":27.52}
4
+ {"index":15,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":36.01,"CEDRClassification (rus-Cyrl)":44.84,"SensitiveTopicsClassification (rus-Cyrl)":27.17}
5
+ {"index":11,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":35.88,"CEDRClassification (rus-Cyrl)":45.48,"SensitiveTopicsClassification (rus-Cyrl)":26.29}
6
+ {"index":0,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":34.25,"CEDRClassification (rus-Cyrl)":43.47,"SensitiveTopicsClassification (rus-Cyrl)":25.03}
7
+ {"index":14,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":33.65,"CEDRClassification (rus-Cyrl)":42.32,"SensitiveTopicsClassification (rus-Cyrl)":24.98}
8
+ {"index":13,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":33.37,"CEDRClassification (rus-Cyrl)":40.8,"SensitiveTopicsClassification (rus-Cyrl)":25.94}
9
+ {"index":22,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":32.9,"CEDRClassification (rus-Cyrl)":39.98,"SensitiveTopicsClassification (rus-Cyrl)":25.83}
10
+ {"index":5,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":32.64,"CEDRClassification (rus-Cyrl)":36.81,"SensitiveTopicsClassification (rus-Cyrl)":28.47}
11
+ {"index":16,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.99,"CEDRClassification (rus-Cyrl)":40.07,"SensitiveTopicsClassification (rus-Cyrl)":23.91}
12
+ {"index":6,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":31.9,"CEDRClassification (rus-Cyrl)":35.84,"SensitiveTopicsClassification (rus-Cyrl)":27.97}
13
+ {"index":24,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":31.7,"CEDRClassification (rus-Cyrl)":38.95,"SensitiveTopicsClassification (rus-Cyrl)":24.44}
14
+ {"index":17,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":31.42,"CEDRClassification (rus-Cyrl)":40.61,"SensitiveTopicsClassification (rus-Cyrl)":22.23}
15
+ {"index":21,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.3,"CEDRClassification (rus-Cyrl)":37.76,"SensitiveTopicsClassification (rus-Cyrl)":24.84}
16
+ {"index":7,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":31.27,"CEDRClassification (rus-Cyrl)":40.75,"SensitiveTopicsClassification (rus-Cyrl)":21.79}
17
+ {"index":9,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":29.44,"CEDRClassification (rus-Cyrl)":36.87,"SensitiveTopicsClassification (rus-Cyrl)":22.02}
18
+ {"index":1,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":29.32,"CEDRClassification (rus-Cyrl)":36.19,"SensitiveTopicsClassification (rus-Cyrl)":22.45}
19
+ {"index":12,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":28.9,"CEDRClassification (rus-Cyrl)":34.14,"SensitiveTopicsClassification (rus-Cyrl)":23.67}
20
+ {"index":8,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":27.96,"CEDRClassification (rus-Cyrl)":37.39,"SensitiveTopicsClassification (rus-Cyrl)":18.54}
21
+ {"index":3,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":27.8,"CEDRClassification (rus-Cyrl)":35.55,"SensitiveTopicsClassification (rus-Cyrl)":20.05}
22
+ {"index":20,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.9,"CEDRClassification (rus-Cyrl)":35.98,"SensitiveTopicsClassification (rus-Cyrl)":17.83}
23
+ {"index":2,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":26.2,"CEDRClassification (rus-Cyrl)":33.59,"SensitiveTopicsClassification (rus-Cyrl)":18.8}
24
+ {"index":18,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":25.96,"CEDRClassification (rus-Cyrl)":33.86,"SensitiveTopicsClassification (rus-Cyrl)":18.05}
25
+ {"index":19,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":25.27,"CEDRClassification (rus-Cyrl)":32.72,"SensitiveTopicsClassification (rus-Cyrl)":17.82}
boards_data/ru/data_tasks/Reranking/default.jsonl CHANGED
@@ -1,25 +1,25 @@
1
- {"index":15,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"RuBQReranking (rus-Cyrl)":75.58}
2
- {"index":13,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"RuBQReranking (rus-Cyrl)":74.61}
3
- {"index":0,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"RuBQReranking (rus-Cyrl)":74.02}
4
- {"index":11,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"RuBQReranking (rus-Cyrl)":73.08}
5
- {"index":14,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":72.01}
6
- {"index":16,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":71.46}
7
- {"index":4,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"RuBQReranking (rus-Cyrl)":70.87}
8
- {"index":23,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":68.65}
9
- {"index":10,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":64.42}
10
- {"index":24,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":62.15}
11
- {"index":22,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":58.77}
12
- {"index":5,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":56.13}
13
- {"index":17,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"RuBQReranking (rus-Cyrl)":55.13}
14
- {"index":7,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":54.83}
15
- {"index":21,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":52.8}
16
- {"index":6,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":46.81}
17
- {"index":9,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":46.09}
18
- {"index":1,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"RuBQReranking (rus-Cyrl)":42.58}
19
- {"index":2,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":41.65}
20
- {"index":3,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":39.89}
21
- {"index":18,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"RuBQReranking (rus-Cyrl)":38.51}
22
- {"index":8,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"RuBQReranking (rus-Cyrl)":35.44}
23
- {"index":12,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":34.01}
24
- {"index":20,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"RuBQReranking (rus-Cyrl)":30.96}
25
- {"index":19,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"RuBQReranking (rus-Cyrl)":27.05}
 
1
+ {"index":0,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":69.7,"RuBQReranking (rus-Cyrl)":74.02,"MIRACLReranking (rus-Cyrl)":65.38}
2
+ {"index":15,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":69.64,"RuBQReranking (rus-Cyrl)":75.58,"MIRACLReranking (rus-Cyrl)":63.71}
3
+ {"index":14,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":66.24,"RuBQReranking (rus-Cyrl)":72.01,"MIRACLReranking (rus-Cyrl)":60.47}
4
+ {"index":16,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.29,"RuBQReranking (rus-Cyrl)":71.46,"MIRACLReranking (rus-Cyrl)":59.12}
5
+ {"index":5,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":40.56,"RuBQReranking (rus-Cyrl)":56.13,"MIRACLReranking (rus-Cyrl)":24.99}
6
+ {"index":6,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":32.8,"RuBQReranking (rus-Cyrl)":46.81,"MIRACLReranking (rus-Cyrl)":18.8}
7
+ {"index":9,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":30.95,"RuBQReranking (rus-Cyrl)":46.09,"MIRACLReranking (rus-Cyrl)":15.81}
8
+ {"index":1,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":"","RuBQReranking (rus-Cyrl)":42.58,"MIRACLReranking (rus-Cyrl)":""}
9
+ {"index":2,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RuBQReranking (rus-Cyrl)":41.65,"MIRACLReranking (rus-Cyrl)":""}
10
+ {"index":3,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RuBQReranking (rus-Cyrl)":39.89,"MIRACLReranking (rus-Cyrl)":""}
11
+ {"index":4,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","RuBQReranking (rus-Cyrl)":70.87,"MIRACLReranking (rus-Cyrl)":""}
12
+ {"index":7,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","RuBQReranking (rus-Cyrl)":54.83,"MIRACLReranking (rus-Cyrl)":""}
13
+ {"index":8,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":"","RuBQReranking (rus-Cyrl)":35.44,"MIRACLReranking (rus-Cyrl)":""}
14
+ {"index":10,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RuBQReranking (rus-Cyrl)":64.42,"MIRACLReranking (rus-Cyrl)":""}
15
+ {"index":11,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":"","RuBQReranking (rus-Cyrl)":73.08,"MIRACLReranking (rus-Cyrl)":""}
16
+ {"index":12,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RuBQReranking (rus-Cyrl)":34.01,"MIRACLReranking (rus-Cyrl)":""}
17
+ {"index":13,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","RuBQReranking (rus-Cyrl)":74.61,"MIRACLReranking (rus-Cyrl)":""}
18
+ {"index":17,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","RuBQReranking (rus-Cyrl)":55.13,"MIRACLReranking (rus-Cyrl)":""}
19
+ {"index":18,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RuBQReranking (rus-Cyrl)":38.51,"MIRACLReranking (rus-Cyrl)":""}
20
+ {"index":19,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","RuBQReranking (rus-Cyrl)":27.05,"MIRACLReranking (rus-Cyrl)":""}
21
+ {"index":20,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RuBQReranking (rus-Cyrl)":30.96,"MIRACLReranking (rus-Cyrl)":""}
22
+ {"index":21,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","RuBQReranking (rus-Cyrl)":52.8,"MIRACLReranking (rus-Cyrl)":""}
23
+ {"index":22,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","RuBQReranking (rus-Cyrl)":58.77,"MIRACLReranking (rus-Cyrl)":""}
24
+ {"index":23,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":"","RuBQReranking (rus-Cyrl)":68.65,"MIRACLReranking (rus-Cyrl)":""}
25
+ {"index":24,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","RuBQReranking (rus-Cyrl)":62.15,"MIRACLReranking (rus-Cyrl)":""}
boards_data/ru/data_tasks/Retrieval/default.jsonl CHANGED
@@ -1,25 +1,25 @@
1
- {"index":13,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":77.96,"RiaNewsRetrieval (rus-Cyrl)":81.94,"RuBQRetrieval (rus-Cyrl)":73.98}
2
- {"index":15,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.39,"RiaNewsRetrieval (rus-Cyrl)":80.67,"RuBQRetrieval (rus-Cyrl)":74.11}
3
- {"index":0,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":77.1,"RiaNewsRetrieval (rus-Cyrl)":82.98,"RuBQRetrieval (rus-Cyrl)":71.21}
4
- {"index":11,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":76.78,"RiaNewsRetrieval (rus-Cyrl)":83.53,"RuBQRetrieval (rus-Cyrl)":70.03}
5
- {"index":4,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":72.82,"RiaNewsRetrieval (rus-Cyrl)":78.86,"RuBQRetrieval (rus-Cyrl)":66.77}
6
- {"index":14,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":69.91,"RiaNewsRetrieval (rus-Cyrl)":70.24,"RuBQRetrieval (rus-Cyrl)":69.58}
7
- {"index":16,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":69.27,"RiaNewsRetrieval (rus-Cyrl)":70.01,"RuBQRetrieval (rus-Cyrl)":68.53}
8
- {"index":23,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":67.54,"RiaNewsRetrieval (rus-Cyrl)":69.36,"RuBQRetrieval (rus-Cyrl)":65.71}
9
- {"index":10,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":67.34,"RiaNewsRetrieval (rus-Cyrl)":77.83,"RuBQRetrieval (rus-Cyrl)":56.86}
10
- {"index":24,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.5,"RiaNewsRetrieval (rus-Cyrl)":51.27,"RuBQRetrieval (rus-Cyrl)":51.73}
11
- {"index":22,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.4,"RiaNewsRetrieval (rus-Cyrl)":51.75,"RuBQRetrieval (rus-Cyrl)":37.04}
12
- {"index":21,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.26,"RiaNewsRetrieval (rus-Cyrl)":44.82,"RuBQRetrieval (rus-Cyrl)":29.7}
13
- {"index":17,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":36.38,"RiaNewsRetrieval (rus-Cyrl)":42.75,"RuBQRetrieval (rus-Cyrl)":30.02}
14
- {"index":7,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":31.88,"RiaNewsRetrieval (rus-Cyrl)":34.73,"RuBQRetrieval (rus-Cyrl)":29.03}
15
- {"index":5,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":25.6,"RiaNewsRetrieval (rus-Cyrl)":21.4,"RuBQRetrieval (rus-Cyrl)":29.8}
16
- {"index":9,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":12.4,"RiaNewsRetrieval (rus-Cyrl)":13.92,"RuBQRetrieval (rus-Cyrl)":10.87}
17
- {"index":6,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":11.78,"RiaNewsRetrieval (rus-Cyrl)":11.11,"RuBQRetrieval (rus-Cyrl)":12.45}
18
- {"index":3,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":9.68,"RiaNewsRetrieval (rus-Cyrl)":6.72,"RuBQRetrieval (rus-Cyrl)":12.63}
19
- {"index":2,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":7.55,"RiaNewsRetrieval (rus-Cyrl)":5.58,"RuBQRetrieval (rus-Cyrl)":9.52}
20
- {"index":12,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":7.5,"RiaNewsRetrieval (rus-Cyrl)":4.84,"RuBQRetrieval (rus-Cyrl)":10.15}
21
- {"index":1,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":7.37,"RiaNewsRetrieval (rus-Cyrl)":4.14,"RuBQRetrieval (rus-Cyrl)":10.6}
22
- {"index":8,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":2.02,"RiaNewsRetrieval (rus-Cyrl)":0.79,"RuBQRetrieval (rus-Cyrl)":3.24}
23
- {"index":19,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":1.66,"RiaNewsRetrieval (rus-Cyrl)":0.67,"RuBQRetrieval (rus-Cyrl)":2.64}
24
- {"index":18,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":8.84}
25
- {"index":20,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":4.75}
 
1
+ {"index":0,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":74.77,"RiaNewsRetrieval (rus-Cyrl)":82.98,"RuBQRetrieval (rus-Cyrl)":71.21,"MIRACLRetrieval (rus-Cyrl)":70.11}
2
+ {"index":15,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":74.04,"RiaNewsRetrieval (rus-Cyrl)":80.67,"RuBQRetrieval (rus-Cyrl)":74.11,"MIRACLRetrieval (rus-Cyrl)":67.33}
3
+ {"index":14,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":67.14,"RiaNewsRetrieval (rus-Cyrl)":70.24,"RuBQRetrieval (rus-Cyrl)":69.58,"MIRACLRetrieval (rus-Cyrl)":61.6}
4
+ {"index":16,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.85,"RiaNewsRetrieval (rus-Cyrl)":70.01,"RuBQRetrieval (rus-Cyrl)":68.53,"MIRACLRetrieval (rus-Cyrl)":59.01}
5
+ {"index":5,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":19.13,"RiaNewsRetrieval (rus-Cyrl)":21.4,"RuBQRetrieval (rus-Cyrl)":29.8,"MIRACLRetrieval (rus-Cyrl)":6.2}
6
+ {"index":9,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":8.89,"RiaNewsRetrieval (rus-Cyrl)":13.92,"RuBQRetrieval (rus-Cyrl)":10.87,"MIRACLRetrieval (rus-Cyrl)":1.89}
7
+ {"index":6,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":8.51,"RiaNewsRetrieval (rus-Cyrl)":11.11,"RuBQRetrieval (rus-Cyrl)":12.45,"MIRACLRetrieval (rus-Cyrl)":1.98}
8
+ {"index":19,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":1.23,"RiaNewsRetrieval (rus-Cyrl)":0.67,"RuBQRetrieval (rus-Cyrl)":2.64,"MIRACLRetrieval (rus-Cyrl)":0.39}
9
+ {"index":1,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":"","RiaNewsRetrieval (rus-Cyrl)":4.14,"RuBQRetrieval (rus-Cyrl)":10.6,"MIRACLRetrieval (rus-Cyrl)":""}
10
+ {"index":2,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RiaNewsRetrieval (rus-Cyrl)":5.58,"RuBQRetrieval (rus-Cyrl)":9.52,"MIRACLRetrieval (rus-Cyrl)":""}
11
+ {"index":3,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RiaNewsRetrieval (rus-Cyrl)":6.72,"RuBQRetrieval (rus-Cyrl)":12.63,"MIRACLRetrieval (rus-Cyrl)":""}
12
+ {"index":4,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","RiaNewsRetrieval (rus-Cyrl)":78.86,"RuBQRetrieval (rus-Cyrl)":66.77,"MIRACLRetrieval (rus-Cyrl)":""}
13
+ {"index":7,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","RiaNewsRetrieval (rus-Cyrl)":34.73,"RuBQRetrieval (rus-Cyrl)":29.03,"MIRACLRetrieval (rus-Cyrl)":""}
14
+ {"index":8,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":"","RiaNewsRetrieval (rus-Cyrl)":0.79,"RuBQRetrieval (rus-Cyrl)":3.24,"MIRACLRetrieval (rus-Cyrl)":""}
15
+ {"index":10,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RiaNewsRetrieval (rus-Cyrl)":77.83,"RuBQRetrieval (rus-Cyrl)":56.86,"MIRACLRetrieval (rus-Cyrl)":""}
16
+ {"index":11,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":"","RiaNewsRetrieval (rus-Cyrl)":83.53,"RuBQRetrieval (rus-Cyrl)":70.03,"MIRACLRetrieval (rus-Cyrl)":""}
17
+ {"index":12,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RiaNewsRetrieval (rus-Cyrl)":4.84,"RuBQRetrieval (rus-Cyrl)":10.15,"MIRACLRetrieval (rus-Cyrl)":""}
18
+ {"index":13,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","RiaNewsRetrieval (rus-Cyrl)":81.94,"RuBQRetrieval (rus-Cyrl)":73.98,"MIRACLRetrieval (rus-Cyrl)":""}
19
+ {"index":17,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","RiaNewsRetrieval (rus-Cyrl)":42.75,"RuBQRetrieval (rus-Cyrl)":30.02,"MIRACLRetrieval (rus-Cyrl)":""}
20
+ {"index":18,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":8.84,"MIRACLRetrieval (rus-Cyrl)":""}
21
+ {"index":20,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":4.75,"MIRACLRetrieval (rus-Cyrl)":""}
22
+ {"index":21,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","RiaNewsRetrieval (rus-Cyrl)":44.82,"RuBQRetrieval (rus-Cyrl)":29.7,"MIRACLRetrieval (rus-Cyrl)":""}
23
+ {"index":22,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","RiaNewsRetrieval (rus-Cyrl)":51.75,"RuBQRetrieval (rus-Cyrl)":37.04,"MIRACLRetrieval (rus-Cyrl)":""}
24
+ {"index":23,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":"","RiaNewsRetrieval (rus-Cyrl)":69.36,"RuBQRetrieval (rus-Cyrl)":65.71,"MIRACLRetrieval (rus-Cyrl)":""}
25
+ {"index":24,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","RiaNewsRetrieval (rus-Cyrl)":51.27,"RuBQRetrieval (rus-Cyrl)":51.73,"MIRACLRetrieval (rus-Cyrl)":""}
boards_data/ru/data_tasks/STS/default.jsonl CHANGED
@@ -1,25 +1,25 @@
1
- {"index":13,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":80.15,"RUParaPhraserSTS (rus-Cyrl)":76.17,"RuSTSBenchmarkSTS (rus-Cyrl)":84.13}
2
- {"index":11,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":79.85,"RUParaPhraserSTS (rus-Cyrl)":76.36,"RuSTSBenchmarkSTS (rus-Cyrl)":83.35}
3
- {"index":10,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":77.91,"RUParaPhraserSTS (rus-Cyrl)":73.56,"RuSTSBenchmarkSTS (rus-Cyrl)":82.26}
4
- {"index":15,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.48,"RUParaPhraserSTS (rus-Cyrl)":71.82,"RuSTSBenchmarkSTS (rus-Cyrl)":83.15}
5
- {"index":4,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":77.42,"RUParaPhraserSTS (rus-Cyrl)":76.16,"RuSTSBenchmarkSTS (rus-Cyrl)":78.69}
6
- {"index":0,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":77.39,"RUParaPhraserSTS (rus-Cyrl)":74.9,"RuSTSBenchmarkSTS (rus-Cyrl)":79.87}
7
- {"index":23,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":77.37,"RUParaPhraserSTS (rus-Cyrl)":72.97,"RuSTSBenchmarkSTS (rus-Cyrl)":81.77}
8
- {"index":24,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":75.32,"RUParaPhraserSTS (rus-Cyrl)":72.15,"RuSTSBenchmarkSTS (rus-Cyrl)":78.48}
9
- {"index":14,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.9,"RUParaPhraserSTS (rus-Cyrl)":70.17,"RuSTSBenchmarkSTS (rus-Cyrl)":79.64}
10
- {"index":16,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.27,"RUParaPhraserSTS (rus-Cyrl)":70.46,"RuSTSBenchmarkSTS (rus-Cyrl)":78.08}
11
- {"index":22,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.1,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":82.46}
12
- {"index":21,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":70.71,"RUParaPhraserSTS (rus-Cyrl)":61.87,"RuSTSBenchmarkSTS (rus-Cyrl)":79.55}
13
- {"index":7,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":69.6,"RUParaPhraserSTS (rus-Cyrl)":65.87,"RuSTSBenchmarkSTS (rus-Cyrl)":73.32}
14
- {"index":17,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":69.54,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":73.34}
15
- {"index":5,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":68.19,"RUParaPhraserSTS (rus-Cyrl)":65.17,"RuSTSBenchmarkSTS (rus-Cyrl)":71.22}
16
- {"index":9,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":67.28,"RUParaPhraserSTS (rus-Cyrl)":65.14,"RuSTSBenchmarkSTS (rus-Cyrl)":69.43}
17
- {"index":3,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":66.13,"RUParaPhraserSTS (rus-Cyrl)":66.24,"RuSTSBenchmarkSTS (rus-Cyrl)":66.03}
18
- {"index":6,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":60.44,"RUParaPhraserSTS (rus-Cyrl)":62.06,"RuSTSBenchmarkSTS (rus-Cyrl)":58.82}
19
- {"index":1,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":58.36,"RUParaPhraserSTS (rus-Cyrl)":55.01,"RuSTSBenchmarkSTS (rus-Cyrl)":61.72}
20
- {"index":12,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":56.25,"RUParaPhraserSTS (rus-Cyrl)":54.03,"RuSTSBenchmarkSTS (rus-Cyrl)":58.47}
21
- {"index":8,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":55.78,"RUParaPhraserSTS (rus-Cyrl)":53.41,"RuSTSBenchmarkSTS (rus-Cyrl)":58.16}
22
- {"index":2,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.84,"RUParaPhraserSTS (rus-Cyrl)":49.72,"RuSTSBenchmarkSTS (rus-Cyrl)":53.95}
23
- {"index":18,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":50.9,"RUParaPhraserSTS (rus-Cyrl)":45.47,"RuSTSBenchmarkSTS (rus-Cyrl)":56.33}
24
- {"index":19,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":49.74,"RUParaPhraserSTS (rus-Cyrl)":43.93,"RuSTSBenchmarkSTS (rus-Cyrl)":55.56}
25
- {"index":20,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":48.92,"RUParaPhraserSTS (rus-Cyrl)":42.15,"RuSTSBenchmarkSTS (rus-Cyrl)":55.68}
 
1
+ {"index":11,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":75.38,"RUParaPhraserSTS (rus-Cyrl)":76.36,"RuSTSBenchmarkSTS (rus-Cyrl)":83.35,"STS22 (rus-Cyrl)":66.42}
2
+ {"index":0,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":73.68,"RUParaPhraserSTS (rus-Cyrl)":74.9,"RuSTSBenchmarkSTS (rus-Cyrl)":79.87,"STS22 (rus-Cyrl)":66.26}
3
+ {"index":10,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":73.07,"RUParaPhraserSTS (rus-Cyrl)":73.56,"RuSTSBenchmarkSTS (rus-Cyrl)":82.26,"STS22 (rus-Cyrl)":63.39}
4
+ {"index":23,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":72.54,"RUParaPhraserSTS (rus-Cyrl)":72.97,"RuSTSBenchmarkSTS (rus-Cyrl)":81.77,"STS22 (rus-Cyrl)":62.89}
5
+ {"index":15,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":71.62,"RUParaPhraserSTS (rus-Cyrl)":71.82,"RuSTSBenchmarkSTS (rus-Cyrl)":83.15,"STS22 (rus-Cyrl)":59.89}
6
+ {"index":24,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":70.23,"RUParaPhraserSTS (rus-Cyrl)":72.15,"RuSTSBenchmarkSTS (rus-Cyrl)":78.48,"STS22 (rus-Cyrl)":60.06}
7
+ {"index":14,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":70.16,"RUParaPhraserSTS (rus-Cyrl)":70.17,"RuSTSBenchmarkSTS (rus-Cyrl)":79.64,"STS22 (rus-Cyrl)":60.67}
8
+ {"index":16,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":69.48,"RUParaPhraserSTS (rus-Cyrl)":70.46,"RuSTSBenchmarkSTS (rus-Cyrl)":78.08,"STS22 (rus-Cyrl)":59.9}
9
+ {"index":22,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":68.98,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":82.46,"STS22 (rus-Cyrl)":58.74}
10
+ {"index":21,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":66.17,"RUParaPhraserSTS (rus-Cyrl)":61.87,"RuSTSBenchmarkSTS (rus-Cyrl)":79.55,"STS22 (rus-Cyrl)":57.08}
11
+ {"index":7,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":65.91,"RUParaPhraserSTS (rus-Cyrl)":65.87,"RuSTSBenchmarkSTS (rus-Cyrl)":73.32,"STS22 (rus-Cyrl)":58.53}
12
+ {"index":17,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":65.52,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":73.34,"STS22 (rus-Cyrl)":57.49}
13
+ {"index":5,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":64.4,"RUParaPhraserSTS (rus-Cyrl)":65.17,"RuSTSBenchmarkSTS (rus-Cyrl)":71.22,"STS22 (rus-Cyrl)":56.82}
14
+ {"index":9,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":61.6,"RUParaPhraserSTS (rus-Cyrl)":65.14,"RuSTSBenchmarkSTS (rus-Cyrl)":69.43,"STS22 (rus-Cyrl)":50.23}
15
+ {"index":3,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":61.18,"RUParaPhraserSTS (rus-Cyrl)":66.24,"RuSTSBenchmarkSTS (rus-Cyrl)":66.03,"STS22 (rus-Cyrl)":51.27}
16
+ {"index":6,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":57.21,"RUParaPhraserSTS (rus-Cyrl)":62.06,"RuSTSBenchmarkSTS (rus-Cyrl)":58.82,"STS22 (rus-Cyrl)":50.75}
17
+ {"index":1,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":56.2,"RUParaPhraserSTS (rus-Cyrl)":55.01,"RuSTSBenchmarkSTS (rus-Cyrl)":61.72,"STS22 (rus-Cyrl)":51.87}
18
+ {"index":12,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":53.39,"RUParaPhraserSTS (rus-Cyrl)":54.03,"RuSTSBenchmarkSTS (rus-Cyrl)":58.47,"STS22 (rus-Cyrl)":47.67}
19
+ {"index":8,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":53.15,"RUParaPhraserSTS (rus-Cyrl)":53.41,"RuSTSBenchmarkSTS (rus-Cyrl)":58.16,"STS22 (rus-Cyrl)":47.88}
20
+ {"index":2,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":46.22,"RUParaPhraserSTS (rus-Cyrl)":49.72,"RuSTSBenchmarkSTS (rus-Cyrl)":53.95,"STS22 (rus-Cyrl)":34.98}
21
+ {"index":19,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":38.07,"RUParaPhraserSTS (rus-Cyrl)":43.93,"RuSTSBenchmarkSTS (rus-Cyrl)":55.56,"STS22 (rus-Cyrl)":14.72}
22
+ {"index":20,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":37.89,"RUParaPhraserSTS (rus-Cyrl)":42.15,"RuSTSBenchmarkSTS (rus-Cyrl)":55.68,"STS22 (rus-Cyrl)":15.83}
23
+ {"index":18,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":37.66,"RUParaPhraserSTS (rus-Cyrl)":45.47,"RuSTSBenchmarkSTS (rus-Cyrl)":56.33,"STS22 (rus-Cyrl)":11.19}
24
+ {"index":4,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","RUParaPhraserSTS (rus-Cyrl)":76.16,"RuSTSBenchmarkSTS (rus-Cyrl)":78.69,"STS22 (rus-Cyrl)":""}
25
+ {"index":13,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","RUParaPhraserSTS (rus-Cyrl)":76.17,"RuSTSBenchmarkSTS (rus-Cyrl)":84.13,"STS22 (rus-Cyrl)":""}
config.yaml CHANGED
@@ -43,6 +43,11 @@ tasks:
43
  metric: spearman
44
  metric_description: "Spearman correlation based on the model's similarity metric (usually cosine)"
45
  task_description: "Summarization is the task of generating a summary of a text."
 
 
 
 
 
46
  InstructionRetrieval:
47
  icon: "πŸ”ŽπŸ“‹"
48
  metric: "p-MRR"
@@ -382,6 +387,8 @@ boards:
382
  - RuReviewsClassification (rus-Cyrl)
383
  - RuSciBenchGRNTIClassification (rus-Cyrl)
384
  - RuSciBenchOECDClassification (rus-Cyrl)
 
 
385
  Clustering:
386
  - GeoreviewClusteringP2P (rus-Cyrl)
387
  - RuSciBenchGRNTIClusteringP2P (rus-Cyrl)
@@ -390,12 +397,18 @@ boards:
390
  - TERRa (rus-Cyrl)
391
  Reranking:
392
  - RuBQReranking (rus-Cyrl)
 
393
  Retrieval:
394
  - RiaNewsRetrieval (rus-Cyrl)
395
  - RuBQRetrieval (rus-Cyrl)
 
396
  STS:
397
  - RUParaPhraserSTS (rus-Cyrl)
398
  - RuSTSBenchmarkSTS (rus-Cyrl)
 
 
 
 
399
  se:
400
  title: Swedish
401
  language_long: Swedish
 
43
  metric: spearman
44
  metric_description: "Spearman correlation based on the model's similarity metric (usually cosine)"
45
  task_description: "Summarization is the task of generating a summary of a text."
46
+ MultilabelClassification:
47
+ icon: "🏷️"
48
+ metric: accuracy
49
+ metric_description: "Accuracy"
50
+ task_description: "Multilabel classification is the task of assigning multiple labels to a text."
51
  InstructionRetrieval:
52
  icon: "πŸ”ŽπŸ“‹"
53
  metric: "p-MRR"
 
387
  - RuReviewsClassification (rus-Cyrl)
388
  - RuSciBenchGRNTIClassification (rus-Cyrl)
389
  - RuSciBenchOECDClassification (rus-Cyrl)
390
+ - MassiveIntentClassification (rus-Cyrl)
391
+ - MassiveScenarioClassification (rus-Cyrl)
392
  Clustering:
393
  - GeoreviewClusteringP2P (rus-Cyrl)
394
  - RuSciBenchGRNTIClusteringP2P (rus-Cyrl)
 
397
  - TERRa (rus-Cyrl)
398
  Reranking:
399
  - RuBQReranking (rus-Cyrl)
400
+ - MIRACLReranking (rus-Cyrl)
401
  Retrieval:
402
  - RiaNewsRetrieval (rus-Cyrl)
403
  - RuBQRetrieval (rus-Cyrl)
404
+ - MIRACLRetrieval (rus-Cyrl)
405
  STS:
406
  - RUParaPhraserSTS (rus-Cyrl)
407
  - RuSTSBenchmarkSTS (rus-Cyrl)
408
+ - STS22 (rus-Cyrl)
409
+ MultilabelClassification:
410
+ - CEDRClassification (rus-Cyrl)
411
+ - SensitiveTopicsClassification (rus-Cyrl)
412
  se:
413
  title: Swedish
414
  language_long: Swedish
refresh.py CHANGED
@@ -161,6 +161,8 @@ def filter_metric_external(x, task, metrics) -> bool:
161
  # This is a hack for the passkey and needle retrieval test, which reports ndcg_at_1 (i.e. accuracy), rather than the ndcg_at_10 that is commonly used for retrieval tasks.
162
  if x["mteb_dataset_name"] in ["LEMBNeedleRetrieval", "LEMBPasskeyRetrieval"]:
163
  return bool(x["mteb_task"] == task and x["metric"] == "ndcg_at_1")
 
 
164
  else:
165
  return bool(x["mteb_task"] == task and x["metric"] in metrics)
166
 
@@ -534,7 +536,7 @@ def get_mteb_average(task_dict: dict) -> tuple[Any, dict]:
534
  rank=False,
535
  )
536
  # Debugging:
537
- # DATA_OVERALL.to_csv("overall.csv")
538
  DATA_OVERALL.insert(
539
  1,
540
  f"Average ({len(all_tasks)} datasets)",
@@ -608,6 +610,8 @@ def refresh_leaderboard() -> tuple[list, dict]:
608
  leave=True,
609
  )
610
  for board, board_config in pbar_tasks:
 
 
611
  boards_data[board] = {"data_overall": None, "data_tasks": {}}
612
  pbar_tasks.set_description(f"Fetching leaderboard results for {board!r}")
613
  pbar_tasks.refresh()
 
161
  # This is a hack for the passkey and needle retrieval test, which reports ndcg_at_1 (i.e. accuracy), rather than the ndcg_at_10 that is commonly used for retrieval tasks.
162
  if x["mteb_dataset_name"] in ["LEMBNeedleRetrieval", "LEMBPasskeyRetrieval"]:
163
  return bool(x["mteb_task"] == task and x["metric"] == "ndcg_at_1")
164
+ elif x["mteb_dataset_name"] == "MIRACLReranking":
165
+ return bool(x["mteb_task"] == task and x["metric"] in ["NDCG@10(MIRACL)"])
166
  else:
167
  return bool(x["mteb_task"] == task and x["metric"] in metrics)
168
 
 
536
  rank=False,
537
  )
538
  # Debugging:
539
+ DATA_OVERALL.to_csv("overall.csv")
540
  DATA_OVERALL.insert(
541
  1,
542
  f"Average ({len(all_tasks)} datasets)",
 
610
  leave=True,
611
  )
612
  for board, board_config in pbar_tasks:
613
+ if board == "longembed":
614
+ pass
615
  boards_data[board] = {"data_overall": None, "data_tasks": {}}
616
  pbar_tasks.set_description(f"Fetching leaderboard results for {board!r}")
617
  pbar_tasks.refresh()