Add feature-extraction pipeline tag, project page link and paper link
#1 by nielsr - opened

README.md CHANGED
@@ -1,11 +1,19 @@
 ---
+base_model: EuroBERT/EuroBERT-210m
+language:
+- ar
+library_name: sentence-transformers
+license: mit
+metrics:
+- pearson_cosine
+- spearman_cosine
+pipeline_tag: feature-extraction
 tags:
 - sentence-transformers
 - loss:MatryoshkaLoss
 - loss:MultipleNegativesRankingLoss
 - EuroBert
 - Arabic
-base_model: EuroBERT/EuroBERT-210m
 widget:
 - source_sentence: امرأة شقراء تطل على مشهد (سياتل سبيس نيدل)
   sentences:
@@ -21,7 +29,7 @@ widget:
   sentences:
   - هناك العديد من الناس الحاضرين.
   - الكلب الأبيض على الشاطئ
-  - هناك شخص واحد فقط
+  - هناك شخص واحد فقط موجود.
 - source_sentence: مجموعة من الناس تمشي بجانب شاحنة.
   sentences:
   - الناس يقفون
@@ -32,11 +40,6 @@ widget:
   - شخصان يلعبان كرة البيسبول
   - الرجل ينظف
   - لاعبين لكرة البيسبول يجلسان على مقعد
-pipeline_tag: sentence-similarity
-library_name: sentence-transformers
-metrics:
-- pearson_cosine
-- spearman_cosine
 model-index:
 - name: SentenceTransformer based on EuroBERT/EuroBERT-210m
   results:
@@ -105,12 +108,8 @@ model-index:
     - type: spearman_cosine
       value: 0.7975941111911333
       name: Spearman Cosine
-license: mit
-language:
-- ar
 ---
 
-
 # Ara-EuroBERT: Arabic-optimized Sentence Transformer
 
 <img src="https://i.ibb.co/d4svDscP/Clear-Familiar-situations-that-you-already-have-best-practices-for-4.png" width="100" align="left"/>
@@ -119,13 +118,14 @@ Ara-EuroBERT is a [sentence-transformers](https://www.SBERT.net) model fine-tune
 
 This model maps sentences and paragraphs to a **768-dimensional dense vector space** and supports a **maximum sequence length** of 8,192 tokens.
 
+Paper: [EuroBERT: Scaling Multilingual Encoders for European Languages](https://huggingface.co/papers/2503.05500)
 
-
+You can find more information on the base model at https://huggingface.co/EuroBERT
 
+![]()
 
 Our fine-tuned model shows remarkable improvements over the base models, achieving a 73.5% relative improvement on STS17 and a 21.6% relative improvement on STS22.v2 compared to the base EuroBERT-210M.
 
-
 ## Model Details
 
 ### Model Description
@@ -243,11 +243,11 @@ If you use this model in your research, please cite the following works:
 ```bibtex
 @misc{boizard2025eurobertscalingmultilingualencoders,
       title={EuroBERT: Scaling Multilingual Encoders for European Languages},
-      author={Nicolas Boizard and Hippolyte Gisserot-Boukhlef and Duarte M. Alves and André Martins and Ayoub Hammal and Caio Corro and Céline Hudelot and Emmanuel Malherbe and Etienne Malaboeuf and Fanny Jourdan and Gabriel Hautreux and João Alves and Kevin El-Haddad and Manuel Faysse and Maxime Peyrard and Nuno M. Guerreiro and Patrick Fernandes and Ricardo Rei and Pierre Colombo}
-      year={2025}
-      eprint={2503.05500}
-      archivePrefix={arXiv}
-      primaryClass={cs.CL}
+      author={Nicolas Boizard and Hippolyte Gisserot-Boukhlef and Duarte M. Alves and André Martins and Ayoub Hammal and Caio Corro and Céline Hudelot and Emmanuel Malherbe and Etienne Malaboeuf and Fanny Jourdan and Gabriel Hautreux and João Alves and Kevin El-Haddad and Manuel Faysse and Maxime Peyrard and Nuno M. Guerreiro and Patrick Fernandes and Ricardo Rei and Pierre Colombo},
+      year={2025},
+      eprint={2503.05500},
+      archivePrefix={arXiv},
+      primaryClass={cs.CL},
       url={https://arxiv.org/abs/2503.05500},
 }
 ```
@@ -268,9 +268,9 @@ If you use this model in your research, please cite the following works:
 @misc{kusupati2024matryoshka,
       title={Matryoshka Representation Learning},
       author={Aditya Kusupati and Gantavya Bhatt and Aniket Rege and Matthew Wallingford and Aditya Sinha and Vivek Ramanujan and William Howard-Snyder and Kaifeng Chen and Sham Kakade and Prateek Jain and Ali Farhadi},
-      year={2024}
-      eprint={2205.13147}
-      archivePrefix={arXiv}
-      primaryClass={cs.LG}
+      year={2024},
+      eprint={2205.13147},
+      archivePrefix={arXiv},
+      primaryClass={cs.LG}
 }
 ```
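For anyone verifying the retag: `pipeline_tag: feature-extraction` together with the kept `library_name: sentence-transformers` describes a model that is loaded to encode text into dense vectors rather than to score sentence pairs directly. Below is a minimal sketch of that usage; the repo id `<org>/Ara-EuroBERT-210m` is a placeholder for the actual model id, and `trust_remote_code=True` is an assumption for EuroBERT's custom architecture.

```python
# Feature-extraction sketch matching the updated card metadata.
# NOTE: "<org>/Ara-EuroBERT-210m" is a placeholder, not the real repo id;
# trust_remote_code=True is assumed for EuroBERT's custom modeling code.
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("<org>/Ara-EuroBERT-210m", trust_remote_code=True)

sentences = [
    "امرأة شقراء تطل على مشهد (سياتل سبيس نيدل)",  # "A blonde woman overlooks the Seattle Space Needle" (widget example)
    "مجموعة من الناس تمشي بجانب شاحنة.",  # "A group of people walk beside a truck." (widget example)
]
embeddings = model.encode(sentences)
print(embeddings.shape)  # (2, 768): the card's 768-dimensional dense vector space
print(model.similarity(embeddings, embeddings))  # cosine similarities between the sentences
```

Sentence similarity is then just a cosine over these vectors, which is why the same checkpoint serves either pipeline tag.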
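The `loss:MatryoshkaLoss` tag kept by this diff means training encouraged leading prefixes of each embedding to remain useful on their own, so vectors can be truncated for cheaper storage and search. sentence-transformers exposes this via the `truncate_dim` load-time argument; a sketch with the same placeholder repo id:

```python
# Matryoshka truncation sketch: keep only the first 256 of 768 dimensions.
# NOTE: "<org>/Ara-EuroBERT-210m" is a placeholder repo id.
from sentence_transformers import SentenceTransformer

model_256 = SentenceTransformer("<org>/Ara-EuroBERT-210m", truncate_dim=256)
emb = model_256.encode("الناس يقفون")  # "The people are standing." (widget example)
print(emb.shape)  # (256,) instead of the full (768,)
```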
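As for the `pearson_cosine` / `spearman_cosine` metrics retained in the front matter (the model-index reports a Spearman Cosine of 0.7976): these are the Pearson and Spearman correlations between the cosine similarity of embedded sentence pairs and human similarity labels on an STS set. A self-contained sketch, with random arrays standing in for real embeddings and gold scores:

```python
# How pearson_cosine / spearman_cosine are computed for an STS evaluation.
# Random data stands in for real embeddings and human 0-5 similarity labels.
import numpy as np
from scipy.stats import pearsonr, spearmanr

rng = np.random.default_rng(0)
emb1 = rng.normal(size=(100, 768))  # stand-in for model.encode(first_sentences)
emb2 = rng.normal(size=(100, 768))  # stand-in for model.encode(second_sentences)
gold = rng.uniform(0.0, 5.0, 100)   # stand-in for gold similarity scores

cos = np.sum(emb1 * emb2, axis=1) / (
    np.linalg.norm(emb1, axis=1) * np.linalg.norm(emb2, axis=1)
)
print("pearson_cosine: ", pearsonr(cos, gold)[0])   # linear correlation
print("spearman_cosine:", spearmanr(cos, gold)[0])  # rank correlation
```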