romain125 committed on
Commit
998638c
·
verified ·
1 Parent(s): 3423e22

End of training

Browse files
Files changed (34) hide show
  1. .gitattributes +1 -0
  2. 1_Pooling/config.json +10 -0
  3. README.md +664 -0
  4. config.json +28 -0
  5. config_sentence_transformers.json +10 -0
  6. eval/binary_classification_evaluation_BinaryClassifEval_results.csv +111 -0
  7. model.safetensors +3 -0
  8. modules.json +20 -0
  9. runs/Mar11_09-06-35_algo-1/events.out.tfevents.1741683996.algo-1.88.0 +3 -0
  10. runs/Mar11_09-06-35_algo-1/events.out.tfevents.1741685196.algo-1.88.1 +3 -0
  11. runs/Mar11_09-26-41_algo-1/events.out.tfevents.1741685202.algo-1.88.2 +3 -0
  12. runs/Mar11_09-26-41_algo-1/events.out.tfevents.1741686086.algo-1.88.3 +3 -0
  13. runs/Mar11_09-41-30_algo-1/events.out.tfevents.1741686091.algo-1.88.4 +3 -0
  14. runs/Mar11_09-41-30_algo-1/events.out.tfevents.1741687314.algo-1.88.5 +3 -0
  15. runs/Mar11_10-01-57_algo-1/events.out.tfevents.1741687318.algo-1.88.6 +3 -0
  16. runs/Mar11_10-01-57_algo-1/events.out.tfevents.1741688552.algo-1.88.7 +3 -0
  17. runs/Mar11_10-22-35_algo-1/events.out.tfevents.1741688556.algo-1.88.8 +3 -0
  18. runs/Mar11_10-22-35_algo-1/events.out.tfevents.1741689857.algo-1.88.9 +3 -0
  19. runs/Mar11_10-44-20_algo-1/events.out.tfevents.1741689861.algo-1.88.10 +3 -0
  20. runs/Mar11_10-44-20_algo-1/events.out.tfevents.1741691061.algo-1.88.11 +3 -0
  21. runs/Mar11_11-04-24_algo-1/events.out.tfevents.1741691065.algo-1.88.12 +3 -0
  22. runs/Mar11_11-04-24_algo-1/events.out.tfevents.1741692361.algo-1.88.13 +3 -0
  23. runs/Mar11_11-26-05_algo-1/events.out.tfevents.1741692366.algo-1.88.14 +3 -0
  24. runs/Mar11_11-26-05_algo-1/events.out.tfevents.1741693671.algo-1.88.15 +3 -0
  25. runs/Mar11_11-47-55_algo-1/events.out.tfevents.1741693676.algo-1.88.16 +3 -0
  26. runs/Mar11_11-47-55_algo-1/events.out.tfevents.1741694970.algo-1.88.17 +3 -0
  27. runs/Mar11_12-09-34_algo-1/events.out.tfevents.1741694976.algo-1.88.18 +3 -0
  28. runs/Mar11_12-09-34_algo-1/events.out.tfevents.1741695809.algo-1.88.19 +3 -0
  29. sentence_bert_config.json +4 -0
  30. sentencepiece.bpe.model +3 -0
  31. special_tokens_map.json +51 -0
  32. tokenizer.json +3 -0
  33. tokenizer_config.json +55 -0
  34. training_args.bin +3 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
1_Pooling/config.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "word_embedding_dimension": 768,
3
+ "pooling_mode_cls_token": false,
4
+ "pooling_mode_mean_tokens": true,
5
+ "pooling_mode_max_tokens": false,
6
+ "pooling_mode_mean_sqrt_len_tokens": false,
7
+ "pooling_mode_weightedmean_tokens": false,
8
+ "pooling_mode_lasttoken": false,
9
+ "include_prompt": true
10
+ }
README.md ADDED
@@ -0,0 +1,664 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ base_model: intfloat/multilingual-e5-base
3
+ library_name: sentence-transformers
4
+ metrics:
5
+ - cosine_accuracy
6
+ - cosine_accuracy_threshold
7
+ - cosine_f1
8
+ - cosine_f1_threshold
9
+ - cosine_precision
10
+ - cosine_recall
11
+ - cosine_ap
12
+ - cosine_mcc
13
+ pipeline_tag: sentence-similarity
14
+ tags:
15
+ - sentence-transformers
16
+ - sentence-similarity
17
+ - feature-extraction
18
+ - generated_from_trainer
19
+ - dataset_size:4861
20
+ - loss:ContrastiveLoss
21
+ widget:
22
+ - source_sentence: 'Type de project: Actions de valorisation (expos physiques ou virtuelles,
23
+ journées d’étude, site internet, publications, documentaires…),Outils de médiation (cartes
24
+ et itinéraires papier ou numériques, livrets de visite, outils numériques, multimédia,
25
+ parcours d’interprétation…),Dispositifs pédagogiques (mallettes pédagogiques,
26
+ Moocs, supports de visite à destination des jeunes…),Événements rayonnant à l’échelle
27
+ de l’Île-de-France. Une attention particulière sera portée à la qualité des contenus,
28
+ à l’originalité et la pertinence des outils ou actions proposés, et à leur adéquation
29
+ avec les publics ciblés.'
30
+ sentences:
31
+ - '''Actions de valorisation'':projet|ÉVALUÉ_PAR|''adéquation avec les publics ciblés'':critère'
32
+ - '''mesdemarches.iledefrance.fr'':plateforme|ACCEPTE_DEMANDE|''Association - Fondation'':entité'
33
+ - '''projets de coopération'':projet|IMPLIQUE|''agriculteur cédant'':personne'
34
+ - source_sentence: 'Description: Cet appel à projets vise à soutenir les structures
35
+ en investissement qui agissent en faveur des jeunes en situation de précarité,
36
+ suite à une rupture familiale ou sociale pouvant entraîner de graves conséquences
37
+ sur leur santé ou leur sécurité.
38
+
39
+ Thèmes: Santé & Social : Solidarité
40
+
41
+ Nature de l''aide: Les dépenses éligibles se composent de dépenses de fonctionnement
42
+ exclusivement imputables à la mise en œuvre des projets retenus dans le cadre
43
+ de ce dispositif. La subvention régionale est fixée à 50 % maximum de la dépense
44
+ subventionnable (total des dépenses éligibles), dans la limite d’un plafond de
45
+ subvention fixé à 75 000 € maximum.
46
+
47
+ Délibération cadre: CR 100-16 du 22 septembre 2016 / CP 2018-428 du 17 octobre
48
+ 2018'
49
+ sentences:
50
+ - '''C''POSSIBLE'':programme|FAVORISE_INSERTION_PROFESSIONNELLE|''lycéens'':groupe'
51
+ - '''Date de début'':concept|EST|''non précisée'':__inferred__'
52
+ - '''subvention régionale'':aide|LIMITE|''appel à projets'':projet'
53
+ - source_sentence: 'Type de project: Le programme propose des rencontres le samedi
54
+ après-midi dans une université ou une grande école réputée, entre les professionnels
55
+ bénévoles et les lycéens et collégiens sous la forme d''atelier thématiques. Ces
56
+ moments de rencontre touchent à une grande multitude de domaines d’activités. L''objectif
57
+ est de donner l’opportunité aux jeunes les plus enclavés d’échanger avec des intervenants
58
+ professionnels aux parcours atypiques et inspirants. Les intervenants suscitent
59
+ les ambitions et élargissent les perspectives des élèves.'
60
+ sentences:
61
+ - '''concours'':événement|CIBLE|''jeunes'':groupe'
62
+ - '''projets'':__inferred__|TÉLÉCHARGER_ET_REMPLIR|''charte des valeurs de la République
63
+ et de la laïcité'':document'
64
+ - '''programme'':initiative|IMPLIQUE|''lycéens'':groupe'
65
+ - source_sentence: 'Type de project: Le Prix des Innovateurs vise à encourager, soutenir
66
+ et valoriser la recherche, le transfert de technologie et l’émergence d’innovations
67
+ en santé dont l’impact sociétal et de santé publique est remarquable. Ce prix
68
+ a ainsi vocation à :  Contribuer à la reconnaissance d’un chercheur et de son
69
+ équipe menant des recherches dans le secteur de la santé,Encourager la création
70
+ de spin-off de laboratoires académiques en garantissant les meilleures conditions
71
+ d’essaimage notamment par l’acquisition des compétences requises par l’ensemble
72
+ des membres de l’équipe,Renforcer'
73
+ sentences:
74
+ - '''2nde session de dépôt'':session|diffusion prévue|''diffusion à partir de novembre
75
+ 2025'':__inferred__'
76
+ - '''chercheur'':personne|DIRIGE|''équipe de recherche'':groupe'
77
+ - '''Collectivité ou institution - Communes de > 20 000 hab'':organisation|éligible
78
+ pour|''dépôt des demandes de subvention'':procédure'
79
+ - source_sentence: 'Date de début: non précisée
80
+
81
+ Date de fin (clôture): non précisée
82
+
83
+ Date de début de la future campagne: non précisée'
84
+ sentences:
85
+ - '''Date de fin'':concept|EST|''Lundi 18 Novembre 2024'':__inferred__'
86
+ - '''Région IDF'':organisation|PROPOSE|''Grands Lieux d''Innovation'':programme'
87
+ - '''Date de fin'':concept|EST|''non précisée'':__inferred__'
88
+ model-index:
89
+ - name: SentenceTransformer based on intfloat/multilingual-e5-base
90
+ results:
91
+ - task:
92
+ type: binary-classification
93
+ name: Binary Classification
94
+ dataset:
95
+ name: BinaryClassifEval
96
+ type: BinaryClassifEval
97
+ metrics:
98
+ - type: cosine_accuracy
99
+ value: 0.7058340180772391
100
+ name: Cosine Accuracy
101
+ - type: cosine_accuracy_threshold
102
+ value: 0.793916642665863
103
+ name: Cosine Accuracy Threshold
104
+ - type: cosine_f1
105
+ value: 0.7171875
106
+ name: Cosine F1
107
+ - type: cosine_f1_threshold
108
+ value: 0.7811518907546997
109
+ name: Cosine F1 Threshold
110
+ - type: cosine_precision
111
+ value: 0.6912650602409639
112
+ name: Cosine Precision
113
+ - type: cosine_recall
114
+ value: 0.7451298701298701
115
+ name: Cosine Recall
116
+ - type: cosine_ap
117
+ value: 0.7612878163621353
118
+ name: Cosine Ap
119
+ - type: cosine_mcc
120
+ value: 0.4056919853026572
121
+ name: Cosine Mcc
122
+ ---
123
+
124
+ # SentenceTransformer based on intfloat/multilingual-e5-base
125
+
126
+ This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [intfloat/multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) on the json dataset. It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
127
+
128
+ ## Model Details
129
+
130
+ ### Model Description
131
+ - **Model Type:** Sentence Transformer
132
+ - **Base model:** [intfloat/multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) <!-- at revision 835193815a3936a24a0ee7dc9e3d48c1fbb19c55 -->
133
+ - **Maximum Sequence Length:** 512 tokens
134
+ - **Output Dimensionality:** 768 dimensions
135
+ - **Similarity Function:** Cosine Similarity
136
+ - **Training Dataset:**
137
+ - json
138
+ <!-- - **Language:** Unknown -->
139
+ <!-- - **License:** Unknown -->
140
+
141
+ ### Model Sources
142
+
143
+ - **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
144
+ - **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
145
+ - **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
146
+
147
+ ### Full Model Architecture
148
+
149
+ ```
150
+ SentenceTransformer(
151
+ (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: XLMRobertaModel
152
+ (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
153
+ (2): Normalize()
154
+ )
155
+ ```
156
+
157
+ ## Usage
158
+
159
+ ### Direct Usage (Sentence Transformers)
160
+
161
+ First install the Sentence Transformers library:
162
+
163
+ ```bash
164
+ pip install -U sentence-transformers
165
+ ```
166
+
167
+ Then you can load this model and run inference.
168
+ ```python
169
+ from sentence_transformers import SentenceTransformer
170
+
171
+ # Download from the 🤗 Hub
172
+ model = SentenceTransformer("Lettria/grag-go-idf-contrastive_8083-v2-trial-6")
173
+ # Run inference
174
+ sentences = [
175
+ 'Date de début: non précisée\nDate de fin (clôture): non précisée\nDate de début de la future campagne: non précisée',
176
+ "'Date de fin':concept|EST|'non précisée':__inferred__",
177
+ "'Date de fin':concept|EST|'Lundi 18 Novembre 2024':__inferred__",
178
+ ]
179
+ embeddings = model.encode(sentences)
180
+ print(embeddings.shape)
181
+ # [3, 768]
182
+
183
+ # Get the similarity scores for the embeddings
184
+ similarities = model.similarity(embeddings, embeddings)
185
+ print(similarities.shape)
186
+ # [3, 3]
187
+ ```
188
+
189
+ <!--
190
+ ### Direct Usage (Transformers)
191
+
192
+ <details><summary>Click to see the direct usage in Transformers</summary>
193
+
194
+ </details>
195
+ -->
196
+
197
+ <!--
198
+ ### Downstream Usage (Sentence Transformers)
199
+
200
+ You can finetune this model on your own dataset.
201
+
202
+ <details><summary>Click to expand</summary>
203
+
204
+ </details>
205
+ -->
206
+
207
+ <!--
208
+ ### Out-of-Scope Use
209
+
210
+ *List how the model may foreseeably be misused and address what users ought not to do with the model.*
211
+ -->
212
+
213
+ ## Evaluation
214
+
215
+ ### Metrics
216
+
217
+ #### Binary Classification
218
+
219
+ * Dataset: `BinaryClassifEval`
220
+ * Evaluated with [<code>BinaryClassificationEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.BinaryClassificationEvaluator)
221
+
222
+ | Metric | Value |
223
+ |:--------------------------|:-----------|
224
+ | cosine_accuracy | 0.7058 |
225
+ | cosine_accuracy_threshold | 0.7939 |
226
+ | cosine_f1 | 0.7172 |
227
+ | cosine_f1_threshold | 0.7812 |
228
+ | cosine_precision | 0.6913 |
229
+ | cosine_recall | 0.7451 |
230
+ | **cosine_ap** | **0.7613** |
231
+ | cosine_mcc | 0.4057 |
232
+
233
+ <!--
234
+ ## Bias, Risks and Limitations
235
+
236
+ *What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
237
+ -->
238
+
239
+ <!--
240
+ ### Recommendations
241
+
242
+ *What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
243
+ -->
244
+
245
+ ## Training Details
246
+
247
+ ### Training Dataset
248
+
249
+ #### json
250
+
251
+ * Dataset: json
252
+ * Size: 4,861 training samples
253
+ * Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
254
+ * Approximate statistics based on the first 1000 samples:
255
+ | | sentence1 | sentence2 | label |
256
+ |:--------|:-------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:-----------------------------|
257
+ | type | string | string | int |
258
+ | details | <ul><li>min: 26 tokens</li><li>mean: 191.64 tokens</li><li>max: 429 tokens</li></ul> | <ul><li>min: 18 tokens</li><li>mean: 31.2 tokens</li><li>max: 72 tokens</li></ul> | <ul><li>1: 100.00%</li></ul> |
259
+ * Samples:
260
+ | sentence1 | sentence2 | label |
261
+ |:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------|:---------------|
262
+ | <code>Type de project: L’excès de précipitations tout au long de l’année a conduit à une chute spectaculaire des rendements des céréales d’été et des protéagineux (blé, orge, pois, féverole, etc.) que produisent 90% des agriculteurs d’Île-de-France, historique grenier à blé du pays. Tributaires naturels du fleurissement des cultures, les apiculteurs professionnels de la région ont également souffert de ces dérèglements climatiques.La Région accompagne les exploitations concernées en leur apportant une aide exceptionnelle.</code> | <code>'excès de précipitations':phénomène|DIMINUE|'rendements des protéagineux':concept</code> | <code>1</code> |
263
+ | <code>Type de project: Dans le cadre de sa stratégie « Impact 2028 », la Région s’engage dans la défense de la souveraineté industrielle en renforçant son soutien à une industrie circulaire et décarbonée, porteuse d’innovations et créatrice d’emplois. PM'up Jeunes pousses industrielles soutient les projets d’implantation d’une première usine tournée vers la décarbonation, l’efficacité énergétique et la circularité des processus de production. Ces projets peuvent prendre l'une de ces formes : Une première unité de production industrielle, après une phase de prototypage,Une ligne pilote de production industrielle, en interne ou chez un tiers situé en Île-de-France, à condition que sa production soit destinée à de premières commercialisations,La transformation d’une unité de production pilote à une unité de production industrielle</code> | <code>'Région Île-de-France':organisation|soutient|'industrie décarbonée':concept</code> | <code>1</code> |
264
+ | <code>Procédures et démarches: Le dépôt des demandes de subvention se fait en ligne sur la plateforme régionale mesdemarches.iledefrance.fr : Session de dépôt unique pour les nouvelles demandes : du 30 septembre au 4 novembre 2024 (11 heures) pour des festivals qui se déroulent entre le 1er mars 2025 et le 28 février 2026 (vote à la CP de mars 2025). Pour les demandes de renouvellement, un mail est envoyé aux structures concernées par le service du Spectacle vivant en amont de chaque session de dépôt.<br>Bénéficiaires: Professionnel - Culture, Association - Fondation, Association - Régie par la loi de 1901, Association - ONG, Collectivité ou institution - Communes de 10 000 à 20 000 hab, Collectivité ou institution - Autre (GIP, copropriété, EPA...), Collectivité ou institution - Communes de 2000 à 10 000 hab, Collectivité ou institution - Communes de < 2000 hab, Collectivité ou institution - Communes de > 20 000 hab, Collectivité ou institution - Département, Collectivité ou institution - EPC...</code> | <code>'Collectivité ou institution - EPCI':bénéficiaire|PEUT_BÉNÉFICIER|'demandes de subvention':procédure</code> | <code>1</code> |
265
+ * Loss: [<code>ContrastiveLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#contrastiveloss) with these parameters:
266
+ ```json
267
+ {
268
+ "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
269
+ "margin": 0.5,
270
+ "size_average": true
271
+ }
272
+ ```
273
+
274
+ ### Evaluation Dataset
275
+
276
+ #### json
277
+
278
+ * Dataset: json
279
+ * Size: 1,217 evaluation samples
280
+ * Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
281
+ * Approximate statistics based on the first 1000 samples:
282
+ | | sentence1 | sentence2 | label |
283
+ |:--------|:-------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:------------------------------------------------|
284
+ | type | string | string | int |
285
+ | details | <ul><li>min: 24 tokens</li><li>mean: 188.47 tokens</li><li>max: 394 tokens</li></ul> | <ul><li>min: 17 tokens</li><li>mean: 31.22 tokens</li><li>max: 133 tokens</li></ul> | <ul><li>0: ~38.40%</li><li>1: ~61.60%</li></ul> |
286
+ * Samples:
287
+ | sentence1 | sentence2 | label |
288
+ |:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|:---------------|
289
+ | <code>Type de project: Le programme propose des rencontres le samedi après-midi dans une université ou une grande école réputée, entre les professionnels bénévoles et les lycéens et collégiens sous la forme d'atelier thématiques. Ces moments de rencontre touchent à une grande multitude de domaines d’activités. L'objectif est de donner l’opportunité aux jeunes les plus enclavés d’échanger avec des intervenants professionnels aux parcours atypiques et inspirants. Les intervenants suscitent les ambitions et élargissent les perspectives des élèves.</code> | <code>'rencontres':événement|impliquent|'professionnels bénévoles':groupe</code> | <code>1</code> |
290
+ | <code>Précision sure les bénéficiaires: Communes,Établissements publics de coopération intercommunale (avec ou sans fiscalité propre),Établissements publics territoriaux franciliens,Départements,Aménageurs publics et privés (lorsque ces derniers interviennent à la demande ou pour le compte d'une collectivité précitée).</code> | <code>'Aménageurs privés':entité|INTERVIENT_POUR|'Départements':entité</code> | <code>1</code> |
291
+ | <code>Date de début: non précisée<br>Date de fin (clôture): non précisée<br>Date de début de la future campagne: non précisée</code> | <code>'Date de fin':concept|EST|'non précisée':__inferred__</code> | <code>1</code> |
292
+ * Loss: [<code>ContrastiveLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#contrastiveloss) with these parameters:
293
+ ```json
294
+ {
295
+ "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
296
+ "margin": 0.5,
297
+ "size_average": true
298
+ }
299
+ ```
300
+
301
+ ### Training Hyperparameters
302
+ #### Non-Default Hyperparameters
303
+
304
+ - `eval_strategy`: epoch
305
+ - `per_device_train_batch_size`: 4
306
+ - `per_device_eval_batch_size`: 4
307
+ - `gradient_accumulation_steps`: 2
308
+ - `learning_rate`: 2.0007927807284357e-05
309
+ - `num_train_epochs`: 10
310
+ - `lr_scheduler_type`: cosine
311
+ - `warmup_steps`: 320
312
+ - `bf16`: True
313
+ - `tf32`: True
314
+ - `load_best_model_at_end`: True
315
+ - `optim`: adamw_torch_fused
316
+ - `hub_model_id`: Lettria/grag-go-idf-contrastive_8083-v2-trial-6
317
+
318
+ #### All Hyperparameters
319
+ <details><summary>Click to expand</summary>
320
+
321
+ - `overwrite_output_dir`: False
322
+ - `do_predict`: False
323
+ - `eval_strategy`: epoch
324
+ - `prediction_loss_only`: True
325
+ - `per_device_train_batch_size`: 4
326
+ - `per_device_eval_batch_size`: 4
327
+ - `per_gpu_train_batch_size`: None
328
+ - `per_gpu_eval_batch_size`: None
329
+ - `gradient_accumulation_steps`: 2
330
+ - `eval_accumulation_steps`: None
331
+ - `torch_empty_cache_steps`: None
332
+ - `learning_rate`: 2.0007927807284357e-05
333
+ - `weight_decay`: 0.0
334
+ - `adam_beta1`: 0.9
335
+ - `adam_beta2`: 0.999
336
+ - `adam_epsilon`: 1e-08
337
+ - `max_grad_norm`: 1.0
338
+ - `num_train_epochs`: 10
339
+ - `max_steps`: -1
340
+ - `lr_scheduler_type`: cosine
341
+ - `lr_scheduler_kwargs`: {}
342
+ - `warmup_ratio`: 0.0
343
+ - `warmup_steps`: 320
344
+ - `log_level`: passive
345
+ - `log_level_replica`: warning
346
+ - `log_on_each_node`: True
347
+ - `logging_nan_inf_filter`: True
348
+ - `save_safetensors`: True
349
+ - `save_on_each_node`: False
350
+ - `save_only_model`: False
351
+ - `restore_callback_states_from_checkpoint`: False
352
+ - `no_cuda`: False
353
+ - `use_cpu`: False
354
+ - `use_mps_device`: False
355
+ - `seed`: 42
356
+ - `data_seed`: None
357
+ - `jit_mode_eval`: False
358
+ - `use_ipex`: False
359
+ - `bf16`: True
360
+ - `fp16`: False
361
+ - `fp16_opt_level`: O1
362
+ - `half_precision_backend`: auto
363
+ - `bf16_full_eval`: False
364
+ - `fp16_full_eval`: False
365
+ - `tf32`: True
366
+ - `local_rank`: 0
367
+ - `ddp_backend`: None
368
+ - `tpu_num_cores`: None
369
+ - `tpu_metrics_debug`: False
370
+ - `debug`: []
371
+ - `dataloader_drop_last`: False
372
+ - `dataloader_num_workers`: 0
373
+ - `dataloader_prefetch_factor`: None
374
+ - `past_index`: -1
375
+ - `disable_tqdm`: False
376
+ - `remove_unused_columns`: True
377
+ - `label_names`: None
378
+ - `load_best_model_at_end`: True
379
+ - `ignore_data_skip`: False
380
+ - `fsdp`: []
381
+ - `fsdp_min_num_params`: 0
382
+ - `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
383
+ - `fsdp_transformer_layer_cls_to_wrap`: None
384
+ - `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
385
+ - `deepspeed`: None
386
+ - `label_smoothing_factor`: 0.0
387
+ - `optim`: adamw_torch_fused
388
+ - `optim_args`: None
389
+ - `adafactor`: False
390
+ - `group_by_length`: False
391
+ - `length_column_name`: length
392
+ - `ddp_find_unused_parameters`: None
393
+ - `ddp_bucket_cap_mb`: None
394
+ - `ddp_broadcast_buffers`: False
395
+ - `dataloader_pin_memory`: True
396
+ - `dataloader_persistent_workers`: False
397
+ - `skip_memory_metrics`: True
398
+ - `use_legacy_prediction_loop`: False
399
+ - `push_to_hub`: False
400
+ - `resume_from_checkpoint`: None
401
+ - `hub_model_id`: Lettria/grag-go-idf-contrastive_8083-v2-trial-6
402
+ - `hub_strategy`: every_save
403
+ - `hub_private_repo`: None
404
+ - `hub_always_push`: False
405
+ - `gradient_checkpointing`: False
406
+ - `gradient_checkpointing_kwargs`: None
407
+ - `include_inputs_for_metrics`: False
408
+ - `include_for_metrics`: []
409
+ - `eval_do_concat_batches`: True
410
+ - `fp16_backend`: auto
411
+ - `push_to_hub_model_id`: None
412
+ - `push_to_hub_organization`: None
413
+ - `mp_parameters`:
414
+ - `auto_find_batch_size`: False
415
+ - `full_determinism`: False
416
+ - `torchdynamo`: None
417
+ - `ray_scope`: last
418
+ - `ddp_timeout`: 1800
419
+ - `torch_compile`: False
420
+ - `torch_compile_backend`: None
421
+ - `torch_compile_mode`: None
422
+ - `dispatch_batches`: None
423
+ - `split_batches`: None
424
+ - `include_tokens_per_second`: False
425
+ - `include_num_input_tokens_seen`: False
426
+ - `neftune_noise_alpha`: None
427
+ - `optim_target_modules`: None
428
+ - `batch_eval_metrics`: False
429
+ - `eval_on_start`: False
430
+ - `use_liger_kernel`: False
431
+ - `eval_use_gather_object`: False
432
+ - `average_tokens_across_devices`: False
433
+ - `prompts`: None
434
+ - `batch_sampler`: batch_sampler
435
+ - `multi_dataset_batch_sampler`: proportional
436
+
437
+ </details>
438
+
439
+ ### Training Logs
440
+ <details><summary>Click to expand</summary>
441
+
442
+ | Epoch | Step | Training Loss | Validation Loss | BinaryClassifEval_cosine_ap |
443
+ |:-------:|:-------:|:-------------:|:---------------:|:---------------------------:|
444
+ | 0.0658 | 40 | 0.0617 | - | - |
445
+ | 0.1316 | 80 | 0.0552 | - | - |
446
+ | 0.1974 | 120 | 0.0538 | - | - |
447
+ | 0.2632 | 160 | 0.0488 | - | - |
448
+ | 0.3289 | 200 | 0.0498 | - | - |
449
+ | 0.3947 | 240 | 0.0458 | - | - |
450
+ | 0.4605 | 280 | 0.0425 | - | - |
451
+ | 0.5263 | 320 | 0.0398 | - | - |
452
+ | 0.5921 | 360 | 0.0403 | - | - |
453
+ | 0.6579 | 400 | 0.0377 | - | - |
454
+ | 0.7237 | 440 | 0.0339 | - | - |
455
+ | 0.7895 | 480 | 0.0372 | - | - |
456
+ | 0.8553 | 520 | 0.0364 | - | - |
457
+ | 0.9211 | 560 | 0.0358 | - | - |
458
+ | 0.9868 | 600 | 0.0326 | - | - |
459
+ | **1.0** | **608** | **-** | **0.0268** | **0.7613** |
460
+ | 1.0526 | 640 | 0.0335 | - | - |
461
+ | 1.1184 | 680 | 0.0296 | - | - |
462
+ | 1.1842 | 720 | 0.0273 | - | - |
463
+ | 1.25 | 760 | 0.0253 | - | - |
464
+ | 1.3158 | 800 | 0.0249 | - | - |
465
+ | 1.3816 | 840 | 0.0276 | - | - |
466
+ | 1.4474 | 880 | 0.0255 | - | - |
467
+ | 1.5132 | 920 | 0.0204 | - | - |
468
+ | 1.5789 | 960 | 0.026 | - | - |
469
+ | 1.6447 | 1000 | 0.0202 | - | - |
470
+ | 1.7105 | 1040 | 0.0224 | - | - |
471
+ | 1.7763 | 1080 | 0.0246 | - | - |
472
+ | 1.8421 | 1120 | 0.0249 | - | - |
473
+ | 1.9079 | 1160 | 0.0214 | - | - |
474
+ | 1.9737 | 1200 | 0.0212 | - | - |
475
+ | 2.0 | 1216 | - | 0.0286 | 0.7398 |
476
+ | 2.0395 | 1240 | 0.0181 | - | - |
477
+ | 2.1053 | 1280 | 0.0156 | - | - |
478
+ | 2.1711 | 1320 | 0.0142 | - | - |
479
+ | 2.2368 | 1360 | 0.0189 | - | - |
480
+ | 2.3026 | 1400 | 0.0154 | - | - |
481
+ | 2.3684 | 1440 | 0.0184 | - | - |
482
+ | 2.4342 | 1480 | 0.0144 | - | - |
483
+ | 2.5 | 1520 | 0.0181 | - | - |
484
+ | 2.5658 | 1560 | 0.0154 | - | - |
485
+ | 2.6316 | 1600 | 0.0144 | - | - |
486
+ | 2.6974 | 1640 | 0.0175 | - | - |
487
+ | 2.7632 | 1680 | 0.0133 | - | - |
488
+ | 2.8289 | 1720 | 0.0163 | - | - |
489
+ | 2.8947 | 1760 | 0.012 | - | - |
490
+ | 2.9605 | 1800 | 0.0168 | - | - |
491
+ | 3.0 | 1824 | - | 0.0296 | 0.7407 |
492
+ | 3.0263 | 1840 | 0.0125 | - | - |
493
+ | 3.0921 | 1880 | 0.0115 | - | - |
494
+ | 3.1579 | 1920 | 0.0102 | - | - |
495
+ | 3.2237 | 1960 | 0.0097 | - | - |
496
+ | 3.2895 | 2000 | 0.0101 | - | - |
497
+ | 3.3553 | 2040 | 0.0104 | - | - |
498
+ | 3.4211 | 2080 | 0.0105 | - | - |
499
+ | 3.4868 | 2120 | 0.0105 | - | - |
500
+ | 3.5526 | 2160 | 0.0104 | - | - |
501
+ | 3.6184 | 2200 | 0.0088 | - | - |
502
+ | 3.6842 | 2240 | 0.0109 | - | - |
503
+ | 3.75 | 2280 | 0.0123 | - | - |
504
+ | 3.8158 | 2320 | 0.0102 | - | - |
505
+ | 3.8816 | 2360 | 0.0099 | - | - |
506
+ | 3.9474 | 2400 | 0.0103 | - | - |
507
+ | 4.0 | 2432 | - | 0.0294 | 0.7537 |
508
+ | 4.0132 | 2440 | 0.0093 | - | - |
509
+ | 4.0789 | 2480 | 0.0067 | - | - |
510
+ | 4.1447 | 2520 | 0.0083 | - | - |
511
+ | 4.2105 | 2560 | 0.0081 | - | - |
512
+ | 4.2763 | 2600 | 0.0083 | - | - |
513
+ | 4.3421 | 2640 | 0.0059 | - | - |
514
+ | 4.4079 | 2680 | 0.008 | - | - |
515
+ | 4.4737 | 2720 | 0.0078 | - | - |
516
+ | 4.5395 | 2760 | 0.0062 | - | - |
517
+ | 4.6053 | 2800 | 0.0064 | - | - |
518
+ | 4.6711 | 2840 | 0.0051 | - | - |
519
+ | 4.7368 | 2880 | 0.0059 | - | - |
520
+ | 4.8026 | 2920 | 0.0074 | - | - |
521
+ | 4.8684 | 2960 | 0.0068 | - | - |
522
+ | 4.9342 | 3000 | 0.0082 | - | - |
523
+ | 5.0 | 3040 | 0.0085 | 0.0319 | 0.7341 |
524
+ | 5.0658 | 3080 | 0.004 | - | - |
525
+ | 5.1316 | 3120 | 0.0049 | - | - |
526
+ | 5.1974 | 3160 | 0.005 | - | - |
527
+ | 5.2632 | 3200 | 0.0059 | - | - |
528
+ | 5.3289 | 3240 | 0.005 | - | - |
529
+ | 5.3947 | 3280 | 0.0047 | - | - |
530
+ | 5.4605 | 3320 | 0.0044 | - | - |
531
+ | 5.5263 | 3360 | 0.0046 | - | - |
532
+ | 5.5921 | 3400 | 0.0044 | - | - |
533
+ | 5.6579 | 3440 | 0.0065 | - | - |
534
+ | 5.7237 | 3480 | 0.0054 | - | - |
535
+ | 5.7895 | 3520 | 0.0062 | - | - |
536
+ | 5.8553 | 3560 | 0.0054 | - | - |
537
+ | 5.9211 | 3600 | 0.0041 | - | - |
538
+ | 5.9868 | 3640 | 0.0048 | - | - |
539
+ | 6.0 | 3648 | - | 0.0336 | 0.7182 |
540
+ | 6.0526 | 3680 | 0.0035 | - | - |
541
+ | 6.1184 | 3720 | 0.0029 | - | - |
542
+ | 6.1842 | 3760 | 0.0033 | - | - |
543
+ | 6.25 | 3800 | 0.0048 | - | - |
544
+ | 6.3158 | 3840 | 0.0058 | - | - |
545
+ | 6.3816 | 3880 | 0.0037 | - | - |
546
+ | 6.4474 | 3920 | 0.0035 | - | - |
547
+ | 6.5132 | 3960 | 0.0043 | - | - |
548
+ | 6.5789 | 4000 | 0.004 | - | - |
549
+ | 6.6447 | 4040 | 0.0026 | - | - |
550
+ | 6.7105 | 4080 | 0.0055 | - | - |
551
+ | 6.7763 | 4120 | 0.0031 | - | - |
552
+ | 6.8421 | 4160 | 0.0037 | - | - |
553
+ | 6.9079 | 4200 | 0.0036 | - | - |
554
+ | 6.9737 | 4240 | 0.0046 | - | - |
555
+ | 7.0 | 4256 | - | 0.0338 | 0.7097 |
556
+ | 7.0395 | 4280 | 0.0027 | - | - |
557
+ | 7.1053 | 4320 | 0.0026 | - | - |
558
+ | 7.1711 | 4360 | 0.0034 | - | - |
559
+ | 7.2368 | 4400 | 0.0039 | - | - |
560
+ | 7.3026 | 4440 | 0.0023 | - | - |
561
+ | 7.3684 | 4480 | 0.0034 | - | - |
562
+ | 7.4342 | 4520 | 0.0022 | - | - |
563
+ | 7.5 | 4560 | 0.0045 | - | - |
564
+ | 7.5658 | 4600 | 0.0027 | - | - |
565
+ | 7.6316 | 4640 | 0.0036 | - | - |
566
+ | 7.6974 | 4680 | 0.0031 | - | - |
567
+ | 7.7632 | 4720 | 0.0018 | - | - |
568
+ | 7.8289 | 4760 | 0.0019 | - | - |
569
+ | 7.8947 | 4800 | 0.0029 | - | - |
570
+ | 7.9605 | 4840 | 0.0033 | - | - |
571
+ | 8.0 | 4864 | - | 0.0338 | 0.7093 |
572
+ | 8.0263 | 4880 | 0.0029 | - | - |
573
+ | 8.0921 | 4920 | 0.0023 | - | - |
574
+ | 8.1579 | 4960 | 0.0026 | - | - |
575
+ | 8.2237 | 5000 | 0.0026 | - | - |
576
+ | 8.2895 | 5040 | 0.0025 | - | - |
577
+ | 8.3553 | 5080 | 0.0033 | - | - |
578
+ | 8.4211 | 5120 | 0.0031 | - | - |
579
+ | 8.4868 | 5160 | 0.0025 | - | - |
580
+ | 8.5526 | 5200 | 0.0025 | - | - |
581
+ | 8.6184 | 5240 | 0.0022 | - | - |
582
+ | 8.6842 | 5280 | 0.002 | - | - |
583
+ | 8.75 | 5320 | 0.0025 | - | - |
584
+ | 8.8158 | 5360 | 0.0018 | - | - |
585
+ | 8.8816 | 5400 | 0.0018 | - | - |
586
+ | 8.9474 | 5440 | 0.0031 | - | - |
587
+ | 9.0 | 5472 | - | 0.0342 | 0.7133 |
588
+ | 9.0132 | 5480 | 0.002 | - | - |
589
+ | 9.0789 | 5520 | 0.0026 | - | - |
590
+ | 9.1447 | 5560 | 0.0017 | - | - |
591
+ | 9.2105 | 5600 | 0.003 | - | - |
592
+ | 9.2763 | 5640 | 0.002 | - | - |
593
+ | 9.3421 | 5680 | 0.0019 | - | - |
594
+ | 9.4079 | 5720 | 0.0022 | - | - |
595
+ | 9.4737 | 5760 | 0.0018 | - | - |
596
+ | 9.5395 | 5800 | 0.0035 | - | - |
597
+ | 9.6053 | 5840 | 0.0024 | - | - |
598
+ | 9.6711 | 5880 | 0.0027 | - | - |
599
+ | 9.7368 | 5920 | 0.002 | - | - |
600
+ | 9.8026 | 5960 | 0.0029 | - | - |
601
+ | 9.8684 | 6000 | 0.0018 | - | - |
602
+ | 9.9342 | 6040 | 0.0022 | - | - |
603
+ | 10.0 | 6080 | 0.0023 | 0.0268 | 0.7613 |
604
+
605
+ * The bold row denotes the saved checkpoint.
606
+ </details>
607
+
608
+ ### Framework Versions
609
+ - Python: 3.11.9
610
+ - Sentence Transformers: 3.4.1
611
+ - Transformers: 4.48.3
612
+ - PyTorch: 2.3.0
613
+ - Accelerate: 1.1.0
614
+ - Datasets: 3.3.2
615
+ - Tokenizers: 0.21.0
616
+
617
+ ## Citation
618
+
619
+ ### BibTeX
620
+
621
+ #### Sentence Transformers
622
+ ```bibtex
623
+ @inproceedings{reimers-2019-sentence-bert,
624
+ title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
625
+ author = "Reimers, Nils and Gurevych, Iryna",
626
+ booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
627
+ month = "11",
628
+ year = "2019",
629
+ publisher = "Association for Computational Linguistics",
630
+ url = "https://arxiv.org/abs/1908.10084",
631
+ }
632
+ ```
633
+
634
+ #### ContrastiveLoss
635
+ ```bibtex
636
+ @inproceedings{hadsell2006dimensionality,
637
+ author={Hadsell, R. and Chopra, S. and LeCun, Y.},
638
+ booktitle={2006 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR'06)},
639
+ title={Dimensionality Reduction by Learning an Invariant Mapping},
640
+ year={2006},
641
+ volume={2},
642
+ number={},
643
+ pages={1735-1742},
644
+ doi={10.1109/CVPR.2006.100}
645
+ }
646
+ ```
647
+
648
+ <!--
649
+ ## Glossary
650
+
651
+ *Clearly define terms in order to be accessible across audiences.*
652
+ -->
653
+
654
+ <!--
655
+ ## Model Card Authors
656
+
657
+ *Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.*
658
+ -->
659
+
660
+ <!--
661
+ ## Model Card Contact
662
+
663
+ *Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.*
664
+ -->
config.json ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "intfloat/multilingual-e5-base",
3
+ "architectures": [
4
+ "XLMRobertaModel"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "bos_token_id": 0,
8
+ "classifier_dropout": null,
9
+ "eos_token_id": 2,
10
+ "hidden_act": "gelu",
11
+ "hidden_dropout_prob": 0.1,
12
+ "hidden_size": 768,
13
+ "initializer_range": 0.02,
14
+ "intermediate_size": 3072,
15
+ "layer_norm_eps": 1e-05,
16
+ "max_position_embeddings": 514,
17
+ "model_type": "xlm-roberta",
18
+ "num_attention_heads": 12,
19
+ "num_hidden_layers": 12,
20
+ "output_past": true,
21
+ "pad_token_id": 1,
22
+ "position_embedding_type": "absolute",
23
+ "torch_dtype": "float32",
24
+ "transformers_version": "4.48.3",
25
+ "type_vocab_size": 1,
26
+ "use_cache": true,
27
+ "vocab_size": 250002
28
+ }
config_sentence_transformers.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "__version__": {
3
+ "sentence_transformers": "3.4.1",
4
+ "transformers": "4.48.3",
5
+ "pytorch": "2.3.0"
6
+ },
7
+ "prompts": {},
8
+ "default_prompt_name": null,
9
+ "similarity_fn_name": "cosine"
10
+ }
eval/binary_classification_evaluation_BinaryClassifEval_results.csv ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ epoch,steps,cosine_accuracy,cosine_accuracy_threshold,cosine_f1,cosine_precision,cosine_recall,cosine_f1_threshold,cosine_ap,cosine_mcc
2
+ 1.0,304,0.5299917830731307,0.9999865293502808,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999450445175171,0.5103561522386265,-0.02832566355788566
3
+ 2.0,608,0.543138866064092,0.9999858140945435,0.6741699493528418,0.5159345391903531,0.9724025974025974,0.9999829530715942,0.4891530104489654,0.08246955751079205
4
+ 3.0,912,0.5217748562037798,0.9999897480010986,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999785423278809,0.49239015181900314,-0.02832566355788566
5
+ 4.0,1216,0.5357436318816763,0.9999793171882629,0.6722129783693843,0.510530749789385,0.9837662337662337,0.9999723434448242,0.5099561218168573,0.054956084208341985
6
+ 5.0,1520,0.5094494658997535,0.9999905824661255,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999655485153198,0.4791064390970818,-0.02832566355788566
7
+ 6.0,1824,0.5546425636811833,0.9999820590019226,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.999976634979248,0.5264566589677706,-0.02832566355788566
8
+ 7.0,2128,0.5365653245686114,0.9999875426292419,0.6725082146768894,0.5074380165289256,0.9967532467532467,0.9999773502349854,0.5244772336662195,0.033537160606952436
9
+ 8.0,2432,0.5365653245686114,0.9999938011169434,0.6748057713651497,0.5126475548060708,0.987012987012987,0.9999848008155823,0.5207749801784689,0.008469058612636159
10
+ 9.0,2736,0.5160230073952342,0.9999929666519165,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.999974250793457,0.4771343967251126,-0.02832566355788566
11
+ 10.0,3040,0.5538208709942481,0.9999875426292419,0.6743674367436743,0.5099833610648918,0.9951298701298701,0.9999788403511047,0.5160906077171419,0.06840827734473164
12
+ 10.0,3040,0.543138866064092,0.9999858140945435,0.6741699493528418,0.5159345391903531,0.9724025974025974,0.9999829530715942,0.4891530104489654,0.08246955751079205
13
+ 1.0,304,0.5373870172555464,0.9999977946281433,0.673614920460779,0.5086992543496272,0.9967532467532467,0.9999951720237732,0.46751660821345303,-0.04007501917108359
14
+ 2.0,608,0.5258833196384552,0.9999921917915344,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9998164176940918,0.5104468864738587,-0.02832566355788566
15
+ 3.0,912,0.5291700903861956,0.9999878406524658,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9998915195465088,0.5052511216176045,-0.02832566355788566
16
+ 4.0,1216,0.5217748562037798,0.9999913573265076,0.6754385964912281,0.5099337748344371,1.0,0.9997200965881348,0.5194470096560825,0.041075227636252064
17
+ 5.0,1520,0.5308134757600658,0.9997667074203491,0.6721763085399447,0.5087572977481234,0.9902597402597403,0.9997559189796448,0.47997952203749455,0.04235524258352252
18
+ 6.0,1824,0.5538208709942481,0.9999933242797852,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9998000860214233,0.5595295395617994,-0.02832566355788566
19
+ 7.0,2128,0.5891536565324569,0.9999944567680359,0.6724890829694323,0.506578947368421,1.0,0.9998043775558472,0.591501472964294,0.029032626874638215
20
+ 8.0,2432,0.5308134757600658,0.9999936819076538,0.6770083102493074,0.5138772077375946,0.9918831168831169,0.9997878670692444,0.5242974536145439,0.07568734414508116
21
+ 9.0,2736,0.5324568611339359,0.9999854564666748,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9998009204864502,0.5042332723406886,-0.02832566355788566
22
+ 10.0,3040,0.5349219391947412,0.9999963641166687,0.6735921268452706,0.5078318219291014,1.0,0.9998081922531128,0.5205665068448684,0.05813701310158088
23
+ 10.0,3040,0.5217748562037798,0.9999913573265076,0.6754385964912281,0.5099337748344371,1.0,0.9997200965881348,0.5194470096560825,0.041075227636252064
24
+ 1.0,304,0.6803615447822514,0.7676568031311035,0.6957055214723926,0.5591715976331361,0.9204545454545454,0.6421376466751099,0.7391842614786176,0.23696421005283075
25
+ 2.0,608,0.6828266228430567,0.8075315356254578,0.7161676646706586,0.5673624288425048,0.9707792207792207,0.6787775754928589,0.7220466877305356,0.3112712585929256
26
+ 3.0,912,0.6803615447822514,0.8426893949508667,0.7145534041224234,0.5807106598984771,0.9285714285714286,0.6910696029663086,0.7410553926111666,0.3072365644298024
27
+ 4.0,1216,0.6729663105998357,0.8889249563217163,0.7137767220902613,0.5627340823970037,0.9756493506493507,0.6178470849990845,0.7285986590255118,0.30293699070674
28
+ 5.0,1520,0.6466721446179129,0.8491288423538208,0.7146282973621103,0.5665399239543726,0.9675324675324676,0.6918087005615234,0.7227846720988773,0.30493133829634406
29
+ 6.0,1824,0.657354149548069,0.8472837805747986,0.716399506781751,0.5775347912524851,0.9431818181818182,0.7481781840324402,0.7072927632379679,0.3117088984803184
30
+ 7.0,2128,0.6483155299917831,0.801467776298523,0.7160493827160495,0.5776892430278885,0.9415584415584416,0.7177152633666992,0.6873002494303153,0.3106041869965272
31
+ 8.0,2432,0.6557107641741988,0.8115390539169312,0.7167004732927653,0.6141367323290846,0.8603896103896104,0.8115390539169312,0.6747022883367421,0.33719885639354774
32
+ 9.0,2736,0.6557107641741988,0.8156951665878296,0.7139931740614335,0.6160188457008245,0.849025974025974,0.8156951665878296,0.6837828319391104,0.33374694486775713
33
+ 10.0,3040,0.6598192276088742,0.8151745796203613,0.717983651226158,0.6185446009389671,0.8555194805194806,0.8151745796203613,0.6838437261664336,0.3325716510398042
34
+ 10.0,3040,0.6803615447822514,0.7676568031311035,0.6957055214723926,0.5591715976331361,0.9204545454545454,0.6421376466751099,0.7391842614786176,0.23696421005283075
35
+ 1.0,304,0.6647493837304848,0.7785875201225281,0.7037914691943128,0.5541044776119403,0.9642857142857143,0.6376957893371582,0.7464309702675364,0.2607297298353511
36
+ 2.0,608,0.6926869350862778,0.8346554040908813,0.7038461538461538,0.5815677966101694,0.8912337662337663,0.7237919569015503,0.72843217710582,0.2804583588808661
37
+ 3.0,912,0.6770747740345111,0.8514093160629272,0.7065420560747663,0.5733063700707786,0.9204545454545454,0.672287106513977,0.7423757623446288,0.2797040893264587
38
+ 4.0,1216,0.666392769104355,0.8769499063491821,0.7106184364060677,0.5546448087431693,0.9886363636363636,0.5549906492233276,0.7269407944822253,0.2945589481090938
39
+ 5.0,1520,0.6606409202958093,0.8764141798019409,0.7105263157894737,0.5973451327433629,0.8766233766233766,0.755174994468689,0.7104436682034249,0.3099461389760333
40
+ 6.0,1824,0.6655710764174199,0.8744558095932007,0.7074663402692778,0.5677799607072691,0.9383116883116883,0.7367913722991943,0.7196921893153956,0.2787486407142624
41
+ 7.0,2128,0.6499589153656532,0.9079380035400391,0.7103403982016698,0.5876726886291179,0.8977272727272727,0.7598819732666016,0.715158055795128,0.3010330059234246
42
+ 8.0,2432,0.6655710764174199,0.8962639570236206,0.7078787878787878,0.5647969052224371,0.948051948051948,0.7066574096679688,0.708204031069927,0.27877176791727787
43
+ 9.0,2736,0.6589975349219392,0.9086312055587769,0.7075242718446602,0.564922480620155,0.9464285714285714,0.7072000503540039,0.7137426919628725,0.2775857528555435
44
+ 10.0,3040,0.6606409202958093,0.9104288816452026,0.709090909090909,0.5657640232108317,0.9496753246753247,0.7109345197677612,0.7123908377241845,0.2833698547864545
45
+ 10.0,3040,0.6647493837304848,0.7785875201225281,0.7037914691943128,0.5541044776119403,0.9642857142857143,0.6376957893371582,0.7464309702675364,0.2607297298353511
46
+ 1.0,608,0.552999178307313,0.9999938607215881,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999879598617554,0.4964417435877794,-0.02832566355788566
47
+ 2.0,1216,0.5152013147082991,0.9999690055847168,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999641180038452,0.47796513654351513,-0.02832566355788566
48
+ 3.0,1824,0.5390304026294166,0.9999920129776001,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999752640724182,0.49626881957331004,-0.02832566355788566
49
+ 4.0,2432,0.5299917830731307,0.9999737739562988,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999620914459229,0.48812276274265204,-0.02832566355788566
50
+ 5.0,3040,0.5193097781429745,0.9999939203262329,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999735355377197,0.48905071064483163,-0.02832566355788566
51
+ 6.0,3648,0.5193097781429745,0.9999791979789734,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999599456787109,0.4973929389152414,-0.02832566355788566
52
+ 7.0,4256,0.5184880854560394,0.9999957084655762,0.6725178277564453,0.507870753935377,0.9951298701298701,0.9999697208404541,0.5082296710029497,0.0375335319689264
53
+ 8.0,4864,0.5160230073952342,0.9999775886535645,0.6735921268452706,0.5078318219291014,1.0,0.9999709129333496,0.45434923500754876,0.05813701310158088
54
+ 9.0,5472,0.5267050123253904,0.9999933838844299,0.6725274725274726,0.5083056478405316,0.9935064935064936,0.999973714351654,0.4846250461000494,0.027225808270363368
55
+ 10.0,6080,0.5341002465078061,0.9999944567680359,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999751448631287,0.5172951634894758,-0.02832566355788566
56
+ 10.0,6080,0.5299917830731307,0.9999737739562988,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999620914459229,0.48812276274265204,-0.02832566355788566
57
+ 1.0,304,0.5414954806902219,0.999996542930603,0.6714051394204484,0.5061830173124485,0.9967532467532467,0.9999871850013733,0.5241515851111833,0.0007078370101653516
58
+ 2.0,608,0.5439605587510271,0.9999966621398926,0.6739606126914661,0.5082508250825083,1.0,0.9999781847000122,0.5259605277504239,0.06502596595673575
59
+ 3.0,912,0.5390304026294166,0.9999964833259583,0.6721311475409836,0.5065897858319605,0.9983766233766234,0.999991774559021,0.49135871517826196,-0.02832566355788566
60
+ 4.0,1216,0.5595727198027938,0.9999883770942688,0.6818960593946317,0.5259911894273128,0.9691558441558441,0.9999823570251465,0.5050279029943431,0.13999922396110287
61
+ 5.0,1520,0.5135579293344289,0.9999842047691345,0.6717724288840263,0.5066006600660066,0.9967532467532467,0.9999830722808838,0.48602149819199947,-0.01595879342746578
62
+ 6.0,1824,0.5406737880032868,0.9999973177909851,0.6762430939226518,0.5125628140703518,0.9935064935064936,0.999971330165863,0.5301071155503133,0.05351243309199619
63
+ 7.0,2128,0.5225965488907148,0.9999979734420776,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999740123748779,0.4818467324022103,-0.02832566355788566
64
+ 8.0,2432,0.5299917830731307,0.9999971985816956,0.675809105869446,0.5103562551781275,1.0,0.9999845027923584,0.5087529651776477,0.09215087960485716
65
+ 9.0,2736,0.5390304026294166,0.9999837875366211,0.6714051394204484,0.5061830173124485,0.9967532467532467,0.9999738931655884,0.497108322990204,0.0007078370101653516
66
+ 10.0,3040,0.5102711585866886,0.9999852180480957,0.6739606126914661,0.5082508250825083,1.0,0.9999852180480957,0.4837982398594371,0.06502596595673575
67
+ 10.0,3040,0.5406737880032868,0.9999973177909851,0.6762430939226518,0.5125628140703518,0.9935064935064936,0.999971330165863,0.5301071155503133,0.05351243309199619
68
+ 1.0,608,0.7058340180772391,0.793916642665863,0.7171875,0.6912650602409639,0.7451298701298701,0.7811518907546997,0.7612878163621353,0.4056919853026572
69
+ 2.0,1216,0.7066557107641742,0.8494635820388794,0.7171010807374443,0.5893416927899686,0.9155844155844156,0.7360906004905701,0.7398135858706354,0.31918756495105144
70
+ 3.0,1824,0.676253081347576,0.8604937791824341,0.7242960052390307,0.6070252469813392,0.8977272727272727,0.769359827041626,0.7407439629415769,0.34808989265431284
71
+ 4.0,2432,0.6828266228430567,0.8775991201400757,0.7255563047875927,0.6205305651672434,0.8733766233766234,0.768502414226532,0.7536580624747373,0.36003319592061894
72
+ 5.0,3040,0.6729663105998357,0.8885157108306885,0.7186030893216925,0.6128293241695304,0.8685064935064936,0.796892523765564,0.7341219816150957,0.33987518502747516
73
+ 6.0,3648,0.6606409202958093,0.9281609058380127,0.7121588089330023,0.5763052208835341,0.9318181818181818,0.748379111289978,0.7182104981421685,0.29783649931195894
74
+ 7.0,4256,0.6614626129827444,0.911262571811676,0.7157326130992573,0.6127167630057804,0.8603896103896104,0.8033319711685181,0.709654450349197,0.3340948201401711
75
+ 8.0,4864,0.6672144617912901,0.9275944232940674,0.7165706973768393,0.5913410770855333,0.9090909090909091,0.754738450050354,0.7093186507833783,0.31906963787582776
76
+ 9.0,5472,0.6746096959737058,0.9246115684509277,0.7191011235955056,0.6064659977703456,0.8831168831168831,0.7898432016372681,0.7132762194016629,0.33433897774200566
77
+ 10.0,6080,0.6713229252259655,0.9239299297332764,0.7187293183322302,0.6067039106145251,0.8814935064935064,0.7922683954238892,0.7126430139469299,0.33526691569702416
78
+ 10.0,6080,0.7058340180772391,0.793916642665863,0.7171875,0.6912650602409639,0.7451298701298701,0.7811518907546997,0.7612878163621353,0.4056919853026572
79
+ 1.0,608,0.543138866064092,0.9999997019767761,0.6728563626433642,0.5069958847736625,1.0,0.9999970197677612,0.49726072092927,0.0
80
+ 2.0,1216,0.5406737880032868,0.9999989867210388,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9999916553497314,0.5350519657348317,-0.02832566355788566
81
+ 3.0,1824,0.5497124075595727,0.9999966621398926,0.6733111849390919,0.5109243697478991,0.987012987012987,0.9998738765716553,0.5217659713614683,0.06322870659083427
82
+ 4.0,2432,0.5135579293344289,0.9995465278625488,0.673614920460779,0.5086992543496272,0.9967532467532467,0.9995412230491638,0.4841565845552425,-0.04007501917108359
83
+ 5.0,3040,0.5225965488907148,0.9999977350234985,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.9997003078460693,0.5231872869784522,-0.02832566355788566
84
+ 6.0,3648,0.5546425636811833,0.9999721646308899,0.6713973799126639,0.5057565789473685,0.9983766233766234,0.999503493309021,0.5169661909870291,-0.02832566355788566
85
+ 7.0,4256,0.5308134757600658,0.9997500777244568,0.6767337807606262,0.5162116040955631,0.9821428571428571,0.9997462034225464,0.4956333937225472,0.04893383570402448
86
+ 8.0,4864,0.5439605587510271,0.9999799728393555,0.6717724288840263,0.5066006600660066,0.9967532467532467,0.9996611475944519,0.5569753180682906,0.013638562989627045
87
+ 9.0,5472,0.5357436318816763,0.9997211694717407,0.675068493150685,0.5095119933829612,1.0,0.9997063875198364,0.49172714803967155,0.08235405026943028
88
+ 10.0,6080,0.5250616269515201,0.9999856948852539,0.673224043715847,0.5074135090609555,1.0,0.9997073411941528,0.5147729244281627,0.050327389511871615
89
+ 10.0,6080,0.5135579293344289,0.9995465278625488,0.673614920460779,0.5086992543496272,0.9967532467532467,0.9995412230491638,0.4841565845552425,-0.04007501917108359
90
+ 1.0,608,0.6170912078882498,0.8871210813522339,0.698090692124105,0.5518867924528302,0.9496753246753247,0.7523710131645203,0.6927587311758633,0.21699344737848222
91
+ 2.0,1216,0.6368118323746919,0.7256832718849182,0.6878547105561861,0.5287958115183246,0.9837662337662337,0.5916898250579834,0.6945240071102398,0.1818739407308707
92
+ 3.0,1824,0.6170912078882498,0.7738906741142273,0.6803278688524589,0.5872641509433962,0.8084415584415584,0.7612220048904419,0.6781170951661508,0.24591010722366508
93
+ 4.0,2432,0.6236647493837305,0.8943555355072021,0.6911242603550295,0.5437616387337058,0.948051948051948,0.6510113477706909,0.6837879533403316,0.20609751284776784
94
+ 5.0,3040,0.6343467543138867,0.9221521019935608,0.6931742876076873,0.5856662933930571,0.849025974025974,0.8438745737075806,0.671031432226483,0.2607502303588794
95
+ 6.0,3648,0.6277732128184059,0.9465571641921997,0.6821428571428572,0.5385338345864662,0.9301948051948052,0.7515051364898682,0.6752224775714175,0.17074424367030783
96
+ 7.0,4256,0.6368118323746919,0.9416571259498596,0.6844192634560907,0.525674499564839,0.9805194805194806,0.6278055906295776,0.7062408970715746,0.16042275784742707
97
+ 8.0,4864,0.6211996713229252,0.9144222736358643,0.6866077498300476,0.5906432748538012,0.8198051948051948,0.8532370328903198,0.7045970487820908,0.25968606377463
98
+ 9.0,5472,0.6327033689400164,0.8782525062561035,0.6859504132231404,0.5956937799043063,0.8084415584415584,0.8525110483169556,0.7040456612170579,0.2652634925726873
99
+ 10.0,6080,0.6359901396877568,0.8785731792449951,0.6899862825788752,0.5973871733966746,0.8165584415584416,0.8482794761657715,0.7041222783268621,0.27341024631405014
100
+ 10.0,6080,0.6368118323746919,0.7256832718849182,0.6878547105561861,0.5287958115183246,0.9837662337662337,0.5916898250579834,0.6945240071102398,0.1818739407308707
101
+ 1.0,152,0.6565324568611339,0.818817138671875,0.6935779816513761,0.5564278704612365,0.9204545454545454,0.6538575887680054,0.7233497480111735,0.2280786767210387
102
+ 2.0,304,0.6820049301561216,0.8504238724708557,0.6999383857054835,0.564051638530288,0.922077922077922,0.6941171884536743,0.7228070313415067,0.25355004777003337
103
+ 3.0,456,0.6844700082169268,0.8048432469367981,0.703030303030303,0.6590909090909091,0.7532467532467533,0.7840628623962402,0.7415748633589909,0.3583259405986469
104
+ 4.0,608,0.6729663105998357,0.829609751701355,0.6967113276492083,0.557504873294347,0.9285714285714286,0.6935693025588989,0.72794915877324,0.23800948913042216
105
+ 5.0,760,0.686113393590797,0.9183405637741089,0.6992429456297317,0.6069295101553166,0.8246753246753247,0.8268334865570068,0.7452586973213189,0.29912413008767047
106
+ 6.0,912,0.6589975349219392,0.9357782006263733,0.686298076923077,0.5448473282442748,0.926948051948052,0.7767436504364014,0.7165148381219967,0.19268094040213465
107
+ 7.0,1064,0.6828266228430567,0.9075065851211548,0.684931506849315,0.5409219190968956,0.9334415584415584,0.7321123480796814,0.7034916304287689,0.1826582655976784
108
+ 8.0,1216,0.6557107641741988,0.9162940979003906,0.6814637072585482,0.5404376784015223,0.922077922077922,0.7570115923881531,0.697102499123418,0.17249938512165613
109
+ 9.0,1368,0.657354149548069,0.920343279838562,0.6830143540669856,0.540719696969697,0.926948051948052,0.7494903802871704,0.6992476571756254,0.17701825817874464
110
+ 10.0,1520,0.6565324568611339,0.922082245349884,0.6814371257485029,0.5398481973434535,0.9237012987012987,0.7519190907478333,0.6978115056028087,0.1713296452643464
111
+ 10.0,1520,0.6565324568611339,0.818817138671875,0.6935779816513761,0.5564278704612365,0.9204545454545454,0.6538575887680054,0.7233497480111735,0.2280786767210387
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:841debe789240335b2b403866e68f4ba0f615af8e0884d9896af265173dac376
3
+ size 1112197096
modules.json ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "idx": 0,
4
+ "name": "0",
5
+ "path": "",
6
+ "type": "sentence_transformers.models.Transformer"
7
+ },
8
+ {
9
+ "idx": 1,
10
+ "name": "1",
11
+ "path": "1_Pooling",
12
+ "type": "sentence_transformers.models.Pooling"
13
+ },
14
+ {
15
+ "idx": 2,
16
+ "name": "2",
17
+ "path": "2_Normalize",
18
+ "type": "sentence_transformers.models.Normalize"
19
+ }
20
+ ]
runs/Mar11_09-06-35_algo-1/events.out.tfevents.1741683996.algo-1.88.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7b5617af14b6083fbd95f8f98bc7114631776a0dc9f0d2c71d5bf04ae4b696e9
3
+ size 30240
runs/Mar11_09-06-35_algo-1/events.out.tfevents.1741685196.algo-1.88.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:df036f9ea651272a343bc9049b7caa1fb403771ccde57fafcfd90976611aa99c
3
+ size 1031
runs/Mar11_09-26-41_algo-1/events.out.tfevents.1741685202.algo-1.88.2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d4a274f350e26a3bbac26c857668ac815cb0760898ddf518e38420b6f89e5e10
3
+ size 30241
runs/Mar11_09-26-41_algo-1/events.out.tfevents.1741686086.algo-1.88.3 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:50732ec69d923383239f7164beb31658c848bbb22f8f8f52b69e4e3998978961
3
+ size 1031
runs/Mar11_09-41-30_algo-1/events.out.tfevents.1741686091.algo-1.88.4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:af2bfcde636276c28fefebbb1b2fd76e5efb478614200987ac2be451b356cc0e
3
+ size 30242
runs/Mar11_09-41-30_algo-1/events.out.tfevents.1741687314.algo-1.88.5 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4746e5a3bebd2c83e066998f4cc36512db1f04b1a58fa0717667173323c3f34f
3
+ size 1031
runs/Mar11_10-01-57_algo-1/events.out.tfevents.1741687318.algo-1.88.6 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cf7dc1c1baa38c5e5382843b231c57c5bebf22d2659b719a5360ec6bcad575d4
3
+ size 30242
runs/Mar11_10-01-57_algo-1/events.out.tfevents.1741688552.algo-1.88.7 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:03dcc0aca6b60ebbd9d5ac7576b499bf4b46c516346caec22653b5c784208e9a
3
+ size 1031
runs/Mar11_10-22-35_algo-1/events.out.tfevents.1741688556.algo-1.88.8 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4e7248089a7ff031d408161e6976cadfc8a81d4e896a995beb9aa207a5c6121d
3
+ size 46278
runs/Mar11_10-22-35_algo-1/events.out.tfevents.1741689857.algo-1.88.9 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:284f56df06ba3c6bbeaf60ebca50ba923a1c5567f8049099ddc34db8b93de367
3
+ size 1031
runs/Mar11_10-44-20_algo-1/events.out.tfevents.1741689861.algo-1.88.10 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:086d273b22b8d96f075eaa09b6f1ed86a07f9a129ed20f3363e90a4dcaef01fa
3
+ size 30241
runs/Mar11_10-44-20_algo-1/events.out.tfevents.1741691061.algo-1.88.11 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2e31b3d5e49264d22989596c72ca57110ef8502c60257d17a25a26c38b029040
3
+ size 1031
runs/Mar11_11-04-24_algo-1/events.out.tfevents.1741691065.algo-1.88.12 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:868d33275f1b9f1de40c24f8ccca9b51e028d9273b266d30449fb71be705bfe2
3
+ size 46278
runs/Mar11_11-04-24_algo-1/events.out.tfevents.1741692361.algo-1.88.13 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fd03f1dc0dd5050508debebe07d97457604ce16fb82a2dacd814c7ec6cc6da97
3
+ size 1031
runs/Mar11_11-26-05_algo-1/events.out.tfevents.1741692366.algo-1.88.14 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4bc6f2ec215a0e5e205f362b29673bcfdb1d5bff45fc8325cead7f3653c62a54
3
+ size 46275
runs/Mar11_11-26-05_algo-1/events.out.tfevents.1741693671.algo-1.88.15 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1de72a6e991c4fdde9c92b6dc35730b5e1b291d8ce8ce86260b8f5495247f87b
3
+ size 1031
runs/Mar11_11-47-55_algo-1/events.out.tfevents.1741693676.algo-1.88.16 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a07520bb3d5192849ed593a4602372478b7e8bc9185a9bf63ad10d6730132a82
3
+ size 46277
runs/Mar11_11-47-55_algo-1/events.out.tfevents.1741694970.algo-1.88.17 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:147f67a030bfe24ae552440308cae9a1e57b5491bb375abaf466bf04234667cb
3
+ size 1031
runs/Mar11_12-09-34_algo-1/events.out.tfevents.1741694976.algo-1.88.18 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:94584fb5fa775df496add69ca33356b1067d32b41b3130664e15dae87cd14f5e
3
+ size 22222
runs/Mar11_12-09-34_algo-1/events.out.tfevents.1741695809.algo-1.88.19 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6dd17d0313f73095d46f5118ae1784b6b1a05212a79c1bd705945fca093491cd
3
+ size 1031
sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {
2
+ "max_seq_length": 512,
3
+ "do_lower_case": false
4
+ }
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cfc8146abe2a0488e9e2a0c56de7952f7c11ab059eca145a0a727afce0db2865
3
+ size 5069051
special_tokens_map.json ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<s>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "cls_token": {
10
+ "content": "<s>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "eos_token": {
17
+ "content": "</s>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "mask_token": {
24
+ "content": "<mask>",
25
+ "lstrip": true,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ },
30
+ "pad_token": {
31
+ "content": "<pad>",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false
36
+ },
37
+ "sep_token": {
38
+ "content": "</s>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false
43
+ },
44
+ "unk_token": {
45
+ "content": "<unk>",
46
+ "lstrip": false,
47
+ "normalized": false,
48
+ "rstrip": false,
49
+ "single_word": false
50
+ }
51
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:883b037111086fd4dfebbbc9b7cee11e1517b5e0c0514879478661440f137085
3
+ size 17082987
tokenizer_config.json ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "<s>",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "1": {
12
+ "content": "<pad>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "2": {
20
+ "content": "</s>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "3": {
28
+ "content": "<unk>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "250001": {
36
+ "content": "<mask>",
37
+ "lstrip": true,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ }
43
+ },
44
+ "bos_token": "<s>",
45
+ "clean_up_tokenization_spaces": true,
46
+ "cls_token": "<s>",
47
+ "eos_token": "</s>",
48
+ "extra_special_tokens": {},
49
+ "mask_token": "<mask>",
50
+ "model_max_length": 512,
51
+ "pad_token": "<pad>",
52
+ "sep_token": "</s>",
53
+ "tokenizer_class": "XLMRobertaTokenizer",
54
+ "unk_token": "<unk>"
55
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7be0a616532ad24a265658ea3c6e34432b32bfcb61d8195507788c9b063a970e
3
+ size 5624