manu committed on
Commit
545c4d5
·
verified ·
1 Parent(s): 172724c

Upload folder using huggingface_hub

Browse files
.gitignore ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Custom
2
+ !*/configs/data/
3
+ .DS_Store
4
+ /.vscode/
5
+ /data/
6
+ /logs/
7
+ /models/
8
+ /outputs/
9
+
10
+ # Byte-compiled / optimized / DLL files
11
+ __pycache__/
12
+ *.py[cod]
13
+ *$py.class
14
+
15
+ # C extensions
16
+ *.so
17
+
18
+ # Distribution / packaging
19
+ .Python
20
+ build/
21
+ develop-eggs/
22
+ dist/
23
+ downloads/
24
+ eggs/
25
+ .eggs/
26
+ lib/
27
+ lib64/
28
+ parts/
29
+ sdist/
30
+ var/
31
+ wheels/
32
+ share/python-wheels/
33
+ *.egg-info/
34
+ .installed.cfg
35
+ *.egg
36
+ MANIFEST
37
+
38
+ # PyInstaller
39
+ # Usually these files are written by a python script from a template
40
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
41
+ *.manifest
42
+ *.spec
43
+
44
+ # Installer logs
45
+ pip-log.txt
46
+ pip-delete-this-directory.txt
47
+
48
+ # Unit test / coverage reports
49
+ htmlcov/
50
+ .tox/
51
+ .nox/
52
+ .coverage
53
+ .coverage.*
54
+ .cache
55
+ nosetests.xml
56
+ coverage.xml
57
+ *.cover
58
+ *.py,cover
59
+ .hypothesis/
60
+ .pytest_cache/
61
+ cover/
62
+
63
+ # Translations
64
+ *.mo
65
+ *.pot
66
+
67
+ # Django stuff:
68
+ *.log
69
+ local_settings.py
70
+ db.sqlite3
71
+ db.sqlite3-journal
72
+
73
+ # Flask stuff:
74
+ instance/
75
+ .webassets-cache
76
+
77
+ # Scrapy stuff:
78
+ .scrapy
79
+
80
+ # Sphinx documentation
81
+ docs/_build/
82
+
83
+ # PyBuilder
84
+ .pybuilder/
85
+ target/
86
+
87
+ # Jupyter Notebook
88
+ .ipynb_checkpoints
89
+ notebooks/
90
+
91
+ # IPython
92
+ profile_default/
93
+ ipython_config.py
94
+
95
+ # pyenv
96
+ # For a library or package, you might want to ignore these files since the code is
97
+ # intended to run in multiple environments; otherwise, check them in:
98
+ # .python-version
99
+
100
+ # pipenv
101
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
102
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
103
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
104
+ # install all needed dependencies.
105
+ #Pipfile.lock
106
+
107
+ # poetry
108
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
109
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
110
+ # commonly ignored for libraries.
111
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
112
+ #poetry.lock
113
+
114
+ # pdm
115
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
116
+ #pdm.lock
117
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
118
+ # in version control.
119
+ # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
120
+ .pdm.toml
121
+ .pdm-python
122
+ .pdm-build/
123
+
124
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
125
+ __pypackages__/
126
+
127
+ # Celery stuff
128
+ celerybeat-schedule
129
+ celerybeat.pid
130
+
131
+ # SageMath parsed files
132
+ *.sage.py
133
+
134
+ # Environments
135
+ .env
136
+ .venv
137
+ env/
138
+ venv/
139
+ ENV/
140
+ env.bak/
141
+ venv.bak/
142
+
143
+ # Spyder project settings
144
+ .spyderproject
145
+ .spyproject
146
+
147
+ # Rope project settings
148
+ .ropeproject
149
+
150
+ # mkdocs documentation
151
+ /site
152
+
153
+ # mypy
154
+ .mypy_cache/
155
+ .dmypy.json
156
+ dmypy.json
157
+
158
+ # Pyre type checker
159
+ .pyre/
160
+
161
+ # pytype static type analyzer
162
+ .pytype/
163
+
164
+ # Cython debug symbols
165
+ cython_debug/
166
+
167
+ # PyCharm
168
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
169
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
170
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
171
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
172
+ #.idea/
LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2024 Manuel Faysse, Hugues Sibille, Tony Wu
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
README.md ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Contextualized Embeddings Benchmark
2
+
3
+ This repository contains evaluation code for the "Contextualized Embeddings" project.
4
+
5
+ ## Installation
6
+ ```bash
7
+ pip install -e .
8
+ pip install git+https://github.com/jina-ai/late-chunking --no-deps # for late-chunking with jina
9
+ ```
10
+
11
+ ## Usage
12
+
13
+ Refer to `scripts/evaluation.py` for an example of how to use the code.
14
+
15
+ ```python
16
+ from datasets import load_dataset
17
+ from cde_benchmark.embedders.sentence_transformer_embedder import SentenceTransformerEmbedder
18
+ from cde_benchmark.embedders.naive_contextual_embedder import NaiveContextualEmbedder
19
+ from cde_benchmark.formatters.data_formatter import DataFormatter
20
+
21
+ # Datasets should be correctly formatted
22
+ formatter = DataFormatter("illuin-cde/chunked-mldr", split="test")
23
+
24
+ # Non-nested example
25
+ embedder = SentenceTransformerEmbedder("intfloat/e5-base-v2")
26
+ metrics = embedder.compute_metrics_e2e(formatter)
27
+ print(metrics)
28
+
29
+ # Nested example (for contextualized embeddings models)
30
+ embedder = NaiveContextualEmbedder("intfloat/e5-base-v2")
31
+ metrics = embedder.compute_metrics_e2e(formatter)
32
+ print(metrics)
33
+ ```
cde_benchmark/__init__.py ADDED
File without changes
cde_benchmark/embedders/__init__.py ADDED
File without changes
cde_benchmark/embedders/base_embedder.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Dict
2
+ import torch
3
+ from cde_benchmark.formatters.data_formatter import BaseDataFormatter
4
+ from cde_benchmark.evaluators.eval_utils import CustomRetrievalEvaluator
5
+
6
+
7
class Embedder:
    """Base class for retrieval embedders.

    Subclasses implement ``embed_queries`` and ``embed_documents``; this class
    wires them together with a data formatter, computes the query/document
    similarity matrix, and scores retrieval quality with the MTEB evaluator.
    """

    def __init__(
        self,
        is_contextual_model: bool = False,
    ):
        # Contextual models receive documents as nested chunk lists (one list
        # per source document); flat models receive a single flat chunk list.
        self.is_contextual_model = is_contextual_model
        self.evaluator = CustomRetrievalEvaluator()

    def embed_queries(self, queries):
        """Return one embedding per query. Must be overridden."""
        raise NotImplementedError

    def embed_documents(self, documents):
        """Return one embedding per chunk. Must be overridden."""
        raise NotImplementedError

    def process_queries(self, data_formatter):
        """Embed all queries.

        Returns:
            (query embeddings, gold document id per query).
        """
        queries, gold_ids = data_formatter.get_queries()
        return self.embed_queries(queries), gold_ids

    def process_documents(self, data_formatter):
        """Embed the corpus.

        Returns:
            (chunk embeddings, chunk ids), both flattened regardless of
            whether the underlying model is contextual.
        """
        if not self.is_contextual_model:
            chunks, chunk_ids = data_formatter.get_flattened()
            return self.embed_documents(chunks), chunk_ids

        # Contextual models take a list of chunk lists and must return
        # embeddings in the same nested shape; flatten both afterwards.
        nested_chunks, nested_ids = data_formatter.get_nested()
        nested_embeddings = self.embed_documents(nested_chunks)
        flat_ids = [cid for per_doc in nested_ids for cid in per_doc]
        flat_embeddings = [emb for per_doc in nested_embeddings for emb in per_doc]
        return flat_embeddings, flat_ids

    def get_similarities(self, query_embeddings, doc_embeddings):
        """Dot-product similarity matrix of shape (num_queries, num_chunks)."""
        return torch.mm(
            torch.tensor(query_embeddings), torch.tensor(doc_embeddings).t()
        )

    def get_metrics(self, scores, all_document_ids, label_documents_id):
        """Score a similarity matrix with the MTEB retrieval evaluator.

        Args:
            scores: (num_queries, num_chunks) similarity matrix.
            all_document_ids: chunk ids aligned with the score columns.
            label_documents_id: gold chunk id per query, aligned with rows.
        """
        assert scores.shape[1] == len(all_document_ids)
        assert scores.shape[0] == len(label_documents_id)
        assert set(label_documents_id).issubset(set(all_document_ids))

        # qrels: each query (keyed by its row index) has one relevant chunk.
        relevant_docs = {
            str(row): {gold_id: 1} for row, gold_id in enumerate(label_documents_id)
        }

        # run: every chunk scored against every query.
        results = {
            str(row): {
                str(doc_id): score.item()
                for doc_id, score in zip(all_document_ids, row_scores)
            }
            for row, row_scores in enumerate(scores)
        }

        metrics: Dict[str, float] = self.evaluator.compute_mteb_metrics(
            relevant_docs, results
        )
        return metrics

    def compute_metrics_e2e(self, data_formatter):
        """End-to-end pipeline: embed queries and corpus, then evaluate."""
        query_embeddings, gold_ids = self.process_queries(data_formatter)
        doc_embeddings, chunk_ids = self.process_documents(data_formatter)

        scores = self.get_similarities(query_embeddings, doc_embeddings)
        return self.get_metrics(scores, chunk_ids, gold_ids)
cde_benchmark/embedders/jina_late_chunking_embedder.py ADDED
@@ -0,0 +1,80 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ from tqdm import tqdm
3
+
4
+ from sentence_transformers import SentenceTransformer
5
+ from cde_benchmark.embedders.base_embedder import Embedder
6
+
7
+
8
class LateChunkingEmbedder(Embedder):
    """Contextual embedder using "late chunking".

    The full document is encoded in a single forward pass, then one embedding
    per chunk is produced by mean-pooling the token states between [SEP]
    markers — so each chunk embedding sees the whole document as context.
    """

    def __init__(
        self,
        model: SentenceTransformer = None,
        batch_size: int = 16,
        show_progress_bar: bool = True,
    ):
        super().__init__(is_contextual_model=True)
        self.model: SentenceTransformer = model
        self.show_progress_bar = show_progress_bar
        self.batch_size = batch_size
        # Chunk boundaries are marked with the tokenizer's [SEP] token.
        self.sep_token = self.model.tokenizer.sep_token

    def embed_queries(self, queries):
        """Encode queries independently; no late chunking is needed here."""
        return self.model.encode(
            queries,
            show_progress_bar=self.show_progress_bar,
            batch_size=self.batch_size,
        )

    def embed_documents(self, documents):
        """Embed chunks with document-level context.

        Args:
            documents: list of documents, each a list of chunk strings.

        Returns:
            One (num_chunks, dim) array of L2-normalized chunk embeddings per
            document. Chunks beyond the 8192-token window are silently
            dropped after a warning.
        """
        embeddings = []
        for document in tqdm(documents):
            # Bracket every chunk with [SEP]; the tokenizer appends the final
            # trailing [SEP] itself.
            doc = self.sep_token + f"{self.sep_token}".join(document)
            encodings = self.model.tokenizer(
                [doc],
                max_length=8192,
                truncation=True,
                padding=True,
                return_tensors="pt",
            ).to(self.model.device)

            # Token positions of every [SEP] in the (single) sequence.
            sep_indices = (
                encodings["input_ids"] == self.model.tokenizer.sep_token_id
            ).nonzero(as_tuple=True)[1]

            # The tokenizer-appended [SEP] must be the last token.
            assert (sep_indices[-1] == encodings.input_ids.shape[1] - 1).item()
            if len(document) != len(sep_indices) - 1:
                # Fewer [SEP] markers than chunks means truncation dropped
                # chunks. Bug fix: a leftover breakpoint() here used to halt
                # the whole run; we now warn and continue.
                print(f"Warning: number of documents ({len(document)}) does not match number of [SEP] tokens - 1 ({len(sep_indices)}), indicating document was too long and was truncated")
                print(f"The length of the document was {len(doc)} with {len(encodings.input_ids[0])} tokens while model max_length is {8192}")

            model_outputs = (
                self.model._modules["0"].auto_model(**encodings).last_hidden_state
            )
            tmp_embeddings = []
            for i in range(len(sep_indices) - 1):
                # Mean-pool the token states strictly between two [SEP]s.
                tmp_embeddings.append(
                    model_outputs[
                        0,
                        sep_indices[i] + 1 : sep_indices[i + 1],
                        :,
                    ]
                    .mean(dim=0)
                    .detach()
                    .cpu()
                    .numpy()
                )
            # Stack per-chunk vectors into a (num_chunks, dim) array.
            tmp_embeddings = np.array(tmp_embeddings)
            # L2-normalize each chunk embedding.
            tmp_embeddings = (
                tmp_embeddings / np.linalg.norm(tmp_embeddings, axis=1)[:, None]
            )

            embeddings.append(tmp_embeddings)

        return embeddings
cde_benchmark/embedders/naive_contextual_embedder.py ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from tqdm import tqdm
2
+
3
+ from sentence_transformers import SentenceTransformer
4
+ from cde_benchmark.embedders.base_embedder import Embedder
5
+
6
+
7
class NaiveContextualEmbedder(Embedder):
    """Baseline "contextual" embedder.

    Consumes the nested (per-document) chunk layout, but encodes each
    document's chunks independently — no actual context is shared. Useful as
    a control against genuinely contextual embedders.
    """

    def __init__(
        self,
        model: SentenceTransformer = None,
        batch_size: int = 16,
        show_progress_bar: bool = True,
    ):
        super().__init__(is_contextual_model=True)
        self.model: SentenceTransformer = model
        self.show_progress_bar = show_progress_bar
        self.batch_size = batch_size

    def embed_queries(self, queries):
        """Encode all queries with the wrapped SentenceTransformer."""
        return self.model.encode(
            queries,
            show_progress_bar=self.show_progress_bar,
            batch_size=self.batch_size,
        )

    def embed_documents(self, documents):
        """Encode each document's chunk list separately.

        Args:
            documents: list of documents, each a list of chunk strings.

        Returns:
            A list of per-document embedding arrays, preserving the nested
            input shape.
        """
        return [
            self.model.encode(
                chunks,
                show_progress_bar=False,
                batch_size=self.batch_size,
            )
            for chunks in tqdm(documents)
        ]
cde_benchmark/embedders/sentence_transformer_embedder.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from sentence_transformers import SentenceTransformer
2
+ from cde_benchmark.embedders.base_embedder import Embedder
3
+
4
+
5
class SentenceTransformerEmbedder(Embedder):
    """Non-contextual embedder: a thin wrapper around a SentenceTransformer
    that encodes queries and document chunks the same way."""

    def __init__(
        self,
        model: SentenceTransformer = None,
        batch_size: int = 16,
        show_progress_bar: bool = True,
    ):
        super().__init__(is_contextual_model=False)
        self.model = model
        self.show_progress_bar = show_progress_bar
        self.batch_size = batch_size

    def _encode(self, texts):
        # Single encode path shared by queries and documents.
        return self.model.encode(
            texts,
            show_progress_bar=self.show_progress_bar,
            batch_size=self.batch_size,
        )

    def embed_queries(self, queries):
        """Return one embedding per query string."""
        return self._encode(queries)

    def embed_documents(self, documents):
        """Return one embedding per chunk in the flat document list."""
        return self._encode(documents)
cde_benchmark/evaluators/__init__.py ADDED
File without changes
cde_benchmark/evaluators/eval_utils.py ADDED
@@ -0,0 +1,183 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # from mteb.evaluation.evaluators.RetrievalEvaluator
2
+ from __future__ import annotations
3
+
4
+ import logging
5
+ from typing import Dict
6
+
7
+ import numpy as np
8
+ import pytrec_eval
9
+ from mteb.evaluation.evaluators.RetrievalEvaluator import RetrievalEvaluator
10
+ from mteb.evaluation.evaluators.utils import (
11
+ confidence_scores,
12
+ hole,
13
+ mrr,
14
+ nAUC,
15
+ recall_cap,
16
+ top_k_accuracy,
17
+ )
18
+
19
+ logger = logging.getLogger(__name__)
20
+
21
+
22
class CustomRetrievalEvaluator:
    """
    Wrapper class for the MTEB retrieval evaluator.

    Computes NDCG / MAP / recall / precision via pytrec_eval, MRR via the
    MTEB helper functions, and abstention nAUC scores, each reported at every
    cutoff in ``k_values``.
    """

    def __init__(self, k_values: list[int] = [1, 3, 5, 10, 20, 50, 100]):
        # Cutoffs at which every metric is reported. The shared default list
        # is never mutated, so the mutable default is safe here.
        self.k_values = k_values

    def compute_mteb_metrics(
        self,
        relevant_docs: Dict[str, dict[str, int]],
        results: Dict[str, dict[str, float]],
        **kwargs,
    ) -> Dict[str, float]:
        """
        Compute the MTEB retrieval metrics.

        Args:
            relevant_docs: qrels mapping query id -> {doc id: relevance}.
            results: run mapping query id -> {doc id: similarity score}.
            **kwargs: supports ``ignore_identical_ids`` (default True), which
                drops hits whose doc id equals the query id.

        Returns:
            Flat dict mapping ``<metric>_at_<k>`` to its score.
        """
        ndcg, _map, recall, precision, naucs = self.evaluate(
            relevant_docs,
            results,
            self.k_values,
            ignore_identical_ids=kwargs.get("ignore_identical_ids", True),
        )

        # NOTE: this local deliberately shadows the imported ``mrr`` helper;
        # evaluate_custom returns an (avg scores, naucs) tuple, hence mrr[0].
        mrr = self.evaluate_custom(relevant_docs, results, self.k_values, "mrr")

        scores = {
            **{f"ndcg_at_{k.split('@')[1]}": v for (k, v) in ndcg.items()},
            **{f"map_at_{k.split('@')[1]}": v for (k, v) in _map.items()},
            **{f"recall_at_{k.split('@')[1]}": v for (k, v) in recall.items()},
            **{f"precision_at_{k.split('@')[1]}": v for (k, v) in precision.items()},
            **{f"mrr_at_{k.split('@')[1]}": v for (k, v) in mrr[0].items()},
            **{f"naucs_at_{k.split('@')[1]}": v for (k, v) in naucs.items()},
        }
        return scores

    @staticmethod
    def evaluate(
        qrels: dict[str, dict[str, int]],
        results: dict[str, dict[str, float]],
        k_values: list[int],
        ignore_identical_ids: bool = False,
    ) -> tuple[
        dict[str, float],
        dict[str, float],
        dict[str, float],
        dict[str, float],
        dict[str, float],
    ]:
        """
        Run pytrec_eval over the qrels/results and average per-query scores.

        Returns:
            (ndcg, map, recall, precision, naucs), each keyed by
            ``<Metric>@<k>``; the first four hold rounded averages, naucs
            holds abstention nAUC values.
        """
        if ignore_identical_ids:
            logger.debug(
                "For evaluation, ``ignore_identical_ids=True`` is set to True, the evaluator will ignore "
                "identical query and document ids."
            )
            # Remove identical ids from results dict
            for qid, rels in results.items():
                for pid in list(rels):
                    if qid == pid:
                        results[qid].pop(pid)
        else:
            logger.debug(
                "For evaluation, we DO NOT ignore identical query and document ids (default), please explicitly "
                "set ``ignore_identical_ids=True`` to ignore this."
            )

        all_ndcgs, all_aps, all_recalls, all_precisions = {}, {}, {}, {}

        for k in k_values:
            all_ndcgs[f"NDCG@{k}"] = []
            all_aps[f"MAP@{k}"] = []
            all_recalls[f"Recall@{k}"] = []
            all_precisions[f"P@{k}"] = []

        # pytrec_eval takes measure strings like "ndcg_cut.1,3,5,...".
        map_string = "map_cut." + ",".join([str(k) for k in k_values])
        ndcg_string = "ndcg_cut." + ",".join([str(k) for k in k_values])
        recall_string = "recall." + ",".join([str(k) for k in k_values])
        precision_string = "P." + ",".join([str(k) for k in k_values])
        evaluator = pytrec_eval.RelevanceEvaluator(
            qrels, {map_string, ndcg_string, recall_string, precision_string}
        )
        scores = evaluator.evaluate(results)

        # Collect per-query scores per cutoff.
        for query_id in scores.keys():
            for k in k_values:
                all_ndcgs[f"NDCG@{k}"].append(scores[query_id]["ndcg_cut_" + str(k)])
                all_aps[f"MAP@{k}"].append(scores[query_id]["map_cut_" + str(k)])
                all_recalls[f"Recall@{k}"].append(scores[query_id]["recall_" + str(k)])
                all_precisions[f"P@{k}"].append(scores[query_id]["P_" + str(k)])

        ndcg, _map, recall, precision = (
            all_ndcgs.copy(),
            all_aps.copy(),
            all_recalls.copy(),
            all_precisions.copy(),
        )

        # Average per-query scores and round; the *unaveraged* lists are kept
        # for the abstention computation below.
        for k in k_values:
            ndcg[f"NDCG@{k}"] = round(sum(ndcg[f"NDCG@{k}"]) / len(scores), 5)
            _map[f"MAP@{k}"] = round(sum(_map[f"MAP@{k}"]) / len(scores), 5)
            recall[f"Recall@{k}"] = round(sum(recall[f"Recall@{k}"]) / len(scores), 5)
            precision[f"P@{k}"] = round(sum(precision[f"P@{k}"]) / len(scores), 5)

        naucs = RetrievalEvaluator.evaluate_abstention(
            results, {**all_ndcgs, **all_aps, **all_recalls, **all_precisions}
        )

        return ndcg, _map, recall, precision, naucs

    @staticmethod
    def evaluate_custom(
        qrels: dict[str, dict[str, int]],
        results: dict[str, dict[str, float]],
        k_values: list[int],
        metric: str,
        output_type: str = "all",
    ) -> tuple[dict[str, float], dict[str, float]]:
        """
        Compute a custom metric (MRR, capped recall, hole, or top-k accuracy)
        plus its abstention nAUC scores.

        Raises:
            ValueError: if ``metric`` is not a recognized name. (Previously an
                unknown metric fell through the if/elif chain and crashed with
                an unbound ``metric_scores`` NameError.)
        """
        metric_lower = metric.lower()
        if metric_lower in ["mrr", "mrr@k", "mrr_cut"]:
            metric_scores = mrr(qrels, results, k_values, output_type)

        elif metric_lower in ["recall_cap", "r_cap", "r_cap@k"]:
            metric_scores = recall_cap(qrels, results, k_values, output_type)

        elif metric_lower in ["hole", "hole@k"]:
            metric_scores = hole(qrels, results, k_values, output_type)

        elif metric_lower in [
            "acc",
            "top_k_acc",
            "accuracy",
            "accuracy@k",
            "top_k_accuracy",
        ]:
            metric_scores = top_k_accuracy(qrels, results, k_values, output_type)

        else:
            raise ValueError(f"Unknown custom retrieval metric: {metric!r}")

        naucs = RetrievalEvaluator.evaluate_abstention(results, metric_scores)
        metric_scores_avg = {k: sum(v) / len(v) for k, v in metric_scores.items()}

        return metric_scores_avg, naucs

    @staticmethod
    def evaluate_abstention(
        results: dict[str, dict[str, float]],
        metric_scores: dict[str, list[float]],
    ) -> dict[str, float]:
        """Computes normalized Area Under the Curve on a set of evaluated instances as presented in
        the paper https://arxiv.org/abs/2402.12997

        Args:
            results: run mapping query id -> {doc id: similarity score}.
            metric_scores: per-query score lists, keyed by metric name.

        Returns:
            Dict mapping ``nAUC_<metric>_<confidence fn>`` to its value.
        """
        all_sim_scores = [list(results[qid].values()) for qid in list(results.keys())]
        all_conf_scores = [
            confidence_scores(sim_scores) for sim_scores in all_sim_scores
        ]
        # Regroup: one array of per-query confidences per confidence function.
        conf_fcts = list(all_conf_scores[0].keys())
        all_conf_scores = {
            fct: np.array([x[fct] for x in all_conf_scores]) for fct in conf_fcts
        }
        metric_scores = {k: np.array(v) for k, v in metric_scores.items()}
        naucs = {}

        for metric_name, scores in metric_scores.items():
            for fct, conf_scores in all_conf_scores.items():
                naucs[f"nAUC_{metric_name}_{fct}"] = nAUC(conf_scores, scores)

        return naucs
cde_benchmark/evaluators/nanobeir.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from cde_benchmark.formatters.data_formatter import BEIRDataFormatter
2
+ from cde_benchmark.embedders.sentence_transformer_embedder import (
3
+ SentenceTransformerEmbedder,
4
+ )
5
+
6
+
7
class NanoBEIR:
    """Evaluation harness that runs an embedder over every NanoBEIR task.

    ``path`` is the hub namespace holding the Nano* datasets; each task is
    loaded as ``<path>/<task>`` through ``BEIRDataFormatter`` and scored end
    to end with the supplied embedder.
    """

    def __init__(self, path, embedder, is_contextual_model=False):
        # The 13 NanoBEIR retrieval tasks.
        self.datasets = [
            "NanoClimateFEVER",
            "NanoDBPedia",
            "NanoFEVER",
            "NanoFiQA2018",
            "NanoHotpotQA",
            "NanoMSMARCO",
            "NanoNFCorpus",
            "NanoNQ",
            "NanoQuoraRetrieval",
            "NanoSCIDOCS",
            "NanoArguAna",
            "NanoSciFact",
            "NanoTouche2020",
        ]
        self.path = path
        self.embedder = embedder
        self.is_contextual_model = is_contextual_model

    def run_task(self, task):
        """Evaluate a single NanoBEIR task; returns its metric dict."""
        formatter = BEIRDataFormatter(f"{self.path}/{task}", "train")
        return self.embedder.compute_metrics_e2e(formatter)

    def run_all_tasks(self):
        """Evaluate every task in order, printing progress.

        Returns:
            Dict mapping task name -> metric dict.
        """
        results = {}
        for task in self.datasets:
            print(f"Running task: {task}")
            task_metrics = self.run_task(task)
            print(task_metrics)
            results[task] = task_metrics
        return results
41
+
42
+
43
if __name__ == "__main__":
    from sentence_transformers import SentenceTransformer

    model = SentenceTransformer("nomic-ai/modernbert-embed-base")
    # Disable ModernBERT's compiled reference path before encoding.
    model._modules["0"].auto_model.config.reference_compile = False

    # Run the full NanoBEIR suite with a plain (non-contextual) embedder.
    nanobeir = NanoBEIR("zeta-alpha-ai", SentenceTransformerEmbedder(model))
    results = nanobeir.run_all_tasks()
    print(results)
cde_benchmark/formatters/__init__.py ADDED
File without changes
cde_benchmark/formatters/data_formatter.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import List, Tuple
2
+ from datasets import DatasetDict, Dataset, load_dataset
3
+
4
+
5
class BaseDataFormatter:
    """Interface for dataset formatters consumed by embedders.

    Concrete formatters must expose the corpus both nested (chunks grouped by
    source document) and flattened, plus the evaluation queries paired with
    their gold document ids.
    """

    def get_nested(self) -> Tuple[List[List[str]], List[List[str]]]:
        """Return (chunks grouped per document, chunk ids grouped per document)."""
        raise NotImplementedError

    def get_flattened(self) -> Tuple[List[str], List[str]]:
        """Return (all chunks, all chunk ids) as flat, aligned lists."""
        raise NotImplementedError

    def get_queries(self) -> Tuple[List[str], List[str]]:
        """Return (queries, gold document id per query), aligned by position."""
        raise NotImplementedError
14
+
15
+
16
class DataFormatter(BaseDataFormatter):
    """Formatter for pre-chunked datasets with ``queries``/``documents`` configs.

    Chunk ids are expected to look like ``<doc_id>_<chunk_index>``; on load
    they are parsed into explicit ``doc_id`` / ``internal_id`` columns.
    """

    def __init__(self, dataset_path, split, query_key="queries", doc_key="documents"):
        self.doc_dataset = None
        self.queries_dataset = None
        self._load_from_path(dataset_path, split, query_key, doc_key)
        self.doc_dataset = self.doc_dataset.map(self.parse_id)

    def _load_from_path(self, path, split, query_key, doc_key):
        """Load the documents and queries configurations of the dataset."""
        self.doc_dataset = load_dataset(path, doc_key, split=split)
        self.queries_dataset = load_dataset(path, query_key, split=split)
        # mapping dataset is used to map queries to relevant documents

    @staticmethod
    def parse_id(sample):
        """Split ``chunk_id`` into its doc id and integer chunk index.

        Bug fix: uses ``rsplit("_", 1)`` instead of ``split("_")`` so that
        doc ids which themselves contain underscores (e.g. ``my_doc_3``) no
        longer raise an unpacking ValueError; behavior is unchanged for
        single-underscore ids.
        """
        doc_id, internal_id = sample["chunk_id"].rsplit("_", 1)
        return {"doc_id": doc_id, "internal_id": int(internal_id)}

    def get_nested(self) -> Tuple[List[List[str]], List[List[str]]]:
        """Group chunks and chunk ids by their parent document.

        TODO: verify that groups preserve chunk order within each document.
        """
        frame = self.doc_dataset.to_pandas()
        return (
            list(frame.groupby("doc_id")["chunk"].apply(list)),
            list(frame.groupby("doc_id")["chunk_id"].apply(list)),
        )

    def get_flattened(self) -> Tuple[List[str], List[str]]:
        """Return all chunks and their chunk ids as flat, aligned lists."""
        return self.doc_dataset["chunk"], self.doc_dataset["chunk_id"]

    def get_queries(self) -> Tuple[List[str], List[str]]:
        """Return queries and the chunk id each query targets."""
        return self.queries_dataset["query"], self.queries_dataset["chunk_id"]
45
+
46
+
47
class BEIRDataFormatter(BaseDataFormatter):
    """Formatter for BEIR-style datasets (``corpus``/``queries``/``qrels``).

    For the nested view, the shuffled corpus is grouped ``concat_num_docs``
    entries at a time into synthetic multi-chunk documents.
    """

    def __init__(
        self,
        dataset_path,
        split,
        query_key="queries",
        doc_key="corpus",
        concat_num_docs=2,
    ):
        self.doc_dataset = None
        self.queries_dataset = None
        self.mapping = None
        self._load_from_path(dataset_path, split, query_key, doc_key)
        self.concat_num_docs = concat_num_docs

    def _load_from_path(self, path, split, query_key, doc_key):
        """Load corpus and queries, plus the qrels query->document mapping."""
        self.doc_dataset = load_dataset(path, doc_key, split=split)
        self.queries_dataset = load_dataset(path, query_key, split=split)
        qrels = load_dataset(path, "qrels", split=split)
        # qrels pair each query id with its relevant corpus id
        self.mapping = {row["query-id"]: row["corpus-id"] for row in qrels}

    def get_nested(self) -> Tuple[List[List[str]], List[List[str]]]:
        """Build synthetic documents from runs of shuffled corpus entries."""
        self.doc_dataset = self.doc_dataset.shuffle(seed=42)

        # Give each run of concat_num_docs rows the same synthetic doc_id.
        group_ids = []
        for row_idx in range(0, len(self.doc_dataset)):
            group_ids.extend([row_idx] * self.concat_num_docs)
        group_ids = group_ids[: len(self.doc_dataset)]
        self.doc_dataset = self.doc_dataset.add_column("doc_id", group_ids)

        frame = self.doc_dataset.to_pandas()
        return (
            list(frame.groupby("doc_id")["text"].apply(list)),
            list(frame.groupby("doc_id")["_id"].apply(list)),
        )

    def get_flattened(self) -> Tuple[List[str], List[str]]:
        """Return all corpus texts and ids as flat, aligned lists."""
        return self.doc_dataset["text"], self.doc_dataset["_id"]

    def get_queries(self) -> Tuple[List[str], List[str]]:
        """Return queries with their gold corpus ids from the qrels mapping."""
        gold_docs = [self.mapping[row["_id"]] for row in self.queries_dataset]
        return self.queries_dataset["text"], gold_docs
cde_benchmark/utils/__init__.py ADDED
File without changes
cde_benchmark/utils/faiss_clustering.py ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Tuple
2
+
3
+ import gc
4
+
5
+ try:
6
+ import faiss
7
+ except ImportError as e:
8
+ print("Error loading faiss:", e)
9
+ faiss = None
10
+ import torch
11
+ import numpy as np
12
+
13
+
14
def paired_kmeans_faiss(
    q: torch.Tensor,
    X: torch.Tensor,
    k: int,
    max_iters: int = 100,
    n_redo: int = 3,
    seed: int = 42,
) -> Tuple[torch.Tensor, torch.Tensor]:
    """Cluster paired (query, document) vectors with spherical faiss k-means.

    Each row of ``q`` is concatenated with the matching row of ``X`` — and the
    mirrored (X, q) pairing is appended — so clusters reflect the joint
    query/document geometry.

    Returns:
        (centroids of shape (k, 2*dim), per-query cluster assignments of
        shape (len(q), 1)).

    References:
    https://github.com/facebookresearch/faiss/blob/dafdff110489db7587b169a0afee8470f220d295/faiss/python/extra_wrappers.py#L437
    https://github.com/facebookresearch/faiss/blob/dafdff110489db7587b169a0afee8470f220d295/faiss/Clustering.cpp#L56
    https://github.com/facebookresearch/faiss/blob/main/faiss/Clustering.h
    """
    assert q.shape == X.shape
    print("[paired_kmeans_faiss]", q.shape, X.shape, k)

    # Stack [q;X] rows on top of the mirrored [X;q] rows.
    paired = torch.cat(
        [
            torch.cat((q, X), dim=0),
            torch.cat((X, q), dim=0),
        ],
        dim=1,
    )
    # Spherical k-means expects unit-norm vectors.
    paired /= paired.norm(dim=1, keepdim=True, p=2)
    paired = paired.cpu()

    dim = paired[0].numel()
    # TODO: How to make kmeans use more gpu mem?
    print(
        f"[paired_kmeans_faiss] initializing Kmeans object (gpu={torch.cuda.is_available()})"
    )
    gc.collect()
    torch.cuda.empty_cache()
    kmeans = faiss.Kmeans(
        dim,
        k,
        niter=max_iters,
        nredo=n_redo,
        gpu=torch.cuda.is_available(),
        verbose=True,
        spherical=True,
        decode_block_size=2**27,
        seed=seed,
    )
    # otherwise the kmeans implementation sub-samples the training set
    # to <= 256 points per centroid
    kmeans.max_points_per_centroid = k * 2
    print("[paired_kmeans_faiss] calling kmeans.train()")
    paired = np.array(paired)

    kmeans.train(paired)

    # Assign each original query row to its nearest centroid.
    query_rows = paired[: len(q)]
    _distances, assignments = kmeans.index.search(query_rows, 1)
    assert assignments.shape == (q.shape[0], 1)

    print("Finished kmeans. Average distance:", _distances.mean())

    centroids = torch.tensor(kmeans.centroids)
    assert centroids.shape == (k, paired.shape[1])

    return centroids, assignments
73
+
74
+
75
if __name__ == "__main__":

    def test_cluster_tiny():
        """Three well-separated Gaussian blobs must be recovered exactly."""
        torch.manual_seed(42)
        dim = 8
        # NOTE: random draws happen in this exact order so the fixed-seed
        # expected assignment below stays valid.
        center_a = torch.randn((dim,), dtype=torch.float32) * 100
        center_b = torch.randn((dim,), dtype=torch.float32) * 100
        center_c = torch.randn((dim,), dtype=torch.float32)

        blob_a = center_a + torch.randn((6, dim), dtype=torch.float32)
        blob_b = center_b + torch.randn((9, dim), dtype=torch.float32)
        blob_c = center_c + torch.randn((5, dim), dtype=torch.float32)

        points = torch.cat([blob_a, blob_b, blob_c], dim=0)
        _centroids, assignments = paired_kmeans_faiss(
            q=points, X=points, k=3, max_iters=10, seed=42
        )
        assignments = torch.tensor(assignments.flatten())
        expected = torch.tensor(
            [2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
        )
        assert (assignments == expected).all()

    test_cluster_tiny()
pyproject.toml ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [build-system]
2
+ requires = ["hatchling", "hatch-vcs"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [tool.hatch.version]
6
+ source = "vcs"
7
+
8
+ [tool.hatch.build.targets.wheel]
9
+ include = ["cde_benchmark"]
10
+
11
+ [project]
12
+ name = "cde_benchmark"
13
+ dynamic = ["version"]
14
+ description = "Benchmarking code for contextualized embedding retrieval models (cde_benchmark)."
15
+ authors = [
16
+ { name = "Manuel Faysse", email = "[email protected]" },
17
+ ]
18
+ maintainers = [
19
+ { name = "Manuel Faysse", email = "[email protected]" },
20
+ ]
21
+ readme = "README.md"
22
+ requires-python = ">=3.10"
23
+ classifiers = [
24
+ "Programming Language :: Python :: 3",
25
+ "License :: OSI Approved :: MIT License",
26
+ "Intended Audience :: Science/Research",
27
+ "Intended Audience :: Developers",
28
+ "Operating System :: OS Independent",
29
+ "Topic :: Scientific/Engineering :: Artificial Intelligence",
30
+ ]
31
+
32
+ dependencies = [
33
+ "torch>=2.2.0",
34
+ "mteb>=1.16.3,<1.29.0",
35
+ "datasets>=2.19.1",
36
+ "transformers>=4.48.0,<4.49.0",
37
+
38
+ ]
39
+
40
+ [project.optional-dependencies]
41
+
42
+
43
+ dev = ["pytest>=8.0.0", "ruff>=0.4.0"]
44
+
45
+ all = ["cde_benchmark"]
46
+
47
+ [project.urls]
48
+ homepage = "https://github.com/ManuelFay/contextualized-embeddings-benchmark"
49
+
50
+ [tool.pytest.ini_options]
51
+ filterwarnings = ["ignore::Warning"]
52
+ markers = ["slow: marks test as slow"]
53
+ testpaths = ["tests"]
54
+
55
+ [tool.ruff]
56
+ line-length = 120
57
+
58
+ [tool.ruff.lint]
59
+ select = ["E", "F", "W", "I", "N"]
60
+
61
+ [tool.ruff.lint.per-file-ignores]
62
+ "__init__.py" = ["F401"]
results/metrics_e5-base-v2.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"model": "e5-base-v2", "date": "2025-01-19 23:33:10", "is_contextual": false, "metrics": {"chunked-mldr": {"ndcg_at_1": 0.45978, "ndcg_at_3": 0.61059, "ndcg_at_5": 0.64442, "ndcg_at_10": 0.67019, "ndcg_at_20": 0.68136, "ndcg_at_50": 0.68695, "ndcg_at_100": 0.68822, "map_at_1": 0.45978, "map_at_3": 0.57423, "map_at_5": 0.5931, "map_at_10": 0.60384, "map_at_20": 0.60696, "map_at_50": 0.60791, "map_at_100": 0.60802, "recall_at_1": 0.45978, "recall_at_3": 0.7155, "recall_at_5": 0.79725, "recall_at_10": 0.87639, "recall_at_20": 0.92021, "recall_at_50": 0.94768, "recall_at_100": 0.95553, "precision_at_1": 0.45978, "precision_at_3": 0.2385, "precision_at_5": 0.15945, "precision_at_10": 0.08764, "precision_at_20": 0.04601, "precision_at_50": 0.01895, "precision_at_100": 0.00956, "mrr_at_1": 0.4597776324395029, "mrr_at_3": 0.5742315238718116, "mrr_at_5": 0.5931000654022237, "mrr_at_10": 0.6038436783041099, "mrr_at_20": 0.606956226258994, "mrr_at_50": 0.6079123862051082, "mrr_at_100": 0.6080235608349778, "naucs_at_1_max": 0.37817911339834265, "naucs_at_1_std": -0.047995778502660236, "naucs_at_1_diff1": 0.3798863286461798, "naucs_at_3_max": 0.41971022878948283, "naucs_at_3_std": -0.0201412272795716, "naucs_at_3_diff1": 0.25466398697356407, "naucs_at_5_max": 0.4530335171575435, "naucs_at_5_std": 0.10587910909160908, "naucs_at_5_diff1": 0.2333453688850834, "naucs_at_10_max": 0.5530222236589474, "naucs_at_10_std": 0.22569123074048242, "naucs_at_10_diff1": 0.2602767560460191, "naucs_at_20_max": 0.6710512653210156, "naucs_at_20_std": 0.4777020775712352, "naucs_at_20_diff1": 0.31181041300859064, "naucs_at_50_max": 0.8042366394316232, "naucs_at_50_std": 0.7476231311772823, "naucs_at_50_diff1": 0.3428597809917119, "naucs_at_100_max": 0.823592812547546, "naucs_at_100_std": 0.7576874684266497, "naucs_at_100_diff1": 0.33772855025497106}, "NanoClimateFEVER": {"ndcg_at_1": 0.16, "ndcg_at_3": 0.18262, "ndcg_at_5": 0.19123, "ndcg_at_10": 0.24239, "ndcg_at_20": 0.26739, "ndcg_at_50": 
0.2997, "ndcg_at_100": 0.31586, "map_at_1": 0.16, "map_at_3": 0.17667, "map_at_5": 0.18167, "map_at_10": 0.2024, "map_at_20": 0.20911, "map_at_50": 0.21453, "map_at_100": 0.21594, "recall_at_1": 0.16, "recall_at_3": 0.2, "recall_at_5": 0.22, "recall_at_10": 0.38, "recall_at_20": 0.48, "recall_at_50": 0.64, "recall_at_100": 0.74, "precision_at_1": 0.16, "precision_at_3": 0.06667, "precision_at_5": 0.044, "precision_at_10": 0.038, "precision_at_20": 0.024, "precision_at_50": 0.0128, "precision_at_100": 0.0074, "mrr_at_1": 0.16, "mrr_at_3": 0.17666666666666667, "mrr_at_5": 0.18166666666666667, "mrr_at_10": 0.2023968253968254, "mrr_at_20": 0.20910763093116036, "mrr_at_50": 0.214531008284596, "mrr_at_100": 0.21593876545727192, "naucs_at_1_max": 0.4295206636964201, "naucs_at_1_std": -0.13212475034567528, "naucs_at_1_diff1": 0.6499462282992781, "naucs_at_3_max": 0.3229899903132065, "naucs_at_3_std": -0.2050048433968356, "naucs_at_3_diff1": 0.5131417500807233, "naucs_at_5_max": 0.3027006868259938, "naucs_at_5_std": -0.18590918979395227, "naucs_at_5_diff1": 0.4273238648724245, "naucs_at_10_max": 0.12881761235151887, "naucs_at_10_std": -0.10506069191380972, "naucs_at_10_diff1": 0.19210875159202537, "naucs_at_20_max": 0.2796655022758545, "naucs_at_20_std": 0.00542500264634253, "naucs_at_20_diff1": 0.18879009209272762, "naucs_at_50_max": 0.23323540682079458, "naucs_at_50_std": -0.12051347553966059, "naucs_at_50_diff1": 0.27040490484097596, "naucs_at_100_max": 0.2956490004460488, "naucs_at_100_std": -0.006366327399537655, "naucs_at_100_diff1": 0.11666193585012845}, "NanoDBPedia": {"ndcg_at_1": 0.06, "ndcg_at_3": 0.11262, "ndcg_at_5": 0.12897, "ndcg_at_10": 0.18618, "ndcg_at_20": 0.20602, "ndcg_at_50": 0.25436, "ndcg_at_100": 0.27441, "map_at_1": 0.06, "map_at_3": 0.09667, "map_at_5": 0.10567, "map_at_10": 0.12871, "map_at_20": 0.13398, "map_at_50": 0.14202, "map_at_100": 0.14396, "recall_at_1": 0.06, "recall_at_3": 0.16, "recall_at_5": 0.2, "recall_at_10": 0.38, "recall_at_20": 
0.46, "recall_at_50": 0.7, "recall_at_100": 0.82, "precision_at_1": 0.06, "precision_at_3": 0.05333, "precision_at_5": 0.04, "precision_at_10": 0.038, "precision_at_20": 0.023, "precision_at_50": 0.014, "precision_at_100": 0.0082, "mrr_at_1": 0.06, "mrr_at_3": 0.09666666666666666, "mrr_at_5": 0.10566666666666666, "mrr_at_10": 0.12871428571428573, "mrr_at_20": 0.13397691197691197, "mrr_at_50": 0.14202015608179958, "mrr_at_100": 0.14396238541287246, "naucs_at_1_max": -0.32841427421518654, "naucs_at_1_std": -0.283695554959306, "naucs_at_1_diff1": -0.21169841695733838, "naucs_at_3_max": -0.21462590259640502, "naucs_at_3_std": -0.24957750806575513, "naucs_at_3_diff1": -0.2087494238746351, "naucs_at_5_max": -0.04824023248304829, "naucs_at_5_std": -0.2845334194381661, "naucs_at_5_diff1": -0.20568291895382626, "naucs_at_10_max": 0.2379850804460269, "naucs_at_10_std": -0.28198996698983714, "naucs_at_10_diff1": -0.09079094429859906, "naucs_at_20_max": 0.3951013113978607, "naucs_at_20_std": -0.2526871829912483, "naucs_at_20_diff1": 0.11566056082625926, "naucs_at_50_max": 0.1290938569627099, "naucs_at_50_std": -0.46672671590704307, "naucs_at_50_diff1": 0.11605116195280142, "naucs_at_100_max": 0.07616477585637149, "naucs_at_100_std": -0.3852847229871143, "naucs_at_100_diff1": -0.3211256746337713}, "NanoFEVER": {"ndcg_at_1": 0.66, "ndcg_at_3": 0.77571, "ndcg_at_5": 0.7998, "ndcg_at_10": 0.81323, "ndcg_at_20": 0.81849, "ndcg_at_50": 0.82274, "ndcg_at_100": 0.82274, "map_at_1": 0.66, "map_at_3": 0.74667, "map_at_5": 0.75967, "map_at_10": 0.7655, "map_at_20": 0.76704, "map_at_50": 0.76784, "map_at_100": 0.76784, "recall_at_1": 0.66, "recall_at_3": 0.86, "recall_at_5": 0.92, "recall_at_10": 0.96, "recall_at_20": 0.98, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.66, "precision_at_3": 0.28667, "precision_at_5": 0.184, "precision_at_10": 0.096, "precision_at_20": 0.049, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.66, "mrr_at_3": 
0.7466666666666667, "mrr_at_5": 0.7596666666666667, "mrr_at_10": 0.7655, "mrr_at_20": 0.7670384615384616, "mrr_at_50": 0.7678384615384616, "mrr_at_100": 0.7678384615384616, "naucs_at_1_max": 0.20116830960204407, "naucs_at_1_std": 0.3569318596700851, "naucs_at_1_diff1": 0.5398453317401833, "naucs_at_3_max": 0.6928779271165304, "naucs_at_3_std": 0.4482471941249824, "naucs_at_3_diff1": 0.5595122627130389, "naucs_at_5_max": 0.8558590102707765, "naucs_at_5_std": 0.6679505135387499, "naucs_at_5_diff1": 0.2947012138188635, "naucs_at_10_max": 1.0, "naucs_at_10_std": 0.7222222222222252, "naucs_at_10_diff1": 0.49603174603175243, "naucs_at_20_max": 1.0, "naucs_at_20_std": 0.7222222222222204, "naucs_at_20_diff1": 0.12278244631187159, "naucs_at_50_max": 1.0, "naucs_at_50_std": 1.0, "naucs_at_50_diff1": 1.0, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoFiQA2018": {"ndcg_at_1": 0.2, "ndcg_at_3": 0.33571, "ndcg_at_5": 0.36666, "ndcg_at_10": 0.37822, "ndcg_at_20": 0.3991, "ndcg_at_50": 0.4156, "ndcg_at_100": 0.42856, "map_at_1": 0.2, "map_at_3": 0.3, "map_at_5": 0.316, "map_at_10": 0.32, "map_at_20": 0.32607, "map_at_50": 0.32898, "map_at_100": 0.33011, "recall_at_1": 0.2, "recall_at_3": 0.44, "recall_at_5": 0.52, "recall_at_10": 0.56, "recall_at_20": 0.64, "recall_at_50": 0.72, "recall_at_100": 0.8, "precision_at_1": 0.2, "precision_at_3": 0.14667, "precision_at_5": 0.104, "precision_at_10": 0.056, "precision_at_20": 0.032, "precision_at_50": 0.0144, "precision_at_100": 0.008, "mrr_at_1": 0.2, "mrr_at_3": 0.3, "mrr_at_5": 0.316, "mrr_at_10": 0.32, "mrr_at_20": 0.32607157548334015, "mrr_at_50": 0.3289784427804514, "mrr_at_100": 0.3301135889155975, "naucs_at_1_max": -0.1927026154342914, "naucs_at_1_std": -0.23874717468517936, "naucs_at_1_diff1": 0.18046496609622217, "naucs_at_3_max": 0.20512084355588078, "naucs_at_3_std": -0.059743174818603986, "naucs_at_3_diff1": 0.2973847679699327, "naucs_at_5_max": 0.18224604167800193, "naucs_at_5_std": 
0.04842483268948236, "naucs_at_5_diff1": 0.31935905109091894, "naucs_at_10_max": 0.17882693617987683, "naucs_at_10_std": 0.03917783329548003, "naucs_at_10_diff1": 0.2729389052918461, "naucs_at_20_max": 0.1428981990037043, "naucs_at_20_std": 0.07484991697534824, "naucs_at_20_diff1": 0.19846085068335675, "naucs_at_50_max": -0.05242673992674025, "naucs_at_50_std": 0.1525488400488401, "naucs_at_50_diff1": 0.18128052503052536, "naucs_at_100_max": -0.050149850149850454, "naucs_at_100_std": -0.013786213786213876, "naucs_at_100_diff1": 0.3052447552447543}, "NanoHotpotQA": {"ndcg_at_1": 0.28, "ndcg_at_3": 0.51713, "ndcg_at_5": 0.5421, "ndcg_at_10": 0.58177, "ndcg_at_20": 0.59731, "ndcg_at_50": 0.60091, "ndcg_at_100": 0.60392, "map_at_1": 0.28, "map_at_3": 0.46667, "map_at_5": 0.48067, "map_at_10": 0.49755, "map_at_20": 0.50202, "map_at_50": 0.50245, "map_at_100": 0.50265, "recall_at_1": 0.28, "recall_at_3": 0.66, "recall_at_5": 0.72, "recall_at_10": 0.84, "recall_at_20": 0.9, "recall_at_50": 0.92, "recall_at_100": 0.94, "precision_at_1": 0.28, "precision_at_3": 0.22, "precision_at_5": 0.144, "precision_at_10": 0.084, "precision_at_20": 0.045, "precision_at_50": 0.0184, "precision_at_100": 0.0094, "mrr_at_1": 0.28, "mrr_at_3": 0.4666666666666666, "mrr_at_5": 0.4806666666666667, "mrr_at_10": 0.49754761904761907, "mrr_at_20": 0.5020153735153735, "mrr_at_50": 0.5024501561240692, "mrr_at_100": 0.5026521763260894, "naucs_at_1_max": -0.11143080619029001, "naucs_at_1_std": -0.08070283256047849, "naucs_at_1_diff1": -0.1334990781607914, "naucs_at_3_max": 0.5630455707126023, "naucs_at_3_std": -0.3249361080686384, "naucs_at_3_diff1": -0.1814530850675431, "naucs_at_5_max": 0.4461614774114767, "naucs_at_5_std": -0.2626297313797315, "naucs_at_5_diff1": -0.1585775335775337, "naucs_at_10_max": 0.19599901816396692, "naucs_at_10_std": -0.06836033382425157, "naucs_at_10_diff1": -0.20317869415807496, "naucs_at_20_max": -0.24948646125116383, "naucs_at_20_std": -0.3269841269841225, 
"naucs_at_20_diff1": -0.6654528478057857, "naucs_at_50_max": -0.26902427637721865, "naucs_at_50_std": -0.589285714285714, "naucs_at_50_diff1": -0.9213352007469653, "naucs_at_100_max": -0.1708683473389389, "naucs_at_100_std": -0.20572673513850376, "naucs_at_100_diff1": -0.6484593837535053}, "NanoMSMARCO": {"ndcg_at_1": 0.4, "ndcg_at_3": 0.55357, "ndcg_at_5": 0.58539, "ndcg_at_10": 0.63039, "ndcg_at_20": 0.6401, "ndcg_at_50": 0.64847, "ndcg_at_100": 0.65457, "map_at_1": 0.4, "map_at_3": 0.51667, "map_at_5": 0.53367, "map_at_10": 0.55207, "map_at_20": 0.55452, "map_at_50": 0.55603, "map_at_100": 0.55646, "recall_at_1": 0.4, "recall_at_3": 0.66, "recall_at_5": 0.74, "recall_at_10": 0.88, "recall_at_20": 0.92, "recall_at_50": 0.96, "recall_at_100": 1.0, "precision_at_1": 0.4, "precision_at_3": 0.22, "precision_at_5": 0.148, "precision_at_10": 0.088, "precision_at_20": 0.046, "precision_at_50": 0.0192, "precision_at_100": 0.01, "mrr_at_1": 0.4, "mrr_at_3": 0.5166666666666666, "mrr_at_5": 0.5336666666666666, "mrr_at_10": 0.5520714285714285, "mrr_at_20": 0.554515873015873, "mrr_at_50": 0.5560301587301587, "mrr_at_100": 0.5564604652657283, "naucs_at_1_max": 0.031153450051493347, "naucs_at_1_std": -0.20914006179196723, "naucs_at_1_diff1": 0.7322090628218333, "naucs_at_3_max": 0.11772710677420421, "naucs_at_3_std": -0.4108997975372564, "naucs_at_3_diff1": 0.5816655049951868, "naucs_at_5_max": -0.09192652366084055, "naucs_at_5_std": -0.5305948663882238, "naucs_at_5_diff1": 0.4876525688333799, "naucs_at_10_max": 0.05806554247534223, "naucs_at_10_std": -0.4775692013999324, "naucs_at_10_diff1": 0.5750079541839022, "naucs_at_20_max": -0.2216386554621832, "naucs_at_20_std": -0.6628151260504168, "naucs_at_20_diff1": 0.37640056022409074, "naucs_at_50_max": -0.07586367880485702, "naucs_at_50_std": -0.3699813258636854, "naucs_at_50_diff1": -0.024276377217558327, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoNFCorpus": {"ndcg_at_1": 0.06, 
"ndcg_at_3": 0.08262, "ndcg_at_5": 0.09809, "ndcg_at_10": 0.1044, "ndcg_at_20": 0.12509, "ndcg_at_50": 0.13611, "ndcg_at_100": 0.14273, "map_at_1": 0.06, "map_at_3": 0.07667, "map_at_5": 0.08467, "map_at_10": 0.08717, "map_at_20": 0.09309, "map_at_50": 0.0945, "map_at_100": 0.09513, "recall_at_1": 0.06, "recall_at_3": 0.1, "recall_at_5": 0.14, "recall_at_10": 0.16, "recall_at_20": 0.24, "recall_at_50": 0.3, "recall_at_100": 0.34, "precision_at_1": 0.06, "precision_at_3": 0.03333, "precision_at_5": 0.028, "precision_at_10": 0.016, "precision_at_20": 0.012, "precision_at_50": 0.006, "precision_at_100": 0.0034, "mrr_at_1": 0.06, "mrr_at_3": 0.07666666666666667, "mrr_at_5": 0.08466666666666667, "mrr_at_10": 0.08716666666666667, "mrr_at_20": 0.0930910931174089, "mrr_at_50": 0.0944995329160926, "mrr_at_100": 0.09512511215612968, "naucs_at_1_max": -0.3816295501296843, "naucs_at_1_std": -0.3816295501296843, "naucs_at_1_diff1": 0.2599946337536893, "naucs_at_3_max": -0.03182184062248454, "naucs_at_3_std": 0.10340756640729819, "naucs_at_3_diff1": 0.27426884894016634, "naucs_at_5_max": 0.23764266621409497, "naucs_at_5_std": -0.03423989138274839, "naucs_at_5_diff1": 0.2711612711612714, "naucs_at_10_max": 0.18566600092180055, "naucs_at_10_std": 0.06375787371331998, "naucs_at_10_diff1": 0.1739514518359195, "naucs_at_20_max": 0.32015336688233875, "naucs_at_20_std": 0.18104481188593327, "naucs_at_20_diff1": 0.08803618499880171, "naucs_at_50_max": 0.35035805972165934, "naucs_at_50_std": 0.08979867585461407, "naucs_at_50_diff1": 0.16243750844480456, "naucs_at_100_max": 0.33294469911762364, "naucs_at_100_std": 0.218792230848724, "naucs_at_100_diff1": 0.2176793237764645}, "NanoNQ": {"ndcg_at_1": 0.5, "ndcg_at_3": 0.56047, "ndcg_at_5": 0.59318, "ndcg_at_10": 0.63143, "ndcg_at_20": 0.64573, "ndcg_at_50": 0.662, "ndcg_at_100": 0.6686, "map_at_1": 0.5, "map_at_3": 0.54667, "map_at_5": 0.56467, "map_at_10": 0.58019, "map_at_20": 0.58367, "map_at_50": 0.58641, "map_at_100": 0.58703, 
"recall_at_1": 0.5, "recall_at_3": 0.6, "recall_at_5": 0.68, "recall_at_10": 0.8, "recall_at_20": 0.86, "recall_at_50": 0.94, "recall_at_100": 0.98, "precision_at_1": 0.5, "precision_at_3": 0.2, "precision_at_5": 0.136, "precision_at_10": 0.08, "precision_at_20": 0.043, "precision_at_50": 0.0188, "precision_at_100": 0.0098, "mrr_at_1": 0.5, "mrr_at_3": 0.5466666666666666, "mrr_at_5": 0.5646666666666667, "mrr_at_10": 0.5801904761904761, "mrr_at_20": 0.583671679197995, "mrr_at_50": 0.5864124199387357, "mrr_at_100": 0.5870297038893529, "naucs_at_1_max": 0.463477681545636, "naucs_at_1_std": -0.19690872751499, "naucs_at_1_diff1": 0.5109393737508324, "naucs_at_3_max": 0.6492430988423858, "naucs_at_3_std": -0.06761650341347603, "naucs_at_3_diff1": 0.5206292668447609, "naucs_at_5_max": 0.5813350839610253, "naucs_at_5_std": 0.15050791237647684, "naucs_at_5_diff1": 0.4269228111395203, "naucs_at_10_max": 0.397502497502497, "naucs_at_10_std": -0.03966033966033968, "naucs_at_10_diff1": 0.2816183816183814, "naucs_at_20_max": 0.2675626991824867, "naucs_at_20_std": -0.27476790910350296, "naucs_at_20_diff1": 0.09401413329638521, "naucs_at_50_max": 0.043106131341422764, "naucs_at_50_std": -0.6288515406162511, "naucs_at_50_diff1": 0.13414254590724764, "naucs_at_100_max": 1.0, "naucs_at_100_std": -0.5634920634920756, "naucs_at_100_diff1": 0.5541549953314605}, "NanoQuoraRetrieval": {"ndcg_at_1": 0.64, "ndcg_at_3": 0.83404, "ndcg_at_5": 0.84266, "ndcg_at_10": 0.84266, "ndcg_at_20": 0.84266, "ndcg_at_50": 0.84266, "ndcg_at_100": 0.84591, "map_at_1": 0.64, "map_at_3": 0.79, "map_at_5": 0.795, "map_at_10": 0.795, "map_at_20": 0.795, "map_at_50": 0.795, "map_at_100": 0.79529, "recall_at_1": 0.64, "recall_at_3": 0.96, "recall_at_5": 0.98, "recall_at_10": 0.98, "recall_at_20": 0.98, "recall_at_50": 0.98, "recall_at_100": 1.0, "precision_at_1": 0.64, "precision_at_3": 0.32, "precision_at_5": 0.196, "precision_at_10": 0.098, "precision_at_20": 0.049, "precision_at_50": 0.0196, 
"precision_at_100": 0.01, "mrr_at_1": 0.64, "mrr_at_3": 0.79, "mrr_at_5": 0.795, "mrr_at_10": 0.795, "mrr_at_20": 0.795, "mrr_at_50": 0.795, "mrr_at_100": 0.7952857142857143, "naucs_at_1_max": 0.36728828713756556, "naucs_at_1_std": -0.35464299399667926, "naucs_at_1_diff1": 0.8600076638140246, "naucs_at_3_max": 0.09337068160597398, "naucs_at_3_std": -1.7399626517273643, "naucs_at_3_diff1": 0.7117180205415506, "naucs_at_5_max": 0.358076563958916, "naucs_at_5_std": -1.739962651727313, "naucs_at_5_diff1": 0.8692810457516293, "naucs_at_10_max": 0.358076563958916, "naucs_at_10_std": -1.739962651727313, "naucs_at_10_diff1": 0.8692810457516293, "naucs_at_20_max": 0.358076563958916, "naucs_at_20_std": -1.739962651727313, "naucs_at_20_diff1": 0.8692810457516293, "naucs_at_50_max": 0.35807656395891063, "naucs_at_50_std": -1.7399626517273847, "naucs_at_50_diff1": 0.8692810457516335, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoSCIDOCS": {"ndcg_at_1": 0.12, "ndcg_at_3": 0.19786, "ndcg_at_5": 0.21421, "ndcg_at_10": 0.2665, "ndcg_at_20": 0.28584, "ndcg_at_50": 0.31048, "ndcg_at_100": 0.31386, "map_at_1": 0.12, "map_at_3": 0.17667, "map_at_5": 0.18567, "map_at_10": 0.20755, "map_at_20": 0.2124, "map_at_50": 0.2167, "map_at_100": 0.21704, "recall_at_1": 0.12, "recall_at_3": 0.26, "recall_at_5": 0.3, "recall_at_10": 0.46, "recall_at_20": 0.54, "recall_at_50": 0.66, "recall_at_100": 0.68, "precision_at_1": 0.12, "precision_at_3": 0.08667, "precision_at_5": 0.06, "precision_at_10": 0.046, "precision_at_20": 0.027, "precision_at_50": 0.0132, "precision_at_100": 0.0068, "mrr_at_1": 0.12, "mrr_at_3": 0.17666666666666667, "mrr_at_5": 0.18566666666666667, "mrr_at_10": 0.20754761904761904, "mrr_at_20": 0.2124047619047619, "mrr_at_50": 0.21670424071717448, "mrr_at_100": 0.21726300398780218, "naucs_at_1_max": 0.12908729030423668, "naucs_at_1_std": -0.16496066723533317, "naucs_at_1_diff1": -0.011847218273149425, "naucs_at_3_max": 0.10103784221329269, 
"naucs_at_3_std": 0.0011852794079385335, "naucs_at_3_diff1": -0.15946344424850373, "naucs_at_5_max": 0.22778002972571243, "naucs_at_5_std": 0.16849074449398724, "naucs_at_5_diff1": -0.15987028779894605, "naucs_at_10_max": 0.3052219389766363, "naucs_at_10_std": 0.289979238390581, "naucs_at_10_diff1": -0.33179154292922636, "naucs_at_20_max": 0.46383818176766367, "naucs_at_20_std": 0.39754383040204494, "naucs_at_20_diff1": -0.1314495290489292, "naucs_at_50_max": 0.3040592120548305, "naucs_at_50_std": 0.375385840884198, "naucs_at_50_diff1": -0.21726575724385214, "naucs_at_100_max": 0.2755165503420637, "naucs_at_100_std": 0.364280284707346, "naucs_at_100_diff1": -0.14093704650680675}, "NanoArguAna": {"ndcg_at_1": 0.16, "ndcg_at_3": 0.41928, "ndcg_at_5": 0.46745, "ndcg_at_10": 0.51459, "ndcg_at_20": 0.53054, "ndcg_at_50": 0.54317, "ndcg_at_100": 0.54317, "map_at_1": 0.16, "map_at_3": 0.35667, "map_at_5": 0.38267, "map_at_10": 0.40322, "map_at_20": 0.40804, "map_at_50": 0.41036, "map_at_100": 0.41036, "recall_at_1": 0.16, "recall_at_3": 0.6, "recall_at_5": 0.72, "recall_at_10": 0.86, "recall_at_20": 0.92, "recall_at_50": 0.98, "recall_at_100": 0.98, "precision_at_1": 0.16, "precision_at_3": 0.2, "precision_at_5": 0.144, "precision_at_10": 0.086, "precision_at_20": 0.046, "precision_at_50": 0.0196, "precision_at_100": 0.0098, "mrr_at_1": 0.16, "mrr_at_3": 0.36, "mrr_at_5": 0.39, "mrr_at_10": 0.4070238095238095, "mrr_at_20": 0.41168512859689327, "mrr_at_50": 0.4140086115655376, "mrr_at_100": 0.4140086115655376, "naucs_at_1_max": -0.31855891842064843, "naucs_at_1_std": -0.047703180212014064, "naucs_at_1_diff1": -0.36076970348747894, "naucs_at_3_max": 0.22718907687741155, "naucs_at_3_std": -0.16663698426832932, "naucs_at_3_diff1": 0.03321460373998197, "naucs_at_5_max": 0.4417735042735038, "naucs_at_5_std": -0.3107066544566543, "naucs_at_5_diff1": 0.1657509157509156, "naucs_at_10_max": 0.7406124428432874, "naucs_at_10_std": -0.27726202023001023, "naucs_at_10_diff1": 
0.5847304974366084, "naucs_at_20_max": 0.5303454715219447, "naucs_at_20_std": -0.793534080298782, "naucs_at_20_diff1": 0.4159663865546221, "naucs_at_50_max": 0.5541549953314605, "naucs_at_50_std": 0.8692810457516335, "naucs_at_50_diff1": 0.35807656395891063, "naucs_at_100_max": 0.5541549953314605, "naucs_at_100_std": 0.8692810457516335, "naucs_at_100_diff1": 0.35807656395891063}, "NanoSciFact": {"ndcg_at_1": 0.58, "ndcg_at_3": 0.65047, "ndcg_at_5": 0.69953, "ndcg_at_10": 0.71296, "ndcg_at_20": 0.73302, "ndcg_at_50": 0.73302, "ndcg_at_100": 0.73302, "map_at_1": 0.58, "map_at_3": 0.63333, "map_at_5": 0.66033, "map_at_10": 0.66617, "map_at_20": 0.67156, "map_at_50": 0.67156, "map_at_100": 0.67156, "recall_at_1": 0.58, "recall_at_3": 0.7, "recall_at_5": 0.82, "recall_at_10": 0.86, "recall_at_20": 0.94, "recall_at_50": 0.94, "recall_at_100": 0.94, "precision_at_1": 0.58, "precision_at_3": 0.23333, "precision_at_5": 0.164, "precision_at_10": 0.086, "precision_at_20": 0.047, "precision_at_50": 0.0188, "precision_at_100": 0.0094, "mrr_at_1": 0.58, "mrr_at_3": 0.6333333333333333, "mrr_at_5": 0.6603333333333333, "mrr_at_10": 0.6661666666666666, "mrr_at_20": 0.6715601702219349, "mrr_at_50": 0.6715601702219349, "mrr_at_100": 0.6715601702219349, "naucs_at_1_max": -0.008213135212003468, "naucs_at_1_std": -0.007893896740865351, "naucs_at_1_diff1": 0.696636387381374, "naucs_at_3_max": -0.0883444424428038, "naucs_at_3_std": -0.1244820753017478, "naucs_at_3_diff1": 0.8241758241758232, "naucs_at_5_max": 0.052979403017954424, "naucs_at_5_std": 0.1738627602158834, "naucs_at_5_diff1": 0.7887983258068072, "naucs_at_10_max": 0.19585700429541475, "naucs_at_10_std": 0.11826243591520079, "naucs_at_10_diff1": 0.802895940141334, "naucs_at_20_max": 0.13414254590725072, "naucs_at_20_std": 0.4500466853408021, "naucs_at_20_diff1": 0.9128540305010903, "naucs_at_50_max": 0.13414254590724764, "naucs_at_50_std": 0.4500466853408014, "naucs_at_50_diff1": 0.9128540305010896, "naucs_at_100_max": 
0.13414254590724764, "naucs_at_100_std": 0.4500466853408014, "naucs_at_100_diff1": 0.9128540305010896}, "NanoTouche2020": {"ndcg_at_1": 0.0, "ndcg_at_3": 0.03328, "ndcg_at_5": 0.05086, "ndcg_at_10": 0.09805, "ndcg_at_20": 0.14502, "ndcg_at_50": 0.18048, "ndcg_at_100": 0.21119, "map_at_1": 0.0, "map_at_3": 0.02381, "map_at_5": 0.03401, "map_at_10": 0.05403, "map_at_20": 0.06717, "map_at_50": 0.07247, "map_at_100": 0.07545, "recall_at_1": 0.0, "recall_at_3": 0.06122, "recall_at_5": 0.10204, "recall_at_10": 0.2449, "recall_at_20": 0.42857, "recall_at_50": 0.61224, "recall_at_100": 0.79592, "precision_at_1": 0.0, "precision_at_3": 0.02041, "precision_at_5": 0.02041, "precision_at_10": 0.02449, "precision_at_20": 0.02143, "precision_at_50": 0.01224, "precision_at_100": 0.00796, "mrr_at_1": 0.0, "mrr_at_3": 0.023809523809523808, "mrr_at_5": 0.03401360544217687, "mrr_at_10": 0.054033041788143825, "mrr_at_20": 0.06716604224107225, "mrr_at_50": 0.07247442438220576, "mrr_at_100": 0.07545302421880992, "naucs_at_1_max": NaN, "naucs_at_1_std": NaN, "naucs_at_1_diff1": NaN, "naucs_at_3_max": -0.271176241171992, "naucs_at_3_std": 0.03222568214416961, "naucs_at_3_diff1": -0.3332859137740473, "naucs_at_5_max": -0.382167866402527, "naucs_at_5_std": -0.16738571065386357, "naucs_at_5_diff1": -0.4194336699637602, "naucs_at_10_max": -0.166231260141354, "naucs_at_10_std": -0.04967559912145591, "naucs_at_10_diff1": -0.08390409298693202, "naucs_at_20_max": -0.1484715255427597, "naucs_at_20_std": -0.23579324438634008, "naucs_at_20_diff1": 0.03174137204564278, "naucs_at_50_max": 0.10098192551287241, "naucs_at_50_std": -0.044216556800366666, "naucs_at_50_diff1": 0.12820487303298858, "naucs_at_100_max": 0.19370419503510294, "naucs_at_100_std": -0.057253660859158895, "naucs_at_100_diff1": 0.12031114268926098}}}
results/metrics_modernbert-embed-base.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"model": "modernbert-embed-base", "date": "2025-01-30 12:37:17", "is_contextual": false, "metrics": {"covid-qa": {"ndcg_at_1": 0.28443, "ndcg_at_3": 0.38418, "ndcg_at_5": 0.41249, "ndcg_at_10": 0.43739, "ndcg_at_20": 0.45793, "ndcg_at_50": 0.47347, "ndcg_at_100": 0.48291, "map_at_1": 0.28443, "map_at_3": 0.35929, "map_at_5": 0.37486, "map_at_10": 0.38522, "map_at_20": 0.39089, "map_at_50": 0.39339, "map_at_100": 0.39425, "recall_at_1": 0.28443, "recall_at_3": 0.45635, "recall_at_5": 0.52565, "recall_at_10": 0.60216, "recall_at_20": 0.68317, "recall_at_50": 0.76148, "recall_at_100": 0.81908, "precision_at_1": 0.28443, "precision_at_3": 0.15212, "precision_at_5": 0.10513, "precision_at_10": 0.06022, "precision_at_20": 0.03416, "precision_at_50": 0.01523, "precision_at_100": 0.00819, "mrr_at_1": 0.2844284428442844, "mrr_at_3": 0.3592859285928593, "mrr_at_5": 0.37485748574857486, "mrr_at_10": 0.3852235223522352, "mrr_at_20": 0.3908944333950534, "mrr_at_50": 0.3933919657811897, "mrr_at_100": 0.39424987674367984, "naucs_at_1_max": 0.3109801540702206, "naucs_at_1_std": 0.11671646904484745, "naucs_at_1_diff1": 0.3518153149652583, "naucs_at_3_max": 0.355139111864426, "naucs_at_3_std": 0.12709459147416713, "naucs_at_3_diff1": 0.22819427855214794, "naucs_at_5_max": 0.3962554647833346, "naucs_at_5_std": 0.15333868258511094, "naucs_at_5_diff1": 0.21560587062883907, "naucs_at_10_max": 0.4541785265395522, "naucs_at_10_std": 0.207047059393814, "naucs_at_10_diff1": 0.2029524639940473, "naucs_at_20_max": 0.5084662059847903, "naucs_at_20_std": 0.2657546726420526, "naucs_at_20_diff1": 0.20307884791438233, "naucs_at_50_max": 0.5796794113946185, "naucs_at_50_std": 0.3665720735602068, "naucs_at_50_diff1": 0.20561076859657873, "naucs_at_100_max": 0.640236532243946, "naucs_at_100_std": 0.5131279142094812, "naucs_at_100_diff1": 0.22822903875233982}, "chunked-mldr": {"ndcg_at_1": 0.5, "ndcg_at_3": 0.6625, "ndcg_at_5": 0.71111, "ndcg_at_10": 0.72043, "ndcg_at_20": 0.73288, "ndcg_at_50": 
0.73885, "ndcg_at_100": 0.73885, "map_at_1": 0.5, "map_at_3": 0.625, "map_at_5": 0.6515, "map_at_10": 0.65511, "map_at_20": 0.65843, "map_at_50": 0.65939, "map_at_100": 0.65939, "recall_at_1": 0.5, "recall_at_3": 0.77, "recall_at_5": 0.89, "recall_at_10": 0.92, "recall_at_20": 0.97, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.5, "precision_at_3": 0.25667, "precision_at_5": 0.178, "precision_at_10": 0.092, "precision_at_20": 0.0485, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.5, "mrr_at_3": 0.625, "mrr_at_5": 0.6515000000000001, "mrr_at_10": 0.6551111111111111, "mrr_at_20": 0.6584269105077928, "mrr_at_50": 0.659387516568399, "mrr_at_100": 0.659387516568399, "naucs_at_1_max": 0.05269820119920055, "naucs_at_1_std": -0.40546302465023293, "naucs_at_1_diff1": 0.49900066622251804, "naucs_at_3_max": 0.27548622456972743, "naucs_at_3_std": -0.30035137978111454, "naucs_at_3_diff1": 0.5053826011056142, "naucs_at_5_max": 0.40271454342410373, "naucs_at_5_std": -0.4267245082037608, "naucs_at_5_diff1": 0.6353835581135637, "naucs_at_10_max": 0.6365546218487413, "naucs_at_10_std": -0.3439542483660078, "naucs_at_10_diff1": 0.4882703081232498, "naucs_at_20_max": 0.6640211640211615, "naucs_at_20_std": -0.2563025210084044, "naucs_at_20_diff1": 0.7860255213196407, "naucs_at_50_max": NaN, "naucs_at_50_std": NaN, "naucs_at_50_diff1": NaN, "naucs_at_100_max": NaN, "naucs_at_100_std": NaN, "naucs_at_100_diff1": NaN}, "tech-qa": {"ndcg_at_1": 0.12605, "ndcg_at_3": 0.2507, "ndcg_at_5": 0.29517, "ndcg_at_10": 0.34055, "ndcg_at_20": 0.36757, "ndcg_at_50": 0.38445, "ndcg_at_100": 0.39411, "map_at_1": 0.12605, "map_at_3": 0.21849, "map_at_5": 0.24286, "map_at_10": 0.26271, "map_at_20": 0.2698, "map_at_50": 0.27259, "map_at_100": 0.27348, "recall_at_1": 0.12605, "recall_at_3": 0.34454, "recall_at_5": 0.45378, "recall_at_10": 0.58824, "recall_at_20": 0.69748, "recall_at_50": 0.78151, "recall_at_100": 0.84034, "precision_at_1": 0.12605, "precision_at_3": 
0.11485, "precision_at_5": 0.09076, "precision_at_10": 0.05882, "precision_at_20": 0.03487, "precision_at_50": 0.01563, "precision_at_100": 0.0084, "mrr_at_1": 0.12605042016806722, "mrr_at_3": 0.2184873949579832, "mrr_at_5": 0.24285714285714285, "mrr_at_10": 0.2627050820328131, "mrr_at_20": 0.2697985552877974, "mrr_at_50": 0.2725926492668331, "mrr_at_100": 0.27348048802580804, "naucs_at_1_max": 0.14060107674525096, "naucs_at_1_std": -0.0548743769296886, "naucs_at_1_diff1": 0.03291272917596544, "naucs_at_3_max": 0.2047896455201912, "naucs_at_3_std": -0.17197423330838923, "naucs_at_3_diff1": 0.19574577405867047, "naucs_at_5_max": 0.18110376873527054, "naucs_at_5_std": -0.17862845289957266, "naucs_at_5_diff1": 0.1816916535941928, "naucs_at_10_max": 0.1156523685748518, "naucs_at_10_std": -0.31179822475669183, "naucs_at_10_diff1": 0.22908692861852156, "naucs_at_20_max": 0.13968223983545405, "naucs_at_20_std": -0.4266366330114368, "naucs_at_20_diff1": 0.2503155150134539, "naucs_at_50_max": 0.29228410164538504, "naucs_at_50_std": -0.3593146348215093, "naucs_at_50_diff1": 0.19536189299889553, "naucs_at_100_max": 0.13761772182051205, "naucs_at_100_std": -0.3604327629374068, "naucs_at_100_diff1": -0.005642577157381685}, "NanoClimateFEVER": {"ndcg_at_1": 0.18, "ndcg_at_3": 0.20262, "ndcg_at_5": 0.21985, "ndcg_at_10": 0.25221, "ndcg_at_20": 0.28904, "ndcg_at_50": 0.31228, "ndcg_at_100": 0.32928, "map_at_1": 0.18, "map_at_3": 0.19667, "map_at_5": 0.20667, "map_at_10": 0.22006, "map_at_20": 0.23091, "map_at_50": 0.23443, "map_at_100": 0.23615, "recall_at_1": 0.18, "recall_at_3": 0.22, "recall_at_5": 0.26, "recall_at_10": 0.36, "recall_at_20": 0.5, "recall_at_50": 0.62, "recall_at_100": 0.72, "precision_at_1": 0.18, "precision_at_3": 0.07333, "precision_at_5": 0.052, "precision_at_10": 0.036, "precision_at_20": 0.025, "precision_at_50": 0.0124, "precision_at_100": 0.0072, "mrr_at_1": 0.18, "mrr_at_3": 0.19666666666666668, "mrr_at_5": 0.20666666666666667, "mrr_at_10": 
0.22005555555555556, "mrr_at_20": 0.23091234582411055, "mrr_at_50": 0.23442730024512085, "mrr_at_100": 0.23615411713180204, "naucs_at_1_max": -0.13563484545486434, "naucs_at_1_std": -0.24050099989474794, "naucs_at_1_diff1": 0.07514998421218826, "naucs_at_3_max": -0.1763992914193368, "naucs_at_3_std": -0.28495509214656434, "naucs_at_3_diff1": 0.0005283276874786015, "naucs_at_5_max": -0.09430198606573982, "naucs_at_5_std": -0.19528200977132795, "naucs_at_5_diff1": -0.01844410395767692, "naucs_at_10_max": 0.035296278801238534, "naucs_at_10_std": -0.18981262793260917, "naucs_at_10_diff1": -0.10064031910985154, "naucs_at_20_max": 0.1647435043304461, "naucs_at_20_std": -0.14339773484343807, "naucs_at_20_diff1": -0.08026648900732876, "naucs_at_50_max": 0.13897178721964154, "naucs_at_50_std": -0.16281574008553065, "naucs_at_50_diff1": 0.13909485278281955, "naucs_at_100_max": -0.01106532356532296, "naucs_at_100_std": -0.10077075702075629, "naucs_at_100_diff1": -0.06658272283272249}, "NanoDBPedia": {"ndcg_at_1": 0.06, "ndcg_at_3": 0.09262, "ndcg_at_5": 0.10809, "ndcg_at_10": 0.1518, "ndcg_at_20": 0.1882, "ndcg_at_50": 0.22966, "ndcg_at_100": 0.243, "map_at_1": 0.06, "map_at_3": 0.08333, "map_at_5": 0.09133, "map_at_10": 0.10841, "map_at_20": 0.11893, "map_at_50": 0.12633, "map_at_100": 0.12761, "recall_at_1": 0.06, "recall_at_3": 0.12, "recall_at_5": 0.16, "recall_at_10": 0.3, "recall_at_20": 0.44, "recall_at_50": 0.64, "recall_at_100": 0.72, "precision_at_1": 0.06, "precision_at_3": 0.04, "precision_at_5": 0.032, "precision_at_10": 0.03, "precision_at_20": 0.022, "precision_at_50": 0.0128, "precision_at_100": 0.0072, "mrr_at_1": 0.06, "mrr_at_3": 0.08333333333333334, "mrr_at_5": 0.09133333333333332, "mrr_at_10": 0.10841269841269842, "mrr_at_20": 0.11893066084242555, "mrr_at_50": 0.1263311069142712, "mrr_at_100": 0.1276135806673332, "naucs_at_1_max": -0.35345675699847956, "naucs_at_1_std": -0.15222252034701722, "naucs_at_1_diff1": -0.16876844647169306, "naucs_at_3_max": 
-0.0947777461851957, "naucs_at_3_std": -0.0788550848260828, "naucs_at_3_diff1": -0.056629703345654356, "naucs_at_5_max": 0.06429559072054074, "naucs_at_5_std": -0.1213320018436012, "naucs_at_5_diff1": 0.046819787985865696, "naucs_at_10_max": -0.11060667477367941, "naucs_at_10_std": -0.29493311714633175, "naucs_at_10_diff1": -0.1995946493717066, "naucs_at_20_max": 0.2564858798350471, "naucs_at_20_std": -0.17732421569139206, "naucs_at_20_diff1": 0.20525134415618299, "naucs_at_50_max": 0.17032826670072793, "naucs_at_50_std": -0.39475028739302653, "naucs_at_50_diff1": 0.12996551283688798, "naucs_at_100_max": 0.3044108669108674, "naucs_at_100_std": -0.39484126984126905, "naucs_at_100_diff1": 0.14400183150183227}, "NanoFEVER": {"ndcg_at_1": 0.6, "ndcg_at_3": 0.74833, "ndcg_at_5": 0.77329, "ndcg_at_10": 0.77329, "ndcg_at_20": 0.78805, "ndcg_at_50": 0.79176, "ndcg_at_100": 0.79176, "map_at_1": 0.6, "map_at_3": 0.71, "map_at_5": 0.724, "map_at_10": 0.724, "map_at_20": 0.72783, "map_at_50": 0.72831, "map_at_100": 0.72831, "recall_at_1": 0.6, "recall_at_3": 0.86, "recall_at_5": 0.92, "recall_at_10": 0.92, "recall_at_20": 0.98, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.6, "precision_at_3": 0.28667, "precision_at_5": 0.184, "precision_at_10": 0.092, "precision_at_20": 0.049, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.6, "mrr_at_3": 0.71, "mrr_at_5": 0.7240000000000001, "mrr_at_10": 0.7240000000000001, "mrr_at_20": 0.7278260432378079, "mrr_at_50": 0.7283138481158568, "mrr_at_100": 0.7283138481158568, "naucs_at_1_max": 0.2541703769664586, "naucs_at_1_std": -0.024606708222024733, "naucs_at_1_diff1": 0.6279311368358562, "naucs_at_3_max": 0.7309131217957593, "naucs_at_3_std": 0.18927532215602214, "naucs_at_3_diff1": 0.25890259110433794, "naucs_at_5_max": 0.8068394024276385, "naucs_at_5_std": 0.39554154995331575, "naucs_at_5_diff1": 0.21591970121382054, "naucs_at_10_max": 0.8068394024276385, "naucs_at_10_std": 0.39554154995331575, 
"naucs_at_10_diff1": 0.21591970121382054, "naucs_at_20_max": 0.358076563958916, "naucs_at_20_std": -0.563492063492042, "naucs_at_20_diff1": 0.12278244631187159, "naucs_at_50_max": 1.0, "naucs_at_50_std": 1.0, "naucs_at_50_diff1": 1.0, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoFiQA2018": {"ndcg_at_1": 0.24, "ndcg_at_3": 0.38357, "ndcg_at_5": 0.45671, "ndcg_at_10": 0.46249, "ndcg_at_20": 0.46249, "ndcg_at_50": 0.47963, "ndcg_at_100": 0.49601, "map_at_1": 0.24, "map_at_3": 0.35, "map_at_5": 0.39, "map_at_10": 0.392, "map_at_20": 0.392, "map_at_50": 0.3953, "map_at_100": 0.39679, "recall_at_1": 0.24, "recall_at_3": 0.48, "recall_at_5": 0.66, "recall_at_10": 0.68, "recall_at_20": 0.68, "recall_at_50": 0.76, "recall_at_100": 0.86, "precision_at_1": 0.24, "precision_at_3": 0.16, "precision_at_5": 0.132, "precision_at_10": 0.068, "precision_at_20": 0.034, "precision_at_50": 0.0152, "precision_at_100": 0.0086, "mrr_at_1": 0.24, "mrr_at_3": 0.35, "mrr_at_5": 0.39, "mrr_at_10": 0.392, "mrr_at_20": 0.392, "mrr_at_50": 0.3953008658008658, "mrr_at_100": 0.3967894175748325, "naucs_at_1_max": -0.033249460819554225, "naucs_at_1_std": -0.2066259285885454, "naucs_at_1_diff1": 0.3411214953271028, "naucs_at_3_max": -0.1927596062241982, "naucs_at_3_std": -0.44551180268868407, "naucs_at_3_diff1": 0.2538636604212979, "naucs_at_5_max": -0.06913604832553338, "naucs_at_5_std": -0.43841481628995277, "naucs_at_5_diff1": 0.2969232301105246, "naucs_at_10_max": -0.09847280768433422, "naucs_at_10_std": -0.44371501623937526, "naucs_at_10_diff1": 0.2447653928546748, "naucs_at_20_max": -0.09847280768433422, "naucs_at_20_std": -0.44371501623937526, "naucs_at_20_diff1": 0.2447653928546748, "naucs_at_50_max": -0.10080470710391927, "naucs_at_50_std": -0.38305788699489424, "naucs_at_50_diff1": 0.24097949294799717, "naucs_at_100_max": 0.45455175280587484, "naucs_at_100_std": -0.31751420257725066, "naucs_at_100_diff1": 0.20458639323818634}, "NanoHotpotQA": 
{"ndcg_at_1": 0.18, "ndcg_at_3": 0.32619, "ndcg_at_5": 0.33392, "ndcg_at_10": 0.37235, "ndcg_at_20": 0.39143, "ndcg_at_50": 0.41971, "ndcg_at_100": 0.43611, "map_at_1": 0.18, "map_at_3": 0.29333, "map_at_5": 0.29733, "map_at_10": 0.313, "map_at_20": 0.31764, "map_at_50": 0.32238, "map_at_100": 0.32388, "recall_at_1": 0.18, "recall_at_3": 0.42, "recall_at_5": 0.44, "recall_at_10": 0.56, "recall_at_20": 0.64, "recall_at_50": 0.78, "recall_at_100": 0.88, "precision_at_1": 0.18, "precision_at_3": 0.14, "precision_at_5": 0.088, "precision_at_10": 0.056, "precision_at_20": 0.032, "precision_at_50": 0.0156, "precision_at_100": 0.0088, "mrr_at_1": 0.18, "mrr_at_3": 0.29333333333333333, "mrr_at_5": 0.29733333333333334, "mrr_at_10": 0.313, "mrr_at_20": 0.3176359649122807, "mrr_at_50": 0.32237769330366817, "mrr_at_100": 0.32387960757485346, "naucs_at_1_max": -0.29688103006701044, "naucs_at_1_std": -0.35799740378205797, "naucs_at_1_diff1": -0.15689576535803249, "naucs_at_3_max": 0.013868367161780162, "naucs_at_3_std": -0.38582575109521233, "naucs_at_3_diff1": -0.12502268190890936, "naucs_at_5_max": -0.0419428929373075, "naucs_at_5_std": -0.4370726105340083, "naucs_at_5_diff1": -0.18178733622174634, "naucs_at_10_max": 0.23143311378605466, "naucs_at_10_std": -0.5790086304792187, "naucs_at_10_diff1": 0.05675107880990175, "naucs_at_20_max": 0.19980201813769305, "naucs_at_20_std": -0.67087112019415, "naucs_at_20_diff1": -0.01947886064631492, "naucs_at_50_max": 0.03231789307738679, "naucs_at_50_std": -0.6213196086613806, "naucs_at_50_diff1": -0.09931840311587146, "naucs_at_100_max": 0.07588291441298209, "naucs_at_100_std": -0.5960865415208394, "naucs_at_100_diff1": -0.42379891823098975}, "NanoMSMARCO": {"ndcg_at_1": 0.36, "ndcg_at_3": 0.52619, "ndcg_at_5": 0.55713, "ndcg_at_10": 0.5829, "ndcg_at_20": 0.62767, "ndcg_at_50": 0.63174, "ndcg_at_100": 0.63174, "map_at_1": 0.36, "map_at_3": 0.48667, "map_at_5": 0.50267, "map_at_10": 0.51322, "map_at_20": 0.5251, "map_at_50": 0.52579, 
"map_at_100": 0.52579, "recall_at_1": 0.36, "recall_at_3": 0.64, "recall_at_5": 0.72, "recall_at_10": 0.8, "recall_at_20": 0.98, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.36, "precision_at_3": 0.21333, "precision_at_5": 0.144, "precision_at_10": 0.08, "precision_at_20": 0.049, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.36, "mrr_at_3": 0.48666666666666664, "mrr_at_5": 0.5026666666666667, "mrr_at_10": 0.5132222222222222, "mrr_at_20": 0.5251021180401986, "mrr_at_50": 0.5257917732126124, "mrr_at_100": 0.5257917732126124, "naucs_at_1_max": 0.16971080669710809, "naucs_at_1_std": -0.05316748018684722, "naucs_at_1_diff1": 0.5812995328819608, "naucs_at_3_max": 0.2566419721548089, "naucs_at_3_std": -0.03244347937156718, "naucs_at_3_diff1": 0.5522735981606842, "naucs_at_5_max": 0.11404914529914512, "naucs_at_5_std": -0.09424603174603138, "naucs_at_5_diff1": 0.45127442002442003, "naucs_at_10_max": 0.11883116883116829, "naucs_at_10_std": -0.13986013986013895, "naucs_at_10_diff1": 0.4506493506493507, "naucs_at_20_max": -1.739962651727313, "naucs_at_20_std": -0.1713352007469462, "naucs_at_20_diff1": -1.1517273576096776, "naucs_at_50_max": 1.0, "naucs_at_50_std": 1.0, "naucs_at_50_diff1": 1.0, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoNFCorpus": {"ndcg_at_1": 0.02, "ndcg_at_3": 0.03262, "ndcg_at_5": 0.04897, "ndcg_at_10": 0.06077, "ndcg_at_20": 0.07147, "ndcg_at_50": 0.087, "ndcg_at_100": 0.09362, "map_at_1": 0.02, "map_at_3": 0.03, "map_at_5": 0.039, "map_at_10": 0.04322, "map_at_20": 0.04647, "map_at_50": 0.04882, "map_at_100": 0.04943, "recall_at_1": 0.02, "recall_at_3": 0.04, "recall_at_5": 0.08, "recall_at_10": 0.12, "recall_at_20": 0.16, "recall_at_50": 0.24, "recall_at_100": 0.28, "precision_at_1": 0.02, "precision_at_3": 0.01333, "precision_at_5": 0.016, "precision_at_10": 0.012, "precision_at_20": 0.008, "precision_at_50": 0.0048, "precision_at_100": 0.0028, "mrr_at_1": 0.02, "mrr_at_3": 
0.03, "mrr_at_5": 0.039, "mrr_at_10": 0.04322222222222222, "mrr_at_20": 0.046468975468975465, "mrr_at_50": 0.04881656295571991, "mrr_at_100": 0.049434286880642685, "naucs_at_1_max": -0.4995975315266971, "naucs_at_1_std": -0.41507915213308294, "naucs_at_1_diff1": 0.6619264824255433, "naucs_at_3_max": 0.2502012342366514, "naucs_at_3_std": 0.12342366514623024, "naucs_at_3_diff1": 0.8309632412127718, "naucs_at_5_max": 0.05601019586799033, "naucs_at_5_std": 0.10229407029782675, "naucs_at_5_diff1": 0.2602629460692245, "naucs_at_10_max": 0.040517486494171216, "naucs_at_10_std": 0.10093829968723352, "naucs_at_10_diff1": 0.2589801914510473, "naucs_at_20_max": 0.0937163926870487, "naucs_at_20_std": 0.013634966968812444, "naucs_at_20_diff1": 0.14172683976033185, "naucs_at_50_max": -0.015127006949436793, "naucs_at_50_std": 0.07904984423676013, "naucs_at_50_diff1": 0.02159717229810695, "naucs_at_100_max": -0.013548242918599072, "naucs_at_100_std": 0.1564053857757416, "naucs_at_100_diff1": 0.09930722386725484}, "NanoNQ": {"ndcg_at_1": 0.46, "ndcg_at_3": 0.56309, "ndcg_at_5": 0.61901, "ndcg_at_10": 0.62503, "ndcg_at_20": 0.63523, "ndcg_at_50": 0.63897, "ndcg_at_100": 0.64835, "map_at_1": 0.46, "map_at_3": 0.53667, "map_at_5": 0.56667, "map_at_10": 0.56889, "map_at_20": 0.57176, "map_at_50": 0.57226, "map_at_100": 0.57299, "recall_at_1": 0.46, "recall_at_3": 0.64, "recall_at_5": 0.78, "recall_at_10": 0.8, "recall_at_20": 0.84, "recall_at_50": 0.86, "recall_at_100": 0.92, "precision_at_1": 0.46, "precision_at_3": 0.21333, "precision_at_5": 0.156, "precision_at_10": 0.08, "precision_at_20": 0.042, "precision_at_50": 0.0172, "precision_at_100": 0.0092, "mrr_at_1": 0.46, "mrr_at_3": 0.5366666666666666, "mrr_at_5": 0.5666666666666667, "mrr_at_10": 0.5688888888888889, "mrr_at_20": 0.571759702286018, "mrr_at_50": 0.5722597022860181, "mrr_at_100": 0.5729875934424807, "naucs_at_1_max": 0.5522062495072401, "naucs_at_1_std": -0.14995663714488444, "naucs_at_1_diff1": 0.4830096449501983, 
"naucs_at_3_max": 0.5385425980329537, "naucs_at_3_std": -0.1763954528036788, "naucs_at_3_diff1": 0.4041703921318173, "naucs_at_5_max": 0.4733620809570179, "naucs_at_5_std": -0.20433996383363512, "naucs_at_5_diff1": 0.27361246348588103, "naucs_at_10_max": 0.4029470529470527, "naucs_at_10_std": -0.3131368631368631, "naucs_at_10_diff1": 0.2564435564435555, "naucs_at_20_max": 0.3751227295041737, "naucs_at_20_std": -0.3475699558173772, "naucs_at_20_diff1": 0.20612420225822345, "naucs_at_50_max": 0.27511431342663045, "naucs_at_50_std": -0.54080642926424, "naucs_at_50_diff1": 0.15047803796591358, "naucs_at_100_max": 0.6136788048552735, "naucs_at_100_std": -0.2923669467787128, "naucs_at_100_diff1": -0.04318394024276533}, "NanoQuoraRetrieval": {"ndcg_at_1": 0.76, "ndcg_at_3": 0.89357, "ndcg_at_5": 0.89357, "ndcg_at_10": 0.89935, "ndcg_at_20": 0.89935, "ndcg_at_50": 0.89935, "ndcg_at_100": 0.89935, "map_at_1": 0.76, "map_at_3": 0.86333, "map_at_5": 0.86333, "map_at_10": 0.86533, "map_at_20": 0.86533, "map_at_50": 0.86533, "map_at_100": 0.86533, "recall_at_1": 0.76, "recall_at_3": 0.98, "recall_at_5": 0.98, "recall_at_10": 1.0, "recall_at_20": 1.0, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.76, "precision_at_3": 0.32667, "precision_at_5": 0.196, "precision_at_10": 0.1, "precision_at_20": 0.05, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.76, "mrr_at_3": 0.8633333333333333, "mrr_at_5": 0.8633333333333333, "mrr_at_10": 0.8653333333333333, "mrr_at_20": 0.8653333333333333, "mrr_at_50": 0.8653333333333333, "mrr_at_100": 0.8653333333333333, "naucs_at_1_max": -0.01297914683741451, "naucs_at_1_std": -0.43969023102881377, "naucs_at_1_diff1": 0.7732975685731596, "naucs_at_3_max": -0.17133520074696149, "naucs_at_3_std": -1.7399626517273423, "naucs_at_3_diff1": 0.5541549953314663, "naucs_at_5_max": -0.1713352007469462, "naucs_at_5_std": -1.739962651727313, "naucs_at_5_diff1": 0.5541549953314721, "naucs_at_10_max": 1.0, "naucs_at_10_std": 1.0, 
"naucs_at_10_diff1": 1.0, "naucs_at_20_max": 1.0, "naucs_at_20_std": 1.0, "naucs_at_20_diff1": 1.0, "naucs_at_50_max": 1.0, "naucs_at_50_std": 1.0, "naucs_at_50_diff1": 1.0, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoSCIDOCS": {"ndcg_at_1": 0.1, "ndcg_at_3": 0.14786, "ndcg_at_5": 0.20464, "ndcg_at_10": 0.2251, "ndcg_at_20": 0.27214, "ndcg_at_50": 0.30745, "ndcg_at_100": 0.31411, "map_at_1": 0.1, "map_at_3": 0.13667, "map_at_5": 0.16767, "map_at_10": 0.17671, "map_at_20": 0.19044, "map_at_50": 0.1959, "map_at_100": 0.19654, "recall_at_1": 0.1, "recall_at_3": 0.18, "recall_at_5": 0.32, "recall_at_10": 0.38, "recall_at_20": 0.56, "recall_at_50": 0.74, "recall_at_100": 0.78, "precision_at_1": 0.1, "precision_at_3": 0.06, "precision_at_5": 0.064, "precision_at_10": 0.038, "precision_at_20": 0.028, "precision_at_50": 0.0148, "precision_at_100": 0.0078, "mrr_at_1": 0.1, "mrr_at_3": 0.13666666666666666, "mrr_at_5": 0.16766666666666666, "mrr_at_10": 0.1767142857142857, "mrr_at_20": 0.19043623043623042, "mrr_at_50": 0.19590281916155888, "mrr_at_100": 0.1965378998067202, "naucs_at_1_max": 0.1283606117520794, "naucs_at_1_std": -0.2128253286825867, "naucs_at_1_diff1": 0.23241212771666217, "naucs_at_3_max": 0.15900080693260357, "naucs_at_3_std": 0.08918359470932885, "naucs_at_3_diff1": -0.06308107918464725, "naucs_at_5_max": 0.1368404152841697, "naucs_at_5_std": 0.18219522637268537, "naucs_at_5_diff1": -0.12439794498555087, "naucs_at_10_max": 0.11725105918436285, "naucs_at_10_std": 0.2656408390299687, "naucs_at_10_diff1": -0.19156291425155322, "naucs_at_20_max": 0.01740290710878925, "naucs_at_20_std": 0.22677719736543236, "naucs_at_20_diff1": -0.3766182148535091, "naucs_at_50_max": -0.08182960950488574, "naucs_at_50_std": 0.18482624386683422, "naucs_at_50_diff1": -0.4373301974777989, "naucs_at_100_max": 0.1936291556544729, "naucs_at_100_std": 0.06996800667686785, "naucs_at_100_diff1": -0.27129410673714455}, "NanoArguAna": {"ndcg_at_1": 
0.16, "ndcg_at_3": 0.41404, "ndcg_at_5": 0.48207, "ndcg_at_10": 0.53523, "ndcg_at_20": 0.55566, "ndcg_at_50": 0.55566, "ndcg_at_100": 0.55566, "map_at_1": 0.16, "map_at_3": 0.35, "map_at_5": 0.389, "map_at_10": 0.41178, "map_at_20": 0.41751, "map_at_50": 0.41751, "map_at_100": 0.41751, "recall_at_1": 0.16, "recall_at_3": 0.6, "recall_at_5": 0.76, "recall_at_10": 0.92, "recall_at_20": 1.0, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.16, "precision_at_3": 0.2, "precision_at_5": 0.152, "precision_at_10": 0.092, "precision_at_20": 0.05, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.16, "mrr_at_3": 0.35666666666666663, "mrr_at_5": 0.3906666666666666, "mrr_at_10": 0.412968253968254, "mrr_at_20": 0.41885461760461756, "mrr_at_50": 0.41885461760461756, "mrr_at_100": 0.41885461760461756, "naucs_at_1_max": -0.30123674911660786, "naucs_at_1_std": -0.12536487939775703, "naucs_at_1_diff1": -0.27592564141957293, "naucs_at_3_max": 0.23443158207183123, "naucs_at_3_std": 0.03906203621252589, "naucs_at_3_diff1": 0.071148708815672, "naucs_at_5_max": 0.3689971445876948, "naucs_at_5_std": -0.07069308644111846, "naucs_at_5_diff1": 0.11521156009345, "naucs_at_10_max": 0.17331932773109288, "naucs_at_10_std": -1.0683940242763748, "naucs_at_10_diff1": 0.3167600373482728, "naucs_at_20_max": 1.0, "naucs_at_20_std": 1.0, "naucs_at_20_diff1": 1.0, "naucs_at_50_max": 1.0, "naucs_at_50_std": 1.0, "naucs_at_50_diff1": 1.0, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoSciFact": {"ndcg_at_1": 0.52, "ndcg_at_3": 0.64833, "ndcg_at_5": 0.65694, "ndcg_at_10": 0.67565, "ndcg_at_20": 0.69148, "ndcg_at_50": 0.69148, "ndcg_at_100": 0.69792, "map_at_1": 0.52, "map_at_3": 0.61667, "map_at_5": 0.62167, "map_at_10": 0.62897, "map_at_20": 0.63366, "map_at_50": 0.63366, "map_at_100": 0.63422, "recall_at_1": 0.52, "recall_at_3": 0.74, "recall_at_5": 0.76, "recall_at_10": 0.82, "recall_at_20": 0.88, "recall_at_50": 0.88, "recall_at_100": 
0.92, "precision_at_1": 0.52, "precision_at_3": 0.24667, "precision_at_5": 0.152, "precision_at_10": 0.082, "precision_at_20": 0.044, "precision_at_50": 0.0176, "precision_at_100": 0.0092, "mrr_at_1": 0.52, "mrr_at_3": 0.6166666666666667, "mrr_at_5": 0.6216666666666666, "mrr_at_10": 0.628968253968254, "mrr_at_20": 0.6336582306582307, "mrr_at_50": 0.6336582306582307, "mrr_at_100": 0.6342173937173937, "naucs_at_1_max": 0.20806355079166425, "naucs_at_1_std": -0.34308177811632856, "naucs_at_1_diff1": 0.7306708743674843, "naucs_at_3_max": 0.20952110620007275, "naucs_at_3_std": -0.25850533230607003, "naucs_at_3_diff1": 0.5222821458983815, "naucs_at_5_max": 0.14298693432551624, "naucs_at_5_std": -0.25992904733062244, "naucs_at_5_diff1": 0.4903089036947301, "naucs_at_10_max": 0.23802180856922625, "naucs_at_10_std": -0.1013878180416342, "naucs_at_10_diff1": 0.5169071483643584, "naucs_at_20_max": 0.3208717785555198, "naucs_at_20_std": 0.20839961819917424, "naucs_at_20_diff1": 0.4545020680878155, "naucs_at_50_max": 0.3208717785555213, "naucs_at_50_std": 0.20839961819917246, "naucs_at_50_diff1": 0.4545020680878144, "naucs_at_100_max": 0.13130252100840237, "naucs_at_100_std": -0.08274976657329618, "naucs_at_100_diff1": 0.6202147525676938}, "NanoTouche2020": {"ndcg_at_1": 0.02041, "ndcg_at_3": 0.07677, "ndcg_at_5": 0.09346, "ndcg_at_10": 0.11964, "ndcg_at_20": 0.16558, "ndcg_at_50": 0.21955, "ndcg_at_100": 0.24624, "map_at_1": 0.02041, "map_at_3": 0.06122, "map_at_5": 0.07041, "map_at_10": 0.08106, "map_at_20": 0.09338, "map_at_50": 0.10257, "map_at_100": 0.10498, "recall_at_1": 0.02041, "recall_at_3": 0.12245, "recall_at_5": 0.16327, "recall_at_10": 0.2449, "recall_at_20": 0.42857, "recall_at_50": 0.69388, "recall_at_100": 0.85714, "precision_at_1": 0.02041, "precision_at_3": 0.04082, "precision_at_5": 0.03265, "precision_at_10": 0.02449, "precision_at_20": 0.02143, "precision_at_50": 0.01388, "precision_at_100": 0.00857, "mrr_at_1": 0.02040816326530612, "mrr_at_3": 
0.061224489795918366, "mrr_at_5": 0.07040816326530612, "mrr_at_10": 0.08105766115970198, "mrr_at_20": 0.09338171165902259, "mrr_at_50": 0.10256824075659578, "mrr_at_100": 0.10497884422978679, "naucs_at_1_max": -0.5101944889290618, "naucs_at_1_std": -0.20715679015408345, "naucs_at_1_diff1": 0.6615448251904438, "naucs_at_3_max": -0.2545499766823494, "naucs_at_3_std": -0.18576978775594621, "naucs_at_3_diff1": 0.11280718285139574, "naucs_at_5_max": -0.3744474176165914, "naucs_at_5_std": -0.13992361065445957, "naucs_at_5_diff1": 0.07088609770314565, "naucs_at_10_max": -0.17925877054572567, "naucs_at_10_std": -0.229449684364072, "naucs_at_10_diff1": -0.053448834679196956, "naucs_at_20_max": -0.1343060840596677, "naucs_at_20_std": -0.35780059907661843, "naucs_at_20_diff1": 0.08618240298709662, "naucs_at_50_max": 0.007431928846090263, "naucs_at_50_std": -0.38555455659272087, "naucs_at_50_diff1": 0.2179165245141893, "naucs_at_100_max": 0.1955309778494358, "naucs_at_100_std": -0.2402990875100416, "naucs_at_100_diff1": 0.3020695838606588}}}
results/metrics_modernbert-embed-base_contextual.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"model": "modernbert-embed-base", "date": "2025-01-30 11:11:40", "is_contextual": true, "metrics": {"chunked-mldr": {"ndcg_at_1": 0.54, "ndcg_at_3": 0.69226, "ndcg_at_5": 0.73489, "ndcg_at_10": 0.74836, "ndcg_at_20": 0.75874, "ndcg_at_50": 0.76098, "ndcg_at_100": 0.76255, "map_at_1": 0.54, "map_at_3": 0.655, "map_at_5": 0.6795, "map_at_10": 0.68537, "map_at_20": 0.68837, "map_at_50": 0.68885, "map_at_100": 0.68897, "recall_at_1": 0.54, "recall_at_3": 0.8, "recall_at_5": 0.9, "recall_at_10": 0.94, "recall_at_20": 0.98, "recall_at_50": 0.99, "recall_at_100": 1.0, "precision_at_1": 0.54, "precision_at_3": 0.26667, "precision_at_5": 0.18, "precision_at_10": 0.094, "precision_at_20": 0.049, "precision_at_50": 0.0198, "precision_at_100": 0.01, "mrr_at_1": 0.54, "mrr_at_3": 0.655, "mrr_at_5": 0.6795, "mrr_at_10": 0.6853730158730158, "mrr_at_20": 0.6883730158730158, "mrr_at_50": 0.6888492063492063, "mrr_at_100": 0.6889696882769172, "naucs_at_1_max": 0.02039398738573539, "naucs_at_1_std": -0.21196965908143692, "naucs_at_1_diff1": 0.6891028312633714, "naucs_at_3_max": 0.32757242757242716, "naucs_at_3_std": -0.01920579420579387, "naucs_at_3_diff1": 0.4480519480519475, "naucs_at_5_max": 0.4231092436974794, "naucs_at_5_std": -0.21680672268907383, "naucs_at_5_diff1": 0.4484593837535024, "naucs_at_10_max": 0.5414721444133189, "naucs_at_10_std": -0.23646125116713304, "naucs_at_10_diff1": 0.3546529723000323, "naucs_at_20_max": 0.861111111111116, "naucs_at_20_std": 0.24042950513538955, "naucs_at_20_diff1": -0.3699813258636707, "naucs_at_50_max": 1.0, "naucs_at_50_std": 0.35807656395892007, "naucs_at_50_diff1": -1.7399626517273008, "naucs_at_100_max": NaN, "naucs_at_100_std": NaN, "naucs_at_100_diff1": NaN}, "tech-qa": {"ndcg_at_1": 0.14286, "ndcg_at_3": 0.33603, "ndcg_at_5": 0.39136, "ndcg_at_10": 0.42113, "ndcg_at_20": 0.45119, "ndcg_at_50": 0.46105, "ndcg_at_100": 0.46929, "map_at_1": 0.14286, "map_at_3": 0.28431, "map_at_5": 0.31499, "map_at_10": 0.32717, "map_at_20": 0.33561, 
"map_at_50": 0.33714, "map_at_100": 0.33789, "recall_at_1": 0.14286, "recall_at_3": 0.48739, "recall_at_5": 0.62185, "recall_at_10": 0.71429, "recall_at_20": 0.83193, "recall_at_50": 0.88235, "recall_at_100": 0.93277, "precision_at_1": 0.14286, "precision_at_3": 0.16246, "precision_at_5": 0.12437, "precision_at_10": 0.07143, "precision_at_20": 0.0416, "precision_at_50": 0.01765, "precision_at_100": 0.00933, "mrr_at_1": 0.14285714285714285, "mrr_at_3": 0.2843137254901961, "mrr_at_5": 0.3149859943977591, "mrr_at_10": 0.32717420301453914, "mrr_at_20": 0.33561341356564706, "mrr_at_50": 0.33714495424980173, "mrr_at_100": 0.3378853697744552, "naucs_at_1_max": -0.052195357895422845, "naucs_at_1_std": -0.14145916803327638, "naucs_at_1_diff1": 0.1526058919315945, "naucs_at_3_max": 0.17744994915544282, "naucs_at_3_std": -0.27509255183406733, "naucs_at_3_diff1": 0.19017888819871795, "naucs_at_5_max": 0.24060033529674488, "naucs_at_5_std": -0.2566309217991452, "naucs_at_5_diff1": 0.1745200778474903, "naucs_at_10_max": 0.34713884413559315, "naucs_at_10_std": -0.17164730674209766, "naucs_at_10_diff1": 0.2049708069302246, "naucs_at_20_max": 0.4678372816230481, "naucs_at_20_std": -0.05249191524576156, "naucs_at_20_diff1": 0.3844174940330258, "naucs_at_50_max": 0.2682386729502065, "naucs_at_50_std": -0.19636320576353225, "naucs_at_50_diff1": 0.45747055500086636, "naucs_at_100_max": 0.31362810576697464, "naucs_at_100_std": 0.020185860456340046, "naucs_at_100_diff1": 0.36973752957412565}, "NanoClimateFEVER": {"ndcg_at_1": 0.14, "ndcg_at_3": 0.17, "ndcg_at_5": 0.18547, "ndcg_at_10": 0.18547, "ndcg_at_20": 0.21688, "ndcg_at_50": 0.24106, "ndcg_at_100": 0.27047, "map_at_1": 0.14, "map_at_3": 0.16, "map_at_5": 0.168, "map_at_10": 0.168, "map_at_20": 0.17719, "map_at_50": 0.18126, "map_at_100": 0.18391, "recall_at_1": 0.14, "recall_at_3": 0.2, "recall_at_5": 0.24, "recall_at_10": 0.24, "recall_at_20": 0.36, "recall_at_50": 0.48, "recall_at_100": 0.66, "precision_at_1": 0.14, 
"precision_at_3": 0.06667, "precision_at_5": 0.048, "precision_at_10": 0.024, "precision_at_20": 0.018, "precision_at_50": 0.0096, "precision_at_100": 0.0066, "mrr_at_1": 0.14, "mrr_at_3": 0.16, "mrr_at_5": 0.168, "mrr_at_10": 0.168, "mrr_at_20": 0.17719448972080548, "mrr_at_50": 0.18125967140628796, "mrr_at_100": 0.18390534046295873, "naucs_at_1_max": -0.13878399592685325, "naucs_at_1_std": -0.2217743646315077, "naucs_at_1_diff1": 0.49556620985192396, "naucs_at_3_max": -0.015466580561833993, "naucs_at_3_std": -0.10574749757830151, "naucs_at_3_diff1": 0.3937035841136584, "naucs_at_5_max": 0.0328001437814521, "naucs_at_5_std": -0.09750179726815256, "naucs_at_5_diff1": 0.2780972921159835, "naucs_at_10_max": 0.0328001437814521, "naucs_at_10_std": -0.09750179726815256, "naucs_at_10_diff1": 0.2780972921159835, "naucs_at_20_max": 0.1663517556290347, "naucs_at_20_std": -0.08935600692804294, "naucs_at_20_diff1": 0.3206319214821813, "naucs_at_50_max": 0.43497935852651637, "naucs_at_50_std": -0.17823118450301728, "naucs_at_50_diff1": 0.2903831904308244, "naucs_at_100_max": 0.3837498755351984, "naucs_at_100_std": -0.1847389558232937, "naucs_at_100_diff1": 0.10843373493975847}, "NanoDBPedia": {"ndcg_at_1": 0.06, "ndcg_at_3": 0.07262, "ndcg_at_5": 0.1062, "ndcg_at_10": 0.151, "ndcg_at_20": 0.18144, "ndcg_at_50": 0.21768, "ndcg_at_100": 0.24304, "map_at_1": 0.06, "map_at_3": 0.07, "map_at_5": 0.089, "map_at_10": 0.10725, "map_at_20": 0.11563, "map_at_50": 0.1217, "map_at_100": 0.12376, "recall_at_1": 0.06, "recall_at_3": 0.08, "recall_at_5": 0.16, "recall_at_10": 0.3, "recall_at_20": 0.42, "recall_at_50": 0.6, "recall_at_100": 0.76, "precision_at_1": 0.06, "precision_at_3": 0.02667, "precision_at_5": 0.032, "precision_at_10": 0.03, "precision_at_20": 0.021, "precision_at_50": 0.012, "precision_at_100": 0.0076, "mrr_at_1": 0.06, "mrr_at_3": 0.07, "mrr_at_5": 0.08900000000000001, "mrr_at_10": 0.10724603174603174, "mrr_at_20": 0.11563045288045287, "mrr_at_50": 0.12170203226163792, 
"mrr_at_100": 0.12376012545439886, "naucs_at_1_max": -0.2501565155173956, "naucs_at_1_std": -0.18978624452195686, "naucs_at_1_diff1": 0.06189070745013864, "naucs_at_3_max": -0.267239066273142, "naucs_at_3_std": -0.1937885698953582, "naucs_at_3_diff1": 0.29641803058760396, "naucs_at_5_max": 0.06467967429712705, "naucs_at_5_std": -0.02243048087263797, "naucs_at_5_diff1": 0.33257796896604713, "naucs_at_10_max": 0.10058100256722037, "naucs_at_10_std": -0.1948655587082827, "naucs_at_10_diff1": 0.18011079583839976, "naucs_at_20_max": 0.11867174741426241, "naucs_at_20_std": -0.19278326463955192, "naucs_at_20_diff1": 0.26842419057987943, "naucs_at_50_max": -0.16494508756307572, "naucs_at_50_std": -0.4835559513208673, "naucs_at_50_diff1": 0.39142178688038, "naucs_at_100_max": 0.02275677078826726, "naucs_at_100_std": -0.39378731504715714, "naucs_at_100_diff1": 0.13191139569092322}, "NanoFEVER": {"ndcg_at_1": 0.58, "ndcg_at_3": 0.70309, "ndcg_at_5": 0.71944, "ndcg_at_10": 0.72522, "ndcg_at_20": 0.73002, "ndcg_at_50": 0.74965, "ndcg_at_100": 0.75306, "map_at_1": 0.58, "map_at_3": 0.67, "map_at_5": 0.679, "map_at_10": 0.681, "map_at_20": 0.68218, "map_at_50": 0.68523, "map_at_100": 0.68558, "recall_at_1": 0.58, "recall_at_3": 0.8, "recall_at_5": 0.84, "recall_at_10": 0.86, "recall_at_20": 0.88, "recall_at_50": 0.98, "recall_at_100": 1.0, "precision_at_1": 0.58, "precision_at_3": 0.26667, "precision_at_5": 0.168, "precision_at_10": 0.086, "precision_at_20": 0.044, "precision_at_50": 0.0196, "precision_at_100": 0.01, "mrr_at_1": 0.58, "mrr_at_3": 0.67, "mrr_at_5": 0.679, "mrr_at_10": 0.6809999999999999, "mrr_at_20": 0.6821764705882353, "mrr_at_50": 0.6852329598506068, "mrr_at_100": 0.6855838370435893, "naucs_at_1_max": 0.3191223844211627, "naucs_at_1_std": -0.02182430275415772, "naucs_at_1_diff1": 0.501842876628842, "naucs_at_3_max": 0.4558941058941055, "naucs_at_3_std": -0.18376623376623313, "naucs_at_3_diff1": 0.3835164835164838, "naucs_at_5_max": 0.392304860088366, 
"naucs_at_5_std": -0.07762641138929713, "naucs_at_5_diff1": 0.37941826215022095, "naucs_at_10_max": 0.33573506997367414, "naucs_at_10_std": -0.0040182901482591574, "naucs_at_10_diff1": 0.34612719966745237, "naucs_at_20_max": 0.2150811326757896, "naucs_at_20_std": -0.1276646516067436, "naucs_at_20_diff1": 0.24928412344893436, "naucs_at_50_max": 0.35807656395891063, "naucs_at_50_std": 0.8692810457516335, "naucs_at_50_diff1": 0.12278244631184881, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoFiQA2018": {"ndcg_at_1": 0.14, "ndcg_at_3": 0.26833, "ndcg_at_5": 0.31913, "ndcg_at_10": 0.33835, "ndcg_at_20": 0.35273, "ndcg_at_50": 0.36033, "ndcg_at_100": 0.36033, "map_at_1": 0.14, "map_at_3": 0.23667, "map_at_5": 0.26567, "map_at_10": 0.2735, "map_at_20": 0.27704, "map_at_50": 0.27812, "map_at_100": 0.27812, "recall_at_1": 0.14, "recall_at_3": 0.36, "recall_at_5": 0.48, "recall_at_10": 0.54, "recall_at_20": 0.6, "recall_at_50": 0.64, "recall_at_100": 0.64, "precision_at_1": 0.14, "precision_at_3": 0.12, "precision_at_5": 0.096, "precision_at_10": 0.054, "precision_at_20": 0.03, "precision_at_50": 0.0128, "precision_at_100": 0.0064, "mrr_at_1": 0.14, "mrr_at_3": 0.2, "mrr_at_5": 0.2, "mrr_at_10": 0.20285714285714285, "mrr_at_20": 0.20285714285714285, "mrr_at_50": 0.20285714285714285, "mrr_at_100": 0.20324929971988795, "naucs_at_1_max": -0.2569901141329715, "naucs_at_1_std": -0.03279731851160429, "naucs_at_1_diff1": 0.1479485765200049, "naucs_at_3_max": -0.31378260641368794, "naucs_at_3_std": -0.13995171364089637, "naucs_at_3_diff1": 0.07452894557287582, "naucs_at_5_max": -0.08809675029109798, "naucs_at_5_std": -0.18452948025828314, "naucs_at_5_diff1": 0.10667407642637843, "naucs_at_10_max": -0.24450557083715413, "naucs_at_10_std": -0.20602372815426115, "naucs_at_10_diff1": -0.01278097302103334, "naucs_at_20_max": -0.1599584446423274, "naucs_at_20_std": -0.2903532205402198, "naucs_at_20_diff1": 0.05143959631938217, "naucs_at_50_max": 
-0.07756418444245793, "naucs_at_50_std": -0.15480904330054943, "naucs_at_50_diff1": 0.009036914037552118, "naucs_at_100_max": -0.07756418444245793, "naucs_at_100_std": -0.15480904330054943, "naucs_at_100_diff1": 0.009036914037552118}, "NanoHotpotQA": {"ndcg_at_1": 0.18, "ndcg_at_3": 0.31357, "ndcg_at_5": 0.34627, "ndcg_at_10": 0.38633, "ndcg_at_20": 0.40652, "ndcg_at_50": 0.43409, "ndcg_at_100": 0.44091, "map_at_1": 0.18, "map_at_3": 0.28333, "map_at_5": 0.30133, "map_at_10": 0.31867, "map_at_20": 0.32419, "map_at_50": 0.32854, "map_at_100": 0.32924, "recall_at_1": 0.18, "recall_at_3": 0.4, "recall_at_5": 0.48, "recall_at_10": 0.6, "recall_at_20": 0.68, "recall_at_50": 0.82, "recall_at_100": 0.86, "precision_at_1": 0.18, "precision_at_3": 0.13333, "precision_at_5": 0.096, "precision_at_10": 0.06, "precision_at_20": 0.034, "precision_at_50": 0.0164, "precision_at_100": 0.0086, "mrr_at_1": 0.18, "mrr_at_3": 0.2833333333333333, "mrr_at_5": 0.30133333333333334, "mrr_at_10": 0.31866666666666665, "mrr_at_20": 0.3241904761904762, "mrr_at_50": 0.32853635095063666, "mrr_at_100": 0.32923842665271236, "naucs_at_1_max": -0.24825456969441811, "naucs_at_1_std": -0.4017121004806511, "naucs_at_1_diff1": -0.12991614917727953, "naucs_at_3_max": -0.15975798146240988, "naucs_at_3_std": -0.2994850669412977, "naucs_at_3_diff1": -0.14961380020597323, "naucs_at_5_max": 0.024055255636709922, "naucs_at_5_std": -0.4065047104901027, "naucs_at_5_diff1": 0.06782576479305578, "naucs_at_10_max": 0.07105966162065888, "naucs_at_10_std": -0.35194419709112507, "naucs_at_10_diff1": -0.07925200356188798, "naucs_at_20_max": 0.05400456084582945, "naucs_at_20_std": -0.24296869601271498, "naucs_at_20_diff1": -0.32506392094533865, "naucs_at_50_max": -0.13646877409406358, "naucs_at_50_std": -0.3616037008481119, "naucs_at_50_diff1": -0.49399713624848635, "naucs_at_100_max": 0.08285991409172686, "naucs_at_100_std": -0.2585561867812138, "naucs_at_100_diff1": -0.5795344325897214}, "NanoMSMARCO": {"ndcg_at_1": 
0.28, "ndcg_at_3": 0.40833, "ndcg_at_5": 0.43329, "ndcg_at_10": 0.50108, "ndcg_at_20": 0.54721, "ndcg_at_50": 0.55137, "ndcg_at_100": 0.55793, "map_at_1": 0.28, "map_at_3": 0.37667, "map_at_5": 0.39067, "map_at_10": 0.42043, "map_at_20": 0.43342, "map_at_50": 0.43416, "map_at_100": 0.43475, "recall_at_1": 0.28, "recall_at_3": 0.5, "recall_at_5": 0.56, "recall_at_10": 0.76, "recall_at_20": 0.94, "recall_at_50": 0.96, "recall_at_100": 1.0, "precision_at_1": 0.28, "precision_at_3": 0.16667, "precision_at_5": 0.112, "precision_at_10": 0.076, "precision_at_20": 0.047, "precision_at_50": 0.0192, "precision_at_100": 0.01, "mrr_at_1": 0.28, "mrr_at_3": 0.37666666666666665, "mrr_at_5": 0.3906666666666667, "mrr_at_10": 0.42042857142857143, "mrr_at_20": 0.4334168135785783, "mrr_at_50": 0.434157554319319, "mrr_at_100": 0.4347504052347506, "naucs_at_1_max": 0.11307894295770693, "naucs_at_1_std": -0.07444549974858959, "naucs_at_1_diff1": 0.5648639588803842, "naucs_at_3_max": 0.16922051965356413, "naucs_at_3_std": -0.19096602265156523, "naucs_at_3_diff1": 0.4283277814790141, "naucs_at_5_max": 0.08386327503974521, "naucs_at_5_std": -0.2739041562570975, "naucs_at_5_diff1": 0.3790597320009083, "naucs_at_10_max": 0.3378039283551084, "naucs_at_10_std": 0.03028467595396747, "naucs_at_10_diff1": 0.3086008479709261, "naucs_at_20_max": -0.20028011204481524, "naucs_at_20_std": -0.694211017740424, "naucs_at_20_diff1": 0.6374105197634622, "naucs_at_50_max": -0.6615312791783471, "naucs_at_50_std": -0.9556489262371703, "naucs_at_50_diff1": 0.7770774976657314, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoNFCorpus": {"ndcg_at_1": 0.0, "ndcg_at_3": 0.02524, "ndcg_at_5": 0.03385, "ndcg_at_10": 0.04016, "ndcg_at_20": 0.05537, "ndcg_at_50": 0.07395, "ndcg_at_100": 0.08043, "map_at_1": 0.0, "map_at_3": 0.02, "map_at_5": 0.025, "map_at_10": 0.0275, "map_at_20": 0.03171, "map_at_50": 0.03417, "map_at_100": 0.03474, "recall_at_1": 0.0, "recall_at_3": 0.04, 
"recall_at_5": 0.06, "recall_at_10": 0.08, "recall_at_20": 0.14, "recall_at_50": 0.24, "recall_at_100": 0.28, "precision_at_1": 0.0, "precision_at_3": 0.01333, "precision_at_5": 0.012, "precision_at_10": 0.008, "precision_at_20": 0.007, "precision_at_50": 0.0048, "precision_at_100": 0.0028, "mrr_at_1": 0.0, "mrr_at_3": 0.02, "mrr_at_5": 0.025, "mrr_at_10": 0.0275, "mrr_at_20": 0.031705128205128205, "mrr_at_50": 0.034174471537338075, "mrr_at_100": 0.03474043562789233, "naucs_at_1_max": NaN, "naucs_at_1_std": NaN, "naucs_at_1_diff1": NaN, "naucs_at_3_max": 0.2126375100617117, "naucs_at_3_std": 0.12342366514623024, "naucs_at_3_diff1": 0.29246042393345845, "naucs_at_5_max": 0.07315982470262038, "naucs_at_5_std": -0.05607727394687428, "naucs_at_5_diff1": 0.3029246042393343, "naucs_at_10_max": 0.13583310973973703, "naucs_at_10_std": 0.2079420445398444, "naucs_at_10_diff1": 0.3081566943922727, "naucs_at_20_max": 0.4177521320378465, "naucs_at_20_std": 0.21388264245407115, "naucs_at_20_diff1": 0.2647969790826936, "naucs_at_50_max": 0.2673136832015337, "naucs_at_50_std": 0.12694704049844246, "naucs_at_50_diff1": 0.17190869877785792, "naucs_at_100_max": 0.16411531370467605, "naucs_at_100_std": 0.10888876473545996, "naucs_at_100_diff1": 0.07905469579306093}, "NanoNQ": {"ndcg_at_1": 0.46, "ndcg_at_3": 0.53309, "ndcg_at_5": 0.55806, "ndcg_at_10": 0.58862, "ndcg_at_20": 0.60457, "ndcg_at_50": 0.60899, "ndcg_at_100": 0.62576, "map_at_1": 0.46, "map_at_3": 0.51667, "map_at_5": 0.53067, "map_at_10": 0.54225, "map_at_20": 0.54703, "map_at_50": 0.54794, "map_at_100": 0.54957, "recall_at_1": 0.46, "recall_at_3": 0.58, "recall_at_5": 0.64, "recall_at_10": 0.74, "recall_at_20": 0.8, "recall_at_50": 0.82, "recall_at_100": 0.92, "precision_at_1": 0.46, "precision_at_3": 0.19333, "precision_at_5": 0.128, "precision_at_10": 0.074, "precision_at_20": 0.04, "precision_at_50": 0.0164, "precision_at_100": 0.0092, "mrr_at_1": 0.46, "mrr_at_3": 0.5166666666666666, "mrr_at_5": 0.5306666666666667, 
"mrr_at_10": 0.5422460317460317, "mrr_at_20": 0.5470312465312466, "mrr_at_50": 0.5479403374403374, "mrr_at_100": 0.5495727352728472, "naucs_at_1_max": 0.22327928306746203, "naucs_at_1_std": -0.15629024204357309, "naucs_at_1_diff1": 0.5027988751938187, "naucs_at_3_max": 0.313434135299068, "naucs_at_3_std": -0.2863278869315382, "naucs_at_3_diff1": 0.47401108628145194, "naucs_at_5_max": 0.39602758973048924, "naucs_at_5_std": -0.2088069996168091, "naucs_at_5_diff1": 0.4512389832673393, "naucs_at_10_max": 0.411986537447791, "naucs_at_10_std": -0.16106402822269944, "naucs_at_10_diff1": 0.3302785775110502, "naucs_at_20_max": 0.4282217782217782, "naucs_at_20_std": -0.10789210789210843, "naucs_at_20_diff1": 0.16853146853146764, "naucs_at_50_max": 0.36958916180196066, "naucs_at_50_std": -0.18432646767265126, "naucs_at_50_diff1": 0.14357308073576425, "naucs_at_100_max": 0.14063958916899952, "naucs_at_100_std": -0.7935340802987858, "naucs_at_100_diff1": -0.2567693744164331}, "NanoQuoraRetrieval": {"ndcg_at_1": 0.74, "ndcg_at_3": 0.84095, "ndcg_at_5": 0.84869, "ndcg_at_10": 0.86914, "ndcg_at_20": 0.87472, "ndcg_at_50": 0.87472, "ndcg_at_100": 0.87472, "map_at_1": 0.74, "map_at_3": 0.82, "map_at_5": 0.824, "map_at_10": 0.83305, "map_at_20": 0.83487, "map_at_50": 0.83487, "map_at_100": 0.83487, "recall_at_1": 0.74, "recall_at_3": 0.9, "recall_at_5": 0.92, "recall_at_10": 0.98, "recall_at_20": 1.0, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.74, "precision_at_3": 0.3, "precision_at_5": 0.184, "precision_at_10": 0.098, "precision_at_20": 0.05, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.74, "mrr_at_3": 0.82, "mrr_at_5": 0.8240000000000001, "mrr_at_10": 0.833047619047619, "mrr_at_20": 0.8348658008658009, "mrr_at_50": 0.8348658008658009, "mrr_at_100": 0.8348658008658009, "naucs_at_1_max": 0.14565508292445548, "naucs_at_1_std": -0.4959652893232229, "naucs_at_1_diff1": 0.7229228336239404, "naucs_at_3_max": 0.5043884220354813, "naucs_at_3_std": 
-0.9828197945845016, "naucs_at_3_diff1": 0.7134453781512597, "naucs_at_5_max": 0.599789915966389, "naucs_at_5_std": -0.9405929038281939, "naucs_at_5_diff1": 0.6744864612511682, "naucs_at_10_max": 0.7222222222222204, "naucs_at_10_std": -1.739962651727313, "naucs_at_10_diff1": -0.1713352007469462, "naucs_at_20_max": 1.0, "naucs_at_20_std": 1.0, "naucs_at_20_diff1": 1.0, "naucs_at_50_max": 1.0, "naucs_at_50_std": 1.0, "naucs_at_50_diff1": 1.0, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoSCIDOCS": {"ndcg_at_1": 0.0, "ndcg_at_3": 0.03262, "ndcg_at_5": 0.04809, "ndcg_at_10": 0.09772, "ndcg_at_20": 0.12763, "ndcg_at_50": 0.18003, "ndcg_at_100": 0.18322, "map_at_1": 0.0, "map_at_3": 0.02333, "map_at_5": 0.03133, "map_at_10": 0.05057, "map_at_20": 0.0586, "map_at_50": 0.06738, "map_at_100": 0.06765, "recall_at_1": 0.0, "recall_at_3": 0.06, "recall_at_5": 0.1, "recall_at_10": 0.26, "recall_at_20": 0.38, "recall_at_50": 0.64, "recall_at_100": 0.66, "precision_at_1": 0.0, "precision_at_3": 0.02, "precision_at_5": 0.02, "precision_at_10": 0.026, "precision_at_20": 0.019, "precision_at_50": 0.0128, "precision_at_100": 0.0066, "mrr_at_1": 0.0, "mrr_at_3": 0.02, "mrr_at_5": 0.02, "mrr_at_10": 0.02, "mrr_at_20": 0.021818181818181816, "mrr_at_50": 0.02225296442687747, "mrr_at_100": 0.02225296442687747, "naucs_at_1_max": NaN, "naucs_at_1_std": NaN, "naucs_at_1_diff1": NaN, "naucs_at_3_max": -0.1043734907432252, "naucs_at_3_std": 0.07754225918969676, "naucs_at_3_diff1": 0.2747518111081297, "naucs_at_5_max": 0.15707002951435467, "naucs_at_5_std": -0.03370002683123162, "naucs_at_5_diff1": 0.1466058492084787, "naucs_at_10_max": -0.0627619901130353, "naucs_at_10_std": -0.01873319649619841, "naucs_at_10_diff1": -0.16189182157208526, "naucs_at_20_max": 0.09170067319938621, "naucs_at_20_std": 0.026953967717619855, "naucs_at_20_diff1": 0.04595430561692605, "naucs_at_50_max": 0.46152126708391844, "naucs_at_50_std": 0.2846468259036912, "naucs_at_50_diff1": 
0.34496742879039444, "naucs_at_100_max": 0.4403066812705362, "naucs_at_100_std": 0.22476683593879634, "naucs_at_100_diff1": 0.33310099903747165}, "NanoArguAna": {"ndcg_at_1": 0.16, "ndcg_at_3": 0.3588, "ndcg_at_5": 0.39151, "ndcg_at_10": 0.4585, "ndcg_at_20": 0.46931, "ndcg_at_50": 0.50164, "ndcg_at_100": 0.50514, "map_at_1": 0.16, "map_at_3": 0.31, "map_at_5": 0.328, "map_at_10": 0.35702, "map_at_20": 0.36036, "map_at_50": 0.36579, "map_at_100": 0.36617, "recall_at_1": 0.16, "recall_at_3": 0.5, "recall_at_5": 0.58, "recall_at_10": 0.78, "recall_at_20": 0.82, "recall_at_50": 0.98, "recall_at_100": 1.0, "precision_at_1": 0.16, "precision_at_3": 0.16667, "precision_at_5": 0.116, "precision_at_10": 0.078, "precision_at_20": 0.041, "precision_at_50": 0.0196, "precision_at_100": 0.01, "mrr_at_1": 0.16, "mrr_at_3": 0.31, "mrr_at_5": 0.32799999999999996, "mrr_at_10": 0.35702380952380947, "mrr_at_20": 0.3603571428571428, "mrr_at_50": 0.3657869129204316, "mrr_at_100": 0.366171528305047, "naucs_at_1_max": -0.35727454294054395, "naucs_at_1_std": -0.27550314948532806, "naucs_at_1_diff1": -0.25111384237210027, "naucs_at_3_max": 0.32618254497002014, "naucs_at_3_std": 0.0018387741505664723, "naucs_at_3_diff1": 0.39117921385742854, "naucs_at_5_max": 0.35812752125837993, "naucs_at_5_std": -0.06997126853759777, "naucs_at_5_diff1": 0.3276547581043039, "naucs_at_10_max": 0.5760652849260443, "naucs_at_10_std": -0.12704594983076084, "naucs_at_10_diff1": 0.3735336393564236, "naucs_at_20_max": 0.524011455006058, "naucs_at_20_std": 0.0745676836656027, "naucs_at_20_diff1": 0.31622425377244257, "naucs_at_50_max": 0.5541549953314605, "naucs_at_50_std": -1.7399626517273847, "naucs_at_50_diff1": 0.8692810457516335, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoSciFact": {"ndcg_at_1": 0.56, "ndcg_at_3": 0.63786, "ndcg_at_5": 0.66282, "ndcg_at_10": 0.68158, "ndcg_at_20": 0.69256, "ndcg_at_50": 0.70121, "ndcg_at_100": 0.70121, "map_at_1": 0.56, "map_at_3": 
0.61667, "map_at_5": 0.63067, "map_at_10": 0.63802, "map_at_20": 0.64151, "map_at_50": 0.6432, "map_at_100": 0.6432, "recall_at_1": 0.56, "recall_at_3": 0.7, "recall_at_5": 0.76, "recall_at_10": 0.82, "recall_at_20": 0.86, "recall_at_50": 0.9, "recall_at_100": 0.9, "precision_at_1": 0.56, "precision_at_3": 0.23333, "precision_at_5": 0.152, "precision_at_10": 0.082, "precision_at_20": 0.043, "precision_at_50": 0.018, "precision_at_100": 0.009, "mrr_at_1": 0.56, "mrr_at_3": 0.6166666666666667, "mrr_at_5": 0.6306666666666667, "mrr_at_10": 0.6380238095238095, "mrr_at_20": 0.641508658008658, "mrr_at_50": 0.6432017797017796, "mrr_at_100": 0.6432017797017796, "naucs_at_1_max": 0.30243583920054484, "naucs_at_1_std": -0.10887463093345483, "naucs_at_1_diff1": 0.6184987508516916, "naucs_at_3_max": 0.4397405872815703, "naucs_at_3_std": -0.19174923437218513, "naucs_at_3_diff1": 0.5923257070798049, "naucs_at_5_max": 0.27576360647226733, "naucs_at_5_std": -0.4567794410314099, "naucs_at_5_diff1": 0.4574716621960714, "naucs_at_10_max": 0.16615265998458092, "naucs_at_10_std": -0.10447185813415441, "naucs_at_10_diff1": 0.43710761097037154, "naucs_at_20_max": 0.37626437577941124, "naucs_at_20_std": 0.21040598586670434, "naucs_at_20_diff1": 0.4681308022724129, "naucs_at_50_max": 0.363865546218487, "naucs_at_50_std": 0.20700280112044842, "naucs_at_50_diff1": 0.6406162464986, "naucs_at_100_max": 0.363865546218487, "naucs_at_100_std": 0.20700280112044842, "naucs_at_100_diff1": 0.6406162464986}, "NanoTouche2020": {"ndcg_at_1": 0.08163, "ndcg_at_3": 0.12512, "ndcg_at_5": 0.1427, "ndcg_at_10": 0.18147, "ndcg_at_20": 0.23829, "ndcg_at_50": 0.2667, "ndcg_at_100": 0.29357, "map_at_1": 0.08163, "map_at_3": 0.11224, "map_at_5": 0.12245, "map_at_10": 0.13792, "map_at_20": 0.15355, "map_at_50": 0.15812, "map_at_100": 0.16058, "recall_at_1": 0.08163, "recall_at_3": 0.16327, "recall_at_5": 0.20408, "recall_at_10": 0.32653, "recall_at_20": 0.55102, "recall_at_50": 0.69388, "recall_at_100": 0.85714, 
"precision_at_1": 0.08163, "precision_at_3": 0.05442, "precision_at_5": 0.04082, "precision_at_10": 0.03265, "precision_at_20": 0.02755, "precision_at_50": 0.01388, "precision_at_100": 0.00857, "mrr_at_1": 0.08163265306122448, "mrr_at_3": 0.11224489795918367, "mrr_at_5": 0.12244897959183673, "mrr_at_10": 0.13791707159054098, "mrr_at_20": 0.15355051111353632, "mrr_at_50": 0.1581188884564793, "mrr_at_100": 0.1605830062068737, "naucs_at_1_max": -0.13018042981735609, "naucs_at_1_std": -0.06150173984136389, "naucs_at_1_diff1": -0.0069041450584293946, "naucs_at_3_max": -0.07960286806285084, "naucs_at_3_std": 0.0030461489713089485, "naucs_at_3_diff1": -0.19347323004590275, "naucs_at_5_max": -0.016304636145487442, "naucs_at_5_std": -0.10981700267695477, "naucs_at_5_diff1": -0.07130016492283335, "naucs_at_10_max": 0.03537637043694802, "naucs_at_10_std": -0.10291209769340832, "naucs_at_10_diff1": -2.3286717774064405e-05, "naucs_at_20_max": 0.15672270723355194, "naucs_at_20_std": -0.24968657238358571, "naucs_at_20_diff1": 0.10626755584746775, "naucs_at_50_max": 0.15523605801496243, "naucs_at_50_std": -0.4340577675600269, "naucs_at_50_diff1": 0.28720014217037526, "naucs_at_100_max": 0.04291173870078549, "naucs_at_100_std": -0.27217272585426816, "naucs_at_100_diff1": -0.140401507139844}}}
results/metrics_modernbert-trained-v0.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"model": "modernbert-trained-v0", "date": "2025-01-19 23:24:52", "is_contextual": false, "metrics": {"chunked-mldr": {"ndcg_at_1": 0.69588, "ndcg_at_3": 0.78684, "ndcg_at_5": 0.80302, "ndcg_at_10": 0.81278, "ndcg_at_20": 0.82016, "ndcg_at_50": 0.82494, "ndcg_at_100": 0.82762, "map_at_1": 0.69588, "map_at_3": 0.76488, "map_at_5": 0.77387, "map_at_10": 0.77792, "map_at_20": 0.78, "map_at_50": 0.78076, "map_at_100": 0.781, "recall_at_1": 0.69588, "recall_at_3": 0.85023, "recall_at_5": 0.88947, "recall_at_10": 0.91956, "recall_at_20": 0.94833, "recall_at_50": 0.97253, "recall_at_100": 0.98888, "precision_at_1": 0.69588, "precision_at_3": 0.28341, "precision_at_5": 0.17789, "precision_at_10": 0.09196, "precision_at_20": 0.04742, "precision_at_50": 0.01945, "precision_at_100": 0.00989, "mrr_at_1": 0.6958796599084369, "mrr_at_3": 0.7648790058862002, "mrr_at_5": 0.7738718116415958, "mrr_at_10": 0.7779181849325735, "mrr_at_20": 0.7799966991746358, "mrr_at_50": 0.7807584580921276, "mrr_at_100": 0.7809999327383695, "naucs_at_1_max": 0.38860910953230854, "naucs_at_1_std": -0.26471484310755783, "naucs_at_1_diff1": 0.6846135815770551, "naucs_at_3_max": 0.15791786683933048, "naucs_at_3_std": -0.47994298869138013, "naucs_at_3_diff1": 0.5042053414619686, "naucs_at_5_max": 0.021833084186355517, "naucs_at_5_std": -0.5329750128719849, "naucs_at_5_diff1": 0.4330462669808838, "naucs_at_10_max": -0.1803614207648025, "naucs_at_10_std": -0.6292017384034406, "naucs_at_10_diff1": 0.3665183789583318, "naucs_at_20_max": -0.37106795446866236, "naucs_at_20_std": -0.7058135943446324, "naucs_at_20_diff1": 0.3431560751517611, "naucs_at_50_max": -0.5884522189218432, "naucs_at_50_std": -0.9046063484169791, "naucs_at_50_diff1": 0.29363214153488454, "naucs_at_100_max": -0.7596937774603543, "naucs_at_100_std": -0.8759409330688408, "naucs_at_100_diff1": 0.22060151613733198}, "NanoClimateFEVER": {"ndcg_at_1": 0.08, "ndcg_at_3": 0.15047, "ndcg_at_5": 0.1677, "ndcg_at_10": 0.17974, "ndcg_at_20": 0.20997, 
"ndcg_at_50": 0.22559, "ndcg_at_100": 0.24782, "map_at_1": 0.08, "map_at_3": 0.13333, "map_at_5": 0.14333, "map_at_10": 0.14778, "map_at_20": 0.15598, "map_at_50": 0.1584, "map_at_100": 0.16023, "recall_at_1": 0.08, "recall_at_3": 0.2, "recall_at_5": 0.24, "recall_at_10": 0.28, "recall_at_20": 0.4, "recall_at_50": 0.48, "recall_at_100": 0.62, "precision_at_1": 0.08, "precision_at_3": 0.06667, "precision_at_5": 0.048, "precision_at_10": 0.028, "precision_at_20": 0.02, "precision_at_50": 0.0096, "precision_at_100": 0.0062, "mrr_at_1": 0.08, "mrr_at_3": 0.13333333333333333, "mrr_at_5": 0.14333333333333334, "mrr_at_10": 0.14777777777777779, "mrr_at_20": 0.15597771672771674, "mrr_at_50": 0.15840451353339552, "mrr_at_100": 0.16022578529105386, "naucs_at_1_max": -0.44868526965387723, "naucs_at_1_std": -0.534813522940703, "naucs_at_1_diff1": -0.09961094714247391, "naucs_at_3_max": 0.004197610590894376, "naucs_at_3_std": -0.3756215692605747, "naucs_at_3_diff1": -0.030513400064578564, "naucs_at_5_max": 0.08878504672897188, "naucs_at_5_std": -0.22582674335010797, "naucs_at_5_diff1": -0.018571770908219622, "naucs_at_10_max": 0.15023185652829746, "naucs_at_10_std": -0.07276942845969075, "naucs_at_10_diff1": -0.09858092630873244, "naucs_at_20_max": 0.3335736354273945, "naucs_at_20_std": -0.046215242018537626, "naucs_at_20_diff1": -0.10208547888774462, "naucs_at_50_max": 0.26389329946014617, "naucs_at_50_std": -0.17989838043823458, "naucs_at_50_diff1": -0.11061712713030632, "naucs_at_100_max": 0.21508783804571838, "naucs_at_100_std": 0.0068609051472172155, "naucs_at_100_diff1": -0.11943512906500983}, "NanoDBPedia": {"ndcg_at_1": 0.04, "ndcg_at_3": 0.09524, "ndcg_at_5": 0.11159, "ndcg_at_10": 0.13747, "ndcg_at_20": 0.15799, "ndcg_at_50": 0.20403, "ndcg_at_100": 0.22339, "map_at_1": 0.04, "map_at_3": 0.08, "map_at_5": 0.089, "map_at_10": 0.09969, "map_at_20": 0.10548, "map_at_50": 0.11221, "map_at_100": 0.11388, "recall_at_1": 0.04, "recall_at_3": 0.14, "recall_at_5": 0.18, 
"recall_at_10": 0.26, "recall_at_20": 0.34, "recall_at_50": 0.58, "recall_at_100": 0.7, "precision_at_1": 0.04, "precision_at_3": 0.04667, "precision_at_5": 0.036, "precision_at_10": 0.026, "precision_at_20": 0.017, "precision_at_50": 0.0116, "precision_at_100": 0.007, "mrr_at_1": 0.04, "mrr_at_3": 0.08, "mrr_at_5": 0.08900000000000001, "mrr_at_10": 0.09969047619047618, "mrr_at_20": 0.10547691197691197, "mrr_at_50": 0.11220994055292793, "mrr_at_100": 0.11388350975106225, "naucs_at_1_max": -0.5747249798765763, "naucs_at_1_std": -0.4995975315266971, "naucs_at_1_diff1": -0.158304266165817, "naucs_at_3_max": -0.1699690271118842, "naucs_at_3_std": -0.23424837710551993, "naucs_at_3_diff1": 0.24952267809410678, "naucs_at_5_max": -0.2151001648949234, "naucs_at_5_std": -0.28961863663474036, "naucs_at_5_diff1": 0.17352559379714402, "naucs_at_10_max": -0.2296840218553961, "naucs_at_10_std": -0.3711370009540055, "naucs_at_10_diff1": 0.21335029342892656, "naucs_at_20_max": 0.02869710379183339, "naucs_at_20_std": -0.1732955298232598, "naucs_at_20_diff1": 0.35464638702668333, "naucs_at_50_max": -0.03024058972052094, "naucs_at_50_std": -0.2331021272890852, "naucs_at_50_diff1": 0.34300722639811937, "naucs_at_100_max": -0.005116195280129022, "naucs_at_100_std": -0.034516303368761866, "naucs_at_100_diff1": 0.36876238515582804}, "NanoFEVER": {"ndcg_at_1": 0.56, "ndcg_at_3": 0.68309, "ndcg_at_5": 0.71492, "ndcg_at_10": 0.73413, "ndcg_at_20": 0.74425, "ndcg_at_50": 0.75294, "ndcg_at_100": 0.75294, "map_at_1": 0.56, "map_at_3": 0.65, "map_at_5": 0.667, "map_at_10": 0.67483, "map_at_20": 0.6776, "map_at_50": 0.67932, "map_at_100": 0.67932, "recall_at_1": 0.56, "recall_at_3": 0.78, "recall_at_5": 0.86, "recall_at_10": 0.92, "recall_at_20": 0.96, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.56, "precision_at_3": 0.26, "precision_at_5": 0.172, "precision_at_10": 0.092, "precision_at_20": 0.048, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.56, "mrr_at_3": 
0.65, "mrr_at_5": 0.667, "mrr_at_10": 0.6748333333333334, "mrr_at_20": 0.6775952380952381, "mrr_at_50": 0.6793168498168498, "mrr_at_100": 0.6793168498168498, "naucs_at_1_max": 0.2817965023847371, "naucs_at_1_std": 0.29264138087667485, "naucs_at_1_diff1": 0.696797637974108, "naucs_at_3_max": 0.63935642416655, "naucs_at_3_std": 0.21291788380395923, "naucs_at_3_diff1": 0.6628645615987375, "naucs_at_5_max": 0.8094776222807272, "naucs_at_5_std": 0.19973673271442538, "naucs_at_5_diff1": 0.6765969239296113, "naucs_at_10_max": 0.9346405228758174, "naucs_at_10_std": 0.45518207282913165, "naucs_at_10_diff1": 0.6418067226890769, "naucs_at_20_max": 0.934640522875822, "naucs_at_20_std": 0.34897292250234174, "naucs_at_20_diff1": 0.7222222222222252, "naucs_at_50_max": 1.0, "naucs_at_50_std": 1.0, "naucs_at_50_diff1": 1.0, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoFiQA2018": {"ndcg_at_1": 0.26, "ndcg_at_3": 0.35309, "ndcg_at_5": 0.39353, "ndcg_at_10": 0.4186, "ndcg_at_20": 0.42793, "ndcg_at_50": 0.44338, "ndcg_at_100": 0.45927, "map_at_1": 0.26, "map_at_3": 0.33, "map_at_5": 0.352, "map_at_10": 0.36186, "map_at_20": 0.36402, "map_at_50": 0.36631, "map_at_100": 0.36762, "recall_at_1": 0.26, "recall_at_3": 0.42, "recall_at_5": 0.52, "recall_at_10": 0.6, "recall_at_20": 0.64, "recall_at_50": 0.72, "recall_at_100": 0.82, "precision_at_1": 0.26, "precision_at_3": 0.14, "precision_at_5": 0.104, "precision_at_10": 0.06, "precision_at_20": 0.032, "precision_at_50": 0.0144, "precision_at_100": 0.0082, "mrr_at_1": 0.26, "mrr_at_3": 0.33, "mrr_at_5": 0.35200000000000004, "mrr_at_10": 0.3618571428571428, "mrr_at_20": 0.3640208855472013, "mrr_at_50": 0.3663135326849713, "mrr_at_100": 0.36761869531109714, "naucs_at_1_max": -0.08724812812581306, "naucs_at_1_std": -0.14127952357549653, "naucs_at_1_diff1": -0.07279350119973407, "naucs_at_3_max": -0.16776835938512602, "naucs_at_3_std": -0.24968245327526767, "naucs_at_3_diff1": 0.08098089535215273, 
"naucs_at_5_max": -0.08966755536209803, "naucs_at_5_std": -0.31759072854888726, "naucs_at_5_diff1": 0.1500625714130255, "naucs_at_10_max": -0.003739982190561103, "naucs_at_10_std": -0.44295043039477605, "naucs_at_10_diff1": 0.27548233897298857, "naucs_at_20_max": -0.11288159407331708, "naucs_at_20_std": -0.493326095286754, "naucs_at_20_diff1": 0.25006386511687295, "naucs_at_50_max": -0.04201007326007331, "naucs_at_50_std": -0.33634768009767935, "naucs_at_50_diff1": 0.2730845543345547, "naucs_at_100_max": 0.09676175790285234, "naucs_at_100_std": -0.261978191430775, "naucs_at_100_diff1": 0.36017182509086904}, "NanoHotpotQA": {"ndcg_at_1": 0.16, "ndcg_at_3": 0.3188, "ndcg_at_5": 0.35924, "ndcg_at_10": 0.38582, "ndcg_at_20": 0.40544, "ndcg_at_50": 0.43355, "ndcg_at_100": 0.44683, "map_at_1": 0.16, "map_at_3": 0.28333, "map_at_5": 0.30533, "map_at_10": 0.31672, "map_at_20": 0.32177, "map_at_50": 0.3264, "map_at_100": 0.32766, "recall_at_1": 0.16, "recall_at_3": 0.42, "recall_at_5": 0.52, "recall_at_10": 0.6, "recall_at_20": 0.68, "recall_at_50": 0.82, "recall_at_100": 0.9, "precision_at_1": 0.16, "precision_at_3": 0.14, "precision_at_5": 0.104, "precision_at_10": 0.06, "precision_at_20": 0.034, "precision_at_50": 0.0164, "precision_at_100": 0.009, "mrr_at_1": 0.16, "mrr_at_3": 0.2833333333333333, "mrr_at_5": 0.30533333333333335, "mrr_at_10": 0.3167222222222222, "mrr_at_20": 0.3217717086834734, "mrr_at_50": 0.32639956416132887, "mrr_at_100": 0.3276620100487747, "naucs_at_1_max": -0.07647104009832548, "naucs_at_1_std": -0.4103164848671072, "naucs_at_1_diff1": 0.24020586879705016, "naucs_at_3_max": 0.20854395105892098, "naucs_at_3_std": -0.3210462192498121, "naucs_at_3_diff1": 0.30468932265339455, "naucs_at_5_max": 0.21500081614886546, "naucs_at_5_std": -0.3327983024103594, "naucs_at_5_diff1": 0.3587518363349475, "naucs_at_10_max": 0.403146334223805, "naucs_at_10_std": -0.30955773226476685, "naucs_at_10_diff1": 0.5682991985752444, "naucs_at_20_max": 0.472012991500242, 
"naucs_at_20_std": -0.4629258517034064, "naucs_at_20_diff1": 0.6807062400663397, "naucs_at_50_max": 0.28637515144839676, "naucs_at_50_std": -0.8596211036457774, "naucs_at_50_diff1": 0.52379116642802, "naucs_at_100_max": -0.04136321195144804, "naucs_at_100_std": -0.994864612511675, "naucs_at_100_diff1": 0.1664799253034547}, "NanoMSMARCO": {"ndcg_at_1": 0.34, "ndcg_at_3": 0.46095, "ndcg_at_5": 0.49453, "ndcg_at_10": 0.5518, "ndcg_at_20": 0.57191, "ndcg_at_50": 0.59249, "ndcg_at_100": 0.5956, "map_at_1": 0.34, "map_at_3": 0.43333, "map_at_5": 0.45233, "map_at_10": 0.4754, "map_at_20": 0.48084, "map_at_50": 0.48445, "map_at_100": 0.48469, "recall_at_1": 0.34, "recall_at_3": 0.54, "recall_at_5": 0.62, "recall_at_10": 0.8, "recall_at_20": 0.88, "recall_at_50": 0.98, "recall_at_100": 1.0, "precision_at_1": 0.34, "precision_at_3": 0.18, "precision_at_5": 0.124, "precision_at_10": 0.08, "precision_at_20": 0.044, "precision_at_50": 0.0196, "precision_at_100": 0.01, "mrr_at_1": 0.34, "mrr_at_3": 0.43333333333333335, "mrr_at_5": 0.45233333333333337, "mrr_at_10": 0.4754047619047619, "mrr_at_20": 0.4808427960927961, "mrr_at_50": 0.4844530774780775, "mrr_at_100": 0.48468563561761235, "naucs_at_1_max": -0.12573200137788498, "naucs_at_1_std": -0.2152945229073373, "naucs_at_1_diff1": 0.4961710697156785, "naucs_at_3_max": 0.034147425745325255, "naucs_at_3_std": -0.2938234558639661, "naucs_at_3_diff1": 0.3673140507349058, "naucs_at_5_max": 0.010429806479401406, "naucs_at_5_std": -0.4382364704796484, "naucs_at_5_diff1": 0.42380703319693563, "naucs_at_10_max": -0.026423576423576078, "naucs_at_10_std": -0.386513486513486, "naucs_at_10_diff1": 0.19055944055943985, "naucs_at_20_max": 0.05695195672924083, "naucs_at_20_std": -0.06442888959592562, "naucs_at_20_diff1": 0.09592745784282593, "naucs_at_50_max": 1.0, "naucs_at_50_std": 0.8692810457516335, "naucs_at_50_diff1": 0.722222222222216, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoNFCorpus": 
{"ndcg_at_1": 0.02, "ndcg_at_3": 0.03, "ndcg_at_5": 0.03, "ndcg_at_10": 0.05449, "ndcg_at_20": 0.06515, "ndcg_at_50": 0.07608, "ndcg_at_100": 0.07912, "map_at_1": 0.02, "map_at_3": 0.02667, "map_at_5": 0.02667, "map_at_10": 0.03597, "map_at_20": 0.03917, "map_at_50": 0.04054, "map_at_100": 0.04075, "recall_at_1": 0.02, "recall_at_3": 0.04, "recall_at_5": 0.04, "recall_at_10": 0.12, "recall_at_20": 0.16, "recall_at_50": 0.22, "recall_at_100": 0.24, "precision_at_1": 0.02, "precision_at_3": 0.01333, "precision_at_5": 0.008, "precision_at_10": 0.012, "precision_at_20": 0.008, "precision_at_50": 0.0044, "precision_at_100": 0.0024, "mrr_at_1": 0.02, "mrr_at_3": 0.026666666666666665, "mrr_at_5": 0.026666666666666665, "mrr_at_10": 0.03596825396825397, "mrr_at_20": 0.03917338217338217, "mrr_at_50": 0.04054041410457106, "mrr_at_100": 0.04075318006201786, "naucs_at_1_max": 0.32385296485108656, "naucs_at_1_std": 0.32385296485108656, "naucs_at_1_diff1": -0.318486718540381, "naucs_at_3_max": 0.6619264824255432, "naucs_at_3_std": 0.3238529648510868, "naucs_at_3_diff1": 0.34075664072980955, "naucs_at_5_max": 0.6619264824255433, "naucs_at_5_std": 0.3238529648510868, "naucs_at_5_diff1": 0.3407566407298095, "naucs_at_10_max": 0.31333522888825693, "naucs_at_10_std": 0.19045588095915075, "naucs_at_10_diff1": -0.09875841152497399, "naucs_at_20_max": 0.3148333077277616, "naucs_at_20_std": 0.12286833614994636, "naucs_at_20_diff1": -0.13746351206022434, "naucs_at_50_max": 0.2958324268887717, "naucs_at_50_std": 0.0701432700376046, "naucs_at_50_diff1": 0.19709730552879404, "naucs_at_100_max": 0.22936137071651094, "naucs_at_100_std": 0.003444763958782701, "naucs_at_100_diff1": 0.263868919242751}, "NanoNQ": {"ndcg_at_1": 0.36, "ndcg_at_3": 0.43047, "ndcg_at_5": 0.47179, "ndcg_at_10": 0.4905, "ndcg_at_20": 0.52632, "ndcg_at_50": 0.53779, "ndcg_at_100": 0.54742, "map_at_1": 0.36, "map_at_3": 0.41333, "map_at_5": 0.43633, "map_at_10": 0.44363, "map_at_20": 0.45371, "map_at_50": 0.45536, 
"map_at_100": 0.45617, "recall_at_1": 0.36, "recall_at_3": 0.48, "recall_at_5": 0.58, "recall_at_10": 0.64, "recall_at_20": 0.78, "recall_at_50": 0.84, "recall_at_100": 0.9, "precision_at_1": 0.36, "precision_at_3": 0.16, "precision_at_5": 0.116, "precision_at_10": 0.064, "precision_at_20": 0.039, "precision_at_50": 0.0168, "precision_at_100": 0.009, "mrr_at_1": 0.36, "mrr_at_3": 0.41333333333333333, "mrr_at_5": 0.43633333333333335, "mrr_at_10": 0.4436349206349206, "mrr_at_20": 0.45370931845931844, "mrr_at_50": 0.45535873949898337, "mrr_at_100": 0.45617452118754903, "naucs_at_1_max": 0.5815619587466542, "naucs_at_1_std": 0.0012858867369969628, "naucs_at_1_diff1": 0.4922059518186112, "naucs_at_3_max": 0.5974118767862813, "naucs_at_3_std": 0.003016830739917168, "naucs_at_3_diff1": 0.5446173388377262, "naucs_at_5_max": 0.44954581072060806, "naucs_at_5_std": -0.09382708883536021, "naucs_at_5_diff1": 0.3967263545868762, "naucs_at_10_max": 0.495497509260442, "naucs_at_10_std": -0.09308340784263644, "naucs_at_10_diff1": 0.2841678375271422, "naucs_at_20_max": 0.6028191218064631, "naucs_at_20_std": -0.29661056243334705, "naucs_at_20_diff1": 0.36124634858812066, "naucs_at_50_max": 0.5359597447226317, "naucs_at_50_std": -0.41022336769759604, "naucs_at_50_diff1": 0.2576705940108003, "naucs_at_100_max": 0.6840336134453773, "naucs_at_100_std": -0.12819794584500657, "naucs_at_100_diff1": 0.24855275443510672}, "NanoQuoraRetrieval": {"ndcg_at_1": 0.74, "ndcg_at_3": 0.83309, "ndcg_at_5": 0.84944, "ndcg_at_10": 0.86813, "ndcg_at_20": 0.86813, "ndcg_at_50": 0.86813, "ndcg_at_100": 0.86813, "map_at_1": 0.74, "map_at_3": 0.81, "map_at_5": 0.819, "map_at_10": 0.82633, "map_at_20": 0.82633, "map_at_50": 0.82633, "map_at_100": 0.82633, "recall_at_1": 0.74, "recall_at_3": 0.9, "recall_at_5": 0.94, "recall_at_10": 1.0, "recall_at_20": 1.0, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.74, "precision_at_3": 0.3, "precision_at_5": 0.188, "precision_at_10": 0.1, 
"precision_at_20": 0.05, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.74, "mrr_at_3": 0.81, "mrr_at_5": 0.8190000000000001, "mrr_at_10": 0.8263333333333334, "mrr_at_20": 0.8263333333333334, "mrr_at_50": 0.8263333333333334, "mrr_at_100": 0.8263333333333334, "naucs_at_1_max": 0.19163861968290002, "naucs_at_1_std": -0.5670491869753861, "naucs_at_1_diff1": 0.8520335752808077, "naucs_at_3_max": 0.5327731092436961, "naucs_at_3_std": -0.1416433239962656, "naucs_at_3_diff1": 0.8954248366013078, "naucs_at_5_max": 0.2648615001556181, "naucs_at_5_std": -0.29831932773108943, "naucs_at_5_diff1": 0.9128540305010903, "naucs_at_10_max": 1.0, "naucs_at_10_std": 1.0, "naucs_at_10_diff1": 1.0, "naucs_at_20_max": 1.0, "naucs_at_20_std": 1.0, "naucs_at_20_diff1": 1.0, "naucs_at_50_max": 1.0, "naucs_at_50_std": 1.0, "naucs_at_50_diff1": 1.0, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoSCIDOCS": {"ndcg_at_1": 0.06, "ndcg_at_3": 0.13786, "ndcg_at_5": 0.19464, "ndcg_at_10": 0.21959, "ndcg_at_20": 0.24889, "ndcg_at_50": 0.26101, "ndcg_at_100": 0.27449, "map_at_1": 0.06, "map_at_3": 0.11667, "map_at_5": 0.14767, "map_at_10": 0.15744, "map_at_20": 0.16493, "map_at_50": 0.16694, "map_at_100": 0.16828, "recall_at_1": 0.06, "recall_at_3": 0.2, "recall_at_5": 0.34, "recall_at_10": 0.42, "recall_at_20": 0.54, "recall_at_50": 0.6, "recall_at_100": 0.68, "precision_at_1": 0.06, "precision_at_3": 0.06667, "precision_at_5": 0.068, "precision_at_10": 0.042, "precision_at_20": 0.027, "precision_at_50": 0.012, "precision_at_100": 0.0068, "mrr_at_1": 0.06, "mrr_at_3": 0.11666666666666665, "mrr_at_5": 0.14766666666666667, "mrr_at_10": 0.15744444444444444, "mrr_at_20": 0.16492564745196325, "mrr_at_50": 0.16693622946254524, "mrr_at_100": 0.16827569948993545, "naucs_at_1_max": 0.1714515696270459, "naucs_at_1_std": -0.24335927019050171, "naucs_at_1_diff1": 0.1923799302387979, "naucs_at_3_max": -0.16929286406199542, "naucs_at_3_std": 
0.12043913464643208, "naucs_at_3_diff1": 0.04756215692605749, "naucs_at_5_max": -0.04645062137311537, "naucs_at_5_std": 0.17053975993004572, "naucs_at_5_diff1": -0.13826545483452105, "naucs_at_10_max": -0.11812738160043512, "naucs_at_10_std": 0.28138528138528157, "naucs_at_10_diff1": -0.2209347538688852, "naucs_at_20_max": -0.16965352449223434, "naucs_at_20_std": 0.21908254841488134, "naucs_at_20_diff1": -0.1887138451279486, "naucs_at_50_max": -0.2359157019887209, "naucs_at_50_std": 0.300296823983378, "naucs_at_50_diff1": -0.24722469575541728, "naucs_at_100_max": -0.2122866422500169, "naucs_at_100_std": 0.18288300739409874, "naucs_at_100_diff1": -0.1093566443231286}, "NanoArguAna": {"ndcg_at_1": 0.28, "ndcg_at_3": 0.43571, "ndcg_at_5": 0.49425, "ndcg_at_10": 0.54753, "ndcg_at_20": 0.5634, "ndcg_at_50": 0.57607, "ndcg_at_100": 0.57947, "map_at_1": 0.28, "map_at_3": 0.39333, "map_at_5": 0.42633, "map_at_10": 0.44919, "map_at_20": 0.45394, "map_at_50": 0.4563, "map_at_100": 0.45664, "recall_at_1": 0.28, "recall_at_3": 0.56, "recall_at_5": 0.7, "recall_at_10": 0.86, "recall_at_20": 0.92, "recall_at_50": 0.98, "recall_at_100": 1.0, "precision_at_1": 0.28, "precision_at_3": 0.18667, "precision_at_5": 0.14, "precision_at_10": 0.086, "precision_at_20": 0.046, "precision_at_50": 0.0196, "precision_at_100": 0.01, "mrr_at_1": 0.28, "mrr_at_3": 0.39666666666666667, "mrr_at_5": 0.4286666666666667, "mrr_at_10": 0.4518809523809524, "mrr_at_20": 0.4566284271284271, "mrr_at_50": 0.45898686868686867, "mrr_at_100": 0.4593316962730756, "naucs_at_1_max": -0.5253086764623724, "naucs_at_1_std": -0.21492820827979234, "naucs_at_1_diff1": -0.4177887032795131, "naucs_at_3_max": -0.22532932091755642, "naucs_at_3_std": -0.5419600272541452, "naucs_at_3_diff1": -0.06129343629343672, "naucs_at_5_max": -0.06146640245000927, "naucs_at_5_std": -0.46975319762205003, "naucs_at_5_diff1": 0.04997297784182997, "naucs_at_10_max": -0.1535956768740457, "naucs_at_10_std": -0.29250381044755214, 
"naucs_at_10_diff1": -0.27795482887626133, "naucs_at_20_max": -0.030929038281980327, "naucs_at_20_std": -0.45284780578898093, "naucs_at_20_diff1": -0.22000466853408016, "naucs_at_50_max": 0.8692810457516335, "naucs_at_50_std": 0.722222222222216, "naucs_at_50_diff1": 0.8692810457516335, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoSciFact": {"ndcg_at_1": 0.58, "ndcg_at_3": 0.66571, "ndcg_at_5": 0.67345, "ndcg_at_10": 0.69928, "ndcg_at_20": 0.70399, "ndcg_at_50": 0.7213, "ndcg_at_100": 0.7213, "map_at_1": 0.58, "map_at_3": 0.64667, "map_at_5": 0.65067, "map_at_10": 0.6613, "map_at_20": 0.66241, "map_at_50": 0.6658, "map_at_100": 0.6658, "recall_at_1": 0.58, "recall_at_3": 0.72, "recall_at_5": 0.74, "recall_at_10": 0.82, "recall_at_20": 0.84, "recall_at_50": 0.92, "recall_at_100": 0.92, "precision_at_1": 0.58, "precision_at_3": 0.24, "precision_at_5": 0.148, "precision_at_10": 0.082, "precision_at_20": 0.042, "precision_at_50": 0.0184, "precision_at_100": 0.0092, "mrr_at_1": 0.58, "mrr_at_3": 0.6466666666666667, "mrr_at_5": 0.6506666666666666, "mrr_at_10": 0.6613015873015873, "mrr_at_20": 0.6624126984126985, "mrr_at_50": 0.6658038753517015, "mrr_at_100": 0.6658038753517015, "naucs_at_1_max": 0.44475723365353975, "naucs_at_1_std": -0.16545259308703608, "naucs_at_1_diff1": 0.6190614388948545, "naucs_at_3_max": 0.29262057387057366, "naucs_at_3_std": -0.22737332112332106, "naucs_at_3_diff1": 0.6904380341880342, "naucs_at_5_max": 0.2628441669032076, "naucs_at_5_std": -0.1416000973196547, "naucs_at_5_diff1": 0.7486314423583792, "naucs_at_10_max": 0.4615596431325048, "naucs_at_10_std": -0.2388478907368644, "naucs_at_10_diff1": 0.6128978962440808, "naucs_at_20_max": 0.38285468826705943, "naucs_at_20_std": -0.3610702012763866, "naucs_at_20_diff1": 0.6358615611192938, "naucs_at_50_max": 0.49194677871148357, "naucs_at_50_std": 0.18557422969187634, "naucs_at_50_diff1": 0.4025443510737607, "naucs_at_100_max": 0.49194677871148357, 
"naucs_at_100_std": 0.18557422969187634, "naucs_at_100_diff1": 0.4025443510737607}, "NanoTouche2020": {"ndcg_at_1": 0.02041, "ndcg_at_3": 0.06657, "ndcg_at_5": 0.08236, "ndcg_at_10": 0.10811, "ndcg_at_20": 0.15496, "ndcg_at_50": 0.19209, "ndcg_at_100": 0.21472, "map_at_1": 0.02041, "map_at_3": 0.05442, "map_at_5": 0.06259, "map_at_10": 0.07285, "map_at_20": 0.08595, "map_at_50": 0.09226, "map_at_100": 0.0941, "recall_at_1": 0.02041, "recall_at_3": 0.10204, "recall_at_5": 0.14286, "recall_at_10": 0.22449, "recall_at_20": 0.40816, "recall_at_50": 0.59184, "recall_at_100": 0.73469, "precision_at_1": 0.02041, "precision_at_3": 0.03401, "precision_at_5": 0.02857, "precision_at_10": 0.02245, "precision_at_20": 0.02041, "precision_at_50": 0.01184, "precision_at_100": 0.00735, "mrr_at_1": 0.02040816326530612, "mrr_at_3": 0.05442176870748299, "mrr_at_5": 0.06258503401360543, "mrr_at_10": 0.07284580498866214, "mrr_at_20": 0.0859484702972098, "mrr_at_50": 0.09226003256598633, "mrr_at_100": 0.09410092602585393, "naucs_at_1_max": -0.5101944889290618, "naucs_at_1_std": -0.5101944889290618, "naucs_at_1_diff1": -0.5871161195675973, "naucs_at_3_max": -0.4006290577557756, "naucs_at_3_std": -0.316365390096293, "naucs_at_3_diff1": -0.11244275867306988, "naucs_at_5_max": -0.33266438805656484, "naucs_at_5_std": -0.07660401956783436, "naucs_at_5_diff1": -0.13302757832937354, "naucs_at_10_max": -0.12907172004635845, "naucs_at_10_std": -0.03867532963147697, "naucs_at_10_diff1": 0.04723284620090628, "naucs_at_20_max": 0.014772561728743815, "naucs_at_20_std": 0.11665187098850476, "naucs_at_20_diff1": -0.009944541296054212, "naucs_at_50_max": 0.12152224895073281, "naucs_at_50_std": -0.09214223340642187, "naucs_at_50_diff1": -0.000978461361136742, "naucs_at_100_max": 0.24618545713066825, "naucs_at_100_std": -0.28701704043911425, "naucs_at_100_diff1": -0.03315989423650674}}}
results/metrics_modernbert-trained-v0_contextual.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"model": "modernbert-trained-v0", "date": "2025-01-19 23:17:13", "is_contextual": true, "metrics": {"chunked-mldr": {"ndcg_at_1": 0.82995, "ndcg_at_3": 0.89833, "ndcg_at_5": 0.90568, "ndcg_at_10": 0.90828, "ndcg_at_20": 0.91043, "ndcg_at_50": 0.91224, "ndcg_at_100": 0.91352, "map_at_1": 0.82995, "map_at_3": 0.88249, "map_at_5": 0.88661, "map_at_10": 0.88773, "map_at_20": 0.88832, "map_at_50": 0.88861, "map_at_100": 0.88872, "recall_at_1": 0.82995, "recall_at_3": 0.94375, "recall_at_5": 0.96141, "recall_at_10": 0.96926, "recall_at_20": 0.97776, "recall_at_50": 0.98692, "recall_at_100": 0.99477, "precision_at_1": 0.82995, "precision_at_3": 0.31458, "precision_at_5": 0.19228, "precision_at_10": 0.09693, "precision_at_20": 0.04889, "precision_at_50": 0.01974, "precision_at_100": 0.00995, "mrr_at_1": 0.8299542184434271, "mrr_at_3": 0.882494004796163, "mrr_at_5": 0.8866143448877263, "mrr_at_10": 0.8877256636249442, "mrr_at_20": 0.8883183314683091, "mrr_at_50": 0.8886050383032084, "mrr_at_100": 0.8887207747014747, "naucs_at_1_max": 0.6249922493328072, "naucs_at_1_std": 0.046821306000138556, "naucs_at_1_diff1": 0.7960722755499415, "naucs_at_3_max": 0.8312621831507966, "naucs_at_3_std": 0.27839977795413673, "naucs_at_3_diff1": 0.7889064581896198, "naucs_at_5_max": 0.9077920057467096, "naucs_at_5_std": 0.5271891506444805, "naucs_at_5_diff1": 0.8107081414688172, "naucs_at_10_max": 0.9143877987551429, "naucs_at_10_std": 0.5429694793413634, "naucs_at_10_diff1": 0.8442960430548601, "naucs_at_20_max": 0.9585268203237728, "naucs_at_20_std": 0.5718021294805872, "naucs_at_20_diff1": 0.860191714227169, "naucs_at_50_max": 0.9499225835874832, "naucs_at_50_std": 0.5895788616421765, "naucs_at_50_diff1": 0.8529881650714412, "naucs_at_100_max": 1.0, "naucs_at_100_std": 0.7221886734309177, "naucs_at_100_diff1": 1.0}, "NanoClimateFEVER": {"ndcg_at_1": 0.04, "ndcg_at_3": 0.12309, "ndcg_at_5": 0.14806, "ndcg_at_10": 0.15962, "ndcg_at_20": 0.18583, "ndcg_at_50": 0.21013, "ndcg_at_100": 
0.22652, "map_at_1": 0.04, "map_at_3": 0.10333, "map_at_5": 0.11733, "map_at_10": 0.12133, "map_at_20": 0.12899, "map_at_50": 0.13312, "map_at_100": 0.13461, "recall_at_1": 0.04, "recall_at_3": 0.18, "recall_at_5": 0.24, "recall_at_10": 0.28, "recall_at_20": 0.38, "recall_at_50": 0.5, "recall_at_100": 0.6, "precision_at_1": 0.04, "precision_at_3": 0.06, "precision_at_5": 0.048, "precision_at_10": 0.028, "precision_at_20": 0.019, "precision_at_50": 0.01, "precision_at_100": 0.006, "mrr_at_1": 0.04, "mrr_at_3": 0.10333333333333333, "mrr_at_5": 0.11733333333333333, "mrr_at_10": 0.12133333333333333, "mrr_at_20": 0.12899358974358976, "mrr_at_50": 0.13312395042066252, "mrr_at_100": 0.13460938074806753, "naucs_at_1_max": -0.23812718003756375, "naucs_at_1_std": -0.49490206600482967, "naucs_at_1_diff1": 0.12342366514623021, "naucs_at_3_max": -0.08567519208504376, "naucs_at_3_std": -0.38908185103322457, "naucs_at_3_diff1": -0.12325018419113777, "naucs_at_5_max": 0.0101545650611069, "naucs_at_5_std": -0.2174394919722025, "naucs_at_5_diff1": -0.22792355619458435, "naucs_at_10_max": 0.08863623666126585, "naucs_at_10_std": -0.1670205039387677, "naucs_at_10_diff1": -0.23085088552433108, "naucs_at_20_max": 0.2808203155459671, "naucs_at_20_std": -0.006186156525355518, "naucs_at_20_diff1": -0.16533672965456297, "naucs_at_50_max": 0.17694870086608921, "naucs_at_50_std": -0.15832111925383088, "naucs_at_50_diff1": -0.2779480346435711, "naucs_at_100_max": 0.09035322054021944, "naucs_at_100_std": 0.08382309290590671, "naucs_at_100_diff1": -0.4363609379637881}, "NanoDBPedia": {"ndcg_at_1": 0.04, "ndcg_at_3": 0.08524, "ndcg_at_5": 0.10159, "ndcg_at_10": 0.12682, "ndcg_at_20": 0.15277, "ndcg_at_50": 0.19669, "ndcg_at_100": 0.22299, "map_at_1": 0.04, "map_at_3": 0.07333, "map_at_5": 0.08233, "map_at_10": 0.09239, "map_at_20": 0.09984, "map_at_50": 0.10707, "map_at_100": 0.10948, "recall_at_1": 0.04, "recall_at_3": 0.12, "recall_at_5": 0.16, "recall_at_10": 0.24, "recall_at_20": 0.34, 
"recall_at_50": 0.56, "recall_at_100": 0.72, "precision_at_1": 0.04, "precision_at_3": 0.04, "precision_at_5": 0.032, "precision_at_10": 0.024, "precision_at_20": 0.017, "precision_at_50": 0.0112, "precision_at_100": 0.0072, "mrr_at_1": 0.04, "mrr_at_3": 0.07333333333333333, "mrr_at_5": 0.08233333333333333, "mrr_at_10": 0.0923888888888889, "mrr_at_20": 0.09984077034077034, "mrr_at_50": 0.10707036170856046, "mrr_at_100": 0.10948346773145612, "naucs_at_1_max": -0.5747249798765763, "naucs_at_1_std": -0.4995975315266971, "naucs_at_1_diff1": -0.053662463107056656, "naucs_at_3_max": -0.13358923324803332, "naucs_at_3_std": -0.38233342811107945, "naucs_at_3_diff1": 0.3889678703440432, "naucs_at_5_max": -0.04781840528499009, "naucs_at_5_std": -0.1768320786603165, "naucs_at_5_diff1": 0.4289445383315411, "naucs_at_10_max": -0.06826623532231024, "naucs_at_10_std": -0.2835789599808294, "naucs_at_10_diff1": 0.2665348670021567, "naucs_at_20_max": -0.13927237075704177, "naucs_at_20_std": -0.31360131429025656, "naucs_at_20_diff1": 0.15440260738228337, "naucs_at_50_max": 0.1288893935952756, "naucs_at_50_std": -0.25207245060186284, "naucs_at_50_diff1": 0.3805359981830573, "naucs_at_100_max": 0.2197420634920639, "naucs_at_100_std": -0.3223443223443217, "naucs_at_100_diff1": 0.26137057387057444}, "NanoFEVER": {"ndcg_at_1": 0.54, "ndcg_at_3": 0.67357, "ndcg_at_5": 0.69853, "ndcg_at_10": 0.72465, "ndcg_at_20": 0.73478, "ndcg_at_50": 0.74291, "ndcg_at_100": 0.74637, "map_at_1": 0.54, "map_at_3": 0.64333, "map_at_5": 0.65733, "map_at_10": 0.66825, "map_at_20": 0.67106, "map_at_50": 0.67244, "map_at_100": 0.67281, "recall_at_1": 0.54, "recall_at_3": 0.76, "recall_at_5": 0.82, "recall_at_10": 0.9, "recall_at_20": 0.94, "recall_at_50": 0.98, "recall_at_100": 1.0, "precision_at_1": 0.54, "precision_at_3": 0.25333, "precision_at_5": 0.164, "precision_at_10": 0.09, "precision_at_20": 0.047, "precision_at_50": 0.0196, "precision_at_100": 0.01, "mrr_at_1": 0.54, "mrr_at_3": 0.6433333333333333, 
"mrr_at_5": 0.6573333333333333, "mrr_at_10": 0.6682460317460317, "mrr_at_20": 0.6710642135642135, "mrr_at_50": 0.6724356421356421, "mrr_at_100": 0.6728060125060125, "naucs_at_1_max": 0.569948042566197, "naucs_at_1_std": 0.13675641132505315, "naucs_at_1_diff1": 0.6908949459587118, "naucs_at_3_max": 0.6023189409016169, "naucs_at_3_std": 0.27727784026996505, "naucs_at_3_diff1": 0.5694816994029588, "naucs_at_5_max": 0.6074457539376594, "naucs_at_5_std": 0.18790615706575683, "naucs_at_5_diff1": 0.4390351360281983, "naucs_at_10_max": 0.7134453781512606, "naucs_at_10_std": 0.07497665732960017, "naucs_at_10_diff1": 0.5085901027077511, "naucs_at_20_max": 0.9564270152505424, "naucs_at_20_std": 0.6640211640211642, "naucs_at_20_diff1": 0.8638344226579531, "naucs_at_50_max": 1.0, "naucs_at_50_std": 0.8692810457516335, "naucs_at_50_diff1": 1.0, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoFiQA2018": {"ndcg_at_1": 0.18, "ndcg_at_3": 0.32095, "ndcg_at_5": 0.3373, "ndcg_at_10": 0.35109, "ndcg_at_20": 0.37576, "ndcg_at_50": 0.39919, "ndcg_at_100": 0.40237, "map_at_1": 0.18, "map_at_3": 0.28667, "map_at_5": 0.29567, "map_at_10": 0.30186, "map_at_20": 0.30828, "map_at_50": 0.31191, "map_at_100": 0.31217, "recall_at_1": 0.18, "recall_at_3": 0.42, "recall_at_5": 0.46, "recall_at_10": 0.5, "recall_at_20": 0.6, "recall_at_50": 0.72, "recall_at_100": 0.74, "precision_at_1": 0.18, "precision_at_3": 0.14, "precision_at_5": 0.092, "precision_at_10": 0.05, "precision_at_20": 0.03, "precision_at_50": 0.0144, "precision_at_100": 0.0074, "mrr_at_1": 0.16, "mrr_at_3": 0.18, "mrr_at_5": 0.19, "mrr_at_10": 0.19285714285714284, "mrr_at_20": 0.19419047619047616, "mrr_at_50": 0.19419047619047616, "mrr_at_100": 0.19419047619047616, "naucs_at_1_max": -0.07108023716801741, "naucs_at_1_std": 0.26719994386555795, "naucs_at_1_diff1": -0.19369890888678382, "naucs_at_3_max": -0.24802343365217622, "naucs_at_3_std": 0.23423283303522796, "naucs_at_3_diff1": 
0.18653601887134835, "naucs_at_5_max": -0.22664318940369493, "naucs_at_5_std": 0.2269585556227169, "naucs_at_5_diff1": 0.2801503245644005, "naucs_at_10_max": -0.27805463024650257, "naucs_at_10_std": 0.1684743504330446, "naucs_at_10_diff1": 0.3293804130579611, "naucs_at_20_max": -0.31350549124369276, "naucs_at_20_std": 0.18875037102997916, "naucs_at_20_diff1": 0.2483823092905904, "naucs_at_50_max": -0.39388736263736257, "naucs_at_50_std": 0.23481379731379795, "naucs_at_50_diff1": 0.29945054945054983, "naucs_at_100_max": -0.2674668504926804, "naucs_at_100_std": 0.4006731276103972, "naucs_at_100_diff1": 0.418271765135234}, "NanoHotpotQA": {"ndcg_at_1": 0.16, "ndcg_at_3": 0.31357, "ndcg_at_5": 0.35401, "ndcg_at_10": 0.37276, "ndcg_at_20": 0.39821, "ndcg_at_50": 0.42174, "ndcg_at_100": 0.43537, "map_at_1": 0.16, "map_at_3": 0.27667, "map_at_5": 0.29867, "map_at_10": 0.30602, "map_at_20": 0.31307, "map_at_50": 0.31673, "map_at_100": 0.31812, "recall_at_1": 0.16, "recall_at_3": 0.42, "recall_at_5": 0.52, "recall_at_10": 0.58, "recall_at_20": 0.68, "recall_at_50": 0.8, "recall_at_100": 0.88, "precision_at_1": 0.16, "precision_at_3": 0.14, "precision_at_5": 0.104, "precision_at_10": 0.058, "precision_at_20": 0.034, "precision_at_50": 0.016, "precision_at_100": 0.0088, "mrr_at_1": 0.16, "mrr_at_3": 0.27666666666666667, "mrr_at_5": 0.2986666666666667, "mrr_at_10": 0.30602380952380953, "mrr_at_20": 0.3130656647274294, "mrr_at_50": 0.31672586496759864, "mrr_at_100": 0.31812099802081995, "naucs_at_1_max": -0.09663542786910433, "naucs_at_1_std": -0.31663850053771714, "naucs_at_1_diff1": 0.2659010600706714, "naucs_at_3_max": 0.05640666718511012, "naucs_at_3_std": -0.35889260440158643, "naucs_at_3_diff1": 0.12510044845374182, "naucs_at_5_max": 0.146172261820556, "naucs_at_5_std": -0.36707655476358886, "naucs_at_5_diff1": 0.21092007182109995, "naucs_at_10_max": 0.05952346402762859, "naucs_at_10_std": -0.4291145485677803, "naucs_at_10_diff1": 0.16585889659575714, "naucs_at_20_max": 
0.27855711422845664, "naucs_at_20_std": -0.4890470596365135, "naucs_at_20_diff1": 0.47771404878722934, "naucs_at_50_max": 0.04090909090909088, "naucs_at_50_std": -0.7733766233766237, "naucs_at_50_diff1": 0.3025974025974016, "naucs_at_100_max": -0.05528157811008395, "naucs_at_100_std": -0.7130130448615968, "naucs_at_100_diff1": 0.004374801145402827}, "NanoMSMARCO": {"ndcg_at_1": 0.32, "ndcg_at_3": 0.45095, "ndcg_at_5": 0.49139, "ndcg_at_10": 0.52211, "ndcg_at_20": 0.56262, "ndcg_at_50": 0.57824, "ndcg_at_100": 0.58135, "map_at_1": 0.32, "map_at_3": 0.42, "map_at_5": 0.442, "map_at_10": 0.45378, "map_at_20": 0.4649, "map_at_50": 0.4673, "map_at_100": 0.46753, "recall_at_1": 0.32, "recall_at_3": 0.54, "recall_at_5": 0.64, "recall_at_10": 0.74, "recall_at_20": 0.9, "recall_at_50": 0.98, "recall_at_100": 1.0, "precision_at_1": 0.32, "precision_at_3": 0.18, "precision_at_5": 0.128, "precision_at_10": 0.074, "precision_at_20": 0.045, "precision_at_50": 0.0196, "precision_at_100": 0.01, "mrr_at_1": 0.32, "mrr_at_3": 0.42, "mrr_at_5": 0.442, "mrr_at_10": 0.4537777777777778, "mrr_at_20": 0.46490208647561587, "mrr_at_50": 0.46729897825230304, "mrr_at_100": 0.4675315363918379, "naucs_at_1_max": -0.08707053408969277, "naucs_at_1_std": -0.10783474258803391, "naucs_at_1_diff1": 0.6010382104249171, "naucs_at_3_max": -0.018949181739879455, "naucs_at_3_std": -0.21688755522213876, "naucs_at_3_diff1": 0.3648689950265341, "naucs_at_5_max": -0.08621790777877113, "naucs_at_5_std": -0.317505428534934, "naucs_at_5_diff1": 0.3776025035125816, "naucs_at_10_max": 0.06273062730627342, "naucs_at_10_std": -0.2844977900328453, "naucs_at_10_diff1": 0.4388305421515753, "naucs_at_20_max": -0.1799253034547126, "naucs_at_20_std": -0.09365079365079096, "naucs_at_20_diff1": 0.2514472455648956, "naucs_at_50_max": 1.0, "naucs_at_50_std": 1.0, "naucs_at_50_diff1": 1.0, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoNFCorpus": {"ndcg_at_1": 0.0, "ndcg_at_3": 0.02524, 
"ndcg_at_5": 0.02524, "ndcg_at_10": 0.03867, "ndcg_at_20": 0.05868, "ndcg_at_50": 0.07434, "ndcg_at_100": 0.07744, "map_at_1": 0.0, "map_at_3": 0.02, "map_at_5": 0.02, "map_at_10": 0.02583, "map_at_20": 0.03119, "map_at_50": 0.03364, "map_at_100": 0.03388, "recall_at_1": 0.0, "recall_at_3": 0.04, "recall_at_5": 0.04, "recall_at_10": 0.08, "recall_at_20": 0.16, "recall_at_50": 0.24, "recall_at_100": 0.26, "precision_at_1": 0.0, "precision_at_3": 0.01333, "precision_at_5": 0.008, "precision_at_10": 0.008, "precision_at_20": 0.008, "precision_at_50": 0.0048, "precision_at_100": 0.0026, "mrr_at_1": 0.0, "mrr_at_3": 0.02, "mrr_at_5": 0.02, "mrr_at_10": 0.025833333333333333, "mrr_at_20": 0.031194444444444445, "mrr_at_50": 0.03364353419684939, "mrr_at_100": 0.03387609233638427, "naucs_at_1_max": NaN, "naucs_at_1_std": NaN, "naucs_at_1_diff1": NaN, "naucs_at_3_max": 0.5492353099007243, "naucs_at_3_std": 0.3238529648510868, "naucs_at_3_diff1": 0.29246042393345845, "naucs_at_5_max": 0.5492353099007246, "naucs_at_5_std": 0.3238529648510868, "naucs_at_5_diff1": 0.2924604239334585, "naucs_at_10_max": 0.1555540649315805, "naucs_at_10_std": -0.08552455057687147, "naucs_at_10_diff1": -0.04889991950630539, "naucs_at_20_max": 0.2825702872945155, "naucs_at_20_std": 0.10904132739284085, "naucs_at_20_diff1": -0.020125979413120276, "naucs_at_50_max": 0.3422597651569615, "naucs_at_50_std": 0.16846393481907507, "naucs_at_50_diff1": 0.24592619218787448, "naucs_at_100_max": 0.28559451880546965, "naucs_at_100_std": 0.15498250990141949, "naucs_at_100_diff1": 0.21517157642161236}, "NanoNQ": {"ndcg_at_1": 0.36, "ndcg_at_3": 0.47571, "ndcg_at_5": 0.49206, "ndcg_at_10": 0.5118, "ndcg_at_20": 0.53147, "ndcg_at_50": 0.55137, "ndcg_at_100": 0.56132, "map_at_1": 0.36, "map_at_3": 0.44667, "map_at_5": 0.45567, "map_at_10": 0.464, "map_at_20": 0.46909, "map_at_50": 0.47233, "map_at_100": 0.47328, "recall_at_1": 0.36, "recall_at_3": 0.56, "recall_at_5": 0.6, "recall_at_10": 0.66, "recall_at_20": 0.74, 
"recall_at_50": 0.84, "recall_at_100": 0.9, "precision_at_1": 0.36, "precision_at_3": 0.18667, "precision_at_5": 0.12, "precision_at_10": 0.066, "precision_at_20": 0.037, "precision_at_50": 0.0168, "precision_at_100": 0.009, "mrr_at_1": 0.36, "mrr_at_3": 0.44666666666666666, "mrr_at_5": 0.4556666666666667, "mrr_at_10": 0.46399999999999997, "mrr_at_20": 0.46909401709401705, "mrr_at_50": 0.47233064713064715, "mrr_at_100": 0.47328293168819485, "naucs_at_1_max": 0.49456778460085044, "naucs_at_1_std": 0.020311761927255645, "naucs_at_1_diff1": 0.7433999895029656, "naucs_at_3_max": 0.3422950261185553, "naucs_at_3_std": -0.07409720645014772, "naucs_at_3_diff1": 0.6112593686123099, "naucs_at_5_max": 0.2590679726921932, "naucs_at_5_std": -0.13119620065301274, "naucs_at_5_diff1": 0.5379044226773517, "naucs_at_10_max": 0.23900560921371458, "naucs_at_10_std": -0.32165023731288755, "naucs_at_10_diff1": 0.5278967108101826, "naucs_at_20_max": 0.3343741129718989, "naucs_at_20_std": -0.47650135842017705, "naucs_at_20_diff1": 0.5577632699403915, "naucs_at_50_max": 0.4594992636229747, "naucs_at_50_std": -0.281357388316152, "naucs_at_50_diff1": 0.6027859597447226, "naucs_at_100_max": 0.6578898225957042, "naucs_at_100_std": -0.1674136321195162, "naucs_at_100_diff1": 0.6373482726423899}, "NanoQuoraRetrieval": {"ndcg_at_1": 0.66, "ndcg_at_3": 0.75047, "ndcg_at_5": 0.77369, "ndcg_at_10": 0.80016, "ndcg_at_20": 0.80951, "ndcg_at_50": 0.80951, "ndcg_at_100": 0.80951, "map_at_1": 0.66, "map_at_3": 0.72667, "map_at_5": 0.73867, "map_at_10": 0.74994, "map_at_20": 0.75211, "map_at_50": 0.75211, "map_at_100": 0.75211, "recall_at_1": 0.66, "recall_at_3": 0.82, "recall_at_5": 0.88, "recall_at_10": 0.96, "recall_at_20": 1.0, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.66, "precision_at_3": 0.27333, "precision_at_5": 0.176, "precision_at_10": 0.096, "precision_at_20": 0.05, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.66, "mrr_at_3": 0.7266666666666667, 
"mrr_at_5": 0.7386666666666666, "mrr_at_10": 0.7499365079365079, "mrr_at_20": 0.7521129785247431, "mrr_at_50": 0.7521129785247431, "mrr_at_100": 0.7521129785247431, "naucs_at_1_max": 0.36061601779016894, "naucs_at_1_std": -0.4166749643200908, "naucs_at_1_diff1": 0.936473165388828, "naucs_at_3_max": 0.3008040533098355, "naucs_at_3_std": -0.47780592576275016, "naucs_at_3_diff1": 0.8645225245071041, "naucs_at_5_max": 0.41154947502386235, "naucs_at_5_std": -0.15820871778555362, "naucs_at_5_diff1": 0.8293827553293032, "naucs_at_10_max": 0.934640522875822, "naucs_at_10_std": 0.15289449112979145, "naucs_at_10_diff1": 0.7770774976657333, "naucs_at_20_max": 1.0, "naucs_at_20_std": 1.0, "naucs_at_20_diff1": 1.0, "naucs_at_50_max": 1.0, "naucs_at_50_std": 1.0, "naucs_at_50_diff1": 1.0, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "NanoSCIDOCS": {"ndcg_at_1": 0.0, "ndcg_at_3": 0.0, "ndcg_at_5": 0.0327, "ndcg_at_10": 0.08382, "ndcg_at_20": 0.11934, "ndcg_at_50": 0.12728, "ndcg_at_100": 0.1399, "map_at_1": 0.0, "map_at_3": 0.0, "map_at_5": 0.018, "map_at_10": 0.03875, "map_at_20": 0.04857, "map_at_50": 0.04983, "map_at_100": 0.05084, "recall_at_1": 0.0, "recall_at_3": 0.0, "recall_at_5": 0.08, "recall_at_10": 0.24, "recall_at_20": 0.38, "recall_at_50": 0.42, "recall_at_100": 0.5, "precision_at_1": 0.0, "precision_at_3": 0.0, "precision_at_5": 0.016, "precision_at_10": 0.024, "precision_at_20": 0.019, "precision_at_50": 0.0084, "precision_at_100": 0.005, "mrr_at_1": 0.0, "mrr_at_3": 0.02, "mrr_at_5": 0.02, "mrr_at_10": 0.02, "mrr_at_20": 0.021818181818181816, "mrr_at_50": 0.02225296442687747, "mrr_at_100": 0.02225296442687747, "naucs_at_1_max": NaN, "naucs_at_1_std": NaN, "naucs_at_1_diff1": NaN, "naucs_at_3_max": NaN, "naucs_at_3_std": NaN, "naucs_at_3_diff1": NaN, "naucs_at_5_max": 0.3997182720686881, "naucs_at_5_std": -0.30245505768714787, "naucs_at_5_diff1": 0.34478132546283863, "naucs_at_10_max": -0.03190150970524822, "naucs_at_10_std": 
0.0041936256889526366, "naucs_at_10_diff1": 0.08794632159118132, "naucs_at_20_max": -0.06139370467600675, "naucs_at_20_std": 0.30114625841499204, "naucs_at_20_diff1": 0.018038624489901713, "naucs_at_50_max": -0.03675765352412081, "naucs_at_50_std": 0.31936127744510967, "naucs_at_50_diff1": -0.061046737693444535, "naucs_at_100_max": 0.04946035976015966, "naucs_at_100_std": 0.32938041305796145, "naucs_at_100_diff1": -0.0295802798134579}, "NanoArguAna": {"ndcg_at_1": 0.22, "ndcg_at_3": 0.4188, "ndcg_at_5": 0.46873, "ndcg_at_10": 0.5284, "ndcg_at_20": 0.54362, "ndcg_at_50": 0.55529, "ndcg_at_100": 0.55529, "map_at_1": 0.22, "map_at_3": 0.37, "map_at_5": 0.398, "map_at_10": 0.42345, "map_at_20": 0.42764, "map_at_50": 0.42939, "map_at_100": 0.42939, "recall_at_1": 0.22, "recall_at_3": 0.56, "recall_at_5": 0.68, "recall_at_10": 0.86, "recall_at_20": 0.92, "recall_at_50": 0.98, "recall_at_100": 0.98, "precision_at_1": 0.22, "precision_at_3": 0.18667, "precision_at_5": 0.136, "precision_at_10": 0.086, "precision_at_20": 0.046, "precision_at_50": 0.0196, "precision_at_100": 0.0098, "mrr_at_1": 0.22, "mrr_at_3": 0.37, "mrr_at_5": 0.39799999999999996, "mrr_at_10": 0.42345238095238097, "mrr_at_20": 0.4276404151404152, "mrr_at_50": 0.4293923992673993, "mrr_at_100": 0.4293923992673993, "naucs_at_1_max": -0.4249619293284022, "naucs_at_1_std": -0.08506075768406005, "naucs_at_1_diff1": -0.3151319265313733, "naucs_at_3_max": -0.05870997047467624, "naucs_at_3_std": -0.3637008857597096, "naucs_at_3_diff1": -0.017459686577334113, "naucs_at_5_max": -0.015168267569622152, "naucs_at_5_std": -0.5232879552207862, "naucs_at_5_diff1": -0.06063851841614279, "naucs_at_10_max": 0.2901482610503006, "naucs_at_10_std": -0.5400443397533576, "naucs_at_10_diff1": -0.21975890259110176, "naucs_at_20_max": 0.543767507002801, "naucs_at_20_std": -0.6832399626517252, "naucs_at_20_diff1": 0.39554154995331575, "naucs_at_50_max": 0.8692810457516335, "naucs_at_50_std": -0.5634920634920756, "naucs_at_50_diff1": 
0.8692810457516335, "naucs_at_100_max": 0.8692810457516335, "naucs_at_100_std": -0.5634920634920756, "naucs_at_100_diff1": 0.8692810457516335}, "NanoSciFact": {"ndcg_at_1": 0.56, "ndcg_at_3": 0.64047, "ndcg_at_5": 0.64909, "ndcg_at_10": 0.67439, "ndcg_at_20": 0.68837, "ndcg_at_50": 0.70071, "ndcg_at_100": 0.70071, "map_at_1": 0.56, "map_at_3": 0.62, "map_at_5": 0.625, "map_at_10": 0.63508, "map_at_20": 0.63831, "map_at_50": 0.64047, "map_at_100": 0.64047, "recall_at_1": 0.56, "recall_at_3": 0.7, "recall_at_5": 0.72, "recall_at_10": 0.8, "recall_at_20": 0.86, "recall_at_50": 0.92, "recall_at_100": 0.92, "precision_at_1": 0.56, "precision_at_3": 0.23333, "precision_at_5": 0.144, "precision_at_10": 0.08, "precision_at_20": 0.043, "precision_at_50": 0.0184, "precision_at_100": 0.0092, "mrr_at_1": 0.56, "mrr_at_3": 0.62, "mrr_at_5": 0.625, "mrr_at_10": 0.635079365079365, "mrr_at_20": 0.6383084672465478, "mrr_at_50": 0.6404745938029183, "mrr_at_100": 0.6404745938029183, "naucs_at_1_max": 0.39583806495571167, "naucs_at_1_std": -0.24977288212582358, "naucs_at_1_diff1": 0.6281228707699293, "naucs_at_3_max": 0.2659700954782918, "naucs_at_3_std": -0.09843271482615809, "naucs_at_3_diff1": 0.5090974599171314, "naucs_at_5_max": 0.21062271062271018, "naucs_at_5_std": -0.16327075702075716, "naucs_at_5_diff1": 0.5852029914529912, "naucs_at_10_max": 0.3705794205794201, "naucs_at_10_std": -0.25594405594405556, "naucs_at_10_diff1": 0.3998001998001999, "naucs_at_20_max": 0.5027712345850092, "naucs_at_20_std": -0.008105861161144997, "naucs_at_20_diff1": 0.38256893446030366, "naucs_at_50_max": 0.4184173669467756, "naucs_at_50_std": 0.38165266106442414, "naucs_at_50_diff1": 0.2947012138188589, "naucs_at_100_max": 0.4184173669467756, "naucs_at_100_std": 0.38165266106442414, "naucs_at_100_diff1": 0.2947012138188589}, "NanoTouche2020": {"ndcg_at_1": 0.02041, "ndcg_at_3": 0.07944, "ndcg_at_5": 0.10492, "ndcg_at_10": 0.1175, "ndcg_at_20": 0.1704, "ndcg_at_50": 0.20596, "ndcg_at_100": 0.22872, 
"map_at_1": 0.02041, "map_at_3": 0.06463, "map_at_5": 0.07891, "map_at_10": 0.08373, "map_at_20": 0.09893, "map_at_50": 0.1043, "map_at_100": 0.10621, "recall_at_1": 0.02041, "recall_at_3": 0.12245, "recall_at_5": 0.18367, "recall_at_10": 0.22449, "recall_at_20": 0.42857, "recall_at_50": 0.61224, "recall_at_100": 0.7551, "precision_at_1": 0.02041, "precision_at_3": 0.04082, "precision_at_5": 0.03673, "precision_at_10": 0.02245, "precision_at_20": 0.02143, "precision_at_50": 0.01224, "precision_at_100": 0.00755, "mrr_at_1": 0.02040816326530612, "mrr_at_3": 0.06462585034013606, "mrr_at_5": 0.07891156462585035, "mrr_at_10": 0.08373015873015874, "mrr_at_20": 0.09892544053392603, "mrr_at_50": 0.10430458659630192, "mrr_at_100": 0.10620513754050609, "naucs_at_1_max": -0.071774720230261, "naucs_at_1_std": 0.0974528671745171, "naucs_at_1_diff1": -0.42341111077276533, "naucs_at_3_max": -0.45751848449938637, "naucs_at_3_std": -0.3067121939003939, "naucs_at_3_diff1": -0.16050447387723582, "naucs_at_5_max": -0.30571320633420834, "naucs_at_5_std": -0.1502981307068792, "naucs_at_5_diff1": 0.03173441966468508, "naucs_at_10_max": -0.1436811354299066, "naucs_at_10_std": -0.019067956353556893, "naucs_at_10_diff1": -0.07743586471085244, "naucs_at_20_max": 0.026667352237445627, "naucs_at_20_std": 0.06976762199884466, "naucs_at_20_diff1": 0.007019409814706438, "naucs_at_50_max": 0.08303623338234126, "naucs_at_50_std": -0.19360160241779997, "naucs_at_50_diff1": 0.3898158252244685, "naucs_at_100_max": 0.13319518697533214, "naucs_at_100_std": -0.3101176132313557, "naucs_at_100_diff1": 0.22931139742389609}}}
scripts/apps/gradio_app.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
import json
from huggingface_hub import HfApi
import pandas as pd


def compute_df():
    """Build the leaderboard table from result files in illuin-cde/baselines.

    Downloads every file named ``metrics*`` from the Hub repo, flattens the
    nested ``metrics`` dict of each result into top-level columns, and
    returns a Gradio Dataframe sorted by ``ndcg_at_5``.
    """
    api = HfApi()
    # Result files in https://huggingface.co/illuin-cde/baselines all start
    # with "metrics".
    metric_files = [
        name
        for name in api.list_repo_files("illuin-cde/baselines")
        if name.startswith("metrics")
    ]
    print(metric_files)

    rows = []
    for name in metric_files:
        local_path = api.hf_hub_download("illuin-cde/baselines", filename=name)
        with open(local_path, "r") as fh:
            record = json.load(fh)
        # Promote the nested metric values to top-level keys.
        record.update(record["metrics"])
        del record["metrics"]
        rows.append(record)

    df = pd.DataFrame(rows)
    kept_columns = [
        "model",
        "dataset",
        "split",
        "is_contextual",
        "ndcg_at_1",
        "ndcg_at_5",
        "ndcg_at_10",
        "ndcg_at_100",
    ]
    df = df[kept_columns]
    # Keep only the final path component of repo-style identifiers.
    df["model"] = df["model"].apply(lambda x: x.split("/")[-1])
    df["dataset"] = df["dataset"].apply(lambda x: x.split("/")[-1])
    # Round every numeric column, then rank by ndcg_at_5.
    df = df.round(3)
    df = df.sort_values("ndcg_at_5", ascending=False)

    return gr.Dataframe(df)


# The table is recomputed from the Hub every time the interface is triggered.
gr.Interface(
    fn=compute_df, title="Results Leaderboard", inputs=None, outputs="dataframe"
).launch()
scripts/apps/gradio_app_v2.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
import json
from huggingface_hub import HfApi
import pandas as pd


def compute_df():
    """Build the v2 leaderboard from result files in illuin-cde/baselines-v2.

    Each result file contributes one row; every evaluated dataset becomes a
    column holding that run's ``ndcg_at_5``. Rows are sorted by the average
    over all dataset columns.
    """
    api = HfApi()
    # Result files in https://huggingface.co/illuin-cde/baselines-v2 all
    # start with "metrics".
    files = [
        f
        for f in api.list_repo_files("illuin-cde/baselines-v2")
        if f.startswith("metrics")
    ]
    print(files)

    metrics = []
    cols = ["model", "is_contextual"]
    for file in files:
        result_path = api.hf_hub_download("illuin-cde/baselines-v2", filename=file)
        with open(result_path, "r") as f:
            dic = json.load(f)
        metrics_cur = dic["metrics"]
        for k, v in metrics_cur.items():
            dic[k] = v["ndcg_at_5"]
            # BUG FIX: only record each dataset column once. Appending
            # unconditionally duplicated columns (and double-counted them in
            # the average) as soon as two result files shared a dataset.
            if k not in cols:
                cols.append(k)
        del dic["metrics"]
        metrics.append(dic)

    df = pd.DataFrame(metrics)
    df = df[cols]
    df["model"] = df["model"].apply(lambda x: x.split("/")[-1])
    # Average over the dataset columns (everything after model/is_contextual).
    df["avg"] = df.iloc[:, 2:].mean(axis=1)
    df = df.round(3)

    # Sort by the per-model average of ndcg_at_5.
    df = df.sort_values(by="avg", ascending=False)

    return gr.Dataframe(df)


# The table is recomputed from the Hub every time the interface is triggered.
gr.Interface(
    fn=compute_df, title="Results Leaderboard", inputs=None, outputs="dataframe"
).launch()
scripts/data_processing/embed_docs.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import torch
from datasets import load_dataset
from sentence_transformers import SentenceTransformer
import math

# Paths are local copies on the Jean Zay cluster, Hub identifiers otherwise.
ON_JZ = False
DATASET_NAME = (
    "./data_dir/nomic_embed_supervised" if ON_JZ else "jxm/nomic_embed_supervised"
)
MODEL_NAME = "./models/modernbert-embed-base" if ON_JZ else "intfloat/e5-base-v2"


if ON_JZ:
    dataset = load_dataset(DATASET_NAME, split="train")
else:
    # Dev setting: one shard, first 2000 rows, no checksum verification.
    dataset = load_dataset(
        DATASET_NAME,
        split="train[:2000]",
        data_files=["data/train-00000-of-00116.parquet"],
        verification_mode="no_checks",
    )

model = SentenceTransformer(MODEL_NAME)


def map_to_embedding(batch):
    """Add query/document embedding columns to a batch of rows."""
    batch["query_embedding"] = model.encode(batch["query"])
    batch["document_embedding"] = model.encode(batch["document"])
    return batch


dataset = dataset.map(map_to_embedding, batched=True, batch_size=128)
print(dataset)
print(dataset[0])

from cde_benchmark.utils.faiss_clustering import paired_kmeans_faiss

query_embeddings = torch.Tensor(dataset["query_embedding"])
doc_embeddings = torch.Tensor(dataset["document_embedding"])
cluster_size = 1024
# One centroid for every ~1024 documents.
k = math.ceil(len(doc_embeddings) / cluster_size)
print(k)
max_iters = 100

centroids, assignments = paired_kmeans_faiss(
    q=query_embeddings, X=doc_embeddings, k=k, max_iters=max_iters
)

# Flatten the assignment array into a plain list so it can become a column.
assignments = list(assignments.flatten())
print(assignments)

dataset = dataset.add_column("cluster_assignment", assignments)

print(dataset)

# Persist the clustered dataset for later training runs.
dataset.save_to_disk("./data_dir/nomic_embed_supervised_clustered")
scripts/data_processing/gutenberg_processing.py ADDED
@@ -0,0 +1,163 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import json
import os

from datetime import timezone

from langchain_text_splitters import RecursiveCharacterTextSplitter

from datasets import load_dataset, Dataset
import datetime
from tqdm import tqdm

import vertexai
from vertexai.generative_models import Part
from vertexai.preview import caching
from vertexai.preview.generative_models import GenerativeModel

PROJECT_ID = "llm-testing"
vertexai.init(project=PROJECT_ID, location="us-central1")

system_instruction = """
First generate a summary of the text that should be a paragraph long.
Then, for chunks with specified ids, ask 1 question that pertains to the chunk.
Make sure the questions contain the necessary context and can be at least partially with chunk information.
Questions can necessitate previous context from the book to be understood, but should not assume the person answering knows which part of the book they are referring to.
Do not mention the chunk id in the question.
Answer each questions in a brief paragraph.
Format the output as a JSON consisting of a key 'chunks', associated with a list of chunk ids (ex: 13), each with 2 fields 'question' and 'answer' and their associated value.
If no relevant questions can be asked about the chunk, or the chunk contains noisy information, do not ask any questions.
Make sure the output is valid JSON, that in text quotes are preceded by a backslash.
"""


# Load one English shard of Project Gutenberg.
ds = load_dataset(
    "manu/project_gutenberg",
    data_files=["data/en-00001-of-00052-5c2b3fd5e60f0124.parquet"],
    verification_mode="no_checks",
)  # split="en",

text_splitter = RecursiveCharacterTextSplitter(
    chunk_size=3000,
    chunk_overlap=0,
    length_function=len,
    is_separator_regex=False,
)

ALL_BOOKS_PATH = "/home/manuel/Desktop/all_books.json"

# Resume from a previous run if the output file already exists.
if os.path.exists(ALL_BOOKS_PATH):
    with open(ALL_BOOKS_PATH, "r") as f:
        all_books = json.load(f)
else:
    all_books = {}


for book_id in tqdm(range(10, 20)):
    # BUG FIX: JSON object keys are strings, so the original
    # `book_id in all_books` (int vs str) never matched after reloading the
    # file and every book was reprocessed on resume.
    if str(book_id) in all_books:
        print("Book already processed, skipping.")
        continue
    total_cost = 0

    sample = ds["train"][book_id]

    # Rough size stats; input cost is 0.00001875 USD per 1000 characters.
    num_words = len(sample["text"].split(" "))
    num_chars = len(sample["text"])

    print("The text has", num_words, "words and", num_chars, "characters.")
    if num_words < 26000:
        print("The text is too short, skipping.")
        continue

    texts = text_splitter.create_documents([sample["text"]])
    # NOTE(review): chunks are labelled 1-based here while the per-batch
    # chunk_ids below are 0-based range indices — confirm the intended id
    # convention matches on both sides.
    text = "\n\n".join(
        [f"<chunk_{i+1}> {chunk.page_content}" for i, chunk in enumerate(texts)]
    )

    contents = [
        Part.from_text(text),
        Part.from_text(system_instruction),
    ]

    print("Caching Input")

    # Cache the whole book + instructions so every per-batch request below
    # only pays the (much cheaper) cached-input rate.
    cached_content = caching.CachedContent.create(
        model_name="gemini-1.5-flash-002",
        system_instruction=system_instruction,
        contents=contents,
        ttl=datetime.timedelta(minutes=20),
        display_name="example-cache",
    )

    print(cached_content)

    model = GenerativeModel.from_cached_content(cached_content=cached_content)

    print("The cost of processing this input is", 0.00001875 * num_chars / 1000, "USD.")
    total_cost += 0.00001875 * num_chars / 1000

    total_text = {"chunks": []}
    for i in tqdm(range(0, len(texts), 20)):
        # Ask about 20 consecutive chunks per request.
        chunk_ids = list(range(i, min(i + 20, len(texts))))

        response = model.generate_content(
            f"The chunks ids are {chunk_ids}. Generate the output JSON."
        )
        # Cached-input rate: 0.0000046875 USD per 1k characters.
        total_cost += 0.0000046875 * num_chars / 1000
        # Output rate: 0.000075 USD per 1k characters.
        total_cost += 0.000075 * len(response.text) / 1000

        try:
            # SECURITY/BUG FIX: parse the model output with json.loads
            # instead of eval() — never evaluate LLM output as Python code.
            parsed = json.loads(
                response.text.replace("```json\n", "").replace("\n```", "")
            )
            total_text["chunks"] += parsed["chunks"]
        except (json.JSONDecodeError, KeyError) as e:
            print("Error parsing response:", e)
            print(response.text)

    # Storage rate: 0.00025 USD per 1k characters per hour of cache lifetime.
    cache_time = datetime.datetime.now(timezone.utc) - cached_content.create_time
    total_cost += (cache_time.total_seconds() / 3600) * 0.00025 * num_chars / 1000
    cached_content.delete()

    total_text["og_chunks"] = [c.page_content for c in texts]
    total_text["book_id"] = book_id
    total_text["cost"] = total_cost
    print("The total cost is", total_cost, "USD.")

    # Per-book dump (debug aid) plus the cumulative resume file.
    with open(f"/home/manuel/Desktop/summary_{book_id}.json", "w") as f:
        json.dump(total_text, f)

    all_books[str(book_id)] = total_text

    with open(ALL_BOOKS_PATH, "w") as f:
        json.dump(all_books, f)

print(
    "All books have been processed and saved to /home/manuel/Desktop/all_books.json"
)

# BUG FIX: Dataset.from_list expects a list of row dicts, not a dict keyed by
# book id — pass the values.
Dataset.from_list(list(all_books.values())).push_to_hub("manu/all_books_test")
scripts/data_processing/mldr_processing.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import json

from tqdm import tqdm
from langchain_text_splitters import RecursiveCharacterTextSplitter

from datasets import load_dataset, Dataset

import vertexai

PROJECT_ID = "llm-testing"
vertexai.init(project=PROJECT_ID, location="us-central1")

from vertexai.generative_models import GenerativeModel

model = GenerativeModel("gemini-1.5-flash-002")


system_instruction = """
Given a query and a wikipedia article, that is composed of multiple chunks, output which chunk is the most relevant to answer the query.
If the query is not understandable without the context of the entire article, reformulate the query to be more specific.
For example, if the query is "What is the population of the country?", it should be reformulated to "What is the population of France?" if the article clearly is about France.
Similarly, "What is her occupation?" should be reformulated to "What is Marie Curie's occupation?" if the article is about Marie Curie.
If multiple chunks are relevant, output only the most relevant, it should contain at least partially the necessary information to answer the query.
If no chunk is relevant, output an empty list.
The output should be a JSON with format:
{"query": <original_query>, "reformulated_query": <reformulated_query>, "answer": <brief answer>, "relevant_chunks_id": [<relevant_chunk1>]}
"""

dataset = load_dataset("sentence-transformers/mldr", "en-triplet", split="train")

text_splitter = RecursiveCharacterTextSplitter(
    chunk_size=1000,
    chunk_overlap=0,
    length_function=len,
    is_separator_regex=False,
)


outputs = []

for i in tqdm(range(2000)):  # len(dataset)):
    sample = dataset[i]
    # Size stats for the positive passage being chunked.
    num_words = len(sample["positive"].split(" "))
    num_chars = len(sample["positive"])
    print("The text has", num_words, "words and", num_chars, "characters.")

    # input cost is 0.00001875 per 1000 characters
    texts = text_splitter.create_documents([sample["positive"]])
    text = "\n\n".join(
        [
            f"<chunk_{chunk_id}> {chunk.page_content}"
            for chunk_id, chunk in enumerate(texts)
        ]
    )

    # Best-effort: any failure (generation, JSON parsing, missing keys,
    # empty relevant_chunks_id) skips the sample and keeps going.
    output = None
    try:
        response = model.generate_content(
            f"{system_instruction}\n\n{text}\n\n The query is: {sample['anchor']}\n What is the most relevant chunk to answer the query?"
        )
        output = response.text
        output = json.loads(output.replace("```json\n", "").replace("\n```", ""))
        output["positive"] = sample["positive"]
        output["negative"] = sample["negative"]
        output["chunks"] = [t.page_content for t in texts]
        # NOTE(review): if relevant_chunks_id is empty this debug print
        # raises and drops an otherwise valid sample — confirm intended.
        print(
            output["reformulated_query"],
            output["answer"],
            output["relevant_chunks_id"],
            output["chunks"][output["relevant_chunks_id"][0]],
        )
        outputs.append(output)
    # BUG FIX: the original bare `except:` also swallowed KeyboardInterrupt
    # and SystemExit, making the 2000-iteration loop impossible to stop.
    except Exception as err:
        print(output)
        print("Error parsing output:", err)

Dataset.from_list(outputs).push_to_hub("manu/mldr-zoomed-1000char-2000")
scripts/evaluation/local_test_eval_script.py ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import json
from datetime import datetime

from cde_benchmark.embedders.sentence_transformer_embedder import (
    SentenceTransformerEmbedder,
)
from sentence_transformers import SentenceTransformer
from cde_benchmark.embedders.jina_late_chunking_embedder import LateChunkingEmbedder
from cde_benchmark.formatters.data_formatter import DataFormatter
from cde_benchmark.evaluators.nanobeir import NanoBEIR


# Model under evaluation (toggle the commented lines to switch checkpoints).
MODEL_NAME = "./models/modernbert-embed-base"
# MODEL_NAME = "./models/jina-embeddings-v2-small-en"
# MODEL_NAME = "./models/e5-base-v2"
model = SentenceTransformer(MODEL_NAME)
# Flip the flag to compare the plain embedder against late chunking.
embedder = SentenceTransformerEmbedder(model) if False else LateChunkingEmbedder(model)

output_dic = {
    "model": MODEL_NAME.split("/")[-1],
    "date": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
    "is_contextual": embedder.is_contextual_model,
}

print(output_dic)

output_dic["metrics"] = {}

# Local chunked QA datasets.
local_tasks = (
    ("covid-qa", "train"),
    ("chunked-mldr", "test"),
    ("tech-qa", "train"),
)
for task_name, split in local_tasks:
    formatter = DataFormatter(f"./data_dir/{task_name}", split, query_key="queries")
    output_dic["metrics"][task_name] = embedder.compute_metrics_e2e(formatter)

print(output_dic)

# NanoBEIR suite.
nanobeir = NanoBEIR("./data_dir/nanobeir", embedder, is_contextual_model=False)
output_dic["metrics"].update(nanobeir.run_all_tasks())

# Persist the run, tagging contextual embedders in the filename.
result_file = (
    f"results/metrics_{MODEL_NAME.split('/')[-1]}"
    f"{'_contextual' if embedder.is_contextual_model else ''}.json"
)
with open(result_file, "w") as f:
    json.dump(output_dic, f)
scripts/evaluation/test_eval_script.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from cde_benchmark.embedders.sentence_transformer_embedder import (
    SentenceTransformerEmbedder,
)
from sentence_transformers import SentenceTransformer

from cde_benchmark.embedders.jina_late_chunking_embedder import LateChunkingEmbedder
from cde_benchmark.formatters.data_formatter import DataFormatter, BEIRDataFormatter


# Evaluation target.
DATASET_NAME = "illuin-cde/tech-qa"

MODEL_NAME = "nomic-ai/modernbert-embed-base"
model = SentenceTransformer(MODEL_NAME)
# hacky but necessary for the moment: disable reference compilation on the
# underlying ModernBERT module.
model._modules["0"].auto_model.config.reference_compile = False


formatter = DataFormatter(DATASET_NAME, "train")
# formatter = BEIRDataFormatter("zeta-alpha-ai/NanoClimateFEVER", "train")

# Non-nested embedder; flip the flag to test late chunking instead.
embedder = SentenceTransformerEmbedder(model) if True else LateChunkingEmbedder(model)

metrics = embedder.compute_metrics_e2e(formatter)
print(metrics)