msalnikov committed
Commit 4b29ab3 · 1 Parent(s): e50399b

Upload prepraring.ipynb

Files changed (1)
  prepraring.ipynb  +430 -0
prepraring.ipynb ADDED
@@ -0,0 +1,430 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "501d0d55-8d15-463d-95cb-1f70d72de7fb",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "--2023-12-30 12:56:11--  https://raw.githubusercontent.com/askplatypus/wikidata-simplequestions/master/annotated_wd_data_train_answerable.txt\n",
      "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.108.133, 185.199.109.133, 185.199.110.133, ...\n",
      "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.108.133|:443... connected.\n",
      "HTTP request sent, awaiting response... 200 OK\n",
      "Length: 1193868 (1,1M) [text/plain]\n",
      "Saving to: ‘annotated_wd_data_train_answerable.txt’\n",
      "\n",
      "annotated_wd_data_t 100%[===================>]   1,14M  6,45MB/s    in 0,2s    \n",
      "\n",
      "2023-12-30 12:56:11 (6,45 MB/s) - ‘annotated_wd_data_train_answerable.txt’ saved [1193868/1193868]\n",
      "\n",
      "--2023-12-30 12:56:11--  https://raw.githubusercontent.com/askplatypus/wikidata-simplequestions/master/annotated_wd_data_valid_answerable.txt\n",
      "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.109.133, 185.199.110.133, 185.199.111.133, ...\n",
      "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.109.133|:443... connected.\n",
      "HTTP request sent, awaiting response... 200 OK\n",
      "Length: 173187 (169K) [text/plain]\n",
      "Saving to: ‘annotated_wd_data_valid_answerable.txt’\n",
      "\n",
      "annotated_wd_data_v 100%[===================>] 169,13K --.-KB/s    in 0,09s   \n",
      "\n",
      "2023-12-30 12:56:12 (1,86 MB/s) - ‘annotated_wd_data_valid_answerable.txt’ saved [173187/173187]\n",
      "\n",
      "--2023-12-30 12:56:12--  https://raw.githubusercontent.com/askplatypus/wikidata-simplequestions/master/annotated_wd_data_test_answerable.txt\n",
      "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.109.133, 185.199.108.133, 185.199.110.133, ...\n",
      "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.109.133|:443... connected.\n",
      "HTTP request sent, awaiting response... 200 OK\n",
      "Length: 345052 (337K) [text/plain]\n",
      "Saving to: ‘annotated_wd_data_test_answerable.txt’\n",
      "\n",
      "annotated_wd_data_t 100%[===================>] 336,96K --.-KB/s    in 0,1s    \n",
      "\n",
      "2023-12-30 12:56:12 (2,70 MB/s) - ‘annotated_wd_data_test_answerable.txt’ saved [345052/345052]\n",
      "\n"
     ]
    }
   ],
   "source": [
    "!wget -nc https://raw.githubusercontent.com/askplatypus/wikidata-simplequestions/master/annotated_wd_data_train_answerable.txt\n",
    "!wget -nc https://raw.githubusercontent.com/askplatypus/wikidata-simplequestions/master/annotated_wd_data_valid_answerable.txt\n",
    "!wget -nc https://raw.githubusercontent.com/askplatypus/wikidata-simplequestions/master/annotated_wd_data_test_answerable.txt"
   ]
  },
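  {
   "cell_type": "markdown",
   "id": "note-data-format",
   "metadata": {},
   "source": [
    "The three files above are the answerable train/valid/test splits of SimpleQuestions annotated with Wikidata identifiers, from the `askplatypus/wikidata-simplequestions` repository. Each line holds a tab-separated `subject`, `property`, `object`, `question` record, which the loading cell below relies on. A quick way to confirm the layout (a sketch, not executed in this run):\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "sketch-inspect-tsv",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: peek at the first lines of the downloaded TSV to verify\n",
    "# the subject/property/object/question column layout assumed below.\n",
    "!head -n 2 annotated_wd_data_train_answerable.txt"
   ]
  },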
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "73af4417-0637-4848-9d35-e734a685ebc4",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/salnikov/.cache/pypoetry/virtualenvs/kgqa-signatures-J3ZJKtLx-py3.10/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
      "  from .autonotebook import tqdm as notebook_tqdm\n"
     ]
    }
   ],
   "source": [
    "import pandas as pd\n",
    "import datasets\n",
    "import numpy as np\n",
    "import random\n",
    "import logging\n",
    "\n",
    "np.random.seed(8)\n",
    "random.seed(8)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "6ffea0dd-3d28-416c-b7c3-c8dd73d5e304",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "DatasetDict({\n",
       "    train: Dataset({\n",
       "        features: ['subject', 'property', 'object', 'question'],\n",
       "        num_rows: 19481\n",
       "    })\n",
       "    valid: Dataset({\n",
       "        features: ['subject', 'property', 'object', 'question'],\n",
       "        num_rows: 2821\n",
       "    })\n",
       "    test: Dataset({\n",
       "        features: ['subject', 'property', 'object', 'question'],\n",
       "        num_rows: 5622\n",
       "    })\n",
       "})"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dataset = datasets.DatasetDict()\n",
    "for split, data_path in [\n",
    "    (\"train\", \"annotated_wd_data_train_answerable.txt\"),\n",
    "    (\"valid\", \"annotated_wd_data_valid_answerable.txt\"),\n",
    "    (\"test\", \"annotated_wd_data_test_answerable.txt\"),\n",
    "]:\n",
    "    df = pd.read_csv(data_path, names=['subject', 'property', 'object', 'question'], sep='\\t')\n",
    "    dataset[split] = datasets.Dataset.from_pandas(df)\n",
    "\n",
    "dataset"
   ]
  },
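  {
   "cell_type": "markdown",
   "id": "note-row-access",
   "metadata": {},
   "source": [
    "`subject` and `object` are Wikidata entity ids and `property` is a Wikidata property id; the cells below resolve each `object` id to its English label via SPARQL. A minimal sketch for inspecting one raw record (not executed in this run):\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "sketch-inspect-record",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: look at a single training record to see the raw ids\n",
    "# before labels are attached.\n",
    "dataset['train'][0]"
   ]
  },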
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "f7e71241",
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "import os.path\n",
    "import pickle\n",
    "import warnings\n",
    "\n",
    "from joblib import register_store_backend, numpy_pickle\n",
    "from joblib._store_backends import FileSystemStoreBackend, CacheWarning\n",
    "\n",
    "\n",
    "class FileSystemStoreBackendNoNumpy(FileSystemStoreBackend):\n",
    "    NAME = \"no_numpy\"\n",
    "\n",
    "    def load_item(self, path, verbose=1, msg=None):\n",
    "        \"\"\"Load an item from the store given its path as a list of\n",
    "        strings.\"\"\"\n",
    "        full_path = os.path.join(self.location, *path)\n",
    "\n",
    "        if verbose > 1:\n",
    "            if verbose < 10:\n",
    "                print('{0}...'.format(msg))\n",
    "            else:\n",
    "                print('{0} from {1}'.format(msg, full_path))\n",
    "\n",
    "        mmap_mode = (None if not hasattr(self, 'mmap_mode')\n",
    "                     else self.mmap_mode)\n",
    "\n",
    "        filename = os.path.join(full_path, 'output.pkl')\n",
    "        if not self._item_exists(filename):\n",
    "            raise KeyError(\"Non-existing item (may have been \"\n",
    "                           \"cleared).\\nFile %s does not exist\" % filename)\n",
    "\n",
    "        # file-like object cannot be used when mmap_mode is set\n",
    "        if mmap_mode is None:\n",
    "            with self._open_item(filename, \"rb\") as f:\n",
    "                item = pickle.load(f)\n",
    "        else:\n",
    "            item = numpy_pickle.load(filename, mmap_mode=mmap_mode)\n",
    "        return item\n",
    "\n",
    "    def dump_item(self, path, item, verbose=1):\n",
    "        \"\"\"Dump an item in the store at the path given as a list of\n",
    "        strings.\"\"\"\n",
    "        try:\n",
    "            item_path = os.path.join(self.location, *path)\n",
    "            if not self._item_exists(item_path):\n",
    "                self.create_location(item_path)\n",
    "            filename = os.path.join(item_path, 'output.pkl')\n",
    "            if verbose > 10:\n",
    "                print('Persisting in %s' % item_path)\n",
    "\n",
    "            def write_func(to_write, dest_filename):\n",
    "                mmap_mode = (None if not hasattr(self, 'mmap_mode')\n",
    "                             else self.mmap_mode)\n",
    "                with self._open_item(dest_filename, \"wb\") as f:\n",
    "                    try:\n",
    "                        if mmap_mode is None:\n",
    "                            pickle.dump(to_write, f)\n",
    "                        else:\n",
    "                            numpy_pickle.dump(to_write, f, compress=self.compress)\n",
    "                    except pickle.PicklingError as e:\n",
    "                        # TODO(1.5) turn into error\n",
    "                        warnings.warn(\n",
    "                            \"Unable to cache to disk: failed to pickle \"\n",
    "                            \"output. In version 1.5 this will raise an \"\n",
    "                            f\"exception. Exception: {e}.\",\n",
    "                            FutureWarning\n",
    "                        )\n",
    "\n",
    "            self._concurrency_safe_write(item, filename, write_func)\n",
    "        except Exception as e:  # noqa: E722\n",
    "            warnings.warn(\n",
    "                \"Unable to cache to disk. Possibly a race condition in the \"\n",
    "                f\"creation of the directory. Exception: {e}.\",\n",
    "                CacheWarning\n",
    "            )\n",
    "\n",
    "\n",
    "register_store_backend(FileSystemStoreBackendNoNumpy.NAME, FileSystemStoreBackendNoNumpy)\n"
   ]
  },
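  {
   "cell_type": "markdown",
   "id": "note-store-backend",
   "metadata": {},
   "source": [
    "The `no_numpy` store backend above mirrors joblib's default `FileSystemStoreBackend` but swaps `numpy_pickle` for the standard `pickle` module whenever `mmap_mode` is unset, so the cached SPARQL responses (plain dicts and lists) can be written and read without numpy-specific serialization. It is registered once here and then selected by name when the `Memory` cache is created in the next cell.\n"
   ]
  },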
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "41eb3bbd-19bb-4c0d-892c-35478eabc00b",
   "metadata": {},
   "outputs": [],
   "source": [
    "import time\n",
    "from http.client import RemoteDisconnected\n",
    "\n",
    "import requests\n",
    "from joblib import Memory\n",
    "from urllib3.exceptions import ProtocolError\n",
    "\n",
    "\n",
    "SPARQL_API_URL = \"http://127.0.0.1:7001\"\n",
    "CACHE_DIRECTORY = \"wikidata/cache\"\n",
    "\n",
    "logger = logging.getLogger()\n",
    "memory = Memory(CACHE_DIRECTORY, verbose=0, backend=FileSystemStoreBackendNoNumpy.NAME)\n",
    "\n",
    "\n",
    "def execute_wiki_request_with_delays(api_url, params, headers):\n",
    "    response = requests.get(\n",
    "        api_url,\n",
    "        params=params,\n",
    "        headers=headers,\n",
    "    )\n",
    "    to_sleep = 0.2\n",
    "    while response.status_code == 429:\n",
    "        logger.warning(\n",
    "            {\n",
    "                \"msg\": \"Request to Wikidata endpoint failed. Retrying.\",\n",
    "                \"params\": params,\n",
    "                \"endpoint\": api_url,\n",
    "                \"response\": {\n",
    "                    \"status_code\": response.status_code,\n",
    "                    \"headers\": dict(response.headers),\n",
    "                },\n",
    "                \"retry_after\": to_sleep,\n",
    "            }\n",
    "        )\n",
    "        if \"retry-after\" in response.headers:\n",
    "            to_sleep += int(response.headers[\"retry-after\"])\n",
    "        to_sleep += 0.5\n",
    "        time.sleep(to_sleep)\n",
    "        response = requests.get(\n",
    "            api_url,\n",
    "            params=params,\n",
    "            headers=headers,\n",
    "        )\n",
    "\n",
    "    return response\n",
    "\n",
    "\n",
    "@memory.cache(ignore=['api_url'])\n",
    "def execute_sparql_request(request: str, api_url: str = SPARQL_API_URL):\n",
    "    params = {\"format\": \"json\", \"query\": request}\n",
    "    headers = {\n",
    "        \"Accept\": \"application/sparql-results+json\",\n",
    "        \"User-Agent\": \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36\",\n",
    "    }\n",
    "    logger.info(\n",
    "        {\n",
    "            \"msg\": \"Send request to Wikidata\",\n",
    "            \"params\": params,\n",
    "            \"endpoint\": api_url,\n",
    "            \"request\": request\n",
    "        }\n",
    "    )\n",
    "    try:\n",
    "        response = execute_wiki_request_with_delays(api_url, params, headers)\n",
    "    except (ProtocolError, RemoteDisconnected, requests.exceptions.ConnectionError) as e:\n",
    "        logger.error(\n",
    "            {\n",
    "                \"msg\": str(e),\n",
    "                \"request\": request,\n",
    "                \"endpoint\": api_url,\n",
    "            }\n",
    "        )\n",
    "        return None\n",
    "\n",
    "    try:\n",
    "        results = response.json()[\"results\"][\"bindings\"]\n",
    "        logger.debug(\n",
    "            {\n",
    "                \"msg\": \"Received response from Wikidata\",\n",
    "                \"params\": params,\n",
    "                \"endpoint\": api_url,\n",
    "                \"request\": request,\n",
    "                \"response\": results\n",
    "            }\n",
    "        )\n",
    "        return results\n",
    "    except Exception as e:\n",
    "        logger.error(\n",
    "            {\n",
    "                \"msg\": str(e),\n",
    "                \"params\": params,\n",
    "                \"endpoint\": api_url,\n",
    "                \"response\": {\n",
    "                    \"status_code\": response.status_code,\n",
    "                    \"headers\": dict(response.headers),\n",
    "                },\n",
    "            }\n",
    "        )\n",
    "        raise e\n",
    "\n",
    "\n",
    "def get_label(entity_id):\n",
    "    query = \"\"\"\n",
    "    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
    "    PREFIX wd: <http://www.wikidata.org/entity/>\n",
    "    SELECT DISTINCT ?label\n",
    "    WHERE {\n",
    "        wd:<ENTITY> rdfs:label ?label\n",
    "    }\n",
    "    \"\"\".replace(\n",
    "        \"<ENTITY>\", entity_id\n",
    "    )\n",
    "\n",
    "    for lbl_obj in execute_sparql_request(query):\n",
    "        if lbl_obj['label']['xml:lang'] == 'en':\n",
    "            return lbl_obj['label']['value']"
   ]
  },
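  {
   "cell_type": "markdown",
   "id": "note-get-label-usage",
   "metadata": {},
   "source": [
    "A minimal usage sketch for `get_label` (not executed in this run; it assumes the local SPARQL endpoint configured in `SPARQL_API_URL` is up and serving Wikidata):\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "sketch-get-label",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: resolve a single entity id to its English label.\n",
    "# Q42 is Douglas Adams on Wikidata, so this should return 'Douglas Adams'.\n",
    "# Repeated calls hit the joblib cache instead of the endpoint.\n",
    "get_label('Q42')"
   ]
  },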
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "2568fa09",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Map:   0%|          | 0/19481 [00:00<?, ? examples/s]"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Map: 100%|██████████| 19481/19481 [01:44<00:00, 185.89 examples/s]\n",
      "Map: 100%|██████████| 2821/2821 [00:08<00:00, 334.13 examples/s]\n",
      "Map: 100%|██████████| 5622/5622 [00:15<00:00, 352.59 examples/s]\n"
     ]
    }
   ],
   "source": [
    "dataset = dataset.map(\n",
    "    lambda record: {'object_label': get_label(record['object'])}\n",
    ")"
   ]
  },
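  {
   "cell_type": "markdown",
   "id": "note-missing-labels",
   "metadata": {},
   "source": [
    "`get_label` returns `None` when an entity has no English label (or when the request fails), so `object_label` can be null for some rows. A sketch for dropping such rows, if that matters downstream (not executed in this run):\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "sketch-filter-missing",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: keep only records whose object id resolved to an English label.\n",
    "dataset_with_labels = dataset.filter(\n",
    "    lambda record: record['object_label'] is not None\n",
    ")"
   ]
  },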
  {
   "cell_type": "code",
   "execution_count": 35,
   "id": "222d50c1",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Creating parquet from Arrow format: 100%|██████████| 20/20 [00:00<00:00, 474.06ba/s]\n",
      "Uploading the dataset shards: 100%|██████████| 1/1 [00:02<00:00, 2.07s/it]\n",
      "Creating parquet from Arrow format: 100%|██████████| 3/3 [00:00<00:00, 921.29ba/s]\n",
      "Uploading the dataset shards: 100%|██████████| 1/1 [00:01<00:00, 1.63s/it]\n",
      "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:00<00:00, 1000.75ba/s]\n",
      "Uploading the dataset shards: 100%|██████████| 1/1 [00:01<00:00, 1.73s/it]\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "CommitInfo(commit_url='https://huggingface.co/datasets/s-nlp/sqwd/commit/680b8199969fc0389fc96feb4f3b8be15b2674d0', commit_message='Upload dataset', commit_description='', oid='680b8199969fc0389fc96feb4f3b8be15b2674d0', pr_url=None, pr_revision=None, pr_num=None)"
      ]
     },
     "execution_count": 35,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dataset.push_to_hub('s-nlp/sqwd', 'answerable', set_default=True)"
   ]
  },
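  {
   "cell_type": "markdown",
   "id": "note-load-back",
   "metadata": {},
   "source": [
    "Since `set_default=True` makes `answerable` the default configuration, the published dataset can be loaded back without naming a config (sketch, not executed in this run):\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "sketch-load-back",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: reload the pushed dataset from the Hub.\n",
    "sqwd = datasets.load_dataset('s-nlp/sqwd')"
   ]
  },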
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2652ed4d",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.12"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}