{ "cells": [ { "cell_type": "code", "execution_count": 1, "id": "501d0d55-8d15-463d-95cb-1f70d72de7fb", "metadata": { "scrolled": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "--2023-12-30 12:56:11-- https://raw.githubusercontent.com/askplatypus/wikidata-simplequestions/master/annotated_wd_data_train_answerable.txt\n", "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.108.133, 185.199.109.133, 185.199.110.133, ...\n", "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.108.133|:443... connected.\n", "HTTP request sent, awaiting response... 200 OK\n", "Length: 1193868 (1,1M) [text/plain]\n", "Saving to: ‘annotated_wd_data_train_answerable.txt’\n", "\n", "annotated_wd_data_t 100%[===================>] 1,14M 6,45MB/s in 0,2s \n", "\n", "2023-12-30 12:56:11 (6,45 MB/s) - ‘annotated_wd_data_train_answerable.txt’ saved [1193868/1193868]\n", "\n", "--2023-12-30 12:56:11-- https://raw.githubusercontent.com/askplatypus/wikidata-simplequestions/master/annotated_wd_data_valid_answerable.txt\n", "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.109.133, 185.199.110.133, 185.199.111.133, ...\n", "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.109.133|:443... connected.\n", "HTTP request sent, awaiting response... 200 OK\n", "Length: 173187 (169K) [text/plain]\n", "Saving to: ‘annotated_wd_data_valid_answerable.txt’\n", "\n", "annotated_wd_data_v 100%[===================>] 169,13K --.-KB/s in 0,09s \n", "\n", "2023-12-30 12:56:12 (1,86 MB/s) - ‘annotated_wd_data_valid_answerable.txt’ saved [173187/173187]\n", "\n", "--2023-12-30 12:56:12-- https://raw.githubusercontent.com/askplatypus/wikidata-simplequestions/master/annotated_wd_data_test_answerable.txt\n", "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.109.133, 185.199.108.133, 185.199.110.133, ...\n", "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.109.133|:443... connected.\n", "HTTP request sent, awaiting response... 200 OK\n", "Length: 345052 (337K) [text/plain]\n", "Saving to: ‘annotated_wd_data_test_answerable.txt’\n", "\n", "annotated_wd_data_t 100%[===================>] 336,96K --.-KB/s in 0,1s \n", "\n", "2023-12-30 12:56:12 (2,70 MB/s) - ‘annotated_wd_data_test_answerable.txt’ saved [345052/345052]\n", "\n" ] } ], "source": [ "!wget -nc https://raw.githubusercontent.com/askplatypus/wikidata-simplequestions/master/annotated_wd_data_train_answerable.txt\n", "!wget -nc https://raw.githubusercontent.com/askplatypus/wikidata-simplequestions/master/annotated_wd_data_valid_answerable.txt\n", "!wget -nc https://raw.githubusercontent.com/askplatypus/wikidata-simplequestions/master/annotated_wd_data_test_answerable.txt" ] }, { "cell_type": "code", "execution_count": 2, "id": "73af4417-0637-4848-9d35-e734a685ebc4", "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "/home/salnikov/.cache/pypoetry/virtualenvs/kgqa-signatures-J3ZJKtLx-py3.10/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. 
See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", " from .autonotebook import tqdm as notebook_tqdm\n" ] } ], "source": [ "import pandas as pd\n", "import datasets \n", "import numpy as np\n", "import random\n", "import logging\n", "\n", "np.random.seed(8)\n", "random.seed(8)" ] }, { "cell_type": "code", "execution_count": 3, "id": "6ffea0dd-3d28-416c-b7c3-c8dd73d5e304", "metadata": {}, "outputs": [ { "data": { "text/plain": [ "DatasetDict({\n", " train: Dataset({\n", " features: ['subject', 'property', 'object', 'question'],\n", " num_rows: 19481\n", " })\n", " valid: Dataset({\n", " features: ['subject', 'property', 'object', 'question'],\n", " num_rows: 2821\n", " })\n", " test: Dataset({\n", " features: ['subject', 'property', 'object', 'question'],\n", " num_rows: 5622\n", " })\n", "})" ] }, "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "dataset = datasets.DatasetDict()\n", "for split, data_path in [\n", " (\"train\", \"annotated_wd_data_train_answerable.txt\"),\n", " (\"valid\", \"annotated_wd_data_valid_answerable.txt\"),\n", " (\"test\", \"annotated_wd_data_test_answerable.txt\"),\n", "]:\n", " df = pd.read_csv(data_path, names=['subject', 'property', 'object', 'question'], sep='\\t')\n", " dataset[split] = datasets.Dataset.from_pandas(df)\n", "\n", "dataset" ] }, { "cell_type": "code", "execution_count": 4, "id": "f7e71241", "metadata": {}, "outputs": [], "source": [ "import os\n", "import os.path\n", "import pickle\n", "import warnings\n", "\n", "from joblib import register_store_backend, numpy_pickle\n", "from joblib._store_backends import FileSystemStoreBackend, CacheWarning\n", "\n", "\n", "class FileSystemStoreBackendNoNumpy(FileSystemStoreBackend):\n", " NAME = \"no_numpy\"\n", "\n", " def load_item(self, path, verbose=1, msg=None):\n", " \"\"\"Load an item from the store given its path as a list of\n", " strings.\"\"\"\n", " full_path = os.path.join(self.location, *path)\n", "\n", " if verbose > 1:\n", " if verbose < 10:\n", " print('{0}...'.format(msg))\n", " else:\n", " print('{0} from {1}'.format(msg, full_path))\n", "\n", " mmap_mode = (None if not hasattr(self, 'mmap_mode')\n", " else self.mmap_mode)\n", "\n", " filename = os.path.join(full_path, 'output.pkl')\n", " if not self._item_exists(filename):\n", " raise KeyError(\"Non-existing item (may have been \"\n", " \"cleared).\\nFile %s does not exist\" % filename)\n", "\n", " # file-like object cannot be used when mmap_mode is set\n", " if mmap_mode is None:\n", " with self._open_item(filename, \"rb\") as f:\n", " item = pickle.load(f)\n", " else:\n", " item = numpy_pickle.load(filename, mmap_mode=mmap_mode)\n", " return item\n", "\n", " def dump_item(self, path, item, verbose=1):\n", " \"\"\"Dump an item in the store at the path given as a list of\n", " strings.\"\"\"\n", " try:\n", " item_path = os.path.join(self.location, *path)\n", " if not self._item_exists(item_path):\n", " self.create_location(item_path)\n", " filename = os.path.join(item_path, 'output.pkl')\n", " if verbose > 10:\n", " print('Persisting in %s' % item_path)\n", "\n", " def write_func(to_write, dest_filename):\n", " mmap_mode = (None if not hasattr(self, 'mmap_mode')\n", " else self.mmap_mode)\n", " with self._open_item(dest_filename, \"wb\") as f:\n", " try:\n", " if mmap_mode is None:\n", " pickle.dump(to_write, f)\n", " else:\n", " numpy_pickle.dump(to_write, f, compress=self.compress)\n", " except pickle.PicklingError as e:\n", " # TODO(1.5) turn into error\n", " warnings.warn(\n", 
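" # PicklingError here means the cached function's return value could not be pickled\n",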
" \"Unable to cache to disk: failed to pickle \"\n", " \"output. In version 1.5 this will raise an \"\n", " f\"exception. Exception: {e}.\",\n", " FutureWarning\n", " )\n", "\n", " self._concurrency_safe_write(item, filename, write_func)\n", " except Exception as e: # noqa: E722\n", " warnings.warn(\n", " \"Unable to cache to disk. Possibly a race condition in the \"\n", " f\"creation of the directory. Exception: {e}.\",\n", " CacheWarning\n", " )\n", "\n", "\n", "register_store_backend(FileSystemStoreBackendNoNumpy.NAME, FileSystemStoreBackendNoNumpy)\n" ] }, { "cell_type": "code", "execution_count": 22, "id": "41eb3bbd-19bb-4c0d-892c-35478eabc00b", "metadata": {}, "outputs": [], "source": [ "import time\n", "from http.client import RemoteDisconnected\n", "\n", "import requests\n", "from joblib import Memory\n", "from urllib3.exceptions import ProtocolError\n", "\n", "\n", "SPARQL_API_URL = \"http://127.0.0.1:7001\"\n", "CACHE_DIRECTORY = \"wikidata/cache\"\n", "\n", "logger = logging.getLogger()\n", "memory = Memory(CACHE_DIRECTORY, verbose=0, backend=FileSystemStoreBackendNoNumpy.NAME)\n", "\n", "\n", "def execute_wiki_request_with_delays(api_url, params, headers):\n", " response = requests.get(\n", " api_url,\n", " params=params,\n", " headers=headers,\n", " )\n", " to_sleep = 0.2\n", " while response.status_code == 429:\n", " logger.warning(\n", " {\n", " \"msg\": f\"Request to wikidata endpoint failed. Retry.\",\n", " \"params\": params,\n", " \"endpoint\": api_url,\n", " \"response\": {\n", " \"status_code\": response.status_code,\n", " \"headers\": dict(response.headers),\n", " },\n", " \"retry_after\": to_sleep,\n", " }\n", " )\n", " if \"retry-after\" in response.headers:\n", " to_sleep += int(response.headers[\"retry-after\"])\n", " to_sleep += 0.5\n", " time.sleep(to_sleep)\n", " response = requests.get(\n", " api_url,\n", " params=params,\n", " headers=headers,\n", " )\n", "\n", " return response\n", "\n", "\n", "@memory.cache(ignore=['api_url'])\n", "def execute_sparql_request(request: str, api_url: str = SPARQL_API_URL):\n", " params = {\"format\": \"json\", \"query\": request}\n", " headers = {\n", " \"Accept\": \"application/sparql-results+json\",\n", " \"User-Agent\": \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36\",\n", " }\n", " logger.info(\n", " {\n", " \"msg\": \"Send request to Wikidata\",\n", " \"params\": params,\n", " \"endpoint\": api_url,\n", " \"request\": request\n", " }\n", " )\n", " try:\n", " response = execute_wiki_request_with_delays(api_url, params, headers)\n", " except (ProtocolError, RemoteDisconnected, requests.exceptions.ConnectionError) as e:\n", " logger.error(\n", " {\n", " \"msg\": str(e),\n", " \"request\": request,\n", " \"endpoint\": api_url,\n", " }\n", " )\n", " return None\n", "\n", " try:\n", " response = response.json()[\"results\"][\"bindings\"]\n", " logger.debug(\n", " {\n", " \"msg\": \"Received response from Wikidata\",\n", " \"params\": params,\n", " \"endpoint\": api_url,\n", " \"request\": request,\n", " \"response\": response\n", " }\n", " )\n", " return response\n", " except Exception as e:\n", " logger.error(\n", " {\n", " \"msg\": str(e),\n", " \"params\": params,\n", " \"endpoint\": api_url,\n", " \"response\": {\n", " \"status_code\": response.status_code,\n", " \"headers\": dict(response.headers),\n", " },\n", " }\n", " )\n", " raise e\n", "\n", "def get_label(entity_id):\n", " query = \"\"\"\n", " PREFIX rdfs: \n", " PREFIX wd: \n", " SELECT 
" SELECT DISTINCT ?label\n", " WHERE {\n", " wd:<ENTITY> rdfs:label ?label\n", " }\n", " \"\"\".replace(\n", " \"<ENTITY>\", entity_id\n", " )\n", "\n", " results = execute_sparql_request(query)\n", " if results is None:\n", " # request failed after retries, e.g. a connection error; no label available\n", " return None\n", " for lbl_obj in results:\n", " if lbl_obj['label'].get('xml:lang') == 'en':\n", " return lbl_obj['label']['value']" ] }, { "cell_type": "code", "execution_count": 25, "id": "2568fa09", "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Map: 0%| | 0/19481 [00:00