devrim commited on
Commit
46d8059
·
verified ·
1 Parent(s): b43592f

Delete goodwiki_long_toy.py

Browse files
Files changed (1) hide show
  1. goodwiki_long_toy.py +0 -104
goodwiki_long_toy.py DELETED
@@ -1,104 +0,0 @@
1
- # coding=utf-8
2
- # Copyright 2023 Devrim Cavusoglu and the HuggingFace Datasets Authors.
3
- #
4
- # Licensed under the Apache License, Version 2.0 (the "License");
5
- # you may not use this file except in compliance with the License.
6
- # You may obtain a copy of the License at
7
- #
8
- # http://www.apache.org/licenses/LICENSE-2.0
9
- #
10
- # Unless required by applicable law or agreed to in writing, software
11
- # distributed under the License is distributed on an "AS IS" BASIS,
12
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
- # See the License for the specific language governing permissions and
14
- # limitations under the License.
15
-
16
- # Lint as: python3
17
- """Goodwiki Long Subset."""
18
-
19
-
20
- import json
21
-
22
- import datasets
23
-
24
- logger = datasets.logging.get_logger(__name__)
25
-
26
-
27
# Human-readable summary used in the dataset card / DatasetInfo description.
_DESCRIPTION = """\
Dataset consisting of long wikipedia articles in markdown format.
"""

# Repo-relative paths to the JSON-lines data files, keyed by split name.
# NOTE(review): the "test" files are downloaded in _split_generators but only
# the train split is generated further below — confirm whether that is intended.
_URLS = {
    "train": [
        "data/train.jsonl",
    ],
    "test": [
        "data/test.jsonl",
    ]
}
39
-
40
-
41
class GoodWikiLongDatasetConfig(datasets.BuilderConfig):
    """BuilderConfig for the GoodWiki long-article dataset."""

    def __init__(self, **kwargs):
        """Create a config, forwarding every keyword argument to the base class.

        Args:
            **kwargs: keyword arguments forwarded to super.
        """
        super().__init__(**kwargs)

    @property
    def features(self):
        """Feature schema of one example, as a plain dict of feature types.

        Key order matters: it determines the column order of the built
        ``datasets.Features``, so it is kept identical to the data layout.
        """
        schema = {}
        schema["id"] = datasets.Value("string")
        schema["url"] = datasets.Value("null")
        schema["title"] = datasets.Value("string")
        schema["text"] = datasets.Value("string")
        schema["revid"] = datasets.Value("string")
        schema["description"] = datasets.Value("string")
        schema["categories"] = datasets.Sequence(datasets.Value("string"))
        return schema
63
-
64
-
65
class GoodWikiLongDataset(datasets.GeneratorBasedBuilder):
    """GoodWiki long-article dataset builder. Version 1.0.

    Reads JSON-lines files (one article per line) and exposes them with the
    schema declared by ``GoodWikiLongDatasetConfig.features``.
    """

    BUILDER_CONFIGS = [
        GoodWikiLongDatasetConfig(
            version=datasets.Version("1.0.0", ""), description="Goodwiki Long Articles"
        )
    ]
    BUILDER_CONFIG_CLASS = GoodWikiLongDatasetConfig

    def _info(self):
        """Return dataset metadata: description plus the feature schema."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(self.config.features),
        )

    def _split_generators(self, dl_manager):
        """Download the data files and declare the train and test splits.

        Fix: ``_URLS`` declares (and this method downloads) a ``"test"``
        entry, but previously only the TRAIN split was returned, silently
        dropping the test data. A TEST SplitGenerator is now emitted too.
        """
        data_dir = dl_manager.download_and_extract(_URLS)

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"filepath": data_dir["train"]}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST, gen_kwargs={"filepath": data_dir["test"]}
            ),
        ]

    def _generate_examples(self, filepath):
        """Yield ``(key, example)`` pairs from one or more JSONL files.

        Args:
            filepath: a single path or a list of paths to JSONL files.

        Each line is parsed as one JSON article and adapted to the declared
        schema: ``"pageid"`` is renamed to ``"id"``, ``"markdown"`` is folded
        into ``"text"`` behind a level-1 title heading, and ``"url"`` is
        always ``None`` (the schema declares it as a null feature).
        """
        logger.info("generating examples from = %s", filepath)
        if isinstance(filepath, str):
            filepath = [filepath]
        key = 0  # running example index, unique across all input files
        for path in filepath:
            with open(path, encoding="utf-8") as data:
                for article_data in data:
                    article = json.loads(article_data)
                    article["id"] = article.pop("pageid")
                    article["text"] = "# " + article["title"] + "\n\n" + article.pop("markdown")
                    article["url"] = None
                    yield key, article
                    key += 1