devrim committed on
Commit ddfa726 · verified · 1 Parent(s): bd0d898

Upload goodwiki_long_toy.py with huggingface_hub
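As the commit message says, the script was pushed to the Hub with the huggingface_hub client. A minimal sketch of such an upload, assuming a dataset repository id and a local path that are not part of this commit:

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="goodwiki_long_toy.py",   # local path to the script (assumed)
    path_in_repo="goodwiki_long_toy.py",
    repo_id="devrim/goodwiki-long-toy",       # repository id is an assumption, not taken from this commit
    repo_type="dataset",
    commit_message="Upload goodwiki_long_toy.py with huggingface_hub",
)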

Files changed (1):
  goodwiki_long_toy.py  +11 -12
goodwiki_long_toy.py CHANGED
@@ -30,15 +30,15 @@ Dataset consisting of long wikipedia articles in markdown format.
 
 _URLS = {
     "train": [
-        "train/partition_0.jsonl",
+        "data/train.jsonl",
     ],
     "test": [
-        "test/partition_0.jsonl",
+        "data/test.jsonl",
     ]
 }
 
 
-class GoodWikiLongToyDatasetConfig(datasets.BuilderConfig):
+class GoodWikiLongDatasetConfig(datasets.BuilderConfig):
     """BuilderConfig for Dataset."""
 
     def __init__(self, **kwargs):
@@ -47,7 +47,7 @@ class GoodWikiLongToyDatasetConfig(datasets.BuilderConfig):
         Args:
             **kwargs: keyword arguments forwarded to super.
         """
-        super(GoodWikiLongToyDatasetConfig, self).__init__(**kwargs)
+        super(GoodWikiLongDatasetConfig, self).__init__(**kwargs)
 
     @property
     def features(self):
@@ -62,15 +62,15 @@ class GoodWikiLongToyDatasetConfig(datasets.BuilderConfig):
         }
 
 
-class GoodWikiLongToyDataset(datasets.GeneratorBasedBuilder):
+class GoodWikiLongDataset(datasets.GeneratorBasedBuilder):
     """WikiLongDataset Classification dataset. Version 1.0."""
 
     BUILDER_CONFIGS = [
-        GoodWikiLongToyDatasetConfig(
+        GoodWikiLongDatasetConfig(
            version=datasets.Version("1.0.0", ""), description="Goodwiki Long Articles"
        )
    ]
-    BUILDER_CONFIG_CLASS = GoodWikiLongToyDatasetConfig
+    BUILDER_CONFIG_CLASS = GoodWikiLongDatasetConfig
 
     def _info(self):
         return datasets.DatasetInfo(
@@ -85,9 +85,6 @@ class GoodWikiLongToyDataset(datasets.GeneratorBasedBuilder):
             datasets.SplitGenerator(
                 name=datasets.Split.TRAIN, gen_kwargs={"filepath": data_dir["train"]}
             ),
-            datasets.SplitGenerator(
-                name=datasets.Split.TEST, gen_kwargs={"filepath": data_dir["test"]}
-            ),
         ]
 
     def _generate_examples(self, filepath):
@@ -100,6 +97,8 @@ class GoodWikiLongToyDataset(datasets.GeneratorBasedBuilder):
         with open(path, encoding="utf-8") as data:
             for article_data in data:
                 article = json.loads(article_data)
-                article["text"] = "# " + article["title"] + "\n\n" + article.pop("text")
+                article["id"] = article.pop("pageid")
+                article["text"] = "# " + article["title"] + "\n\n" + article.pop("markdown")
+                article["url"] = None
                 yield key, article
-                key += 1
+                key += 1
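The main behavioral change is in _generate_examples: each JSONL record's pageid is exposed as id, the markdown body is prefixed with the title as a level-1 heading and stored under text, and url is set to None. A standalone sketch of that remapping on an invented record (field names follow the diff, field values are illustrative only):

import json

# Invented example record in the shape the new data/train.jsonl is expected to have.
raw_line = json.dumps(
    {"pageid": 12345, "title": "Example Article", "markdown": "Example body text."}
)

article = json.loads(raw_line)
article["id"] = article.pop("pageid")        # pageid -> id
article["text"] = "# " + article["title"] + "\n\n" + article.pop("markdown")
article["url"] = None                        # url is not present in the source records

print(article)
# {'title': 'Example Article', 'id': 12345, 'text': '# Example Article\n\nExample body text.', 'url': None}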
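Since the TEST SplitGenerator is removed in this commit, the builder now only produces a train split. A usage sketch, with the repository id assumed rather than taken from the commit:

from datasets import load_dataset

# Repository id is an assumption; substitute the actual Hub dataset repo.
# trust_remote_code=True may be required for script-based datasets in recent versions of datasets.
ds = load_dataset("devrim/goodwiki-long-toy", split="train", trust_remote_code=True)
print(ds[0].keys())  # expected to include id, title, text, and url after the remapping above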