lara-martin committed
Commit d853f37
1 Parent(s): 3530a96

Update FIREBALL.py

Files changed (1):
  1. FIREBALL.py +3 -15
FIREBALL.py CHANGED
@@ -24,8 +24,6 @@ import datasets
 from datasets import Features
 
 
-# TODO: Add BibTeX citation
-# Find for instance the citation on arxiv or on the dataset repo/website
 _CITATION = """\
 @inproceedings{Zhu2023FIREBALL,
 title={{FIREBALL: A Dataset of Dungeons and Dragons Actual-Play with Structured Game State Information}},
@@ -40,20 +38,11 @@ publisher={ACL},
 doi={10.18653/v1/2023.acl-long.229}
 }
 """
-
-
 _DESCRIPTION = """\
 FIREBALL Dungeons & Dragons data with narrative and Avrae scripting commands.
 """
-
 _HOMEPAGE = "https://github.com/zhudotexe/FIREBALL"
-
-# TODO: Add the licence for the dataset here if you can find it
 _LICENSE = "cc-by-4.0"
-
-
-# The HuggingFace Datasets library doesn't host the datasets but only points to the original files.
-# This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
 _URLS = {
     "FIREBALL": "https://huggingface.co/datasets/lara-martin/FIREBALL/raw/main/"
 }
@@ -184,12 +173,11 @@ class Fireball(datasets.GeneratorBasedBuilder):
         # dl_manager is a datasets.download.DownloadManager that can be used to download and extract URLS
         # It can accept any type or nested list/dict and will give back the same structure with the url replaced with path to local files.
         # By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
-        #data_dir = dl_manager.download_and_extract(urls)
         file_list = dl_manager.download(url+"files.txt")
         with open(file_list) as f:
             data_filenames = [line.strip() for line in f if line]
-        # data_urls = dl_manager.download([url+"filtered/"+data_filename for data_filename in data_filenames])
-        data_urls = dl_manager.download([url+"filtered/00068c6b03adc2c102756053cf6edd05.jsonl"])
+        data_urls = dl_manager.download([url+"filtered/"+data_filename for data_filename in data_filenames])
+        # data_urls = dl_manager.download([url+"filtered/00068c6b03adc2c102756053cf6edd05.jsonl"])
         downloaded_files = dl_manager.download(data_urls)
         return [
             datasets.SplitGenerator(
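
Review note: this hunk switches the download logic from a single hardcoded shard to every JSONL file listed in files.txt. A minimal sketch of how the surrounding _split_generators plausibly reads after the change; the split name is an assumption, while the "filepath" key follows the _generate_examples(self, filepath) signature shown in the next hunk:

```python
# Sketch only, not the full file: reconstructs _split_generators around the
# changed lines. Assumes the module-level `import datasets` and `_URLS` shown
# earlier in the diff; the split name is an assumption.
def _split_generators(self, dl_manager):
    url = _URLS["FIREBALL"]
    # files.txt lists one JSONL shard name per line
    file_list = dl_manager.download(url + "files.txt")
    with open(file_list) as f:
        data_filenames = [line.strip() for line in f if line]
    # New in this commit: download every shard named in files.txt
    data_urls = dl_manager.download(
        [url + "filtered/" + data_filename for data_filename in data_filenames]
    )
    # Mirrors the diff context: download() is called again on the already-local paths
    downloaded_files = dl_manager.download(data_urls)
    return [
        datasets.SplitGenerator(
            name=datasets.Split.TRAIN,  # assumed; the split name is not shown in this diff
            gen_kwargs={"filepath": downloaded_files},
        ),
    ]
```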
@@ -203,7 +191,7 @@ class Fireball(datasets.GeneratorBasedBuilder):
 
     # method parameters are unpacked from `gen_kwargs` as given in `_split_generators`
     def _generate_examples(self, filepath):
-        # TODO: This method handles input defined in _split_generators to yield (key, example) tuples from the dataset.
+        # This method handles input defined in _split_generators to yield (key, example) tuples from the dataset.
         # The `key` is for legacy reasons (tfds) and is not important in itself, but must be unique for each example.
         key = 0
         for file in filepath:
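
The body of _generate_examples is not part of this diff; a minimal sketch of the usual JSONL pattern it most likely follows, assuming one JSON object per line whose fields already match the declared Features:

```python
# Sketch only: a typical JSONL _generate_examples loop. Assumes one JSON
# object per line; the actual field handling is not shown in this diff.
import json

def _generate_examples(self, filepath):
    key = 0
    for file in filepath:
        with open(file, encoding="utf-8") as f:
            for line in f:
                if not line.strip():
                    continue
                # key must be unique across the whole split (legacy tfds requirement)
                yield key, json.loads(line)
                key += 1
```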
 
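
With the full shard list wired in, loading the dataset should follow the standard `datasets` pattern; a short usage sketch, where the split name and the need for trust_remote_code depend on the installed `datasets` version and are assumptions:

```python
from datasets import load_dataset

# Usage sketch: pulls the FIREBALL loading script from the Hub and runs
# _split_generators / _generate_examples as above. Recent datasets versions
# require trust_remote_code=True for script-based datasets.
ds = load_dataset("lara-martin/FIREBALL", split="train", trust_remote_code=True)
print(ds[0])
```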