frascuchon (HF Staff) committed
Commit 4beb2d4 · verified · 1 Parent(s): 162ea59

Upload folder using huggingface_hub

Files changed (1)
  1. extend_dataset/script.py +9 -6
extend_dataset/script.py CHANGED
@@ -104,13 +104,14 @@ class Pipeline:
         self.console.print("[yellow]Warning: Could not determine dataset size. Using streaming mode.")
         return None
 
-
-    def _load_config(self, yml_source: str) -> dict:
+    @staticmethod
+    def _load_config(yml_source: str) -> dict:
         """Load and parse YAML configuration from file or URL."""
         if yml_source.startswith(('http://', 'https://')):
-            if self._is_sheets_dataset_url(yml_source):
-                yml_source = yml_source + '/json'
-            response = requests.get(yml_source)
+            response = requests.get(
+                yml_source,
+                headers={'Accept': 'application/x-yaml; application/json'}
+            )
             response.raise_for_status()
             return yaml.safe_load(response.text)
 
@@ -438,6 +439,7 @@ class Pipeline:
         """Check if the URL points to a (AI)Sheets dataset."""
         return "/home/dataset/" in url and "/json" not in url
 
+
 def main(
     *,
     repo_id: str,
@@ -482,7 +484,8 @@ def main(
     augmented_dataset = pipeline.run()
    augmented_dataset.push_to_hub(destination, split=destination_split, create_pr=create_pr)
 
-    rprint(f"\n[bold green]✓[/] Successfully pushed augmented dataset to [cyan] https://huggingface.co/datasets/{destination}[/].")
+    rprint(
+        f"\n[bold green]✓[/] Successfully pushed augmented dataset to [cyan] https://huggingface.co/datasets/{destination}[/].")
 
 
 if __name__ == "__main__":
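
For context, a minimal standalone sketch of the updated loader as it reads after this commit. It assumes only requests and PyYAML; the real method is Pipeline._load_config in extend_dataset/script.py, and the local-file branch shown here is an assumption for illustration, since it falls outside this diff.

import requests
import yaml


def load_config(yml_source: str) -> dict:
    """Load and parse YAML configuration from a file path or URL."""
    if yml_source.startswith(('http://', 'https://')):
        # The '/json' URL rewrite for AI Sheets datasets is gone; the server is
        # asked for a parseable representation via the Accept header instead.
        response = requests.get(
            yml_source,
            headers={'Accept': 'application/x-yaml; application/json'}
        )
        response.raise_for_status()
        return yaml.safe_load(response.text)
    # Hypothetical local-file fallback (not shown in the diff above).
    with open(yml_source, 'r', encoding='utf-8') as f:
        return yaml.safe_load(f)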