spine-crook committed
Commit 0b12b3f · verified · 1 parent: 7f63859

Upload folder using huggingface_hub

Files changed (4)
  1. conditioning_images.zip +3 -0
  2. dataset.jsonl +4 -0
  3. images.zip +3 -0
  4. test.py +86 -0
conditioning_images.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3dad6acf005d9cd9cc3c4204c220221fda9c622f4b8ef88aa1eb30496c7d27b1
+size 24404
dataset.jsonl ADDED
@@ -0,0 +1,4 @@
+{"text": "three image", "image": "images/3.png", "conditioning_image": "conditioning_images/3.png"}
+{"text": "zero image", "image": "images/0.png", "conditioning_image": "conditioning_images/0.png"}
+{"text": "two image", "image": "images/2.png", "conditioning_image": "conditioning_images/2.png"}
+{"text": "one image", "image": "images/1.png", "conditioning_image": "conditioning_images/1.png"}
images.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:211bc6be242253eae8c5c522b99ec89cbafbb5b497f3155de84a5bdb50939543
+size 286108
test.py ADDED
@@ -0,0 +1,86 @@
+import pandas as pd
+from huggingface_hub import hf_hub_url
+import datasets
+import os
+
+_VERSION = datasets.Version("0.0.2")
+
+_DESCRIPTION = "This dataset includes images and conditioning images for XYZ purpose."
+_HOMEPAGE = "https://www.example.com"
+_LICENSE = "MIT"
+_CITATION = """@article{YourDataset2021,
+    title={Your Dataset Title},
+    author={Your Name},
+    journal={Your Journal},
+    year={2021}
+}"""
+
+_FEATURES = datasets.Features({
+    "image": datasets.Value("string"),  # paths stored as strings; use datasets.Image() to decode eagerly
+    "conditioning_image": datasets.Value("string"),
+    "text": datasets.Value("string"),
+})
+
+METADATA_URL = hf_hub_url(
+    "spine-crook/test",
+    filename="dataset.jsonl",  # the metadata file uploaded in this commit
+    repo_type="dataset",
+)
+
+IMAGES_URL = hf_hub_url(
+    "spine-crook/test",
+    filename="images.zip",
+    repo_type="dataset",
+)
+
+CONDITIONING_IMAGES_URL = hf_hub_url(
+    "spine-crook/test",
+    filename="conditioning_images.zip",
+    repo_type="dataset",
+)
+
+_DEFAULT_CONFIG = datasets.BuilderConfig(name="default", version=_VERSION)
+
+class Test(datasets.GeneratorBasedBuilder):
+    BUILDER_CONFIGS = [_DEFAULT_CONFIG]
+    DEFAULT_CONFIG_NAME = "default"
+
+    def _info(self):
+        return datasets.DatasetInfo(
+            description=_DESCRIPTION,
+            features=_FEATURES,
+            supervised_keys=None,
+            homepage=_HOMEPAGE,
+            license=_LICENSE,
+            citation=_CITATION,
+        )
+
+    def _split_generators(self, dl_manager):
+        metadata_path = dl_manager.download(METADATA_URL)
+        images_dir = dl_manager.download_and_extract(IMAGES_URL)
+        conditioning_images_dir = dl_manager.download_and_extract(CONDITIONING_IMAGES_URL)
+
+        return [
+            datasets.SplitGenerator(
+                name=datasets.Split.TRAIN,
+                gen_kwargs={
+                    "metadata_path": metadata_path,
+                    "images_dir": images_dir,
+                    "conditioning_images_dir": conditioning_images_dir,
+                },
+            ),
+        ]
+
+    def _generate_examples(self, metadata_path, images_dir, conditioning_images_dir):
+        metadata = pd.read_json(metadata_path, lines=True)
+
+        for _, row in metadata.iterrows():
+            text = row["text"]
+            image_path = os.path.join(images_dir, row["image"])
+            conditioning_image_path = os.path.join(conditioning_images_dir, row["conditioning_image"])
+
+            yield row["image"], {  # the relative image path doubles as a unique example key
+                "text": text,
+                "image": image_path,
+                "conditioning_image": conditioning_image_path,
+            }
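
With the script and archives in place, the dataset can be loaded by repo id. A minimal usage sketch, assuming the repo spine-crook/test and a recent datasets release (script-based datasets there require trust_remote_code=True):

from datasets import load_dataset

ds = load_dataset("spine-crook/test", split="train", trust_remote_code=True)
example = ds[0]
print(example["text"], example["image"], example["conditioning_image"])

Because the features are plain strings, each example carries file paths rather than decoded images; switching the features to datasets.Image() would make the loader return PIL images instead.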