animelover committed
Commit 68e50d2 · 1 Parent(s): a5f90e3
Update touhou-images.py

Files changed: touhou-images.py (+42 -11)
touhou-images.py CHANGED
@@ -5,22 +5,43 @@ from datasets import DownloadManager, DatasetInfo
 from datasets.data_files import DataFilesDict
 
 _EXTENSION = [".png", ".jpg", ".jpeg"]
-_DESCRIPTION = ""
 _NAME = "animelover/touhou-images"
 _REVISION = "main"
 
 
 class DanbooruDataset(datasets.GeneratorBasedBuilder):
+    BUILDER_CONFIGS = [
+        # add number before name for sorting
+        datasets.BuilderConfig(
+            name="0-sfw",
+            description="sfw subset",
+        ),
+        datasets.BuilderConfig(
+            name="1-full",
+            description="full dataset",
+        ),
+        datasets.BuilderConfig(
+            name="2-tags",
+            description="only tags of dataset",
+        ),
+    ]
 
     def _info(self) -> DatasetInfo:
+        if self.config.name == "2-tags":
+            features = {
+                "tags": datasets.Value("string"),
+                "post_id": datasets.Value("int64")
+            }
+        else:
+            features = {
+                "image": datasets.Image(),
+                "tags": datasets.Value("string"),
+                "post_id": datasets.Value("int64")
+            }
         return datasets.DatasetInfo(
-            description=_DESCRIPTION,
-            features=datasets.Features(
-                {
-                    "image": datasets.Image(),
-                    "tags": datasets.Value("string")
-                }
-            ),
+            description=self.config.description,
+
+            features=datasets.Features(features),
             supervised_keys=None,
             citation="",
         )
@@ -42,11 +63,21 @@ class DanbooruDataset(datasets.GeneratorBasedBuilder):
         for path in filepath:
             all_fnames = {os.path.relpath(os.path.join(root, fname), start=path)
                           for root, _dirs, files in os.walk(path) for fname in files}
-            image_fnames = sorted(fname for fname in all_fnames
-                                  if os.path.splitext(fname)[1].lower() in _EXTENSION)
+            image_fnames = sorted([fname for fname in all_fnames if os.path.splitext(fname)[1].lower() in _EXTENSION],
+                                  reverse=True)
             for image_fname in image_fnames:
                 image_path = os.path.join(path, image_fname)
                 tags_path = os.path.join(path, os.path.splitext(image_fname)[0] + ".txt")
                 with open(tags_path, "r", encoding="utf-8") as f:
                     tags = f.read()
-                yield image_fname, {"image": image_path, "tags": tags}
+                if self.config.name == "0-sfw" and any(tag.strip() in nsfw_tags for tag in tags.split(",")):
+                    continue
+                post_id = int(os.path.splitext(os.path.basename(image_fname))[0])
+                if self.config.name == "2-tags":
+                    yield image_fname, {"tags": tags, "post_id": post_id}
+                else:
+                    yield image_fname, {"image": image_path, "tags": tags, "post_id": post_id}
+
+
+nsfw_tags = ["nude", "completely nude", "topless", "bottomless", "sex", "oral", "fellatio gesture", "tentacle sex",
+             "nipples", "pussy", "vaginal", "pubic hair", "anus", "ass focus", "penis", "cum", "condom", "sex toy"]