File size: 4,960 Bytes
81f161c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
import json, random, math, os
from pathlib import Path
from datasets import (
    BuilderConfig, DatasetInfo, DownloadManager, GeneratorBasedBuilder,
    SplitGenerator, Split, Features, Image, Value
)
from huggingface_hub import hf_hub_url

_REPO_ID = "infosys/OpenHumnoidActuatedFaceData"  # HF Hub dataset repo id
_IMAGES_PER_SHARD = 10_000            # how many files you put in each tar
_TAR_TPL = "images-{start:05d}-{end:05d}.tar"  # file-name pattern


class ImageSubsetConfig(BuilderConfig):
    """BuilderConfig carrying an optional random-sample size.

    Args:
        name: config name (e.g. ``"full"`` or ``"small"``).
        sample_size: number of images to randomly keep, or ``None``
            to keep the entire dataset.
        **kw: forwarded to :class:`BuilderConfig` (``description``,
            ``data_dir``, ...).
    """

    def __init__(self, name, sample_size=None, **kw):
        # Forward ALL remaining kwargs instead of silently dropping
        # everything except "description" (the original behavior).
        kw.setdefault("description", "")
        super().__init__(name=name, version="1.0.2", **kw)
        self.sample_size = sample_size


class MyImageDataset(GeneratorBasedBuilder):
    """Humanoid face images paired with 16 actuated-servo angles.

    Two configs:
      * ``"full"``  — stream every image in the repo (~100 GB).
      * ``"small"`` — stream a reproducible random sample of 20 000 images.
    """

    # Fixed seed so the "small" subset is identical on every run;
    # an unseeded sample would defeat HF's dataset-cache fingerprinting.
    _SAMPLE_SEED = 42

    BUILDER_CONFIGS = [
        ImageSubsetConfig("full",  sample_size=None,
                          description="Entire dataset (≈100 GB)"),
        ImageSubsetConfig("small", sample_size=20_000,
                          description="20 K random images"),
    ]
    DEFAULT_CONFIG_NAME = "small"

    # ------------------------------------------------------------------ #
    # 1. Schema                                                          #
    # ------------------------------------------------------------------ #
    def _info(self):
        """Declare the example schema: one image plus 16 int32 angles."""
        return DatasetInfo(
            description="Humanoid face images + 16 servo angles.",
            features=Features(
                {
                    "image": Image(),              # PIL.Image is fine
                    "actuated_angle":
                        {str(i): Value("int32") for i in range(16)},
                }
            ),
        )

    # ------------------------------------------------------------------ #
    # 2. Download                                                         #
    # ------------------------------------------------------------------ #
    def _split_generators(self, dl_manager: DownloadManager):
        """Download metadata + tar shards and define the train split."""
        # ---- 2-a: load metadata (file name -> {servo index: angle}) -- #
        meta_url = hf_hub_url(_REPO_ID, "metadata.json", repo_type="dataset")
        meta_path = dl_manager.download(meta_url)
        with open(meta_path, encoding="utf-8") as f:
            metadata = json.load(f)

        all_names = sorted(metadata)
        if self.config.sample_size:
            # Seeded RNG -> the "small" subset is stable across runs.
            rng = random.Random(self._SAMPLE_SEED)
            selected = sorted(rng.sample(all_names, self.config.sample_size))
        else:
            selected = all_names
        selected_set = set(selected)

        # ---- 2-b: figure out which shards we need -------------------- #
        # Ceil-division gives the shard count directly and yields 0 for
        # an empty dataset (the floor(max_idx/...)+1 form was equivalent
        # but harder to read).
        n_shards = math.ceil(len(all_names) / _IMAGES_PER_SHARD)
        max_idx = len(all_names) - 1
        shard_files = [
            _TAR_TPL.format(start=s * _IMAGES_PER_SHARD,
                            end=min((s + 1) * _IMAGES_PER_SHARD - 1, max_idx))
            for s in range(n_shards)
        ]

        # ---- 2-c: download each tar; we stream from the archives ----- #
        # directly, so no extract() step is needed.
        tar_urls   = [hf_hub_url(_REPO_ID, f, repo_type="dataset")
                      for f in shard_files]
        local_tars = dl_manager.download(tar_urls)          # .tar paths

        return [
            SplitGenerator(
                name=Split.TRAIN,
                gen_kwargs={
                    "tar_paths": local_tars,
                    "metadata": metadata,
                    "want": selected_set,
                },
            )
        ]

    # ------------------------------------------------------------------ #
    # 3. Generate                                                         #
    # ------------------------------------------------------------------ #
    def _generate_examples(self, tar_paths, metadata, want):
        """Stream over each tar and yield only the requested files.

        Yields ``(running_index, example)`` where the example holds the
        raw image bytes and all 16 servo angles (missing angles -> 0).
        """
        idx = 0
        for tar_path in tar_paths:
            # Iterate archive members without extracting to disk.
            for inner_path, fobj in self._iter_archive_fast(tar_path):
                fname = Path(inner_path).name   # strip any dir prefix
                if fname not in want:
                    continue

                angles = metadata[fname]
                yield idx, {
                    "image": {"bytes": fobj.read(), "path": fname},
                    "actuated_angle":
                        {str(i): int(angles.get(str(i), 0))
                         for i in range(16)},
                }
                idx += 1

    # Small wrapper so we don't import datasets.utils.file_utils directly
    @staticmethod
    def _iter_archive_fast(tar_path):
        """Yield ``(member_name, file_object)`` pairs from a tar.

        File objects are only valid while the archive is open, i.e.
        within the consuming loop iteration.
        """
        import tarfile
        with tarfile.open(tar_path) as tar:
            for member in tar:
                if not member.isfile():
                    continue
                fobj = tar.extractfile(member)
                if fobj is not None:   # defensive: skip unreadable members
                    yield member.name, fobj