"""
Download VLM-Gym inference dataset from Hugging Face Hub.

Usage:
    # Download everything
    python load_from_hf.py --output_dir ./inference_dataset

    # Download only test sets (no large assets)
    python load_from_hf.py --output_dir ./inference_dataset --subset test_sets

    # Download specific difficulty
    python load_from_hf.py --output_dir ./inference_dataset --subset easy

Examples:
    from load_from_hf import download_dataset, get_dataset_path

    # Download and get path
    dataset_path = download_dataset()

    # Use in your code
    test_set_easy = dataset_path / "test_set_easy"
"""
|
|
import argparse
from pathlib import Path
from typing import List, Optional

from huggingface_hub import hf_hub_download, snapshot_download
|
|
# Hugging Face dataset repository that hosts the VLM-Gym inference data.
REPO_ID = "VisGym/inference-dataset"

# Top-level repo folders, grouped by role; SUBSETS composes these groups
# into the names users may pass as --subset.
_TEST_FOLDERS = ["test_set_easy", "test_set_hard"]
_STATE_FOLDERS = ["initial_states_easy", "initial_states_hard"]
_ASSET_FOLDERS = ["partial_datasets"]

SUBSETS = {
    "test_sets": list(_TEST_FOLDERS),
    "initial_states": list(_STATE_FOLDERS),
    "easy": ["test_set_easy", "initial_states_easy"],
    "hard": ["test_set_hard", "initial_states_hard"],
    "partial_datasets": list(_ASSET_FOLDERS),
    "all": _TEST_FOLDERS + _STATE_FOLDERS + _ASSET_FOLDERS,
}
|
|
|
|
def download_dataset(
    output_dir: Optional[str] = None,
    subset: str = "all",
    repo_id: str = REPO_ID,
    token: Optional[str] = None,
) -> Path:
    """
    Download the VLM-Gym inference dataset from the Hugging Face Hub.

    Args:
        output_dir: Directory to download into. If None, the HF cache is used.
        subset: Which subset to download. Options:
            - "all": Everything (default)
            - "test_sets": Only test_set_easy and test_set_hard
            - "initial_states": Only initial_states_easy and initial_states_hard
            - "easy": Only easy difficulty (test_set + initial_states)
            - "hard": Only hard difficulty (test_set + initial_states)
            - "partial_datasets": Only the large asset files
        repo_id: Hugging Face repository ID.
        token: Optional HF token for private repos.

    Returns:
        Path to the downloaded dataset directory.

    Raises:
        ValueError: If ``subset`` is not a key of ``SUBSETS``.
    """
    if subset not in SUBSETS:
        raise ValueError(f"Unknown subset: {subset}. Choose from: {list(SUBSETS.keys())}")

    folders = SUBSETS[subset]

    print(f"Downloading subset '{subset}' from {repo_id}...")
    print(f"Folders: {folders}")

    # Restrict the snapshot to the requested top-level folders only, so
    # "small" subsets never pull the large asset files.
    local_dir = snapshot_download(
        repo_id=repo_id,
        repo_type="dataset",
        local_dir=output_dir,
        allow_patterns=[f"{name}/**" for name in folders],
        token=token,
    )

    print(f"Downloaded to: {local_dir}")
    return Path(local_dir)
|
|
|
|
def get_dataset_path(
    output_dir: Optional[str] = None,
    subset: str = "all",
    repo_id: str = REPO_ID,
    token: Optional[str] = None,
) -> Path:
    """
    Return the local path to the dataset, downloading it if necessary.

    Thin convenience wrapper around ``download_dataset``; see that function
    for the meaning of each parameter.
    """
    return download_dataset(
        output_dir=output_dir,
        subset=subset,
        repo_id=repo_id,
        token=token,
    )
|
|
|
|
def list_available_subsets() -> None:
    """Print each available subset name with the repo folders it covers."""
    print("Available subsets:")
    for subset_name in SUBSETS:
        print(f"  {subset_name}: {', '.join(SUBSETS[subset_name])}")
|
|
|
|
def main():
    """CLI entry point: parse arguments and run the requested download."""
    parser = argparse.ArgumentParser(
        description="Download VLM-Gym inference dataset from Hugging Face Hub"
    )

    # Flag table keeps the CLI surface readable in one place.
    option_specs = [
        (
            "--output_dir",
            dict(type=str, default=None, help="Output directory (default: HF cache)"),
        ),
        (
            "--subset",
            dict(
                type=str,
                default="all",
                choices=list(SUBSETS.keys()),
                help="Which subset to download",
            ),
        ),
        (
            "--repo_id",
            dict(type=str, default=REPO_ID, help="Hugging Face repository ID"),
        ),
        (
            "--token",
            dict(type=str, default=None, help="Hugging Face token (for private repos)"),
        ),
        (
            "--list-subsets",
            dict(action="store_true", help="List available subsets and exit"),
        ),
    ]
    for flag, kwargs in option_specs:
        parser.add_argument(flag, **kwargs)

    args = parser.parse_args()

    # --list-subsets is informational only: print and skip any download.
    if args.list_subsets:
        list_available_subsets()
        return

    download_dataset(
        output_dir=args.output_dir,
        subset=args.subset,
        repo_id=args.repo_id,
        token=args.token,
    )


if __name__ == "__main__":
    main()
|
|