"""Download ColliderML dataset assets and manage README URL updates."""
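
# Example command lines (a sketch only; "colliderml_data.py" is an assumed file
# name for this module, substitute the actual script name or console entry point):
#
#   python colliderml_data.py download -o ./data
#   python colliderml_data.py download -o ./data --dry-run
#   python colliderml_data.py rewrite -o ./data -u https://example.com/files --dry-run
#   python colliderml_data.py verify
#   python colliderml_data.py checksum --generate -d checksums
#   python colliderml_data.py schema --output-file schema.txt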

from __future__ import annotations

import asyncio
import contextlib
import hashlib
import logging
import os
import re
import tempfile
from dataclasses import dataclass
from pathlib import Path
from typing import Annotated, Any, Dict, List, Sequence
from urllib.parse import urlparse

import aiohttp
import pyarrow.parquet as pq
import typer
import yaml
from rich.console import Console
from rich.logging import RichHandler
from rich.progress import (
    BarColumn,
    DownloadColumn,
    Progress,
    TextColumn,
    TimeElapsedColumn,
    TransferSpeedColumn,
)
from rich.table import Table

console = Console()
app = typer.Typer()

logging.basicConfig(
    level=logging.INFO,
    format="%(message)s",
    datefmt="[%X]",
    handlers=[RichHandler(console=console, show_path=False)],
)
logger = logging.getLogger(__name__)


@app.callback()
def main_callback(
    verbose: Annotated[
        bool,
        typer.Option(
            "--verbose",
            "-v",
            help="Enable verbose logging.",
        ),
    ] = False,
) -> None:
    """Manage dataset file URLs declared in README front matter."""
    if verbose:
        logging.getLogger().setLevel(logging.DEBUG)
        logger.debug("Verbose logging enabled")


@dataclass
class DataFileEntry:
    config_name: str
    path: Sequence[str]
    url: str

    def parsed(self) -> tuple[str, str]:
        parsed = urlparse(self.url)
        return parsed.netloc, parsed.path.lstrip("/")

    def relative_path(self) -> Path:
        netloc, remainder = self.parsed()
        return Path(netloc) / Path(remainder)


@dataclass
class DownloadResult:
    entry: DataFileEntry
    path: Path
    success: bool
    skipped: bool
    error: Exception | None = None
    order: int = 0


@dataclass
class VerifyResult:
    entry: DataFileEntry
    ok: bool
    status: int | None
    detail: str = ""
    order: int = 0


@dataclass
class ChecksumResult:
    entry: DataFileEntry
    filename: str
    computed_hash: str | None
    expected_hash: str | None
    matches: bool
    success: bool
    error: Exception | None = None
    order: int = 0


@dataclass
class SchemaInfo:
    config_name: str
    filename: str
    columns: List[tuple[str, str]]
    num_rows: int | None
    success: bool
    error: Exception | None = None


def read_front_matter(readme_path: Path) -> tuple[str, str]:
    text = readme_path.read_text(encoding="utf-8")
    if not text.startswith("---\n"):
        raise ValueError("README.md does not start with YAML front matter.")
    try:
        front_matter, body = text[4:].split("\n---\n", 1)
    except ValueError as exc:
        raise ValueError("README.md front matter is not terminated by '---'.") from exc
    return front_matter, body


def load_data_file_entries(front_matter_text: str) -> List[DataFileEntry]:
    data = yaml.safe_load(front_matter_text)
    configs = data.get("configs", []) if isinstance(data, dict) else []
    entries: List[DataFileEntry] = []

    def _walk(value: Any, path: list[str]) -> None:
        if value is None:
            return
        if isinstance(value, str):
            entries.append(
                DataFileEntry(config_name=current_config, path=tuple(path), url=value)
            )
        elif isinstance(value, list):
            for idx, item in enumerate(value):
                _walk(item, [*path, str(idx)])
        elif isinstance(value, dict):
            for key, item in value.items():
                _walk(item, [*path, str(key)])

    for config in configs:
        if not isinstance(config, dict):
            continue
        current_config = str(config.get("config_name", "<unnamed>"))
        _walk(config.get("data_files"), ["data_files"])

    return entries
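

# For reference, load_data_file_entries expects front matter shaped roughly like
# the following (illustrative config name and URLs, not the real dataset layout):
#
#   configs:
#     - config_name: particles
#       data_files:
#         - https://example.com/files/particles/part-000.parquet
#         - https://example.com/files/particles/part-001.parquet
#
# Every string found while walking a config's data_files tree is recorded as a URL.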


def replace_once(text: str, old: str, new: str) -> str:
    index = text.find(old)
    if index == -1:
        raise ValueError(f"Value '{old}' was not found in front matter.")
    return f"{text[:index]}{new}{text[index + len(old):]}"


def build_rewritten_url(
    result: DownloadResult, output_dir: Path, base_url: str
) -> str:
    """Build new URL by replacing output_dir with base_url in the file path.

    For example:
    - File path: /data/output/particles/file.parquet
    - Output dir: /data/output
    - Base URL: https://example.com/files
    - Result: https://example.com/files/particles/file.parquet
    """
    relative_path = result.path.relative_to(output_dir)

    base_url = base_url.rstrip("/")
    return f"{base_url}/{relative_path.as_posix()}"


def resolve_destination(entry: DataFileEntry, output_dir: Path) -> Path:
    _, remote_path = entry.parsed()
    filename = Path(remote_path).name or remote_path
    base_dir = output_dir / entry.config_name
    return (base_dir / filename).resolve()
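

# For example (hypothetical entry): with config_name "particles" and
# url "https://example.com/files/particles/part-000.parquet", the destination is
# <output_dir>/particles/part-000.parquet, i.e. files are grouped by config name
# and only the final component of the remote path is kept.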


def move_with_progress(src: Path, dst: Path, progress: Progress, task_id: int) -> None:
    """Move file with progress tracking for cross-filesystem moves."""
    # Fast path: a plain rename works when source and destination share a filesystem.
    try:
        os.rename(src, dst)
        return
    except OSError:
        # Cross-filesystem move; fall back to a manual copy below.
        pass

    file_size = src.stat().st_size
    progress.update(task_id, total=file_size)

    # Copy in 8 MiB chunks so the progress bar stays responsive.
    buffer_size = 8 << 20
    with src.open("rb") as fsrc, dst.open("wb") as fdst:
        copied = 0
        while True:
            buf = fsrc.read(buffer_size)
            if not buf:
                break
            fdst.write(buf)
            copied += len(buf)
            progress.update(task_id, completed=copied)

        # Ensure the copy is flushed to disk before removing the source.
        fdst.flush()
        os.fsync(fdst.fileno())

    # Sanity-check the copied size before deleting the source file.
    if dst.stat().st_size != file_size:
        raise IOError(
            f"File size mismatch after copy: expected {file_size}, got {dst.stat().st_size}"
        )

    src.unlink()


async def download_one(
    entry: DataFileEntry,
    output_dir: Path,
    session: aiohttp.ClientSession,
    semaphore: asyncio.Semaphore,
    skip_existing: bool,
    progress: Progress,
    order: int,
    staging_dir: Path | None = None,
) -> DownloadResult:
    _, remote_path = entry.parsed()
    filename = Path(remote_path).name or remote_path
    terse_name = (filename[:32] + "…") if len(filename) > 33 else filename
    description = f"{entry.config_name}: {terse_name}"
    async with semaphore:
        task_id: int | None = None
        destination = resolve_destination(entry, output_dir)

        # When staging is enabled, download into the staging tree first.
        if staging_dir:
            download_dest = resolve_destination(entry, staging_dir)
            tmp_path = download_dest.parent / f"{download_dest.name}.part"
        else:
            download_dest = destination
            tmp_path = destination.parent / f"{destination.name}.part"

        try:
            destination.parent.mkdir(parents=True, exist_ok=True)
            download_dest.parent.mkdir(parents=True, exist_ok=True)
            task_id = progress.add_task(description, total=0, start=False)
            progress.start_task(task_id)
            async with session.get(entry.url) as response:
                response.raise_for_status()
                total_bytes = response.content_length or 0

                # Skip files that already exist locally with the expected size.
                if skip_existing and destination.exists():
                    local_size = destination.stat().st_size
                    if total_bytes and local_size == total_bytes:
                        if task_id is not None:
                            progress.remove_task(task_id)
                            task_id = None
                        logger.info(
                            f"Skipped {destination.name} ({total_bytes:,} bytes) - already exists with correct size"
                        )
                        return DownloadResult(
                            entry=entry,
                            path=destination,
                            success=True,
                            skipped=True,
                            order=order,
                        )
                    else:
                        if not total_bytes:
                            reason = f"remote size unknown, local size is {local_size:,} bytes"
                        else:
                            reason = f"size mismatch (local: {local_size:,} bytes, remote: {total_bytes:,} bytes)"
                        logger.info(f"Downloading {destination.name} - {reason}")
                else:
                    if not destination.exists():
                        size_info = f" ({total_bytes:,} bytes)" if total_bytes else ""
                        logger.info(
                            f"Downloading {destination.name}{size_info} - file not found locally"
                        )
                    else:
                        size_info = f" ({total_bytes:,} bytes)" if total_bytes else ""
                        logger.info(
                            f"Downloading {destination.name}{size_info} - skip_existing is disabled"
                        )

                if total_bytes:
                    progress.update(task_id, total=total_bytes)
                with tmp_path.open("wb") as handle:
                    async for chunk in response.content.iter_chunked(1 << 17):
                        handle.write(chunk)
                        progress.update(task_id, advance=len(chunk))

                    # Flush to disk before promoting the .part file.
                    handle.flush()
                    os.fsync(handle.fileno())

            # Promote the completed .part file to its real name.
            tmp_path.rename(download_dest)

            # If a staging directory was used, move the file to its final home.
            if staging_dir:
                logger.info(
                    f"Moving {download_dest.name} from staging to {destination.parent.name}/"
                )
                if task_id is not None:
                    progress.update(
                        task_id, description=f"{entry.config_name}: Moving {terse_name}"
                    )
                await asyncio.to_thread(
                    move_with_progress, download_dest, destination, progress, task_id
                )

            return DownloadResult(
                entry=entry,
                path=destination,
                success=True,
                skipped=False,
                order=order,
            )
        except Exception as exc:
            logger.error(f"Failed to download {entry.config_name}/{filename}: {exc}")
            # Clean up any partial download.
            with contextlib.suppress(FileNotFoundError):
                tmp_path.unlink()
            return DownloadResult(
                entry=entry,
                path=destination,
                success=False,
                skipped=False,
                error=exc,
                order=order,
            )
        finally:
            if task_id is not None:
                progress.remove_task(task_id)


async def perform_downloads(
    entries: Sequence[DataFileEntry],
    output_dir: Path,
    max_concurrency: int,
    timeout: float,
    skip_existing: bool,
    use_staging: bool = False,
) -> List[DownloadResult]:
    if not entries:
        return []

    with contextlib.ExitStack() as stack:
        # Optionally download into a temporary staging directory first.
        staging_dir: Path | None = None
        if use_staging:
            staging_tmp = stack.enter_context(
                tempfile.TemporaryDirectory(prefix="dataset_staging_")
            )
            staging_dir = Path(staging_tmp)
            logger.info(f"Using staging directory: {staging_dir}")

        semaphore = asyncio.Semaphore(max_concurrency)
        results: List[DownloadResult] = []
        timeout_cfg = aiohttp.ClientTimeout(total=timeout)
        progress = Progress(
            TextColumn("{task.description}"),
            BarColumn(bar_width=None),
            DownloadColumn(),
            TransferSpeedColumn(),
            TimeElapsedColumn(),
            console=console,
        )

        async with aiohttp.ClientSession(timeout=timeout_cfg) as session:
            with progress:
                tasks: list[asyncio.Task[DownloadResult]] = []
                for order, entry in enumerate(entries):
                    task = asyncio.create_task(
                        download_one(
                            entry=entry,
                            output_dir=output_dir,
                            session=session,
                            semaphore=semaphore,
                            skip_existing=skip_existing,
                            progress=progress,
                            order=order,
                            staging_dir=staging_dir,
                        )
                    )
                    tasks.append(task)

                for future in asyncio.as_completed(tasks):
                    result = await future
                    results.append(result)

    results.sort(key=lambda item: item.order)
    return results


async def get_remote_file_size(
    entry: DataFileEntry,
    session: aiohttp.ClientSession,
    semaphore: asyncio.Semaphore,
) -> int | None:
    """Get remote file size via HEAD request."""
    async with semaphore:
        try:
            async with session.head(entry.url, allow_redirects=True) as response:
                if response.status < 400:
                    return response.content_length
        except Exception:
            pass
    return None


async def fetch_remote_sizes(
    entries: Sequence[DataFileEntry],
    max_concurrency: int,
    timeout: float,
) -> Dict[str, int | None]:
    """Fetch remote file sizes for all entries."""
    semaphore = asyncio.Semaphore(max_concurrency)
    timeout_cfg = aiohttp.ClientTimeout(total=timeout)
    sizes: Dict[str, int | None] = {}

    progress = Progress(
        TextColumn("Fetching remote file sizes..."),
        BarColumn(),
        TextColumn("{task.completed}/{task.total}"),
        TimeElapsedColumn(),
        console=console,
    )

    async with aiohttp.ClientSession(timeout=timeout_cfg) as session:
        with progress:
            task_id = progress.add_task("Fetching sizes", total=len(entries))
            tasks = {
                entry.url: asyncio.create_task(
                    get_remote_file_size(entry, session, semaphore)
                )
                for entry in entries
            }
            for url, task in tasks.items():
                size = await task
                sizes[url] = size
                progress.advance(task_id)

    return sizes


async def verify_one(
    entry: DataFileEntry,
    session: aiohttp.ClientSession,
    semaphore: asyncio.Semaphore,
    order: int,
) -> VerifyResult:
    async with semaphore:
        last_error: str = ""
        for method in ("HEAD", "GET"):
            try:
                async with session.request(
                    method, entry.url, allow_redirects=True
                ) as response:
                    status = response.status
                    if status < 400:
                        return VerifyResult(
                            entry=entry, ok=True, status=status, order=order
                        )
                    if method == "HEAD" and status in {405, 501}:
                        last_error = f"{method} returned {status}; retrying with GET."
                        continue
                    return VerifyResult(
                        entry=entry,
                        ok=False,
                        status=status,
                        detail=f"{method} -> {status}",
                        order=order,
                    )
            except Exception as exc:
                last_error = str(exc)
        return VerifyResult(
            entry=entry,
            ok=False,
            status=None,
            detail=last_error or "Unknown error",
            order=order,
        )


async def perform_verification(
    entries: Sequence[DataFileEntry],
    max_concurrency: int,
    timeout: float,
) -> List[VerifyResult]:
    if not entries:
        return []

    semaphore = asyncio.Semaphore(max_concurrency)
    timeout_cfg = aiohttp.ClientTimeout(total=timeout)
    results: List[VerifyResult] = []
    progress = Progress(
        TextColumn("{task.description}"),
        BarColumn(),
        TextColumn("{task.completed}/{task.total}"),
        TimeElapsedColumn(),
        console=console,
    )

    async with aiohttp.ClientSession(timeout=timeout_cfg) as session:
        with progress:
            task_id = progress.add_task("Verifying dataset URLs", total=len(entries))
            tasks = [
                asyncio.create_task(verify_one(entry, session, semaphore, order=order))
                for order, entry in enumerate(entries)
            ]
            for future in asyncio.as_completed(tasks):
                result = await future
                results.append(result)
                progress.advance(task_id)

    results.sort(key=lambda item: item.order)
    return results


async def compute_file_hash(
    entry: DataFileEntry,
    session: aiohttp.ClientSession,
    semaphore: asyncio.Semaphore,
    progress: Progress,
    order: int,
) -> ChecksumResult:
    """Stream the remote file and compute its SHA256 hash."""
    _, remote_path = entry.parsed()
    filename = Path(remote_path).name or remote_path
    terse_name = (filename[:32] + "…") if len(filename) > 33 else filename
    description = f"{entry.config_name}: {terse_name}"

    async with semaphore:
        task_id: int | None = None
        try:
            task_id = progress.add_task(description, total=0, start=False)
            progress.start_task(task_id)

            async with session.get(entry.url) as response:
                response.raise_for_status()
                total_bytes = response.content_length or 0

                if total_bytes:
                    progress.update(task_id, total=total_bytes)

                hasher = hashlib.sha256()
                async for chunk in response.content.iter_chunked(1 << 17):
                    hasher.update(chunk)
                    progress.update(task_id, advance=len(chunk))

            computed_hash = hasher.hexdigest()
            return ChecksumResult(
                entry=entry,
                filename=filename,
                computed_hash=computed_hash,
                expected_hash=None,
                matches=False,
                success=True,
                order=order,
            )
        except Exception as exc:
            return ChecksumResult(
                entry=entry,
                filename=filename,
                computed_hash=None,
                expected_hash=None,
                matches=False,
                success=False,
                error=exc,
                order=order,
            )
        finally:
            if task_id is not None:
                progress.remove_task(task_id)


async def perform_checksum_verification(
    entries: Sequence[DataFileEntry],
    expected_hashes_by_config: Dict[str, Dict[str, str]],
    max_concurrency: int,
    timeout: float,
) -> List[ChecksumResult]:
    """Download files and verify their checksums.

    expected_hashes_by_config: dict mapping config_name -> (filename -> hash)
    """
    if not entries:
        return []

    semaphore = asyncio.Semaphore(max_concurrency)
    timeout_cfg = aiohttp.ClientTimeout(total=timeout)
    results: List[ChecksumResult] = []
    progress = Progress(
        TextColumn("{task.description}"),
        BarColumn(),
        DownloadColumn(),
        TransferSpeedColumn(),
        TimeElapsedColumn(),
        console=console,
    )

    async with aiohttp.ClientSession(timeout=timeout_cfg) as session:
        with progress:
            tasks = [
                asyncio.create_task(
                    compute_file_hash(
                        entry, session, semaphore, order=order, progress=progress
                    )
                )
                for order, entry in enumerate(entries)
            ]
            for future in asyncio.as_completed(tasks):
                result = await future
                # Attach the expected hash (if any) for this config and filename.
                config_hashes = expected_hashes_by_config.get(
                    result.entry.config_name, {}
                )
                result.expected_hash = config_hashes.get(result.filename)
                if result.success and result.expected_hash:
                    result.matches = result.computed_hash == result.expected_hash
                results.append(result)

    results.sort(key=lambda item: item.order)
    return results


def load_checksums_for_config(checksum_dir: Path, config_name: str) -> Dict[str, str]:
    """Load expected hashes from SHA256SUM-style file for a specific config.

    Returns a dict mapping filename -> hash.
    """
    checksum_file = checksum_dir / f"{config_name}.sha256"
    if not checksum_file.exists():
        return {}

    hashes: Dict[str, str] = {}
    try:
        with checksum_file.open("r", encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                if not line or line.startswith("#"):
                    continue
                # Each line is "<hash> <filename>"; split on the first whitespace run.
                parts = line.split(None, 1)
                if len(parts) == 2:
                    hash_value, filename = parts
                    hashes[filename] = hash_value
    except OSError as exc:
        logger.warning(f"Failed to load checksum file {checksum_file}: {exc}")

    return hashes


def save_checksums_for_config(
    checksum_dir: Path, config_name: str, file_hashes: Dict[str, str]
) -> None:
    """Save hashes to SHA256SUM-style file for a specific config.

    file_hashes: dict mapping filename -> hash
    """
    checksum_dir.mkdir(parents=True, exist_ok=True)
    checksum_file = checksum_dir / f"{config_name}.sha256"

    with checksum_file.open("w", encoding="utf-8") as f:
        for filename in sorted(file_hashes.keys()):
            hash_value = file_hashes[filename]
            f.write(f"{hash_value} {filename}\n")

    logger.info(f"Saved {len(file_hashes)} checksums to {checksum_file}")
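

# The per-config checksum files use a sha256sum-style layout: one line per file,
# hex digest first, then the filename, separated by whitespace. For example,
# checksums/particles.sha256 might contain (placeholder digest):
#
#   <64-character sha256 hex digest> part-000.parquet
#
# Blank lines and lines starting with "#" are ignored when loading.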


@app.command()
def download(
    output_dir: Annotated[
        Path,
        typer.Option(
            "--output-dir",
            "-o",
            help="Directory where files will be stored.",
            resolve_path=True,
        ),
    ],
    readme_path: Annotated[
        Path,
        typer.Option(
            "--readme-path",
            "-r",
            help="Path to the README file with YAML front matter.",
            exists=True,
            resolve_path=True,
            dir_okay=False,
        ),
    ] = Path("README.md"),
    max_concurrency: Annotated[
        int,
        typer.Option(
            "--max-concurrency",
            "-c",
            min=1,
            show_default=True,
            help="Maximum number of concurrent downloads.",
        ),
    ] = 4,
    timeout: Annotated[
        float,
        typer.Option(
            "--timeout",
            min=1.0,
            show_default=True,
            help="Request timeout in seconds.",
        ),
    ] = 600.0,
    dry_run: Annotated[
        bool,
        typer.Option(
            "--dry-run/--no-dry-run",
            show_default=True,
            help="Preview downloads and README rewrites without performing any changes.",
        ),
    ] = False,
    skip_existing: Annotated[
        bool,
        typer.Option(
            "--skip-existing/--no-skip-existing",
            show_default=True,
            help="Skip downloading files that already exist locally.",
        ),
    ] = True,
    use_staging: Annotated[
        bool,
        typer.Option(
            "--stage/--no-stage",
            show_default=True,
            help="Download to a temporary staging directory first, then move to final destination.",
        ),
    ] = False,
    clean_invalid: Annotated[
        bool,
        typer.Option(
            "--clean-invalid",
            help="Delete files with incorrect sizes but don't download. Useful for cleaning up failed downloads.",
        ),
    ] = False,
) -> None:
    """Download the data files referenced in the README front matter."""
    front_matter_text, _ = read_front_matter(readme_path)
    entries = load_data_file_entries(front_matter_text)
    if not entries:
        logger.warning("No data_files entries found in README front matter.")
        raise typer.Exit(code=0)

    logger.info(f"Found {len(entries)} data file URLs in {readme_path}.")
    output_dir = output_dir.resolve()

    if clean_invalid:
        # Compare local sizes against the remote sizes and delete mismatching files.
        logger.info("Fetching remote file sizes to check for invalid files...")
        remote_sizes = asyncio.run(
            fetch_remote_sizes(
                entries=entries,
                max_concurrency=max_concurrency,
                timeout=timeout,
            )
        )

        deleted = 0
        skipped = 0
        missing = 0
        size_unknown = 0

        table = Table(title="Cleaning invalid files")
        table.add_column("Config")
        table.add_column("File")
        table.add_column("Status")
        table.add_column("Local size")
        table.add_column("Remote size")

        for entry in entries:
            destination = resolve_destination(entry, output_dir)
            _, remote_path = entry.parsed()
            filename = Path(remote_path).name or remote_path

            if not destination.exists():
                missing += 1
                table.add_row(
                    entry.config_name,
                    filename,
                    "[dim]Not found[/dim]",
                    "-",
                    "-",
                )
                continue

            local_size = destination.stat().st_size
            remote_size = remote_sizes.get(entry.url)

            if not remote_size:
                size_unknown += 1
                table.add_row(
                    entry.config_name,
                    filename,
                    "[cyan]Skipped (remote size unknown)[/cyan]",
                    f"{local_size:,}",
                    "Unknown",
                )
                continue

            if local_size == remote_size:
                skipped += 1
                table.add_row(
                    entry.config_name,
                    filename,
                    "[green]Valid[/green]",
                    f"{local_size:,}",
                    f"{remote_size:,}",
                )
            else:
                destination.unlink()
                deleted += 1
                logger.warning(
                    f"Deleted {destination} (size mismatch: {local_size:,} != {remote_size:,})"
                )
                table.add_row(
                    entry.config_name,
                    filename,
                    "[red]Deleted (size mismatch)[/red]",
                    f"{local_size:,}",
                    f"{remote_size:,}",
                )

        console.print(table)
        logger.info(
            f"Summary: {deleted} deleted, {skipped} valid, {missing} missing, {size_unknown} unknown size"
        )
        return

    if dry_run:
        # Preview the download plan without touching any files or the README.
        logger.info("Fetching remote file sizes for dry-run preview...")
        remote_sizes = asyncio.run(
            fetch_remote_sizes(
                entries=entries,
                max_concurrency=max_concurrency,
                timeout=timeout,
            )
        )

        preview = Table(title="Download plan (dry-run)")
        preview.add_column("Config")
        preview.add_column("Local file", overflow="fold")
        preview.add_column("Status")
        preview.add_column("Local size")
        preview.add_column("Remote size")
        preview.add_column("Source URL", overflow="fold")

        for order, entry in enumerate(entries):
            destination = resolve_destination(entry, output_dir)
            try:
                relative = destination.relative_to(output_dir)
            except ValueError:
                relative = Path(destination)

            local_exists = destination.exists()
            local_size = destination.stat().st_size if local_exists else None
            remote_size = remote_sizes.get(entry.url)

            if not local_exists:
                status = "[yellow]Will download[/yellow]"
                local_size_str = "-"
            elif not skip_existing:
                status = "[red]Will overwrite[/red]"
                local_size_str = f"{local_size:,}"
            elif remote_size and local_size == remote_size:
                status = "[green]Will skip (exists)[/green]"
                local_size_str = f"{local_size:,}"
            elif not remote_size:
                status = "[cyan]Will skip (exists, size unknown)[/cyan]"
                local_size_str = f"{local_size:,}"
            else:
                status = "[red]Will overwrite (size mismatch)[/red]"
                local_size_str = f"{local_size:,}"

            remote_size_str = f"{remote_size:,}" if remote_size else "Unknown"

            row_data = [
                entry.config_name,
                relative.as_posix(),
                status,
                local_size_str,
                remote_size_str,
                entry.url,
            ]

            preview.add_row(*row_data)

        console.print(preview)

        # Tally the planned actions for a one-line summary.
        will_download = 0
        will_skip = 0
        will_overwrite = 0

        for entry in entries:
            dest = resolve_destination(entry, output_dir)
            local_exists = dest.exists()
            local_size = dest.stat().st_size if local_exists else None
            remote_size = remote_sizes.get(entry.url)

            if not local_exists:
                will_download += 1
            elif not skip_existing:
                will_overwrite += 1
            elif remote_size and local_size == remote_size:
                will_skip += 1
            elif not remote_size:
                will_skip += 1
            else:
                will_overwrite += 1

        logger.info(
            f"Summary: {will_download} new, {will_skip} will skip, {will_overwrite} will overwrite"
        )
        logger.info("Dry run: no files downloaded and README left unchanged.")
        return

    output_dir.mkdir(parents=True, exist_ok=True)

    results = asyncio.run(
        perform_downloads(
            entries=entries,
            output_dir=output_dir,
            max_concurrency=max_concurrency,
            timeout=timeout,
            skip_existing=skip_existing,
            use_staging=use_staging,
        )
    )

    successes = sum(1 for item in results if item.success)
    failures = len(results) - successes
    skipped = sum(1 for item in results if item.skipped)
    logger.info(f"{successes} succeeded, {failures} failed, {skipped} skipped.")

    if failures:
        for item in results:
            if not item.success:
                logger.error(f"Error: {item.entry.url} -> {item.error}")
        raise typer.Exit(code=1)


@app.command()
def rewrite(
    output_dir: Annotated[
        Path,
        typer.Option(
            "--output-dir",
            "-o",
            help="Directory where downloaded files are stored.",
            resolve_path=True,
        ),
    ],
    base_url: Annotated[
        str,
        typer.Option(
            "--base-url",
            "-u",
            help=(
                "Base URL for rewriting URLs in README. Files under output_dir will be rewritten "
                "to use this base URL. Example: if output_dir=/data/files and base_url=https://example.com/dataset, "
                "then /data/files/particles/file.parquet becomes https://example.com/dataset/particles/file.parquet"
            ),
        ),
    ],
    readme_path: Annotated[
        Path,
        typer.Option(
            "--readme-path",
            "-r",
            help="Path to the README file with YAML front matter.",
            exists=True,
            resolve_path=True,
            dir_okay=False,
        ),
    ] = Path("README.md"),
    dry_run: Annotated[
        bool,
        typer.Option(
            "--dry-run/--no-dry-run",
            show_default=True,
            help="Preview URL rewrites without modifying the README.",
        ),
    ] = False,
    skip_missing: Annotated[
        bool,
        typer.Option(
            "--skip-missing/--no-skip-missing",
            show_default=True,
            help="Skip rewriting URLs for files that don't exist locally.",
        ),
    ] = True,
) -> None:
    """Rewrite URLs in README to point to local files served at a base URL."""
    front_matter_text, body_text = read_front_matter(readme_path)
    entries = load_data_file_entries(front_matter_text)
    if not entries:
        logger.warning("No data_files entries found in README front matter.")
        raise typer.Exit(code=0)

    logger.info(f"Found {len(entries)} data file URLs in {readme_path}.")
    output_dir = output_dir.resolve()

    # Collect entries whose files exist locally and track the missing ones.
    results: List[DownloadResult] = []
    missing_files = []

    for order, entry in enumerate(entries):
        destination = resolve_destination(entry, output_dir)
        if destination.exists():
            results.append(
                DownloadResult(
                    entry=entry,
                    path=destination,
                    success=True,
                    skipped=False,
                    order=order,
                )
            )
        else:
            missing_files.append((entry, destination))

    if missing_files:
        if not skip_missing:
            logger.error(f"Found {len(missing_files)} missing files:")
            for entry, dest in missing_files:
                logger.error(f" {entry.config_name}: {dest}")
            logger.error(
                "Use --skip-missing to rewrite URLs only for existing files, "
                "or download the missing files first."
            )
            raise typer.Exit(code=1)
        else:
            logger.warning(
                f"Skipping {len(missing_files)} missing files "
                f"(rewriting {len(results)} existing files)"
            )

    if not results:
        logger.error("No files found to rewrite URLs for.")
        raise typer.Exit(code=1)

    # Show the planned URL replacements.
    table = Table(title="URL rewrite preview" if dry_run else "README URL updates")
    table.add_column("Config")
    table.add_column("Local file", overflow="fold")
    table.add_column("Old URL", overflow="fold")
    table.add_column("New URL", overflow="fold")

    replacements: list[tuple[DownloadResult, str]] = []
    for result in results:
        relative_path = result.path.relative_to(output_dir)
        new_url = build_rewritten_url(result, output_dir, base_url)
        replacements.append((result, new_url))
        table.add_row(
            result.entry.config_name,
            relative_path.as_posix(),
            result.entry.url,
            new_url,
        )

    console.print(table)

    if dry_run:
        logger.info(
            f"Dry run: would rewrite {len(results)} URLs. "
            "Remove --dry-run to apply changes."
        )
        return

    # Apply the replacements to the front matter and write the README back.
    updated_front = front_matter_text
    for result, new_url in replacements:
        updated_front = replace_once(updated_front, result.entry.url, new_url)

    readme_path.write_text(f"---\n{updated_front}\n---\n{body_text}", encoding="utf-8")
    logger.info(f"Successfully rewrote {len(results)} URLs in {readme_path}")


@app.command()
def verify(
    readme_path: Annotated[
        Path,
        typer.Option(
            "--readme-path",
            "-r",
            help="Path to the README file with YAML front matter.",
            exists=True,
            resolve_path=True,
            dir_okay=False,
        ),
    ] = Path("README.md"),
    max_concurrency: Annotated[
        int,
        typer.Option(
            "--max-concurrency",
            "-c",
            min=1,
            show_default=True,
            help="Maximum concurrent verification requests.",
        ),
    ] = 8,
    timeout: Annotated[
        float,
        typer.Option(
            "--timeout",
            min=1.0,
            show_default=True,
            help="Request timeout in seconds.",
        ),
    ] = 60.0,
) -> None:
    """Check that every data file URL in the README front matter is reachable."""
    front_matter_text, _ = read_front_matter(readme_path)
    entries = load_data_file_entries(front_matter_text)
    if not entries:
        logger.warning("No data_files entries found in README front matter.")
        raise typer.Exit(code=0)

    logger.info(f"Verifying {len(entries)} URLs from {readme_path}.")
    results = asyncio.run(
        perform_verification(
            entries=entries,
            max_concurrency=max_concurrency,
            timeout=timeout,
        )
    )

    table = Table(title="URL verification results")
    table.add_column("Config")
    table.add_column("Status")
    table.add_column("Detail", overflow="fold")
    failed = 0
    for result in sorted(results, key=lambda r: (not r.ok, r.order)):
        status_text = str(result.status) if result.status is not None else "-"
        if result.ok:
            table.add_row(
                result.entry.config_name,
                f"[green]{status_text}[/green]",
                result.entry.url,
            )
        else:
            failed += 1
            detail = result.detail or result.entry.url
            table.add_row(
                result.entry.config_name,
                f"[red]{status_text}[/red]",
                f"{result.entry.url}\n{detail}",
            )

    console.print(table)
    if failed:
        logger.error(f"{failed} URLs failed verification.")
        raise typer.Exit(code=1)
    logger.info("All URLs verified successfully.")


@app.command()
def checksum(
    readme_path: Annotated[
        Path,
        typer.Option(
            "--readme-path",
            "-r",
            help="Path to the README file with YAML front matter.",
            exists=True,
            resolve_path=True,
            dir_okay=False,
        ),
    ] = Path("README.md"),
    checksum_dir: Annotated[
        Path,
        typer.Option(
            "--checksum-dir",
            "-d",
            help="Directory containing SHA256 checksum files (one per config).",
            resolve_path=True,
        ),
    ] = Path("checksums"),
    max_concurrency: Annotated[
        int,
        typer.Option(
            "--max-concurrency",
            "-c",
            min=1,
            show_default=True,
            help="Maximum concurrent checksum operations.",
        ),
    ] = 8,
    timeout: Annotated[
        float,
        typer.Option(
            "--timeout",
            min=1.0,
            show_default=True,
            help="Request timeout in seconds.",
        ),
    ] = 300.0,
    generate: Annotated[
        bool,
        typer.Option(
            "--generate",
            help="Generate/update checksum files with current remote file checksums.",
        ),
    ] = False,
    update_mismatches: Annotated[
        bool,
        typer.Option(
            "--update-mismatches",
            help="Update checksum files with new hashes for files that don't match.",
        ),
    ] = False,
) -> None:
    """Verify file integrity using SHA256 checksums."""
    front_matter_text, _ = read_front_matter(readme_path)
    entries = load_data_file_entries(front_matter_text)
    if not entries:
        logger.warning("No data_files entries found in README front matter.")
        raise typer.Exit(code=0)

    logger.info(f"Computing checksums for {len(entries)} files from {readme_path}.")

    config_names = sorted(set(entry.config_name for entry in entries))

    # Load the expected hashes unless fresh checksum files are being generated.
    expected_hashes_by_config: Dict[str, Dict[str, str]] = {}
    if not generate:
        for config_name in config_names:
            expected_hashes_by_config[config_name] = load_checksums_for_config(
                checksum_dir, config_name
            )

    results = asyncio.run(
        perform_checksum_verification(
            entries=entries,
            expected_hashes_by_config=expected_hashes_by_config,
            max_concurrency=max_concurrency,
            timeout=timeout,
        )
    )

    table = Table(title="Checksum verification results")
    table.add_column("Config")
    table.add_column("Filename")
    table.add_column("Status")
    table.add_column("SHA256", overflow="fold")

    failed = 0
    mismatched = 0
    # Hashes to write back when --generate or --update-mismatches is used.
    new_hashes_by_config: Dict[str, Dict[str, str]] = {}

    for result in results:
        if not result.success:
            failed += 1
            error_msg = str(result.error) if result.error else "Unknown error"
            table.add_row(
                result.entry.config_name,
                result.filename,
                "[red]ERROR[/red]",
                error_msg,
            )
        elif generate:
            # Record every computed hash so it can be written out below.
            if result.entry.config_name not in new_hashes_by_config:
                new_hashes_by_config[result.entry.config_name] = {}
            new_hashes_by_config[result.entry.config_name][result.filename] = (
                result.computed_hash or ""
            )
            table.add_row(
                result.entry.config_name,
                result.filename,
                "[cyan]COMPUTED[/cyan]",
                result.computed_hash or "-",
            )
        elif result.expected_hash is None:
            # No stored hash yet for this file.
            table.add_row(
                result.entry.config_name,
                result.filename,
                "[yellow]NEW[/yellow]",
                result.computed_hash or "-",
            )
            if update_mismatches and result.computed_hash:
                if result.entry.config_name not in new_hashes_by_config:
                    new_hashes_by_config[result.entry.config_name] = {}
                new_hashes_by_config[result.entry.config_name][
                    result.filename
                ] = result.computed_hash
        elif result.matches:
            table.add_row(
                result.entry.config_name,
                result.filename,
                "[green]OK[/green]",
                result.computed_hash or "-",
            )
        else:
            # Stored hash does not match the freshly computed one.
            mismatched += 1
            table.add_row(
                result.entry.config_name,
                result.filename,
                "[red]MISMATCH[/red]",
                f"Expected: {result.expected_hash}\nComputed: {result.computed_hash}",
            )
            if update_mismatches and result.computed_hash:
                if result.entry.config_name not in new_hashes_by_config:
                    new_hashes_by_config[result.entry.config_name] = {}
                new_hashes_by_config[result.entry.config_name][
                    result.filename
                ] = result.computed_hash

    console.print(table)

    # Persist new or updated hashes when requested.
    if generate:
        total_saved = 0
        for config_name, file_hashes in new_hashes_by_config.items():
            save_checksums_for_config(checksum_dir, config_name, file_hashes)
            total_saved += len(file_hashes)
        logger.info(
            f"Generated {len(new_hashes_by_config)} checksum files with {total_saved} total checksums."
        )
    elif update_mismatches and new_hashes_by_config:
        total_updated = 0
        for config_name, new_file_hashes in new_hashes_by_config.items():
            # Merge the new hashes into the existing checksum file.
            all_hashes = load_checksums_for_config(checksum_dir, config_name)
            all_hashes.update(new_file_hashes)
            save_checksums_for_config(checksum_dir, config_name, all_hashes)
            total_updated += len(new_file_hashes)
        logger.warning(
            f"Updated {total_updated} checksums across {len(new_hashes_by_config)} checksum files."
        )

    if failed:
        logger.error(f"{failed} files failed checksum computation.")
        raise typer.Exit(code=1)
    if mismatched and not update_mismatches:
        logger.error(f"{mismatched} files have checksum mismatches.")
        logger.warning("Use --update-mismatches to update the checksum files.")
        raise typer.Exit(code=1)
    if not generate and not mismatched and not failed:
        logger.info("All checksums verified successfully.")


def clean_arrow_type(type_str: str) -> str:
    """Clean up Arrow type string for display."""
    # Collapse "list<element: ...>" to the more compact "list<...>".
    cleaned = re.sub(r"list<element:\s*", "list<", type_str)
    return cleaned


def escape_latex(text: str) -> str:
    """Escape special LaTeX characters."""
    # Map each special character to its escaped form in a single pass so that
    # characters introduced by one replacement are never escaped again.
    replacements = {
        "\\": r"\textbackslash{}",
        "&": r"\&",
        "%": r"\%",
        "$": r"\$",
        "#": r"\#",
        "_": r"\_",
        "{": r"\{",
        "}": r"\}",
        "~": r"\textasciitilde{}",
        "^": r"\^{}",
    }
    return "".join(replacements.get(char, char) for char in text)
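

# For example, escape_latex("hit_x [mm] 50%") returns "hit\_x [mm] 50\%", which
# can then be wrapped in \texttt{...} without breaking the LaTeX output.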


async def inspect_file_schema(
    entry: DataFileEntry,
    session: aiohttp.ClientSession,
    semaphore: asyncio.Semaphore,
    progress: Progress,
) -> SchemaInfo:
    """Download file and extract schema information using PyArrow."""
    _, remote_path = entry.parsed()
    filename = Path(remote_path).name or remote_path
    terse_name = (filename[:32] + "…") if len(filename) > 33 else filename
    description = f"{entry.config_name}: {terse_name}"

    async with semaphore:
        task_id: int | None = None
        temp_file = None
        try:
            task_id = progress.add_task(description, total=0, start=False)
            progress.start_task(task_id)

            # Download the file to a temporary location.
            async with session.get(entry.url) as response:
                response.raise_for_status()
                total_bytes = response.content_length or 0

                if total_bytes:
                    progress.update(task_id, total=total_bytes)

                with tempfile.NamedTemporaryFile(delete=False, suffix=".parquet") as f:
                    temp_file = Path(f.name)
                    async for chunk in response.content.iter_chunked(1 << 17):
                        f.write(chunk)
                        progress.update(task_id, advance=len(chunk))

            # Read the table and extract column names, types, and the row count.
            parquet_file = pq.read_table(temp_file)
            schema = parquet_file.schema

            columns = [
                (field.name, clean_arrow_type(str(field.type))) for field in schema
            ]
            num_rows = len(parquet_file)

            return SchemaInfo(
                config_name=entry.config_name,
                filename=filename,
                columns=columns,
                num_rows=num_rows,
                success=True,
            )

        except Exception as exc:
            return SchemaInfo(
                config_name=entry.config_name,
                filename=filename,
                columns=[],
                num_rows=None,
                success=False,
                error=exc,
            )
        finally:
            if task_id is not None:
                progress.remove_task(task_id)
            # Remove the temporary download.
            if temp_file and temp_file.exists():
                temp_file.unlink()


async def perform_schema_inspection(
    entries: Sequence[DataFileEntry],
    max_concurrency: int,
    timeout: float,
) -> List[SchemaInfo]:
    """Download first file from each config and inspect schema."""
    if not entries:
        return []

    semaphore = asyncio.Semaphore(max_concurrency)
    timeout_cfg = aiohttp.ClientTimeout(total=timeout)
    results: List[SchemaInfo] = []
    progress = Progress(
        TextColumn("{task.description}"),
        BarColumn(bar_width=None),
        DownloadColumn(),
        TransferSpeedColumn(),
        TimeElapsedColumn(),
        console=console,
    )

    async with aiohttp.ClientSession(timeout=timeout_cfg) as session:
        with progress:
            tasks = [
                asyncio.create_task(
                    inspect_file_schema(entry, session, semaphore, progress)
                )
                for entry in entries
            ]
            for future in asyncio.as_completed(tasks):
                result = await future
                results.append(result)

    results.sort(key=lambda r: r.config_name)
    return results


@app.command()
def schema(
    readme_path: Annotated[
        Path,
        typer.Option(
            "--readme-path",
            "-r",
            help="Path to the README file with YAML front matter.",
            exists=True,
            resolve_path=True,
            dir_okay=False,
        ),
    ] = Path("README.md"),
    output_file: Annotated[
        Path | None,
        typer.Option(
            "--output-file",
            "-o",
            help="Write schema information to plain text file.",
            resolve_path=True,
            dir_okay=False,
        ),
    ] = None,
    latex: Annotated[
        bool,
        typer.Option(
            "--latex",
            help="Output schema in LaTeX format (requires --output-file).",
        ),
    ] = False,
    max_concurrency: Annotated[
        int,
        typer.Option(
            "--max-concurrency",
            "-c",
            min=1,
            show_default=True,
            help="Maximum concurrent downloads.",
        ),
    ] = 4,
    timeout: Annotated[
        float,
        typer.Option(
            "--timeout",
            min=1.0,
            show_default=True,
            help="Request timeout in seconds.",
        ),
    ] = 300.0,
    types: Annotated[bool, typer.Option(help="Add column type info")] = True,
) -> None:
    """Inspect schema of first file from each dataset config."""
    if latex and not output_file:
        logger.error("--latex requires --output-file to be specified.")
        raise typer.Exit(code=1)

    front_matter_text, _ = read_front_matter(readme_path)
    entries = load_data_file_entries(front_matter_text)
    if not entries:
        logger.warning("No data_files entries found in README front matter.")
        raise typer.Exit(code=0)

    # Take the first file from each config as a representative sample.
    configs_seen = set()
    first_files = []
    for entry in entries:
        if entry.config_name not in configs_seen:
            first_files.append(entry)
            configs_seen.add(entry.config_name)

    logger.info(f"Inspecting schema for {len(first_files)} configs.")

    results = asyncio.run(
        perform_schema_inspection(
            entries=first_files,
            max_concurrency=max_concurrency,
            timeout=timeout,
        )
    )

    failed = 0
    text_output_lines = []
    latex_items = []

    for result in results:
        if not result.success:
            failed += 1
            error_msg = str(result.error) if result.error else "Unknown error"
            logger.error(
                f"Failed to inspect {result.config_name}/{result.filename}: {error_msg}"
            )
            continue

        # Accumulate file output when an output file was requested.
        if output_file:
            if latex:
                escaped_config = escape_latex(result.config_name)
                column_parts = []
                for col_name, col_type in result.columns:
                    escaped_col = escape_latex(col_name)
                    escaped_type = escape_latex(col_type)
                    part = f"\\texttt{{{escaped_col}}}"
                    if types:
                        part += f" (\\texttt{{{escaped_type}}})"
                    column_parts.append(part)
                columns_str = ", ".join(column_parts)
                latex_items.append(
                    f"\\item \\textbf{{{escaped_config}}}: {columns_str}"
                )
            else:
                text_output_lines.append(f"# {result.config_name} — {result.filename}")
                if result.num_rows is not None:
                    text_output_lines.append(
                        f"# {len(result.columns)} columns, {result.num_rows:,} rows"
                    )
                text_output_lines.append("")
                for col_name, col_type in result.columns:
                    text_output_lines.append(f"{col_name}: {col_type}")
                text_output_lines.append("")

        # Always show the schema on the console.
        table = Table(title=f"[bold]{result.config_name}[/bold] — {result.filename}")
        table.add_column("Column", style="cyan")
        table.add_column("Type", style="yellow")

        for col_name, col_type in result.columns:
            table.add_row(col_name, col_type)

        console.print(table)
        if result.num_rows is not None:
            logger.info(
                f"{result.config_name}: {len(result.columns)} columns, {result.num_rows:,} rows"
            )
        console.print()

    # Write the collected output, if requested.
    if output_file:
        if latex and latex_items:
            latex_content = (
                "\\begin{itemize}\n" + "\n".join(latex_items) + "\n\\end{itemize}"
            )
            output_file.write_text(latex_content, encoding="utf-8")
            logger.info(f"Wrote LaTeX schema to {output_file}")
        elif text_output_lines:
            output_file.write_text("\n".join(text_output_lines), encoding="utf-8")
            logger.info(f"Wrote schema information to {output_file}")

    if failed:
        logger.error(f"{failed} configs failed schema inspection.")
        raise typer.Exit(code=1)
    logger.info("Schema inspection completed successfully.")


if __name__ == "__main__":
    app()
|