"""Data loading and case management for stroke-deepisles-demo."""
from stroke_deepisles_demo.core.types import CaseFiles
from stroke_deepisles_demo.data.adapter import LocalDataset
from stroke_deepisles_demo.data.loader import DatasetInfo, load_isles_dataset
from stroke_deepisles_demo.data.staging import StagedCase, stage_case_for_deepisles
__all__ = [
"DatasetInfo",
"LocalDataset",
"StagedCase",
"get_case",
"list_case_ids",
"load_isles_dataset",
"stage_case_for_deepisles",
]
# Convenience functions (combine loader + adapter)
def get_case(case_id: str | int, source: str | None = None) -> CaseFiles:
    """Load a single case by ID or index.

    Uses a context manager to ensure HuggingFace temp files are cleaned up.
    This prevents unbounded disk usage from accumulating temp NIfTI files.

    Args:
        case_id: Case identifier string or positional index.
        source: HuggingFace dataset ID or local path. If None, uses default
            from settings (mirrors ``list_case_ids``; passing None preserves
            the previous behavior, so existing callers are unaffected).

    Returns:
        CaseFiles dictionary
    """
    # Forward `source` so local datasets work here just like in list_case_ids.
    with load_isles_dataset(source=source) as dataset:
        return dataset.get_case(case_id)
def list_case_ids(source: str | None = None) -> list[str]:
    """Return the IDs of every available case.

    The dataset is opened inside a context manager so that any HuggingFace
    temp files are cleaned up on exit, keeping temp NIfTI files from
    accumulating on disk.

    Args:
        source: HuggingFace dataset ID or local path. If None, uses default from settings.
    """
    dataset_cm = load_isles_dataset(source=source)
    with dataset_cm as ds:
        return ds.list_case_ids()