diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md
index 217430f2..808817a1 100644
--- a/.github/copilot-instructions.md
+++ b/.github/copilot-instructions.md
@@ -1,5 +1,102 @@
 # GitHub Copilot Instructions for AIMBAT
+## Build, Test, and Lint
+
+Dependencies are managed with **uv**. All commands assume the virtualenv is active or are prefixed with `uv run`.
+
+```bash
+# Install all dependencies
+make sync                  # uv sync --locked --all-extras
+
+# Format and lint
+make format                # black .
+make lint                  # black --check + ruff check .
+uv run ruff check --fix .  # auto-fix ruff issues
+
+# Type checking
+make mypy                  # uv run pytest --mypy -m mypy src tests
+
+# Run the default test suite (mypy + matplotlib comparison; excludes slow functional tests)
+make tests                 # pytest --cov --mpl -m "not slow" + mypy
+
+# Run all tests, including slow functional tests
+make tests-full            # pytest --cov --mpl + mypy
+
+# Run a single test file or test
+uv run pytest tests/unit/test_foo.py
+uv run pytest tests/unit/test_foo.py::test_specific_function
+
+# Regenerate matplotlib baseline images (then manually move to test directories)
+make test-figs
+```
+
+Configuration: `pyproject.toml` (pytest, mypy, black, ruff, coverage). Tests run against Python 3.12–3.14 in CI via tox.
+
+## Architecture
+
+AIMBAT is a seismological tool for automated and interactive measurement of body-wave arrival times. It processes SAC-format seismograms and stores state in a SQLite database.
+
+### Module Layout
+
+```
+src/aimbat/
+├── app.py            # Cyclopts CLI root — registers all subcommands
+├── cli/              # CLI command definitions (thin layer, delegates to core/)
+├── core/             # Business logic: ICCS/MCCC algorithms, event/seismogram ops
+│   ├── _active_event.py  # Manages the single active event constraint
+│   ├── _data.py      # SAC ingestion entry point
+│   ├── _iccs.py      # ICCS alignment (wraps pysmo.tools.iccs)
+│   └── _snapshot.py  # Parameter state capture for rollback/comparison
+├── models/           # SQLModel ORM definitions (Events, Seismograms, Stations, etc.)
+│   └── _sqlalchemy.py    # SAPandasTimestamp / SAPandasTimedelta type decorators
+├── aimbat_types/     # Custom Pydantic types (PydanticTimestamp, enums for parameters)
+├── io/               # File I/O — _base.py defines abstract base; _sac.py implements SAC via pysmo
+├── utils/            # Shared helpers (JSON→table, UUID truncation, styling, sample data)
+├── _config.py        # Global Settings (pydantic-settings, env prefix AIMBAT_)
+├── _lib/             # Internal mixins (EventParametersValidatorMixin)
+├── _utils.py         # Top-level utility helpers
+├── db.py             # SQLite engine singleton (foreign keys enforced via PRAGMA)
+└── logger.py         # Loguru-based logging
+```
+
+### Data Flow
+
+1. SAC files are ingested via `aimbat data add` → `core/_data.py` → `io/` → stored in SQLite
+2. One event is set "active" at a time; all processing commands operate on the active event
+3. ICCS (Iterative Cross-Correlation and Stack) aligns seismograms: `core/_iccs.py` wraps `pysmo.tools.iccs`
+4. MCCC (Multi-Channel Cross-Correlation) refines arrival time picks: wraps `pysmo.tools.signal.mccc`
+5. Snapshots (`core/_snapshot.py`) capture parameter state for rollback/comparison
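+
+A typical ingestion step, sketched from the `aimbat data add` options defined in `cli/_data.py` (the paths are illustrative):
+
+```bash
+# Preview what would be added, without modifying the database
+aimbat data add tests/assets/*.sac --dry-run
+
+# Ingest for real (SAC is the default data type)
+aimbat data add tests/assets/*.sac
+```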
+
+### Key Models
+
+- **AimbatEvent** — seismic event with `active` flag (only one active at a time, enforced by DB triggers)
+- **AimbatSeismogram** — links to AimbatEvent + AimbatStation; stores `t0` (initial pick) and processing parameters
+- **AimbatEventParameters** — per-event processing settings (window, bandpass, min_ccnorm)
+- **AimbatSeismogramParameters** — per-seismogram flags (`select`, `flip`, `t1` pick)
+- **SAPandasTimestamp / SAPandasTimedelta** in `models/_sqlalchemy.py` — custom SQLAlchemy type decorators storing pandas timestamps as UTC datetimes and timedeltas as nanosecond integers
+
+### Configuration
+
+Settings live in `_config.py` as a `pydantic-settings` class. All settings can be overridden via environment variables prefixed with `AIMBAT_` (e.g. `AIMBAT_LOG_LEVEL=DEBUG`) or a `.env` file. The default project file is `aimbat.db` in the current directory.
+
+## Key Conventions
+
+### Testing
+
+- **Each test gets a fresh in-memory SQLite database** via the `engine` fixture in `tests/conftest.py`; never share state between tests
+- **UUID generation is seeded** (`random.Random(42)`) in tests via the `mock_uuid4` autouse fixture — do not rely on random UUIDs in assertions
+- **`patch_settings` fixture** resets all settings to defaults before each test; use `@pytest.mark.parametrize` with `indirect=["patch_settings"]` to override specific settings
+- Test assets (SAC files) live in `tests/assets/`; use `tmp_path_factory` copies to avoid mutating them
+- Mirror the `src/aimbat/` directory structure under `tests/` (e.g. `tests/unit/core/`, `tests/unit/models/`)
+- Matplotlib comparison tests use the `--mpl` flag; baseline images live in `baseline/`
+
+### CLI Pattern
+
+Each CLI module in `cli/` creates a Cyclopts `App` instance, which is registered with the root app in `app.py` by import string (so CLI modules load lazily). CLI functions are thin wrappers that open a `Session` from `aimbat.db.engine` and delegate to `core/` functions.
+
+### Custom Types
+
+- Use `PydanticTimestamp` / `PydanticTimedelta` (from `aimbat.aimbat_types`) for pandas-compatible time fields in models
+- Use `PydanticNegativeTimedelta` / `PydanticPositiveTimedelta` for sign-constrained validation
+- Use `SAPandasTimestamp` / `SAPandasTimedelta` (from `aimbat.models._sqlalchemy`) as the `sa_type` in SQLModel fields
+
 ## Code Style and Standards
 
 ### General Principles
diff --git a/.gitignore b/.gitignore
index 33358aed..bbe2a139 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,3 +34,6 @@ reset_project.sh
 aimbat.log
 .env
 aimbat_test.log
+GEMINI.md
+CLAUDE.md
+.claude/settings.local.json
diff --git a/Makefile b/Makefile
index 9a414845..1faafccd 100644
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,4 @@
-.PHONY: help check-uv sync upgrade lint test-figs tests \
+.PHONY: help check-uv sync upgrade lint test-figs tests tests-full \
 	mypy docs live-docs build publish clean python \
 	format format-check changelog
 
@@ -36,7 +36,10 @@ lint: check-uv ## Check formatting with black and lint code with ruff.
 test-figs: check-uv ## Generate baseline figures for testing (then manually move them to the test directories).
 	uv run py.test --mpl-generate-path=baseline
 
-tests: check-uv mypy ## Run all tests with pytest.
+tests: check-uv mypy ## Run tests with pytest (excludes slow functional tests).
+	uv run pytest --cov --cov-report=term-missing --cov-report=html --mpl -m "not slow"
+
+tests-full: check-uv mypy ## Run all tests including slow functional tests.
 	uv run pytest --cov --cov-report=term-missing --cov-report=html --mpl
 
 mypy: check-uv ## Run typing tests with pytest.
diff --git a/flake.nix b/flake.nix
index ff952bb1..d9b1ea80 100644
--- a/flake.nix
+++ b/flake.nix
@@ -23,13 +23,14 @@
     in {
       default = pkgs.mkShell {
         nativeBuildInputs = with pkgs; [
+          bashInteractive
+          sqlitebrowser
           uv
           ruff
           (python314.withPackages (ps: with ps; [tox]))
           python313
           python312
           gnumake
-          sqlitebrowser
         ];
 
         shellHook = ''
diff --git a/pyproject.toml b/pyproject.toml
index 6eba5d90..d67fa695 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -77,14 +77,20 @@ requires = ["hatchling", "hatch-vcs"]
 build-backend = "hatchling.build"
 
 [tool.pytest.ini_options]
-# xvfb_width = 1920
-# xvfb_height = 1080
 testpaths = [
     "tests",
     "src",
]
+markers = [
+    "slow: mark slow tests that may take a long time to run",
+    "cli: mark as command-line interface tests",
+    "gui: mark tests that require a GUI environment",
+    "mpl: mark tests that generate matplotlib figures",
+]
 mpl-generate-summary = "html"
 mpl-use-full-test-name = true
+# xvfb_width = 1920
+# xvfb_height = 1080
 
 [tool.mypy]
 mypy_path = "src"
diff --git a/src/aimbat/_config.py b/src/aimbat/_config.py
index d9894bee..12227c7d 100644
--- a/src/aimbat/_config.py
+++ b/src/aimbat/_config.py
@@ -133,36 +133,55 @@ def set_computed_defaults(self) -> Self:
 def print_settings_table(pretty: bool) -> None:
     """Print a pretty table with AIMBAT configuration options."""
 
-    from aimbat.utils import make_table, TABLE_STYLING
-    from rich.console import Console
+    import json
+    from typing import Any
+    from aimbat.utils import TABLE_STYLING
+    from aimbat.utils._json import json_to_table
 
     env_prefix = Settings.model_config.get("env_prefix")
+    values: dict[str, Any] = json.loads(settings.model_dump_json())
 
     if not pretty:
-        for k in Settings.model_fields:
-            print(
-                f'{(env_prefix + k).upper() if env_prefix else k}="{getattr(settings, k)}"'
-            )
+        for k, v in values.items():
+            env_key = f"{env_prefix.upper()}{k.upper()}" if env_prefix else k
+            print(f'{env_key}="{v}"')
         return
 
-    table = make_table(title="AIMBAT settings")
-    table.add_column("Name", justify="left", style=TABLE_STYLING.id, no_wrap=True)
-    table.add_column("Value", justify="center", style=TABLE_STYLING.mine)
-    table.add_column("Description", justify="left", style=TABLE_STYLING.linked)
-
-    for k, v in Settings.model_fields.items():
+    rows = []
+    for k, v in values.items():
+        field_info = Settings.model_fields.get(k)
         env_var = (
-            f"Environment variable: {env_prefix.upper()}{str(k).upper()}"
+            f"Environment variable: {env_prefix.upper()}{k.upper()}"
             if env_prefix
             else ""
         )
-        description_with_env_var = (
-            f"{v.description} " if v.description else ""
-        ) + env_var
-        table.add_row(k, str(getattr(settings, k)), description_with_env_var)
+        description = field_info.description if field_info else ""
+        description_with_env_var = (f"{description} " if description else "") + env_var
+        rows.append(
+            {"name": k, "value": str(v), "description": description_with_env_var}
+        )
 
-    console = Console()
-    console.print(table)
+    json_to_table(
+        rows,
+        title="AIMBAT settings",
+        column_kwargs={
+            "name": {
+                "header": "Name",
+                "justify": "left",
+                "style": TABLE_STYLING.id,
+                "no_wrap": True,
+            },
+            "value": {
+                "header": "Value",
+                "justify": "center",
+                "style": TABLE_STYLING.mine,
+            },
+            "description": {
+                "header": "Description",
+                "justify": "left",
+                "style": TABLE_STYLING.linked,
+            },
+        },
+    )
 
 
 def cli_settings_list(
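Note: several rewrites in this change set delegate table rendering to `json_to_table`. A minimal sketch of the call shape, using only parameter names that appear at call sites in this diff (the helper's own definition is outside this change set, and the example row is hypothetical):

```python
from aimbat.utils import json_to_table, TABLE_STYLING

# Rows are plain JSON-style dicts; each key becomes a table column.
rows = [
    {"name": "log_level", "value": "INFO", "description": "Example row."},
]

json_to_table(
    rows,
    title="Example table",
    column_order=["name", "value", "description"],  # explicit column ordering
    formatters={"value": str.upper},                # per-column value formatting
    common_column_kwargs={"justify": "left"},       # defaults applied to every column
    column_kwargs={"name": {"header": "Name", "style": TABLE_STYLING.id}},
)
```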
diff --git a/src/aimbat/aimbat_types/_pydantic.py b/src/aimbat/aimbat_types/_pydantic.py
index a624dd58..522be11d 100644
--- a/src/aimbat/aimbat_types/_pydantic.py
+++ b/src/aimbat/aimbat_types/_pydantic.py
@@ -11,7 +11,9 @@
 ]
 
 
-def _format_timedelta(td: Timedelta) -> float:
+def _format_timedelta(td: Timedelta | None) -> float | None:
+    if td is None:
+        return None
     return td.total_seconds()
 
 
@@ -40,6 +42,8 @@ def __get_pydantic_core_schema__(
     ) -> CoreSchema:
         # Define how to validate the input (from string, datetime, or object)
         def validate(value: Any) -> T:
+            if value is None:
+                raise ValueError(f"{cls.target_type.__name__} value cannot be None")
             if isinstance(value, cls.target_type):
                 return value
             try:
@@ -63,7 +67,7 @@ class _AnnotatedTimedelta(_PandasBaseAnnotation):
 type PydanticTimedelta = Annotated[
     Timedelta,
     _AnnotatedTimedelta,
-    PlainSerializer(_format_timedelta, return_type=float),
+    PlainSerializer(_format_timedelta, return_type=float | None),
 ]
 type PydanticNegativeTimedelta = Annotated[
     PydanticTimedelta, AfterValidator(_must_be_negative_pd_timedelta)
diff --git a/src/aimbat/app.py b/src/aimbat/app.py
index 6ebf0835..5dcc72b1 100644
--- a/src/aimbat/app.py
+++ b/src/aimbat/app.py
@@ -6,19 +6,6 @@
 commands is available by typing `aimbat COMMAND --help`.
 """
 
-from ._config import cli_settings_list
-from .cli import (
-    _align,
-    _data,
-    _event,
-    _pick,
-    _plot,
-    _project,
-    _station,
-    _seismogram,
-    _snapshot,
-    _utils,
-)
 from importlib import metadata
 from cyclopts import App
 from rich.console import Console
@@ -32,17 +19,16 @@
 console = Console()
 
 app = App(version=__version__, help=__doc__, help_format="markdown", console=console)
-app.command(_align.app)
-app.command(_data.app)
-app.command(_event.app)
-app.command(_pick.app)
-app.command(_plot.app)
-app.command(_project.app)
-app.command(_station.app)
-app.command(_seismogram.app)
-app.command(cli_settings_list, name="settings")
-app.command(_snapshot.app)
-app.command(_utils.app)
+app.command("aimbat.cli._align:app", name="align")
+app.command("aimbat.cli._data:app", name="data")
+app.command("aimbat.cli._event:app", name="event")
+app.command("aimbat.cli._pick:app", name="pick")
+app.command("aimbat.cli._plot:app", name="plot")
+app.command("aimbat.cli._project:app", name="project")
+app.command("aimbat.cli._station:app", name="station")
+app.command("aimbat.cli._seismogram:app", name="seismogram")
+app.command("aimbat.cli._snapshot:app", name="snapshot")
+app.command("aimbat.cli._utils:app", name="utils")
 
 
 if __name__ == "__main__":
diff --git a/src/aimbat/cli/_data.py b/src/aimbat/cli/_data.py
index 9dcf42e0..72d6c0fa 100644
--- a/src/aimbat/cli/_data.py
+++ b/src/aimbat/cli/_data.py
@@ -13,36 +13,41 @@
 @app.command(name="add")
 @simple_exception
 def cli_data_add(
-    seismogram_files: Annotated[
+    data_sources: Annotated[
         list[Path],
         Parameter(
-            name="files", consume_multiple=True, validator=validators.Path(exists=True)
+            name="sources",
+            consume_multiple=True,
+            validator=validators.Path(exists=True),
         ),
     ],
     *,
-    filetype: DataType = DataType.SAC,
+    data_type: Annotated[DataType, Parameter(name="type")] = DataType.SAC,
+    dry_run: Annotated[bool, Parameter(name="dry-run")] = False,
     show_progress_bar: Annotated[bool, Parameter(name="progress")] = True,
     global_parameters: GlobalParameters | None = None,
 ) -> None:
     """Add or update data files in the AIMBAT project.
 
     Args:
-        seismogram_files: Seismogram files to be added.
-        filetype: Specify type of seismogram file.
+        data_sources: Data sources to be added.
+        data_type: Type of the data sources being added.
+ dry_run: If True, print the files that would be added without modifying the database. show_progress_bar: Display progress bar. """ from aimbat.db import engine - from aimbat.core import add_files_to_project + from aimbat.core import add_data_to_project global_parameters = global_parameters or GlobalParameters() disable_progress_bar = not show_progress_bar with Session(engine) as session: - add_files_to_project( + add_data_to_project( session, - seismogram_files, - filetype, + data_sources, + data_type, + dry_run, disable_progress_bar, ) diff --git a/src/aimbat/cli/_snapshot.py b/src/aimbat/cli/_snapshot.py index 75283e4c..1001bf2b 100644 --- a/src/aimbat/cli/_snapshot.py +++ b/src/aimbat/cli/_snapshot.py @@ -92,14 +92,14 @@ def cli_snapshot_dump( all_events: Select snapshots for all events. """ from aimbat.db import engine - from aimbat.core import dump_snapshot_table_to_json + from aimbat.core import dump_snapshot_tables_to_json from sqlmodel import Session from rich import print_json global_parameters = global_parameters or GlobalParameters() with Session(engine) as session: - print_json(dump_snapshot_table_to_json(session, all_events, as_string=True)) + print_json(dump_snapshot_tables_to_json(session, all_events, as_string=True)) @app.command(name="list") diff --git a/src/aimbat/cli/_utils/app.py b/src/aimbat/cli/_utils/app.py index 4ab83911..28015d40 100644 --- a/src/aimbat/cli/_utils/app.py +++ b/src/aimbat/cli/_utils/app.py @@ -5,40 +5,14 @@ are not strictly part of an AIMBAT workflow. """ -from .._common import GlobalParameters, simple_exception from .sampledata import app as sampledata_app -from pathlib import Path -from typing import Annotated -from cyclopts import App, Parameter - - -@simple_exception -def _run_checks(sacfiles: list[Path]) -> None: - from aimbat.utils import run_checks - - run_checks(sacfiles) - +from aimbat._config import cli_settings_list +from cyclopts import App app = App(name="utils", help=__doc__, help_format="markdown") +app.command(cli_settings_list, name="settings") app.command(sampledata_app, name="sampledata") -@app.command(name="checkdata") -def cli_checkdata( - sacfiles: Annotated[list[Path], Parameter(name="data", consume_multiple=True)], - *, - common: GlobalParameters | None = None, -) -> None: - """Check if there are any problems with SAC files before adding them to a project. - - Args: - sacfiles: One or more SAC files. - """ - - common = common or GlobalParameters() - - _run_checks(sacfiles) - - if __name__ == "__main__": app() diff --git a/src/aimbat/core/__init__.py b/src/aimbat/core/__init__.py index 2c8351f0..ab8a46dd 100644 --- a/src/aimbat/core/__init__.py +++ b/src/aimbat/core/__init__.py @@ -4,6 +4,7 @@ _internal_names = set(dir()) +from ._active_event import * from ._data import * from ._event import * from ._iccs import * diff --git a/src/aimbat/core/_active_event.py b/src/aimbat/core/_active_event.py new file mode 100644 index 00000000..b745ad4d --- /dev/null +++ b/src/aimbat/core/_active_event.py @@ -0,0 +1,86 @@ +"""Get and set the active event (i.e. 
the one being processed)."""
+
+# WARNING: Do not import other modules from `aimbat.core` here to avoid circular imports
+from aimbat.logger import logger
+from aimbat.models import AimbatEvent
+from aimbat.cli._common import HINTS
+from sqlmodel import Session, select
+from sqlalchemy.exc import NoResultFound
+from uuid import UUID
+
+__all__ = [
+    "get_active_event",
+    "set_active_event_by_id",
+    "set_active_event",
+]
+
+
+def get_active_event(session: Session) -> AimbatEvent:
+    """
+    Return the currently active event (i.e. the one being processed).
+
+    Args:
+        session: SQL session.
+
+    Returns:
+        The active event.
+
+    Raises:
+        NoResultFound: When no event is active.
+    """
+
+    logger.debug("Attempting to determine active event.")
+
+    select_active_event = select(AimbatEvent).where(AimbatEvent.active == 1)
+
+    # NOTE: The database may technically contain no active event, but we do
+    # not want to proceed past this point when that is the case. Hence we
+    # call `one` rather than `one_or_none`.
+    try:
+        active_event = session.exec(select_active_event).one()
+    except NoResultFound:
+        raise NoResultFound(f"No active event found. {HINTS.ACTIVATE_EVENT}")
+
+    logger.debug(f"Active event: {active_event.id}")
+
+    return active_event
+
+
+def set_active_event_by_id(session: Session, event_id: UUID) -> None:
+    """
+    Set the currently selected event (i.e. the one being processed) by its ID.
+
+    Args:
+        session: SQL session.
+        event_id: ID of AIMBAT Event to set as active one.
+
+    Raises:
+        ValueError: If no event with the given ID is found.
+    """
+    logger.info(f"Setting active event to event with id={event_id}.")
+
+    if event_id not in session.exec(select(AimbatEvent.id)).all():
+        raise ValueError(
+            f"No AimbatEvent found with id: {event_id}. {HINTS.LIST_EVENTS}"
+        )
+
+    aimbat_event = session.exec(
+        select(AimbatEvent).where(AimbatEvent.id == event_id)
+    ).one()
+    set_active_event(session, aimbat_event)
+
+
+def set_active_event(session: Session, event: AimbatEvent) -> None:
+    """
+    Set the active event (i.e. the one being processed).
+
+    Args:
+        session: SQL session.
+        event: AIMBAT Event to set as active.
+    """
+
+    logger.info(f"Activating {event=}")
+
+    event.active = True
+    session.add(event)
+    session.commit()
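A minimal usage sketch of the relocated helpers above (the UUID is a placeholder; session handling mirrors the CLI wrappers elsewhere in this diff):

```python
from uuid import UUID
from sqlmodel import Session

from aimbat.core import get_active_event, set_active_event_by_id
from aimbat.db import engine

event_id = UUID("00000000-0000-0000-0000-000000000000")  # placeholder ID

with Session(engine) as session:
    # Raises ValueError for unknown IDs; the single_active_event triggers
    # created in core/_project.py guarantee at most one row has active=True.
    set_active_event_by_id(session, event_id)

    # Raises NoResultFound if nothing is active.
    event = get_active_event(session)
```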
+ """ + + logger.info(f"Activating {event=}") + + event.active = True + session.add(event) + session.commit() diff --git a/src/aimbat/core/_data.py b/src/aimbat/core/_data.py index 15a95f37..1a3b5994 100644 --- a/src/aimbat/core/_data.py +++ b/src/aimbat/core/_data.py @@ -1,10 +1,12 @@ +import os +from aimbat.core import get_active_event from aimbat.logger import logger from aimbat.aimbat_types import DataType from aimbat.utils import ( uuid_shortener, - get_active_event, make_table, TABLE_STYLING, + json_to_table, ) from aimbat.io import create_seismogram, create_station, create_event from aimbat.models import ( @@ -19,10 +21,9 @@ from collections.abc import Sequence from rich.progress import track from rich.console import Console -import os __all__ = [ - "add_files_to_project", + "add_data_to_project", "get_data_for_active_event", "print_data_table", "dump_data_table_to_json", @@ -106,93 +107,180 @@ def _create_seismogram( return aimbat_seismogram -def add_files_to_project( +def _add_datasource( + session: Session, datasource: str | os.PathLike, datatype: DataType +) -> AimbatDataSource: + """Add a data source to the AIMBAT database, creating related station, event and seismogram if necessary.""" + aimbat_station = _create_station(session, datasource, datatype) + aimbat_event = _create_event(session, datasource, datatype) + aimbat_seismogram = _create_seismogram(session, datasource, datatype) + + # TODO: perhaps adding potentially updated station and event information should be optional? + aimbat_seismogram.station = aimbat_station + aimbat_seismogram.event = aimbat_event + + # Create AimbatDataSource instance with relationship to AimbatSeismogram + select_aimbat_data_source = select(AimbatDataSource).where( + AimbatDataSource.sourcename == str(datasource) + ) + aimbat_data_source = session.exec(select_aimbat_data_source).one_or_none() + if aimbat_data_source is None: + logger.debug(f"Adding data source {datasource} to project.") + aimbat_data_source_create = AimbatDataSourceCreate( + sourcename=str(datasource), datatype=datatype + ) + aimbat_data_source = AimbatDataSource.model_validate( + aimbat_data_source_create, + update={"seismogram": aimbat_seismogram}, + ) + + else: + logger.debug( + f"Using existing data source {datasource} instead of adding new one." 
+        )
+    aimbat_data_source.seismogram = aimbat_seismogram
+    session.add(aimbat_data_source)
+    return aimbat_data_source
+
+
+def _print_dry_run_results(
+    added_datasources: Sequence[AimbatDataSource],
+    existing_station_ids: set,
+    existing_event_ids: set,
+    existing_seismogram_ids: set,
+) -> None:
+    """Print a summary table showing which entities would be added vs skipped."""
+    bool_fmt = TABLE_STYLING.bool_formatter
+    json_to_table(
+        [
+            {
+                "Filename": str(ds.sourcename),
+                "Station": ds.seismogram.station_id not in existing_station_ids,
+                "Event": ds.seismogram.event_id not in existing_event_ids,
+                "Seismogram": ds.seismogram_id not in existing_seismogram_ids,
+            }
+            for ds in added_datasources
+        ],
+        title="Dry Run: Data to be added",
+        formatters={
+            "Station": bool_fmt,
+            "Event": bool_fmt,
+            "Seismogram": bool_fmt,
+        },
+    )
+    new_stations = sum(
+        ds.seismogram.station_id not in existing_station_ids for ds in added_datasources
+    )
+    new_events = sum(
+        ds.seismogram.event_id not in existing_event_ids for ds in added_datasources
+    )
+    new_seismograms = sum(
+        ds.seismogram_id not in existing_seismogram_ids for ds in added_datasources
+    )
+    console = Console()
+    console.print(
+        f"\n{new_stations} station(s) added, "
+        f"{len(added_datasources) - new_stations} skipped. "
+        f"{new_events} event(s) added, "
+        f"{len(added_datasources) - new_events} skipped. "
+        f"{new_seismograms} seismogram(s) added, "
+        f"{len(added_datasources) - new_seismograms} skipped."
+    )
+
+
+def add_data_to_project(
     session: Session,
-    datasources: Sequence[str | os.PathLike],
-    datatype: DataType,
+    data_sources: Sequence[str | os.PathLike],
+    data_type: DataType,
+    dry_run: bool = False,
     disable_progress_bar: bool = True,
 ) -> None:
     """Add files to the AIMBAT database.
 
     Args:
-        datasources: List of data sources to add.
-        datatype: Type of data.
+        session: The SQLModel database session.
+        data_sources: List of data sources to add.
+        data_type: Type of data.
+        dry_run: If True, do not commit changes to the database.
         disable_progress_bar: Do not display progress bar.
     """
 
-    logger.info(f"Adding {len(datasources)} {datatype} files to project.")
-
-    for datasource in track(
-        sequence=datasources,
-        description="Adding files ...",
-        disable=disable_progress_bar,
-    ):
-        aimbat_station = _create_station(session, datasource, datatype)
-        aimbat_event = _create_event(session, datasource, datatype)
-        aimbat_seismogram = _create_seismogram(session, datasource, datatype)
-
-        # TODO: perhaps adding potentially updated station and event information should be optional?
-        aimbat_seismogram.station = aimbat_station
-        aimbat_seismogram.event = aimbat_event
-
-        # Create AimbatDataSource instance with relationship to AimbatSeismogram
-        select_aimbat_data_source = select(AimbatDataSource).where(
-            AimbatDataSource.sourcename == str(datasource)
-        )
-        aimbat_data_source = session.exec(select_aimbat_data_source).one_or_none()
-        if aimbat_data_source is None:
-            logger.debug(f"Adding data source {datasource} to project.")
-            aimbat_data_source_create = AimbatDataSourceCreate(
-                sourcename=str(datasource), datatype=datatype
-            )
-            aimbat_data_source = AimbatDataSource.model_validate(
-                aimbat_data_source_create, update={"seismogram": aimbat_seismogram}
-            )
-
-        else:
-            logger.debug(
-                f"Using existing data source {datasource} instead of adding new one."
-            )
-        aimbat_data_source.seismogram = aimbat_seismogram
-        session.add(aimbat_data_source)
-
-    session.commit()
+    logger.info(f"Adding {len(data_sources)} {data_type} files to project.")
+
+    # Snapshot existing IDs before entering the savepoint so we can identify
+    # what would be new vs reused when running a dry run.
+    if dry_run:
+        existing_station_ids = set(session.exec(select(AimbatStation.id)).all())
+        existing_event_ids = set(session.exec(select(AimbatEvent.id)).all())
+        existing_seismogram_ids = set(session.exec(select(AimbatSeismogram.id)).all())
+
+    try:
+        added_datasources: list[AimbatDataSource] = []
+        with session.begin_nested() as nested:
+            for datasource in track(
+                sequence=data_sources,
+                description="Adding data ...",
+                disable=disable_progress_bar,
+            ):
+                added_datasources.append(
+                    _add_datasource(session, datasource, data_type)
+                )
+
+            if dry_run:
+                logger.info("Dry run: displaying data that would be added.")
+                session.flush()
+                _print_dry_run_results(
+                    added_datasources,
+                    existing_station_ids,
+                    existing_event_ids,
+                    existing_seismogram_ids,
+                )
+                nested.rollback()
+                logger.info("Dry run complete. Rolling back changes.")
+                return
+
+        session.commit()
+        logger.info("Data added successfully.")
+
+    except Exception as e:
+        logger.error(f"Failed to add data. Rolling back changes. Error: {e}")
+        raise
 
 
 def get_data_for_active_event(session: Session) -> Sequence[AimbatDataSource]:
-    """Returns the AimbatFiles belonging to the active event.
+    """Returns the data sources belonging to the active event.
 
     Args:
         session: Database session.
 
     Returns:
-        List of AimbatFiles.
+        Sequence of AimbatDataSource objects belonging to the active event.
     """
 
-    logger.info("Getting aimbatfiles in active event.")
+    logger.info("Getting data sources for active event.")
 
-    select_files = (
+    statement = (
         select(AimbatDataSource)
         .join(AimbatSeismogram)
         .join(AimbatEvent)
         .where(AimbatEvent.active == 1)
     )
 
-    return session.exec(select_files).all()
+    return session.exec(statement).all()
 
 
 def print_data_table(session: Session, short: bool, all_events: bool = False) -> None:
-    """Print a pretty table with AIMBAT data.
+    """Print a pretty table with information about the data sources in the database.
 
     Args:
         short: Shorten UUIDs and format data.
         all_events: Print all files instead of limiting to the active event.
""" - logger.info("Printing AIMBAT data table.") + logger.info("Printing data sources table.") if all_events: aimbat_data_sources = session.exec(select(AimbatDataSource)).all() - title = "AIMBAT data for all events" + title = "Data sources for all events" else: active_event = get_active_event(session) aimbat_data_sources = get_data_for_active_event(session) @@ -202,7 +290,7 @@ def print_data_table(session: Session, short: bool, all_events: bool = False) -> else active_event.time ) id = uuid_shortener(session, active_event) if short else active_event.id - title = f"AIMBAT data for event {time} (ID={id})" + title = f"Data sources for event {time} (ID={id})" logger.debug(f"Found {len(aimbat_data_sources)} files in total.") diff --git a/src/aimbat/core/_event.py b/src/aimbat/core/_event.py index 3145b25e..ddfdedef 100644 --- a/src/aimbat/core/_event.py +++ b/src/aimbat/core/_event.py @@ -1,11 +1,10 @@ """Module to manage and view events in AIMBAT.""" +from aimbat.core import get_active_event from aimbat.logger import logger from aimbat.cli._common import HINTS from aimbat.utils import ( uuid_shortener, - get_active_event, - make_table, json_to_table, TABLE_STYLING, ) @@ -13,6 +12,7 @@ AimbatEvent, AimbatEventParameters, AimbatEventParametersBase, + AimbatEventRead, AimbatStation, AimbatSeismogram, ) @@ -23,21 +23,16 @@ EventParameterTimedelta, ) from pydantic import TypeAdapter -from rich.console import Console from sqlmodel import select, Session from sqlalchemy.exc import NoResultFound from typing import overload, Any, Literal -from pandas import Timedelta +from pandas import Timedelta, Timestamp from collections.abc import Sequence from uuid import UUID -import aimbat.core._station as station __all__ = [ "delete_event_by_id", "delete_event", - "get_active_event", - "set_active_event_by_id", - "set_active_event", "get_completed_events", "get_events_using_station", "get_event_parameter", @@ -84,46 +79,6 @@ def delete_event(session: Session, event: AimbatEvent) -> None: session.commit() -def set_active_event_by_id(session: Session, event_id: UUID) -> None: - """ - Set the currently selected event (i.e. the one being processed) by its ID. - - Args: - session: SQL session. - event_id: ID of AIMBAT Event to set as active one. - - Raises: - ValueError: If no event with the given ID is found. - """ - logger.info(f"Setting active event to event with id={event_id}.") - - if event_id not in session.exec(select(AimbatEvent.id)).all(): - raise ValueError( - f"No AimbatEvent found with id: {event_id}. {HINTS.LIST_EVENTS}" - ) - - aimbat_event = session.exec( - select(AimbatEvent).where(AimbatEvent.id == event_id) - ).one() - set_active_event(session, aimbat_event) - - -def set_active_event(session: Session, event: AimbatEvent) -> None: - """ - Set the active event (i.e. the one being processed). - - Args: - session: SQL session. - event: AIMBAT Event to set as active. - """ - - logger.info(f"Activating {event=}") - - event.active = True - session.add(event) - session.commit() - - def get_completed_events(session: Session) -> Sequence[AimbatEvent]: """Get the events marked as completed. 
@@ -252,60 +207,32 @@ def set_event_parameter( session.commit() -def dump_event_table_to_json(session: Session) -> str: - """Dump the table data to json.""" - - logger.info("Dumping AIMBAT event table to json.") - adapter: TypeAdapter[Sequence[AimbatEvent]] = TypeAdapter(Sequence[AimbatEvent]) - aimbat_event = session.exec(select(AimbatEvent)).all() - - return adapter.dump_json(aimbat_event).decode("utf-8") +@overload +def dump_event_table_to_json( + session: Session, as_string: Literal[True] = ... +) -> str: ... -def print_event_table(session: Session, short: bool) -> None: - """Print a pretty table with AIMBAT events. +@overload +def dump_event_table_to_json( + session: Session, as_string: Literal[False] +) -> list[dict[str, Any]]: ... - Args: - session: Database session. - short: Shorten and format the output to be more human-readable. - """ - logger.info("Printing AIMBAT events table.") +def dump_event_table_to_json( + session: Session, as_string: bool = True +) -> str | list[dict[str, Any]]: + """Dump the table data to json.""" - table = make_table(title="AIMBAT Events") - table.add_column( - "ID (shortened)" if short else "ID", - justify="center", - style=TABLE_STYLING.id, - no_wrap=True, - ) - table.add_column("Active", justify="center", style=TABLE_STYLING.mine, no_wrap=True) - table.add_column( - "Date & Time", justify="center", style=TABLE_STYLING.mine, no_wrap=True + logger.info("Dumping AIMBAT event table to json.") + events = session.exec(select(AimbatEvent)).all() + event_reads = [AimbatEventRead.from_event(e) for e in events] + adapter: TypeAdapter[Sequence[AimbatEventRead]] = TypeAdapter( + Sequence[AimbatEventRead] ) - table.add_column("Latitude", justify="center", style=TABLE_STYLING.mine) - table.add_column("Longitude", justify="center", style=TABLE_STYLING.mine) - table.add_column("Depth", justify="center", style=TABLE_STYLING.mine) - table.add_column("Completed", justify="center", style=TABLE_STYLING.parameters) - table.add_column("# Seismograms", justify="center", style=TABLE_STYLING.linked) - table.add_column("# Stations", justify="center", style=TABLE_STYLING.linked) - - for event in session.exec(select(AimbatEvent)).all(): - logger.debug(f"Adding event with id={event.id} to the table.") - table.add_row( - uuid_shortener(session, event) if short else str(event.id), - TABLE_STYLING.bool_formatter(event.active), - TABLE_STYLING.timestamp_formatter(event.time, short), - f"{event.latitude:.3f}" if short else str(event.latitude), - f"{event.longitude:.3f}" if short else str(event.longitude), - f"{event.depth:.0f}" if short else str(event.depth), - TABLE_STYLING.bool_formatter(event.parameters.completed), - str(len(event.seismograms)), - str(len(station.get_stations_in_event(session, event))), - ) - - console = Console() - console.print(table) + if as_string: + return adapter.dump_json(event_reads).decode("utf-8") + return adapter.dump_python(event_reads, mode="json") @overload @@ -350,6 +277,70 @@ def dump_event_parameter_table_to_json( return active_event.parameters.model_dump(mode="json") +def print_event_table(session: Session, short: bool) -> None: + """Print a pretty table with AIMBAT events. + + Args: + session: Database session. + short: Shorten and format the output to be more human-readable. 
+ """ + + logger.info("Printing AIMBAT events table.") + + json_to_table( + data=dump_event_table_to_json(session, as_string=False), + title="AIMBAT Events", + column_order=[ + "id", + "active", + "time", + "latitude", + "longitude", + "depth", + "completed", + "seismogram_count", + "station_count", + ], + formatters={ + "id": lambda x: ( + uuid_shortener(session, AimbatEvent, str_uuid=x) if short else x + ), + "active": TABLE_STYLING.bool_formatter, + "time": lambda x: TABLE_STYLING.timestamp_formatter(Timestamp(x), short), + "latitude": lambda x: f"{x:.3f}" if short else str(x), + "longitude": lambda x: f"{x:.3f}" if short else str(x), + "depth": lambda x: f"{x:.0f}" if short and x is not None else str(x), + "completed": TABLE_STYLING.bool_formatter, + }, + common_column_kwargs={"justify": "center"}, + column_kwargs={ + "id": { + "header": "ID (shortened)" if short else "ID", + "style": TABLE_STYLING.id, + "no_wrap": True, + }, + "active": {"style": TABLE_STYLING.mine, "no_wrap": True}, + "time": { + "header": "Date & Time", + "style": TABLE_STYLING.mine, + "no_wrap": True, + }, + "latitude": {"style": TABLE_STYLING.mine}, + "longitude": {"style": TABLE_STYLING.mine}, + "depth": {"style": TABLE_STYLING.mine}, + "completed": {"style": TABLE_STYLING.parameters}, + "seismogram_count": { + "header": "# Seismograms", + "style": TABLE_STYLING.linked, + }, + "station_count": { + "header": "# Stations", + "style": TABLE_STYLING.linked, + }, + }, + ) + + def print_event_parameter_table( session: Session, short: bool, all_events: bool ) -> None: diff --git a/src/aimbat/core/_iccs.py b/src/aimbat/core/_iccs.py index 5a02add1..5b6f591c 100644 --- a/src/aimbat/core/_iccs.py +++ b/src/aimbat/core/_iccs.py @@ -1,11 +1,9 @@ """Processing of data for AIMBAT.""" -from typing import cast - +from aimbat.core import get_active_event from aimbat import settings from aimbat.logger import logger from aimbat.models import AimbatSeismogram -from aimbat.utils import get_active_event from pysmo.tools.signal import mccc from pysmo.tools.iccs import ( ICCS, @@ -16,6 +14,7 @@ update_timewindow as _update_timewindow, ) from sqlmodel import Session +from typing import cast __all__ = [ "create_iccs_instance", diff --git a/src/aimbat/core/_project.py b/src/aimbat/core/_project.py index 6fdbdfed..6e6436ee 100644 --- a/src/aimbat/core/_project.py +++ b/src/aimbat/core/_project.py @@ -1,4 +1,4 @@ -from aimbat.utils import get_active_event +from aimbat.core import get_active_event from aimbat.logger import logger from aimbat.models import ( AimbatEvent, @@ -44,36 +44,52 @@ def _project_exists(engine: Engine) -> bool: def create_project(engine: Engine) -> None: - """Create a new AIMBAT project.""" + """Initializes a new AIMBAT project database schema and triggers. - # import this to create tables below + Args: + engine: The SQLAlchemy/SQLModel Engine instance connected to the target database. + + Raises: + RuntimeError: If a project schema already exists in the target database. + """ + + # Import locally to ensure SQLModel registers all table metadata before create_all() import aimbat.models # noqa: F401 - logger.info(f"Creating new project in {engine=}.") + logger.info(f"Creating new project in {engine.url}") if _project_exists(engine): raise RuntimeError( - f"Unable to create a new project: project already exists in {engine=}!" + f"Unable to create a new project: project already exists at {engine.url}!" 
) logger.debug("Creating database tables and loading defaults.") SQLModel.metadata.create_all(engine) - if engine.driver == "pysqlite": - with engine.connect() as connection: - connection.execute(text("PRAGMA foreign_keys=ON")) # for SQLite only - # This trigger ensures that only one event can be active at a time - with engine.connect() as connection: - connection.execute(text("""CREATE TRIGGER single_active_event - BEFORE UPDATE ON aimbatevent - FOR EACH ROW - WHEN NEW.active = TRUE - BEGIN - UPDATE aimbatevent SET active = NULL - WHERE active = TRUE AND id != NEW.id; - END; - """)) + if engine.name == "sqlite": + with engine.begin() as connection: + # Trigger 1: Handle updates to existing rows + connection.execute(text(""" + CREATE TRIGGER IF NOT EXISTS single_active_event_update + BEFORE UPDATE ON aimbatevent + FOR EACH ROW WHEN NEW.active = TRUE + BEGIN + UPDATE aimbatevent SET active = NULL + WHERE active = TRUE AND id != NEW.id; + END; + """)) + + # Trigger 2: Handle brand new active events being inserted + connection.execute(text(""" + CREATE TRIGGER IF NOT EXISTS single_active_event_insert + BEFORE INSERT ON aimbatevent + FOR EACH ROW WHEN NEW.active = TRUE + BEGIN + UPDATE aimbatevent SET active = NULL + WHERE active = TRUE; + END; + """)) def delete_project(engine: Engine) -> None: @@ -119,8 +135,10 @@ def print_project_info(engine: Engine) -> None: grid.add_column() grid.add_column(justify="left") if engine.driver == "pysqlite": - project = str(engine.url.database) - grid.add_row("AIMBAT Project File: ", project) + if engine.url.database == ":memory:": + grid.add_row("AIMBAT Project: ", "in-memory database") + else: + grid.add_row("AIMBAT Project File: ", str(engine.url.database)) events = len(session.exec(select(AimbatEvent)).all()) completed_events = len(event.get_completed_events(session)) diff --git a/src/aimbat/core/_seismogram.py b/src/aimbat/core/_seismogram.py index f85fc75d..547741d5 100644 --- a/src/aimbat/core/_seismogram.py +++ b/src/aimbat/core/_seismogram.py @@ -1,7 +1,7 @@ +from aimbat.core import get_active_event from aimbat.logger import logger from aimbat.utils import ( uuid_shortener, - get_active_event, make_table, TABLE_STYLING, json_to_table, @@ -281,6 +281,44 @@ def dump_seismogram_table_to_json(session: Session) -> str: return adapter.dump_json(aimbat_seismograms).decode("utf-8") +@overload +def dump_seismogram_parameter_table_to_json( + session: Session, all_events: bool, as_string: Literal[True] +) -> str: ... + + +@overload +def dump_seismogram_parameter_table_to_json( + session: Session, all_events: bool, as_string: Literal[False] +) -> list[dict[str, Any]]: ... 
+ + +def dump_seismogram_parameter_table_to_json( + session: Session, all_events: bool, as_string: bool +) -> str | list[dict[str, Any]]: + """Dump the seismogram parameter table data to json.""" + + logger.info("Dumping AimbatSeismogramParameters table to json.") + + adapter: TypeAdapter[Sequence[AimbatSeismogramParameters]] = TypeAdapter( + Sequence[AimbatSeismogramParameters] + ) + + if all_events: + parameters = session.exec(select(AimbatSeismogramParameters)).all() + else: + parameters = session.exec( + select(AimbatSeismogramParameters) + .join(AimbatSeismogram) + .join(AimbatEvent) + .where(AimbatEvent.active == 1) + ).all() + + if as_string: + return adapter.dump_json(parameters).decode("utf-8") + return adapter.dump_python(parameters, mode="json") + + def print_seismogram_table( session: Session, short: bool, all_events: bool = False ) -> None: @@ -335,7 +373,7 @@ def print_seismogram_table( row = [ (uuid_shortener(session, seismogram) if short else str(seismogram.id)), TABLE_STYLING.bool_formatter(seismogram.parameters.select), - str(len(seismogram)), + str(len(seismogram.data)), str(seismogram.delta.total_seconds()), ( uuid_shortener(session, seismogram.datasource) @@ -362,44 +400,6 @@ def print_seismogram_table( console.print(table) -@overload -def dump_seismogram_parameter_table_to_json( - session: Session, all_events: bool, as_string: Literal[True] -) -> str: ... - - -@overload -def dump_seismogram_parameter_table_to_json( - session: Session, all_events: bool, as_string: Literal[False] -) -> list[dict[str, Any]]: ... - - -def dump_seismogram_parameter_table_to_json( - session: Session, all_events: bool, as_string: bool -) -> str | list[dict[str, Any]]: - """Dump the seismogram parameter table data to json.""" - - logger.info("Dumping AimbatSeismogramParameters table to json.") - - adapter: TypeAdapter[Sequence[AimbatSeismogramParameters]] = TypeAdapter( - Sequence[AimbatSeismogramParameters] - ) - - if all_events: - parameters = session.exec(select(AimbatSeismogramParameters)).all() - else: - parameters = session.exec( - select(AimbatSeismogramParameters) - .join(AimbatSeismogram) - .join(AimbatEvent) - .where(AimbatEvent.active == 1) - ).all() - - if as_string: - return adapter.dump_json(parameters).decode("utf-8") - return adapter.dump_python(parameters, mode="json") - - def print_seismogram_parameter_table(session: Session, short: bool) -> None: """Print a pretty table with AIMBAT seismogram parameter values for the active event. 
diff --git a/src/aimbat/core/_snapshot.py b/src/aimbat/core/_snapshot.py index 11eaccea..efe922a0 100644 --- a/src/aimbat/core/_snapshot.py +++ b/src/aimbat/core/_snapshot.py @@ -1,20 +1,23 @@ +import uuid +import json +from aimbat.core import get_active_event from aimbat.logger import logger -from aimbat.utils import uuid_shortener, get_active_event, make_table, TABLE_STYLING +from aimbat.utils import uuid_shortener, json_to_table, TABLE_STYLING from aimbat.models import ( AimbatSeismogramParametersBase, AimbatSnapshot, + AimbatSnapshotRead, AimbatEvent, AimbatEventParametersBase, - AimbatEventParameters, AimbatEventParametersSnapshot, AimbatSeismogramParametersSnapshot, ) from sqlmodel import Session, select -from rich.console import Console +from sqlalchemy import true +from pandas import Timestamp from collections.abc import Sequence from typing import overload, Literal, Any from pydantic import TypeAdapter -import uuid __all__ = [ "create_snapshot", @@ -23,7 +26,7 @@ "delete_snapshot_by_id", "delete_snapshot", "get_snapshots", - "dump_snapshot_table_to_json", + "dump_snapshot_tables_to_json", "print_snapshot_table", ] @@ -184,60 +187,83 @@ def get_snapshots( logger.info("Getting AIMBAT snapshots.") - if all_events: - logger.debug("Getting snapshots for all events.") - return session.exec(select(AimbatSnapshot)).all() - - logger.debug("Getting snapshots for active event.") - select_active_event_snapshots = ( + statement = ( select(AimbatSnapshot) - .join(AimbatEventParametersSnapshot) - .join(AimbatEventParameters) .join(AimbatEvent) - .where(AimbatEvent.active == 1) + .where(AimbatEvent.active == True if not all_events else true()) # noqa: E712 ) - return session.exec(select_active_event_snapshots).all() + + logger.debug(f"Executing statement to get snapshots: {statement}") + return session.exec(statement).all() @overload -def dump_snapshot_table_to_json( +def dump_snapshot_tables_to_json( session: Session, all_events: bool, as_string: Literal[True] ) -> str: ... @overload -def dump_snapshot_table_to_json( +def dump_snapshot_tables_to_json( session: Session, all_events: bool, as_string: Literal[False] -) -> list[dict[str, Any]]: ... +) -> dict[str, list[dict[str, Any]]]: ... -def dump_snapshot_table_to_json( +def dump_snapshot_tables_to_json( session: Session, all_events: bool, as_string: bool -) -> str | list[dict[str, Any]]: - """Dump the `AimbatSnapshot` table data to json.""" +) -> str | dict[str, list[dict[str, Any]]]: + """Dump snapshot data as a dict of lists of dicts. + + Returns a structure with three keys: + + - ``snapshots``: flat list of snapshot metadata. + - ``event_parameters``: flat list of event parameter snapshots. + - ``seismogram_parameters``: flat list of seismogram parameter snapshots. + + Each entry includes a ``snapshot_id`` for cross-referencing. - logger.info("Dumping AimbatSeismogramtable to json.") + Args: + session: Database session. + all_events: Include snapshots for all events. + as_string: Return a JSON string when True, otherwise a dict. 
+ """ + logger.info(f"Dumping AimbatSnapshot tables to json with {all_events=}.") + + snapshots = get_snapshots(session, all_events) - adapter: TypeAdapter[Sequence[AimbatSnapshot]] = TypeAdapter( - Sequence[AimbatSnapshot] + snapshot_adapter: TypeAdapter[Sequence[AimbatSnapshotRead]] = TypeAdapter( + Sequence[AimbatSnapshotRead] + ) + event_params_adapter: TypeAdapter[Sequence[AimbatEventParametersSnapshot]] = ( + TypeAdapter(Sequence[AimbatEventParametersSnapshot]) + ) + seis_params_adapter: TypeAdapter[Sequence[AimbatSeismogramParametersSnapshot]] = ( + TypeAdapter(Sequence[AimbatSeismogramParametersSnapshot]) ) - if all_events: - parameters = session.exec(select(AimbatSnapshot)).all() - else: - parameters = session.exec( - select(AimbatSnapshot).join(AimbatEvent).where(AimbatEvent.active == 1) - ).all() + snapshot_reads = [AimbatSnapshotRead.from_snapshot(s) for s in snapshots] + event_params = [s.event_parameters_snapshot for s in snapshots] + seis_params = [sp for s in snapshots for sp in s.seismogram_parameters_snapshots] - if as_string: - return adapter.dump_json(parameters).decode("utf-8") - return adapter.dump_python(parameters, mode="json") + data: dict[str, list[dict[str, Any]]] = { + "snapshots": snapshot_adapter.dump_python(snapshot_reads, mode="json"), + "event_parameters": event_params_adapter.dump_python(event_params, mode="json"), + "seismogram_parameters": seis_params_adapter.dump_python( + seis_params, mode="json" + ), + } + + return json.dumps(data) if as_string else data def print_snapshot_table(session: Session, short: bool, all_events: bool) -> None: """Print a pretty table with AIMBAT snapshots. + Uses the ``snapshots`` portion of :func:`dump_snapshot_tables_to_json` + and renders it via :func:`~aimbat.utils.json_to_table`. + Args: + session: Database session. short: Shorten and format the output to be more human-readable. all_events: Print all snapshots instead of limiting to the active event. 
""" @@ -246,9 +272,6 @@ def print_snapshot_table(session: Session, short: bool, all_events: bool) -> Non title = "AIMBAT snapshots for all events" - snapshots = get_snapshots(session, all_events) - logger.debug(f"Found {len(snapshots)} snapshots for the table.") - if not all_events: active_event = get_active_event(session) if short: @@ -258,36 +281,49 @@ def print_snapshot_table(session: Session, short: bool, all_events: bool) -> Non f"AIMBAT snapshots for event {active_event.time} (ID={active_event.id})" ) - table = make_table(title=title) + data = dump_snapshot_tables_to_json(session, all_events, as_string=False) + snapshot_data = data["snapshots"] - table.add_column( - "ID (shortened)" if short else "ID", - justify="center", - style=TABLE_STYLING.id, - no_wrap=True, - ) - table.add_column( - "Date & Time", justify="center", style=TABLE_STYLING.mine, no_wrap=True - ) - table.add_column("Comment", justify="center", style=TABLE_STYLING.mine) - table.add_column("# Seismograms", justify="center", style=TABLE_STYLING.linked) + column_order = ["id", "date", "comment", "seismogram_count"] if all_events: - table.add_column("Event ID", justify="center", style=TABLE_STYLING.linked) - - for snapshot in snapshots: - logger.debug(f"Adding snapshot with id={snapshot.id} to the table.") - row = [ - (uuid_shortener(session, snapshot) if short else str(snapshot.id)), - TABLE_STYLING.timestamp_formatter(snapshot.date, short), - str(snapshot.comment), - str(len(snapshot.seismogram_parameters_snapshots)), - ] - if all_events: - aimbat_event = snapshot.event - row.append( - uuid_shortener(session, aimbat_event) if short else str(aimbat_event.id) - ) - table.add_row(*row) - - console = Console() - console.print(table) + column_order.append("event_id") + + skip_keys = [] if all_events else ["event_id"] + + json_to_table( + data=snapshot_data, + title=title, + column_order=column_order, + skip_keys=skip_keys, + formatters={ + "id": lambda x: ( + uuid_shortener(session, AimbatSnapshot, str_uuid=x) if short else x + ), + "date": lambda x: TABLE_STYLING.timestamp_formatter(Timestamp(x), short), + "event_id": lambda x: ( + uuid_shortener(session, AimbatEvent, str_uuid=x) if short else x + ), + }, + common_column_kwargs={"justify": "center"}, + column_kwargs={ + "id": { + "header": "ID (shortened)" if short else "ID", + "style": TABLE_STYLING.id, + "no_wrap": True, + }, + "date": { + "header": "Date & Time", + "style": TABLE_STYLING.mine, + "no_wrap": True, + }, + "comment": {"style": TABLE_STYLING.mine}, + "seismogram_count": { + "header": "# Seismograms", + "style": TABLE_STYLING.linked, + }, + "event_id": { + "header": "Event ID (shortened)" if short else "Event ID", + "style": TABLE_STYLING.linked, + }, + }, + ) diff --git a/src/aimbat/core/_station.py b/src/aimbat/core/_station.py index 0d026e6f..8739e712 100644 --- a/src/aimbat/core/_station.py +++ b/src/aimbat/core/_station.py @@ -1,19 +1,23 @@ +import uuid +from aimbat.core import get_active_event from aimbat.logger import logger -from aimbat.utils import uuid_shortener, make_table, get_active_event, TABLE_STYLING +from aimbat.utils import uuid_shortener, json_to_table, TABLE_STYLING from aimbat.models import AimbatStation, AimbatSeismogram, AimbatEvent -from sqlmodel import Session, select +from typing import overload, Literal, Any +from sqlmodel import Session, select, col +from sqlalchemy import func from sqlalchemy.exc import NoResultFound -from rich.console import Console from collections.abc import Sequence from pydantic import TypeAdapter -import uuid 
 __all__ = [
     "delete_station_by_id",
     "delete_station",
     "get_stations_in_event",
-    "print_station_table",
+    "get_stations_in_active_event",
+    "get_stations_with_event_seismogram_count",
     "dump_station_table_to_json",
+    "print_station_table",
 ]
 
 
@@ -50,6 +54,49 @@ def delete_station(session: Session, station: AimbatStation) -> None:
     session.commit()
 
 
+@overload
+def get_stations_in_active_event(
+    session: Session, as_json: Literal[False]
+) -> Sequence[AimbatStation]: ...
+
+
+@overload
+def get_stations_in_active_event(
+    session: Session, as_json: Literal[True]
+) -> list[dict[str, Any]]: ...
+
+
+def get_stations_in_active_event(
+    session: Session, as_json: bool
+) -> Sequence[AimbatStation] | list[dict[str, Any]]:
+    """Get the stations for the active event.
+
+    Args:
+        session: Database session.
+        as_json: Whether to return the results as JSON-serializable dicts.
+
+    Returns:
+        Stations in the active event.
+    """
+    logger.info("Getting stations for active event.")
+
+    statement = (
+        select(AimbatStation)
+        .distinct()
+        .join(AimbatSeismogram)
+        .join(AimbatEvent)
+        .where(AimbatEvent.active == True)  # noqa: E712
+    )
+
+    logger.debug(f"Executing query: {statement}")
+    results = session.exec(statement).all()
+
+    if not as_json:
+        return results
+
+    adapter: TypeAdapter[Sequence[AimbatStation]] = TypeAdapter(Sequence[AimbatStation])
+
+    return adapter.dump_python(results, mode="json")
+
+
 def get_stations_in_event(
     session: Session, event: AimbatEvent
 ) -> Sequence[AimbatStation]:
@@ -61,23 +108,91 @@
     Returns: Stations in event.
     """
-
     logger.info(f"Getting stations for event: {event.id}.")
 
-    select_stations = (
+    statement = (
         select(AimbatStation)
         .join(AimbatSeismogram)
         .join(AimbatEvent)
         .where(AimbatEvent.id == event.id)
     )
 
-    stations = session.exec(select_stations).all()
-
-    logger.debug(f"Found {len(stations)}.")
+    logger.debug(f"Executing query: {statement}")
+    stations = session.exec(statement).all()
 
     return stations
 
 
+@overload
+def get_stations_with_event_seismogram_count(
+    session: Session, as_json: Literal[False]
+) -> Sequence[tuple[AimbatStation, int, int]]: ...
+
+
+@overload
+def get_stations_with_event_seismogram_count(
+    session: Session, as_json: Literal[True]
+) -> list[dict[str, Any]]: ...
+
+
+def get_stations_with_event_seismogram_count(
+    session: Session, as_json: bool
+) -> Sequence[tuple[AimbatStation, int, int]] | list[dict[str, Any]]:
+    """Get stations along with the count of seismograms and events they are associated with.
+
+    Args:
+        session: Database session.
+        as_json: Whether to return the result as JSON-serializable dicts.
+
+    Returns: A sequence of tuples containing the station, count of seismograms
+        and count of events, or a list of JSON-serializable dicts if as_json is True.
+    """
+    logger.info("Getting stations with associated seismogram and event counts.")
+
+    statement = (
+        select(
+            AimbatStation,
+            func.count(col(AimbatSeismogram.id)),
+            func.count(func.distinct(col(AimbatEvent.id))),
+        )
+        .select_from(AimbatStation)
+        .join(AimbatSeismogram, isouter=True)
+        .join(AimbatEvent, isouter=True)
+        .group_by(col(AimbatStation.id))
+    )
+
+    logger.debug(f"Executing query: {statement}")
+    results = session.exec(statement).all()
+
+    if not as_json:
+        return results
+
+    formatted_results = []
+
+    for row in results:
+        # 1. Dump the station to a dict; mode="json" converts UUIDs/datetimes to strings.
+        station_dict = row[0].model_dump(mode="json")
+
+        # 2. Add the counts directly to the dictionary.
+        station_dict["seismogram_count"] = row[1]
+        station_dict["event_count"] = row[2]
+
+        # 3. 
Add to our final list + formatted_results.append(station_dict) + + return formatted_results + + +def dump_station_table_to_json(session: Session) -> str: + """Create a JSON string from the AimbatStation table data.""" + + logger.info("Dumping AIMBAT station table to json.") + + adapter: TypeAdapter[Sequence[AimbatStation]] = TypeAdapter(Sequence[AimbatStation]) + aimbat_station = session.exec(select(AimbatStation)).all() + return adapter.dump_json(aimbat_station).decode("utf-8") + + def print_station_table( session: Session, short: bool, all_events: bool = False ) -> None: @@ -88,92 +203,107 @@ def print_station_table( short: Shorten and format the output to be more human-readable. all_events: Print stations for all events. """ - logger.info("Printing station table.") title = "AIMBAT stations for all events" - aimbat_stations = None if all_events: logger.debug("Selecting all AIMBAT stations.") - aimbat_stations = session.exec(select(AimbatStation)).all() + data = get_stations_with_event_seismogram_count(session, as_json=True) else: - logger.debug("Selecting AIMBAT stations for active event.") + logger.debug("Selecting AIMBAT stations used by active event.") active_event = get_active_event(session) - aimbat_stations = get_stations_in_event(session, active_event) + data = get_stations_in_active_event(session, as_json=True) + if short: title = f"AIMBAT stations for event {active_event.time.strftime('%Y-%m-%d %H:%M:%S')} (ID={uuid_shortener(session, active_event)})" else: title = ( f"AIMBAT stations for event {active_event.time} (ID={active_event.id})" ) - logger.debug("Found {len(aimbat_stations)} stations for the table.") - - table = make_table(title=title) - table.add_column( - "ID (shortened)" if short else "ID", - justify="center", - style=TABLE_STYLING.id, - no_wrap=True, - ) - table.add_column( - "Name & Network", justify="center", style=TABLE_STYLING.mine, no_wrap=True - ) - table.add_column("Channel", justify="center", style=TABLE_STYLING.mine) - table.add_column("Location", justify="center", style=TABLE_STYLING.mine) - table.add_column("Latitude", justify="center", style=TABLE_STYLING.mine) - table.add_column("Longitude", justify="center", style=TABLE_STYLING.mine) - table.add_column("Elevation", justify="center", style=TABLE_STYLING.mine) + column_order = [ + "id", + "name", + "network", + "channel", + "location", + "latitude", + "longitude", + "elevation", + ] if all_events: - table.add_column("# Seismograms", justify="center", style=TABLE_STYLING.linked) - table.add_column("# Events", justify="center", style=TABLE_STYLING.linked) - - for aimbat_station in aimbat_stations: - logger.debug(f"Adding {aimbat_station.name} to the table.") - row = [ - ( - uuid_shortener(session, aimbat_station) - if short - else str(aimbat_station.id) - ), - f"{aimbat_station.name} - {aimbat_station.network}", - f"{aimbat_station.channel}", - f"{aimbat_station.location}", - ( - f"{aimbat_station.latitude:.3f}" - if short - else str(aimbat_station.latitude) - ), - ( - f"{aimbat_station.longitude:.3f}" - if short - else str(aimbat_station.longitude) - ), - ( - f"{aimbat_station.elevation:.0f}" - if short - else str(aimbat_station.elevation) - ), - ] - if all_events: - row.extend( - [ - str(len(aimbat_station.seismograms)), - str(len({i.event_id for i in aimbat_station.seismograms})), - ] - ) - table.add_row(*row) - - console = Console() - console.print(table) - - -def dump_station_table_to_json(session: Session) -> str: - """Create a JSON string from the AimbatStation table data.""" - - 
logger.info("Dumping AIMBAT station table to json.") - - adapter: TypeAdapter[Sequence[AimbatStation]] = TypeAdapter(Sequence[AimbatStation]) - aimbat_station = session.exec(select(AimbatStation)).all() - return adapter.dump_json(aimbat_station).decode("utf-8") + column_order.extend(["seismogram_count", "event_count"]) + + column_kwargs: dict[str, dict[str, Any]] = { + "id": { + "header": "ID (shortened)" if short else "ID", + "style": TABLE_STYLING.id, + "justify": "center", + "no_wrap": True, + }, + "name": { + "header": "Name", + "style": TABLE_STYLING.mine, + "justify": "center", + "no_wrap": True, + }, + "network": { + "header": "Network", + "style": TABLE_STYLING.mine, + "justify": "center", + "no_wrap": True, + }, + "channel": { + "header": "Channel", + "style": TABLE_STYLING.mine, + "justify": "center", + }, + "location": { + "header": "Location", + "style": TABLE_STYLING.mine, + "justify": "center", + }, + "latitude": { + "header": "Latitude", + "style": TABLE_STYLING.mine, + "justify": "center", + }, + "longitude": { + "header": "Longitude", + "style": TABLE_STYLING.mine, + "justify": "center", + }, + "elevation": { + "header": "Elevation", + "style": TABLE_STYLING.mine, + "justify": "center", + }, + "seismogram_count": { + "header": "# Seismograms", + "style": TABLE_STYLING.linked, + "justify": "center", + }, + "event_count": { + "header": "# Events", + "style": TABLE_STYLING.linked, + "justify": "center", + }, + } + + formatters = { + "id": lambda x: ( + uuid_shortener(session, AimbatStation, str_uuid=x) if short else str(x) + ), + "latitude": lambda x: f"{x:.3f}" if short else str(x), + "longitude": lambda x: f"{x:.3f}" if short else str(x), + "elevation": lambda x: f"{x:.0f}" if short else str(x), + } + + json_to_table( + data, + title=title, + column_order=column_order, + column_kwargs=column_kwargs, + formatters=formatters, + ) diff --git a/src/aimbat/db.py b/src/aimbat/db.py index 614dcef4..f74083d1 100644 --- a/src/aimbat/db.py +++ b/src/aimbat/db.py @@ -1,9 +1,25 @@ """Module to define the AIMBAT project file and create the database engine.""" +import sqlite3 from aimbat import settings from sqlmodel import create_engine +from sqlalchemy import event +from sqlalchemy.pool import ConnectionPoolEntry __all__ = ["engine"] engine = create_engine(url=settings.db_url, echo=False) """AIMBAT database engine.""" + + +# Automatically enforce foreign keys for every new connection if using SQLite +if engine.name == "sqlite": + + @event.listens_for(engine, "connect") + def set_sqlite_pragma( + dbapi_connection: sqlite3.Connection, connection_record: ConnectionPoolEntry + ) -> None: + """Enables foreign key support for SQLite connections.""" + cursor = dbapi_connection.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + cursor.close() diff --git a/src/aimbat/models/_models.py b/src/aimbat/models/_models.py index e86c70b6..d81671b7 100644 --- a/src/aimbat/models/_models.py +++ b/src/aimbat/models/_models.py @@ -4,6 +4,9 @@ as classes to use with python in AIMBAT. 
""" +import numpy as np +import os +import uuid from ._sqlalchemy import SAPandasTimestamp, SAPandasTimedelta from aimbat import settings from aimbat._lib._mixins import EventParametersValidatorMixin @@ -15,13 +18,12 @@ PydanticPositiveTimedelta, ) from datetime import timezone -from sqlmodel import Relationship, SQLModel, Field +from sqlmodel import Relationship, SQLModel, Field, col, select +from sqlalchemy import func +from sqlalchemy.orm import column_property from pydantic import computed_field -from typing import TYPE_CHECKING +from typing import Self, TYPE_CHECKING from pandas import Timestamp -import numpy as np -import os -import uuid __all__ = [ "AimbatTypes", @@ -37,67 +39,11 @@ "AimbatSeismogramParametersBase", "AimbatSeismogramParametersSnapshot", "AimbatSnapshot", + "AimbatEventRead", + "AimbatSnapshotRead", ] -class AimbatDataSourceCreate(SQLModel): - """Class to store data source information.""" - - sourcename: str | os.PathLike = Field(unique=True) - datatype: DataType = DataType.SAC - - -class AimbatDataSource(SQLModel, table=True): - """Class to store data source information.""" - - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - sourcename: str - datatype: DataType - seismogram_id: uuid.UUID = Field( - default=None, foreign_key="aimbatseismogram.id", ondelete="CASCADE" - ) - seismogram: "AimbatSeismogram" = Relationship(back_populates="datasource") - - -class AimbatEvent(SQLModel, table=True): - """Store event information.""" - - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - "Unique ID." - - active: bool | None = Field(default=None, unique=True) - "Indicates if an event is the active event." - - time: PydanticTimestamp = Field( - unique=True, sa_type=SAPandasTimestamp, allow_mutation=False - ) - "Event time." - - latitude: float - "Event latitude." - - longitude: float - "Event longitude." - - depth: float | None = None - "Event depth." - - seismograms: list["AimbatSeismogram"] = Relationship( - back_populates="event", cascade_delete=True - ) - "List of seismograms of this event." - - parameters: "AimbatEventParameters" = Relationship( - back_populates="event", cascade_delete=True - ) - "Event parameters." - - snapshots: list["AimbatSnapshot"] = Relationship( - back_populates="event", cascade_delete=True - ) - "List of snapshots." - - class AimbatEventParametersBase(SQLModel): """Base class that defines the event parameters used in AIMBAT. @@ -134,6 +80,74 @@ class AimbatEventParametersBase(SQLModel): "Maximum frequency for bandpass filter (ignored if `bandpass_apply` is False)." +class AimbatSeismogramParametersBase(SQLModel): + """Base class that defines the seismogram parameters used in AIMBAT.""" + + flip: bool = False + "Whether or not the seismogram should be flipped." + + select: bool = True + "Whether or not this seismogram should be used for processing." + + t1: PydanticTimestamp | None = Field(default=None, sa_type=SAPandasTimestamp) + """Working pick. + + This pick serves as working as well as output pick. It is changed by: + + 1. Picking the phase arrival in the stack. + 2. Running ICCS. + 3. Running MCCC. 
+ """ + + +class AimbatDataSourceCreate(SQLModel): + """Class to store data source information.""" + + sourcename: str | os.PathLike = Field(unique=True) + datatype: DataType = DataType.SAC + + +class AimbatDataSource(SQLModel, table=True): + """Class to store data source information.""" + + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + sourcename: str + datatype: DataType + seismogram_id: uuid.UUID = Field( + default=None, foreign_key="aimbatseismogram.id", ondelete="CASCADE" + ) + seismogram: "AimbatSeismogram" = Relationship(back_populates="datasource") + + +class AimbatSeismogramParameters(AimbatSeismogramParametersBase, table=True): + """Class to store ICCS processing parameters of a single seismogram.""" + + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + seismogram_id: uuid.UUID = Field( + default=None, foreign_key="aimbatseismogram.id", ondelete="CASCADE" + ) + seismogram: "AimbatSeismogram" = Relationship(back_populates="parameters") + snapshots: list["AimbatSeismogramParametersSnapshot"] = Relationship( + back_populates="parameters", cascade_delete=True + ) + + +class AimbatSeismogramParametersSnapshot(AimbatSeismogramParametersBase, table=True): + """Class to store a snapshot of ICCS processing parameters of a single seismogram.""" + + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + seismogram_parameters_id: uuid.UUID = Field( + foreign_key="aimbatseismogramparameters.id", ondelete="CASCADE" + ) + parameters: AimbatSeismogramParameters = Relationship(back_populates="snapshots") + snapshot_id: uuid.UUID = Field( + default=None, foreign_key="aimbatsnapshot.id", ondelete="CASCADE" + ) + snapshot: "AimbatSnapshot" = Relationship( + back_populates="seismogram_parameters_snapshots" + ) + + class AimbatEventParameters( AimbatEventParametersBase, EventParametersValidatorMixin, table=True ): @@ -147,7 +161,7 @@ class AimbatEventParameters( ) "Event ID these parameters are associated with." - event: AimbatEvent = Relationship(back_populates="parameters") + event: "AimbatEvent" = Relationship(back_populates="parameters") "Event these parameters are associated with." snapshots: list["AimbatEventParametersSnapshot"] = Relationship( @@ -172,37 +186,36 @@ class AimbatEventParametersSnapshot(AimbatEventParametersBase, table=True): ) -class AimbatStation(SQLModel, table=True): - """Class to store station information.""" - - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - "Unique ID." - - name: str = Field(allow_mutation=False) - "Station name." - - network: str = Field(allow_mutation=False) - "Network name." - - location: str = Field(allow_mutation=False) - "Location ID." - - channel: str = Field(allow_mutation=False) - "Channel code." - - latitude: float - "Station latitude" +class AimbatSnapshot(SQLModel, table=True): + """Class to store AIMBAT snapshots. - longitude: float - "Station longitude" + The AimbatSnapshot class does not actually save any parameter data. + It is used to keep track of the AimbatEventParametersSnapshot and + AimbatSeismogramParametersSnapshot instances. + """ - elevation: float | None = None - "Station elevation." 
+ id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + date: PydanticTimestamp = Field( + default_factory=lambda: Timestamp.now(tz=timezone.utc), + unique=True, + allow_mutation=False, + sa_type=SAPandasTimestamp, + ) + comment: str | None = None + event_parameters_snapshot: AimbatEventParametersSnapshot = Relationship( + back_populates="snapshot", cascade_delete=True + ) + seismogram_parameters_snapshots: list[AimbatSeismogramParametersSnapshot] = ( + Relationship(back_populates="snapshot", cascade_delete=True) + ) - seismograms: list["AimbatSeismogram"] = Relationship( - back_populates="station", cascade_delete=True + event_id: uuid.UUID = Field( + default=None, foreign_key="aimbatevent.id", ondelete="CASCADE" ) - "Seismograms recorded at this station." + "Event ID this snapshot is associated with." + + event: "AimbatEvent" = Relationship(back_populates="snapshots") + "Event this snapshot is associated with." class AimbatSeismogram(SQLModel, table=True): @@ -226,24 +239,23 @@ class AimbatSeismogram(SQLModel, table=True): station_id: uuid.UUID = Field( default=None, foreign_key="aimbatstation.id", ondelete="CASCADE" ) - station: AimbatStation = Relationship(back_populates="seismograms") + station: "AimbatStation" = Relationship(back_populates="seismograms") event_id: uuid.UUID = Field( default=None, foreign_key="aimbatevent.id", ondelete="CASCADE" ) - event: AimbatEvent = Relationship(back_populates="seismograms") + event: "AimbatEvent" = Relationship(back_populates="seismograms") parameters: "AimbatSeismogramParameters" = Relationship( back_populates="seismogram", cascade_delete=True, ) - def __len__(self) -> int: - return np.size(self.data) - if TYPE_CHECKING: - flip: bool - select: bool - t1: Timestamp | None - data: np.ndarray + # Add same default values for type checking purposes + # as in AimbatSeismogramParametersBase + flip: bool = False + select: bool = True + t1: Timestamp | None = None + data: np.ndarray = np.array([]) @property def end_time(self) -> Timestamp: ... @@ -252,9 +264,9 @@ def end_time(self) -> Timestamp: ... @computed_field def end_time(self) -> PydanticTimestamp: - if len(self) == 0: + if len(self.data) == 0: return self.begin_time - return self.begin_time + self.delta * (len(self) - 1) + return self.begin_time + self.delta * (len(self.data) - 1) @property def flip(self) -> bool: @@ -297,85 +309,147 @@ def data(self, value: np.ndarray) -> None: ) -class AimbatSeismogramParametersBase(SQLModel): - """Base class that defines the seismogram parameters used in AIMBAT.""" +class AimbatStation(SQLModel, table=True): + """Class to store station information.""" - flip: bool = False - "Whether or not the seismogram should be flipped." + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + "Unique ID." - select: bool = True - "Whether or not this seismogram should be used for processing." + name: str = Field(allow_mutation=False) + "Station name." - t1: PydanticTimestamp | None = Field(default=None, sa_type=SAPandasTimestamp) - """Working pick. + network: str = Field(allow_mutation=False) + "Network name." - This pick serves as working as well as output pick. It is changed by: + location: str = Field(allow_mutation=False) + "Location ID." - 1. Picking the phase arrival in the stack. - 2. Running ICCS. - 3. Running MCCC. - """ + channel: str = Field(allow_mutation=False) + "Channel code." 
+ latitude: float + "Station latitude" -class AimbatSeismogramParameters(AimbatSeismogramParametersBase, table=True): - """Class to store ICCS processing parameters of a single seismogram.""" + longitude: float + "Station longitude" - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - seismogram_id: uuid.UUID = Field( - default=None, foreign_key="aimbatseismogram.id", ondelete="CASCADE" - ) - seismogram: AimbatSeismogram = Relationship(back_populates="parameters") - snapshots: list["AimbatSeismogramParametersSnapshot"] = Relationship( - back_populates="parameters", cascade_delete=True + elevation: float | None = None + "Station elevation." + + seismograms: list[AimbatSeismogram] = Relationship( + back_populates="station", cascade_delete=True ) + "Seismograms recorded at this station." -class AimbatSeismogramParametersSnapshot(AimbatSeismogramParametersBase, table=True): - """Class to store a snapshot of ICCS processing parameters of a single seismogram.""" +class AimbatEvent(SQLModel, table=True): + """Store event information.""" id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - seismogram_parameters_id: uuid.UUID = Field( - foreign_key="aimbatseismogramparameters.id", ondelete="CASCADE" - ) - parameters: AimbatSeismogramParameters = Relationship(back_populates="snapshots") - snapshot_id: uuid.UUID = Field( - default=None, foreign_key="aimbatsnapshot.id", ondelete="CASCADE" - ) - snapshot: "AimbatSnapshot" = Relationship( - back_populates="seismogram_parameters_snapshots" + "Unique ID." + + active: bool | None = Field(default=None, unique=True) + "Indicates if an event is the active event." + + time: PydanticTimestamp = Field( + unique=True, sa_type=SAPandasTimestamp, allow_mutation=False ) + "Event time." + latitude: float + "Event latitude." -class AimbatSnapshot(SQLModel, table=True): - """Class to store AIMBAT snapshots. + longitude: float + "Event longitude." - The AimbatSnapshot class does not actually save any parameter data. - It is used to keep track of the AimbatEventParametersSnapshot and - AimbatSeismogramParametersSnapshot instances. - """ + depth: float | None = None + "Event depth." - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - date: PydanticTimestamp = Field( - default_factory=lambda: Timestamp.now(tz=timezone.utc), - unique=True, - allow_mutation=False, - sa_type=SAPandasTimestamp, - ) - comment: str | None = None - event_parameters_snapshot: AimbatEventParametersSnapshot = Relationship( - back_populates="snapshot", cascade_delete=True + seismograms: list[AimbatSeismogram] = Relationship( + back_populates="event", cascade_delete=True ) - seismogram_parameters_snapshots: list[AimbatSeismogramParametersSnapshot] = ( - Relationship(back_populates="snapshot", cascade_delete=True) + "List of seismograms of this event." + + parameters: AimbatEventParameters = Relationship( + back_populates="event", cascade_delete=True ) + "Event parameters." - event_id: uuid.UUID = Field( - default=None, foreign_key="aimbatevent.id", ondelete="CASCADE" + snapshots: list[AimbatSnapshot] = Relationship( + back_populates="event", cascade_delete=True ) - "Event ID this snapshot is associated with." + "List of snapshots." - event: AimbatEvent = Relationship(back_populates="snapshots") - "Event this snapshot is associated with." 
+ if TYPE_CHECKING: + seismogram_count: int = 0 + station_count: int = 0 + + +AimbatEvent.seismogram_count = column_property( # type: ignore[assignment] + select(func.count(col(AimbatSeismogram.id))) + .where(col(AimbatSeismogram.event_id) == col(AimbatEvent.id)) + .correlate_except(AimbatSeismogram) + .scalar_subquery() +) +"Number of seismograms for this event." + +AimbatEvent.station_count = column_property( # type: ignore[assignment] + select(func.count(func.distinct(col(AimbatSeismogram.station_id)))) + .where(col(AimbatSeismogram.event_id) == col(AimbatEvent.id)) + .correlate_except(AimbatSeismogram) + .scalar_subquery() +) +"Number of unique stations for this event." + + +class AimbatEventRead(SQLModel): + """Read model for AimbatEvent including computed counts.""" + + id: uuid.UUID + active: bool | None + time: PydanticTimestamp + latitude: float + longitude: float + depth: float | None + completed: bool = False + seismogram_count: int + station_count: int + + @classmethod + def from_event(cls, event: AimbatEvent) -> Self: + """Create an AimbatEventRead from an AimbatEvent ORM instance.""" + return cls( + id=event.id, + active=event.active, + time=event.time, + latitude=event.latitude, + longitude=event.longitude, + depth=event.depth, + completed=event.parameters.completed, + seismogram_count=event.seismogram_count, + station_count=event.station_count, + ) + + +class AimbatSnapshotRead(SQLModel): + """Read model for AimbatSnapshot with a seismogram count.""" + + id: uuid.UUID + date: PydanticTimestamp + comment: str | None + event_id: uuid.UUID + seismogram_count: int + + @classmethod + def from_snapshot(cls, snapshot: AimbatSnapshot) -> Self: + """Create an AimbatSnapshotRead from an AimbatSnapshot ORM instance.""" + return cls( + id=snapshot.id, + date=snapshot.date, + comment=snapshot.comment, + event_id=snapshot.event_id, + seismogram_count=len(snapshot.seismogram_parameters_snapshots), + ) type AimbatTypes = ( diff --git a/src/aimbat/utils/__init__.py b/src/aimbat/utils/__init__.py index b7575f40..0e2c4a85 100644 --- a/src/aimbat/utils/__init__.py +++ b/src/aimbat/utils/__init__.py @@ -6,8 +6,6 @@ _internal_names = set(dir()) from ._json import * -from ._active_event import * -from ._checkdata import * from ._sampledata import * from ._style import * from ._uuid import * diff --git a/src/aimbat/utils/_active_event.py b/src/aimbat/utils/_active_event.py deleted file mode 100644 index f313a141..00000000 --- a/src/aimbat/utils/_active_event.py +++ /dev/null @@ -1,38 +0,0 @@ -from aimbat.logger import logger -from aimbat.models import AimbatEvent -from aimbat.cli._common import HINTS -from sqlmodel import Session, select -from sqlalchemy.exc import NoResultFound - -__all__ = ["get_active_event"] - - -def get_active_event(session: Session) -> AimbatEvent: - """ - Return the currently active event (i.e. the one being processed). - - Args: - session: SQL session. - - Returns: - Active Event - - Raises - NoResultFound: When no event is active. - """ - - logger.debug("Attempting to determine active event.") - - select_active_event = select(AimbatEvent).where(AimbatEvent.active == 1) - - # NOTE: While there technically can be no active event in the database, - # we typically don't really want to go beyond this point when that is the - # case. Hence we call `one` rather than `one_or_none`. - try: - active_event = session.exec(select_active_event).one() - except NoResultFound: - raise NoResultFound(f"No active event found. 
{HINTS.ACTIVATE_EVENT}") - - logger.debug(f"Active event: {active_event.id}") - - return active_event diff --git a/src/aimbat/utils/_checkdata.py b/src/aimbat/utils/_checkdata.py deleted file mode 100644 index 4c54a558..00000000 --- a/src/aimbat/utils/_checkdata.py +++ /dev/null @@ -1,148 +0,0 @@ -from aimbat.logger import logger -from pysmo import Station, Event, Seismogram -from pathlib import Path - -__all__ = ["run_checks"] - - -def checkdata_station(station: Station) -> list[str]: - """Check if station information is complete. - - Args: - station: station object to test. - """ - - logger.info("Checking station information.") - - issues = list() - - try: - assert station.name is not None - except (AssertionError, Exception): - issue = "No station name found in file." - issues.append(issue) - - try: - assert station.latitude is not None - except (AssertionError, Exception): - issue = "No station latitude found in file." - issues.append(issue) - - try: - assert station.longitude is not None - except (AssertionError, Exception): - issue = "No station longitude found in file." - issues.append(issue) - - return issues - - -def checkdata_event(event: Event) -> list[str]: - """Check if event information is complete. - - Args: - event: event object to test. - """ - - logger.info("Checking event information.") - - issues = list() - - try: - assert event.latitude is not None - except (AssertionError, Exception): - issue = "No event latitude found in file." - issues.append(issue) - - try: - assert event.longitude is not None - except (AssertionError, Exception): - issue = "No event longitude found in file." - issues.append(issue) - - try: - assert event.time is not None - except (AssertionError, Exception): - issue = "No event time found in file." - issues.append(issue) - - return issues - - -def checkdata_seismogram(seismogram: Seismogram) -> list[str]: - """Check if seismogram information is complete. - - Args: - seismogram: seismogram object to test. - """ - - logger.info("Checking seismogram information.") - - issues = list() - try: - assert seismogram.data is not None - assert len(seismogram.data) > 0 - except (AssertionError, Exception): - issue = "No seismogram data found in file." - issues.append(issue) - - return issues - - -def run_checks(sacfiles: list[Path]) -> None: - """Run all checks on one or more SAC files. - - Args: - sacfiles: SAC files to test. 
- """ - - logger.info("Running all checks.") - - from pysmo.classes import SAC - - def checkmark() -> None: - print("\N{CHECK MARK}", end="") - - def crossmark() -> None: - print("\N{BALLOT X}", end="") - - all_issues = dict() - - for sacfile in sacfiles: - issues = list() - my_sac = SAC.from_file(str(sacfile)) - print(f"\n{sacfile}: ", end="") - - station_issues = checkdata_station(my_sac.station) - if len(station_issues) == 0: - checkmark() - else: - issues.extend(station_issues) - crossmark() - - event_issues = checkdata_event(my_sac.event) - if len(event_issues) == 0: - checkmark() - else: - issues.extend(event_issues) - crossmark() - - seismogram_issues = checkdata_seismogram(my_sac.seismogram) - if len(seismogram_issues) == 0: - checkmark() - else: - issues.extend(seismogram_issues) - crossmark() - - if len(issues) > 0: - all_issues[sacfile] = issues - - if len(all_issues) == 0: - print("\n\nNo issues found!") - return - - print("\n\nPlease fix the following issues before proceeding:") - for sacfile, issues in all_issues.items(): - print(f"\n file: {sacfile}:") - for issue in issues: - print(f" - {issue}") diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/baseline/tests.test_seismogram.TestSeismogramPlot.test_lib_plotseis_mpl.png b/tests/baseline/tests.test_seismogram.TestSeismogramPlot.test_lib_plotseis_mpl.png deleted file mode 100644 index 84c02e55..00000000 Binary files a/tests/baseline/tests.test_seismogram.TestSeismogramPlot.test_lib_plotseis_mpl.png and /dev/null differ diff --git a/tests/cli/test_cli_common.py b/tests/cli/test_cli_common.py deleted file mode 100644 index b0faa08f..00000000 --- a/tests/cli/test_cli_common.py +++ /dev/null @@ -1,15 +0,0 @@ -from aimbat._config import Settings -import pytest - - -def test_simple_exception( - patch_settings: Settings, capsys: pytest.CaptureFixture -) -> None: - patch_settings.log_level = "INFO" - from aimbat.app import app - - with pytest.raises(SystemExit) as e: - app(["event", "activate", "nonexistent-uuid-str"]) - captured = capsys.readouterr() - assert "╭─ Error ────────────────────" in captured.out - assert e.value.code == 1 diff --git a/tests/conftest.py b/tests/conftest.py index b079dfe9..445cfe04 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,75 +1,74 @@ -from aimbat.aimbat_types import DataType -from pysmo.classes import SAC -from sqlmodel import Session, select -from sqlalchemy import Engine -from pathlib import Path -from collections.abc import Callable, Iterator -from dataclasses import dataclass, field -from importlib import reload -from aimbat import settings -from aimbat._config import Settings -from aimbat.logger import configure_logging -import aimbat.db as db -import aimbat.core._project as project -import aimbat.core._data as data -import aimbat.core._event as event -import random -import shutil +import aimbat.db import pytest -import matplotlib.pyplot as plt import uuid +import shutil +import matplotlib.pyplot as plt +import random +import json +import os +import subprocess +from aimbat.app import app +from aimbat.aimbat_types import DataType +from aimbat.core import add_data_to_project, set_active_event, create_project +from aimbat.models import AimbatEvent +from aimbat.logger import configure_logging +from dataclasses import dataclass, field +from typing import Any, Literal +from pathlib import Path +from collections.abc import Callable, Generator, Sequence +from sqlmodel import Session, select, create_engine +from sqlalchemy 
import Engine, event + +# --------------------------------------------------------------------------- +# Constants +# --------------------------------------------------------------------------- + +_AIMBAT_LOGFILE = "aimbat_test.log" +_AIMBAT_LOG_LEVEL: Literal["DEBUG"] = "DEBUG" + + +# --------------------------------------------------------------------------- +# Test data +# --------------------------------------------------------------------------- @dataclass class TestData: + """Container for test data paths. + + Attributes: + multi_event: A list of paths to multi-event SAC files. + sacfile_good: Path to a known good SAC file. + """ + multi_event: list[Path] = field( default_factory=lambda: sorted( Path(__file__).parent.glob("assets/event_*/*.bhz") ) ) - sacfile_good = Path(__file__).parent / "assets/goodfile.sac" + sacfile_good: Path = Path(__file__).parent / "assets/goodfile.sac" TESTDATA = TestData() -# https://rednafi.com/python/patch-pydantic-settings-in-pytest/ -@pytest.fixture -def patch_settings(request: pytest.FixtureRequest) -> Iterator[Settings]: - # Make a copy of the original settings - original_settings = settings.model_copy() - - # Collect the env vars to patch - env_vars_to_patch = getattr(request, "param", {}) - - # Patch the settings to use the default values - for k, v in Settings.model_fields.items(): - setattr(settings, k, v.default) - - # Patch the settings with the parametrized env vars - for key, val in env_vars_to_patch.items(): - # Raise an error if the env var is not defined in the settings - if not hasattr(settings, key): - raise ValueError(f"Unknown setting: {key}") +# --------------------------------------------------------------------------- +# Autouse mocks +# --------------------------------------------------------------------------- - # Raise an error if the env var has an invalid type - expected_type = getattr(settings, key).__class__ - if not isinstance(val, expected_type): - raise ValueError( - f"Invalid type for {key}: {val.__class__} instead of {{expected_type}}" - ) - setattr(settings, key, val) - yield settings - - # Restore the original settings - settings.__dict__.update(original_settings.__dict__) +@pytest.fixture(autouse=True) +def patch_debug_setting(monkeypatch: pytest.MonkeyPatch) -> Generator[None, None, None]: + """Automatically patches settings to enable debug logging for tests. + Args: + monkeypatch: The pytest monkeypatch fixture. -@pytest.fixture(autouse=True) -def patch_debug_setting(patch_settings: Settings) -> Iterator[None]: - patch_settings.log_level = "DEBUG" - patch_settings.logfile = Path("aimbat_test.log") + Yields: + None + """ + monkeypatch.setattr(aimbat.settings, "logfile", _AIMBAT_LOGFILE) + monkeypatch.setattr(aimbat.settings, "log_level", _AIMBAT_LOG_LEVEL) configure_logging() yield @@ -77,6 +76,12 @@ def patch_debug_setting(patch_settings: Settings) -> Iterator[None]: @pytest.fixture(autouse=True) def mock_uuid4(monkeypatch: pytest.MonkeyPatch) -> None: + """Mocks uuid.uuid4 to produce deterministic UUIDs. + + Args: + monkeypatch: The pytest monkeypatch fixture. + """ + def make_generator() -> Callable[[], uuid.UUID]: rand = random.Random(42) return lambda: uuid.UUID(int=rand.getrandbits(128), version=4) @@ -86,147 +91,278 @@ def make_generator() -> Callable[[], uuid.UUID]: @pytest.fixture(autouse=True) def mock_show(monkeypatch: pytest.MonkeyPatch) -> None: + """Mocks plt.show to prevent plots from displaying during tests. + + Args: + monkeypatch: The pytest monkeypatch fixture. 
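+
+    With this fixture active, ``plt.show()`` is a no-op, so plotting code can
+    run headlessly in a test (illustrative sketch)::
+
+        def test_plot_runs_headless() -> None:
+            plt.plot([0, 1, 2])
+            plt.show()  # patched to do nothing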
+ """ monkeypatch.setattr(plt, "show", lambda: None) @pytest.fixture(autouse=True) -def increase_columns(monkeypatch: pytest.MonkeyPatch) -> Iterator[None]: +def increase_columns(monkeypatch: pytest.MonkeyPatch) -> Generator[None, None, None]: + """Increases the COLUMNS environment variable for wider output in tests. + + Args: + monkeypatch: The pytest monkeypatch fixture. + + Yields: + None + """ monkeypatch.setenv("COLUMNS", "1000") yield -@pytest.fixture(scope="session") -def test_data_dir( - tmp_path_factory: pytest.TempPathFactory, -) -> Iterator[Path]: - tmp_dir = Path(tmp_path_factory.mktemp("test_data")) +# --------------------------------------------------------------------------- +# File fixtures +# --------------------------------------------------------------------------- - yield tmp_dir - shutil.rmtree(tmp_dir) +@pytest.fixture() +def db_path(tmp_path: Path) -> Path: + """Path for the temporary project database file (does not exist yet). + Args: + tmp_path: The pytest tmp_path fixture. -@pytest.fixture(scope="session") -def test_data(test_data_dir: Path) -> Iterator[list[Path]]: - data_list: list[Path] = [] - for orgfile in TESTDATA.multi_event: - testfile = test_data_dir / f"{uuid.uuid4()}.sac" - shutil.copy(orgfile, testfile) - data_list.append(testfile) - yield data_list + Returns: + Path to the temporary project database file. + """ + return tmp_path / "test_project.db" + + +@pytest.fixture() +def sac_file_good(tmp_path_factory: pytest.TempPathFactory) -> Path: + """Provides a path to a temporary copy of a known good SAC file. + Args: + tmp_path_factory: The pytest tmp_path_factory fixture. -@pytest.fixture(scope="session") -def test_data_string(test_data: list[Path]) -> Iterator[list[str]]: - yield [str(data) for data in test_data] + Returns: + Path to the temporary SAC file. + """ + orgfile = TESTDATA.sacfile_good + tmpdir = tmp_path_factory.mktemp("aimbat") + testfile = tmpdir / "good.sac" + shutil.copy(orgfile, testfile) + return testfile @pytest.fixture -def fixture_empty_db( - patch_settings: Settings, -) -> Iterator[tuple[Engine, Session]]: - db_url: str = r"sqlite+pysqlite:///:memory:" - patch_settings.db_url = db_url - db.engine.dispose() - reload(db) +def multi_event_data(tmp_path_factory: pytest.TempPathFactory) -> list[Path]: + """Provides a list of paths to temporary copies of multi-event SAC files. + + Args: + tmp_path_factory: The pytest tmp_path_factory fixture. + + Returns: + A list of paths to the temporary SAC files. + """ + orgfiles = TESTDATA.multi_event + tmpdir = tmp_path_factory.mktemp("aimbat") + for orgfile in orgfiles: + testfile = tmpdir / orgfile.name + shutil.copy(orgfile, testfile) + return sorted(tmpdir.glob("*.bhz", case_sensitive=False)) + - with Session(db.engine) as session: - yield db.engine, session - db.engine.dispose() +# --------------------------------------------------------------------------- +# Database +# --------------------------------------------------------------------------- @pytest.fixture -def fixture_session_with_project_file( - tmp_path_factory: pytest.TempPathFactory, - patch_settings: Settings, -) -> Iterator[tuple[Engine, Session, Path]]: - db_file = Path(tmp_path_factory.mktemp("test_db")) / "mock.db" - db_url: str = rf"sqlite+pysqlite:///{db_file}" +def engine_from_file( + db_path: Path, monkeypatch: pytest.MonkeyPatch +) -> Generator[Engine, None, None]: + """Creates an empty project database backed by a file. + + Args: + db_path: Path to the temporary project database file. 
+ monkeypatch: The pytest monkeypatch fixture. + + Yields: + A SQLAlchemy Engine connected to the file database. + """ + db_url: str = rf"sqlite+pysqlite:///{db_path}" + engine: Engine = create_engine( + db_url, + connect_args={"check_same_thread": False}, + ) - patch_settings.db_url = db_url - patch_settings.project = db_file + @event.listens_for(engine, "connect") + def set_sqlite_pragma(dbapi_connection: Any, connection_record: Any) -> None: + cursor = dbapi_connection.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + cursor.close() - db.engine.dispose() - reload(db) - project.create_project(db.engine) + monkeypatch.setattr(aimbat.db, "engine", engine) - with Session(db.engine) as session: - yield db.engine, session, db_file - db.engine.dispose() + yield engine + engine.dispose() @pytest.fixture -def fixture_engine_session_with_project( - patch_settings: Settings, -) -> Iterator[tuple[Engine, Session]]: - """Yield a session with a new project.""" +def engine() -> Generator[Engine, None, None]: + """Creates an in memory database with a new project. + + Yields: + A SQLAlchemy Engine connected to the in-memory database with project. + """ + engine: Engine = create_engine( + "sqlite:///:memory:", + connect_args={"check_same_thread": False}, + ) - db_url: str = r"sqlite+pysqlite:///:memory:" - patch_settings.db_url = db_url + @event.listens_for(engine, "connect") + def set_sqlite_pragma(dbapi_connection: Any, connection_record: Any) -> None: + cursor = dbapi_connection.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + cursor.close() - db.engine.dispose() - reload(db) - project.create_project(db.engine) + create_project(engine) - with Session(db.engine) as session: - yield db.engine, session - db.engine.dispose() + yield engine + engine.dispose() @pytest.fixture -def fixture_session_with_data( - test_data: list[Path], patch_settings: Settings -) -> Iterator[Session]: - """Yield a session with a test data added.""" +def patched_engine( + engine: Engine, monkeypatch: pytest.MonkeyPatch +) -> Generator[Engine, None, None]: + """Monkeypatches ``aimbat.db.engine`` so CLI functions use the test database. + + Args: + engine: The SQLAlchemy Engine for the test database. + monkeypatch: The pytest monkeypatch fixture. - db_url: str = r"sqlite+pysqlite:///:memory:" - patch_settings.db_url = db_url + Yields: + The monkeypatched SQLAlchemy Engine. + """ + monkeypatch.setattr(aimbat.db, "engine", engine) + yield engine - db.engine.dispose() - reload(db) - project.create_project(db.engine) - with Session(db.engine) as session: - data.add_files_to_project(session, test_data, DataType.SAC) +@pytest.fixture() +def loaded_engine(patched_engine: Engine, multi_event_data: Sequence[Path]) -> Engine: + """A patched engine pre-populated with multi-event data and an active event. + + Args: + patched_engine: The monkeypatched SQLAlchemy Engine. + multi_event_data: Paths to temporary copies of multi-event SAC files. + + Returns: + The monkeypatched SQLAlchemy Engine with data loaded. + """ + + datasources = multi_event_data + with Session(patched_engine) as session: + add_data_to_project(session, datasources, DataType.SAC) + events = session.exec(select(AimbatEvent)).all() + lengths = [len(e.seismograms) for e in events] + set_active_event(session, events[lengths.index(max(lengths))]) + return patched_engine + + +@pytest.fixture() +def patched_session(patched_engine: Engine) -> Generator[Session, None, None]: + """A session bound to the patched engine for CLI tests. 
+ + Args: + patched_engine: The monkeypatched SQLAlchemy Engine. + + Yields: + A SQLModel Session bound to the patched engine. + """ + with Session(patched_engine) as session: yield session - db.engine.dispose() -@pytest.fixture -def fixture_engine_session_with_active_event( - patch_settings: Settings, test_data: list[Path] -) -> Iterator[tuple[Engine, Session]]: - """Yield a session with an active event.""" +@pytest.fixture() +def loaded_session(loaded_engine: Engine) -> Generator[Session, None, None]: + """A session pre-populated with multi-event data and an active event. - db_url: str = r"sqlite+pysqlite:///:memory:" - patch_settings.db_url = db_url + Args: + loaded_engine: The monkeypatched SQLAlchemy Engine with data loaded. - db.engine.dispose() - reload(db) - project.create_project(db.engine) + Yields: + A SQLModel Session with data populated. + """ + with Session(loaded_engine) as session: + yield session - with Session(db.engine) as session: - data.add_files_to_project(session, test_data, DataType.SAC) - events = session.exec(select(event.AimbatEvent)).all() - lengths = [len(e.seismograms) for e in events] - event.set_active_event(session, events[lengths.index(max(lengths))]) - yield db.engine, session - db.engine.dispose() + +# --------------------------------------------------------------------------- +# CLI +# --------------------------------------------------------------------------- @pytest.fixture() -def sac_file_good(tmp_path_factory: pytest.TempPathFactory) -> Path: - orgfile = TESTDATA.sacfile_good - tmpdir = tmp_path_factory.mktemp("aimbat") - testfile = tmpdir / "good.sac" - shutil.copy(orgfile, testfile) - return testfile +def cli() -> Callable[[str], None]: + """Returns a callable that invokes ``app()`` in-process with command tokens. + + Returns: + A callable that accepts a command string and runs it via the app. + """ + + def _run(command: str) -> None: + try: + app(command) + except SystemExit as exc: + if exc.code != 0: + raise + + return _run + + +@pytest.fixture() +def cli_json(capsys: pytest.CaptureFixture[str]) -> Callable[[str], list | dict]: + """Returns a callable that runs a ``dump`` sub-command and returns parsed JSON. + + Args: + capsys: The pytest capsys fixture. + + Returns: + A callable that accepts a command string and returns the parsed JSON output. + """ + + def _run(command: str) -> list | dict: + capsys.readouterr() # discard output from prior commands + try: + app(command) + except SystemExit as exc: + if exc.code != 0: + raise + captured = capsys.readouterr() + return json.loads(captured.out) + + return _run @pytest.fixture() -def sac_instance_good(sac_file_good: Path) -> Iterator[SAC]: - my_sac = SAC.from_file(sac_file_good) - try: - yield my_sac - finally: - del my_sac +def aimbat_subprocess( + db_path: Path, +) -> Callable[[Sequence[str]], subprocess.CompletedProcess[str]]: + """Returns a callable that runs ``aimbat `` as a subprocess against the test database. + + Args: + db_path: Path to the temporary project database file. + + Returns: + A callable that accepts a sequence of CLI arguments and returns the completed process. 
+ """ + + def _run(args: Sequence[str]) -> subprocess.CompletedProcess[str]: + env = os.environ.copy() + env["AIMBAT_DB_URL"] = f"sqlite+pysqlite:///{db_path}" + env["AIMBAT_LOGFILE"] = _AIMBAT_LOGFILE + env["AIMBAT_LOG_LEVEL"] = _AIMBAT_LOG_LEVEL + env["COLUMNS"] = "1000" + return subprocess.run( + ["uv", "run", "aimbat", *args], + capture_output=True, + text=True, + env=env, + ) + + return _run diff --git a/tests/functional/test_cli_basic_ops.py b/tests/functional/test_cli_basic_ops.py new file mode 100644 index 00000000..96a44a41 --- /dev/null +++ b/tests/functional/test_cli_basic_ops.py @@ -0,0 +1,624 @@ +"""Functional tests exercising the AIMBAT CLI. + +All commands are invoked in-process via ``app()`` with ``aimbat.db.engine`` +monkeypatched to the test fixture's in-memory database. +""" + +import pytest +from pathlib import Path +from collections.abc import Callable, Sequence +from sqlalchemy import Engine + +# =================================================================== +# Project lifecycle (in-memory) +# =================================================================== + + +@pytest.mark.cli +class TestProjectLifecycle: + """Tests for project commands against an in-memory database.""" + + def test_create_project_twice_fails( + self, + patched_engine: Engine, + cli: Callable[[str], None], + ) -> None: + """Verifies that creating a project when one already exists fails. + + Args: + patched_engine: The monkeypatched in-memory engine (project already created). + cli: The in-process CLI callable. + """ + with pytest.raises((SystemExit, RuntimeError)): + cli("project create") + + def test_project_info( + self, + patched_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that project info displays a panel for an in-memory database. + + Args: + patched_engine: The monkeypatched in-memory engine (project already created). + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("project info") + output = capsys.readouterr().out + assert ( + "Project Info" in output + ), "Output should contain the 'Project Info' panel title" + assert ( + "in-memory database" in output + ), "Output should indicate this is an in-memory database" + + def test_delete_project_succeeds_for_in_memory( + self, + patched_engine: Engine, + cli: Callable[[str], None], + ) -> None: + """Verifies that project delete completes without error for an in-memory database. + + Args: + patched_engine: The monkeypatched in-memory engine (project already created). + cli: The in-process CLI callable. 
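+
+        Note: with an in-memory database there is presumably no project file
+        on disk to remove, so the command is expected to complete quietly.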
+ """ + cli("project delete") # should not raise for in-memory + + +# =================================================================== +# Data management +# =================================================================== + + +@pytest.mark.cli +class TestDataManagement: + """Tests for adding and managing data.""" + + def test_add_data( + self, + patched_engine: Engine, + multi_event_data: Sequence[Path], + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that data can be added to the project.""" + files = " ".join(f.as_posix() for f in multi_event_data) + cli(f"data add {files} --no-progress") + events = cli_json("event dump") + assert len(events) > 0 + + def test_add_data_idempotent( + self, + loaded_engine: Engine, + multi_event_data: Sequence[Path], + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Adding the same files twice does not duplicate data.""" + events_before = cli_json("event dump") + + files = " ".join(f.as_posix() for f in multi_event_data) + cli(f"data add {files} --no-progress") + + events_after = cli_json("event dump") + assert len(events_after) == len(events_before) + + def test_data_list(self, loaded_engine: Engine, cli: Callable[[str], None]) -> None: + """Verifies that data list command runs successfully.""" + cli("data list --all") + + def test_data_dump( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that data dump returns a list of data items.""" + data = cli_json("data dump") + assert isinstance(data, list) + assert len(data) > 0 + + def test_dry_run_does_not_add( + self, + patched_engine: Engine, + multi_event_data: Sequence[Path], + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that dry-run mode does not modify the database.""" + files = " ".join(f.as_posix() for f in multi_event_data) + cli(f"data add {files} --no-progress --dry-run") + events = cli_json("event dump") + assert len(events) == 0 + + +# =================================================================== +# Event operations +# =================================================================== + + +@pytest.mark.cli +class TestEventOperations: + """Tests for event-related CLI commands.""" + + def test_event_list( + self, loaded_engine: Engine, cli: Callable[[str], None] + ) -> None: + """Verifies that event list command runs successfully.""" + cli("event list") + + def test_event_dump( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that event dump returns a list of events.""" + events = cli_json("event dump") + assert len(events) > 1 + + def test_activate_event( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that an event can be activated.""" + events = cli_json("event dump") + + inactive = [e for e in events if e["active"] is None] + assert len(inactive) > 0 + target_id = inactive[0]["id"] + + cli(f"event activate {target_id}") + + events_after = cli_json("event dump") + active = [e for e in events_after if e["active"] is True] + assert len(active) == 1 + assert active[0]["id"] == target_id + + def test_activate_switches_active( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Activating a different event deactivates the previous one.""" + events = cli_json("event dump") + ids = [e["id"] for 
e in events] + + cli(f"event activate {ids[0]}") + cli(f"event activate {ids[1]}") + + events_after = cli_json("event dump") + active = [e for e in events_after if e["active"] is True] + assert len(active) == 1 + assert active[0]["id"] == ids[1] + + def test_delete_event( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that an event can be deleted.""" + events_before = cli_json("event dump") + target_id = events_before[0]["id"] + + cli(f"event delete {target_id}") + + events_after = cli_json("event dump") + remaining_ids = [e["id"] for e in events_after] + assert target_id not in remaining_ids + assert len(events_after) == len(events_before) - 1 + + def test_activate_event_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that an event can be activated using a shortened ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + events = cli_json("event dump") + inactive = [e for e in events if e["active"] is None] + assert len(inactive) > 0 + target_id = inactive[0]["id"] + short_id = target_id[:8] + + cli(f"event activate {short_id}") + + events_after = cli_json("event dump") + active = [e for e in events_after if e["active"] is True] + assert len(active) == 1 + assert active[0]["id"] == target_id + + def test_delete_event_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that an event can be deleted using a shortened ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
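+
+        The shortened ID is a prefix of the full UUID (the first 8 characters
+        below), e.g. ``aimbat event delete 1b2c3d4e`` for an event whose ID
+        starts with ``1b2c3d4e`` (hypothetical value).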
+ """ + events_before = cli_json("event dump") + target_id = events_before[0]["id"] + short_id = target_id[:8] + + cli(f"event delete {short_id}") + + events_after = cli_json("event dump") + remaining_ids = [e["id"] for e in events_after] + assert target_id not in remaining_ids + assert len(events_after) == len(events_before) - 1 + + def test_delete_event_removes_seismograms( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that deleting an event also deletes its seismograms.""" + seis_before = cli_json("seismogram dump") + events = cli_json("event dump") + target_id = events[0]["id"] + + cli(f"event delete {target_id}") + + seis_after = cli_json("seismogram dump") + assert len(seis_after) < len(seis_before) + + +# =================================================================== +# Event parameters +# =================================================================== + + +@pytest.mark.cli +class TestEventParameters: + """Tests for event parameter CLI commands.""" + + def test_parameter_list( + self, loaded_engine: Engine, cli: Callable[[str], None] + ) -> None: + """Verifies that parameter list command runs successfully.""" + cli("event parameter list") + + def test_parameter_get_and_set( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies getting and setting event parameters.""" + cli("event parameter get completed") + assert "False" in capsys.readouterr().out + + cli("event parameter set completed true") + + cli("event parameter get completed") + assert "True" in capsys.readouterr().out + + def test_parameter_dump( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that parameter dump returns parameter data.""" + data = cli_json("event parameter dump") + assert "completed" in data + + +# =================================================================== +# Station operations +# =================================================================== + + +@pytest.mark.cli +class TestStationOperations: + """Tests for station-related CLI commands.""" + + def test_station_list( + self, loaded_engine: Engine, cli: Callable[[str], None] + ) -> None: + """Verifies that station list command runs successfully.""" + cli("station list --all") + + def test_station_dump( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that station dump returns a list of stations.""" + stations = cli_json("station dump") + assert len(stations) > 0 + + def test_delete_station( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a station can be deleted.""" + stations = cli_json("station dump") + target_id = stations[0]["id"] + + cli(f"station delete {target_id}") + + stations_after = cli_json("station dump") + assert len(stations_after) == len(stations) - 1 + + def test_delete_station_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a station can be deleted using a shortened ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
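+
+        Per the model definitions, deleting a station cascades to its
+        seismograms; this test only asserts that the station count decreases.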
+ """ + stations = cli_json("station dump") + target_id = stations[0]["id"] + short_id = target_id[:8] + + cli(f"station delete {short_id}") + + stations_after = cli_json("station dump") + assert len(stations_after) == len(stations) - 1 + + +# =================================================================== +# Seismogram operations +# =================================================================== + + +@pytest.mark.cli +class TestSeismogramOperations: + """Tests for seismogram-related CLI commands.""" + + def test_seismogram_list( + self, loaded_engine: Engine, cli: Callable[[str], None] + ) -> None: + """Verifies that seismogram list command runs successfully.""" + cli("seismogram list") + + def test_seismogram_dump( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that seismogram dump returns a list of seismograms.""" + data = cli_json("seismogram dump") + assert len(data) > 0 + + def test_delete_seismogram( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a seismogram can be deleted.""" + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + + cli(f"seismogram delete {target_id}") + + seis_after = cli_json("seismogram dump") + assert len(seis_after) == len(seis) - 1 + + def test_delete_seismogram_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a seismogram can be deleted using a shortened ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + short_id = target_id[:8] + + cli(f"seismogram delete {short_id}") + + seis_after = cli_json("seismogram dump") + assert len(seis_after) == len(seis) - 1 + + +# =================================================================== +# Snapshot lifecycle +# =================================================================== + + +@pytest.mark.cli +class TestSnapshotLifecycle: + """Tests for snapshot creation, deletion, and rollback.""" + + def test_create_snapshot( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a snapshot can be created.""" + cli("snapshot create initial") + data = cli_json("snapshot dump") + assert isinstance(data, dict) + snapshots = data["snapshots"] + assert len(snapshots) == 1 + assert snapshots[0]["comment"] == "initial" + assert len(data["event_parameters"]) == 1 + assert len(data["seismogram_parameters"]) > 0 + + def test_create_multiple_snapshots( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that multiple snapshots can be created.""" + cli("snapshot create first") + cli("snapshot create second") + data = cli_json("snapshot dump") + assert isinstance(data, dict) + snapshots = data["snapshots"] + assert len(snapshots) == 2 + comments = {s["comment"] for s in snapshots} + assert comments == {"first", "second"} + + def test_delete_snapshot( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a snapshot can be deleted.""" + cli("snapshot create to-delete") + data = cli_json("snapshot dump") + assert isinstance(data, dict) + snapshots = 
data["snapshots"] + assert len(snapshots) == 1 + + cli(f"snapshot delete {snapshots[0]['id']}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict) + assert len(data_after["snapshots"]) == 0 + + def test_delete_snapshot_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a snapshot can be deleted using a shortened ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create to-delete") + data = cli_json("snapshot dump") + assert isinstance(data, dict) + snapshots = data["snapshots"] + assert len(snapshots) == 1 + short_id = snapshots[0]["id"][:8] + + cli(f"snapshot delete {short_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict) + assert len(data_after["snapshots"]) == 0 + + def test_snapshot_list( + self, loaded_engine: Engine, cli: Callable[[str], None] + ) -> None: + """Verifies that snapshot list command runs successfully.""" + cli("snapshot create") + cli("snapshot list") + + def test_rollback_snapshot( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Rollback restores parameter values from a snapshot.""" + cli("snapshot create before-change") + + cli("event parameter set completed true") + cli("event parameter get completed") + assert "True" in capsys.readouterr().out + + data = cli_json("snapshot dump") + assert isinstance(data, dict) + cli(f"snapshot rollback {data['snapshots'][0]['id']}") + + cli("event parameter get completed") + assert "False" in capsys.readouterr().out + + def test_rollback_snapshot_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that rollback works with a shortened snapshot ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. 
+ """ + cli("snapshot create before-change") + + cli("event parameter set completed true") + cli("event parameter get completed") + assert "True" in capsys.readouterr().out + + data = cli_json("snapshot dump") + assert isinstance(data, dict) + short_id = data["snapshots"][0]["id"][:8] + cli(f"snapshot rollback {short_id}") + + cli("event parameter get completed") + assert "False" in capsys.readouterr().out + + +# =================================================================== +# Full workflow: add → delete → re-add +# =================================================================== + + +@pytest.mark.cli +class TestDataReaddWorkflow: + """Delete all data then add it back.""" + + def test_delete_all_events_and_readd( + self, + loaded_engine: Engine, + multi_event_data: Sequence[Path], + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that data can be re-added after deletion.""" + events_before = cli_json("event dump") + assert len(events_before) > 0 + + for event in events_before: + cli(f"event delete {event['id']}") + + events_empty = cli_json("event dump") + assert len(events_empty) == 0 + + seis_empty = cli_json("seismogram dump") + assert len(seis_empty) == 0 + + files = " ".join(f.as_posix() for f in multi_event_data) + cli(f"data add {files} --no-progress") + + events_after = cli_json("event dump") + assert len(events_after) == len(events_before) diff --git a/tests/functional/test_cli_parameters.py b/tests/functional/test_cli_parameters.py new file mode 100644 index 00000000..618695e1 --- /dev/null +++ b/tests/functional/test_cli_parameters.py @@ -0,0 +1,793 @@ +"""Functional tests for CLI commands that read and write event and seismogram parameters. + +All commands are invoked in-process via ``app()`` with ``aimbat.db.engine`` +monkeypatched to the test fixture's in-memory database. The ``dump`` +sub-commands are used as the source of truth for verifying parameter changes. +""" + +import pytest +from collections.abc import Callable +from sqlalchemy import Engine + +# =================================================================== +# Event parameter — get +# =================================================================== + + +@pytest.mark.cli +class TestEventParameterGet: + """Tests for ``event parameter get``.""" + + def test_get_bool_parameter( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that getting a bool parameter prints its current value. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("event parameter get completed") + assert "False" in capsys.readouterr().out, "'completed' should default to False" + + def test_get_float_parameter( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that getting a float parameter prints a numeric value. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. 
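+
+        The captured output is parsed with ``float()`` below, so any
+        non-numeric output fails the test before the range check runs.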
+ """ + cli("event parameter get min_ccnorm") + output = capsys.readouterr().out.strip() + assert output, "Expected a non-empty output for min_ccnorm" + assert float(output) >= 0.0, "min_ccnorm should be a non-negative float" + + def test_get_timedelta_parameter( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that getting a timedelta parameter prints a value ending in 's'. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("event parameter get window_pre") + output = capsys.readouterr().out.strip() + assert output.endswith( + "s" + ), f"window_pre should be printed in seconds (got '{output}')" + + def test_get_bandpass_bool_parameter( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that getting bandpass_apply prints a bool value. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("event parameter get bandpass_apply") + output = capsys.readouterr().out.strip() + assert output in ( + "True", + "False", + ), f"bandpass_apply should be True or False, got '{output}'" + + +# =================================================================== +# Event parameter — set + verify via dump +# =================================================================== + + +@pytest.mark.cli +class TestEventParameterSetBool: + """Tests for setting boolean event parameters and verifying via dump.""" + + def test_set_completed_true( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting completed=true is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + before = cli_json("event parameter dump") + assert isinstance(before, dict), "Dump should return a dict for active event" + assert before["completed"] is False, "'completed' should default to False" + + cli("event parameter set completed true") + + after = cli_json("event parameter dump") + assert isinstance(after, dict), "Dump should return a dict for active event" + assert after["completed"] is True, "'completed' should be True after being set" + + def test_set_completed_false( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting completed=false is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("event parameter set completed true") + cli("event parameter set completed false") + after = cli_json("event parameter dump") + assert isinstance(after, dict), "Dump should return a dict for active event" + assert ( + after["completed"] is False + ), "'completed' should be False after being set back" + + def test_set_bandpass_apply( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting bandpass_apply is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. 
+ cli_json: The in-process CLI JSON dump callable. + """ + before = cli_json("event parameter dump") + assert isinstance(before, dict), "Dump should return a dict for active event" + original = before["bandpass_apply"] + + cli(f"event parameter set bandpass_apply {not original}".lower()) + + after = cli_json("event parameter dump") + assert isinstance(after, dict), "Dump should return a dict for active event" + assert ( + after["bandpass_apply"] is not original + ), "'bandpass_apply' should have toggled after set" + + +@pytest.mark.cli +class TestEventParameterSetFloat: + """Tests for setting float event parameters and verifying via dump.""" + + def test_set_min_ccnorm( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting min_ccnorm is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("event parameter set min_ccnorm 0.42") + after = cli_json("event parameter dump") + assert isinstance(after, dict), "Dump should return a dict for active event" + assert after["min_ccnorm"] == pytest.approx( + 0.42 + ), "'min_ccnorm' should be 0.42 after being set" + + def test_set_bandpass_fmin( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting bandpass_fmin is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("event parameter set bandpass_fmin 0.1") + after = cli_json("event parameter dump") + assert isinstance(after, dict), "Dump should return a dict for active event" + assert after["bandpass_fmin"] == pytest.approx( + 0.1 + ), "'bandpass_fmin' should be 0.1 after being set" + + def test_set_bandpass_fmax( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting bandpass_fmax is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("event parameter set bandpass_fmax 2.0") + after = cli_json("event parameter dump") + assert isinstance(after, dict), "Dump should return a dict for active event" + assert after["bandpass_fmax"] == pytest.approx( + 2.0 + ), "'bandpass_fmax' should be 2.0 after being set" + + +# =================================================================== +# Event parameter — dump +# =================================================================== + + +@pytest.mark.cli +class TestEventParameterDump: + """Tests for ``event parameter dump``.""" + + def test_active_event_returns_dict( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the active-event dump returns a dict. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + data = cli_json("event parameter dump") + assert isinstance(data, dict), "Active-event dump should be a dict" + + def test_active_event_contains_all_parameter_keys( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that all expected parameter keys are present in the dump. 
+ + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + data = cli_json("event parameter dump") + assert isinstance(data, dict), "Active-event dump should be a dict" + for key in ( + "completed", + "min_ccnorm", + "window_pre", + "window_post", + "bandpass_apply", + "bandpass_fmin", + "bandpass_fmax", + ): + assert key in data, f"Expected key '{key}' in event parameter dump" + + def test_all_events_returns_list( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that ``--all`` returns a list of parameter dicts. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + data = cli_json("event parameter dump --all") + assert isinstance(data, list), "All-events dump should be a list" + assert len(data) > 1, "Expected parameters for more than one event" + + def test_all_events_entries_contain_parameter_keys( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that each entry in the all-events dump has the expected keys. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + data = cli_json("event parameter dump --all") + assert isinstance(data, list), "All-events dump should be a list" + for entry in data: + assert "completed" in entry, "Each entry should have 'completed' key" + assert "min_ccnorm" in entry, "Each entry should have 'min_ccnorm' key" + + def test_set_visible_in_all_events_dump( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a parameter change to the active event appears in the all-events dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("event parameter set completed true") + all_data = cli_json("event parameter dump --all") + assert isinstance(all_data, list), "All-events dump should be a list" + active_entries = [e for e in all_data if e.get("completed") is True] + assert ( + len(active_entries) == 1 + ), "Exactly one event should have completed=True after setting it" + + +# =================================================================== +# Event parameter — list +# =================================================================== + + +@pytest.mark.cli +class TestEventParameterList: + """Tests for ``event parameter list``.""" + + def test_list_produces_output( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that the list command produces output. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("event parameter list") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from event parameter list" + + def test_list_short_produces_output( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that ``--short`` produces output. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. 
+ """ + cli("event parameter list --short") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from event parameter list --short" + + def test_list_all_events_produces_output( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that ``--all`` produces output covering all events. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("event parameter list --all") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from event parameter list --all" + + +# =================================================================== +# Seismogram parameter — get +# =================================================================== + + +@pytest.mark.cli +class TestSeismogramParameterGet: + """Tests for ``seismogram parameter get``.""" + + def test_get_select_with_full_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that 'select' can be retrieved using the full seismogram ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. + """ + seis = cli_json("seismogram dump") + assert ( + isinstance(seis, list) and len(seis) > 0 + ), "Expected at least one seismogram in the dump" + target_id = seis[0]["id"] + + cli(f"seismogram parameter get {target_id} select") + output = capsys.readouterr().out.strip() + assert output in ( + "True", + "False", + ), f"'select' should be True or False, got '{output}'" + + def test_get_select_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that 'select' can be retrieved using a shortened seismogram ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. + """ + seis = cli_json("seismogram dump") + assert ( + isinstance(seis, list) and len(seis) > 0 + ), "Expected at least one seismogram in the dump" + short_id = seis[0]["id"][:8] + + cli(f"seismogram parameter get {short_id} select") + output = capsys.readouterr().out.strip() + assert output in ( + "True", + "False", + ), f"'select' should be True or False, got '{output}'" + + def test_get_flip_default_is_false( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that 'flip' defaults to False. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. + """ + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + + cli(f"seismogram parameter get {target_id} flip") + assert "False" in capsys.readouterr().out, "'flip' should default to False" + + def test_get_select_default_is_true( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that 'select' defaults to True. 
+ + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. + """ + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + + cli(f"seismogram parameter get {target_id} select") + assert "True" in capsys.readouterr().out, "'select' should default to True" + + +# =================================================================== +# Seismogram parameter — set + verify via dump +# =================================================================== + + +@pytest.mark.cli +class TestSeismogramParameterSet: + """Tests for setting seismogram parameters and verifying via dump.""" + + def test_set_select_false_with_full_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting select=false is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + seis = cli_json("seismogram dump") + assert ( + isinstance(seis, list) and len(seis) > 0 + ), "Expected at least one seismogram in the dump" + target_id = seis[0]["id"] + + cli(f"seismogram parameter set {target_id} select false") + + params = cli_json("seismogram parameter dump") + assert isinstance(params, list), "Seismogram parameter dump should be a list" + target_params = next(p for p in params if p["seismogram_id"] == target_id) + assert ( + target_params["select"] is False + ), f"'select' should be False for seismogram {target_id} after being set" + + def test_set_select_false_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting select=false via a shortened ID is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + short_id = target_id[:8] + + cli(f"seismogram parameter set {short_id} select false") + + params = cli_json("seismogram parameter dump") + assert isinstance(params, list), "Seismogram parameter dump should be a list" + target_params = next(p for p in params if p["seismogram_id"] == target_id) + assert ( + target_params["select"] is False + ), f"'select' should be False for seismogram {target_id} after being set via short ID" + + def test_set_flip_true_with_full_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting flip=true is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + + cli(f"seismogram parameter set {target_id} flip true") + + params = cli_json("seismogram parameter dump") + assert isinstance(params, list), "Seismogram parameter dump should be a list" + target_params = next(p for p in params if p["seismogram_id"] == target_id) + assert ( + target_params["flip"] is True + ), f"'flip' should be True for seismogram {target_id} after being set" + + def test_set_flip_true_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting flip=true via a shortened ID is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + short_id = target_id[:8] + + cli(f"seismogram parameter set {short_id} flip true") + + params = cli_json("seismogram parameter dump") + assert isinstance(params, list), "Seismogram parameter dump should be a list" + target_params = next(p for p in params if p["seismogram_id"] == target_id) + assert ( + target_params["flip"] is True + ), f"'flip' should be True for seismogram {target_id} after being set via short ID" + + def test_set_does_not_affect_other_seismograms( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that changing one seismogram's parameter does not affect others. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + params_before = cli_json("seismogram parameter dump") + assert isinstance( + params_before, list + ), "Seismogram parameter dump should be a list" + assert ( + len(params_before) > 1 + ), "Need at least two seismograms in the active event for this test" + target_id = params_before[0]["seismogram_id"] + other_id = params_before[1]["seismogram_id"] + other_select_before = params_before[1]["select"] + + cli(f"seismogram parameter set {target_id} select false") + + params_after = cli_json("seismogram parameter dump") + assert isinstance( + params_after, list + ), "Seismogram parameter dump should be a list" + other_select_after = next( + p["select"] for p in params_after if p["seismogram_id"] == other_id + ) + assert ( + other_select_after == other_select_before + ), "Changing one seismogram's 'select' should not affect another's" + + +# =================================================================== +# Seismogram parameter — dump +# =================================================================== + + +@pytest.mark.cli +class TestSeismogramParameterDump: + """Tests for ``seismogram parameter dump``.""" + + def test_returns_list( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the dump returns a list of parameter dicts. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. 
+ """ + data = cli_json("seismogram parameter dump") + assert isinstance(data, list), "Seismogram parameter dump should be a list" + assert len(data) > 0, "Expected at least one entry in the parameter dump" + + def test_entries_contain_expected_keys( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that each entry contains the expected parameter keys. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + data = cli_json("seismogram parameter dump") + assert isinstance(data, list), "Seismogram parameter dump should be a list" + for entry in data: + for key in ("select", "flip", "t1", "seismogram_id"): + assert ( + key in entry + ), f"Expected key '{key}' in seismogram parameter dump entry" + + def test_all_events_returns_more_entries( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that ``--all`` returns at least as many entries as the active-event dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + active_data = cli_json("seismogram parameter dump") + all_data = cli_json("seismogram parameter dump --all") + assert isinstance( + active_data, list + ), "Active-event seismogram parameter dump should be a list" + assert isinstance( + all_data, list + ), "All-events seismogram parameter dump should be a list" + assert len(all_data) >= len( + active_data + ), "--all should return at least as many entries as the active-event dump" + + def test_count_matches_seismogram_dump( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the parameter dump entry count matches the seismogram count. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + seis = cli_json("seismogram dump") + params = cli_json("seismogram parameter dump --all") + assert isinstance(seis, list), "Seismogram dump should be a list" + assert isinstance(params, list), "Parameter dump should be a list" + assert len(params) == len( + seis + ), "One parameter entry should exist per seismogram" + + +# =================================================================== +# Seismogram parameter — list +# =================================================================== + + +@pytest.mark.cli +class TestSeismogramParameterList: + """Tests for ``seismogram parameter list``.""" + + def test_list_produces_output( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that the list command produces output. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("seismogram parameter list") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from seismogram parameter list" + + def test_list_short_produces_output( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that ``--short`` produces output. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. 
+ """ + cli("seismogram parameter list --short") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from seismogram parameter list --short" diff --git a/tests/functional/test_cli_project.py b/tests/functional/test_cli_project.py new file mode 100644 index 00000000..50107a8c --- /dev/null +++ b/tests/functional/test_cli_project.py @@ -0,0 +1,72 @@ +"""Functional tests for AIMBAT project CLI commands that require a real file. + +These tests are run via subprocess so that the engine lifecycle (create → delete) +operates on an actual database file rather than an in-memory database. +""" + +import subprocess +import pytest +from pathlib import Path +from collections.abc import Callable, Sequence + + +@pytest.mark.slow +@pytest.mark.cli +class TestProjectLifecycleWithFile: + """Tests for project creation and deletion against a real database file.""" + + def test_create_project( + self, + aimbat_subprocess: Callable[[Sequence[str]], subprocess.CompletedProcess[str]], + db_path: Path, + ) -> None: + """Verifies that a new project database file is created.""" + result = aimbat_subprocess(["project", "create"]) + assert result.returncode == 0, result.stderr + assert db_path.exists(), "Database file should exist after project create" + + def test_create_project_twice_fails( + self, + aimbat_subprocess: Callable[[Sequence[str]], subprocess.CompletedProcess[str]], + ) -> None: + """Verifies that creating a project when one already exists fails.""" + aimbat_subprocess(["project", "create"]) + result = aimbat_subprocess(["project", "create"]) + assert result.returncode != 0, "Second project create should fail" + + def test_project_info( + self, + aimbat_subprocess: Callable[[Sequence[str]], subprocess.CompletedProcess[str]], + ) -> None: + """Verifies that project info displays a panel after creation.""" + aimbat_subprocess(["project", "create"]) + result = aimbat_subprocess(["project", "info"]) + assert result.returncode == 0, result.stderr + assert ( + "Project Info" in result.stdout + ), "Output should contain the 'Project Info' panel title" + + def test_project_info_shows_file_path( + self, + aimbat_subprocess: Callable[[Sequence[str]], subprocess.CompletedProcess[str]], + db_path: Path, + ) -> None: + """Verifies that project info includes the database file path.""" + aimbat_subprocess(["project", "create"]) + result = aimbat_subprocess(["project", "info"]) + assert ( + db_path.name in result.stdout + ), "Output should contain the database filename" + + def test_delete_project( + self, + aimbat_subprocess: Callable[[Sequence[str]], subprocess.CompletedProcess[str]], + db_path: Path, + ) -> None: + """Verifies that the project database file is removed after deletion.""" + aimbat_subprocess(["project", "create"]) + result = aimbat_subprocess(["project", "delete"]) + assert result.returncode == 0, result.stderr + assert ( + not db_path.exists() + ), "Database file should be absent after project delete" diff --git a/tests/functional/test_cli_sampledata.py b/tests/functional/test_cli_sampledata.py new file mode 100644 index 00000000..f00cd48f --- /dev/null +++ b/tests/functional/test_cli_sampledata.py @@ -0,0 +1,196 @@ +"""Functional tests for the AIMBAT sampledata CLI commands. + +All commands are invoked in-process via ``app()`` with +``aimbat.settings.sampledata_dir`` monkeypatched to a temporary directory. +A retry helper re-attempts the download up to 3 times to tolerate transient +network issues. 
+""" + +import pytest +from collections.abc import Callable +from pathlib import Path + +import aimbat._config as _config + +_MAX_RETRIES = 3 + + +# =================================================================== +# Fixtures +# =================================================================== + + +@pytest.fixture() +def sampledata_dir( + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, +) -> Path: + """Patches ``aimbat.settings.sampledata_dir`` to a temporary directory. + + Args: + tmp_path: The pytest tmp_path fixture. + monkeypatch: The pytest monkeypatch fixture. + + Returns: + Path to the temporary sample data directory. + """ + target = tmp_path / "sample-data" + monkeypatch.setattr(_config.settings, "sampledata_dir", target) + return target + + +def _run_with_retries( + cli: Callable[[str], None], + command: str, + retries: int = _MAX_RETRIES, +) -> None: + """Runs a CLI command, retrying up to ``retries`` times on failure. + + Args: + cli: The in-process CLI callable. + command: The command string to run. + retries: Maximum number of attempts. + + Raises: + Exception: If all attempts fail. + """ + last_exc: Exception | None = None + for _ in range(retries): + try: + cli(command) + return + except Exception as exc: + last_exc = exc + raise last_exc # type: ignore[misc] + + +# =================================================================== +# Download +# =================================================================== + + +@pytest.mark.slow +class TestSampledataDownload: + """Tests for ``utils sampledata download``.""" + + def test_download_creates_files( + self, + sampledata_dir: Path, + cli: Callable[[str], None], + ) -> None: + """Verifies that download creates files inside the sampledata directory. + + Args: + sampledata_dir: Path to the temporary sample data directory. + cli: The in-process CLI callable. + """ + _run_with_retries(cli, "utils sampledata download") + assert ( + sampledata_dir.exists() + ), "Sample data directory should exist after download" + assert any( + sampledata_dir.rglob("*") + ), "Sample data directory should contain at least one file after download" + + def test_download_creates_seismogram_files( + self, + sampledata_dir: Path, + cli: Callable[[str], None], + ) -> None: + """Verifies that the download includes BHZ seismogram data files. + + Args: + sampledata_dir: Path to the temporary sample data directory. + cli: The in-process CLI callable. + """ + _run_with_retries(cli, "utils sampledata download") + bhz_files = list(sampledata_dir.rglob("*BHZ")) + assert ( + len(bhz_files) > 0 + ), "Expected at least one BHZ seismogram file in the downloaded sample data" + + def test_download_twice_fails_without_force( + self, + sampledata_dir: Path, + cli: Callable[[str], None], + ) -> None: + """Verifies that a second download without --force raises FileExistsError. + + Args: + sampledata_dir: Path to the temporary sample data directory. + cli: The in-process CLI callable. + """ + _run_with_retries(cli, "utils sampledata download") + assert sampledata_dir.exists(), "Directory should exist after first download" + + with pytest.raises((SystemExit, FileExistsError)): + cli("utils sampledata download") + + def test_download_force_overwrites( + self, + sampledata_dir: Path, + cli: Callable[[str], None], + ) -> None: + """Verifies that --force re-downloads and replaces existing sample data. + + Args: + sampledata_dir: Path to the temporary sample data directory. + cli: The in-process CLI callable. 
+ """ + _run_with_retries(cli, "utils sampledata download") + assert sampledata_dir.exists(), "Directory should exist after first download" + + _run_with_retries(cli, "utils sampledata download --force") + assert ( + sampledata_dir.exists() + ), "Directory should still exist after force re-download" + assert any( + sampledata_dir.rglob("*") + ), "Directory should contain files after force re-download" + + +# =================================================================== +# Delete +# =================================================================== + + +@pytest.mark.slow +class TestSampledataDelete: + """Tests for ``utils sampledata delete``.""" + + def test_delete_removes_directory( + self, + sampledata_dir: Path, + cli: Callable[[str], None], + ) -> None: + """Verifies that the sample data directory is removed after delete. + + Args: + sampledata_dir: Path to the temporary sample data directory. + cli: The in-process CLI callable. + """ + _run_with_retries(cli, "utils sampledata download") + assert sampledata_dir.exists(), "Directory should exist before delete" + + cli("utils sampledata delete") + assert ( + not sampledata_dir.exists() + ), "Sample data directory should be absent after delete" + + def test_download_after_delete_succeeds( + self, + sampledata_dir: Path, + cli: Callable[[str], None], + ) -> None: + """Verifies that sample data can be re-downloaded after deletion. + + Args: + sampledata_dir: Path to the temporary sample data directory. + cli: The in-process CLI callable. + """ + _run_with_retries(cli, "utils sampledata download") + cli("utils sampledata delete") + assert not sampledata_dir.exists(), "Directory should be absent after delete" + + _run_with_retries(cli, "utils sampledata download") + assert sampledata_dir.exists(), "Directory should exist after re-downloading" diff --git a/tests/functional/test_cli_snapshots.py b/tests/functional/test_cli_snapshots.py new file mode 100644 index 00000000..4e05ca17 --- /dev/null +++ b/tests/functional/test_cli_snapshots.py @@ -0,0 +1,574 @@ +"""Functional tests for the AIMBAT snapshot CLI commands. + +All commands are invoked in-process via ``app()`` with ``aimbat.db.engine`` +monkeypatched to the test fixture's in-memory database. The ``snapshot dump`` +JSON output is used as the ground truth for ID verification after mutations. +""" + +import pytest +from collections.abc import Callable +from sqlalchemy import Engine + +# =================================================================== +# Snapshot creation +# =================================================================== + + +@pytest.mark.cli +class TestSnapshotCreate: + """Tests for the ``snapshot create`` CLI command.""" + + def test_create_without_comment( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a snapshot is created with a null comment by default. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + cli("snapshot create") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert len(data["snapshots"]) == 1, "Expected exactly one snapshot" + assert ( + data["snapshots"][0]["comment"] is None + ), "Comment should be None when not provided" + + def test_create_with_comment( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the comment is stored when provided. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create my-comment") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert ( + data["snapshots"][0]["comment"] == "my-comment" + ), "Comment should match the value passed to create" + + def test_create_captures_event_parameters( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that one event parameter snapshot is created per snapshot. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert ( + len(data["event_parameters"]) == 1 + ), "Expected one event parameter snapshot per snapshot" + + def test_create_captures_seismogram_parameters( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that seismogram parameter snapshots are created. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert ( + len(data["seismogram_parameters"]) > 0 + ), "Expected at least one seismogram parameter snapshot" + + def test_create_multiple_snapshots( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that multiple snapshots accumulate correctly. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create first") + cli("snapshot create second") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert len(data["snapshots"]) == 2, "Expected two snapshots" + assert ( + len(data["event_parameters"]) == 2 + ), "Expected two event parameter snapshots" + comments = {s["comment"] for s in data["snapshots"]} + assert comments == { + "first", + "second", + }, "Both comments should be present in the dump" + + +# =================================================================== +# Snapshot deletion +# =================================================================== + + +@pytest.mark.cli +class TestSnapshotDelete: + """Tests for the ``snapshot delete`` CLI command. + + Uses IDs obtained from ``snapshot dump`` to verify complete removal of + the snapshot and all related child records. 
+ """ + + def test_delete_removes_snapshot( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the snapshot ID is absent from the dump after deletion. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data_before = cli_json("snapshot dump") + assert isinstance(data_before, dict), "Dump should return a dict" + snapshot_id = data_before["snapshots"][0]["id"] + + cli(f"snapshot delete {snapshot_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict), "Dump should return a dict" + remaining_ids = [s["id"] for s in data_after["snapshots"]] + assert ( + snapshot_id not in remaining_ids + ), f"Snapshot {snapshot_id} should be absent after deletion" + + def test_delete_removes_event_parameter_snapshot( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the related event parameter snapshot is removed after deletion. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data_before = cli_json("snapshot dump") + assert isinstance(data_before, dict), "Dump should return a dict" + snapshot_id = data_before["snapshots"][0]["id"] + event_param_ids = {ep["id"] for ep in data_before["event_parameters"]} + + cli(f"snapshot delete {snapshot_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict), "Dump should return a dict" + remaining_event_param_ids = {ep["id"] for ep in data_after["event_parameters"]} + assert event_param_ids.isdisjoint( + remaining_event_param_ids + ), f"Event parameter snapshot IDs {event_param_ids} should all be absent after deletion" + + def test_delete_removes_seismogram_parameter_snapshots( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that all related seismogram parameter snapshots are removed after deletion. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data_before = cli_json("snapshot dump") + assert isinstance(data_before, dict), "Dump should return a dict" + snapshot_id = data_before["snapshots"][0]["id"] + seis_param_ids = {sp["id"] for sp in data_before["seismogram_parameters"]} + assert ( + len(seis_param_ids) > 0 + ), "There should be seismogram parameter snapshots before deletion" + + cli(f"snapshot delete {snapshot_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict), "Dump should return a dict" + remaining_seis_param_ids = { + sp["id"] for sp in data_after["seismogram_parameters"] + } + assert seis_param_ids.isdisjoint( + remaining_seis_param_ids + ), f"Seismogram parameter snapshot IDs {seis_param_ids} should all be absent after deletion" + + def test_delete_one_of_two_snapshots_leaves_other_intact( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that deleting one snapshot does not affect the other. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. 
+ cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create first") + cli("snapshot create second") + data_before = cli_json("snapshot dump") + assert isinstance(data_before, dict), "Dump should return a dict" + first_id = next( + s["id"] for s in data_before["snapshots"] if s["comment"] == "first" + ) + second_id = next( + s["id"] for s in data_before["snapshots"] if s["comment"] == "second" + ) + + cli(f"snapshot delete {first_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict), "Dump should return a dict" + remaining_ids = [s["id"] for s in data_after["snapshots"]] + assert ( + first_id not in remaining_ids + ), f"Deleted snapshot {first_id} should be absent" + assert ( + second_id in remaining_ids + ), f"Surviving snapshot {second_id} should still be present" + + def test_delete_snapshot_with_short_id_removes_all_related( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies deletion via short ID removes the snapshot and all related records. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data_before = cli_json("snapshot dump") + assert isinstance(data_before, dict), "Dump should return a dict" + snapshot_id = data_before["snapshots"][0]["id"] + short_id = snapshot_id[:8] + event_param_ids = {ep["id"] for ep in data_before["event_parameters"]} + seis_param_ids = {sp["id"] for sp in data_before["seismogram_parameters"]} + + cli(f"snapshot delete {short_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict), "Dump should return a dict" + remaining_snapshot_ids = [s["id"] for s in data_after["snapshots"]] + remaining_event_param_ids = {ep["id"] for ep in data_after["event_parameters"]} + remaining_seis_param_ids = { + sp["id"] for sp in data_after["seismogram_parameters"] + } + assert ( + snapshot_id not in remaining_snapshot_ids + ), f"Snapshot {snapshot_id} should be absent after deletion via short ID" + assert event_param_ids.isdisjoint( + remaining_event_param_ids + ), f"Event parameter snapshot IDs {event_param_ids} should all be absent" + assert seis_param_ids.isdisjoint( + remaining_seis_param_ids + ), f"Seismogram parameter snapshot IDs {seis_param_ids} should all be absent" + + +# =================================================================== +# Snapshot rollback +# =================================================================== + + +@pytest.mark.cli +class TestSnapshotRollback: + """Tests for the ``snapshot rollback`` CLI command.""" + + def test_rollback_restores_event_parameter( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that rollback restores a previously changed event parameter. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. 
+ """ + cli("snapshot create before-change") + + cli("event parameter set completed true") + cli("event parameter get completed") + assert ( + "True" in capsys.readouterr().out + ), "Parameter should read True after being set" + + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + snapshot_id = data["snapshots"][0]["id"] + + cli(f"snapshot rollback {snapshot_id}") + + cli("event parameter get completed") + assert ( + "False" in capsys.readouterr().out + ), "Parameter should be restored to False after rollback" + + def test_rollback_restores_event_parameter_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that rollback restores a parameter when given a shortened snapshot ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. + """ + cli("snapshot create before-change") + + cli("event parameter set completed true") + cli("event parameter get completed") + assert ( + "True" in capsys.readouterr().out + ), "Parameter should read True after being set" + + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + short_id = data["snapshots"][0]["id"][:8] + + cli(f"snapshot rollback {short_id}") + + cli("event parameter get completed") + assert ( + "False" in capsys.readouterr().out + ), "Parameter should be restored to False after rollback via short ID" + + def test_rollback_does_not_delete_snapshot( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that rolling back leaves the snapshot itself in place. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data_before = cli_json("snapshot dump") + assert isinstance(data_before, dict), "Dump should return a dict" + snapshot_id = data_before["snapshots"][0]["id"] + + cli(f"snapshot rollback {snapshot_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict), "Dump should return a dict" + remaining_ids = [s["id"] for s in data_after["snapshots"]] + assert ( + snapshot_id in remaining_ids + ), "Snapshot should still exist after rollback" + + +# =================================================================== +# Snapshot dump +# =================================================================== + + +@pytest.mark.cli +class TestSnapshotDump: + """Tests for the ``snapshot dump`` CLI command.""" + + def test_dump_empty_returns_empty_lists( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the dump is empty when no snapshots have been created. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. 
+ """ + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert data["snapshots"] == [], "Snapshots list should be empty" + assert data["event_parameters"] == [], "Event parameters list should be empty" + assert ( + data["seismogram_parameters"] == [] + ), "Seismogram parameters list should be empty" + + def test_dump_contains_expected_keys( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the dump dict contains the three expected top-level keys. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert "snapshots" in data, "Dump should contain 'snapshots' key" + assert "event_parameters" in data, "Dump should contain 'event_parameters' key" + assert ( + "seismogram_parameters" in data + ), "Dump should contain 'seismogram_parameters' key" + + def test_dump_all_events_includes_active( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that ``--all`` includes at least the active event's snapshots. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + active_data = cli_json("snapshot dump") + all_data = cli_json("snapshot dump --all") + assert isinstance(active_data, dict), "Active dump should return a dict" + assert isinstance(all_data, dict), "All-events dump should return a dict" + assert len(all_data["snapshots"]) >= len( + active_data["snapshots"] + ), "--all should return at least as many snapshots as the active-event dump" + + def test_dump_snapshot_ids_are_consistent( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that snapshot IDs referenced in event/seismogram params match the snapshots list. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + snapshot_ids = {s["id"] for s in data["snapshots"]} + for ep in data["event_parameters"]: + assert ( + ep["snapshot_id"] in snapshot_ids + ), f"Event parameter snapshot_id {ep['snapshot_id']} not in snapshots list" + for sp in data["seismogram_parameters"]: + assert ( + sp["snapshot_id"] in snapshot_ids + ), f"Seismogram parameter snapshot_id {sp['snapshot_id']} not in snapshots list" + + +# =================================================================== +# Snapshot list +# =================================================================== + + +@pytest.mark.cli +class TestSnapshotList: + """Tests for the ``snapshot list`` CLI command.""" + + def test_list_active_event( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that the list command produces output for the active event. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. 
+ """ + cli("snapshot create") + cli("snapshot list") + assert len(capsys.readouterr().out) > 0, "Expected output from snapshot list" + + def test_list_all_events( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that ``--all`` produces output for all events. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("snapshot create") + cli("snapshot list --all") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from snapshot list --all" + + def test_list_short( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that ``--short`` produces output. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("snapshot create") + cli("snapshot list --short") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from snapshot list --short" diff --git a/tests/integration/test_active_event.py b/tests/integration/test_active_event.py new file mode 100644 index 00000000..1aa52330 --- /dev/null +++ b/tests/integration/test_active_event.py @@ -0,0 +1,115 @@ +"""Integration tests for managing the active event in the database.""" + +import pytest +import uuid +from aimbat.core import set_active_event, set_active_event_by_id, get_active_event +from aimbat.models import AimbatEvent +from sqlmodel import Session, select +from sqlalchemy.exc import NoResultFound + +# ----------------------------------------------------------------------------- +# Do all tests with the session fixture that has multi_event data pre-loaded +# ----------------------------------------------------------------------------- + + +@pytest.fixture +def session(loaded_session: Session) -> Session: + """Provides a session with multi_event data pre-loaded. + + Args: + loaded_session (Session): The session fixture with data. + + Returns: + Session: The database session. + """ + return loaded_session + + +class TestActiveEvent: + """Tests for retrieving and switching the active event.""" + + def test_get(self, session: Session) -> None: + """Verifies that `get_active_event` returns the event marked as active in the DB. + + Args: + session (Session): The database session. + """ + active_event = session.exec( + select(AimbatEvent).where(AimbatEvent.active == 1) + ).one() + assert active_event == get_active_event(session) + + def test_switch(self, session: Session) -> None: + """Verifies switching the active event using an event object. + + Args: + session (Session): The database session. + """ + active_event = get_active_event(session) + assert active_event is not None, "expected an active event in the test data" + + all_events = list(session.exec(select(AimbatEvent)).all()) + assert len(all_events) > 1, "expected multiple events in the test data" + + all_events.remove(active_event) + new_active_event = all_events.pop() + assert ( + new_active_event != active_event + ), "expected a different event to switch to" + + set_active_event(session, new_active_event) + assert get_active_event(session) == new_active_event + + def test_switch_by_id(self, session: Session) -> None: + """Verifies switching the active event using an event ID. + + Args: + session (Session): The database session. 
+ """ + active_event = get_active_event(session) + event_ids = list(session.exec(select(AimbatEvent.id)).all()) + + event_ids.remove(active_event.id) + new_active_event_id = event_ids.pop() + assert ( + new_active_event_id != active_event.id + ), "expected a different event id to switch to" + + set_active_event_by_id(session, new_active_event_id) + + assert ( + get_active_event(session).id == new_active_event_id + ), "expected the active event to switch to the new event by id" + + def test_switch_by_id_invalid(self, session: Session) -> None: + """Verifies that switching the active event using an invalid event ID raises an error.""" + + new_uuid = uuid.uuid4() + assert ( + len( + session.exec( + select(AimbatEvent).where(AimbatEvent.id == new_uuid) + ).all() + ) + == 0 + ), "expected no event with the generated UUID in the test data" + + with pytest.raises(ValueError): + set_active_event_by_id(session, uuid.uuid4()) + + def test_get_active_event_no_active(self, session: Session) -> None: + """Verifies that `get_active_event` returns None if no event is marked as active. + + Args: + session (Session): The database session. + """ + active_event = get_active_event(session) + assert active_event is not None, "expected an active event in the test data" + active_event.active = None + assert ( + session.exec(select(AimbatEvent).where(AimbatEvent.active == 1)).first() + is None + ), "expected no active event in the database after deactivating" + + with pytest.raises(NoResultFound): + get_active_event(session) diff --git a/tests/integration/test_data_io.py b/tests/integration/test_data_io.py new file mode 100644 index 00000000..20abdc34 --- /dev/null +++ b/tests/integration/test_data_io.py @@ -0,0 +1,327 @@ +"""Integration tests for adding data (SAC files) to the project.""" + +import pytest +import json +from aimbat.core import ( + add_data_to_project, + get_data_for_active_event, + print_data_table, + dump_data_table_to_json, +) +from aimbat.aimbat_types import DataType +from aimbat.models import AimbatDataSource, AimbatEvent, AimbatSeismogram +from pysmo.classes import SAC +from pathlib import Path +from sqlmodel import Session, select +from pydantic import ValidationError +from collections.abc import Generator + + +class TestAddDataToProject: + @pytest.fixture + def session(self, patched_session: Session) -> Generator[Session, None, None]: + """Provides a database session for tests. + + Args: + patched_session (Session): A patched SQLAlchemy session fixture. + """ + yield patched_session + + def test_add_single_sac_file(self, sac_file_good: Path, session: Session) -> None: + """Verifies adding a single valid SAC file to the project. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + datasource = session.exec(select(AimbatDataSource.sourcename)).all() + assert len(datasource) == 0, "Expected no data sources before adding files." + + # do this 2 times to verify we can only add the same file once and that nothing changes on the second attempt + for _ in range(2): + add_data_to_project( + session, + [sac_file_good], + data_type=DataType.SAC, + ) + seismogram_filename = session.exec( + select(AimbatDataSource.sourcename) + ).one() + assert seismogram_filename == str(sac_file_good) + + def test_add_multiple_sac_files( + self, multi_event_data: list[Path], session: Session + ) -> None: + """Verifies adding multiple SAC files to the project at once. + + Args: + multi_event_data (list[Path]): List of paths to SAC files. 
+ session (Session): Database session. + """ + datasource = session.exec(select(AimbatDataSource.sourcename)).all() + assert len(datasource) == 0, "Expected no data sources before adding files." + + add_data_to_project( + session, + multi_event_data, + data_type=DataType.SAC, + ) + + seismogram_filenames = session.exec(select(AimbatDataSource.sourcename)).all() + assert sorted(seismogram_filenames) == sorted( + [str(path) for path in multi_event_data] + ), "Expected all files from multi_event to be added as data sources." + + def test_add_nonexistent_file(self, session: Session) -> None: + """Verifies that adding a non-existent file raises FileNotFoundError. + + Args: + session (Session): Database session. + """ + non_existent_file = Path("this_file_does_not_exist.sac") + with pytest.raises(FileNotFoundError): + add_data_to_project( + session, + [non_existent_file], + data_type=DataType.SAC, + ) + + def test_add_mixed_valid_and_invalid_files( + self, sac_file_good: Path, session: Session + ) -> None: + """Verifies that adding a mix of valid and invalid files raises an error and adds nothing. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + non_existent_file = Path("this_file_does_not_exist.sac") + with pytest.raises(FileNotFoundError): + add_data_to_project( + session, + [sac_file_good, non_existent_file], + data_type=DataType.SAC, + ) + + # Verify that the valid file was not added due to the error + datasource = session.exec(select(AimbatDataSource.sourcename)).all() + assert ( + len(datasource) == 0 + ), "Expected no data sources to be added when an error occurs." + + def test_add_sac_file_with_missing_pick( + self, sac_file_good: Path, session: Session + ) -> None: + """Verifies that adding a SAC file missing required pick information raises ValidationError. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + sac = SAC.from_file(sac_file_good) + sac.timestamps.t0 = None + sac.write(sac_file_good) + with pytest.raises(ValidationError): + add_data_to_project( + session, + [sac_file_good], + data_type=DataType.SAC, + ) + + def test_dry_run_all_new( + self, + multi_event_data: list[Path], + session: Session, + capsys: pytest.CaptureFixture, + ) -> None: + """Verifies dry run behavior when all data is new. + + Args: + multi_event_data (list[Path]): List of paths to SAC files. + session (Session): Database session. + capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr. + """ + add_data_to_project( + session, + multi_event_data, + data_type=DataType.SAC, + dry_run=True, + ) + + datasource = session.exec(select(AimbatDataSource.sourcename)).all() + assert len(datasource) == 0, "Expected no data sources after dry run." + + captured = capsys.readouterr() + assert "Dry Run: Data to be added" in captured.out + n = len(multi_event_data) + assert f"{n} seismogram(s) added, 0 skipped" in captured.out + assert "0 skipped" in captured.out + + def test_dry_run_all_skipped( + self, + multi_event_data: list[Path], + session: Session, + capsys: pytest.CaptureFixture, + ) -> None: + """Verifies dry run behavior when all data already exists (should be skipped). + + Args: + multi_event_data (list[Path]): List of paths to SAC files. + session (Session): Database session. + capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr. 
+        """
+        add_data_to_project(
+            session,
+            multi_event_data,
+            data_type=DataType.SAC,
+        )
+        capsys.readouterr()  # discard output from the real add
+
+        add_data_to_project(
+            session,
+            multi_event_data,
+            data_type=DataType.SAC,
+            dry_run=True,
+        )
+
+        captured = capsys.readouterr()
+        assert "Dry Run: Data to be added" in captured.out
+        n = len(multi_event_data)
+        assert f"0 station(s) added, {n} skipped" in captured.out
+        assert f"0 event(s) added, {n} skipped" in captured.out
+        assert f"0 seismogram(s) added, {n} skipped" in captured.out
+
+
+class TestGetDataSources:
+    """Tests for retrieving, serialising, and printing data sources."""
+
+    @pytest.fixture
+    def session(self, loaded_session: Session) -> Generator[Session, None, None]:
+        """Provides a database session with pre-loaded data sources for tests.
+
+        Args:
+            loaded_session (Session): A SQLAlchemy session fixture with pre-loaded data sources.
+        """
+        yield loaded_session
+
+    def test_get_data_sources_for_active_event(self, session: Session) -> None:
+        """Verifies that get_data_for_active_event returns the expected data sources.
+
+        Args:
+            session (Session): Database session.
+        """
+        data_sources = get_data_for_active_event(session)
+        assert len(data_sources) != 0, "Expected data sources for the active event."
+        assert all(
+            isinstance(ds, AimbatDataSource) for ds in data_sources
+        ), "expected all items to be AimbatDataSource instances"
+
+    def test_dump_data_table_to_json(self, session: Session) -> None:
+        """Verifies that dump_data_table_to_json returns a JSON string with expected content.
+
+        Args:
+            session (Session): Database session.
+        """
+        json_str = dump_data_table_to_json(session)
+        json_data = json.loads(json_str)
+        assert isinstance(json_data, list), "Expected JSON data to be a list."
+
+        expected_ids = map(str, session.exec(select(AimbatDataSource.id)).all())
+        returned_ids = [item["id"] for item in json_data]
+        assert set(expected_ids) == set(returned_ids), "Expected IDs to match."
+
+    def test_print_data_table_for_all_events(
+        self, session: Session, capsys: pytest.CaptureFixture
+    ) -> None:
+        """Verifies that print_data_table prints all data sources when all_events=True.
+
+        Args:
+            session (Session): Database session.
+            capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr.
+        """
+        print_data_table(session, short=False, all_events=True)
+
+        expected_ids = session.exec(select(AimbatDataSource.id)).all()
+
+        captured = capsys.readouterr()
+        assert "Data sources for all events" in captured.out
+        for ds_id in expected_ids:
+            assert (
+                str(ds_id) in captured.out
+            ), "expected data source ID to be in the output table"
+
+    def test_print_data_table_for_all_events_short(
+        self, session: Session, capsys: pytest.CaptureFixture
+    ) -> None:
+        """Verifies that print_data_table prints truncated IDs when short=True and all_events=True.
+
+        Args:
+            session (Session): Database session.
+            capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr.
+        """
+        expected_ids = session.exec(select(AimbatDataSource.id)).all()
+
+        print_data_table(session, short=True, all_events=True)
+
+        captured = capsys.readouterr()
+        assert "Data sources for all events" in captured.out
+        for ds_id in expected_ids:
+            assert (
+                str(ds_id)[:2] in captured.out
+            ), "expected truncated data source ID to be in the output table"
+
+    def test_print_data_table_for_active_event(
+        self, session: Session, capsys: pytest.CaptureFixture
+    ) -> None:
+        """Verifies that print_data_table prints data sources for the active event only.
+
+        Args:
+            session (Session): Database session.
+            capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr.
+        """
+        # AimbatSeismogram carries foreign keys to both its datasource and its event:
+        statement = (
+            select(AimbatDataSource.id)
+            .join(AimbatSeismogram)
+            .join(AimbatEvent)
+            .where(AimbatEvent.active == 1)
+        )
+        expected_ids = session.exec(statement).all()
+
+        print_data_table(session, short=False, all_events=False)
+
+        captured = capsys.readouterr()
+        assert "Data sources for event" in captured.out
+        for ds_id in expected_ids:
+            assert (
+                str(ds_id) in captured.out
+            ), "expected data source ID to be in the output table"
+
+    def test_print_data_table_for_active_event_short(
+        self, session: Session, capsys: pytest.CaptureFixture
+    ) -> None:
+        """Verifies that print_data_table prints truncated IDs for the active event when short=True.
+
+        Args:
+            session (Session): Database session.
+            capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr.
+        """
+        # AimbatSeismogram carries foreign keys to both its datasource and its event:
+        statement = (
+            select(AimbatDataSource.id)
+            .join(AimbatSeismogram)
+            .join(AimbatEvent)
+            .where(AimbatEvent.active == 1)
+        )
+        expected_ids = session.exec(statement).all()
+
+        print_data_table(session, short=True, all_events=False)
+
+        captured = capsys.readouterr()
+        assert "Data sources for event" in captured.out
+        for ds_id in expected_ids:
+            assert (
+                str(ds_id)[:2] in captured.out
+            ), "expected truncated data source ID to be in the output table"
diff --git a/tests/integration/test_datasource_sac.py b/tests/integration/test_datasource_sac.py
new file mode 100644
index 00000000..06567927
--- /dev/null
+++ b/tests/integration/test_datasource_sac.py
@@ -0,0 +1,300 @@
+"""Integration tests for AIMBAT models backed by SAC files.
+
+Tests verify that SAC.station, SAC.event, and SAC.seismogram map correctly
+to AimbatStation, AimbatEvent, and AimbatSeismogram, and that the data
+property reads/writes through to the file on disk.
+
+Note that in production we only ever read from the SAC file once to populate
+the database, and then rely on the database for all subsequent access; the
+tests below nevertheless exercise the full mapping and the on-disk data proxy.
+"""
+
+import pytest
+import numpy as np
+from aimbat.models import (
+    AimbatDataSource,
+    AimbatEvent,
+    AimbatEventParameters,
+    AimbatSeismogram,
+    AimbatSeismogramParameters,
+    AimbatStation,
+)
+from aimbat.aimbat_types import DataType
+from datetime import timezone
+from pathlib import Path
+from pandas import Timestamp
+from pysmo.classes import SAC
+from sqlmodel import Session
+from collections.abc import Generator
+
+
+@pytest.fixture
+def session(patched_session: Session) -> Generator[Session, None, None]:
+    """Provides a patched database session for tests."""
+    yield patched_session
+
+
+# ---------------------------------------------------------------------------
+# Helpers
+# ---------------------------------------------------------------------------
+
+
+def _persist_sac(session: Session, sac_file: Path) -> AimbatSeismogram:
+    """Helper to build a full object graph from a SAC file and persist it.
+
+    Args:
+        session (Session): The database session.
+        sac_file (Path): The path to the SAC file.
+
+    Returns:
+        AimbatSeismogram: The persisted seismogram object.
+ """ + sac = SAC.from_file(sac_file) + + event = AimbatEvent.model_validate( + sac.event, + update={"parameters": AimbatEventParameters()}, + ) + session.add(event) + session.flush() + + station = AimbatStation.model_validate(sac.station) + session.add(station) + session.flush() + + seismogram = AimbatSeismogram.model_validate( + sac.seismogram, + update={ + "t0": sac.timestamps.t0, + "parameters": AimbatSeismogramParameters(), + "event": event, + "station": station, + }, + ) + session.add(seismogram) + session.flush() + + datasource = AimbatDataSource( + sourcename=str(sac_file), + datatype=DataType.SAC, + seismogram=seismogram, + ) + session.add(datasource) + session.commit() + return seismogram + + +# =================================================================== +# SAC → AimbatStation +# =================================================================== + + +class TestSacStation: + """Verify SAC.station maps correctly to AimbatStation.""" + + def test_station_fields_match_sac( + self, sac_file_good: Path, session: Session + ) -> None: + """Verifies that AimbatStation fields match the source SAC file headers. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + sac = SAC.from_file(sac_file_good) + station = AimbatStation.model_validate(sac.station) + session.add(station) + session.commit() + session.refresh(station) + + assert station.name == sac.station.name + assert station.network == sac.station.network + assert station.location == sac.station.location + assert station.channel == sac.station.channel + assert station.latitude == sac.station.latitude + assert station.longitude == sac.station.longitude + assert station.elevation == sac.station.elevation + + def test_station_round_trips_through_db( + self, sac_file_good: Path, session: Session + ) -> None: + """Verifies that a Station persisted and re-fetched retains all values. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + sac = SAC.from_file(sac_file_good) + station = AimbatStation.model_validate(sac.station) + session.add(station) + session.commit() + + # Expire in-memory state and reload from DB. + session.expire(station) + assert station.name == sac.station.name + assert station.latitude == pytest.approx(sac.station.latitude) + assert station.longitude == pytest.approx(sac.station.longitude) + + +# =================================================================== +# SAC → AimbatEvent +# =================================================================== + + +class TestSacEvent: + """Verify SAC.event maps correctly to AimbatEvent.""" + + def test_event_fields_match_sac( + self, sac_file_good: Path, session: Session + ) -> None: + """Verifies that AimbatEvent fields match the source SAC file headers. + + Note: SAPandasTimestamp truncates to microsecond precision. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. 
+        """
+        sac = SAC.from_file(sac_file_good)
+        event = AimbatEvent.model_validate(
+            sac.event,
+            update={"parameters": AimbatEventParameters()},
+        )
+        session.add(event)
+        session.commit()
+        session.refresh(event)
+
+        assert event.time == sac.event.time.floor("us")
+        assert event.latitude == sac.event.latitude
+        assert event.longitude == sac.event.longitude
+        assert event.depth == sac.event.depth
+
+    def test_event_round_trips_through_db(
+        self, sac_file_good: Path, session: Session
+    ) -> None:
+        """Verifies that an Event persisted and re-fetched retains all values.
+
+        Args:
+            sac_file_good (Path): Path to a valid SAC file.
+            session (Session): Database session.
+        """
+        sac = SAC.from_file(sac_file_good)
+        event = AimbatEvent.model_validate(
+            sac.event,
+            update={"parameters": AimbatEventParameters()},
+        )
+        session.add(event)
+        session.commit()
+
+        session.expire(event)
+        assert event.latitude == pytest.approx(sac.event.latitude)
+        assert event.longitude == pytest.approx(sac.event.longitude)
+        assert isinstance(event.time, Timestamp)
+
+
+# ===================================================================
+# SAC → AimbatSeismogram
+# ===================================================================
+
+
+class TestSacSeismogram:
+    """AimbatSeismogram backed by a real SAC file on disk."""
+
+    def test_metadata_matches_sac(self, sac_file_good: Path, session: Session) -> None:
+        """Verifies that seismogram model fields correspond to the SAC file.
+
+        SAPandasTimestamp truncates to microsecond precision when storing
+        in SQLite, so Timestamp comparisons use floor("us").
+
+        Args:
+            sac_file_good (Path): Path to a valid SAC file.
+            session (Session): Database session.
+        """
+        sac = SAC.from_file(sac_file_good)
+        seis = _persist_sac(session, sac_file_good)
+        session.refresh(seis)
+
+        assert seis.begin_time == sac.seismogram.begin_time.floor("us")
+        assert seis.delta == sac.seismogram.delta
+        assert seis.t0 == sac.timestamps.t0.floor("us")  # type: ignore
+
+    def test_read_data_from_sac(self, sac_file_good: Path, session: Session) -> None:
+        """Verifies that AimbatSeismogram.data returns the waveform from the SAC file.
+
+        Args:
+            sac_file_good (Path): Path to a valid SAC file.
+            session (Session): Database session.
+        """
+        sac = SAC.from_file(sac_file_good)
+        seis = _persist_sac(session, sac_file_good)
+        session.refresh(seis)
+
+        np.testing.assert_array_equal(seis.data, sac.seismogram.data)
+
+    def test_len_matches_data(self, sac_file_good: Path, session: Session) -> None:
+        """Verifies that the seismogram's data has the same number of samples as the SAC file.
+
+        Args:
+            sac_file_good (Path): Path to a valid SAC file.
+            session (Session): Database session.
+        """
+        sac = SAC.from_file(sac_file_good)
+        seis = _persist_sac(session, sac_file_good)
+        session.refresh(seis)
+
+        assert len(seis.data) == len(sac.seismogram.data)
+
+    def test_end_time_computed(self, sac_file_good: Path, session: Session) -> None:
+        """Verifies that end_time is correctly computed from begin_time, delta, and npts.
+
+        Args:
+            sac_file_good (Path): Path to a valid SAC file.
+            session (Session): Database session.
+        """
+        seis = _persist_sac(session, sac_file_good)
+        session.refresh(seis)
+
+        expected = seis.begin_time + seis.delta * (len(seis.data) - 1)
+        assert seis.end_time == expected
+
+    def test_write_data_to_sac(self, sac_file_good: Path, session: Session) -> None:
+        """Verifies that writing to AimbatSeismogram.data updates the SAC file on disk.
+
+        Args:
+            sac_file_good (Path): Path to a valid SAC file.
+ session (Session): Database session. + """ + seis = _persist_sac(session, sac_file_good) + session.refresh(seis) + + original_data = seis.data.copy() + new_data = np.zeros_like(original_data) + seis.data = new_data + + # Re-read from disk to confirm the file was updated. + reread = SAC.from_file(sac_file_good).seismogram.data + np.testing.assert_array_equal(reread, new_data) + assert not np.array_equal(reread, original_data) + + def test_proxy_properties(self, sac_file_good: Path, session: Session) -> None: + """Verifies that properties like flip, select, and t1 proxy through to parameters. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + seis = _persist_sac(session, sac_file_good) + session.refresh(seis) + + assert seis.select is True + seis.select = False + assert seis.parameters.select is False + + assert seis.flip is False + seis.flip = True + assert seis.parameters.flip is True + + assert seis.t1 is None + new_t1 = Timestamp("2011-09-15T19:42:25", tz=timezone.utc) + seis.t1 = new_t1 + assert seis.parameters.t1 == new_t1 diff --git a/tests/integration/test_db_operations.py b/tests/integration/test_db_operations.py new file mode 100644 index 00000000..ab7abce4 --- /dev/null +++ b/tests/integration/test_db_operations.py @@ -0,0 +1,473 @@ +"""Integration tests for ORM relationships and cascade deletes in AIMBAT models.""" + +import pytest +from aimbat.core._snapshot import create_snapshot +from aimbat.models import ( + AimbatDataSource, + AimbatEvent, + AimbatEventParameters, + AimbatEventParametersSnapshot, + AimbatSeismogram, + AimbatSeismogramParameters, + AimbatSeismogramParametersSnapshot, + AimbatSnapshot, + AimbatStation, +) +from sqlmodel import Session, select + + +@pytest.fixture +def session(loaded_session: Session) -> Session: + """Provides a session with multi-event data and an active event pre-loaded. + + Args: + loaded_session: A SQLModel Session with data populated. + + Returns: + The database session. + """ + return loaded_session + + +@pytest.fixture +def event(session: Session) -> AimbatEvent: + """Provides the first event from the database. + + Args: + session: The database session. + + Returns: + An AimbatEvent. + """ + return session.exec(select(AimbatEvent)).first() # type: ignore[return-value] + + +@pytest.fixture +def station(session: Session) -> AimbatStation: + """Provides the first station from the database. + + Args: + session: The database session. + + Returns: + An AimbatStation. + """ + return session.exec(select(AimbatStation)).first() # type: ignore[return-value] + + +@pytest.fixture +def seismogram(session: Session) -> AimbatSeismogram: + """Provides the first seismogram from the database. + + Args: + session: The database session. + + Returns: + An AimbatSeismogram. + """ + return session.exec(select(AimbatSeismogram)).first() # type: ignore[return-value] + + +# --------------------------------------------------------------------------- +# Relationship traversal +# --------------------------------------------------------------------------- + + +class TestEventRelationships: + """Tests for navigating relationships on AimbatEvent.""" + + def test_event_has_parameters(self, event: AimbatEvent) -> None: + """Verifies that an event exposes its parameters via the relationship. + + Args: + event: An AimbatEvent instance. 
+ """ + assert isinstance(event.parameters, AimbatEventParameters) + + def test_event_parameters_back_reference(self, event: AimbatEvent) -> None: + """Verifies that event parameters link back to their parent event. + + Args: + event: An AimbatEvent instance. + """ + assert event.parameters.event_id == event.id + + def test_event_has_seismograms(self, event: AimbatEvent) -> None: + """Verifies that an event exposes its seismograms via the relationship. + + Args: + event: An AimbatEvent instance. + """ + assert len(event.seismograms) > 0 + assert all(isinstance(s, AimbatSeismogram) for s in event.seismograms) + + def test_seismogram_back_reference_to_event(self, event: AimbatEvent) -> None: + """Verifies that each seismogram links back to its parent event. + + Args: + event: An AimbatEvent instance. + """ + for seis in event.seismograms: + assert seis.event_id == event.id + + def test_event_seismogram_count(self, event: AimbatEvent) -> None: + """Verifies that seismogram_count matches the number of related seismograms. + + Args: + event: An AimbatEvent instance. + """ + assert event.seismogram_count == len(event.seismograms) + + def test_event_station_count(self, event: AimbatEvent) -> None: + """Verifies that station_count reflects the number of unique stations. + + Args: + event: An AimbatEvent instance. + """ + unique_stations = {s.station_id for s in event.seismograms} + assert event.station_count == len(unique_stations) + + +class TestStationRelationships: + """Tests for navigating relationships on AimbatStation.""" + + def test_station_has_seismograms(self, station: AimbatStation) -> None: + """Verifies that a station exposes its seismograms via the relationship. + + Args: + station: An AimbatStation instance. + """ + assert len(station.seismograms) > 0 + assert all(isinstance(s, AimbatSeismogram) for s in station.seismograms) + + def test_seismogram_back_reference_to_station(self, station: AimbatStation) -> None: + """Verifies that each seismogram links back to its parent station. + + Args: + station: An AimbatStation instance. + """ + for seis in station.seismograms: + assert seis.station_id == station.id + + +class TestSeismogramRelationships: + """Tests for navigating relationships on AimbatSeismogram.""" + + def test_seismogram_has_datasource(self, seismogram: AimbatSeismogram) -> None: + """Verifies that a seismogram exposes its datasource via the relationship. + + Args: + seismogram: An AimbatSeismogram instance. + """ + assert isinstance(seismogram.datasource, AimbatDataSource) + + def test_datasource_back_reference(self, seismogram: AimbatSeismogram) -> None: + """Verifies that the datasource links back to its parent seismogram. + + Args: + seismogram: An AimbatSeismogram instance. + """ + assert seismogram.datasource.seismogram_id == seismogram.id + + def test_seismogram_has_parameters(self, seismogram: AimbatSeismogram) -> None: + """Verifies that a seismogram exposes its parameters via the relationship. + + Args: + seismogram: An AimbatSeismogram instance. + """ + assert isinstance(seismogram.parameters, AimbatSeismogramParameters) + + def test_seismogram_parameters_back_reference( + self, seismogram: AimbatSeismogram + ) -> None: + """Verifies that seismogram parameters link back to their parent seismogram. + + Args: + seismogram: An AimbatSeismogram instance. 
+ """ + assert seismogram.parameters.seismogram_id == seismogram.id + + def test_seismogram_has_event(self, seismogram: AimbatSeismogram) -> None: + """Verifies that a seismogram exposes its parent event via the relationship. + + Args: + seismogram: An AimbatSeismogram instance. + """ + assert isinstance(seismogram.event, AimbatEvent) + + def test_seismogram_has_station(self, seismogram: AimbatSeismogram) -> None: + """Verifies that a seismogram exposes its parent station via the relationship. + + Args: + seismogram: An AimbatSeismogram instance. + """ + assert isinstance(seismogram.station, AimbatStation) + + +class TestSnapshotRelationships: + """Tests for navigating relationships on AimbatSnapshot.""" + + def test_snapshot_has_event_parameters_snapshot(self, session: Session) -> None: + """Verifies that a snapshot exposes its event parameter snapshot. + + Args: + session: The database session. + """ + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + assert isinstance( + snapshot.event_parameters_snapshot, AimbatEventParametersSnapshot + ) + + def test_snapshot_has_seismogram_parameter_snapshots( + self, session: Session + ) -> None: + """Verifies that a snapshot exposes its seismogram parameter snapshots. + + Args: + session: The database session. + """ + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + assert len(snapshot.seismogram_parameters_snapshots) > 0 + assert all( + isinstance(s, AimbatSeismogramParametersSnapshot) + for s in snapshot.seismogram_parameters_snapshots + ) + + def test_snapshot_back_reference_to_event(self, session: Session) -> None: + """Verifies that a snapshot links back to its parent event. + + Args: + session: The database session. + """ + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + assert isinstance(snapshot.event, AimbatEvent) + + +# --------------------------------------------------------------------------- +# Cascade delete +# --------------------------------------------------------------------------- + + +class TestCascadeDeleteEvent: + """Tests that deleting an event cascades to all its dependants.""" + + def test_seismograms_deleted(self, session: Session, event: AimbatEvent) -> None: + """Verifies that deleting an event removes all its seismograms. + + Args: + session: The database session. + event: An AimbatEvent to delete. + """ + seismogram_ids = [s.id for s in event.seismograms] + assert len(seismogram_ids) > 0 + + session.delete(event) + session.commit() + + remaining = session.exec(select(AimbatSeismogram)).all() + remaining_ids = {s.id for s in remaining} + assert not any(sid in remaining_ids for sid in seismogram_ids) + + def test_event_parameters_deleted( + self, session: Session, event: AimbatEvent + ) -> None: + """Verifies that deleting an event removes its parameters. + + Args: + session: The database session. + event: An AimbatEvent to delete. + """ + parameters_id = event.parameters.id + + session.delete(event) + session.commit() + + assert session.get(AimbatEventParameters, parameters_id) is None + + def test_snapshots_deleted(self, session: Session, event: AimbatEvent) -> None: + """Verifies that deleting an event removes all its snapshots. + + Args: + session: The database session. + event: An AimbatEvent to delete. 
+ """ + create_snapshot(session) + session.refresh(event) + assert len(event.snapshots) > 0 + snapshot_ids = [s.id for s in event.snapshots] + + session.delete(event) + session.commit() + + for sid in snapshot_ids: + assert session.get(AimbatSnapshot, sid) is None + + def test_snapshot_parameter_snapshots_deleted( + self, session: Session, event: AimbatEvent + ) -> None: + """Verifies that deleting an event removes all descendant parameter snapshots. + + Args: + session: The database session. + event: An AimbatEvent to delete. + """ + create_snapshot(session) + session.refresh(event) + + session.delete(event) + session.commit() + + assert len(session.exec(select(AimbatEventParametersSnapshot)).all()) == 0 + assert len(session.exec(select(AimbatSeismogramParametersSnapshot)).all()) == 0 + + +class TestCascadeDeleteStation: + """Tests that deleting a station cascades to all its dependants.""" + + def test_seismograms_deleted( + self, session: Session, station: AimbatStation + ) -> None: + """Verifies that deleting a station removes all its seismograms. + + Args: + session: The database session. + station: An AimbatStation to delete. + """ + seismogram_ids = [s.id for s in station.seismograms] + assert len(seismogram_ids) > 0 + + session.delete(station) + session.commit() + + remaining_ids = {s.id for s in session.exec(select(AimbatSeismogram)).all()} + assert not any(sid in remaining_ids for sid in seismogram_ids) + + def test_seismogram_parameters_deleted( + self, session: Session, station: AimbatStation + ) -> None: + """Verifies that deleting a station also removes seismogram parameters. + + Args: + session: The database session. + station: An AimbatStation to delete. + """ + param_ids = [s.parameters.id for s in station.seismograms] + assert len(param_ids) > 0 + + session.delete(station) + session.commit() + + for pid in param_ids: + assert session.get(AimbatSeismogramParameters, pid) is None + + def test_datasources_deleted( + self, session: Session, station: AimbatStation + ) -> None: + """Verifies that deleting a station also removes all seismogram datasources. + + Args: + session: The database session. + station: An AimbatStation to delete. + """ + datasource_ids = [s.datasource.id for s in station.seismograms] + assert len(datasource_ids) > 0 + + session.delete(station) + session.commit() + + for did in datasource_ids: + assert session.get(AimbatDataSource, did) is None + + +class TestCascadeDeleteSeismogram: + """Tests that deleting a seismogram cascades to all its dependants.""" + + def test_datasource_deleted( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that deleting a seismogram removes its datasource. + + Args: + session: The database session. + seismogram: An AimbatSeismogram to delete. + """ + datasource_id = seismogram.datasource.id + + session.delete(seismogram) + session.commit() + + assert session.get(AimbatDataSource, datasource_id) is None + + def test_parameters_deleted( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that deleting a seismogram removes its parameters. + + Args: + session: The database session. + seismogram: An AimbatSeismogram to delete. 
+ """ + parameters_id = seismogram.parameters.id + + session.delete(seismogram) + session.commit() + + assert session.get(AimbatSeismogramParameters, parameters_id) is None + + def test_parameter_snapshots_deleted( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that deleting a seismogram removes its parameter snapshots. + + Args: + session: The database session. + seismogram: An AimbatSeismogram to delete. + """ + create_snapshot(session) + parameters_id = seismogram.parameters.id + + session.delete(seismogram) + session.commit() + + assert session.get(AimbatSeismogramParameters, parameters_id) is None + remaining = session.exec(select(AimbatSeismogramParametersSnapshot)).all() + assert not any(s.seismogram_parameters_id == parameters_id for s in remaining) + + +class TestCascadeDeleteSnapshot: + """Tests that deleting a snapshot cascades to all its dependants.""" + + def test_event_parameters_snapshot_deleted(self, session: Session) -> None: + """Verifies that deleting a snapshot removes its event parameter snapshot. + + Args: + session: The database session. + """ + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + ep_snapshot_id = snapshot.event_parameters_snapshot.id + + session.delete(snapshot) + session.commit() + + assert session.get(AimbatEventParametersSnapshot, ep_snapshot_id) is None + + def test_seismogram_parameters_snapshots_deleted(self, session: Session) -> None: + """Verifies that deleting a snapshot removes all its seismogram parameter snapshots. + + Args: + session: The database session. + """ + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + sp_snapshot_ids = [s.id for s in snapshot.seismogram_parameters_snapshots] + assert len(sp_snapshot_ids) > 0 + + session.delete(snapshot) + session.commit() + + for sid in sp_snapshot_ids: + assert session.get(AimbatSeismogramParametersSnapshot, sid) is None diff --git a/tests/integration/test_event.py b/tests/integration/test_event.py new file mode 100644 index 00000000..721992a1 --- /dev/null +++ b/tests/integration/test_event.py @@ -0,0 +1,351 @@ +"""Integration tests for event management functions in aimbat.core._event.""" + +import json +import uuid +import pytest +from aimbat.core._event import ( + delete_event, + delete_event_by_id, + get_completed_events, + get_events_using_station, + get_event_parameter, + set_event_parameter, + dump_event_table_to_json, + dump_event_parameter_table_to_json, + print_event_table, + print_event_parameter_table, +) +from aimbat.aimbat_types import EventParameter +from aimbat.models import AimbatEvent, AimbatStation +from pandas import Timedelta +from sqlmodel import Session, select +from sqlalchemy.exc import NoResultFound + + +@pytest.fixture +def session(loaded_session: Session) -> Session: + """Provides a session with multi-event data and an active event pre-loaded. + + Args: + loaded_session: A SQLModel Session with data populated. + + Returns: + The database session. + """ + return loaded_session + + +class TestDeleteEvent: + """Tests for deleting events from the database.""" + + def test_delete_event(self, session: Session) -> None: + """Verifies that an event is removed from the database after deletion. + + Args: + session: The database session. 
+ """ + events = session.exec(select(AimbatEvent)).all() + count_before = len(events) + non_active = next(e for e in events if not e.active) + + delete_event(session, non_active) + + remaining = session.exec(select(AimbatEvent)).all() + assert len(remaining) == count_before - 1 + assert non_active not in remaining + + def test_delete_event_by_id(self, session: Session) -> None: + """Verifies that an event is removed from the database when deleted by ID. + + Args: + session: The database session. + """ + events = session.exec(select(AimbatEvent)).all() + count_before = len(events) + non_active = next(e for e in events if not e.active) + + delete_event_by_id(session, non_active.id) + + remaining = session.exec(select(AimbatEvent)).all() + assert len(remaining) == count_before - 1 + + def test_delete_event_by_id_not_found(self, session: Session) -> None: + """Verifies that deleting a non-existent event ID raises NoResultFound. + + Args: + session: The database session. + """ + with pytest.raises(NoResultFound): + delete_event_by_id(session, uuid.uuid4()) + + +class TestGetCompletedEvents: + """Tests for retrieving events marked as completed.""" + + def test_no_completed_events(self, session: Session) -> None: + """Verifies that no events are returned when none are marked as completed. + + Args: + session: The database session. + """ + completed = get_completed_events(session) + assert len(completed) == 0 + + def test_get_completed_events(self, session: Session) -> None: + """Verifies that only events marked as completed are returned. + + Args: + session: The database session. + """ + events = session.exec(select(AimbatEvent)).all() + target = events[0] + target.parameters.completed = True + session.add(target) + session.commit() + + completed = get_completed_events(session) + assert len(completed) == 1 + assert target in completed + + +class TestGetEventsUsingStation: + """Tests for retrieving events associated with a particular station.""" + + def test_get_events_using_station(self, session: Session) -> None: + """Verifies that events linked to a station are returned. + + Args: + session: The database session. + """ + station = session.exec(select(AimbatStation)).first() + assert station is not None + + events = get_events_using_station(session, station) + assert len(events) > 0 + for event in events: + station_ids = [s.station_id for s in event.seismograms] + assert station.id in station_ids + + def test_get_events_using_station_no_match(self, session: Session) -> None: + """Verifies that an empty sequence is returned for a station with no events. + + Args: + session: The database session. + """ + orphan = AimbatStation( + network="XX", + name="NONE", + location="00", + channel="BHZ", + latitude=0.0, + longitude=0.0, + ) + session.add(orphan) + session.commit() + + events = get_events_using_station(session, orphan) + assert len(events) == 0 + + +class TestGetEventParameter: + """Tests for reading parameter values from the active event.""" + + def test_get_timedelta_parameter(self, session: Session) -> None: + """Verifies that a Timedelta parameter is returned as a Timedelta. + + Args: + session: The database session. + """ + value = get_event_parameter(session, EventParameter.WINDOW_PRE) + assert isinstance(value, Timedelta) + + def test_get_float_parameter(self, session: Session) -> None: + """Verifies that a float parameter is returned as a float. + + Args: + session: The database session. 
+ """ + value = get_event_parameter(session, EventParameter.MIN_CCNORM) + assert isinstance(value, float) + + def test_get_bool_parameter(self, session: Session) -> None: + """Verifies that a bool parameter is returned as a bool. + + Args: + session: The database session. + """ + value = get_event_parameter(session, EventParameter.COMPLETED) + assert isinstance(value, bool) + + +class TestSetEventParameter: + """Tests for writing parameter values to the active event.""" + + def test_set_timedelta_parameter(self, session: Session) -> None: + """Verifies that a Timedelta parameter is persisted correctly. + + Args: + session: The database session. + """ + new_value = Timedelta(seconds=20) + set_event_parameter(session, EventParameter.WINDOW_POST, new_value) + assert get_event_parameter(session, EventParameter.WINDOW_POST) == new_value + + def test_set_float_parameter(self, session: Session) -> None: + """Verifies that a float parameter is persisted correctly. + + Args: + session: The database session. + """ + new_value = 0.75 + set_event_parameter(session, EventParameter.MIN_CCNORM, new_value) + assert get_event_parameter(session, EventParameter.MIN_CCNORM) == new_value + + def test_set_bool_parameter(self, session: Session) -> None: + """Verifies that a bool parameter is persisted correctly. + + Args: + session: The database session. + """ + set_event_parameter(session, EventParameter.COMPLETED, True) + assert get_event_parameter(session, EventParameter.COMPLETED) is True + + +class TestDumpEventTableToJson: + """Tests for serialising the event table to JSON.""" + + def test_as_string(self, session: Session) -> None: + """Verifies that a JSON string is returned when as_string=True. + + Args: + session: The database session. + """ + result = dump_event_table_to_json(session, as_string=True) + assert isinstance(result, str) + parsed = json.loads(result) + assert isinstance(parsed, list) + assert len(parsed) > 0 + + def test_as_list(self, session: Session) -> None: + """Verifies that a list of dicts is returned when as_string=False. + + Args: + session: The database session. + """ + result = dump_event_table_to_json(session, as_string=False) + assert isinstance(result, list) + assert len(result) > 0 + assert "id" in result[0] + assert "active" in result[0] + + +class TestDumpEventParameterTableToJson: + """Tests for serialising the event parameter table to JSON.""" + + def test_active_event_as_string(self, session: Session) -> None: + """Verifies that a JSON string of the active event parameters is returned. + + Args: + session: The database session. + """ + result = dump_event_parameter_table_to_json( + session, all_events=False, as_string=True + ) + assert isinstance(result, str) + parsed = json.loads(result) + assert "min_ccnorm" in parsed + assert "window_pre" in parsed + assert "window_post" in parsed + + def test_active_event_as_dict(self, session: Session) -> None: + """Verifies that a dict of the active event parameters is returned. + + Args: + session: The database session. + """ + result = dump_event_parameter_table_to_json( + session, all_events=False, as_string=False + ) + assert isinstance(result, dict) + assert "min_ccnorm" in result + assert "window_pre" in result + assert "window_post" in result + + def test_all_events_as_string(self, session: Session) -> None: + """Verifies that a JSON string of all event parameters is returned. + + Args: + session: The database session. 
+ """ + result = dump_event_parameter_table_to_json( + session, all_events=True, as_string=True + ) + assert isinstance(result, str) + parsed = json.loads(result) + assert isinstance(parsed, list) + assert len(parsed) > 0 + + def test_all_events_as_list(self, session: Session) -> None: + """Verifies that a list of dicts of all event parameters is returned. + + Args: + session: The database session. + """ + result = dump_event_parameter_table_to_json( + session, all_events=True, as_string=False + ) + assert isinstance(result, list) + assert len(result) > 0 + assert "min_ccnorm" in result[0] + + +class TestPrintEventTable: + """Tests for printing the event table.""" + + def test_print_short(self, session: Session, capsys: pytest.CaptureFixture) -> None: + """Verifies that print_event_table produces output with short=True. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_event_table(session, short=True) + assert len(capsys.readouterr().out) > 0 + + def test_print_long(self, session: Session, capsys: pytest.CaptureFixture) -> None: + """Verifies that print_event_table produces output with short=False. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_event_table(session, short=False) + assert len(capsys.readouterr().out) > 0 + + +class TestPrintEventParameterTable: + """Tests for printing the event parameter table.""" + + def test_print_active_event( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that print_event_parameter_table produces output for the active event. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_event_parameter_table(session, short=False, all_events=False) + assert len(capsys.readouterr().out) > 0 + + def test_print_all_events( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that print_event_parameter_table produces output for all events. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_event_parameter_table(session, short=False, all_events=True) + assert len(capsys.readouterr().out) > 0 diff --git a/tests/integration/test_models.py b/tests/integration/test_models.py new file mode 100644 index 00000000..e9789b48 --- /dev/null +++ b/tests/integration/test_models.py @@ -0,0 +1,668 @@ +"""Integration tests for AIMBAT SQLModel ORM classes. + +Tests cover cascade deletes, the single-active-event constraint, +type validation, and round-trip persistence of custom time types. 
+""" + +import pytest +from aimbat.models import ( + AimbatDataSource, + AimbatEvent, + AimbatEventParameters, + AimbatEventParametersBase, + AimbatEventParametersSnapshot, + AimbatSeismogram, + AimbatSeismogramParameters, + AimbatSeismogramParametersSnapshot, + AimbatSnapshot, + AimbatStation, +) +from aimbat.aimbat_types import DataType +from datetime import timezone +from pandas import Timedelta, Timestamp +from pydantic import ValidationError +from sqlmodel import Session, select +from collections.abc import Generator + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- +@pytest.fixture +def session(patched_session: Session) -> Generator[Session, None, None]: + yield patched_session + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _make_station(session: Session, *, name: str = "AAK") -> AimbatStation: + """Insert and return a minimal station. + + Args: + session (Session): Database session. + name (str): Station name (default: "AAK"). + + Returns: + AimbatStation: The created station. + """ + sta = AimbatStation( + name=name, + network="II", + location="00", + channel="BHZ", + latitude=42.63, + longitude=74.49, + ) + session.add(sta) + session.flush() + return sta + + +def _make_event( + session: Session, + *, + time: str = "2010-02-27T06:34:14", + active: bool | None = None, +) -> AimbatEvent: + """Insert and return an event together with its mandatory parameters. + + Args: + session (Session): Database session. + time (str): Event time string (default: "2010-02-27T06:34:14"). + active (bool | None): Whether the event is active (default: None). + + Returns: + AimbatEvent: The created event. + """ + ev = AimbatEvent( + time=Timestamp(time, tz=timezone.utc), + latitude=-36.12, + longitude=-72.90, + depth=22.9, + active=active, + ) + session.add(ev) + session.flush() + + params = AimbatEventParameters(event=ev) + session.add(params) + session.flush() + return ev + + +def _make_seismogram( + session: Session, + event: AimbatEvent, + station: AimbatStation, +) -> AimbatSeismogram: + """Insert and return a seismogram (with datasource and parameters). + + Args: + session (Session): Database session. + event (AimbatEvent): The associated event. + station (AimbatStation): The associated station. + + Returns: + AimbatSeismogram: The created seismogram. + """ + seis = AimbatSeismogram( + begin_time=Timestamp("2010-02-27T06:30:00", tz=timezone.utc), + delta=Timedelta(seconds=0.05), + t0=Timestamp("2010-02-27T06:40:00", tz=timezone.utc), + event=event, + station=station, + ) + session.add(seis) + session.flush() + + ds = AimbatDataSource( + sourcename="/tmp/fake.sac", + datatype=DataType.SAC, + seismogram=seis, + ) + session.add(ds) + + sp = AimbatSeismogramParameters(seismogram=seis) + session.add(sp) + + session.flush() + return seis + + +# =================================================================== +# Cascade delete tests +# =================================================================== + + +class TestCascadeDeleteEvent: + """Deleting an event must remove all related children.""" + + def test_delete_event_cascades_to_parameters(self, session: Session) -> None: + """Verifies that deleting an event also deletes its parameters. + + Args: + session (Session): Database session. 
+ """ + ev = _make_event(session) + session.commit() + + assert session.exec(select(AimbatEventParameters)).one() is not None + + session.delete(ev) + session.commit() + + assert session.exec(select(AimbatEventParameters)).first() is None + + def test_delete_event_cascades_to_seismograms(self, session: Session) -> None: + """Verifies that deleting an event also deletes its seismograms. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + assert len(session.exec(select(AimbatSeismogram)).all()) == 1 + + session.delete(ev) + session.commit() + + assert len(session.exec(select(AimbatSeismogram)).all()) == 0 + + def test_delete_event_cascades_to_datasource(self, session: Session) -> None: + """Verifies that deleting an event cascades to delete datasources (via seismograms). + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + assert session.exec(select(AimbatDataSource)).first() is not None + + session.delete(ev) + session.commit() + + assert session.exec(select(AimbatDataSource)).first() is None + + def test_delete_event_cascades_to_seismogram_parameters( + self, session: Session + ) -> None: + """Verifies that deleting an event cascades to delete seismogram parameters. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + assert session.exec(select(AimbatSeismogramParameters)).first() is not None + + session.delete(ev) + session.commit() + + assert session.exec(select(AimbatSeismogramParameters)).first() is None + + def test_delete_event_cascades_to_snapshots(self, session: Session) -> None: + """Verifies that deleting an event deletes related snapshots and their parameter copies. + + Args: + session (Session): Database session. + """ + ev = _make_event(session, active=True) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + # Create a snapshot via the core helper (uses the active event). + from aimbat.core import create_snapshot + + create_snapshot(session, comment="before delete") + assert len(session.exec(select(AimbatSnapshot)).all()) == 1 + assert len(session.exec(select(AimbatEventParametersSnapshot)).all()) == 1 + assert len(session.exec(select(AimbatSeismogramParametersSnapshot)).all()) == 1 + + session.delete(ev) + session.commit() + + assert len(session.exec(select(AimbatSnapshot)).all()) == 0 + assert len(session.exec(select(AimbatEventParametersSnapshot)).all()) == 0 + assert len(session.exec(select(AimbatSeismogramParametersSnapshot)).all()) == 0 + + def test_delete_event_does_not_delete_station(self, session: Session) -> None: + """Stations are shared across events and must survive event deletion. + + Args: + session (Session): Database session. 
+ """ + ev = _make_event(session) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + session.delete(ev) + session.commit() + + remaining = session.exec(select(AimbatStation)).all() + assert len(remaining) == 1 + assert remaining[0].id == sta.id + + +class TestCascadeDeleteStation: + """Deleting a station must remove its seismograms (and their children).""" + + def test_delete_station_cascades_to_seismograms(self, session: Session) -> None: + """Verifies that deleting a station removes associated seismograms and their children. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + session.delete(sta) + session.commit() + + assert len(session.exec(select(AimbatSeismogram)).all()) == 0 + assert session.exec(select(AimbatDataSource)).first() is None + assert session.exec(select(AimbatSeismogramParameters)).first() is None + + +class TestCascadeDeleteSnapshot: + """Deleting a snapshot must remove its parameter snapshots.""" + + def test_delete_snapshot_cascades_to_parameter_snapshots( + self, session: Session + ) -> None: + """Verifies that deleting a snapshot removes its associated parameter snapshots. + + Args: + session (Session): Database session. + """ + ev = _make_event(session, active=True) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + from aimbat.core import create_snapshot + + create_snapshot(session) + + snapshot = session.exec(select(AimbatSnapshot)).one() + session.delete(snapshot) + session.commit() + + assert len(session.exec(select(AimbatEventParametersSnapshot)).all()) == 0 + assert len(session.exec(select(AimbatSeismogramParametersSnapshot)).all()) == 0 + + +# =================================================================== +# Single active event constraint +# =================================================================== + + +class TestSingleActiveEvent: + """The DB trigger ensures at most one event has active=True.""" + + def test_only_one_active_event_via_insert(self, session: Session) -> None: + """Inserting a new active event deactivates the previous one. + + Args: + session (Session): Database session. + """ + ev1 = _make_event(session, active=True) + session.commit() + session.refresh(ev1) + assert ev1.active is True + + ev2 = _make_event(session, time="2011-03-11T05:46:24", active=True) + session.commit() + + session.refresh(ev1) + session.refresh(ev2) + assert ev1.active is None + assert ev2.active is True + + def test_only_one_active_event_via_update(self, session: Session) -> None: + """Updating an event to active deactivates the previous one. + + Args: + session (Session): Database session. + """ + ev1 = _make_event(session, active=True) + ev2 = _make_event(session, time="2011-03-11T05:46:24") + session.commit() + + ev2.active = True + session.add(ev2) + session.commit() + + session.refresh(ev1) + session.refresh(ev2) + assert ev1.active is None + assert ev2.active is True + + def test_multiple_inactive_events_allowed(self, session: Session) -> None: + """Multiple events may exist without any being active. + + Args: + session (Session): Database session. 
+ """ + _make_event(session, time="2010-01-01T00:00:00") + _make_event(session, time="2011-01-01T00:00:00") + _make_event(session, time="2012-01-01T00:00:00") + session.commit() + + active = session.exec( + select(AimbatEvent).where(AimbatEvent.active == True) # noqa: E712 + ).all() + assert len(active) == 0 + + def test_cycling_active_through_three_events(self, session: Session) -> None: + """Verifies cycling active status through multiple events ensures only one is active at a time. + + Args: + session (Session): Database session. + """ + ev1 = _make_event(session, time="2010-01-01T00:00:00", active=True) + ev2 = _make_event(session, time="2011-01-01T00:00:00") + ev3 = _make_event(session, time="2012-01-01T00:00:00") + session.commit() + + for target in [ev2, ev3, ev1]: + target.active = True + session.add(target) + session.commit() + + active = session.exec( + select(AimbatEvent).where(AimbatEvent.active == True) # noqa: E712 + ).all() + assert len(active) == 1 + session.refresh(target) + assert target.active is True + + +# =================================================================== +# Type validation +# =================================================================== + + +class TestEventValidation: + """Pydantic validation on AimbatEvent fields.""" + + def test_event_time_accepts_string(self, session: Session) -> None: + """Verifies that the event time field accepts ISO format strings and converts them to Timestamp. + + Args: + session (Session): Database session. + """ + ev = AimbatEvent( + time="2010-02-27T06:34:14+00:00", + latitude=0.0, + longitude=0.0, + ) + session.add(ev) + session.flush() + params = AimbatEventParameters(event=ev) + session.add(params) + session.commit() + + session.refresh(ev) + assert isinstance(ev.time, Timestamp) + + def test_event_rejects_invalid_time(self) -> None: + """model_validate enforces Pydantic type coercion for table models.""" + with pytest.raises(ValidationError): + AimbatEvent.model_validate( + {"time": "not-a-date", "latitude": 0.0, "longitude": 0.0} + ) + + +class TestEventParametersValidation: + """Validation rules on AimbatEventParametersBase (non-table base class). + + SQLModel table models skip Pydantic validation on __init__, so we test + via the base class and via model_validate on the table class. 
+ """ + + def test_min_ccnorm_rejects_out_of_range(self) -> None: + """Verifies that min_ccnorm rejects values > 1.0.""" + with pytest.raises(ValidationError): + AimbatEventParametersBase(min_ccnorm=1.5) + + def test_min_ccnorm_rejects_negative(self) -> None: + """Verifies that min_ccnorm rejects negative values.""" + with pytest.raises(ValidationError): + AimbatEventParametersBase(min_ccnorm=-0.1) + + def test_window_pre_must_be_negative(self) -> None: + """Verifies that window_pre must be a negative Timedelta.""" + with pytest.raises(ValidationError): + AimbatEventParametersBase(window_pre=Timedelta(seconds=5)) + + def test_window_post_must_be_positive(self) -> None: + """Verifies that window_post must be a positive Timedelta.""" + with pytest.raises(ValidationError): + AimbatEventParametersBase(window_post=Timedelta(seconds=-5)) + + def test_bandpass_fmax_must_exceed_fmin(self) -> None: + """The bandpass validator mixin is on AimbatEventParameters (table model), + so we must use model_validate to trigger it.""" + with pytest.raises(ValidationError): + AimbatEventParameters.model_validate( + {"bandpass_fmin": 2.0, "bandpass_fmax": 1.0} + ) + + def test_bandpass_fmax_must_not_equal_fmin(self) -> None: + """Verifies that bandpass_fmax cannot equal bandpass_fmin.""" + with pytest.raises(ValidationError): + AimbatEventParameters.model_validate( + {"bandpass_fmin": 1.0, "bandpass_fmax": 1.0} + ) + + def test_model_validate_enforces_rules_on_table_class(self) -> None: + """model_validate on the table class must also reject invalid values.""" + with pytest.raises(ValidationError): + AimbatEventParameters.model_validate({"min_ccnorm": 1.5}) + + def test_valid_parameters_accepted(self, session: Session) -> None: + """Verifies that valid parameters are accepted. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + session.commit() + session.refresh(ev) + + params = ev.parameters + assert params.completed is False + assert params.min_ccnorm >= 0 + assert params.min_ccnorm <= 1 + assert params.window_pre.total_seconds() < 0 + assert params.window_post.total_seconds() > 0 + + +class TestSeismogramParametersValidation: + """Validation rules on seismogram-related models.""" + + def test_default_select_is_true(self, session: Session) -> None: + """Verifies that the default 'select' parameter is True. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + seis = _make_seismogram(session, ev, sta) + session.commit() + session.refresh(seis) + + assert seis.parameters.select is True + + def test_default_flip_is_false(self, session: Session) -> None: + """Verifies that the default 'flip' parameter is False. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + seis = _make_seismogram(session, ev, sta) + session.commit() + session.refresh(seis) + + assert seis.parameters.flip is False + + def test_default_t1_is_none(self, session: Session) -> None: + """Verifies that the default 't1' parameter is None. + + Args: + session (Session): Database session. 
+ """ + ev = _make_event(session) + sta = _make_station(session) + seis = _make_seismogram(session, ev, sta) + session.commit() + session.refresh(seis) + + assert seis.parameters.t1 is None + + def test_seismogram_delta_must_be_positive(self) -> None: + """model_validate enforces Pydantic type constraints for table models.""" + with pytest.raises(ValidationError): + AimbatSeismogram.model_validate( + { + "begin_time": Timestamp("2010-01-01", tz=timezone.utc), + "delta": Timedelta(seconds=-1), + "t0": Timestamp("2010-01-01", tz=timezone.utc), + } + ) + + +# =================================================================== +# Round-trip persistence of custom time types +# =================================================================== + + +class TestTimestampRoundTrip: + """Pandas Timestamp values survive a write→read cycle via SQLAlchemy.""" + + def test_event_time_round_trip(self, session: Session) -> None: + """Verifies round-trip persistence of event time as a Timestamp. + + Args: + session (Session): Database session. + """ + ts = Timestamp("2010-02-27T06:34:14", tz=timezone.utc) + ev = _make_event(session, time="2010-02-27T06:34:14") + session.commit() + + session.refresh(ev) + assert isinstance(ev.time, Timestamp) + assert ev.time == ts + + def test_seismogram_times_round_trip(self, session: Session) -> None: + """Verifies round-trip persistence of seismogram times as Timestamps. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + seis = _make_seismogram(session, ev, sta) + session.commit() + + session.refresh(seis) + assert isinstance(seis.begin_time, Timestamp) + assert isinstance(seis.t0, Timestamp) + + +class TestTimedeltaRoundTrip: + """Pandas Timedelta values survive a write→read cycle via SQLAlchemy.""" + + def test_event_parameters_window_round_trip(self, session: Session) -> None: + """Verifies round-trip persistence of event parameter windows as Timedeltas. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + session.commit() + + session.refresh(ev) + params = ev.parameters + assert isinstance(params.window_pre, Timedelta) + assert isinstance(params.window_post, Timedelta) + assert params.window_pre.total_seconds() < 0 + assert params.window_post.total_seconds() > 0 + + def test_seismogram_delta_round_trip(self, session: Session) -> None: + """Verifies round-trip persistence of seismogram delta as Timedelta. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + seis = _make_seismogram(session, ev, sta) + session.commit() + + session.refresh(seis) + assert isinstance(seis.delta, Timedelta) + assert seis.delta == Timedelta(seconds=0.05) + + +# =================================================================== +# Unique constraints +# =================================================================== + + +class TestUniqueConstraints: + """Verify that unique column constraints are enforced.""" + + def test_duplicate_event_time_rejected(self, session: Session) -> None: + """Verifies that creating two events with the same time raises an IntegrityError. + + Args: + session (Session): Database session. + """ + from sqlalchemy.exc import IntegrityError + + same_time = "2010-02-27T06:34:14" + _make_event(session, time=same_time) + session.commit() + + # Manually insert a second event with the same time (bypass helper flush). 
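+        # Flushing inside the pytest.raises block (instead of committing)
+        # surfaces the IntegrityError at a deterministic point in the test.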
+        ev2 = AimbatEvent(
+            time=Timestamp(same_time, tz=timezone.utc),
+            latitude=0.0,
+            longitude=0.0,
+        )
+        session.add(ev2)
+        with pytest.raises(IntegrityError):
+            session.flush()
+
+    def test_different_event_times_allowed(self, session: Session) -> None:
+        """Verifies that events with different times are allowed.
+
+        Args:
+            session (Session): Database session.
+        """
+        _make_event(session, time="2010-01-01T00:00:00")
+        _make_event(session, time="2011-01-01T00:00:00")
+        session.commit()
+
+        events = session.exec(select(AimbatEvent)).all()
+        assert len(events) == 2
diff --git a/tests/integration/test_project.py b/tests/integration/test_project.py
new file mode 100644
index 00000000..43f63f68
--- /dev/null
+++ b/tests/integration/test_project.py
@@ -0,0 +1,131 @@
+"""Tests that use a real file-based database.
+
+These verify that project creation and deletion work as expected.
+"""
+
+import pytest
+from pathlib import Path
+from aimbat.core import create_project, delete_project
+from aimbat.core._project import _project_exists, print_project_info
+from collections.abc import Generator
+from sqlalchemy import Engine
+
+
+class TestProjectLifecycle:
+    """Integration tests for core project management functions."""
+
+    @pytest.fixture
+    def engine(self, engine_from_file: Engine) -> Generator[Engine, None, None]:
+        yield engine_from_file
+
+    def test_create(self, engine: Engine, db_path: Path) -> None:
+        """Verifies that a new project can be created successfully.
+
+        This test ensures that `create_project` creates the database file and that `_project_exists`
+        correctly reflects the project's existence.
+
+        Args:
+            engine (Engine): The SQLAlchemy engine.
+            db_path (Path): The path to the expected project database file.
+        """
+        assert not db_path.exists(), "expected no project file at the start of the test"
+        assert (
+            _project_exists(engine) is False
+        ), "expected _project_exists() to return False at the start of the test"
+
+        create_project(engine)
+
+        assert (
+            db_path.exists()
+        ), "expected project file to be created after calling create_project()"
+        assert (
+            _project_exists(engine) is True
+        ), "expected _project_exists() to return True after creating project"
+
+    def test_create_if_one_exists(self, engine: Engine) -> None:
+        """Verifies that creating a project fails if one already exists.
+
+        Args:
+            engine (Engine): The SQLAlchemy engine.
+        """
+        assert not _project_exists(
+            engine
+        ), "expected no project at the start of the test"
+        create_project(engine)
+        assert _project_exists(
+            engine
+        ), "expected project to exist after calling create_project()"
+
+        with pytest.raises(RuntimeError):
+            create_project(engine)
+
+    def test_delete_project(self, engine: Engine) -> None:
+        """Verifies that an existing project can be deleted.
+
+        Args:
+            engine (Engine): The SQLAlchemy engine.
+        """
+        assert not _project_exists(
+            engine
+        ), "expected no project at the start of the test"
+        create_project(engine)
+        assert _project_exists(
+            engine
+        ), "expected project to exist after calling create_project()"
+
+        delete_project(engine)
+        assert not _project_exists(
+            engine
+        ), "expected no project after calling delete_project()"
+
+    def test_delete_project_when_there_is_none(self, engine: Engine) -> None:
+        """Verifies that attempting to delete a non-existent project raises an error.
+
+        Args:
+            engine (Engine): The SQLAlchemy engine.
+ """ + assert not _project_exists( + engine + ), "expected no project at the start of the test" + with pytest.raises(RuntimeError): + delete_project(engine) + + +class TestPrintProjectInfo: + """Tests for printing project summary information.""" + + def test_raises_when_no_project( + self, engine_from_file: Engine, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that a RuntimeError is raised when no project exists. + + Args: + engine_from_file: A SQLAlchemy Engine connected to an empty file database. + capsys: The pytest capsys fixture. + """ + with pytest.raises(RuntimeError): + print_project_info(engine_from_file) + + def test_with_empty_project( + self, patched_engine: Engine, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that output is produced for a project with no data or active event. + + Args: + patched_engine: The monkeypatched SQLAlchemy Engine. + capsys: The pytest capsys fixture. + """ + print_project_info(patched_engine) + assert len(capsys.readouterr().out) > 0 + + def test_with_data_and_active_event( + self, loaded_engine: Engine, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that output is produced for a project with data and an active event. + + Args: + loaded_engine: The monkeypatched SQLAlchemy Engine with data loaded. + capsys: The pytest capsys fixture. + """ + print_project_info(loaded_engine) + assert len(capsys.readouterr().out) > 0 diff --git a/tests/integration/test_seismogram.py b/tests/integration/test_seismogram.py new file mode 100644 index 00000000..2f802ed0 --- /dev/null +++ b/tests/integration/test_seismogram.py @@ -0,0 +1,416 @@ +"""Integration tests for seismogram management functions in aimbat.core._seismogram.""" + +import json +import uuid +import pytest +from aimbat.core._seismogram import ( + delete_seismogram, + delete_seismogram_by_id, + get_seismogram_parameter, + get_seismogram_parameter_by_id, + set_seismogram_parameter, + set_seismogram_parameter_by_id, + get_selected_seismograms, + dump_seismogram_table_to_json, + dump_seismogram_parameter_table_to_json, + print_seismogram_table, + print_seismogram_parameter_table, + plot_all_seismograms, +) +from aimbat.aimbat_types import SeismogramParameter +from aimbat.models import AimbatSeismogram +from matplotlib.figure import Figure +from pandas import Timestamp +from sqlmodel import Session, select +from sqlalchemy.exc import NoResultFound + + +@pytest.fixture +def session(loaded_session: Session) -> Session: + """Provides a session with multi-event data and an active event pre-loaded. + + Args: + loaded_session: A SQLModel Session with data populated. + + Returns: + The database session. + """ + return loaded_session + + +@pytest.fixture +def seismogram(session: Session) -> AimbatSeismogram: + """Provides the first seismogram from the active event. + + Args: + session: The database session. + + Returns: + An AimbatSeismogram from the active event. + """ + return session.exec(select(AimbatSeismogram)).first() # type: ignore[return-value] + + +class TestDeleteSeismogram: + """Tests for deleting seismograms from the database.""" + + def test_delete_seismogram( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that a seismogram is removed from the database after deletion. + + Args: + session: The database session. + seismogram: An AimbatSeismogram to delete. 
+ """ + count_before = len(session.exec(select(AimbatSeismogram)).all()) + delete_seismogram(session, seismogram) + assert len(session.exec(select(AimbatSeismogram)).all()) == count_before - 1 + + def test_delete_seismogram_by_id( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that a seismogram is removed from the database when deleted by ID. + + Args: + session: The database session. + seismogram: An AimbatSeismogram whose ID is used for deletion. + """ + count_before = len(session.exec(select(AimbatSeismogram)).all()) + delete_seismogram_by_id(session, seismogram.id) + assert len(session.exec(select(AimbatSeismogram)).all()) == count_before - 1 + + def test_delete_seismogram_by_id_not_found(self, session: Session) -> None: + """Verifies that deleting a non-existent seismogram ID raises NoResultFound. + + Args: + session: The database session. + """ + with pytest.raises(NoResultFound): + delete_seismogram_by_id(session, uuid.uuid4()) + + +class TestGetSeismogramParameter: + """Tests for reading parameter values from a seismogram instance.""" + + def test_get_bool_parameter(self, seismogram: AimbatSeismogram) -> None: + """Verifies that a bool parameter is returned as a bool. + + Args: + seismogram: An AimbatSeismogram instance. + """ + value = get_seismogram_parameter(seismogram, SeismogramParameter.SELECT) + assert isinstance(value, bool) + + def test_get_timestamp_parameter_default_none( + self, seismogram: AimbatSeismogram + ) -> None: + """Verifies that the t1 parameter returns None when not set. + + Args: + seismogram: An AimbatSeismogram instance. + """ + value = get_seismogram_parameter(seismogram, SeismogramParameter.T1) + assert value is None + + def test_get_timestamp_parameter_after_set( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that t1 is returned as a Timestamp after being set. + + Args: + session: The database session. + seismogram: An AimbatSeismogram instance. + """ + t1 = seismogram.t0 + set_seismogram_parameter(session, seismogram, SeismogramParameter.T1, t1) + value = get_seismogram_parameter(seismogram, SeismogramParameter.T1) + assert isinstance(value, Timestamp) + + +class TestGetSeismogramParameterById: + """Tests for reading parameter values from a seismogram by ID.""" + + def test_get_by_id(self, session: Session, seismogram: AimbatSeismogram) -> None: + """Verifies that a bool parameter is returned correctly when looked up by ID. + + Args: + session: The database session. + seismogram: An AimbatSeismogram whose ID is used for lookup. + """ + value = get_seismogram_parameter_by_id( + session, seismogram.id, SeismogramParameter.SELECT + ) + assert isinstance(value, bool) + + def test_get_by_id_not_found(self, session: Session) -> None: + """Verifies that a ValueError is raised for an unknown seismogram ID. + + Args: + session: The database session. + """ + with pytest.raises(ValueError): + get_seismogram_parameter_by_id( + session, uuid.uuid4(), SeismogramParameter.SELECT + ) + + +class TestSetSeismogramParameter: + """Tests for writing parameter values to a seismogram instance.""" + + def test_set_bool_parameter( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that a bool parameter is persisted correctly. + + Args: + session: The database session. + seismogram: An AimbatSeismogram instance. 
+ """ + original = get_seismogram_parameter(seismogram, SeismogramParameter.SELECT) + set_seismogram_parameter( + session, seismogram, SeismogramParameter.SELECT, not original + ) + assert ( + get_seismogram_parameter(seismogram, SeismogramParameter.SELECT) + is not original + ) + + def test_set_timestamp_parameter( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that a Timestamp parameter is persisted correctly. + + Args: + session: The database session. + seismogram: An AimbatSeismogram instance. + """ + t1 = seismogram.t0 + set_seismogram_parameter(session, seismogram, SeismogramParameter.T1, t1) + assert get_seismogram_parameter(seismogram, SeismogramParameter.T1) == t1 + + +class TestSetSeismogramParameterById: + """Tests for writing parameter values to a seismogram by ID.""" + + def test_set_by_id(self, session: Session, seismogram: AimbatSeismogram) -> None: + """Verifies that a bool parameter is persisted when set by seismogram ID. + + Args: + session: The database session. + seismogram: An AimbatSeismogram whose ID is used for lookup. + """ + set_seismogram_parameter_by_id( + session, seismogram.id, SeismogramParameter.FLIP, True + ) + assert get_seismogram_parameter(seismogram, SeismogramParameter.FLIP) is True + + def test_set_by_id_not_found(self, session: Session) -> None: + """Verifies that a ValueError is raised for an unknown seismogram ID. + + Args: + session: The database session. + """ + with pytest.raises(ValueError): + set_seismogram_parameter_by_id( + session, uuid.uuid4(), SeismogramParameter.FLIP, True + ) + + +class TestGetSelectedSeismograms: + """Tests for retrieving selected seismograms.""" + + def test_all_selected_by_default(self, session: Session) -> None: + """Verifies that all seismograms in the active event are selected by default. + + Args: + session: The database session. + """ + selected = get_selected_seismograms(session) + assert len(selected) > 0 + + def test_after_deselecting_one( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that deselecting a seismogram removes it from the selected set. + + Args: + session: The database session. + seismogram: An AimbatSeismogram to deselect. + """ + count_before = len(get_selected_seismograms(session)) + set_seismogram_parameter(session, seismogram, SeismogramParameter.SELECT, False) + assert len(get_selected_seismograms(session)) == count_before - 1 + + def test_all_events(self, session: Session) -> None: + """Verifies that get_selected_seismograms returns seismograms across all events. + + Args: + session: The database session. + """ + selected_active = get_selected_seismograms(session, all_events=False) + selected_all = get_selected_seismograms(session, all_events=True) + assert len(selected_all) >= len(selected_active) + + +class TestDumpSeismogramTableToJson: + """Tests for serialising the seismogram table to JSON.""" + + def test_returns_json_string(self, session: Session) -> None: + """Verifies that the seismogram table is returned as a valid JSON string. + + Args: + session: The database session. + """ + result = dump_seismogram_table_to_json(session) + assert isinstance(result, str) + parsed = json.loads(result) + assert isinstance(parsed, list) + assert len(parsed) > 0 + + +class TestDumpSeismogramParameterTableToJson: + """Tests for serialising the seismogram parameter table to JSON.""" + + def test_active_event_as_string(self, session: Session) -> None: + """Verifies that a JSON string of the active event's parameters is returned. 
+
+        Args:
+            session: The database session.
+        """
+        result = dump_seismogram_parameter_table_to_json(
+            session, all_events=False, as_string=True
+        )
+        assert isinstance(result, str)
+        parsed = json.loads(result)
+        assert isinstance(parsed, list)
+        assert len(parsed) > 0
+
+    def test_active_event_as_list(self, session: Session) -> None:
+        """Verifies that a list of dicts of the active event's parameters is returned.
+
+        Args:
+            session: The database session.
+        """
+        result = dump_seismogram_parameter_table_to_json(
+            session, all_events=False, as_string=False
+        )
+        assert isinstance(result, list)
+        assert len(result) > 0
+        assert "select" in result[0]
+
+    def test_all_events_as_string(self, session: Session) -> None:
+        """Verifies that a JSON string of all events' parameters is returned.
+
+        Args:
+            session: The database session.
+        """
+        result = dump_seismogram_parameter_table_to_json(
+            session, all_events=True, as_string=True
+        )
+        assert isinstance(result, str)
+        parsed = json.loads(result)
+        assert isinstance(parsed, list)
+        assert len(parsed) > 0
+
+    def test_all_events_as_list(self, session: Session) -> None:
+        """Verifies that a list of dicts of all events' parameters is returned.
+
+        Args:
+            session: The database session.
+        """
+        result = dump_seismogram_parameter_table_to_json(
+            session, all_events=True, as_string=False
+        )
+        assert isinstance(result, list)
+        assert len(result) > 0
+        assert "select" in result[0]
+
+    def test_all_events_returns_more_than_active_only(self, session: Session) -> None:
+        """Verifies that all_events=True returns at least as many rows as the active event alone.
+
+        Args:
+            session: The database session.
+        """
+        active_only = dump_seismogram_parameter_table_to_json(
+            session, all_events=False, as_string=False
+        )
+        all_events = dump_seismogram_parameter_table_to_json(
+            session, all_events=True, as_string=False
+        )
+        assert len(all_events) >= len(active_only)
+
+
+class TestPrintSeismogramTable:
+    """Tests for printing the seismogram table."""
+
+    def test_active_event_short(
+        self, session: Session, capsys: pytest.CaptureFixture
+    ) -> None:
+        """Verifies that output is produced for the active event with short=True.
+
+        Args:
+            session: The database session.
+            capsys: The pytest capsys fixture.
+        """
+        print_seismogram_table(session, short=True, all_events=False)
+        assert len(capsys.readouterr().out) > 0
+
+    def test_active_event_long(
+        self, session: Session, capsys: pytest.CaptureFixture
+    ) -> None:
+        """Verifies that output is produced for the active event with short=False.
+
+        Args:
+            session: The database session.
+            capsys: The pytest capsys fixture.
+        """
+        print_seismogram_table(session, short=False, all_events=False)
+        assert len(capsys.readouterr().out) > 0
+
+    def test_all_events(self, session: Session, capsys: pytest.CaptureFixture) -> None:
+        """Verifies that output is produced when printing seismograms for all events.
+
+        Args:
+            session: The database session.
+            capsys: The pytest capsys fixture.
+        """
+        print_seismogram_table(session, short=False, all_events=True)
+        assert len(capsys.readouterr().out) > 0
+
+
+class TestPrintSeismogramParameterTable:
+    """Tests for printing the seismogram parameter table."""
+
+    def test_print_short(self, session: Session, capsys: pytest.CaptureFixture) -> None:
+        """Verifies that output is produced with short=True.
+
+        Args:
+            session: The database session.
+            capsys: The pytest capsys fixture.
+ """ + print_seismogram_parameter_table(session, short=True) + assert len(capsys.readouterr().out) > 0 + + def test_print_long(self, session: Session, capsys: pytest.CaptureFixture) -> None: + """Verifies that output is produced with short=False. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_seismogram_parameter_table(session, short=False) + assert len(capsys.readouterr().out) > 0 + + +class TestPlotAllSeismograms: + """Tests for plotting seismograms.""" + + def test_returns_figure(self, session: Session) -> None: + """Verifies that plot_all_seismograms returns a matplotlib Figure. + + Args: + session: The database session. + """ + fig = plot_all_seismograms(session) + assert isinstance(fig, Figure) diff --git a/tests/integration/test_snapshots.py b/tests/integration/test_snapshots.py new file mode 100644 index 00000000..8605bad7 --- /dev/null +++ b/tests/integration/test_snapshots.py @@ -0,0 +1,389 @@ +"""Integration tests for snapshot management functions in aimbat.core._snapshot.""" + +import json +import uuid +import pytest +from aimbat.core._snapshot import ( + create_snapshot, + delete_snapshot, + delete_snapshot_by_id, + get_snapshots, + rollback_to_snapshot, + rollback_to_snapshot_by_id, + dump_snapshot_tables_to_json, + print_snapshot_table, +) +from aimbat.core import get_active_event +from aimbat.models import AimbatSnapshot, AimbatSeismogram +from sqlmodel import Session, select + + +@pytest.fixture +def session(loaded_session: Session) -> Session: + """Provides a session with multi-event data and an active event pre-loaded. + + Args: + loaded_session: A SQLModel Session with data populated. + + Returns: + The database session. + """ + return loaded_session + + +@pytest.fixture +def snapshot(session: Session) -> AimbatSnapshot: + """Provides a snapshot of the active event's current parameters. + + Args: + session: The database session. + + Returns: + An AimbatSnapshot for the active event. + """ + create_snapshot(session) + return session.exec(select(AimbatSnapshot)).one() + + +class TestCreateSnapshot: + """Tests for creating parameter snapshots.""" + + def test_creates_snapshot(self, session: Session) -> None: + """Verifies that a snapshot is written to the database. + + Args: + session: The database session. + """ + assert len(session.exec(select(AimbatSnapshot)).all()) == 0 + create_snapshot(session) + assert len(session.exec(select(AimbatSnapshot)).all()) == 1 + + def test_snapshot_linked_to_active_event(self, session: Session) -> None: + """Verifies that the snapshot is associated with the active event. + + Args: + session: The database session. + """ + active_event = get_active_event(session) + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + assert snapshot.event_id == active_event.id + + def test_snapshot_with_comment(self, session: Session) -> None: + """Verifies that the optional comment is stored on the snapshot. + + Args: + session: The database session. + """ + create_snapshot(session, comment="test comment") + snapshot = session.exec(select(AimbatSnapshot)).one() + assert snapshot.comment == "test comment" + + def test_snapshot_without_comment(self, session: Session) -> None: + """Verifies that the comment defaults to None when not provided. + + Args: + session: The database session. 
+ """ + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + assert snapshot.comment is None + + def test_snapshot_captures_seismogram_parameters(self, session: Session) -> None: + """Verifies that the snapshot includes one entry per seismogram. + + Args: + session: The database session. + """ + active_event = get_active_event(session) + n_seismograms = len(active_event.seismograms) + + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + assert len(snapshot.seismogram_parameters_snapshots) == n_seismograms + + def test_snapshot_captures_event_parameters( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that the snapshot includes event parameters. + + Args: + session: The database session. + snapshot: An AimbatSnapshot for the active event. + """ + active_event = get_active_event(session) + assert ( + snapshot.event_parameters_snapshot.parameters_id + == active_event.parameters.id + ) + + +class TestDeleteSnapshot: + """Tests for deleting snapshots.""" + + def test_delete_snapshot(self, session: Session, snapshot: AimbatSnapshot) -> None: + """Verifies that a snapshot is removed from the database. + + Args: + session: The database session. + snapshot: An AimbatSnapshot to delete. + """ + delete_snapshot(session, snapshot) + assert len(session.exec(select(AimbatSnapshot)).all()) == 0 + + def test_delete_snapshot_by_id( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that a snapshot is removed when deleted by ID. + + Args: + session: The database session. + snapshot: An AimbatSnapshot whose ID is used for deletion. + """ + delete_snapshot_by_id(session, snapshot.id) + assert session.get(AimbatSnapshot, snapshot.id) is None + + def test_delete_snapshot_by_id_not_found(self, session: Session) -> None: + """Verifies that deleting a non-existent snapshot ID raises ValueError. + + Args: + session: The database session. + """ + with pytest.raises(ValueError): + delete_snapshot_by_id(session, uuid.uuid4()) + + +class TestRollbackToSnapshot: + """Tests for rolling back parameters to a snapshot.""" + + def test_rollback_restores_event_parameters( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that event parameters are restored to snapshot values on rollback. + + Args: + session: The database session. + snapshot: An AimbatSnapshot capturing the original parameters. + """ + active_event = get_active_event(session) + original_min_ccnorm = snapshot.event_parameters_snapshot.min_ccnorm + + # Mutate the parameter after taking the snapshot + active_event.parameters.min_ccnorm = 0.0 + session.add(active_event) + session.commit() + assert active_event.parameters.min_ccnorm == 0.0 + + rollback_to_snapshot(session, snapshot) + session.refresh(active_event) + assert active_event.parameters.min_ccnorm == original_min_ccnorm + + def test_rollback_restores_seismogram_parameters( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that seismogram parameters are restored to snapshot values on rollback. + + Args: + session: The database session. + snapshot: An AimbatSnapshot capturing the original parameters. 
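+
+        Note:
+            The snapshot fixture captures parameters before the mutation
+            below, so a successful rollback restores the original value of
+            the select flag.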
+ """ + active_event = get_active_event(session) + seismogram = active_event.seismograms[0] + original_select = snapshot.seismogram_parameters_snapshots[0].select + + # Mutate the parameter after taking the snapshot + seismogram.parameters.select = not original_select + session.add(seismogram) + session.commit() + + rollback_to_snapshot(session, snapshot) + session.refresh(seismogram) + assert seismogram.parameters.select == original_select + + def test_rollback_by_id(self, session: Session, snapshot: AimbatSnapshot) -> None: + """Verifies that rollback_to_snapshot_by_id produces the same result as rollback_to_snapshot. + + Args: + session: The database session. + snapshot: An AimbatSnapshot to roll back to. + """ + active_event = get_active_event(session) + original_min_ccnorm = snapshot.event_parameters_snapshot.min_ccnorm + + active_event.parameters.min_ccnorm = 0.0 + session.add(active_event) + session.commit() + + rollback_to_snapshot_by_id(session, snapshot.id) + session.refresh(active_event) + assert active_event.parameters.min_ccnorm == original_min_ccnorm + + def test_rollback_by_id_not_found(self, session: Session) -> None: + """Verifies that rolling back to a non-existent snapshot ID raises ValueError. + + Args: + session: The database session. + """ + with pytest.raises(ValueError): + rollback_to_snapshot_by_id(session, uuid.uuid4()) + + +class TestGetSnapshots: + """Tests for retrieving snapshots from the database.""" + + def test_no_snapshots_initially(self, session: Session) -> None: + """Verifies that no snapshots exist before any are created. + + Args: + session: The database session. + """ + assert len(get_snapshots(session)) == 0 + + def test_get_snapshots_for_active_event( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that snapshots for the active event are returned. + + Args: + session: The database session. + snapshot: An AimbatSnapshot for the active event. + """ + snapshots = get_snapshots(session, all_events=False) + assert len(snapshots) == 1 + assert snapshots[0].id == snapshot.id + + def test_get_snapshots_all_events( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that get_snapshots with all_events=True includes all events. + + Args: + session: The database session. + snapshot: An AimbatSnapshot for the active event. + """ + all_snapshots = get_snapshots(session, all_events=True) + assert len(all_snapshots) >= 1 + + def test_multiple_snapshots(self, session: Session) -> None: + """Verifies that multiple snapshots can be created and retrieved. + + Args: + session: The database session. + """ + create_snapshot(session, comment="first") + create_snapshot(session, comment="second") + assert len(get_snapshots(session)) == 2 + + +class TestDumpSnapshotTablesToJson: + """Tests for serialising snapshot data to JSON.""" + + def test_as_string(self, session: Session, snapshot: AimbatSnapshot) -> None: + """Verifies that a JSON string is returned when as_string=True. + + Args: + session: The database session. + snapshot: An AimbatSnapshot to include in the dump. + """ + result = dump_snapshot_tables_to_json(session, all_events=False, as_string=True) + assert isinstance(result, str) + parsed = json.loads(result) + assert "snapshots" in parsed + assert "event_parameters" in parsed + assert "seismogram_parameters" in parsed + + def test_as_dict(self, session: Session, snapshot: AimbatSnapshot) -> None: + """Verifies that a dict is returned when as_string=False. + + Args: + session: The database session. 
+            snapshot: An AimbatSnapshot to include in the dump.
+        """
+        result = dump_snapshot_tables_to_json(
+            session, all_events=False, as_string=False
+        )
+        assert isinstance(result, dict)
+        assert "snapshots" in result
+        assert len(result["snapshots"]) == 1
+
+    def test_all_events_includes_more_snapshots(
+        self, session: Session, snapshot: AimbatSnapshot
+    ) -> None:
+        """Verifies that all_events=True returns at least as many snapshots as active only.
+
+        Args:
+            session: The database session.
+            snapshot: An AimbatSnapshot to include in the dump.
+        """
+        active_only = dump_snapshot_tables_to_json(
+            session, all_events=False, as_string=False
+        )
+        all_events = dump_snapshot_tables_to_json(
+            session, all_events=True, as_string=False
+        )
+        assert len(all_events["snapshots"]) >= len(active_only["snapshots"])
+
+    def test_seismogram_parameters_count(
+        self, session: Session, snapshot: AimbatSnapshot
+    ) -> None:
+        """Verifies that the seismogram_parameters count never exceeds the total seismogram count.
+
+        Args:
+            session: The database session.
+            snapshot: An AimbatSnapshot to include in the dump.
+        """
+        n_seismograms = len(session.exec(select(AimbatSeismogram)).all())
+        result = dump_snapshot_tables_to_json(session, all_events=True, as_string=False)
+        assert len(result["seismogram_parameters"]) <= n_seismograms
+
+
+class TestPrintSnapshotTable:
+    """Tests for printing the snapshot table."""
+
+    def test_print_active_event_short(
+        self,
+        session: Session,
+        snapshot: AimbatSnapshot,
+        capsys: pytest.CaptureFixture,
+    ) -> None:
+        """Verifies that output is produced for the active event with short=True.
+
+        Args:
+            session: The database session.
+            snapshot: An AimbatSnapshot to display.
+            capsys: The pytest capsys fixture.
+        """
+        print_snapshot_table(session, short=True, all_events=False)
+        assert len(capsys.readouterr().out) > 0
+
+    def test_print_active_event_long(
+        self,
+        session: Session,
+        snapshot: AimbatSnapshot,
+        capsys: pytest.CaptureFixture,
+    ) -> None:
+        """Verifies that output is produced for the active event with short=False.
+
+        Args:
+            session: The database session.
+            snapshot: An AimbatSnapshot to display.
+            capsys: The pytest capsys fixture.
+        """
+        print_snapshot_table(session, short=False, all_events=False)
+        assert len(capsys.readouterr().out) > 0
+
+    def test_print_all_events(
+        self,
+        session: Session,
+        snapshot: AimbatSnapshot,
+        capsys: pytest.CaptureFixture,
+    ) -> None:
+        """Verifies that output is produced when printing snapshots for all events.
+
+        Args:
+            session: The database session.
+            snapshot: An AimbatSnapshot to display.
+            capsys: The pytest capsys fixture.
+ """ + print_snapshot_table(session, short=False, all_events=True) + assert len(capsys.readouterr().out) > 0 diff --git a/tests/integration/test_station.py b/tests/integration/test_station.py new file mode 100644 index 00000000..74f69b9f --- /dev/null +++ b/tests/integration/test_station.py @@ -0,0 +1,342 @@ +"""Integration tests for station management functions in aimbat.core._station.""" + +import json +import uuid +import pytest +from sqlalchemy.exc import NoResultFound +from sqlmodel import Session, select + +from aimbat.core import get_active_event +from aimbat.core._station import ( + delete_station, + delete_station_by_id, + dump_station_table_to_json, + get_stations_in_active_event, + get_stations_in_event, + get_stations_with_event_seismogram_count, + print_station_table, +) +from aimbat.models import AimbatStation + + +@pytest.fixture +def session(loaded_session: Session) -> Session: + """Provides a session with multi-event data and an active event pre-loaded. + + Args: + loaded_session: A SQLModel Session with data populated. + + Returns: + The database session. + """ + return loaded_session + + +@pytest.fixture +def station(session: Session) -> AimbatStation: + """Provides the first station associated with the active event. + + Args: + session: The database session. + + Returns: + The first AimbatStation in the active event. + """ + active_event = get_active_event(session) + return active_event.seismograms[0].station + + +class TestDeleteStation: + """Tests for deleting stations from the database.""" + + def test_delete_station(self, session: Session, station: AimbatStation) -> None: + """Verifies that a station is removed from the database. + + Args: + session: The database session. + station: The station to delete. + """ + station_id = station.id + delete_station(session, station) + assert ( + session.get(AimbatStation, station_id) is None + ), "Station should be absent after deletion" + + def test_delete_station_by_id( + self, session: Session, station: AimbatStation + ) -> None: + """Verifies that a station is removed when deleted by ID. + + Args: + session: The database session. + station: The station whose ID is used for deletion. + """ + station_id = station.id + delete_station_by_id(session, station_id) + assert ( + session.get(AimbatStation, station_id) is None + ), "Station should be absent after deletion by ID" + + def test_delete_station_by_id_not_found(self, session: Session) -> None: + """Verifies that deleting a non-existent station ID raises NoResultFound. + + Args: + session: The database session. + """ + with pytest.raises(NoResultFound): + delete_station_by_id(session, uuid.uuid4()) + + +class TestGetStationsInActiveEvent: + """Tests for retrieving stations in the active event.""" + + def test_returns_stations(self, session: Session) -> None: + """Verifies that stations for the active event are returned. + + Args: + session: The database session. + """ + stations = get_stations_in_active_event(session, as_json=False) + assert len(stations) > 0, "Expected at least one station for the active event" + + def test_returns_aimbat_station_instances(self, session: Session) -> None: + """Verifies that all returned items are AimbatStation instances. + + Args: + session: The database session. 
+ """ + stations = get_stations_in_active_event(session, as_json=False) + assert all( + isinstance(s, AimbatStation) for s in stations + ), "All returned items should be AimbatStation instances" + + def test_as_json_returns_list_of_dicts(self, session: Session) -> None: + """Verifies that as_json=True returns a list of dicts. + + Args: + session: The database session. + """ + result = get_stations_in_active_event(session, as_json=True) + assert isinstance(result, list), "Expected a list when as_json=True" + assert all( + isinstance(item, dict) for item in result + ), "Each element should be a dict when as_json=True" + + def test_as_json_count_matches_objects(self, session: Session) -> None: + """Verifies that as_json=True and as_json=False return the same number of stations. + + Args: + session: The database session. + """ + objects = get_stations_in_active_event(session, as_json=False) + json_list = get_stations_in_active_event(session, as_json=True) + assert len(objects) == len( + json_list + ), "Object and JSON representations should have the same length" + + def test_stations_belong_to_active_event(self, session: Session) -> None: + """Verifies that the returned stations are associated with the active event. + + Args: + session: The database session. + """ + active_event = get_active_event(session) + active_station_ids = {s.station_id for s in active_event.seismograms} + stations = get_stations_in_active_event(session, as_json=False) + returned_ids = {s.id for s in stations} + assert ( + returned_ids == active_station_ids + ), "Returned station IDs should match those linked to the active event" + + +class TestGetStationsInEvent: + """Tests for retrieving stations in a specific event.""" + + def test_returns_stations_for_event(self, session: Session) -> None: + """Verifies that stations for the given event are returned. + + Args: + session: The database session. + """ + active_event = get_active_event(session) + stations = get_stations_in_event(session, active_event) + assert len(stations) > 0, "Expected at least one station for the given event" + + def test_returns_aimbat_station_instances(self, session: Session) -> None: + """Verifies that all returned items are AimbatStation instances. + + Args: + session: The database session. + """ + active_event = get_active_event(session) + stations = get_stations_in_event(session, active_event) + assert all( + isinstance(s, AimbatStation) for s in stations + ), "All returned items should be AimbatStation instances" + + def test_station_ids_match_event_seismograms(self, session: Session) -> None: + """Verifies that station IDs match those linked to the event's seismograms. + + Args: + session: The database session. + """ + active_event = get_active_event(session) + expected_ids = {s.station_id for s in active_event.seismograms} + returned_ids = {s.id for s in get_stations_in_event(session, active_event)} + assert ( + returned_ids == expected_ids + ), "Station IDs should match those linked to the event's seismograms" + + +class TestGetStationsWithEventSeismogramCount: + """Tests for retrieving stations with associated seismogram and event counts.""" + + def test_returns_all_stations(self, session: Session) -> None: + """Verifies that all stations in the database are returned. + + Args: + session: The database session. 
+ """ + all_stations = session.exec(select(AimbatStation)).all() + results = get_stations_with_event_seismogram_count(session, as_json=False) + assert len(results) == len( + all_stations + ), "Expected one row per station in the database" + + def test_returns_tuples_with_counts(self, session: Session) -> None: + """Verifies that each result is a tuple of (AimbatStation, int, int). + + Args: + session: The database session. + """ + results = get_stations_with_event_seismogram_count(session, as_json=False) + for row in results: + station, seismogram_count, event_count = row + assert isinstance( + station, AimbatStation + ), "First element should be an AimbatStation" + assert isinstance( + seismogram_count, int + ), "Second element should be an int (seismogram count)" + assert isinstance( + event_count, int + ), "Third element should be an int (event count)" + + def test_counts_are_non_negative(self, session: Session) -> None: + """Verifies that all seismogram and event counts are non-negative. + + Args: + session: The database session. + """ + results = get_stations_with_event_seismogram_count(session, as_json=False) + for _, seismogram_count, event_count in results: + assert seismogram_count >= 0, "Seismogram count should be non-negative" + assert event_count >= 0, "Event count should be non-negative" + + def test_as_json_returns_list_of_dicts(self, session: Session) -> None: + """Verifies that as_json=True returns a list of dicts with count fields. + + Args: + session: The database session. + """ + results = get_stations_with_event_seismogram_count(session, as_json=True) + assert isinstance(results, list), "Expected a list when as_json=True" + for item in results: + assert isinstance(item, dict), "Each element should be a dict" + assert "seismogram_count" in item, "Dict should contain 'seismogram_count'" + assert "event_count" in item, "Dict should contain 'event_count'" + + def test_as_json_count_matches_objects(self, session: Session) -> None: + """Verifies that both return modes yield the same number of rows. + + Args: + session: The database session. + """ + objects = get_stations_with_event_seismogram_count(session, as_json=False) + json_list = get_stations_with_event_seismogram_count(session, as_json=True) + assert len(objects) == len( + json_list + ), "Object and JSON representations should have the same number of rows" + + +class TestDumpStationTableToJson: + """Tests for dumping the full station table to JSON.""" + + def test_returns_valid_json_string(self, session: Session) -> None: + """Verifies that the result is a valid JSON string. + + Args: + session: The database session. + """ + result = dump_station_table_to_json(session) + assert isinstance(result, str), "Expected a string result" + parsed = json.loads(result) + assert isinstance(parsed, list), "Parsed JSON should be a list" + + def test_entry_count_matches_database(self, session: Session) -> None: + """Verifies that the JSON contains one entry per station in the database. + + Args: + session: The database session. + """ + all_stations = session.exec(select(AimbatStation)).all() + result = json.loads(dump_station_table_to_json(session)) + assert len(result) == len( + all_stations + ), "JSON entry count should match station count in the database" + + def test_entries_contain_id_field(self, session: Session) -> None: + """Verifies that each entry in the JSON has an 'id' field. + + Args: + session: The database session. 
+ """ + result = json.loads(dump_station_table_to_json(session)) + for entry in result: + assert "id" in entry, "Each station entry should have an 'id' field" + + +class TestPrintStationTable: + """Tests for printing the station table.""" + + def test_print_active_event_short( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that output is produced for the active event with short=True. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_station_table(session, short=True, all_events=False) + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output when printing station table (short, active event)" + + def test_print_active_event_long( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that output is produced for the active event with short=False. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_station_table(session, short=False, all_events=False) + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output when printing station table (long, active event)" + + def test_print_all_events( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that output is produced when printing stations for all events. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_station_table(session, short=False, all_events=True) + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output when printing station table for all events" diff --git a/tests/integration/test_uuid.py b/tests/integration/test_uuid.py new file mode 100644 index 00000000..f19cf312 --- /dev/null +++ b/tests/integration/test_uuid.py @@ -0,0 +1,207 @@ +"""Integration tests for aimbat.utils._uuid.""" + +import uuid +import pandas as pd +import pytest +from aimbat.models import AimbatEvent +from aimbat.utils._uuid import string_to_uuid, uuid_shortener +from sqlmodel import Session + + +def _make_event(uid: uuid.UUID, offset_seconds: int = 0) -> AimbatEvent: + """Helper to create an AimbatEvent with a specific UUID and time offset. + + Args: + uid: The UUID for the event. + offset_seconds: Time offset in seconds. + + Returns: + The created event. + """ + return AimbatEvent( + id=uid, + time=pd.Timestamp("2000-01-01") + pd.Timedelta(seconds=offset_seconds), + latitude=0.0, + longitude=0.0, + depth=0.0, + ) + + +class TestStringToUuid: + """Tests for the string_to_uuid function.""" + + def test_resolves_full_uuid(self, patched_session: Session) -> None: + """Verifies resolving a full UUID string. + + Args: + patched_session: The database session. + """ + uid = uuid.uuid4() + patched_session.add(_make_event(uid)) + patched_session.commit() + result = string_to_uuid(patched_session, str(uid), AimbatEvent) + assert result == uid + + def test_resolves_short_prefix(self, patched_session: Session) -> None: + """Verifies resolving a UUID from a short prefix. + + Args: + patched_session: The database session. + """ + uid = uuid.uuid4() + patched_session.add(_make_event(uid)) + patched_session.commit() + prefix = str(uid).replace("-", "")[:6] + result = string_to_uuid(patched_session, prefix, AimbatEvent) + assert result == uid + + def test_raises_on_no_match(self, patched_session: Session) -> None: + """Verifies that ValueError is raised when no match is found. + + Args: + patched_session: The database session. 
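+
+        Note:
+            No events are added in this test, so the lookup cannot match
+            any stored UUID and must raise.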
+ """ + with pytest.raises(ValueError, match="Unable to find"): + string_to_uuid(patched_session, "000000", AimbatEvent) + + def test_raises_on_ambiguous_match(self, patched_session: Session) -> None: + """Verifies that ValueError is raised when multiple matches are found. + + Args: + patched_session: The database session. + """ + uid1 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000001") + uid2 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000002") + patched_session.add(_make_event(uid1, offset_seconds=0)) + patched_session.add(_make_event(uid2, offset_seconds=1)) + patched_session.commit() + with pytest.raises(ValueError, match="more than one"): + string_to_uuid(patched_session, "aaaaaaaa", AimbatEvent) + + def test_custom_error_message(self, patched_session: Session) -> None: + """Verifies that a custom error message is used when provided. + + Args: + patched_session: The database session. + """ + with pytest.raises(ValueError, match="custom error"): + string_to_uuid( + patched_session, "000000", AimbatEvent, custom_error="custom error" + ) + + def test_ignores_dashes_in_input(self, patched_session: Session) -> None: + """Verifies that dashes in the input string are ignored. + + Args: + patched_session: The database session. + """ + uid = uuid.UUID("abcdef12-1234-4000-8000-000000000001") + patched_session.add(_make_event(uid)) + patched_session.commit() + result = string_to_uuid(patched_session, "abcdef12-1234", AimbatEvent) + assert result == uid + + +class TestUuidShortener: + """Tests for the uuid_shortener function.""" + + def test_returns_unique_prefix_for_single_entry( + self, patched_session: Session + ) -> None: + """Verifies getting a unique prefix for a single event. + + Args: + patched_session: The database session. + """ + uid = uuid.uuid4() + event = _make_event(uid) + patched_session.add(event) + patched_session.commit() + short = uuid_shortener(patched_session, event) + assert str(uid).startswith(short) + + def test_prefix_is_shortest_unique(self, patched_session: Session) -> None: + """Verifies that the returned prefix is the shortest possible unique prefix. + + Args: + patched_session: The database session. + """ + uid1 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000001") + uid2 = uuid.UUID("bbbbbbbb-0000-4000-8000-000000000002") + e1 = _make_event(uid1, offset_seconds=0) + e2 = _make_event(uid2, offset_seconds=1) + patched_session.add(e1) + patched_session.add(e2) + patched_session.commit() + short1 = uuid_shortener(patched_session, e1) + short2 = uuid_shortener(patched_session, e2) + assert str(uid1).startswith(short1), "prefix should match uid1" + assert str(uid2).startswith(short2), "prefix should match uid2" + assert not str(uid2).startswith(short1), "short1 should not match uid2" + assert not str(uid1).startswith(short2), "short2 should not match uid1" + + def test_disambiguates_shared_prefix(self, patched_session: Session) -> None: + """Verifies disambiguation when two UUIDs share a long common prefix. + + Args: + patched_session: The database session. 
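+
+        Note:
+            The two IDs below differ only in their final hex digit, so any
+            unique shortened form must extend through that last character.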
+ """ + uid1 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000001") + uid2 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000002") + e1 = _make_event(uid1, offset_seconds=0) + e2 = _make_event(uid2, offset_seconds=1) + patched_session.add(e1) + patched_session.add(e2) + patched_session.commit() + short1 = uuid_shortener(patched_session, e1) + short2 = uuid_shortener(patched_session, e2) + assert short1 != short2, "disambiguated prefixes should differ" + assert str(uid1).startswith(short1), "short1 should match uid1" + assert str(uid2).startswith(short2), "short2 should match uid2" + + def test_class_form_with_str_uuid(self, patched_session: Session) -> None: + """Verifies calling with a class and string UUID instead of a model instance. + + Args: + patched_session: The database session. + """ + uid = uuid.uuid4() + patched_session.add(_make_event(uid)) + patched_session.commit() + short = uuid_shortener(patched_session, AimbatEvent, str_uuid=str(uid)) + assert str(uid).startswith(short) + + def test_class_form_requires_str_uuid(self, patched_session: Session) -> None: + """Verifies that str_uuid is required when calling with a class. + + Args: + patched_session: The database session. + """ + with pytest.raises(ValueError, match="str_uuid must be provided"): + uuid_shortener(patched_session, AimbatEvent) + + def test_raises_if_id_not_in_table(self, patched_session: Session) -> None: + """Verifies that ValueError is raised if the UUID is not in the table. + + Args: + patched_session: The database session. + """ + uid = uuid.uuid4() + with pytest.raises(ValueError, match="not found in table"): + uuid_shortener(patched_session, AimbatEvent, str_uuid=str(uid)) + + def test_min_length_respected(self, patched_session: Session) -> None: + """Verifies that the minimum length constraint is respected. + + Args: + patched_session: The database session. 
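+
+        Note:
+            With a single event in the table even a one-character prefix is
+            already unique, so the min_length=4 argument is what guarantees
+            at least four characters in the result.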
+ """ + uid = uuid.uuid4() + patched_session.add(_make_event(uid)) + patched_session.commit() + event = patched_session.get(AimbatEvent, uid) + assert event is not None, "expected event to exist in database" + short = uuid_shortener(patched_session, event, min_length=4) + assert ( + len(short.replace("-", "")) >= 4 + ), "result should be at least 4 characters excluding dashes" diff --git a/tests/lib/test_lib_common.py b/tests/lib/test_lib_common.py deleted file mode 100644 index 7127216b..00000000 --- a/tests/lib/test_lib_common.py +++ /dev/null @@ -1,80 +0,0 @@ -from aimbat.models import AimbatStation -from sqlmodel import Session -from sqlalchemy import Engine -from collections.abc import Iterator -import pytest -import uuid - -UUID1 = uuid.UUID("11e6ca37-e03b-42b6-acc4-e9eaba5c1587") -UUID2 = uuid.UUID("12e6ca37-e03b-42b6-acc4-e9eaba5c1587") - - -class TestUuidFunctions: - @pytest.fixture - def session_with_stations( - self, fixture_engine_session_with_project: tuple[Engine, Session] - ) -> Iterator[Session]: - station_1 = AimbatStation( - id=UUID1, - name="TEST1", - network="TE", - channel="BHZ", - location="", - latitude=12, - longitude=12, - elevation=12, - ) - station_2 = AimbatStation( - id=UUID2, - name="TEST2", - network="TE", - channel="BHZ", - location="", - latitude=12, - longitude=12, - elevation=12, - ) - _, session = fixture_engine_session_with_project - session.add_all([station_1, station_2]) - session.commit() - yield session - - @pytest.mark.parametrize( - "uuid_str,expected", - [ - (str(UUID1)[:2], UUID1), - (str(UUID2)[:2], UUID2), - (str(UUID1), UUID1), - (str(UUID1)[:1], ValueError), - (str(UUID2)[:1], ValueError), - (str(uuid.uuid4()), ValueError), - ], - ) - def test_string_to_uuid( - self, - session_with_stations: Session, - uuid_str: str, - expected: uuid.UUID | Exception, - ) -> None: - from aimbat.utils import string_to_uuid - - if isinstance(expected, type) and issubclass(expected, Exception): - with pytest.raises(expected): - string_to_uuid(session_with_stations, uuid_str, AimbatStation) - else: - assert ( - string_to_uuid(session_with_stations, uuid_str, AimbatStation) - == expected - ) - - @pytest.mark.parametrize("test_uuid", [UUID1, UUID2]) - def test_uuid_shortener( - self, session_with_stations: Session, test_uuid: uuid.UUID - ) -> None: - from aimbat.utils import uuid_shortener - - aimbat_station = session_with_stations.get(AimbatStation, test_uuid) - assert aimbat_station is not None - assert ( - uuid_shortener(session_with_stations, aimbat_station) == str(test_uuid)[:2] - ) diff --git a/tests/test_data.py b/tests/test_data.py deleted file mode 100644 index ed6f5ba0..00000000 --- a/tests/test_data.py +++ /dev/null @@ -1,218 +0,0 @@ -from aimbat.app import app -from aimbat.aimbat_types import DataType -from aimbat.models import AimbatDataSource -from pysmo.classes import SAC -from sqlalchemy.exc import NoResultFound -from sqlmodel import select, Session -from sqlalchemy import Engine -from pathlib import Path -from pydantic import TypeAdapter -import aimbat.core._data as data -import pytest -import numpy as np -import json - - -class TestDataBase: - """Base class for testing the data module.""" - - -class TestDataAdd(TestDataBase): - def test_lib_add_sac_file_to_project( - self, - sac_file_good: Path, - fixture_engine_session_with_project: tuple[Engine, Session], - ) -> None: - engine, session = fixture_engine_session_with_project - - # do this 2 times to verify nothing changes - for _ in range(2): - data.add_files_to_project( - session, - 
[sac_file_good], - datatype=DataType.SAC, - ) - - seismogram_filename = session.exec( - select(AimbatDataSource.sourcename) - ).one() - assert seismogram_filename == str(sac_file_good) - - def test_cli_data_add( - self, - sac_file_good: Path, - fixture_engine_session_with_project: tuple[Engine, Session], - ) -> None: - sac_file_good_as_string = str(sac_file_good) - engine, session = fixture_engine_session_with_project - - with pytest.raises(SystemExit) as excinfo: - app(["data", "add", "--no-progress", sac_file_good_as_string]) - - assert excinfo.value.code == 0 - session.flush() - - seismogram_filename = session.exec(select(AimbatDataSource.sourcename)).one() - assert seismogram_filename == str(sac_file_good) - - -class TestDataTable(TestDataBase): - def test_lib_print_data_table_without_active_event( - self, - fixture_session_with_data: Session, - capsys: pytest.CaptureFixture, - ) -> None: - - session = fixture_session_with_data - # no event active - with pytest.raises(NoResultFound): - data.print_data_table(session, False) - - data.print_data_table(session, False, True) - captured = capsys.readouterr() - assert "AIMBAT data for all events" in captured.out - - @pytest.mark.parametrize( - "short, all_events, expected", - [ - (True, True, "AIMBAT data for all events"), - (True, False, "AIMBAT data for event 2011-09-15 19:31:04"), - (False, True, "AIMBAT data for all events"), - (True, False, "AIMBAT data for event 2011-09-15 19:31:04"), - ], - ) - def test_lib_print_data_table_with_active_event( - self, - fixture_engine_session_with_active_event: tuple[Engine, Session], - capsys: pytest.CaptureFixture, - short: bool, - all_events: bool, - expected: str, - ) -> None: - _, session = fixture_engine_session_with_active_event - data.print_data_table(session, short, all_events) - captured = capsys.readouterr() - assert expected in captured.out - - @pytest.mark.parametrize( - "cli_args,expected", - [ - (["--all", "--no-short"], "AIMBAT data for all events"), - (["--no-short"], "AIMBAT data for event 2011-09-15 19:31:04.080000+00:00"), - (["--all"], "AIMBAT data for all events"), - ([], "AIMBAT data for event 2011-09-15 19:31:04"), - ], - ) - def test_cli_data_list( - self, - fixture_engine_session_with_active_event: tuple[Engine, Session], - capsys: pytest.CaptureFixture, - cli_args: list[str], - expected: str, - ) -> None: - cmd = ["data", "list"] - cmd.extend(cli_args) - with pytest.raises(SystemExit) as excinfo: - app(cmd) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert expected in captured.out - - -class TestDataDump(TestDataBase): - def test_lib_dump_data( - self, - fixture_session_with_data: Session, - ) -> None: - json_data = data.dump_data_table_to_json(fixture_session_with_data) - adapter = TypeAdapter(list[AimbatDataSource]) - adapter.validate_json(json_data) - - def test_cli_dump_data( - self, - fixture_session_with_data: Session, - capsys: pytest.CaptureFixture, - ) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["data", "dump"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - loaded_json = json.loads(captured.out) - assert isinstance(loaded_json, list) - assert len(loaded_json) > 0 - for i in loaded_json: - _ = AimbatDataSource(**i) - - -class TestDataCompare(TestDataBase): - def test_compare_aimbat_seis_to_sac_seis( - self, - sac_file_good: Path, - sac_instance_good: SAC, - fixture_engine_session_with_project: tuple[Engine, Session], - ) -> None: - from aimbat.models import AimbatSeismogram - - _, session = 
fixture_engine_session_with_project - - data.add_files_to_project( - session, - [sac_file_good], - datatype=DataType.SAC, - ) - - sac_seismogram = sac_instance_good.seismogram - aimbat_seismogram = session.exec(select(AimbatSeismogram)).one() - - assert np.array_equal(aimbat_seismogram.data, sac_seismogram.data) - assert aimbat_seismogram.delta == sac_seismogram.delta - assert ( - pytest.approx(aimbat_seismogram.begin_time.timestamp()) - == sac_seismogram.begin_time.timestamp() - ) - assert len(aimbat_seismogram) == len(sac_seismogram) - - def test_compare_aimbat_station_to_sac_station( - self, - sac_file_good: Path, - sac_instance_good: SAC, - fixture_engine_session_with_project: tuple[Engine, Session], - ) -> None: - from aimbat.models import AimbatStation, AimbatSeismogram - - _, session = fixture_engine_session_with_project - - data.add_files_to_project(session, [sac_file_good], datatype=DataType.SAC) - - sac_station = sac_instance_good.station - aimbat_seismogram = session.exec(select(AimbatSeismogram)).one() - aimbat_station = session.exec(select(AimbatStation)).one() - assert aimbat_seismogram.station == aimbat_station - assert aimbat_station.name == sac_instance_good.kstnm - assert aimbat_station.latitude == sac_station.latitude - assert aimbat_station.longitude == sac_station.longitude - assert aimbat_station.elevation == sac_station.elevation - - def test_compare_aimbat_event_to_sac_event( - self, - sac_file_good: Path, - sac_instance_good: SAC, - fixture_engine_session_with_project: tuple[Engine, Session], - ) -> None: - from aimbat.models import AimbatEvent, AimbatSeismogram - - _, session = fixture_engine_session_with_project - - data.add_files_to_project(session, [sac_file_good], datatype=DataType.SAC) - - sac_event = sac_instance_good.event - aimbat_seismogram = session.exec(select(AimbatSeismogram)).one() - aimbat_event = session.exec(select(AimbatEvent)).one() - assert aimbat_seismogram.event == aimbat_event - assert aimbat_event.latitude == sac_event.latitude - assert aimbat_event.longitude == sac_event.longitude - assert aimbat_event.depth == sac_event.depth diff --git a/tests/test_event.py b/tests/test_event.py deleted file mode 100644 index 70a38127..00000000 --- a/tests/test_event.py +++ /dev/null @@ -1,344 +0,0 @@ -from aimbat import settings -from aimbat.utils import get_active_event -from aimbat.models import AimbatEvent, AimbatStation, AimbatSeismogram -from aimbat.aimbat_types import EventParameter -from pydantic_core import ValidationError -from sqlmodel import Session, select -from sqlalchemy.exc import NoResultFound -from typing import Any -from collections.abc import Generator, Sequence -from pydantic import TypeAdapter -import aimbat.core._event as event -import pytest -import random -import json - - -class TestEventBase: - @pytest.fixture - def session( - self, fixture_session_with_data: Session - ) -> Generator[Session, Any, Any]: - yield fixture_session_with_data - - def get_random_station(self, session: Session) -> AimbatStation: - stations = session.exec(select(AimbatStation)).all() - return random.choice(stations) - - def get_random_event(self, session: Session) -> AimbatEvent: - events = session.exec(select(AimbatEvent)).all() - return random.choice(events) - - def activate_random_event(self, session: Session) -> AimbatEvent: - random_event = self.get_random_event(session) - event.set_active_event(session, random_event) - return random_event - - -class TestDeleteEvent(TestEventBase): - def test_lib_delete_event_by_id(self, session: Session) -> None: - 
aimbat_event = self.get_random_event(session) - id = aimbat_event.id - event.delete_event_by_id(session, id) - assert ( - session.exec(select(AimbatEvent).where(AimbatEvent.id == id)).one_or_none() - is None - ) - - def test_cli_delete_event_by_id(self, session: Session) -> None: - from aimbat.app import app - - aimbat_event = self.get_random_event(session) - id = aimbat_event.id - - with pytest.raises(SystemExit) as excinfo: - app(["event", "delete", str(id)]) - - assert excinfo.value.code == 0 - - session.flush() - assert ( - session.exec(select(AimbatEvent).where(AimbatEvent.id == id)).one_or_none() - is None - ) - - def test_cli_delete_event_by_id_with_wrong_id(self, session: Session) -> None: - from aimbat.app import app - from uuid import uuid4 - - id = uuid4() - - with pytest.raises(NoResultFound): - app(["event", "delete", str(id)]) - - def test_cli_delete_event_by_string(self, session: Session) -> None: - from aimbat.app import app - - aimbat_event = random.choice(list(session.exec(select(AimbatEvent)))) - id = aimbat_event.id - - with pytest.raises(SystemExit) as excinfo: - app(["event", "delete", str(id)[:5]]) - - assert excinfo.value.code == 0 - - session.flush() - assert ( - session.exec(select(AimbatEvent).where(AimbatEvent.id == id)).one_or_none() - is None - ) - - -class TestGetActiveEvent(TestEventBase): - def test_get_active_event_when_none_is_active(self, session: Session) -> None: - events = session.exec(select(AimbatEvent)).all() - assert all(e.active is None for e in events) - - with pytest.raises(NoResultFound): - get_active_event(session) - - -class TestSetActiveEvent(TestEventBase): - def test_lib_set_active_event(self, session: Session) -> None: - events = session.exec(select(AimbatEvent)).all() - assert all(e.active is None for e in events) - aimbat_event = random.choice(events) - - event.set_active_event(session, aimbat_event) - assert aimbat_event.active is True - - def test_lib_change_active_event(self, session: Session) -> None: - events = list(session.exec(select(AimbatEvent)).all()) - assert all(e.active is None for e in events) - random.shuffle(events) - - first_active_event = events.pop() - second_active_event = events.pop() - - event.set_active_event(session, first_active_event) - assert first_active_event.active is True - - event.set_active_event(session, second_active_event) - assert first_active_event.active is None - assert second_active_event.active is True - - def test_lib_set_active_event_by_id(self, session: Session) -> None: - import uuid - - events = list(session.exec(select(AimbatEvent)).all()) - assert all(e.active is None for e in events) - aimbat_event = random.choice(events) - - event.set_active_event_by_id(session, aimbat_event.id) - assert aimbat_event.active is True - - with pytest.raises(ValueError): - event.set_active_event_by_id(session, uuid.uuid4()) - - def test_cli_event_activate(self, session: Session) -> None: - from aimbat.app import app - - event = self.get_random_event(session) - assert event.active is None - - with pytest.raises(SystemExit) as excinfo: - app(["event", "activate", str(event.id)]) - - assert excinfo.value.code == 0 - - session.refresh(event) - assert event.active is True - - def test_cli_event_activate_with_str_id(self, session: Session) -> None: - from aimbat.app import app - - event = self.get_random_event(session) - assert event.active is None - short_uuid = str(event.id)[:6] - - with pytest.raises(SystemExit) as excinfo: - app(["event", "activate", short_uuid]) - - assert excinfo.value.code == 0 - - 
session.refresh(event) - assert event.active is True - - -class TestGetCompletedEvents(TestEventBase): - def test_get_completed_events(self, session: Session) -> None: - assert len(event.get_completed_events(session)) == 0 - events = list(session.exec(select(AimbatEvent)).all()) - aimbat_event = random.choice(events) - aimbat_event.parameters.completed = True - session.commit() - assert len(event.get_completed_events(session)) == 1 - assert event.get_completed_events(session)[0].id == aimbat_event.id - - -class TestGetEvents(TestEventBase): - @pytest.fixture - def all_events( - self, session: Session - ) -> Generator[Sequence[AimbatEvent], Any, Any]: - from aimbat.models import AimbatEvent - - yield session.exec(select(AimbatEvent)).all() - - @pytest.fixture - def all_seismograms( - self, session: Session - ) -> Generator[Sequence[AimbatSeismogram], Any, Any]: - from aimbat.models import AimbatSeismogram - - yield session.exec(select(AimbatSeismogram)).all() - - def test_lib_get_events_using_station( - self, session: Session, all_seismograms: Sequence[AimbatSeismogram] - ) -> None: - station = self.get_random_station(session) - - event_set1 = set( - s.event.id for s in all_seismograms if s.station.id == station.id - ) - event_set2 = set(e.id for e in event.get_events_using_station(session, station)) - - assert event_set1 == event_set2 - - -class TestGetEventParameter(TestEventBase): - def test_lib_get_event_parameter(self, session: Session) -> None: - aimbat_event = self.activate_random_event(session) - - assert ( - event.get_event_parameter(session, EventParameter.COMPLETED) - == aimbat_event.parameters.completed - ) - assert ( - event.get_event_parameter(session, EventParameter.MIN_CCNORM) - == aimbat_event.parameters.min_ccnorm - ) - assert ( - event.get_event_parameter(session, EventParameter.WINDOW_POST) - == aimbat_event.parameters.window_post - ) - assert ( - event.get_event_parameter(session, EventParameter.WINDOW_PRE) - == aimbat_event.parameters.window_pre - ) - - def test_lib_set_event_parameter(self, session: Session) -> None: - _ = self.activate_random_event(session) - - assert event.get_event_parameter(session, EventParameter.COMPLETED) is False - event.set_event_parameter(session, EventParameter.COMPLETED, True) - assert event.get_event_parameter(session, EventParameter.COMPLETED) is True - with pytest.raises(ValidationError): - event.set_event_parameter(session, EventParameter.COMPLETED, "foo") - - def test_lib_print_event_table( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - _ = self.activate_random_event(session) - - event.print_event_table(session, short=True) - captured = capsys.readouterr() - assert "AIMBAT Events" in captured.out - assert "2012-01-12 19:31:04" in captured.out - event.print_event_table(session, short=False) - captured = capsys.readouterr() - assert "AIMBAT Events" in captured.out - assert "2011-09-15 19:31:04.080000+00:00" in captured.out - - def test_cli_get_event_parameter( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - from aimbat.app import app - - _ = self.activate_random_event(session) - - with pytest.raises(SystemExit) as excinfo: - app(["event", "parameter", "get", "completed"]) - - assert excinfo.value.code == 0 - assert "False" in capsys.readouterr().out - - with pytest.raises(SystemExit) as excinfo: - app(["event", "parameter", "get", "window_post"]) - - assert excinfo.value.code == 0 - assert f"{settings.window_post.total_seconds()}s" in capsys.readouterr().out - - -class 
TestCliEvent(TestEventBase): - def test_cli_usage(self, capsys: pytest.CaptureFixture) -> None: - from aimbat.app import app - - with pytest.raises(SystemExit) as excinfo: - app(["event", "--help"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "Usage" in captured.out - - def test_cli_set_event_parameter( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - from aimbat.app import app - - _ = self.activate_random_event(session) - - with pytest.raises(SystemExit) as excinfo: - app(["event", "parameter", "get", "completed"]) - - assert excinfo.value.code == 0 - assert "False" in capsys.readouterr().out - - with pytest.raises(SystemExit) as excinfo: - app(["event", "parameter", "set", "completed", "True"]) - - assert excinfo.value.code == 0 - - with pytest.raises(SystemExit) as excinfo: - app(["event", "parameter", "get", "completed"]) - - assert excinfo.value.code == 0 - assert "True" in capsys.readouterr().out - - def test_cli_event_list( - self, - session: Session, - capsys: pytest.CaptureFixture, - ) -> None: - from aimbat.app import app - - with pytest.raises(SystemExit) as excinfo: - app(["event", "list"]) - - assert excinfo.value.code == 0 - assert "AIMBAT Events" in capsys.readouterr().out - - -class TestEventDump(TestEventBase): - def test_lib_dump_event(self, fixture_session_with_data: Session) -> None: - json_data = event.dump_event_table_to_json(fixture_session_with_data) - adapter = TypeAdapter(list[AimbatEvent]) - adapter.validate_json(json_data) - - def test_cli_dump_data( - self, fixture_session_with_data: Session, capsys: pytest.CaptureFixture - ) -> None: - from aimbat.app import app - - with pytest.raises(SystemExit) as excinfo: - app(["event", "dump"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - loaded_json = json.loads(captured.out) - assert isinstance(loaded_json, list) - assert len(loaded_json) > 0 - for i in loaded_json: - _ = AimbatEvent(**i) diff --git a/tests/test_iccs.py b/tests/test_iccs.py deleted file mode 100644 index 74b06d09..00000000 --- a/tests/test_iccs.py +++ /dev/null @@ -1,62 +0,0 @@ -from aimbat.models import AimbatSeismogram -from aimbat.aimbat_types import SeismogramParameter -from pysmo.tools.iccs import ICCSSeismogram -from sqlmodel import Session, select -from sqlalchemy import Engine -from pandas import Timedelta -from typing import Any -from collections.abc import Generator -import pytest -import random - - -class TestICCSBase: - @pytest.fixture - def random_aimbat_seismogram( - self, fixture_engine_session_with_active_event: tuple[Engine, Session] - ) -> Generator[AimbatSeismogram, Any, Any]: - from aimbat.models import AimbatSeismogram - - _, session = fixture_engine_session_with_active_event - - yield random.choice(list(session.exec(select(AimbatSeismogram)).all())) - - -class TestAimbatSeismogramIsICCSSeismogram(TestICCSBase): - def test_is_iccs_seismogram_instance( - self, random_aimbat_seismogram: AimbatSeismogram - ) -> None: - assert isinstance(random_aimbat_seismogram, ICCSSeismogram) - - @pytest.mark.parametrize( - "parameter, expected", - [ - (SeismogramParameter.SELECT, True), - (SeismogramParameter.FLIP, False), - (SeismogramParameter.T1, None), - ], - ) - def test_read_iccs_parameters( - self, - random_aimbat_seismogram: AimbatSeismogram, - parameter: SeismogramParameter, - expected: Any, - ) -> None: - assert getattr(random_aimbat_seismogram, parameter) == expected - - @pytest.mark.parametrize( - "parameter, new_value", - [ - (SeismogramParameter.SELECT, 
False), - (SeismogramParameter.FLIP, True), - (SeismogramParameter.T1, Timedelta(seconds=2)), - ], - ) - def test_write_iccs_parameters( - self, - random_aimbat_seismogram: AimbatSeismogram, - parameter: SeismogramParameter, - new_value: Any, - ) -> None: - setattr(random_aimbat_seismogram, parameter, new_value) - assert getattr(random_aimbat_seismogram, parameter) == new_value diff --git a/tests/test_io.py b/tests/test_io.py deleted file mode 100644 index 111b777b..00000000 --- a/tests/test_io.py +++ /dev/null @@ -1,78 +0,0 @@ -from aimbat.models import AimbatSeismogram -from aimbat.aimbat_types import DataType -from pysmo.classes import SAC, SacSeismogram -from pysmo import Seismogram -from sqlmodel import Session, select -from sqlalchemy import Engine -from sqlalchemy.exc import StatementError -from typing import Any -from collections.abc import Generator -from pathlib import Path -import aimbat.core._data as data -import numpy as np -import pytest - - -class TestSacBase: - @pytest.fixture - def aimbat_seismogram_from_sac( - self, - fixture_engine_session_with_project: tuple[Engine, Session], - sac_file_good: Path, - ) -> Generator[AimbatSeismogram, Any, Any]: - - _, session = fixture_engine_session_with_project - data.add_files_to_project(session, [sac_file_good], DataType.SAC) - aimbat_file = session.exec(select(AimbatSeismogram)).one() - yield aimbat_file - - @pytest.fixture - def sac_seismogram(self, sac_file_good: Path) -> Generator[SacSeismogram, Any, Any]: - sac_seismogram = SAC.from_file(sac_file_good).seismogram - yield sac_seismogram - - -class TestSacRead(TestSacBase): - def test_parameters_are_equal( - self, - sac_seismogram: SacSeismogram, - aimbat_seismogram_from_sac: AimbatSeismogram, - ) -> None: - assert isinstance(aimbat_seismogram_from_sac, Seismogram) - assert sac_seismogram.delta == aimbat_seismogram_from_sac.delta - assert ( - pytest.approx(sac_seismogram.begin_time.timestamp()) - == aimbat_seismogram_from_sac.begin_time.timestamp() - ) - assert ( - pytest.approx(sac_seismogram.end_time.timestamp()) - == aimbat_seismogram_from_sac.end_time.timestamp() - ) - assert len(sac_seismogram) == len(aimbat_seismogram_from_sac) - - -class TestSacWrite(TestSacBase): - def test_random_data( - self, - sac_file_good: Path, - aimbat_seismogram_from_sac: AimbatSeismogram, - ) -> None: - new_data = np.random.rand(len(aimbat_seismogram_from_sac)) - aimbat_seismogram_from_sac.data = new_data - assert np.allclose(new_data, aimbat_seismogram_from_sac.data) - sac_seismogram = SAC.from_file(sac_file_good).seismogram - assert np.allclose(sac_seismogram.data, aimbat_seismogram_from_sac.data) - - -class TestSacBadFile(TestSacBase): - def test_t0_missing( - self, - sac_file_good: Path, - fixture_engine_session_with_project: tuple[Engine, Session], - ) -> None: - _, session = fixture_engine_session_with_project - sac = SAC.from_file(sac_file_good) - sac.t0 = None - sac.write(sac_file_good) - with pytest.raises(StatementError): - data.add_files_to_project(session, [sac_file_good], DataType.SAC) diff --git a/tests/test_models.py b/tests/test_models.py deleted file mode 100644 index 83845e8a..00000000 --- a/tests/test_models.py +++ /dev/null @@ -1,54 +0,0 @@ -from aimbat.models import AimbatSeismogram -from aimbat.utils import get_active_event -from typing import Any -from collections.abc import Generator -from sqlmodel import Session -from sqlalchemy import Engine -import numpy as np -import pytest -import random - - -class TestModelsBase: - @pytest.fixture - def session( - self, 
fixture_engine_session_with_active_event: tuple[Engine, Session] - ) -> Generator[Session, Any, Any]: - session = fixture_engine_session_with_active_event[1] - yield session - - -class TestAimbatSeismogram(TestModelsBase): - @pytest.fixture - def random_seismogram( - self, session: Session - ) -> Generator[AimbatSeismogram, Any, Any]: - - yield random.choice(list(get_active_event(session).seismograms)) - - def test_lib_get_seismogram_data_with_no_datasource( - self, random_seismogram: AimbatSeismogram - ) -> None: - _ = random_seismogram.data - random_seismogram.datasource = None # type: ignore - - with pytest.raises(ValueError): - _ = random_seismogram.data - - def test_lib_set_seismogram_data_with_no_datasource( - self, random_seismogram: AimbatSeismogram - ) -> None: - _ = random_seismogram.data - random_seismogram.datasource = None # type: ignore - - with pytest.raises(ValueError): - random_seismogram.data = np.array([1, 2, 3]) - - def test_lib_get_seismogram_begin_time_with_zero_length_data( - self, - random_seismogram: AimbatSeismogram, - monkeypatch: pytest.MonkeyPatch, - ) -> None: - monkeypatch.setattr(random_seismogram, "data", np.array([], dtype=np.float32)) - - assert random_seismogram.begin_time == random_seismogram.end_time diff --git a/tests/test_project.py b/tests/test_project.py deleted file mode 100644 index 42c36651..00000000 --- a/tests/test_project.py +++ /dev/null @@ -1,135 +0,0 @@ -from sqlalchemy import Engine -from aimbat.app import app -from pathlib import Path -from sqlmodel import Session -import aimbat.core._project as project -import pytest - - -class TestProjectBase: - """Base class for project tests.""" - - -class TestProjectExists(TestProjectBase): - def test_lib_project_exists_if_false( - self, fixture_empty_db: tuple[Engine, Session] - ) -> None: - - engine, _ = fixture_empty_db - - assert project._project_exists(engine) is False - - def test_lib_project_exists_if_true( - self, fixture_empty_db: tuple[Engine, Session] - ) -> None: - engine, _ = fixture_empty_db - project.create_project(engine) - assert project._project_exists(engine) is True - - -class TestProjectCreate(TestProjectBase): - @pytest.mark.dependency(name="create_project") - def test_lib_create_project(self, fixture_empty_db: tuple[Engine, Session]) -> None: - engine, _ = fixture_empty_db - assert project._project_exists(engine) is False - project.create_project(engine) - assert project._project_exists(engine) is True - - def test_lib_create_project_when_one_exists( - self, fixture_empty_db: tuple[Engine, Session] - ) -> None: - engine, _ = fixture_empty_db - assert project._project_exists(engine) is False - project.create_project(engine) - assert project._project_exists(engine) is True - with pytest.raises(RuntimeError): - project.create_project(engine) - - def test_cli_create_project(self, fixture_empty_db: tuple[Engine, Session]) -> None: - engine, _ = fixture_empty_db - assert project._project_exists(engine) is False - with pytest.raises(SystemExit) as excinfo: - app(["project", "create"]) - assert excinfo.value.code == 0 - assert project._project_exists(engine) is True - - -class TestProjectDelete(TestProjectBase): - def test_lib_delete_project_file( - self, fixture_session_with_project_file: tuple[Engine, Session, Path] - ) -> None: - - engine, _, _ = fixture_session_with_project_file - - assert project._project_exists(engine) is True - - project.delete_project(engine) - assert project._project_exists(engine) is False - - def test_lib_delete_project( - self, 
fixture_engine_session_with_project: tuple[Engine, Session] - ) -> None: - engine, _ = fixture_engine_session_with_project - - assert project._project_exists(engine) is True - - project.delete_project(engine) - assert project._project_exists(engine) is False - - def test_cli_delete_project( - self, fixture_engine_session_with_project: tuple[Engine, Session] - ) -> None: - engine, _ = fixture_engine_session_with_project - assert project._project_exists(engine) is True - - with pytest.raises(SystemExit) as excinfo: - app(["project", "delete"]) - assert excinfo.value.code == 0 - assert project._project_exists(engine) is False - - -class TestProjectTable(TestProjectBase): - def test_lib_print_project_info_no_project( - self, fixture_empty_db: tuple[Engine, Session] - ) -> None: - engine, _ = fixture_empty_db - with pytest.raises(RuntimeError): - project.print_project_info(engine) - - def test_lib_print_project_info_with_empty_project( - self, - fixture_engine_session_with_project: tuple[Engine, Session], - capsys: pytest.CaptureFixture, - ) -> None: - engine, _ = fixture_engine_session_with_project - project.print_project_info(engine) - captured = capsys.readouterr() - assert "Project Info" in captured.out - assert "None" in captured.out - - def test_lib_print_project_info_with_active_event( - self, - fixture_engine_session_with_active_event: tuple[Engine, Session], - capsys: pytest.CaptureFixture, - ) -> None: - engine, _ = fixture_engine_session_with_active_event - project.print_project_info(engine) - captured = capsys.readouterr() - assert "Project Info" in captured.out - assert "(3/0)" in captured.out - - def test_cli_print_project_info_with_active_event( - self, - fixture_engine_session_with_active_event: tuple[Engine, Session], - capsys: pytest.CaptureFixture, - ) -> None: - engine, _ = fixture_engine_session_with_active_event - assert project._project_exists(engine) is True - - with pytest.raises(SystemExit) as excinfo: - app(["project", "info"]) - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "Project Info" in captured.out - assert "(3/0)" in captured.out diff --git a/tests/test_seismogram.py b/tests/test_seismogram.py deleted file mode 100644 index 84b5b67b..00000000 --- a/tests/test_seismogram.py +++ /dev/null @@ -1,360 +0,0 @@ -from aimbat.app import app -from aimbat.aimbat_types import SeismogramParameter -from aimbat.models import AimbatSeismogram -from sqlmodel import Session, select -from sqlalchemy import Engine -from typing import Any -from matplotlib.figure import Figure -from collections.abc import Generator -from pydantic import TypeAdapter -import aimbat.core._seismogram as seismogram -import pytest -import random -import json - - -class TestSeismogramBase: - @pytest.fixture(autouse=True) - def session( - self, fixture_engine_session_with_active_event: tuple[Engine, Session] - ) -> Generator[Session, Any, Any]: - session = fixture_engine_session_with_active_event[1] - yield session - - -class TestDeleteSeismogram(TestSeismogramBase): - def test_lib_delete_seismogram_by_id(self, session: Session) -> None: - aimbat_seismogram = random.choice(list(session.exec(select(AimbatSeismogram)))) - id = aimbat_seismogram.id - seismogram.delete_seismogram_by_id(session, id) - assert ( - session.exec( - select(AimbatSeismogram).where(AimbatSeismogram.id == id) - ).one_or_none() - is None - ) - - def test_cli_delete_seismogram_by_id(self, session: Session) -> None: - aimbat_seismogram = random.choice(list(session.exec(select(AimbatSeismogram)))) - id = 
aimbat_seismogram.id - - with pytest.raises(SystemExit) as excinfo: - app(["seismogram", "delete", str(id)]) - - assert excinfo.value.code == 0 - - session.flush() - assert ( - session.exec( - select(AimbatSeismogram).where(AimbatSeismogram.id == id) - ).one_or_none() - is None - ) - - def test_cli_delete_seismogram_by_id_with_wrong_id(self) -> None: - import uuid - - from aimbat import settings - - settings.log_level = "INFO" - - id = uuid.uuid4() - - with pytest.raises(SystemExit) as excinfo: - app(["seismogram", "delete", str(id)]) - - assert excinfo.value.code == 1 - - def test_cli_delete_seismogram_by_string(self, session: Session) -> None: - aimbat_seismogram = random.choice(list(session.exec(select(AimbatSeismogram)))) - id = aimbat_seismogram.id - - with pytest.raises(SystemExit) as excinfo: - app(["seismogram", "delete", str(id)[:5]]) - - assert excinfo.value.code == 0 - - session.flush() - assert ( - session.exec( - select(AimbatSeismogram).where(AimbatSeismogram.id == id) - ).one_or_none() - is None - ) - - -class TestGetSeismogramParameter(TestSeismogramBase): - @pytest.fixture - def random_seismogram( - self, session: Session - ) -> Generator[AimbatSeismogram, Any, Any]: - from aimbat.utils import get_active_event - - yield random.choice(list(get_active_event(session).seismograms)) - - @pytest.mark.parametrize( - "parameter, expected", - [ - (SeismogramParameter.SELECT, True), - (SeismogramParameter.FLIP, False), - (SeismogramParameter.T1, None), - ], - ) - def test_lib_get_seismogram_parameter( - self, - random_seismogram: AimbatSeismogram, - parameter: SeismogramParameter, - expected: Any, - ) -> None: - assert ( - seismogram.get_seismogram_parameter(random_seismogram, parameter) - == expected - ) - assert getattr(random_seismogram.parameters, parameter) == expected - - def test_lib_get_seismogram_parameter_by_id( - self, session: Session, random_seismogram: AimbatSeismogram - ) -> None: - import uuid - - assert ( - seismogram.get_seismogram_parameter_by_id( - session, random_seismogram.id, SeismogramParameter.SELECT - ) - is True - ) - - with pytest.raises(ValueError): - seismogram.get_seismogram_parameter_by_id( - session, uuid.uuid4(), SeismogramParameter.SELECT - ) - - def test_cli_get_seismogram_parameter_with_uuid( - self, random_seismogram: AimbatSeismogram, capsys: pytest.CaptureFixture - ) -> None: - with pytest.raises(SystemExit) as excinfo: - app( - [ - "seismogram", - "parameter", - "get", - str(random_seismogram.id), - SeismogramParameter.SELECT, - ] - ) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "True" in captured.out - - def test_cli_get_seismogram_parameter_with_string( - self, random_seismogram: AimbatSeismogram, capsys: pytest.CaptureFixture - ) -> None: - with pytest.raises(SystemExit) as excinfo: - app( - [ - "seismogram", - "parameter", - "get", - str(random_seismogram.id)[:6], - SeismogramParameter.SELECT, - ] - ) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "True" in captured.out - - -class TestSetSeismogramParameter(TestSeismogramBase): - @pytest.fixture - def random_seismogram( - self, session: Session - ) -> Generator[AimbatSeismogram, Any, Any]: - from aimbat.utils import get_active_event - - seismogram = random.choice(list(get_active_event(session).seismograms)) - assert seismogram.parameters.select is True - yield seismogram - - def test_lib_set_seismogram_parameter( - self, session: Session, random_seismogram: AimbatSeismogram - ) -> None: - 
seismogram.set_seismogram_parameter( - session, random_seismogram, SeismogramParameter.SELECT, False - ) - - assert ( - seismogram.get_seismogram_parameter( - random_seismogram, SeismogramParameter.SELECT - ) - is False - ) - - def test_lib_set_seismogram_parameter_by_id( - self, session: Session, random_seismogram: AimbatSeismogram - ) -> None: - import uuid - - seismogram.set_seismogram_parameter_by_id( - session, random_seismogram.id, SeismogramParameter.SELECT, False - ) - - assert ( - seismogram.get_seismogram_parameter( - random_seismogram, SeismogramParameter.SELECT - ) - is False - ) - - with pytest.raises(ValueError): - seismogram.set_seismogram_parameter_by_id( - session, uuid.uuid4(), SeismogramParameter.SELECT, False - ) - - def test_cli_set_seismogram_parameter_with_uuid( - self, random_seismogram: AimbatSeismogram, session: Session - ) -> None: - with pytest.raises(SystemExit) as excinfo: - app( - [ - "seismogram", - "parameter", - "set", - str(random_seismogram.id), - SeismogramParameter.SELECT, - "False", - ] - ) - - assert excinfo.value.code == 0 - - session.refresh(random_seismogram) - assert ( - seismogram.get_seismogram_parameter( - random_seismogram, SeismogramParameter.SELECT - ) - is False - ) - - def test_cli_set_seismogram_parameter_with_string( - self, random_seismogram: AimbatSeismogram, session: Session - ) -> None: - with pytest.raises(SystemExit) as excinfo: - app( - [ - "seismogram", - "parameter", - "set", - str(random_seismogram.id)[:6], - SeismogramParameter.SELECT, - "False", - ] - ) - - assert excinfo.value.code == 0 - - session.refresh(random_seismogram) - assert ( - seismogram.get_seismogram_parameter( - random_seismogram, SeismogramParameter.SELECT - ) - is False - ) - - -class TestGetAllSelectedSeismograms(TestSeismogramBase): - def test_lib_get_selected_seismograms_for_active_event( - self, session: Session - ) -> None: - assert len(seismogram.get_selected_seismograms(session)) == 13 - - def test_lib_get_selected_seismograms_for_all_events( - self, session: Session - ) -> None: - assert len(seismogram.get_selected_seismograms(session, all_events=True)) == 20 - - -class TestPrintSeismogramTable(TestSeismogramBase): - def test_lib_print_seismogram_table_no_short( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - seismogram.print_seismogram_table(session, short=False, all_events=False) - captured = capsys.readouterr() - assert "AIMBAT seismograms for event" in captured.out - assert "ID (shortened)" not in captured.out - - def test_lib_print_seismogram_table_short( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - seismogram.print_seismogram_table(session, short=True, all_events=False) - captured = capsys.readouterr() - assert "AIMBAT seismograms for event" in captured.out - assert "ID (shortened)" in captured.out - - def test_lib_print_seismogram_table_no_short_all_events( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - seismogram.print_seismogram_table(session, short=False, all_events=True) - captured = capsys.readouterr() - assert "AIMBAT seismograms for all events" in captured.out - assert "ID (shortened)" not in captured.out - - def test_lib_print_seismogram_table_short_all_events( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - seismogram.print_seismogram_table(session, short=True, all_events=True) - captured = capsys.readouterr() - assert "AIMBAT seismograms for all events" in captured.out - assert "ID (shortened)" in captured.out - - def 
test_cli_print_seismogram_table(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["seismogram", "list"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "AIMBAT seismograms for event" in captured.out - assert "ID (shortened)" in captured.out - - -class TestDumpSeismogram(TestSeismogramBase): - def test_lib_dump_data(self, session: Session) -> None: - json_data = seismogram.dump_seismogram_table_to_json(session) - type_adapter = TypeAdapter(list[AimbatSeismogram]) - type_adapter.validate_json(json_data) - - def test_cli_dump_data(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["seismogram", "dump"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - loaded_json = json.loads(captured.out) - assert isinstance(loaded_json, list) - assert len(loaded_json) > 0 - for i in loaded_json: - _ = AimbatSeismogram(**i) - - -class TestSeismogramPlot(TestSeismogramBase): - @pytest.mark.mpl_image_compare - def test_lib_plotseis_mpl(self, session: Session) -> Figure: - return seismogram.plot_all_seismograms(session) - - @pytest.mark.skip(reason="I con't know how to test QT yet.") - def test_lib_plotseis_qt( - self, - session: Session, - ) -> None: - _ = seismogram.plot_all_seismograms(session, use_qt=True) - - def test_cli_plotseis_mpl(self) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["plot", "data"]) - - assert excinfo.value.code == 0 diff --git a/tests/test_settings.py b/tests/test_settings.py deleted file mode 100644 index 7b93fc99..00000000 --- a/tests/test_settings.py +++ /dev/null @@ -1,51 +0,0 @@ -import pytest - - -class TestConfig: - @pytest.mark.parametrize( - "pretty, expected", - [(True, "AIMBAT project file location"), (False, 'AIMBAT_PROJECT="aimbat.db"')], - ) - def test_lib_print_defaults( - self, pretty: bool, expected: str, capsys: pytest.CaptureFixture - ) -> None: - from aimbat._config import print_settings_table - - print_settings_table(pretty) - output = capsys.readouterr().out - assert expected in output - - @pytest.mark.parametrize( - "pretty, expected", - [ - ("--pretty", "AIMBAT project file location"), - ("--no-pretty", 'AIMBAT_PROJECT="aimbat.db"'), - ], - ) - def test_cli_print_defaults( - self, pretty: str, expected: str, capsys: pytest.CaptureFixture - ) -> None: - from aimbat.app import app - - with pytest.raises(SystemExit) as excinfo: - app(["settings", pretty]) - - assert excinfo.value.code == 0 - - output = capsys.readouterr().out - assert expected in output - - @pytest.mark.parametrize("pretty", [True, False]) - def test_lib_print_defaults_without_env_prefix( - self, - pretty: bool, - monkeypatch: pytest.MonkeyPatch, - capsys: pytest.CaptureFixture, - ) -> None: - from aimbat._config import Settings, print_settings_table - - monkeypatch.delitem(Settings.model_config, "env_prefix") - - print_settings_table(pretty) - output = capsys.readouterr().out - assert "AIMBAT_" not in output diff --git a/tests/test_snapshot.py b/tests/test_snapshot.py deleted file mode 100644 index f617f6e1..00000000 --- a/tests/test_snapshot.py +++ /dev/null @@ -1,246 +0,0 @@ -from aimbat.app import app -from sqlmodel import Session -from sqlalchemy import Engine -from typing import Any -from collections.abc import Generator -import aimbat.core._snapshot as snapshot -import pytest - -RANDOM_COMMENT = "Random comment" - - -class TestSnapshotBase: - @pytest.fixture(autouse=True) - def session( - self, 
fixture_engine_session_with_active_event: tuple[Engine, Session] - ) -> Generator[Session, Any, Any]: - _, session = fixture_engine_session_with_active_event - yield session - - -class TestLibSnapshotGet(TestSnapshotBase): - def test_get_snapshots_when_there_are_none(self, session: Session) -> None: - assert snapshot.get_snapshots(session, all_events=True) == [] - - -class TestLibSnapshotCreate(TestSnapshotBase): - def test_create_snapshot(self, session: Session) -> None: - assert snapshot.get_snapshots(session) == [] - snapshot.create_snapshot(session) - snapshot.create_snapshot(session, comment=RANDOM_COMMENT) - test_snapshot1, test_snapshot2, *_ = snapshot.get_snapshots(session) - assert test_snapshot1.comment is None - assert test_snapshot2.comment == RANDOM_COMMENT - - -class TestLibSnapshotDelete(TestSnapshotBase): - def test_snapshot_delete(self, session: Session) -> None: - snapshot.create_snapshot(session) - snapshot.create_snapshot(session, comment=RANDOM_COMMENT) - test_snapshot1, test_snapshot2, *_ = snapshot.get_snapshots(session) - snapshot.delete_snapshot(session, test_snapshot1) - assert len(snapshot.get_snapshots(session)) == 1 - assert test_snapshot2 == snapshot.get_snapshots(session)[0] - - def test_delete_snapshot_by_id(self, session: Session) -> None: - snapshot.create_snapshot(session) - snapshot.create_snapshot(session, comment=RANDOM_COMMENT) - test_snapshot1, test_snapshot2, *_ = snapshot.get_snapshots(session) - snapshot.delete_snapshot_by_id(session, test_snapshot1.id) - assert len(snapshot.get_snapshots(session)) == 1 - assert test_snapshot2 == snapshot.get_snapshots(session)[0] - - def test_delete_snapshot_by_id_raises_with_random_id( - self, session: Session - ) -> None: - import uuid - - random_id = uuid.uuid4() - with pytest.raises(ValueError): - snapshot.delete_snapshot_by_id(session, random_id) - - -class TestLibSnapshotRollback(TestSnapshotBase): - def test_snapshot_rollback(self, session: Session) -> None: - from aimbat.utils import get_active_event - - active_event = get_active_event(session) - - assert active_event.parameters.completed is False - assert active_event.seismograms[0].parameters.select is True - - snapshot.create_snapshot(session) - - active_event.parameters.completed = True - active_event.seismograms[0].parameters.select = False - session.flush() - assert active_event.parameters.completed is True - assert active_event.seismograms[0].parameters.select is False - - test_snapshot, *_ = snapshot.get_snapshots(session) - snapshot.rollback_to_snapshot(session, test_snapshot) - assert active_event.parameters.completed is False - assert active_event.seismograms[0].parameters.select is True - - def test_rollback_to_snapshot_by_id(self, session: Session) -> None: - snapshot.create_snapshot(session) - test_snapshot, *_ = snapshot.get_snapshots(session) - snapshot.rollback_to_snapshot_by_id(session, test_snapshot.id) - - def test_rollback_to_snapshot_by_id_raises_with_random_id( - self, session: Session - ) -> None: - import uuid - - random_id = uuid.uuid4() - with pytest.raises(ValueError): - snapshot.rollback_to_snapshot_by_id(session, random_id) - - -class TestLibSnapshotTable(TestSnapshotBase): - @pytest.fixture(autouse=True) - def create_snapshots(self, session: Session) -> Generator[None, Any, Any]: - assert snapshot.get_snapshots(session) == [] - snapshot.create_snapshot(session) - snapshot.create_snapshot(session, RANDOM_COMMENT) - yield - - def test_snapshot_table_no_short( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: 
- snapshot.print_snapshot_table(session, short=False, all_events=False) - captured = capsys.readouterr() - assert RANDOM_COMMENT in captured.out - assert "AIMBAT snapshots for event" in captured.out - assert "ID (shortened)" not in captured.out - - def test_snapshot_table_short( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - snapshot.print_snapshot_table(session, short=True, all_events=False) - captured = capsys.readouterr() - assert RANDOM_COMMENT in captured.out - assert "AIMBAT snapshots for event" in captured.out - assert "ID (shortened)" in captured.out - - def test_snapshot_table_no_short_all_events( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - snapshot.print_snapshot_table(session, short=False, all_events=True) - captured = capsys.readouterr() - assert RANDOM_COMMENT in captured.out - assert "AIMBAT snapshots for all events" in captured.out - assert "ID (shortened)" not in captured.out - - def test_snapshot_table_short_all_events( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - snapshot.print_snapshot_table(session, short=True, all_events=True) - captured = capsys.readouterr() - assert RANDOM_COMMENT in captured.out - assert "AIMBAT snapshots for all events" in captured.out - assert "ID (shortened)" in captured.out - - -class TestCliSnapshotUsage(TestSnapshotBase): - def test_cli_usage(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "--help"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "Usage" in captured.out - - -class TestCliSnapshotCreate(TestSnapshotBase): - def test_create_snapshot(self, session: Session) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "create", RANDOM_COMMENT]) - - assert excinfo.value.code == 0 - - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 1 - assert all_snapshots[0].comment == RANDOM_COMMENT - - -class TestCliSnapshotRollbackAndDelete(TestSnapshotBase): - @pytest.fixture(autouse=True) - def create_snapshots(self, session: Session) -> Generator[None, Any, Any]: - assert snapshot.get_snapshots(session) == [] - snapshot.create_snapshot(session, RANDOM_COMMENT) - session.flush() - yield - - def test_delete_snapshot_with_uuid(self, session: Session) -> None: - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 1 - snapshot_id = all_snapshots[0].id - - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "delete", str(snapshot_id)]) - - assert excinfo.value.code == 0 - - session.flush() - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 0 - - def test_delete_snapshot_with_string(self, session: Session) -> None: - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 1 - snapshot_id = str(all_snapshots[0].id)[:8] - - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "delete", str(snapshot_id)]) - - assert excinfo.value.code == 0 - - session.flush() - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 0 - - def test_rollback_to_snapshot_with_uuid(self, session: Session) -> None: - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 1 - snapshot_id = all_snapshots[0].id - - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "rollback", str(snapshot_id)]) - - assert excinfo.value.code == 0 - - session.flush() - - def test_rollback_to_snapshot_with_string(self, session: 
Session) -> None: - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 1 - snapshot_id = str(all_snapshots[0].id)[:8] - - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "rollback", str(snapshot_id)]) - - assert excinfo.value.code == 0 - - session.flush() - - -class TestCliSnapshotTable(TestSnapshotBase): - @pytest.fixture(autouse=True) - def create_snapshots(self, session: Session) -> Generator[None, Any, Any]: - assert snapshot.get_snapshots(session) == [] - snapshot.create_snapshot(session) - snapshot.create_snapshot(session, RANDOM_COMMENT) - yield - - def test_snapshot_table_no_format(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "list"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert RANDOM_COMMENT in captured.out - assert "AIMBAT snapshots for event" in captured.out - assert "ID (shortened)" in captured.out diff --git a/tests/test_station.py b/tests/test_station.py deleted file mode 100644 index 33e8e7fa..00000000 --- a/tests/test_station.py +++ /dev/null @@ -1,139 +0,0 @@ -from aimbat.models import AimbatStation -from aimbat.app import app -from sqlmodel import Session, select -from sqlalchemy import Engine -from typing import Any -from collections.abc import Generator -from pydantic import TypeAdapter -import aimbat.core._station as station -import random -import pytest -import json - - -class TestStationBase: - @pytest.fixture(autouse=True) - def session( - self, fixture_engine_session_with_active_event: tuple[Engine, Session] - ) -> Generator[Session, Any, Any]: - session = fixture_engine_session_with_active_event[1] - yield session - - -class TestDeleteStation(TestStationBase): - def test_lib_delete_station_by_id(self, session: Session) -> None: - aimbat_station = random.choice(list(session.exec(select(AimbatStation)))) - id = aimbat_station.id - station.delete_station_by_id(session, id) - assert ( - session.exec( - select(AimbatStation).where(AimbatStation.id == id) - ).one_or_none() - is None - ) - - def test_cli_delete_station_by_id(self, session: Session) -> None: - seismogram = random.choice(list(session.exec(select(AimbatStation)))) - id = seismogram.id - - with pytest.raises(SystemExit) as excinfo: - app(["station", "delete", str(id)]) - - assert excinfo.value.code == 0 - - session.flush() - assert ( - session.exec( - select(AimbatStation).where(AimbatStation.id == id) - ).one_or_none() - is None - ) - - def test_cli_delete_station_by_id_with_wrong_id(self) -> None: - from aimbat import settings - - settings.log_level = "INFO" - - import uuid - - id = uuid.uuid4() - - with pytest.raises(SystemExit) as excinfo: - app(["station", "delete", str(id)]) - - assert excinfo.value.code == 1 - - def test_cli_delete_station_by_string(self, session: Session) -> None: - station = random.choice(list(session.exec(select(AimbatStation)))) - id = station.id - - with pytest.raises(SystemExit) as excinfo: - app(["station", "delete", str(id)[:5]]) - - assert excinfo.value.code == 0 - - session.flush() - assert ( - session.exec( - select(AimbatStation).where(AimbatStation.id == id) - ).one_or_none() - is None - ) - - -class TestLibStation(TestStationBase): - def test_sac_data(self, session: Session, capsys: pytest.CaptureFixture) -> None: - station.print_station_table(session, short=False) - assert "AIMBAT stations for event" in capsys.readouterr().out - - station.print_station_table(session, short=True) - assert "ID (shortened)" in capsys.readouterr().out 
- - station.print_station_table(session, short=False, all_events=True) - assert "AIMBAT stations for all events" in capsys.readouterr().out - - station.print_station_table(session, short=True, all_events=True) - assert "# Seismograms" in capsys.readouterr().out - - -class TestCliStation(TestStationBase): - def test_cli_usage(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["station", "--help"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "Usage" in captured.out - - def test_cli_station_list( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["station", "list", "--all"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - - assert "# Seismograms" in captured.out - - -class TestDumpStation(TestStationBase): - def test_lib_dump_data(self, session: Session) -> None: - json_data = station.dump_station_table_to_json(session) - type_adapter = TypeAdapter(list[AimbatStation]) - type_adapter.validate_json(json_data) - - def test_cli_dump_data(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["station", "dump"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - loaded_json = json.loads(captured.out) - assert isinstance(loaded_json, list) - assert len(loaded_json) > 0 - for i in loaded_json: - _ = AimbatStation(**i) diff --git a/tests/test_typing.py b/tests/test_typing.py deleted file mode 100644 index f0de45ed..00000000 --- a/tests/test_typing.py +++ /dev/null @@ -1,57 +0,0 @@ -from sqlmodel import SQLModel -from enum import StrEnum -from typing import get_args, TypeAliasType -from aimbat.models import AimbatEventParametersBase, AimbatSeismogramParametersBase -from aimbat.aimbat_types import ( - EventParameter, - SeismogramParameter, - EventParameterBool, - EventParameterFloat, - EventParameterTimedelta, - SeismogramParameterBool, - SeismogramParameterTimestamp, -) - - -def set_from_basemodel(obj: type[SQLModel]) -> set[str]: - """Returns a set from the basemodel fields and remove "id" from it.""" - my_set: set[str] = set(obj.model_fields) - my_set.discard("id") - - return my_set - - -def set_from_strenum(enum: type[StrEnum]) -> set[str]: - - return set([member.value for member in enum]) - - -def set_from_typealiases(*aliases: TypeAliasType) -> set[str]: - my_list = [] - for alias in aliases: - my_list.extend([v for v in get_args(alias.__value__)]) - - return set(my_list) - - -class TestLibTypes: - """Ensure Default models and types are consistent.""" - - def test_event_parameter_types(self) -> None: - assert set_from_basemodel(AimbatEventParametersBase) == set_from_strenum( - EventParameter - ) - assert set_from_strenum(EventParameter) == set_from_typealiases( - EventParameterBool, - EventParameterFloat, - EventParameterTimedelta, - ) - - def test_seismogram_parameter_types(self) -> None: - assert set_from_basemodel(AimbatSeismogramParametersBase) == set_from_strenum( - SeismogramParameter - ) - assert set_from_strenum(SeismogramParameter) == set_from_typealiases( - SeismogramParameterBool, - SeismogramParameterTimestamp, - ) diff --git a/tests/unit/aimbat_types/test_pydantic.py b/tests/unit/aimbat_types/test_pydantic.py new file mode 100644 index 00000000..215f7ca3 --- /dev/null +++ b/tests/unit/aimbat_types/test_pydantic.py @@ -0,0 +1,131 @@ +"""Tests for aimbat_types._pydantic custom Pydantic types.""" + +import pytest +from pydantic import BaseModel, 
ValidationError +from aimbat.aimbat_types import ( + PydanticTimestamp, + PydanticTimedelta, + PydanticNegativeTimedelta, + PydanticPositiveTimedelta, +) +from pandas import Timestamp, Timedelta + + +class _TimestampModel(BaseModel): + """Test model for PydanticTimestamp.""" + + value: PydanticTimestamp + + +class _OptionalTimestampModel(BaseModel): + """Test model for optional PydanticTimestamp.""" + + value: PydanticTimestamp | None = None + + +class _TimedeltaModel(BaseModel): + """Test model for PydanticTimedelta.""" + + value: PydanticTimedelta + + +class TestPydanticTimestamp: + """Tests for PydanticTimestamp custom type.""" + + def test_accepts_timestamp(self) -> None: + """Verifies that a pandas Timestamp is accepted.""" + ts = Timestamp("2020-01-01") + assert _TimestampModel(value=ts).value == ts + + def test_accepts_string(self) -> None: + """Verifies that a valid date string is accepted and converted to Timestamp.""" + m = _TimestampModel(value="2020-01-01") # type: ignore[arg-type] + assert isinstance(m.value, Timestamp) + + def test_rejects_none(self) -> None: + """Verifies that None is rejected for a required field.""" + with pytest.raises(ValidationError): + _TimestampModel(value=None) # type: ignore[arg-type] + + def test_optional_accepts_none(self) -> None: + """Verifies that None is accepted for an optional field.""" + assert _OptionalTimestampModel(value=None).value is None + + def test_rejects_invalid_string(self) -> None: + """Verifies that an invalid date string raises ValidationError.""" + with pytest.raises(ValidationError): + _TimestampModel(value="not-a-timestamp") # type: ignore[arg-type] + + +class TestPydanticTimedelta: + """Tests for PydanticTimedelta custom type.""" + + def test_accepts_timedelta(self) -> None: + """Verifies that a pandas Timedelta is accepted.""" + td = Timedelta(seconds=5) + assert _TimedeltaModel(value=td).value == td + + def test_rejects_none(self) -> None: + """Verifies that None is rejected.""" + with pytest.raises(ValidationError): + _TimedeltaModel(value=None) # type: ignore[arg-type] + + +class TestPydanticNegativeTimedelta: + """Tests for PydanticNegativeTimedelta custom type.""" + + def test_accepts_negative(self) -> None: + """Verifies that a negative Timedelta is accepted.""" + + class M(BaseModel): + value: PydanticNegativeTimedelta + + assert M(value=Timedelta(seconds=-1)).value == Timedelta(seconds=-1) + + def test_rejects_positive(self) -> None: + """Verifies that a positive Timedelta is rejected.""" + + class M(BaseModel): + value: PydanticNegativeTimedelta + + with pytest.raises(ValidationError): + M(value=Timedelta(seconds=1)) + + def test_rejects_zero(self) -> None: + """Verifies that a zero Timedelta is rejected.""" + + class M(BaseModel): + value: PydanticNegativeTimedelta + + with pytest.raises(ValidationError): + M(value=Timedelta(0)) + + +class TestPydanticPositiveTimedelta: + """Tests for PydanticPositiveTimedelta custom type.""" + + def test_accepts_positive(self) -> None: + """Verifies that a positive Timedelta is accepted.""" + + class M(BaseModel): + value: PydanticPositiveTimedelta + + assert M(value=Timedelta(seconds=1)).value == Timedelta(seconds=1) + + def test_rejects_negative(self) -> None: + """Verifies that a negative Timedelta is rejected.""" + + class M(BaseModel): + value: PydanticPositiveTimedelta + + with pytest.raises(ValidationError): + M(value=Timedelta(seconds=-1)) + + def test_rejects_zero(self) -> None: + """Verifies that a zero Timedelta is rejected.""" + + class M(BaseModel): + value: 
PydanticPositiveTimedelta + + with pytest.raises(ValidationError): + M(value=Timedelta(0)) diff --git a/tests/unit/cli/test_common.py b/tests/unit/cli/test_common.py new file mode 100644 index 00000000..ec390384 --- /dev/null +++ b/tests/unit/cli/test_common.py @@ -0,0 +1,198 @@ +"""Unit tests for aimbat.cli._common.""" + +import pytest +from aimbat.cli._common import ( + GlobalParameters, + PlotParameters, + IccsPlotParameters, + TableParameters, + CliHints, + HINTS, + simple_exception, +) +from aimbat import settings + + +class TestGlobalParameters: + """Tests for the GlobalParameters dataclass.""" + + def test_default_debug_is_false(self) -> None: + """Verifies that debug defaults to False.""" + params = GlobalParameters() + assert params.debug is False + + def test_debug_true_sets_log_level(self) -> None: + """Verifies that setting debug=True changes the log level to DEBUG.""" + GlobalParameters(debug=True) + assert settings.log_level == "DEBUG" + + def test_debug_false_does_not_change_log_level(self) -> None: + """Verifies that debug=False does not alter the log level.""" + original = settings.log_level + GlobalParameters(debug=False) + assert settings.log_level == original + + +class TestPlotParameters: + """Tests for the PlotParameters dataclass.""" + + def test_default_use_qt_is_false(self) -> None: + """Verifies that use_qt defaults to False.""" + params = PlotParameters() + assert params.use_qt is False + + def test_use_qt_can_be_set_true(self) -> None: + """Verifies that use_qt can be set to True.""" + params = PlotParameters(use_qt=True) + assert params.use_qt is True + + +class TestIccsPlotParameters: + """Tests for the IccsPlotParameters dataclass.""" + + def test_default_context_is_true(self) -> None: + """Verifies that context defaults to True.""" + params = IccsPlotParameters() + assert params.context is True + + def test_default_all_is_false(self) -> None: + """Verifies that all defaults to False.""" + params = IccsPlotParameters() + assert params.all is False + + def test_context_can_be_set_false(self) -> None: + """Verifies that context can be set to False.""" + params = IccsPlotParameters(context=False) + assert params.context is False + + def test_all_can_be_set_true(self) -> None: + """Verifies that all can be set to True.""" + params = IccsPlotParameters(all=True) + assert params.all is True + + +class TestTableParameters: + """Tests for the TableParameters dataclass.""" + + def test_default_short_is_true(self) -> None: + """Verifies that short defaults to True.""" + params = TableParameters() + assert params.short is True + + def test_short_can_be_set_false(self) -> None: + """Verifies that short can be set to False.""" + params = TableParameters(short=False) + assert params.short is False + + +class TestCliHints: + """Tests for the CliHints frozen dataclass.""" + + def test_activate_event_hint_content(self) -> None: + """Verifies that ACTIVATE_EVENT hint references the activate command.""" + assert "activate" in CliHints.ACTIVATE_EVENT + assert "aimbat event activate" in CliHints.ACTIVATE_EVENT + + def test_list_events_hint_content(self) -> None: + """Verifies that LIST_EVENTS hint references the list command.""" + assert "list" in CliHints.LIST_EVENTS + assert "aimbat event list" in CliHints.LIST_EVENTS + + def test_hints_instance_is_frozen(self) -> None: + """Verifies that the CliHints dataclass is frozen (immutable).""" + with pytest.raises((AttributeError, TypeError)): + HINTS.ACTIVATE_EVENT = "new value" + + def test_hints_singleton_values(self) -> None: + 
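+        # For context, a minimal sketch of the pattern these tests assume:
+        # a frozen dataclass of hint strings plus a module-level singleton.
+        # The real definition lives in aimbat.cli._common and may differ:
+        #
+        #   @dataclass(frozen=True)
+        #   class CliHints:
+        #       ACTIVATE_EVENT: str = "... `aimbat event activate` ..."
+        #       LIST_EVENTS: str = "... `aimbat event list` ..."
+        #
+        #   HINTS = CliHints()
+        #
+        # Assigning to a frozen instance raises FrozenInstanceError, a
+        # subclass of AttributeError, which is why the test above accepts
+        # (AttributeError, TypeError).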
"""Verifies that the HINTS singleton has the expected attribute values.""" + assert HINTS.ACTIVATE_EVENT == CliHints.ACTIVATE_EVENT + assert HINTS.LIST_EVENTS == CliHints.LIST_EVENTS + + +class TestSimpleException: + """Tests for the simple_exception decorator.""" + + def test_returns_value_when_no_exception(self) -> None: + """Verifies that the decorated function returns its value normally.""" + + @simple_exception + def good() -> int: + return 42 + + assert good() == 42 + + def test_passes_args_and_kwargs(self) -> None: + """Verifies that args and kwargs are forwarded to the wrapped function.""" + + @simple_exception + def add(a: int, b: int = 0) -> int: + return a + b + + assert add(3, b=4) == 7 + + def test_exits_on_exception_in_normal_mode(self) -> None: + """Verifies that an exception causes SystemExit when not in debug mode.""" + settings.log_level = "INFO" + + @simple_exception + def boom() -> None: + raise ValueError("something went wrong") + + with pytest.raises(SystemExit) as exc_info: + boom() + assert exc_info.value.code == 1 + + def test_reraises_in_debug_mode(self) -> None: + """Verifies that exceptions propagate normally when in DEBUG mode.""" + settings.log_level = "DEBUG" + + @simple_exception + def boom() -> None: + raise ValueError("debug error") + + with pytest.raises(ValueError, match="debug error"): + boom() + + def test_reraises_in_trace_mode(self) -> None: + """Verifies that exceptions propagate normally when in TRACE mode.""" + settings.log_level = "TRACE" + + @simple_exception + def boom() -> None: + raise RuntimeError("trace error") + + with pytest.raises(RuntimeError, match="trace error"): + boom() + + def test_preserves_function_name(self) -> None: + """Verifies that the decorator preserves the original function name.""" + + @simple_exception + def my_function() -> None: + pass + + assert my_function.__name__ == "my_function" + + def test_preserves_function_docstring(self) -> None: + """Verifies that the decorator preserves the original function docstring.""" + + @simple_exception + def documented() -> None: + """My docstring.""" + + assert documented.__doc__ == "My docstring." 
+
+    def test_exit_prints_error_panel(self, capsys: pytest.CaptureFixture[str]) -> None:
+        """Verifies that the exception message is printed before exiting."""
+        settings.log_level = "INFO"
+
+        @simple_exception
+        def boom() -> None:
+            raise RuntimeError("panel message")
+
+        with pytest.raises(SystemExit):
+            boom()
+
+        # Rich may write to stdout or stderr; check both captured streams.
+        captured = capsys.readouterr()
+        assert "panel message" in captured.out or "panel message" in captured.err
diff --git a/tests/unit/io/test_sac.py b/tests/unit/io/test_sac.py
new file mode 100644
index 00000000..5ed1203d
--- /dev/null
+++ b/tests/unit/io/test_sac.py
@@ -0,0 +1,283 @@
+"""Unit tests for aimbat.io._sac."""
+
+from aimbat.io._sac import (
+    create_event_from_sacfile,
+    create_seismogram_from_sacfile_and_pick_header,
+    create_station_from_sacfile,
+    read_seismogram_data_from_sacfile,
+    write_seismogram_data_to_sacfile,
+)
+from aimbat.models import AimbatEvent, AimbatSeismogram, AimbatStation
+from pathlib import Path
+from pandas import Timedelta, Timestamp
+from pydantic import ValidationError
+from pysmo.classes import SAC
+import numpy as np
+import pytest
+
+# ===================================================================
+# read / write seismogram data
+# ===================================================================
+
+
+class TestReadSeismogramData:
+    """Tests for reading seismogram data from SAC files."""
+
+    def test_returns_ndarray(self, sac_file_good: Path) -> None:
+        """Verifies that reading data returns a numpy ndarray.
+
+        Args:
+            sac_file_good (Path): Path to a valid SAC file.
+        """
+        data = read_seismogram_data_from_sacfile(sac_file_good)
+        assert isinstance(data, np.ndarray)
+
+    def test_matches_pysmo_data(self, sac_file_good: Path) -> None:
+        """Verifies that the read data matches data read by pysmo.
+
+        Args:
+            sac_file_good (Path): Path to a valid SAC file.
+        """
+        expected = SAC.from_file(sac_file_good).seismogram.data
+        data = read_seismogram_data_from_sacfile(sac_file_good)
+        np.testing.assert_array_equal(data, expected)
+
+    def test_nonexistent_file_raises(self, tmp_path: Path) -> None:
+        """Verifies that reading from a non-existent file raises FileNotFoundError.
+
+        Args:
+            tmp_path (Path): Temporary directory path.
+        """
+        with pytest.raises(FileNotFoundError):
+            read_seismogram_data_from_sacfile(tmp_path / "missing.sac")
+
+
+class TestWriteSeismogramData:
+    """Tests for writing seismogram data to SAC files."""
+
+    def test_overwrites_data_on_disk(self, sac_file_good: Path) -> None:
+        """Verifies that writing data updates the file on disk.
+
+        Args:
+            sac_file_good (Path): Path to a valid SAC file.
+        """
+        original = read_seismogram_data_from_sacfile(sac_file_good)
+        new_data = np.ones_like(original) * 42.0
+
+        write_seismogram_data_to_sacfile(sac_file_good, new_data)
+
+        reread = read_seismogram_data_from_sacfile(sac_file_good)
+        np.testing.assert_array_equal(reread, new_data)
+
+    def test_preserves_length(self, sac_file_good: Path) -> None:
+        """Verifies that writing data preserves the number of points.
+
+        Args:
+            sac_file_good (Path): Path to a valid SAC file.
+        """
+        original = read_seismogram_data_from_sacfile(sac_file_good)
+        write_seismogram_data_to_sacfile(sac_file_good, np.zeros_like(original))
+        reread = read_seismogram_data_from_sacfile(sac_file_good)
+        assert len(reread) == len(original)
+
+    def test_round_trip(self, sac_file_good: Path) -> None:
+        """Write then read should return the same array.
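+
+        The helpers are presumably thin pysmo wrappers, roughly (a sketch,
+        not the actual aimbat.io._sac code)::
+
+            sac = SAC.from_file(sacfile)
+            sac.seismogram.data = data
+            sac.write(sacfile)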
+ + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + data = np.linspace(-1.0, 1.0, 100) + # First overwrite with our data, then verify the round-trip. + write_seismogram_data_to_sacfile(sac_file_good, data) + result = read_seismogram_data_from_sacfile(sac_file_good) + np.testing.assert_allclose(result, data) + + +# =================================================================== +# create_station_from_sacfile +# =================================================================== + + +class TestCreateStation: + """Tests for creating AimbatStation objects from SAC files.""" + + def test_returns_aimbat_station(self, sac_file_good: Path) -> None: + """Verifies that the function returns an AimbatStation instance. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + station = create_station_from_sacfile(sac_file_good) + assert isinstance(station, AimbatStation) + + def test_fields_match_sac(self, sac_file_good: Path) -> None: + """Verifies that station fields match the SAC header values. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + sac = SAC.from_file(sac_file_good) + station = create_station_from_sacfile(sac_file_good) + + assert station.name == sac.station.name + assert station.network == sac.station.network + assert station.location == sac.station.location + assert station.channel == sac.station.channel + assert station.latitude == sac.station.latitude + assert station.longitude == sac.station.longitude + assert station.elevation == sac.station.elevation + + def test_nonexistent_file_raises(self, tmp_path: Path) -> None: + """Verifies that creating a station from a non-existent file raises FileNotFoundError. + + Args: + tmp_path (Path): Temporary directory path. + """ + with pytest.raises(FileNotFoundError): + create_station_from_sacfile(tmp_path / "missing.sac") + + +# =================================================================== +# create_event_from_sacfile +# =================================================================== + + +class TestCreateEvent: + """Tests for creating AimbatEvent objects from SAC files.""" + + def test_returns_aimbat_event(self, sac_file_good: Path) -> None: + """Verifies that the function returns an AimbatEvent instance. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + event = create_event_from_sacfile(sac_file_good) + assert isinstance(event, AimbatEvent) + + def test_fields_match_sac(self, sac_file_good: Path) -> None: + """Verifies that event fields match the SAC header values. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + sac = SAC.from_file(sac_file_good) + event = create_event_from_sacfile(sac_file_good) + + assert isinstance(event.time, Timestamp) + assert event.time == sac.event.time + assert event.latitude == sac.event.latitude + assert event.longitude == sac.event.longitude + assert event.depth == sac.event.depth + + def test_has_parameters(self, sac_file_good: Path) -> None: + """Verifies that the created event has initialized parameters. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + event = create_event_from_sacfile(sac_file_good) + assert event.parameters is not None + + def test_nonexistent_file_raises(self, tmp_path: Path) -> None: + """Verifies that creating an event from a non-existent file raises FileNotFoundError. + + Args: + tmp_path (Path): Temporary directory path. 
+ """ + with pytest.raises(FileNotFoundError): + create_event_from_sacfile(tmp_path / "missing.sac") + + +# =================================================================== +# create_seismogram_from_sacfile_and_pick_header +# =================================================================== + + +class TestCreateSeismogram: + """Tests for creating AimbatSeismogram objects from SAC files.""" + + def test_returns_aimbat_seismogram(self, sac_file_good: Path) -> None: + """Verifies that the function returns an AimbatSeismogram instance. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + seis = create_seismogram_from_sacfile_and_pick_header(sac_file_good, "t0") + assert isinstance(seis, AimbatSeismogram) + + def test_fields_match_sac(self, sac_file_good: Path) -> None: + """Verifies that seismogram fields match the SAC header values. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + sac = SAC.from_file(sac_file_good) + seis = create_seismogram_from_sacfile_and_pick_header(sac_file_good, "t0") + + assert isinstance(seis.begin_time, Timestamp) + assert seis.begin_time == sac.seismogram.begin_time + assert isinstance(seis.delta, Timedelta) + assert seis.delta == sac.seismogram.delta + + def test_t0_uses_requested_pick_header(self, sac_file_good: Path) -> None: + """Verifies that t0 is populated from the specified pick header. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + sac = SAC.from_file(sac_file_good) + + seis_t0 = create_seismogram_from_sacfile_and_pick_header(sac_file_good, "t0") + assert seis_t0.t0 == sac.timestamps.t0 + + seis_t1 = create_seismogram_from_sacfile_and_pick_header(sac_file_good, "t1") + assert seis_t1.t0 == sac.timestamps.t1 + + def test_has_parameters(self, sac_file_good: Path) -> None: + """Verifies that the created seismogram has initialized parameters. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + seis = create_seismogram_from_sacfile_and_pick_header(sac_file_good, "t0") + assert seis.parameters is not None + + def test_nonexistent_file_raises(self, tmp_path: Path) -> None: + """Verifies that creating a seismogram from a non-existent file raises FileNotFoundError. + + Args: + tmp_path (Path): Temporary directory path. + """ + with pytest.raises(FileNotFoundError): + create_seismogram_from_sacfile_and_pick_header( + tmp_path / "missing.sac", "t0" + ) + + def test_invalid_pick_header_raises(self, sac_file_good: Path) -> None: + """Verifies that requesting an invalid pick header raises AttributeError. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + with pytest.raises(AttributeError): + create_seismogram_from_sacfile_and_pick_header( + sac_file_good, "nonexistent_header" + ) + + def test_none_pick_raises(self, sac_file_good: Path) -> None: + """Verifies that if the pick header exists but is None, ValidationError is raised. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + sac = SAC.from_file(sac_file_good) + # Find a timestamp header that is None. 
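+        # (Assumption: the test asset only defines the early picks, so at
+        # least one of t4-t9 is unset; the assert below guards against a
+        # fixture file that happens to populate all of them.)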
+ none_header = None + for name in ["t4", "t5", "t6", "t7", "t8", "t9"]: + if getattr(sac.timestamps, name) is None: + none_header = name + break + assert none_header is not None, "expected at least one None timestamp header" + + with pytest.raises(ValidationError): + create_seismogram_from_sacfile_and_pick_header(sac_file_good, none_header) diff --git a/tests/unit/models/test_sqlalchemy.py b/tests/unit/models/test_sqlalchemy.py new file mode 100644 index 00000000..f4854adf --- /dev/null +++ b/tests/unit/models/test_sqlalchemy.py @@ -0,0 +1,146 @@ +import pytest +import pandas as pd +from datetime import datetime, timezone +from unittest.mock import MagicMock +from sqlalchemy.engine import Dialect +from aimbat.models._sqlalchemy import SAPandasTimestamp, SAPandasTimedelta + + +@pytest.fixture +def mock_dialect() -> Dialect: + """Fixture for a mock SQLAlchemy dialect.""" + return MagicMock(spec=Dialect) + + +class TestSAPandasTimestamp: + """Tests for the SAPandasTimestamp custom SQLAlchemy type.""" + + @pytest.fixture + def sa_timestamp(self) -> SAPandasTimestamp: + """Fixture providing an instance of SAPandasTimestamp.""" + return SAPandasTimestamp() + + def test_process_bind_param_none( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that None is passed through unchanged.""" + assert sa_timestamp.process_bind_param(None, mock_dialect) is None + + def test_process_bind_param_naive_timestamp( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that a naive pandas Timestamp is converted to a UTC datetime.""" + ts_naive = pd.Timestamp("2023-01-01 12:00:00") + result = sa_timestamp.process_bind_param(ts_naive, mock_dialect) + assert isinstance(result, datetime) + assert result.tzinfo == timezone.utc + assert result.year == 2023 + assert result.hour == 12 + + def test_process_bind_param_aware_timestamp( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that a timezone-aware pandas Timestamp is converted to UTC.""" + # Create a non-UTC timestamp + ts_ny = pd.Timestamp("2023-01-01 12:00:00").tz_localize("America/New_York") + result = sa_timestamp.process_bind_param(ts_ny, mock_dialect) + assert isinstance(result, datetime) + assert result.tzinfo == timezone.utc + # 12:00 NY is 17:00 UTC + assert result.hour == 17 + + def test_process_bind_param_converts_other_types( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that other datetime types are also converted correctly.""" + dt = datetime(2023, 1, 1, 12, 0, 0) + result = sa_timestamp.process_bind_param(dt, mock_dialect) + assert isinstance(result, datetime) + assert result.tzinfo == timezone.utc + + def test_process_bind_param_truncates_nanoseconds( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that nanosecond precision is truncated to microseconds.""" + # DateTime in Python only supports microseconds, pandas supports nanoseconds + ts_nano = pd.Timestamp("2023-01-01 12:00:00.123456789") + result = sa_timestamp.process_bind_param(ts_nano, mock_dialect) + assert result is not None + # Should be truncated/floored to microseconds + assert result.microsecond == 123456 + # Ensure it didn't round up or do something unexpected with the extra precision + assert result.second == 0 + + def test_process_result_value_none( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that None result is passed through unchanged.""" + assert 
sa_timestamp.process_result_value(None, mock_dialect) is None + + def test_process_result_value_naive_datetime( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that a naive datetime from DB is converted to a UTC pandas Timestamp.""" + # SQLAlchemy might return a naive datetime (implicit UTC or from DB) + dt_naive = datetime(2023, 1, 1, 12, 0, 0) + result = sa_timestamp.process_result_value(dt_naive, mock_dialect) + assert isinstance(result, pd.Timestamp) + assert result.tzinfo == timezone.utc + assert result.year == 2023 + + def test_process_result_value_aware_datetime( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that an aware datetime from DB is converted to a UTC pandas Timestamp.""" + dt_aware = datetime(2023, 1, 1, 12, 0, 0, tzinfo=timezone.utc) + result = sa_timestamp.process_result_value(dt_aware, mock_dialect) + assert isinstance(result, pd.Timestamp) + assert result.tzinfo == timezone.utc + + +class TestSAPandasTimedelta: + """Tests for the SAPandasTimedelta custom SQLAlchemy type.""" + + @pytest.fixture + def sa_timedelta(self) -> SAPandasTimedelta: + """Fixture providing an instance of SAPandasTimedelta.""" + return SAPandasTimedelta() + + def test_process_bind_param_none( + self, sa_timedelta: SAPandasTimedelta, mock_dialect: Dialect + ) -> None: + """Test that None is passed through unchanged.""" + assert sa_timedelta.process_bind_param(None, mock_dialect) is None + + def test_process_bind_param_timedelta( + self, sa_timedelta: SAPandasTimedelta, mock_dialect: Dialect + ) -> None: + """Test that a pandas Timedelta is converted to nanoseconds (int).""" + td = pd.Timedelta(seconds=10) + result = sa_timedelta.process_bind_param(td, mock_dialect) + assert isinstance(result, int) + assert result == 10 * 1_000_000_000 # nanoseconds + + def test_process_bind_param_converts_other_types( + self, sa_timedelta: SAPandasTimedelta, mock_dialect: Dialect + ) -> None: + """Test that other types (like strings) are converted to nanoseconds.""" + # String conversion + result = sa_timedelta.process_bind_param("1 days", mock_dialect) + assert isinstance(result, int) + assert result == 86400 * 1_000_000_000 + + def test_process_result_value_none( + self, sa_timedelta: SAPandasTimedelta, mock_dialect: Dialect + ) -> None: + """Test that None result is passed through unchanged.""" + assert sa_timedelta.process_result_value(None, mock_dialect) is None + + def test_process_result_value_int( + self, sa_timedelta: SAPandasTimedelta, mock_dialect: Dialect + ) -> None: + """Test that an integer (nanoseconds) from DB is converted to a pandas Timedelta.""" + ns_value = 5 * 1_000_000_000 # 5 seconds in ns + result = sa_timedelta.process_result_value(ns_value, mock_dialect) + assert isinstance(result, pd.Timedelta) + assert result.total_seconds() == 5.0 diff --git a/tests/test_app.py b/tests/unit/test_app.py similarity index 57% rename from tests/test_app.py rename to tests/unit/test_app.py index 77832dff..a7fad800 100644 --- a/tests/test_app.py +++ b/tests/unit/test_app.py @@ -1,18 +1,42 @@ +"""Unit tests for the main CLI application entry point.""" + from importlib import metadata, reload from typing import Any import pytest def mock_return_str(*args: list[Any], **kwargs: dict[str, Any]) -> str: + """Mock function that returns a fixed string version. + + Args: + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. + + Returns: + str: The string "1.2.3". 
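+
+    Example:
+        Stands in for importlib.metadata.version, mirroring its use in
+        test_cli_version below::
+
+            monkeypatch.setattr(metadata, "version", mock_return_str)
+            assert metadata.version("aimbat") == "1.2.3"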
+ """ return "1.2.3" def mock_raise(*args: list[Any], **kwargs: dict[str, Any]) -> None: + """Mock function that raises an Exception. + + Args: + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. + + Raises: + Exception: Always raised. + """ raise Exception def test_cli_usage(capsys: pytest.CaptureFixture) -> None: - """Test aimbat cli help output.""" + """Test aimbat cli help output. + + Args: + capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr. + """ from aimbat import app with pytest.raises(SystemExit) as excinfo: @@ -27,7 +51,12 @@ def test_cli_usage(capsys: pytest.CaptureFixture) -> None: def test_cli_version( capsys: pytest.CaptureFixture, monkeypatch: pytest.MonkeyPatch ) -> None: - """Test aimbat cli version flag.""" + """Test aimbat cli version flag. + + Args: + capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr. + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ from aimbat import app monkeypatch.setattr(metadata, "version", mock_return_str) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py new file mode 100644 index 00000000..9e384980 --- /dev/null +++ b/tests/unit/test_config.py @@ -0,0 +1,227 @@ +"""Unit tests for aimbat._config.""" + +import io +from pathlib import Path +from typing import Any +import pytest +from rich.console import Console +from aimbat._config import Settings, settings, print_settings_table, cli_settings_list + + +def _capture_pretty(monkeypatch: pytest.MonkeyPatch) -> str: + """Call print_settings_table(pretty=True) and return plain rendered output. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + + Returns: + str: The captured output string. + """ + buffer = io.StringIO() + console = Console(file=buffer, highlight=False, no_color=True, width=200) + monkeypatch.setattr("aimbat.utils._json.Console", lambda: console) + print_settings_table(pretty=True) + return buffer.getvalue() + + +class TestSettings: + """Tests for the Settings class configuration.""" + + def test_default_project(self) -> None: + """Verifies the default project file name.""" + s = Settings() + assert s.project == Path("aimbat.db") + + def test_default_logfile(self) -> None: + """Verifies the default log file name.""" + s = Settings() + assert s.logfile == Path("aimbat.log") + + def test_default_log_level(self) -> None: + """Verifies the default log level is INFO.""" + s = Settings() + assert s.log_level == "INFO" + + def test_db_url_derived_from_project(self) -> None: + """Verifies that db_url is derived from the project path by default.""" + s = Settings() + assert str(s.project) in s.db_url + + def test_db_url_custom_not_overridden(self) -> None: + """Verifies that a custom db_url is preserved.""" + s = Settings(db_url="sqlite:///custom.db") + assert s.db_url == "sqlite:///custom.db" + + def test_env_prefix(self) -> None: + """Verifies that the environment variable prefix is 'aimbat_'.""" + assert Settings.model_config.get("env_prefix") == "aimbat_" + + def test_min_id_length_default(self) -> None: + """Verifies the default minimum ID length.""" + s = Settings() + assert s.min_id_length == 2 + + def test_bandpass_apply_default(self) -> None: + """Verifies that bandpass_apply is a boolean.""" + s = Settings() + assert isinstance(s.bandpass_apply, bool) + + def test_min_ccnorm_bounds(self) -> None: + """Verifies that min_ccnorm is within [0, 1].""" + s = Settings() + assert 0 <= float(s.min_ccnorm) <= 1 + + def test_window_pre_is_negative(self) -> None: 
+ """Verifies that window_pre is a negative duration.""" + s = Settings() + assert s.window_pre.total_seconds() < 0 + + def test_window_post_is_positive(self) -> None: + """Verifies that window_post is a positive duration.""" + s = Settings() + assert s.window_post.total_seconds() > 0 + + def test_context_width_is_positive(self) -> None: + """Verifies that context_width is a positive duration.""" + s = Settings() + assert s.context_width.total_seconds() > 0 + + +class TestPrintSettingsTablePlain: + """Tests for print_settings_table with pretty=False.""" + + def test_contains_setting_names(self, capsys: pytest.CaptureFixture[str]) -> None: + """Verifies that output contains setting names in uppercase. + + Args: + capsys (pytest.CaptureFixture[str]): Fixture to capture stdout/stderr. + """ + import json + + print_settings_table(pretty=False) + output = capsys.readouterr().out + for k in json.loads(Settings().model_dump_json()): + assert k.upper() in output + + def test_contains_env_prefix(self, capsys: pytest.CaptureFixture[str]) -> None: + """Verifies that output contains the environment variable prefix. + + Args: + capsys (pytest.CaptureFixture[str]): Fixture to capture stdout/stderr. + """ + print_settings_table(pretty=False) + output = capsys.readouterr().out + assert "AIMBAT_" in output + + def test_contains_values(self, capsys: pytest.CaptureFixture[str]) -> None: + """Verifies that output contains current setting values. + + Args: + capsys (pytest.CaptureFixture[str]): Fixture to capture stdout/stderr. + """ + print_settings_table(pretty=False) + output = capsys.readouterr().out + assert str(settings.project) in output + assert str(settings.logfile) in output + + def test_format_is_key_equals_value( + self, capsys: pytest.CaptureFixture[str] + ) -> None: + """Verifies that output lines are formatted as KEY=VALUE. + + Args: + capsys (pytest.CaptureFixture[str]): Fixture to capture stdout/stderr. + """ + print_settings_table(pretty=False) + output = capsys.readouterr().out + for line in output.strip().splitlines(): + assert "=" in line + + +class TestPrintSettingsTablePretty: + """Tests for print_settings_table with pretty=True.""" + + def test_title_present(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that the table title is present. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_pretty(monkeypatch) + assert "AIMBAT settings" in output + + def test_column_headers_present(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that column headers are present. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_pretty(monkeypatch) + assert "Name" in output + assert "Value" in output + assert "Description" in output + + def test_setting_names_present(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that all setting names are present in the table. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + import json + + output = _capture_pretty(monkeypatch) + for k in json.loads(Settings().model_dump_json()): + assert k in output + + def test_setting_values_present(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that setting values are present in the table. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. 
+ """ + output = _capture_pretty(monkeypatch) + assert str(settings.project) in output + assert str(settings.logfile) in output + + def test_env_var_in_description(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that environment variable names are included in descriptions. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_pretty(monkeypatch) + assert "AIMBAT_" in output + + +class TestCliSettingsList: + """Tests for the cli_settings_list function.""" + + def test_delegates_to_print_settings_table( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Verifies that the function calls print_settings_table with the correct argument. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + calls: list[dict[str, Any]] = [] + monkeypatch.setattr( + "aimbat._config.print_settings_table", + lambda pretty: calls.append({"pretty": pretty}), + ) + cli_settings_list(pretty=True) + assert calls == [{"pretty": True}] + + def test_default_pretty_is_true(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that 'pretty' defaults to True. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + calls: list[dict[str, Any]] = [] + monkeypatch.setattr( + "aimbat._config.print_settings_table", + lambda pretty: calls.append({"pretty": pretty}), + ) + cli_settings_list() + assert calls[0]["pretty"] is True diff --git a/tests/unit/utils/test_json.py b/tests/unit/utils/test_json.py new file mode 100644 index 00000000..3e174025 --- /dev/null +++ b/tests/unit/utils/test_json.py @@ -0,0 +1,320 @@ +"""Unit tests for aimbat.utils._json.""" + +import io +from typing import Any, Callable +import pytest +from rich.console import Console +from aimbat.utils._json import json_to_table + + +def _capture_table( + monkeypatch: pytest.MonkeyPatch, + data: dict[str, Any] | list[dict[str, Any]], + title: str | None = None, + formatters: dict[str, Callable[[Any], str]] | None = None, + skip_keys: list[str] | None = None, + column_order: list[str] | None = None, + column_kwargs: dict[str, dict[str, Any]] | None = None, + common_column_kwargs: dict[str, Any] | None = None, +) -> str: + """Call json_to_table and return the rendered output as a plain string. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + data (dict[str, Any] | list[dict[str, Any]]): The data to render. + title (str | None): Optional table title. + formatters (dict[str, Callable[[Any], str]] | None): Optional value formatters. + skip_keys (list[str] | None): Keys to exclude from the table. + column_order (list[str] | None): Explicit order of columns. + column_kwargs (dict[str, dict[str, Any]] | None): Column-specific arguments. + common_column_kwargs (dict[str, Any] | None): Arguments applied to all columns. + + Returns: + str: The captured table output. + """ + buffer = io.StringIO() + console = Console(file=buffer, highlight=False, no_color=True, width=200) + monkeypatch.setattr("aimbat.utils._json.Console", lambda: console) + json_to_table( + data, + title=title, + formatters=formatters, + skip_keys=skip_keys, + column_order=column_order, + column_kwargs=column_kwargs, + common_column_kwargs=common_column_kwargs, + ) + return buffer.getvalue() + + +class TestJsonToTableSingleDict: + """Tests json_to_table with a single dictionary input.""" + + def test_basic(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies basic key-value rendering. 
+ + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table(monkeypatch, {"name": "Alice", "age": 30}) + assert "name" in output + assert "Alice" in output + assert "age" in output + assert "30" in output + + def test_title(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that the title is rendered. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table(monkeypatch, {"name": "Alice"}, title="Person") + assert "Person" in output + + def test_default_column_headers(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies default headers for dictionary input. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table(monkeypatch, {"x": "y"}) + assert "Key" in output + assert "Value" in output + + def test_formatter_applied(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that value formatters are applied. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + {"score": 0.123456}, + formatters={"score": lambda v: f"{v:.2f}"}, + ) + assert "0.12" in output + assert "0.123456" not in output + + def test_skip_keys(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that specified keys are skipped. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + {"name": "Alice", "secret": "hidden"}, + skip_keys=["secret"], + ) + assert "name" in output + assert "secret" not in output + assert "hidden" not in output + + def test_column_order(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that column order is respected (row order for dicts). + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + {"b": "2", "a": "1"}, + column_order=["a", "b"], + ) + pos_a = output.index("a") + pos_b = output.index("b") + assert pos_a < pos_b + + def test_column_kwargs_header_override( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Verifies that column headers can be overridden via column_kwargs. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + {"name": "Alice"}, + column_kwargs={"Key": {"header": "Field"}, "Value": {"header": "Data"}}, + ) + assert "Field" in output + assert "Data" in output + assert "Key" not in output + + def test_common_column_kwargs_applied( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Verifies that common_column_kwargs are accepted. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + # Ensures no exception is raised when common_column_kwargs is provided. + output = _capture_table( + monkeypatch, + {"x": "1"}, + common_column_kwargs={"min_width": 5}, + ) + assert "x" in output + + def test_per_column_kwargs_override_common( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Verifies that column-specific kwargs override common kwargs. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. 
+ """ + output = _capture_table( + monkeypatch, + {"x": "1"}, + common_column_kwargs={"header": "Common"}, + column_kwargs={"Key": {"header": "Specific"}}, + ) + assert "Specific" in output + + +class TestJsonToTableListOfDicts: + """Tests json_to_table with a list of dictionaries.""" + + def test_basic(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies basic table rendering for list of dicts. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"id": 1, "name": "Alice"}, {"id": 2, "name": "Bob"}], + ) + assert "id" in output + assert "name" in output + assert "Alice" in output + assert "Bob" in output + assert "1" in output + assert "2" in output + + def test_empty_list(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that an empty list produces valid output (empty table). + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + # Should not raise and should print an empty table. + output = _capture_table(monkeypatch, []) + assert output is not None + + def test_title(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that the title is rendered. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, [{"description": "test item"}], title="Results" + ) + assert "Results" in output + + def test_formatter_applied(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that value formatters are applied. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"value": 3.14159}], + formatters={"value": lambda v: f"{v:.1f}"}, + ) + assert "3.1" in output + assert "3.14159" not in output + + def test_skip_keys(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that specified keys are skipped. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"name": "Alice", "secret": "x"}], + skip_keys=["secret"], + ) + assert "name" in output + assert "secret" not in output + + def test_column_order(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that column order is respected. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"b": "2", "a": "1"}], + column_order=["a", "b"], + ) + pos_a = output.index("a") + pos_b = output.index("b") + assert pos_a < pos_b + + def test_column_order_partial(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Keys not listed in column_order should be appended after. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"c": "3", "b": "2", "a": "1"}], + column_order=["a"], + ) + pos_a = output.index("a") + pos_b = output.index("b") + pos_c = output.index("c") + assert pos_a < pos_b + assert pos_a < pos_c + + def test_column_kwargs_header_override( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Verifies that column headers can be overridden. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. 
+ """ + output = _capture_table( + monkeypatch, + [{"id": 1}], + column_kwargs={"id": {"header": "Identifier"}}, + ) + assert "Identifier" in output + assert "id" not in output + + def test_missing_key_in_row(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Rows missing a key should render 'None' for that cell. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"a": 1, "b": 2}, {"a": 3}], + ) + assert "None" in output + + def test_common_column_kwargs_applied( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Verifies that common_column_kwargs are applied to list columns. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"x": "1"}], + common_column_kwargs={"min_width": 5}, + ) + assert "x" in output diff --git a/tests/unit/utils/test_sampledata.py b/tests/unit/utils/test_sampledata.py new file mode 100644 index 00000000..9c6ba4b1 --- /dev/null +++ b/tests/unit/utils/test_sampledata.py @@ -0,0 +1,151 @@ +"""Unit tests for aimbat.utils._sampledata.""" + +import io +import zipfile +from pathlib import Path +from unittest.mock import MagicMock, patch +import pytest +from aimbat.utils._sampledata import delete_sampledata, download_sampledata + + +def _make_zip_bytes(filenames: list[str]) -> bytes: + """Return the bytes of a ZIP archive containing empty files with the given names. + + Args: + filenames (list[str]): List of filenames to include in the ZIP. + + Returns: + bytes: The bytes of the ZIP archive. + """ + buf = io.BytesIO() + with zipfile.ZipFile(buf, mode="w") as zf: + for name in filenames: + zf.writestr(name, b"") + return buf.getvalue() + + +@pytest.fixture() +def sampledata_dir(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> Path: + """Point settings.sampledata_dir at a temp directory for each test. + + Args: + tmp_path (Path): Temporary directory path. + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + + Returns: + Path: The temporary sample data directory. + """ + d = tmp_path / "sample-data" + import aimbat + + monkeypatch.setattr(aimbat.settings, "sampledata_dir", d) + return d + + +class TestDeleteSampledata: + """Tests for the delete_sampledata function.""" + + def test_removes_directory(self, sampledata_dir: Path) -> None: + """Verifies that the sample data directory is removed. + + Args: + sampledata_dir (Path): The sample data directory. + """ + sampledata_dir.mkdir() + (sampledata_dir / "file.txt").write_text("x") + delete_sampledata() + assert not sampledata_dir.exists() + + def test_raises_if_dir_missing(self, sampledata_dir: Path) -> None: + """Verifies that FileNotFoundError is raised if the directory is missing. + + Args: + sampledata_dir (Path): The sample data directory. + """ + assert not sampledata_dir.exists() + with pytest.raises(FileNotFoundError): + delete_sampledata() + + +class TestDownloadSampledata: + """Tests for the download_sampledata function.""" + + def _mock_urlopen(self, filenames: list[str]) -> MagicMock: + """Return a context-manager mock that yields ZIP bytes for urlopen. + + Args: + filenames (list[str]): List of filenames for the mock ZIP. + + Returns: + MagicMock: A mock object behaving like urlopen's return value. 
+        """
+        zip_bytes = _make_zip_bytes(filenames)
+        mock_resp = MagicMock()
+        mock_resp.read.return_value = zip_bytes
+        # Assigning a plain callable to a magic method on a MagicMock wraps
+        # it so it is invoked with the mock itself as the first argument,
+        # making the mock usable as a context manager that yields itself.
+        mock_resp.__enter__ = lambda s: s
+        mock_resp.__exit__ = MagicMock(return_value=False)
+        mock_urlopen = MagicMock(return_value=mock_resp)
+        return mock_urlopen
+
+    def test_extracts_files(self, sampledata_dir: Path) -> None:
+        """Verifies that files are extracted to the sample data directory.
+
+        Args:
+            sampledata_dir (Path): The sample data directory.
+        """
+        mock_urlopen = self._mock_urlopen(["data/file1.sac", "data/file2.sac"])
+        with patch("aimbat.utils._sampledata.urlopen", mock_urlopen):
+            download_sampledata()
+        assert sampledata_dir.exists()
+
+    def test_raises_if_dir_non_empty(self, sampledata_dir: Path) -> None:
+        """Verifies that FileExistsError is raised if the directory is not empty.
+
+        Args:
+            sampledata_dir (Path): The sample data directory.
+        """
+        sampledata_dir.mkdir()
+        (sampledata_dir / "existing.txt").write_text("x")
+        mock_urlopen = self._mock_urlopen(["data/file.sac"])
+        with patch("aimbat.utils._sampledata.urlopen", mock_urlopen):
+            with pytest.raises(FileExistsError):
+                download_sampledata()
+        mock_urlopen.assert_not_called()
+
+    def test_force_overwrites_existing(self, sampledata_dir: Path) -> None:
+        """Verifies that existing files are overwritten when force=True.
+
+        Args:
+            sampledata_dir (Path): The sample data directory.
+        """
+        sampledata_dir.mkdir()
+        (sampledata_dir / "old.txt").write_text("old")
+        mock_urlopen = self._mock_urlopen(["data/new.sac"])
+        with patch("aimbat.utils._sampledata.urlopen", mock_urlopen):
+            download_sampledata(force=True)
+        assert not (sampledata_dir / "old.txt").exists()
+
+    def test_empty_dir_not_blocked(self, sampledata_dir: Path) -> None:
+        """Verifies that an existing empty directory does not block download.
+
+        Args:
+            sampledata_dir (Path): The sample data directory.
+        """
+        sampledata_dir.mkdir()
+        mock_urlopen = self._mock_urlopen(["data/file.sac"])
+        with patch("aimbat.utils._sampledata.urlopen", mock_urlopen):
+            download_sampledata()
+        mock_urlopen.assert_called_once()
+
+    def test_urlopen_called_with_src(self, sampledata_dir: Path) -> None:
+        """Verifies that urlopen is called with the configured source URL.
+
+        Args:
+            sampledata_dir (Path): The sample data directory.
+ """ + import aimbat + + mock_urlopen = self._mock_urlopen(["data/file.sac"]) + with patch("aimbat.utils._sampledata.urlopen", mock_urlopen): + download_sampledata() + mock_urlopen.assert_called_once_with(aimbat.settings.sampledata_src) diff --git a/tests/utils/test_utils.py b/tests/utils/test_utils.py deleted file mode 100644 index 4bfdd7f4..00000000 --- a/tests/utils/test_utils.py +++ /dev/null @@ -1,184 +0,0 @@ -from aimbat._config import Settings -from aimbat.app import app -from pysmo.classes import SAC -from datetime import datetime, timezone -from typing import Any -from sqlmodel import Session -from sqlalchemy import Engine -from collections.abc import Generator -from pathlib import Path -import aimbat.utils._checkdata as _checkdata -import aimbat.utils._sampledata as _sampledata -import numpy as np -import os -import pytest - - -class TestUtilsBase: - @pytest.fixture - def session( - self, fixture_engine_session_with_active_event: tuple[Engine, Session] - ) -> Generator[Session, Any, Any]: - _, session = fixture_engine_session_with_active_event - yield session - - @pytest.fixture(autouse=True) - def download_dir( - self, - session: Session, - tmp_path_factory: pytest.TempPathFactory, - patch_settings: Settings, - ) -> Generator[Path, Any, Any]: - tmp_dir = tmp_path_factory.mktemp("download_dir") - patch_settings.sampledata_dir = tmp_dir - yield tmp_dir - - -class TestUtilsCheckData(TestUtilsBase): - def test_check_station_no_name(self, sac_instance_good: SAC) -> None: - assert sac_instance_good.station.name - _checkdata.checkdata_station(sac_instance_good.station) - sac_instance_good.kstnm = None - issues = _checkdata.checkdata_station(sac_instance_good.station) - assert "No station name" in issues[0] - - def test_check_station_no_latitude(self, sac_instance_good: SAC) -> None: - assert sac_instance_good.station.latitude - _checkdata.checkdata_station(sac_instance_good.station) - sac_instance_good.stla = None - issues = _checkdata.checkdata_station(sac_instance_good.station) - assert "No station latitude" in issues[0] - - def test_check_station_no_longitude(self, sac_instance_good: SAC) -> None: - assert sac_instance_good.station.longitude - _checkdata.checkdata_station(sac_instance_good.station) - sac_instance_good.stlo = None - issues = _checkdata.checkdata_station(sac_instance_good.station) - assert "No station longitude" in issues[0] - - def test_check_event_no_latitude(self, sac_instance_good: SAC) -> None: - assert sac_instance_good.event.latitude - _checkdata.checkdata_event(sac_instance_good.event) - sac_instance_good.evla = None - issues = _checkdata.checkdata_event(sac_instance_good.event) - assert "No event latitude" in issues[0] - - def test_check_event_no_longitude(self, sac_instance_good: SAC) -> None: - assert sac_instance_good.event.longitude - _checkdata.checkdata_event(sac_instance_good.event) - sac_instance_good.evlo = None - issues = _checkdata.checkdata_event(sac_instance_good.event) - assert "No event longitude" in issues[0] - - def test_check_event_no_time(self, sac_instance_good: SAC) -> None: - assert sac_instance_good.event.time - _checkdata.checkdata_event(sac_instance_good.event) - sac_instance_good.o = None - issues = _checkdata.checkdata_event(sac_instance_good.event) - assert "No event time" in issues[0] - - def test_check_seismogram_no_begin_time(self, sac_instance_good: SAC) -> None: - assert len(sac_instance_good.seismogram.data) > 0 - _checkdata.checkdata_seismogram(sac_instance_good.seismogram) - sac_instance_good.seismogram.data = np.array([]) 
- issues = _checkdata.checkdata_seismogram(sac_instance_good.seismogram) - assert "No seismogram data" in issues[0] - - def test_cli_usage(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["utils", "checkdata", "--help"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "Usage" in captured.out - - def test_cli_checkdata( - self, tmp_path_factory: pytest.TempPathFactory, capsys: pytest.CaptureFixture - ) -> None: - """Test AIMBAT cli with checkdata subcommand.""" - - testfile = str(tmp_path_factory.mktemp("checkdata")) + "/test.sac" - - sac = SAC() - sac.write(testfile) - - with pytest.raises(SystemExit) as excinfo: - app(["utils", "checkdata", testfile]) - assert excinfo.value.code == 0 - output = capsys.readouterr().out - for item in ["name", "latitude", "longitude"]: - assert f"No station {item} found in file" in output - for item in ["time", "latitude", "longitude"]: - assert f"No event {item} found in file" in output - assert "No seismogram data found in file" in output - - sac.station.name = "test" - sac.station.latitude = 1.1 - sac.station.longitude = -23 - sac.event.time = datetime.now(timezone.utc) - sac.event.latitude = 33 - sac.event.longitude = 19.1 - sac.seismogram.data = np.random.rand(100) - sac.write(testfile) - with pytest.raises(SystemExit) as excinfo: - app(["utils", "checkdata", testfile]) - assert excinfo.value.code == 0 - output = capsys.readouterr().out - for item in ["name", "latitude", "longitude"]: - assert f"No station {item} found in file" not in output - for item in ["time", "latitude", "longitude"]: - assert f"No event {item} found in file" not in output - assert "No seismogram data found in file" not in output - - -class TestUtilsSampleData(TestUtilsBase): - @pytest.mark.dependency(name="download_sampledata") - def test_lib_download_sampledata(self, download_dir: Path) -> None: - assert len(os.listdir(download_dir)) == 0 - _sampledata.download_sampledata() - assert len(os.listdir(download_dir)) > 0 - with pytest.raises(FileExistsError): - _sampledata.download_sampledata() - _sampledata.download_sampledata(force=True) - - @pytest.mark.dependency(depends=["download_sampledata"]) - def test_lib_delete_sampledata(self, download_dir: Path) -> None: - _sampledata.download_sampledata() - assert len(os.listdir(download_dir)) > 0 - _sampledata.delete_sampledata() - assert download_dir.exists() is False - - def test_cli_usage(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["utils", "sampledata", "--help"]) - assert excinfo.value.code == 0 - assert "Usage" in capsys.readouterr().out - - def test_cli_download_sampledata(self, download_dir: Path) -> None: - assert len(os.listdir((download_dir))) == 0 - with pytest.raises(SystemExit) as excinfo: - app(["utils", "sampledata", "download"]) - assert excinfo.value.code == 0 - assert len(os.listdir((download_dir))) > 0 - - # can't download if it is already there - with pytest.raises(FileExistsError): - app(["utils", "sampledata", "download"]) - - # unless we use force - with pytest.raises(SystemExit) as excinfo: - app(["utils", "sampledata", "download", "--force"]) - assert excinfo.value.code == 0 - - def test_cli__delete_sampledata(self, download_dir: Path) -> None: - assert len(os.listdir((download_dir))) == 0 - with pytest.raises(SystemExit) as excinfo: - app(["utils", "sampledata", "download"]) - assert excinfo.value.code == 0 - assert len(os.listdir((download_dir))) > 0 - - with 
pytest.raises(SystemExit) as excinfo: - app(["utils", "sampledata", "delete"]) - assert excinfo.value.code == 0 - assert not download_dir.exists() diff --git a/uv.lock b/uv.lock index 607ad101..98c91744 100644 --- a/uv.lock +++ b/uv.lock @@ -368,7 +368,7 @@ wheels = [ [[package]] name = "cyclopts" -version = "4.5.4" +version = "4.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -376,9 +376,9 @@ dependencies = [ { name = "rich" }, { name = "rich-rst" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3b/d2/f37df900b163f51b4faacdb01bf4895c198906d67c5b2a85c2522de85459/cyclopts-4.5.4.tar.gz", hash = "sha256:eed4d6c76d4391aa796d8fcaabd50e5aad7793261792beb19285f62c5c456c8b", size = 162438, upload-time = "2026-02-20T00:58:46.161Z" } +sdist = { url = "https://files.pythonhosted.org/packages/49/5c/88a4068c660a096bbe87efc5b7c190080c9e86919c36ec5f092cb08d852f/cyclopts-4.6.0.tar.gz", hash = "sha256:483c4704b953ea6da742e8de15972f405d2e748d19a848a4d61595e8e5360ee5", size = 162724, upload-time = "2026-02-23T15:44:49.286Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/0f/119fa63fa93e0a331fbedcb27162d8f88d3ba2f38eba1567e3e44307b857/cyclopts-4.5.4-py3-none-any.whl", hash = "sha256:ad001986ec403ca1dc1ed20375c439d62ac796295ea32b451dfe25d6696bc71a", size = 200225, upload-time = "2026-02-20T00:58:47.275Z" }, + { url = "https://files.pythonhosted.org/packages/8f/eb/1e8337755a70dc7d7ff10a73dc8f20e9352c9ad6c2256ed863ac95cd3539/cyclopts-4.6.0-py3-none-any.whl", hash = "sha256:0a891cb55bfd79a3cdce024db8987b33316aba11071e5258c21ac12a640ba9f2", size = 200518, upload-time = "2026-02-23T15:44:47.854Z" }, ] [[package]] @@ -1102,14 +1102,14 @@ wheels = [ [[package]] name = "optype" -version = "0.15.0" +version = "0.16.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d7/93/6b9e43138ce36fbad134bd1a50460a7bbda61105b5a964e4cf773fe4d845/optype-0.15.0.tar.gz", hash = "sha256:457d6ca9e7da19967ec16d42bdf94e240b33b5d70a56fbbf5b427e5ea39cf41e", size = 99978, upload-time = "2025-12-08T12:32:41.422Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/d3/c88bb4bd90867356275ca839499313851af4b36fce6919ebc5e1de26e7ca/optype-0.16.0.tar.gz", hash = "sha256:fa682fd629ef6b70ba656ebc9fdd6614ba06ce13f52e0416dd8014c7e691a2d1", size = 53498, upload-time = "2026-02-19T23:37:09.495Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/8b/93f6c496fc5da062fd7e7c4745b5a8dd09b7b576c626075844fe97951a7d/optype-0.15.0-py3-none-any.whl", hash = "sha256:caba40ece9ea39b499fa76c036a82e0d452a432dd4dd3e8e0d30892be2e8c76c", size = 88716, upload-time = "2025-12-08T12:32:39.669Z" }, + { url = "https://files.pythonhosted.org/packages/91/a8/fe26515203cff140f1afc31236fb7f703d4bb4bd5679d28afcb3661c8d9f/optype-0.16.0-py3-none-any.whl", hash = "sha256:c28905713f55630b4bb8948f38e027ad13a541499ebcf957501f486da54b74d2", size = 65893, upload-time = "2026-02-19T23:37:08.217Z" }, ] [package.optional-dependencies] @@ -1536,8 +1536,8 @@ wheels = [ [[package]] name = "pysmo" -version = "1.0.0.dev12+g1a71df1ca" -source = { git = "https://github.com/pysmo/pysmo?rev=master#1a71df1caeb3737fac7478424412f5898b73302a" } +version = "1.0.0.dev19+g8ccdd62f2" +source = { git = "https://github.com/pysmo/pysmo?rev=master#8ccdd62f20d00e619295e5ea9a1fdb8453c28284" } dependencies = [ { name = "attrs" }, { name = "attrs-strict" }, @@ 
-1811,75 +1811,75 @@ wheels = [

[[package]]
name = "scipy"
-version = "1.17.0"
+version = "1.17.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "numpy" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/56/3e/9cca699f3486ce6bc12ff46dc2031f1ec8eb9ccc9a320fdaf925f1417426/scipy-1.17.0.tar.gz", hash = "sha256:2591060c8e648d8b96439e111ac41fd8342fdeff1876be2e19dea3fe8930454e", size = 30396830, upload-time = "2026-01-10T21:34:23.009Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/0b/11/7241a63e73ba5a516f1930ac8d5b44cbbfabd35ac73a2d08ca206df007c4/scipy-1.17.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:0d5018a57c24cb1dd828bcf51d7b10e65986d549f52ef5adb6b4d1ded3e32a57", size = 31364580, upload-time = "2026-01-10T21:25:25.717Z" },
-    { url = "https://files.pythonhosted.org/packages/ed/1d/5057f812d4f6adc91a20a2d6f2ebcdb517fdbc87ae3acc5633c9b97c8ba5/scipy-1.17.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:88c22af9e5d5a4f9e027e26772cc7b5922fab8bcc839edb3ae33de404feebd9e", size = 27969012, upload-time = "2026-01-10T21:25:30.921Z" },
-    { url = "https://files.pythonhosted.org/packages/e3/21/f6ec556c1e3b6ec4e088da667d9987bb77cc3ab3026511f427dc8451187d/scipy-1.17.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f3cd947f20fe17013d401b64e857c6b2da83cae567adbb75b9dcba865abc66d8", size = 20140691, upload-time = "2026-01-10T21:25:34.802Z" },
-    { url = "https://files.pythonhosted.org/packages/7a/fe/5e5ad04784964ba964a96f16c8d4676aa1b51357199014dce58ab7ec5670/scipy-1.17.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e8c0b331c2c1f531eb51f1b4fc9ba709521a712cce58f1aa627bc007421a5306", size = 22463015, upload-time = "2026-01-10T21:25:39.277Z" },
-    { url = "https://files.pythonhosted.org/packages/4a/69/7c347e857224fcaf32a34a05183b9d8a7aca25f8f2d10b8a698b8388561a/scipy-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5194c445d0a1c7a6c1a4a4681b6b7c71baad98ff66d96b949097e7513c9d6742", size = 32724197, upload-time = "2026-01-10T21:25:44.084Z" },
-    { url = "https://files.pythonhosted.org/packages/d1/fe/66d73b76d378ba8cc2fe605920c0c75092e3a65ae746e1e767d9d020a75a/scipy-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9eeb9b5f5997f75507814ed9d298ab23f62cf79f5a3ef90031b1ee2506abdb5b", size = 35009148, upload-time = "2026-01-10T21:25:50.591Z" },
-    { url = "https://files.pythonhosted.org/packages/af/07/07dec27d9dc41c18d8c43c69e9e413431d20c53a0339c388bcf72f353c4b/scipy-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:40052543f7bbe921df4408f46003d6f01c6af109b9e2c8a66dd1cf6cf57f7d5d", size = 34798766, upload-time = "2026-01-10T21:25:59.41Z" },
-    { url = "https://files.pythonhosted.org/packages/81/61/0470810c8a093cdacd4ba7504b8a218fd49ca070d79eca23a615f5d9a0b0/scipy-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0cf46c8013fec9d3694dc572f0b54100c28405d55d3e2cb15e2895b25057996e", size = 37405953, upload-time = "2026-01-10T21:26:07.75Z" },
-    { url = "https://files.pythonhosted.org/packages/92/ce/672ed546f96d5d41ae78c4b9b02006cedd0b3d6f2bf5bb76ea455c320c28/scipy-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:0937a0b0d8d593a198cededd4c439a0ea216a3f36653901ea1f3e4be949056f8", size = 36328121, upload-time = "2026-01-10T21:26:16.509Z" },
-    { url = "https://files.pythonhosted.org/packages/9d/21/38165845392cae67b61843a52c6455d47d0cc2a40dd495c89f4362944654/scipy-1.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:f603d8a5518c7426414d1d8f82e253e454471de682ce5e39c29adb0df1efb86b", size = 24314368, upload-time = "2026-01-10T21:26:23.087Z" },
-    { url = "https://files.pythonhosted.org/packages/0c/51/3468fdfd49387ddefee1636f5cf6d03ce603b75205bf439bbf0e62069bfd/scipy-1.17.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:65ec32f3d32dfc48c72df4291345dae4f048749bc8d5203ee0a3f347f96c5ce6", size = 31344101, upload-time = "2026-01-10T21:26:30.25Z" },
-    { url = "https://files.pythonhosted.org/packages/b2/9a/9406aec58268d437636069419e6977af953d1e246df941d42d3720b7277b/scipy-1.17.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:1f9586a58039d7229ce77b52f8472c972448cded5736eaf102d5658bbac4c269", size = 27950385, upload-time = "2026-01-10T21:26:36.801Z" },
-    { url = "https://files.pythonhosted.org/packages/4f/98/e7342709e17afdfd1b26b56ae499ef4939b45a23a00e471dfb5375eea205/scipy-1.17.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9fad7d3578c877d606b1150135c2639e9de9cecd3705caa37b66862977cc3e72", size = 20122115, upload-time = "2026-01-10T21:26:42.107Z" },
-    { url = "https://files.pythonhosted.org/packages/fd/0e/9eeeb5357a64fd157cbe0302c213517c541cc16b8486d82de251f3c68ede/scipy-1.17.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:423ca1f6584fc03936972b5f7c06961670dbba9f234e71676a7c7ccf938a0d61", size = 22442402, upload-time = "2026-01-10T21:26:48.029Z" },
-    { url = "https://files.pythonhosted.org/packages/c9/10/be13397a0e434f98e0c79552b2b584ae5bb1c8b2be95db421533bbca5369/scipy-1.17.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe508b5690e9eaaa9467fc047f833af58f1152ae51a0d0aed67aa5801f4dd7d6", size = 32696338, upload-time = "2026-01-10T21:26:55.521Z" },
-    { url = "https://files.pythonhosted.org/packages/63/1e/12fbf2a3bb240161651c94bb5cdd0eae5d4e8cc6eaeceb74ab07b12a753d/scipy-1.17.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6680f2dfd4f6182e7d6db161344537da644d1cf85cf293f015c60a17ecf08752", size = 34977201, upload-time = "2026-01-10T21:27:03.501Z" },
-    { url = "https://files.pythonhosted.org/packages/19/5b/1a63923e23ccd20bd32156d7dd708af5bbde410daa993aa2500c847ab2d2/scipy-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eec3842ec9ac9de5917899b277428886042a93db0b227ebbe3a333b64ec7643d", size = 34777384, upload-time = "2026-01-10T21:27:11.423Z" },
-    { url = "https://files.pythonhosted.org/packages/39/22/b5da95d74edcf81e540e467202a988c50fef41bd2011f46e05f72ba07df6/scipy-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d7425fcafbc09a03731e1bc05581f5fad988e48c6a861f441b7ab729a49a55ea", size = 37379586, upload-time = "2026-01-10T21:27:20.171Z" },
-    { url = "https://files.pythonhosted.org/packages/b9/b6/8ac583d6da79e7b9e520579f03007cb006f063642afd6b2eeb16b890bf93/scipy-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:87b411e42b425b84777718cc41516b8a7e0795abfa8e8e1d573bf0ef014f0812", size = 36287211, upload-time = "2026-01-10T21:28:43.122Z" },
-    { url = "https://files.pythonhosted.org/packages/55/fb/7db19e0b3e52f882b420417644ec81dd57eeef1bd1705b6f689d8ff93541/scipy-1.17.0-cp313-cp313-win_arm64.whl", hash = "sha256:357ca001c6e37601066092e7c89cca2f1ce74e2a520ca78d063a6d2201101df2", size = 24312646, upload-time = "2026-01-10T21:28:49.893Z" },
-    { url = "https://files.pythonhosted.org/packages/20/b6/7feaa252c21cc7aff335c6c55e1b90ab3e3306da3f048109b8b639b94648/scipy-1.17.0-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:ec0827aa4d36cb79ff1b81de898e948a51ac0b9b1c43e4a372c0508c38c0f9a3", size = 31693194, upload-time = "2026-01-10T21:27:27.454Z" },
-    { url = "https://files.pythonhosted.org/packages/76/bb/bbb392005abce039fb7e672cb78ac7d158700e826b0515cab6b5b60c26fb/scipy-1.17.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:819fc26862b4b3c73a60d486dbb919202f3d6d98c87cf20c223511429f2d1a97", size = 28365415, upload-time = "2026-01-10T21:27:34.26Z" },
-    { url = "https://files.pythonhosted.org/packages/37/da/9d33196ecc99fba16a409c691ed464a3a283ac454a34a13a3a57c0d66f3a/scipy-1.17.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:363ad4ae2853d88ebcde3ae6ec46ccca903ea9835ee8ba543f12f575e7b07e4e", size = 20537232, upload-time = "2026-01-10T21:27:40.306Z" },
-    { url = "https://files.pythonhosted.org/packages/56/9d/f4b184f6ddb28e9a5caea36a6f98e8ecd2a524f9127354087ce780885d83/scipy-1.17.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:979c3a0ff8e5ba254d45d59ebd38cde48fce4f10b5125c680c7a4bfe177aab07", size = 22791051, upload-time = "2026-01-10T21:27:46.539Z" },
-    { url = "https://files.pythonhosted.org/packages/9b/9d/025cccdd738a72140efc582b1641d0dd4caf2e86c3fb127568dc80444e6e/scipy-1.17.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:130d12926ae34399d157de777472bf82e9061c60cc081372b3118edacafe1d00", size = 32815098, upload-time = "2026-01-10T21:27:54.389Z" },
-    { url = "https://files.pythonhosted.org/packages/48/5f/09b879619f8bca15ce392bfc1894bd9c54377e01d1b3f2f3b595a1b4d945/scipy-1.17.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e886000eb4919eae3a44f035e63f0fd8b651234117e8f6f29bad1cd26e7bc45", size = 35031342, upload-time = "2026-01-10T21:28:03.012Z" },
-    { url = "https://files.pythonhosted.org/packages/f2/9a/f0f0a9f0aa079d2f106555b984ff0fbb11a837df280f04f71f056ea9c6e4/scipy-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:13c4096ac6bc31d706018f06a49abe0485f96499deb82066b94d19b02f664209", size = 34893199, upload-time = "2026-01-10T21:28:10.832Z" },
-    { url = "https://files.pythonhosted.org/packages/90/b8/4f0f5cf0c5ea4d7548424e6533e6b17d164f34a6e2fb2e43ffebb6697b06/scipy-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cacbaddd91fcffde703934897c5cd2c7cb0371fac195d383f4e1f1c5d3f3bd04", size = 37438061, upload-time = "2026-01-10T21:28:19.684Z" },
-    { url = "https://files.pythonhosted.org/packages/f9/cc/2bd59140ed3b2fa2882fb15da0a9cb1b5a6443d67cfd0d98d4cec83a57ec/scipy-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:edce1a1cf66298cccdc48a1bdf8fb10a3bf58e8b58d6c3883dd1530e103f87c0", size = 36328593, upload-time = "2026-01-10T21:28:28.007Z" },
-    { url = "https://files.pythonhosted.org/packages/13/1b/c87cc44a0d2c7aaf0f003aef2904c3d097b422a96c7e7c07f5efd9073c1b/scipy-1.17.0-cp313-cp313t-win_arm64.whl", hash = "sha256:30509da9dbec1c2ed8f168b8d8aa853bc6723fede1dbc23c7d43a56f5ab72a67", size = 24625083, upload-time = "2026-01-10T21:28:35.188Z" },
-    { url = "https://files.pythonhosted.org/packages/1a/2d/51006cd369b8e7879e1c630999a19d1fbf6f8b5ed3e33374f29dc87e53b3/scipy-1.17.0-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:c17514d11b78be8f7e6331b983a65a7f5ca1fd037b95e27b280921fe5606286a", size = 31346803, upload-time = "2026-01-10T21:28:57.24Z" },
-    { url = "https://files.pythonhosted.org/packages/d6/2e/2349458c3ce445f53a6c93d4386b1c4c5c0c540917304c01222ff95ff317/scipy-1.17.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:4e00562e519c09da34c31685f6acc3aa384d4d50604db0f245c14e1b4488bfa2", size = 27967182, upload-time = "2026-01-10T21:29:04.107Z" },
-    { url = "https://files.pythonhosted.org/packages/5e/7c/df525fbfa77b878d1cfe625249529514dc02f4fd5f45f0f6295676a76528/scipy-1.17.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:f7df7941d71314e60a481e02d5ebcb3f0185b8d799c70d03d8258f6c80f3d467", size = 20139125, upload-time = "2026-01-10T21:29:10.179Z" },
-    { url = "https://files.pythonhosted.org/packages/33/11/fcf9d43a7ed1234d31765ec643b0515a85a30b58eddccc5d5a4d12b5f194/scipy-1.17.0-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:aabf057c632798832f071a8dde013c2e26284043934f53b00489f1773b33527e", size = 22443554, upload-time = "2026-01-10T21:29:15.888Z" },
-    { url = "https://files.pythonhosted.org/packages/80/5c/ea5d239cda2dd3d31399424967a24d556cf409fbea7b5b21412b0fd0a44f/scipy-1.17.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a38c3337e00be6fd8a95b4ed66b5d988bac4ec888fd922c2ea9fe5fb1603dd67", size = 32757834, upload-time = "2026-01-10T21:29:23.406Z" },
-    { url = "https://files.pythonhosted.org/packages/b8/7e/8c917cc573310e5dc91cbeead76f1b600d3fb17cf0969db02c9cf92e3cfa/scipy-1.17.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00fb5f8ec8398ad90215008d8b6009c9db9fa924fd4c7d6be307c6f945f9cd73", size = 34995775, upload-time = "2026-01-10T21:29:31.915Z" },
-    { url = "https://files.pythonhosted.org/packages/c5/43/176c0c3c07b3f7df324e7cdd933d3e2c4898ca202b090bd5ba122f9fe270/scipy-1.17.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f2a4942b0f5f7c23c7cd641a0ca1955e2ae83dedcff537e3a0259096635e186b", size = 34841240, upload-time = "2026-01-10T21:29:39.995Z" },
-    { url = "https://files.pythonhosted.org/packages/44/8c/d1f5f4b491160592e7f084d997de53a8e896a3ac01cd07e59f43ca222744/scipy-1.17.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:dbf133ced83889583156566d2bdf7a07ff89228fe0c0cb727f777de92092ec6b", size = 37394463, upload-time = "2026-01-10T21:29:48.723Z" },
-    { url = "https://files.pythonhosted.org/packages/9f/ec/42a6657f8d2d087e750e9a5dde0b481fd135657f09eaf1cf5688bb23c338/scipy-1.17.0-cp314-cp314-win_amd64.whl", hash = "sha256:3625c631a7acd7cfd929e4e31d2582cf00f42fcf06011f59281271746d77e061", size = 37053015, upload-time = "2026-01-10T21:30:51.418Z" },
-    { url = "https://files.pythonhosted.org/packages/27/58/6b89a6afd132787d89a362d443a7bddd511b8f41336a1ae47f9e4f000dc4/scipy-1.17.0-cp314-cp314-win_arm64.whl", hash = "sha256:9244608d27eafe02b20558523ba57f15c689357c85bdcfe920b1828750aa26eb", size = 24951312, upload-time = "2026-01-10T21:30:56.771Z" },
-    { url = "https://files.pythonhosted.org/packages/e9/01/f58916b9d9ae0112b86d7c3b10b9e685625ce6e8248df139d0fcb17f7397/scipy-1.17.0-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:2b531f57e09c946f56ad0b4a3b2abee778789097871fc541e267d2eca081cff1", size = 31706502, upload-time = "2026-01-10T21:29:56.326Z" },
-    { url = "https://files.pythonhosted.org/packages/59/8e/2912a87f94a7d1f8b38aabc0faf74b82d3b6c9e22be991c49979f0eceed8/scipy-1.17.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:13e861634a2c480bd237deb69333ac79ea1941b94568d4b0efa5db5e263d4fd1", size = 28380854, upload-time = "2026-01-10T21:30:01.554Z" },
-    { url = "https://files.pythonhosted.org/packages/bd/1c/874137a52dddab7d5d595c1887089a2125d27d0601fce8c0026a24a92a0b/scipy-1.17.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:eb2651271135154aa24f6481cbae5cc8af1f0dd46e6533fb7b56aa9727b6a232", size = 20552752, upload-time = "2026-01-10T21:30:05.93Z" },
-    { url = "https://files.pythonhosted.org/packages/3f/f0/7518d171cb735f6400f4576cf70f756d5b419a07fe1867da34e2c2c9c11b/scipy-1.17.0-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:c5e8647f60679790c2f5c76be17e2e9247dc6b98ad0d3b065861e082c56e078d", size = 22803972, upload-time = "2026-01-10T21:30:10.651Z" },
-    { url = "https://files.pythonhosted.org/packages/7c/74/3498563a2c619e8a3ebb4d75457486c249b19b5b04a30600dfd9af06bea5/scipy-1.17.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5fb10d17e649e1446410895639f3385fd2bf4c3c7dfc9bea937bddcbc3d7b9ba", size = 32829770, upload-time = "2026-01-10T21:30:16.359Z" },
-    { url = "https://files.pythonhosted.org/packages/48/d1/7b50cedd8c6c9d6f706b4b36fa8544d829c712a75e370f763b318e9638c1/scipy-1.17.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8547e7c57f932e7354a2319fab613981cde910631979f74c9b542bb167a8b9db", size = 35051093, upload-time = "2026-01-10T21:30:22.987Z" },
-    { url = "https://files.pythonhosted.org/packages/e2/82/a2d684dfddb87ba1b3ea325df7c3293496ee9accb3a19abe9429bce94755/scipy-1.17.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33af70d040e8af9d5e7a38b5ed3b772adddd281e3062ff23fec49e49681c38cf", size = 34909905, upload-time = "2026-01-10T21:30:28.704Z" },
-    { url = "https://files.pythonhosted.org/packages/ef/5e/e565bd73991d42023eb82bb99e51c5b3d9e2c588ca9d4b3e2cc1d3ca62a6/scipy-1.17.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb55bb97d00f8b7ab95cb64f873eb0bf54d9446264d9f3609130381233483f", size = 37457743, upload-time = "2026-01-10T21:30:34.819Z" },
-    { url = "https://files.pythonhosted.org/packages/58/a8/a66a75c3d8f1fb2b83f66007d6455a06a6f6cf5618c3dc35bc9b69dd096e/scipy-1.17.0-cp314-cp314t-win_amd64.whl", hash = "sha256:1ff269abf702f6c7e67a4b7aad981d42871a11b9dd83c58d2d2ea624efbd1088", size = 37098574, upload-time = "2026-01-10T21:30:40.782Z" },
-    { url = "https://files.pythonhosted.org/packages/56/a5/df8f46ef7da168f1bc52cd86e09a9de5c6f19cc1da04454d51b7d4f43408/scipy-1.17.0-cp314-cp314t-win_arm64.whl", hash = "sha256:031121914e295d9791319a1875444d55079885bbae5bdc9c5e0f2ee5f09d34ff", size = 25246266, upload-time = "2026-01-10T21:30:45.923Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/7a/97/5a3609c4f8d58b039179648e62dd220f89864f56f7357f5d4f45c29eb2cc/scipy-1.17.1.tar.gz", hash = "sha256:95d8e012d8cb8816c226aef832200b1d45109ed4464303e997c5b13122b297c0", size = 30573822, upload-time = "2026-02-23T00:26:24.851Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/35/48/b992b488d6f299dbe3f11a20b24d3dda3d46f1a635ede1c46b5b17a7b163/scipy-1.17.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:35c3a56d2ef83efc372eaec584314bd0ef2e2f0d2adb21c55e6ad5b344c0dcb8", size = 31610954, upload-time = "2026-02-23T00:17:49.855Z" },
+    { url = "https://files.pythonhosted.org/packages/b2/02/cf107b01494c19dc100f1d0b7ac3cc08666e96ba2d64db7626066cee895e/scipy-1.17.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:fcb310ddb270a06114bb64bbe53c94926b943f5b7f0842194d585c65eb4edd76", size = 28172662, upload-time = "2026-02-23T00:18:01.64Z" },
+    { url = "https://files.pythonhosted.org/packages/cf/a9/599c28631bad314d219cf9ffd40e985b24d603fc8a2f4ccc5ae8419a535b/scipy-1.17.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:cc90d2e9c7e5c7f1a482c9875007c095c3194b1cfedca3c2f3291cdc2bc7c086", size = 20344366, upload-time = "2026-02-23T00:18:12.015Z" },
+    { url = "https://files.pythonhosted.org/packages/35/f5/906eda513271c8deb5af284e5ef0206d17a96239af79f9fa0aebfe0e36b4/scipy-1.17.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:c80be5ede8f3f8eded4eff73cc99a25c388ce98e555b17d31da05287015ffa5b", size = 22704017, upload-time = "2026-02-23T00:18:21.502Z" },
+    { url = "https://files.pythonhosted.org/packages/da/34/16f10e3042d2f1d6b66e0428308ab52224b6a23049cb2f5c1756f713815f/scipy-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e19ebea31758fac5893a2ac360fedd00116cbb7628e650842a6691ba7ca28a21", size = 32927842, upload-time = "2026-02-23T00:18:35.367Z" },
+    { url = "https://files.pythonhosted.org/packages/01/8e/1e35281b8ab6d5d72ebe9911edcdffa3f36b04ed9d51dec6dd140396e220/scipy-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02ae3b274fde71c5e92ac4d54bc06c42d80e399fec704383dcd99b301df37458", size = 35235890, upload-time = "2026-02-23T00:18:49.188Z" },
+    { url = "https://files.pythonhosted.org/packages/c5/5c/9d7f4c88bea6e0d5a4f1bc0506a53a00e9fcb198de372bfe4d3652cef482/scipy-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a604bae87c6195d8b1045eddece0514d041604b14f2727bbc2b3020172045eb", size = 35003557, upload-time = "2026-02-23T00:18:54.74Z" },
+    { url = "https://files.pythonhosted.org/packages/65/94/7698add8f276dbab7a9de9fb6b0e02fc13ee61d51c7c3f85ac28b65e1239/scipy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f590cd684941912d10becc07325a3eeb77886fe981415660d9265c4c418d0bea", size = 37625856, upload-time = "2026-02-23T00:19:00.307Z" },
+    { url = "https://files.pythonhosted.org/packages/a2/84/dc08d77fbf3d87d3ee27f6a0c6dcce1de5829a64f2eae85a0ecc1f0daa73/scipy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:41b71f4a3a4cab9d366cd9065b288efc4d4f3c0b37a91a8e0947fb5bd7f31d87", size = 36549682, upload-time = "2026-02-23T00:19:07.67Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/98/fe9ae9ffb3b54b62559f52dedaebe204b408db8109a8c66fdd04869e6424/scipy-1.17.1-cp312-cp312-win_arm64.whl", hash = "sha256:f4115102802df98b2b0db3cce5cb9b92572633a1197c77b7553e5203f284a5b3", size = 24547340, upload-time = "2026-02-23T00:19:12.024Z" },
+    { url = "https://files.pythonhosted.org/packages/76/27/07ee1b57b65e92645f219b37148a7e7928b82e2b5dbeccecb4dff7c64f0b/scipy-1.17.1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:5e3c5c011904115f88a39308379c17f91546f77c1667cea98739fe0fccea804c", size = 31590199, upload-time = "2026-02-23T00:19:17.192Z" },
+    { url = "https://files.pythonhosted.org/packages/ec/ae/db19f8ab842e9b724bf5dbb7db29302a91f1e55bc4d04b1025d6d605a2c5/scipy-1.17.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:6fac755ca3d2c3edcb22f479fceaa241704111414831ddd3bc6056e18516892f", size = 28154001, upload-time = "2026-02-23T00:19:22.241Z" },
+    { url = "https://files.pythonhosted.org/packages/5b/58/3ce96251560107b381cbd6e8413c483bbb1228a6b919fa8652b0d4090e7f/scipy-1.17.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:7ff200bf9d24f2e4d5dc6ee8c3ac64d739d3a89e2326ba68aaf6c4a2b838fd7d", size = 20325719, upload-time = "2026-02-23T00:19:26.329Z" },
+    { url = "https://files.pythonhosted.org/packages/b2/83/15087d945e0e4d48ce2377498abf5ad171ae013232ae31d06f336e64c999/scipy-1.17.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4b400bdc6f79fa02a4d86640310dde87a21fba0c979efff5248908c6f15fad1b", size = 22683595, upload-time = "2026-02-23T00:19:30.304Z" },
+    { url = "https://files.pythonhosted.org/packages/b4/e0/e58fbde4a1a594c8be8114eb4aac1a55bcd6587047efc18a61eb1f5c0d30/scipy-1.17.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b64ca7d4aee0102a97f3ba22124052b4bd2152522355073580bf4845e2550b6", size = 32896429, upload-time = "2026-02-23T00:19:35.536Z" },
+    { url = "https://files.pythonhosted.org/packages/f5/5f/f17563f28ff03c7b6799c50d01d5d856a1d55f2676f537ca8d28c7f627cd/scipy-1.17.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:581b2264fc0aa555f3f435a5944da7504ea3a065d7029ad60e7c3d1ae09c5464", size = 35203952, upload-time = "2026-02-23T00:19:42.259Z" },
+    { url = "https://files.pythonhosted.org/packages/8d/a5/9afd17de24f657fdfe4df9a3f1ea049b39aef7c06000c13db1530d81ccca/scipy-1.17.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:beeda3d4ae615106d7094f7e7cef6218392e4465cc95d25f900bebabfded0950", size = 34979063, upload-time = "2026-02-23T00:19:47.547Z" },
+    { url = "https://files.pythonhosted.org/packages/8b/13/88b1d2384b424bf7c924f2038c1c409f8d88bb2a8d49d097861dd64a57b2/scipy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6609bc224e9568f65064cfa72edc0f24ee6655b47575954ec6339534b2798369", size = 37598449, upload-time = "2026-02-23T00:19:53.238Z" },
+    { url = "https://files.pythonhosted.org/packages/35/e5/d6d0e51fc888f692a35134336866341c08655d92614f492c6860dc45bb2c/scipy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:37425bc9175607b0268f493d79a292c39f9d001a357bebb6b88fdfaff13f6448", size = 36510943, upload-time = "2026-02-23T00:20:50.89Z" },
+    { url = "https://files.pythonhosted.org/packages/2a/fd/3be73c564e2a01e690e19cc618811540ba5354c67c8680dce3281123fb79/scipy-1.17.1-cp313-cp313-win_arm64.whl", hash = "sha256:5cf36e801231b6a2059bf354720274b7558746f3b1a4efb43fcf557ccd484a87", size = 24545621, upload-time = "2026-02-23T00:20:55.871Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/6b/17787db8b8114933a66f9dcc479a8272e4b4da75fe03b0c282f7b0ade8cd/scipy-1.17.1-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:d59c30000a16d8edc7e64152e30220bfbd724c9bbb08368c054e24c651314f0a", size = 31936708, upload-time = "2026-02-23T00:19:58.694Z" },
+    { url = "https://files.pythonhosted.org/packages/38/2e/524405c2b6392765ab1e2b722a41d5da33dc5c7b7278184a8ad29b6cb206/scipy-1.17.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:010f4333c96c9bb1a4516269e33cb5917b08ef2166d5556ca2fd9f082a9e6ea0", size = 28570135, upload-time = "2026-02-23T00:20:03.934Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/c3/5bd7199f4ea8556c0c8e39f04ccb014ac37d1468e6cfa6a95c6b3562b76e/scipy-1.17.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2ceb2d3e01c5f1d83c4189737a42d9cb2fc38a6eeed225e7515eef71ad301dce", size = 20741977, upload-time = "2026-02-23T00:20:07.935Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/b8/8ccd9b766ad14c78386599708eb745f6b44f08400a5fd0ade7cf89b6fc93/scipy-1.17.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:844e165636711ef41f80b4103ed234181646b98a53c8f05da12ca5ca289134f6", size = 23029601, upload-time = "2026-02-23T00:20:12.161Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/a0/3cb6f4d2fb3e17428ad2880333cac878909ad1a89f678527b5328b93c1d4/scipy-1.17.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:158dd96d2207e21c966063e1635b1063cd7787b627b6f07305315dd73d9c679e", size = 33019667, upload-time = "2026-02-23T00:20:17.208Z" },
+    { url = "https://files.pythonhosted.org/packages/f3/c3/2d834a5ac7bf3a0c806ad1508efc02dda3c8c61472a56132d7894c312dea/scipy-1.17.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74cbb80d93260fe2ffa334efa24cb8f2f0f622a9b9febf8b483c0b865bfb3475", size = 35264159, upload-time = "2026-02-23T00:20:23.087Z" },
+    { url = "https://files.pythonhosted.org/packages/4d/77/d3ed4becfdbd217c52062fafe35a72388d1bd82c2d0ba5ca19d6fcc93e11/scipy-1.17.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dbc12c9f3d185f5c737d801da555fb74b3dcfa1a50b66a1a93e09190f41fab50", size = 35102771, upload-time = "2026-02-23T00:20:28.636Z" },
+    { url = "https://files.pythonhosted.org/packages/bd/12/d19da97efde68ca1ee5538bb261d5d2c062f0c055575128f11a2730e3ac1/scipy-1.17.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94055a11dfebe37c656e70317e1996dc197e1a15bbcc351bcdd4610e128fe1ca", size = 37665910, upload-time = "2026-02-23T00:20:34.743Z" },
+    { url = "https://files.pythonhosted.org/packages/06/1c/1172a88d507a4baaf72c5a09bb6c018fe2ae0ab622e5830b703a46cc9e44/scipy-1.17.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e30bdeaa5deed6bc27b4cc490823cd0347d7dae09119b8803ae576ea0ce52e4c", size = 36562980, upload-time = "2026-02-23T00:20:40.575Z" },
+    { url = "https://files.pythonhosted.org/packages/70/b0/eb757336e5a76dfa7911f63252e3b7d1de00935d7705cf772db5b45ec238/scipy-1.17.1-cp313-cp313t-win_arm64.whl", hash = "sha256:a720477885a9d2411f94a93d16f9d89bad0f28ca23c3f8daa521e2dcc3f44d49", size = 24856543, upload-time = "2026-02-23T00:20:45.313Z" },
+    { url = "https://files.pythonhosted.org/packages/cf/83/333afb452af6f0fd70414dc04f898647ee1423979ce02efa75c3b0f2c28e/scipy-1.17.1-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:a48a72c77a310327f6a3a920092fa2b8fd03d7deaa60f093038f22d98e096717", size = 31584510, upload-time = "2026-02-23T00:21:01.015Z" },
+    { url = "https://files.pythonhosted.org/packages/ed/a6/d05a85fd51daeb2e4ea71d102f15b34fedca8e931af02594193ae4fd25f7/scipy-1.17.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:45abad819184f07240d8a696117a7aacd39787af9e0b719d00285549ed19a1e9", size = 28170131, upload-time = "2026-02-23T00:21:05.888Z" },
+    { url = "https://files.pythonhosted.org/packages/db/7b/8624a203326675d7746a254083a187398090a179335b2e4a20e2ddc46e83/scipy-1.17.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:3fd1fcdab3ea951b610dc4cef356d416d5802991e7e32b5254828d342f7b7e0b", size = 20342032, upload-time = "2026-02-23T00:21:09.904Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/35/2c342897c00775d688d8ff3987aced3426858fd89d5a0e26e020b660b301/scipy-1.17.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7bdf2da170b67fdf10bca777614b1c7d96ae3ca5794fd9587dce41eb2966e866", size = 22678766, upload-time = "2026-02-23T00:21:14.313Z" },
+    { url = "https://files.pythonhosted.org/packages/ef/f2/7cdb8eb308a1a6ae1e19f945913c82c23c0c442a462a46480ce487fdc0ac/scipy-1.17.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:adb2642e060a6549c343603a3851ba76ef0b74cc8c079a9a58121c7ec9fe2350", size = 32957007, upload-time = "2026-02-23T00:21:19.663Z" },
+    { url = "https://files.pythonhosted.org/packages/0b/2e/7eea398450457ecb54e18e9d10110993fa65561c4f3add5e8eccd2b9cd41/scipy-1.17.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee2cfda04c00a857206a4330f0c5e3e56535494e30ca445eb19ec624ae75118", size = 35221333, upload-time = "2026-02-23T00:21:25.278Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/77/5b8509d03b77f093a0d52e606d3c4f79e8b06d1d38c441dacb1e26cacf46/scipy-1.17.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d2650c1fb97e184d12d8ba010493ee7b322864f7d3d00d3f9bb97d9c21de4068", size = 35042066, upload-time = "2026-02-23T00:21:31.358Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/df/18f80fb99df40b4070328d5ae5c596f2f00fffb50167e31439e932f29e7d/scipy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:08b900519463543aa604a06bec02461558a6e1cef8fdbb8098f77a48a83c8118", size = 37612763, upload-time = "2026-02-23T00:21:37.247Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/39/f0e8ea762a764a9dc52aa7dabcfad51a354819de1f0d4652b6a1122424d6/scipy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:3877ac408e14da24a6196de0ddcace62092bfc12a83823e92e49e40747e52c19", size = 37290984, upload-time = "2026-02-23T00:22:35.023Z" },
+    { url = "https://files.pythonhosted.org/packages/7c/56/fe201e3b0f93d1a8bcf75d3379affd228a63d7e2d80ab45467a74b494947/scipy-1.17.1-cp314-cp314-win_arm64.whl", hash = "sha256:f8885db0bc2bffa59d5c1b72fad7a6a92d3e80e7257f967dd81abb553a90d293", size = 25192877, upload-time = "2026-02-23T00:22:39.798Z" },
+    { url = "https://files.pythonhosted.org/packages/96/ad/f8c414e121f82e02d76f310f16db9899c4fcde36710329502a6b2a3c0392/scipy-1.17.1-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:1cc682cea2ae55524432f3cdff9e9a3be743d52a7443d0cba9017c23c87ae2f6", size = 31949750, upload-time = "2026-02-23T00:21:42.289Z" },
+    { url = "https://files.pythonhosted.org/packages/7c/b0/c741e8865d61b67c81e255f4f0a832846c064e426636cd7de84e74d209be/scipy-1.17.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:2040ad4d1795a0ae89bfc7e8429677f365d45aa9fd5e4587cf1ea737f927b4a1", size = 28585858, upload-time = "2026-02-23T00:21:47.706Z" },
+    { url = "https://files.pythonhosted.org/packages/ed/1b/3985219c6177866628fa7c2595bfd23f193ceebbe472c98a08824b9466ff/scipy-1.17.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:131f5aaea57602008f9822e2115029b55d4b5f7c070287699fe45c661d051e39", size = 20757723, upload-time = "2026-02-23T00:21:52.039Z" },
+    { url = "https://files.pythonhosted.org/packages/c0/19/2a04aa25050d656d6f7b9e7b685cc83d6957fb101665bfd9369ca6534563/scipy-1.17.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9cdc1a2fcfd5c52cfb3045feb399f7b3ce822abdde3a193a6b9a60b3cb5854ca", size = 23043098, upload-time = "2026-02-23T00:21:56.185Z" },
+    { url = "https://files.pythonhosted.org/packages/86/f1/3383beb9b5d0dbddd030335bf8a8b32d4317185efe495374f134d8be6cce/scipy-1.17.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e3dcd57ab780c741fde8dc68619de988b966db759a3c3152e8e9142c26295ad", size = 33030397, upload-time = "2026-02-23T00:22:01.404Z" },
+    { url = "https://files.pythonhosted.org/packages/41/68/8f21e8a65a5a03f25a79165ec9d2b28c00e66dc80546cf5eb803aeeff35b/scipy-1.17.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a9956e4d4f4a301ebf6cde39850333a6b6110799d470dbbb1e25326ac447f52a", size = 35281163, upload-time = "2026-02-23T00:22:07.024Z" },
+    { url = "https://files.pythonhosted.org/packages/84/8d/c8a5e19479554007a5632ed7529e665c315ae7492b4f946b0deb39870e39/scipy-1.17.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:a4328d245944d09fd639771de275701ccadf5f781ba0ff092ad141e017eccda4", size = 35116291, upload-time = "2026-02-23T00:22:12.585Z" },
+    { url = "https://files.pythonhosted.org/packages/52/52/e57eceff0e342a1f50e274264ed47497b59e6a4e3118808ee58ddda7b74a/scipy-1.17.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a77cbd07b940d326d39a1d1b37817e2ee4d79cb30e7338f3d0cddffae70fcaa2", size = 37682317, upload-time = "2026-02-23T00:22:18.513Z" },
+    { url = "https://files.pythonhosted.org/packages/11/2f/b29eafe4a3fbc3d6de9662b36e028d5f039e72d345e05c250e121a230dd4/scipy-1.17.1-cp314-cp314t-win_amd64.whl", hash = "sha256:eb092099205ef62cd1782b006658db09e2fed75bffcae7cc0d44052d8aa0f484", size = 37345327, upload-time = "2026-02-23T00:22:24.442Z" },
+    { url = "https://files.pythonhosted.org/packages/07/39/338d9219c4e87f3e708f18857ecd24d22a0c3094752393319553096b98af/scipy-1.17.1-cp314-cp314t-win_arm64.whl", hash = "sha256:200e1050faffacc162be6a486a984a0497866ec54149a01270adc8a59b7c7d21", size = 25489165, upload-time = "2026-02-23T00:22:29.563Z" },
]

[[package]]
name = "scipy-stubs"
-version = "1.17.0.2"
+version = "1.17.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "optype", extra = ["numpy"] },
]
-sdist = { url = "https://files.pythonhosted.org/packages/40/fe/5fa7da49821ea94d60629ae71277fa8d7e16eb20602f720062b6c30a644c/scipy_stubs-1.17.0.2.tar.gz", hash = "sha256:3981bd7fa4c189a8493307afadaee1a830d9a0de8e3ae2f4603f192b6260ef2a", size = 379897, upload-time = "2026-01-22T19:17:08Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d9/ad/413b0d18efca7bb48574d28e91253409d91ee6121e7937022d0d380dfc6a/scipy_stubs-1.17.1.0.tar.gz", hash = "sha256:5dc51c21765b145c2d132b96b63ff4f835dd5fb768006876d1554e7a59c61571", size = 381420, upload-time = "2026-02-23T10:33:04.742Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/51/e3/20233497e4a27956e7392c3f7879e6ee7f767f268079f24f4b089b70f563/scipy_stubs-1.17.0.2-py3-none-any.whl", hash = "sha256:99d1aa75b7d72a7ee36a68d18bcf1149f62ab577bbd1236c65c471b3b465d824", size = 586137, upload-time = "2026-01-22T19:17:05.802Z" },
+    { url = "https://files.pythonhosted.org/packages/6c/ee/c6811e04ff9d5dd1d92236e8df7ebc4db6aa65c70b9938cec293348b8ec4/scipy_stubs-1.17.1.0-py3-none-any.whl", hash = "sha256:5c9c84993d36b104acb2d187b05985eb79f73491c60d83292dd738093d53d96a", size = 587059, upload-time = "2026-02-23T10:33:02.845Z" },
]

[[package]]
@@ -1914,57 +1914,61 @@ wheels = [

[[package]]
name = "sqlalchemy"
-version = "2.0.46"
+version = "2.0.47"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" },
    { name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/06/aa/9ce0f3e7a9829ead5c8ce549392f33a12c4555a6c0609bb27d882e9c7ddf/sqlalchemy-2.0.46.tar.gz", hash = "sha256:cf36851ee7219c170bb0793dbc3da3e80c582e04a5437bc601bfe8c85c9216d7", size = 9865393, upload-time = "2026-01-21T18:03:45.119Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/b6/35/d16bfa235c8b7caba3730bba43e20b1e376d2224f407c178fbf59559f23e/sqlalchemy-2.0.46-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a9a72b0da8387f15d5810f1facca8f879de9b85af8c645138cba61ea147968c", size = 2153405, upload-time = "2026-01-21T19:05:54.143Z" },
-    { url = "https://files.pythonhosted.org/packages/06/6c/3192e24486749862f495ddc6584ed730c0c994a67550ec395d872a2ad650/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2347c3f0efc4de367ba00218e0ae5c4ba2306e47216ef80d6e31761ac97cb0b9", size = 3334702, upload-time = "2026-01-21T18:46:45.384Z" },
-    { url = "https://files.pythonhosted.org/packages/ea/a2/b9f33c8d68a3747d972a0bb758c6b63691f8fb8a49014bc3379ba15d4274/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9094c8b3197db12aa6f05c51c05daaad0a92b8c9af5388569847b03b1007fb1b", size = 3347664, upload-time = "2026-01-21T18:40:09.979Z" },
-    { url = "https://files.pythonhosted.org/packages/aa/d2/3e59e2a91eaec9db7e8dc6b37b91489b5caeb054f670f32c95bcba98940f/sqlalchemy-2.0.46-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37fee2164cf21417478b6a906adc1a91d69ae9aba8f9533e67ce882f4bb1de53", size = 3277372, upload-time = "2026-01-21T18:46:47.168Z" },
-    { url = "https://files.pythonhosted.org/packages/dd/dd/67bc2e368b524e2192c3927b423798deda72c003e73a1e94c21e74b20a85/sqlalchemy-2.0.46-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b1e14b2f6965a685c7128bd315e27387205429c2e339eeec55cb75ca4ab0ea2e", size = 3312425, upload-time = "2026-01-21T18:40:11.548Z" },
-    { url = "https://files.pythonhosted.org/packages/43/82/0ecd68e172bfe62247e96cb47867c2d68752566811a4e8c9d8f6e7c38a65/sqlalchemy-2.0.46-cp312-cp312-win32.whl", hash = "sha256:412f26bb4ba942d52016edc8d12fb15d91d3cd46b0047ba46e424213ad407bcb", size = 2113155, upload-time = "2026-01-21T18:42:49.748Z" },
-    { url = "https://files.pythonhosted.org/packages/bc/2a/2821a45742073fc0331dc132552b30de68ba9563230853437cac54b2b53e/sqlalchemy-2.0.46-cp312-cp312-win_amd64.whl", hash = "sha256:ea3cd46b6713a10216323cda3333514944e510aa691c945334713fca6b5279ff", size = 2140078, upload-time = "2026-01-21T18:42:51.197Z" },
-    { url = "https://files.pythonhosted.org/packages/b3/4b/fa7838fe20bb752810feed60e45625a9a8b0102c0c09971e2d1d95362992/sqlalchemy-2.0.46-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:93a12da97cca70cea10d4b4fc602589c4511f96c1f8f6c11817620c021d21d00", size = 2150268, upload-time = "2026-01-21T19:05:56.621Z" },
-    { url = "https://files.pythonhosted.org/packages/46/c1/b34dccd712e8ea846edf396e00973dda82d598cb93762e55e43e6835eba9/sqlalchemy-2.0.46-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af865c18752d416798dae13f83f38927c52f085c52e2f32b8ab0fef46fdd02c2", size = 3276511, upload-time = "2026-01-21T18:46:49.022Z" },
-    { url = "https://files.pythonhosted.org/packages/96/48/a04d9c94753e5d5d096c628c82a98c4793b9c08ca0e7155c3eb7d7db9f24/sqlalchemy-2.0.46-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8d679b5f318423eacb61f933a9a0f75535bfca7056daeadbf6bd5bcee6183aee", size = 3292881, upload-time = "2026-01-21T18:40:13.089Z" },
-    { url = "https://files.pythonhosted.org/packages/be/f4/06eda6e91476f90a7d8058f74311cb65a2fb68d988171aced81707189131/sqlalchemy-2.0.46-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64901e08c33462acc9ec3bad27fc7a5c2b6491665f2aa57564e57a4f5d7c52ad", size = 3224559, upload-time = "2026-01-21T18:46:50.974Z" },
-    { url = "https://files.pythonhosted.org/packages/ab/a2/d2af04095412ca6345ac22b33b89fe8d6f32a481e613ffcb2377d931d8d0/sqlalchemy-2.0.46-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e8ac45e8f4eaac0f9f8043ea0e224158855c6a4329fd4ee37c45c61e3beb518e", size = 3262728, upload-time = "2026-01-21T18:40:14.883Z" },
-    { url = "https://files.pythonhosted.org/packages/31/48/1980c7caa5978a3b8225b4d230e69a2a6538a3562b8b31cea679b6933c83/sqlalchemy-2.0.46-cp313-cp313-win32.whl", hash = "sha256:8d3b44b3d0ab2f1319d71d9863d76eeb46766f8cf9e921ac293511804d39813f", size = 2111295, upload-time = "2026-01-21T18:42:52.366Z" },
-    { url = "https://files.pythonhosted.org/packages/2d/54/f8d65bbde3d877617c4720f3c9f60e99bb7266df0d5d78b6e25e7c149f35/sqlalchemy-2.0.46-cp313-cp313-win_amd64.whl", hash = "sha256:77f8071d8fbcbb2dd11b7fd40dedd04e8ebe2eb80497916efedba844298065ef", size = 2137076, upload-time = "2026-01-21T18:42:53.924Z" },
-    { url = "https://files.pythonhosted.org/packages/56/ba/9be4f97c7eb2b9d5544f2624adfc2853e796ed51d2bb8aec90bc94b7137e/sqlalchemy-2.0.46-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1e8cc6cc01da346dc92d9509a63033b9b1bda4fed7a7a7807ed385c7dccdc10", size = 3556533, upload-time = "2026-01-21T18:33:06.636Z" },
-    { url = "https://files.pythonhosted.org/packages/20/a6/b1fc6634564dbb4415b7ed6419cdfeaadefd2c39cdab1e3aa07a5f2474c2/sqlalchemy-2.0.46-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:96c7cca1a4babaaf3bfff3e4e606e38578856917e52f0384635a95b226c87764", size = 3523208, upload-time = "2026-01-21T18:45:08.436Z" },
-    { url = "https://files.pythonhosted.org/packages/a1/d8/41e0bdfc0f930ff236f86fccd12962d8fa03713f17ed57332d38af6a3782/sqlalchemy-2.0.46-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2a9f9aee38039cf4755891a1e50e1effcc42ea6ba053743f452c372c3152b1b", size = 3464292, upload-time = "2026-01-21T18:33:08.208Z" },
-    { url = "https://files.pythonhosted.org/packages/f0/8b/9dcbec62d95bea85f5ecad9b8d65b78cc30fb0ffceeb3597961f3712549b/sqlalchemy-2.0.46-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:db23b1bf8cfe1f7fda19018e7207b20cdb5168f83c437ff7e95d19e39289c447", size = 3473497, upload-time = "2026-01-21T18:45:10.552Z" },
-    { url = "https://files.pythonhosted.org/packages/e9/f8/5ecdfc73383ec496de038ed1614de9e740a82db9ad67e6e4514ebc0708a3/sqlalchemy-2.0.46-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:56bdd261bfd0895452006d5316cbf35739c53b9bb71a170a331fa0ea560b2ada", size = 2152079, upload-time = "2026-01-21T19:05:58.477Z" },
-    { url = "https://files.pythonhosted.org/packages/e5/bf/eba3036be7663ce4d9c050bc3d63794dc29fbe01691f2bf5ccb64e048d20/sqlalchemy-2.0.46-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33e462154edb9493f6c3ad2125931e273bbd0be8ae53f3ecd1c161ea9a1dd366", size = 3272216, upload-time = "2026-01-21T18:46:52.634Z" },
-    { url = "https://files.pythonhosted.org/packages/05/45/1256fb597bb83b58a01ddb600c59fe6fdf0e5afe333f0456ed75c0f8d7bd/sqlalchemy-2.0.46-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bcdce05f056622a632f1d44bb47dbdb677f58cad393612280406ce37530eb6d", size = 3277208, upload-time = "2026-01-21T18:40:16.38Z" },
-    { url = "https://files.pythonhosted.org/packages/d9/a0/2053b39e4e63b5d7ceb3372cface0859a067c1ddbd575ea7e9985716f771/sqlalchemy-2.0.46-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e84b09a9b0f19accedcbeff5c2caf36e0dd537341a33aad8d680336152dc34e", size = 3221994, upload-time = "2026-01-21T18:46:54.622Z" },
-    { url = "https://files.pythonhosted.org/packages/1e/87/97713497d9502553c68f105a1cb62786ba1ee91dea3852ae4067ed956a50/sqlalchemy-2.0.46-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4f52f7291a92381e9b4de9050b0a65ce5d6a763333406861e33906b8aa4906bf", size = 3243990, upload-time = "2026-01-21T18:40:18.253Z" },
-    { url = "https://files.pythonhosted.org/packages/a8/87/5d1b23548f420ff823c236f8bea36b1a997250fd2f892e44a3838ca424f4/sqlalchemy-2.0.46-cp314-cp314-win32.whl", hash = "sha256:70ed2830b169a9960193f4d4322d22be5c0925357d82cbf485b3369893350908", size = 2114215, upload-time = "2026-01-21T18:42:55.232Z" },
-    { url = "https://files.pythonhosted.org/packages/3a/20/555f39cbcf0c10cf452988b6a93c2a12495035f68b3dbd1a408531049d31/sqlalchemy-2.0.46-cp314-cp314-win_amd64.whl", hash = "sha256:3c32e993bc57be6d177f7d5d31edb93f30726d798ad86ff9066d75d9bf2e0b6b", size = 2139867, upload-time = "2026-01-21T18:42:56.474Z" },
-    { url = "https://files.pythonhosted.org/packages/3e/f0/f96c8057c982d9d8a7a68f45d69c674bc6f78cad401099692fe16521640a/sqlalchemy-2.0.46-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4dafb537740eef640c4d6a7c254611dca2df87eaf6d14d6a5fca9d1f4c3fc0fa", size = 3561202, upload-time = "2026-01-21T18:33:10.337Z" },
-    { url = "https://files.pythonhosted.org/packages/d7/53/3b37dda0a5b137f21ef608d8dfc77b08477bab0fe2ac9d3e0a66eaeab6fc/sqlalchemy-2.0.46-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42a1643dc5427b69aca967dae540a90b0fbf57eaf248f13a90ea5930e0966863", size = 3526296, upload-time = "2026-01-21T18:45:12.657Z" },
-    { url = "https://files.pythonhosted.org/packages/33/75/f28622ba6dde79cd545055ea7bd4062dc934e0621f7b3be2891f8563f8de/sqlalchemy-2.0.46-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ff33c6e6ad006bbc0f34f5faf941cfc62c45841c64c0a058ac38c799f15b5ede", size = 3470008, upload-time = "2026-01-21T18:33:11.725Z" },
-    { url = "https://files.pythonhosted.org/packages/a9/42/4afecbbc38d5e99b18acef446453c76eec6fbd03db0a457a12a056836e22/sqlalchemy-2.0.46-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:82ec52100ec1e6ec671563bbd02d7c7c8d0b9e71a0723c72f22ecf52d1755330", size = 3476137, upload-time = "2026-01-21T18:45:15.001Z" },
-    { url = "https://files.pythonhosted.org/packages/fc/a1/9c4efa03300926601c19c18582531b45aededfb961ab3c3585f1e24f120b/sqlalchemy-2.0.46-py3-none-any.whl", hash = "sha256:f9c11766e7e7c0a2767dda5acb006a118640c9fc0a4104214b96269bfb78399e", size = 1937882, upload-time = "2026-01-21T18:22:10.456Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/cd/4b/1e00561093fe2cd8eef09d406da003c8a118ff02d6548498c1ae677d68d9/sqlalchemy-2.0.47.tar.gz", hash = "sha256:e3e7feb57b267fe897e492b9721ae46d5c7de6f9e8dee58aacf105dc4e154f3d", size = 9886323, upload-time = "2026-02-24T16:34:27.947Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/80/88/74eb470223ff88ea6572a132c0b8de8c1d8ed7b843d3b44a8a3c77f31d39/sqlalchemy-2.0.47-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fa91b19d6b9821c04cc8f7aa2476429cc8887b9687c762815aa629f5c0edec1", size = 2155687, upload-time = "2026-02-24T17:05:46.451Z" },
+    { url = "https://files.pythonhosted.org/packages/ef/ba/1447d3d558971b036cb93b557595cb5dcdfe728f1c7ac4dec16505ef5756/sqlalchemy-2.0.47-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7c5bbbd14eff577c8c79cbfe39a0771eecd20f430f3678533476f0087138f356", size = 3336978, upload-time = "2026-02-24T17:18:04.597Z" },
+    { url = "https://files.pythonhosted.org/packages/8a/07/b47472d2ffd0776826f17ccf0b4d01b224c99fbd1904aeb103dffbb4b1cc/sqlalchemy-2.0.47-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a5a6c555da8d4280a3c4c78c5b7a3f990cee2b2884e5f934f87a226191682ff7", size = 3349939, upload-time = "2026-02-24T17:27:18.937Z" },
+    { url = "https://files.pythonhosted.org/packages/bb/c6/95fa32b79b57769da3e16f054cf658d90940317b5ca0ec20eac84aa19c4f/sqlalchemy-2.0.47-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ed48a1701d24dff3bb49a5bce94d6bc84cbe33d98af2aa2d3cdcce3dea1709ec", size = 3279648, upload-time = "2026-02-24T17:18:07.038Z" },
+    { url = "https://files.pythonhosted.org/packages/bb/c8/3d07e7c73928dc59a0bed40961ca4e313e797bce650b088e8d5fdd3ad939/sqlalchemy-2.0.47-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f3178c920ad98158f0b6309382194df04b14808fa6052ae07099fdde29d5602", size = 3314695, upload-time = "2026-02-24T17:27:20.93Z" },
+    { url = "https://files.pythonhosted.org/packages/6b/d2/ed32b1611c1e19fdb028eee1adc5a9aa138c2952d09ae11f1670170f80ae/sqlalchemy-2.0.47-cp312-cp312-win32.whl", hash = "sha256:b9c11ac9934dd59ece9619fe42780a08abe2faab7b0543bb00d5eabea4f421b9", size = 2115502, upload-time = "2026-02-24T17:22:52.546Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/52/9de590356a4dd8e9ef5a881dbba64b2bbc4cbc71bf02bc68e775fb9b1899/sqlalchemy-2.0.47-cp312-cp312-win_amd64.whl", hash = "sha256:db43b72cf8274a99e089755c9c1e0b947159b71adbc2c83c3de2e38d5d607acb", size = 2142435, upload-time = "2026-02-24T17:22:54.268Z" },
+    { url = "https://files.pythonhosted.org/packages/4a/e5/0af64ce7d8f60ec5328c10084e2f449e7912a9b8bdbefdcfb44454a25f49/sqlalchemy-2.0.47-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:456a135b790da5d3c6b53d0ef71ac7b7d280b7f41eb0c438986352bf03ca7143", size = 2152551, upload-time = "2026-02-24T17:05:47.675Z" },
+    { url = "https://files.pythonhosted.org/packages/63/79/746b8d15f6940e2ac469ce22d7aa5b1124b1ab820bad9b046eb3000c88a6/sqlalchemy-2.0.47-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09a2f7698e44b3135433387da5d8846cf7cc7c10e5425af7c05fee609df978b6", size = 3278782, upload-time = "2026-02-24T17:18:10.012Z" },
+    { url = "https://files.pythonhosted.org/packages/91/b1/bd793ddb34345d1ed43b13ab2d88c95d7d4eb2e28f5b5a99128b9cc2bca2/sqlalchemy-2.0.47-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0bbc72e6a177c78d724f9106aaddc0d26a2ada89c6332b5935414eccf04cbd5", size = 3295155, upload-time = "2026-02-24T17:27:22.827Z" },
+    { url = "https://files.pythonhosted.org/packages/97/84/7213def33f94e5ca6f5718d259bc9f29de0363134648425aa218d4356b23/sqlalchemy-2.0.47-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:75460456b043b78b6006e41bdf5b86747ee42eafaf7fffa3b24a6e9a456a2092", size = 3226834, upload-time = "2026-02-24T17:18:11.465Z" },
+    { url = "https://files.pythonhosted.org/packages/ef/06/456810204f4dc29b5f025b1b0a03b4bd6b600ebf3c1040aebd90a257fa33/sqlalchemy-2.0.47-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5d9adaa616c3bc7d80f9ded57cd84b51d6617cad6a5456621d858c9f23aaee01", size = 3265001, upload-time = "2026-02-24T17:27:24.813Z" },
+    { url = "https://files.pythonhosted.org/packages/fb/20/df3920a4b2217dbd7390a5bd277c1902e0393f42baaf49f49b3c935e7328/sqlalchemy-2.0.47-cp313-cp313-win32.whl", hash = "sha256:76e09f974382a496a5ed985db9343628b1cb1ac911f27342e4cc46a8bac10476", size = 2113647, upload-time = "2026-02-24T17:22:55.747Z" },
+    { url = "https://files.pythonhosted.org/packages/46/06/7873ddf69918efbfabd7211829f4bd8019739d0a719253112d305d3ba51d/sqlalchemy-2.0.47-cp313-cp313-win_amd64.whl", hash = "sha256:0664089b0bf6724a0bfb49a0cf4d4da24868a0a5c8e937cd7db356d5dcdf2c66", size = 2139425, upload-time = "2026-02-24T17:22:57.033Z" },
+    { url = "https://files.pythonhosted.org/packages/54/fa/61ad9731370c90ac7ea5bf8f5eaa12c48bb4beec41c0fa0360becf4ac10d/sqlalchemy-2.0.47-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed0c967c701ae13da98eb220f9ddab3044ab63504c1ba24ad6a59b26826ad003", size = 3558809, upload-time = "2026-02-24T17:12:15.232Z" },
+    { url = "https://files.pythonhosted.org/packages/33/d5/221fac96f0529391fe374875633804c866f2b21a9c6d3a6ca57d9c12cfd7/sqlalchemy-2.0.47-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3537943a61fd25b241e976426a0c6814434b93cf9b09d39e8e78f3c9eb9a487", size = 3525480, upload-time = "2026-02-24T17:27:59.602Z" },
+    { url = "https://files.pythonhosted.org/packages/ec/55/8247d53998c3673e4a8d1958eba75c6f5cc3b39082029d400bb1f2a911ae/sqlalchemy-2.0.47-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:57f7e336a64a0dba686c66392d46b9bc7af2c57d55ce6dc1697b4ef32b043ceb", size = 3466569, upload-time = "2026-02-24T17:12:16.94Z" },
+    { url = "https://files.pythonhosted.org/packages/6b/b5/c1f0eea1bac6790845f71420a7fe2f2a0566203aa57543117d4af3b77d1c/sqlalchemy-2.0.47-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dff735a621858680217cb5142b779bad40ef7322ddbb7c12062190db6879772e", size = 3475770, upload-time = "2026-02-24T17:28:02.034Z" },
+    { url = "https://files.pythonhosted.org/packages/c5/ed/2f43f92474ea0c43c204657dc47d9d002cd738b96ca2af8e6d29a9b5e42d/sqlalchemy-2.0.47-cp313-cp313t-win32.whl", hash = "sha256:3893dc096bb3cca9608ea3487372ffcea3ae9b162f40e4d3c51dd49db1d1b2dc", size = 2141300, upload-time = "2026-02-24T17:14:37.024Z" },
+    { url = "https://files.pythonhosted.org/packages/cc/a9/8b73f9f1695b6e92f7aaf1711135a1e3bbeb78bca9eded35cb79180d3c6d/sqlalchemy-2.0.47-cp313-cp313t-win_amd64.whl", hash = "sha256:b5103427466f4b3e61f04833ae01f9a914b1280a2a8bcde3a9d7ab11f3755b42", size = 2173053, upload-time = "2026-02-24T17:14:38.688Z" },
+    { url = "https://files.pythonhosted.org/packages/c1/30/98243209aae58ed80e090ea988d5182244ca7ab3ff59e6d850c3dfc7651e/sqlalchemy-2.0.47-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b03010a5a5dfe71676bc83f2473ebe082478e32d77e6f082c8fe15a31c3b42a6", size = 2154355, upload-time = "2026-02-24T17:05:48.959Z" },
+    { url = "https://files.pythonhosted.org/packages/ab/62/12ca6ea92055fe486d6558a2a4efe93e194ff597463849c01f88e5adb99d/sqlalchemy-2.0.47-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8e3371aa9024520883a415a09cc20c33cfd3eeccf9e0f4f4c367f940b9cbd44", size = 3274486, upload-time = "2026-02-24T17:18:13.659Z" },
+    { url = "https://files.pythonhosted.org/packages/97/88/7dfbdeaa8d42b1584e65d6cc713e9d33b6fa563e0d546d5cb87e545bb0e5/sqlalchemy-2.0.47-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9449f747e50d518c6e1b40cc379e48bfc796453c47b15e627ea901c201e48a6", size = 3279481, upload-time = "2026-02-24T17:27:26.491Z" },
+    { url = "https://files.pythonhosted.org/packages/d0/b7/75e1c1970616a9dd64a8a6fd788248da2ddaf81c95f4875f2a1e8aee4128/sqlalchemy-2.0.47-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:21410f60d5cac1d6bfe360e05bd91b179be4fa0aa6eea6be46054971d277608f", size = 3224269, upload-time = "2026-02-24T17:18:15.078Z" },
+    { url = "https://files.pythonhosted.org/packages/31/ac/eec1a13b891df9a8bc203334caf6e6aac60b02f61b018ef3b4124b8c4120/sqlalchemy-2.0.47-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:819841dd5bb4324c284c09e2874cf96fe6338bfb57a64548d9b81a4e39c9871f", size = 3246262, upload-time = "2026-02-24T17:27:27.986Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/b0/661b0245b06421058610da39f8ceb34abcc90b49f90f256380968d761dbe/sqlalchemy-2.0.47-cp314-cp314-win32.whl", hash = "sha256:e255ee44821a7ef45649c43064cf94e74f81f61b4df70547304b97a351e9b7db", size = 2116528, upload-time = "2026-02-24T17:22:59.363Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/ef/1035a90d899e61810791c052004958be622a2cf3eb3df71c3fe20778c5d0/sqlalchemy-2.0.47-cp314-cp314-win_amd64.whl", hash = "sha256:209467ff73ea1518fe1a5aaed9ba75bb9e33b2666e2553af9ccd13387bf192cb", size = 2142181, upload-time = "2026-02-24T17:23:01.001Z" },
+    { url = "https://files.pythonhosted.org/packages/76/bb/17a1dd09cbba91258218ceb582225f14b5364d2683f9f5a274f72f2d764f/sqlalchemy-2.0.47-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e78fd9186946afaa287f8a1fe147ead06e5d566b08c0afcb601226e9c7322a64", size = 3563477, upload-time = "2026-02-24T17:12:18.46Z" },
+    { url = "https://files.pythonhosted.org/packages/66/8f/1a03d24c40cc321ef2f2231f05420d140bb06a84f7047eaa7eaa21d230ba/sqlalchemy-2.0.47-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5740e2f31b5987ed9619d6912ae5b750c03637f2078850da3002934c9532f172", size = 3528568, upload-time = "2026-02-24T17:28:03.732Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/53/d56a213055d6b038a5384f0db5ece7343334aca230ff3f0fa1561106f22c/sqlalchemy-2.0.47-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fb9ac00d03de93acb210e8ec7243fefe3e012515bf5fd2f0898c8dff38bc77a4", size = 3472284, upload-time = "2026-02-24T17:12:20.319Z" },
+    { url = "https://files.pythonhosted.org/packages/ff/19/c235d81b9cfdd6130bf63143b7bade0dc4afa46c4b634d5d6b2a96bea233/sqlalchemy-2.0.47-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c72a0b9eb2672d70d112cb149fbaf172d466bc691014c496aaac594f1988e706", size = 3478410, upload-time = "2026-02-24T17:28:05.892Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/db/cafdeca5ecdaa3bb0811ba5449501da677ce0d83be8d05c5822da72d2e86/sqlalchemy-2.0.47-cp314-cp314t-win32.whl", hash = "sha256:c200db1128d72a71dc3c31c24b42eb9fd85b2b3e5a3c9ba1e751c11ac31250ff", size = 2147164, upload-time = "2026-02-24T17:14:40.783Z" },
+    { url = "https://files.pythonhosted.org/packages/fc/5e/ff41a010e9e0f76418b02ad352060a4341bb15f0af66cedc924ab376c7c6/sqlalchemy-2.0.47-cp314-cp314t-win_amd64.whl", hash = "sha256:669837759b84e575407355dcff912835892058aea9b80bd1cb76d6a151cf37f7", size = 2182154, upload-time = "2026-02-24T17:14:43.205Z" },
+    { url = "https://files.pythonhosted.org/packages/15/9f/7c378406b592fcf1fc157248607b495a40e3202ba4a6f1372a2ba6447717/sqlalchemy-2.0.47-py3-none-any.whl", hash = "sha256:e2647043599297a1ef10e720cf310846b7f31b6c841fee093d2b09d81215eb93", size = 1940159, upload-time = "2026-02-24T17:15:07.158Z" },
]

[[package]]
name = "sqlmodel"
-version = "0.0.35"
+version = "0.0.37"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "pydantic" },
    { name = "sqlalchemy" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/a6/fd/6f468f52977b85f8b1af3f0d7d4396ed77804a59bf589f2f47c524383388/sqlmodel-0.0.35.tar.gz", hash = "sha256:e0079a6ec569323587ffb7326bbbc9d9a1a92e9be271b18e83f54d4a4200d6ac", size = 86087, upload-time = "2026-02-20T16:42:21.254Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/fb/26/1d2faa0fd5a765267f49751de533adac6b9ff9366c7c6e7692df4f32230f/sqlmodel-0.0.37.tar.gz", hash = "sha256:d2c19327175794faf50b1ee31cc966764f55b1dedefc046450bc5741a3d68352", size = 85527, upload-time = "2026-02-21T16:39:47.038Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/8f/f3/90f7b2eb86e590b74cf33e37a5313c074092684666355201afe9a1ae7ef5/sqlmodel-0.0.35-py3-none-any.whl", hash = "sha256:367c11719bc4967430d5aadc43ee1a6f7638b9c82ee7c8835401400e05ec9431", size = 27221, upload-time = "2026-02-20T16:42:20.301Z" },
+    { url = "https://files.pythonhosted.org/packages/b1/e1/7c8d18e737433f3b5bbe27b56a9072a9fcb36342b48f1bef34b6da1d61f2/sqlmodel-0.0.37-py3-none-any.whl", hash = "sha256:2137a4045ef3fd66a917a7717ada959a1ceb3630d95e1f6aaab39dd2c0aef278", size = 27224, upload-time = "2026-02-21T16:39:47.781Z" },
]

[[package]]