From 414772b2ad47974d37c06024a92f11040bd080b8 Mon Sep 17 00:00:00 2001 From: Simon Lloyd Date: Tue, 24 Feb 2026 19:44:27 +0000 Subject: [PATCH 1/3] test: add integration and functional test suite - Refactor tests/conftest.py with grouped fixtures and shared helpers - Add integration tests for core modules: event, project, seismogram, station, snapshot, model relationships (cascade deletes) - Add functional CLI tests: basic ops, project lifecycle, parameters, snapshots, sample data - Split TestProjectLifecycle into file-backed (subprocess) and in-memory (in-process) variants - Fix print_project_info to display 'in-memory database' instead of ':memory:' as the project file label - All tests use in-process cli fixture with monkeypatched engine; subprocess only used where real file I/O is required - Short-ID variants included for all UUID-accepting CLI commands --- .github/copilot-instructions.md | 97 +++ .gitignore | 3 + Makefile | 7 +- flake.nix | 3 +- pyproject.toml | 10 +- src/aimbat/_config.py | 57 +- src/aimbat/aimbat_types/_pydantic.py | 8 +- src/aimbat/app.py | 34 +- src/aimbat/cli/_data.py | 15 +- src/aimbat/cli/_snapshot.py | 4 +- src/aimbat/cli/_utils/app.py | 32 +- src/aimbat/core/__init__.py | 1 + src/aimbat/core/_active_event.py | 86 ++ src/aimbat/core/_data.py | 181 ++-- src/aimbat/core/_event.py | 183 ++-- src/aimbat/core/_iccs.py | 5 +- src/aimbat/core/_project.py | 60 +- src/aimbat/core/_seismogram.py | 80 +- src/aimbat/core/_snapshot.py | 170 ++-- src/aimbat/core/_station.py | 295 +++++-- src/aimbat/db.py | 16 + src/aimbat/models/_models.py | 390 +++++---- src/aimbat/utils/__init__.py | 2 - src/aimbat/utils/_active_event.py | 38 - src/aimbat/utils/_checkdata.py | 148 ---- tests/__init__.py | 0 ...stSeismogramPlot.test_lib_plotseis_mpl.png | Bin 155122 -> 0 bytes tests/cli/test_cli_common.py | 15 - tests/conftest.py | 432 ++++++---- tests/functional/test_cli_basic_ops.py | 624 ++++++++++++++ tests/functional/test_cli_parameters.py | 793 
++++++++++++++++++ tests/functional/test_cli_project.py | 72 ++ tests/functional/test_cli_sampledata.py | 196 +++++ tests/functional/test_cli_snapshots.py | 574 +++++++++++++ tests/integration/test_active_event.py | 115 +++ tests/integration/test_data_io.py | 327 ++++++++ tests/integration/test_datasource_sac.py | 300 +++++++ tests/integration/test_db_operations.py | 473 +++++++++++ tests/integration/test_event.py | 351 ++++++++ tests/integration/test_models.py | 668 +++++++++++++++ tests/integration/test_project.py | 131 +++ tests/integration/test_seismogram.py | 416 +++++++++ tests/integration/test_snapshots.py | 389 +++++++++ tests/integration/test_station.py | 342 ++++++++ tests/lib/test_lib_common.py | 80 -- tests/test_data.py | 218 ----- tests/test_event.py | 344 -------- tests/test_iccs.py | 62 -- tests/test_io.py | 78 -- tests/test_models.py | 54 -- tests/test_project.py | 135 --- tests/test_seismogram.py | 360 -------- tests/test_settings.py | 51 -- tests/test_snapshot.py | 246 ------ tests/test_station.py | 139 --- tests/test_typing.py | 57 -- tests/unit/aimbat_types/test_pydantic.py | 131 +++ tests/unit/cli/test_common.py | 198 +++++ tests/unit/io/test_sac.py | 283 +++++++ tests/unit/models/test_sqlalchemy.py | 146 ++++ tests/{ => unit}/test_app.py | 33 +- tests/unit/test_config.py | 227 +++++ tests/unit/utils/test_json.py | 320 +++++++ tests/unit/utils/test_sampledata.py | 151 ++++ tests/unit/utils/test_uuid.py | 224 +++++ tests/utils/test_utils.py | 184 ---- uv.lock | 204 ++--- 67 files changed, 9004 insertions(+), 3064 deletions(-) create mode 100644 src/aimbat/core/_active_event.py delete mode 100644 src/aimbat/utils/_active_event.py delete mode 100644 src/aimbat/utils/_checkdata.py delete mode 100644 tests/__init__.py delete mode 100644 tests/baseline/tests.test_seismogram.TestSeismogramPlot.test_lib_plotseis_mpl.png delete mode 100644 tests/cli/test_cli_common.py create mode 100644 tests/functional/test_cli_basic_ops.py create mode 100644 
tests/functional/test_cli_parameters.py create mode 100644 tests/functional/test_cli_project.py create mode 100644 tests/functional/test_cli_sampledata.py create mode 100644 tests/functional/test_cli_snapshots.py create mode 100644 tests/integration/test_active_event.py create mode 100644 tests/integration/test_data_io.py create mode 100644 tests/integration/test_datasource_sac.py create mode 100644 tests/integration/test_db_operations.py create mode 100644 tests/integration/test_event.py create mode 100644 tests/integration/test_models.py create mode 100644 tests/integration/test_project.py create mode 100644 tests/integration/test_seismogram.py create mode 100644 tests/integration/test_snapshots.py create mode 100644 tests/integration/test_station.py delete mode 100644 tests/lib/test_lib_common.py delete mode 100644 tests/test_data.py delete mode 100644 tests/test_event.py delete mode 100644 tests/test_iccs.py delete mode 100644 tests/test_io.py delete mode 100644 tests/test_models.py delete mode 100644 tests/test_project.py delete mode 100644 tests/test_seismogram.py delete mode 100644 tests/test_settings.py delete mode 100644 tests/test_snapshot.py delete mode 100644 tests/test_station.py delete mode 100644 tests/test_typing.py create mode 100644 tests/unit/aimbat_types/test_pydantic.py create mode 100644 tests/unit/cli/test_common.py create mode 100644 tests/unit/io/test_sac.py create mode 100644 tests/unit/models/test_sqlalchemy.py rename tests/{ => unit}/test_app.py (57%) create mode 100644 tests/unit/test_config.py create mode 100644 tests/unit/utils/test_json.py create mode 100644 tests/unit/utils/test_sampledata.py create mode 100644 tests/unit/utils/test_uuid.py delete mode 100644 tests/utils/test_utils.py diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 217430f2..808817a1 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -1,5 +1,102 @@ # GitHub Copilot Instructions for AIMBAT +## 
Build, Test, and Lint + +Dependencies are managed with **uv**. All commands assume the virtualenv is active or are prefixed with `uv run`. + +```bash +# Install all dependencies +make sync # uv sync --locked --all-extras + +# Format and lint +make format # black . +make lint # black --check + ruff check . +uv run ruff check --fix . # auto-fix ruff issues + +# Type checking +make mypy # uv run pytest --mypy -m mypy src tests + +# Run all tests (includes mypy + matplotlib comparison) +make tests # pytest --cov --mpl + mypy + +# Run a single test file or test +uv run pytest tests/unit/test_foo.py +uv run pytest tests/unit/test_foo.py::test_specific_function + +# Regenerate matplotlib baseline images (then manually move to test directories) +make test-figs +``` + +Configuration: `pyproject.toml` (pytest, mypy, black, ruff, coverage). Tests run against Python 3.12–3.14 in CI via tox. + +## Architecture + +AIMBAT is a seismological tool for automated and interactive measurement of body-wave arrival times. It processes SAC-format seismograms and stores state in a SQLite database. + +### Module Layout + +``` +src/aimbat/ +├── app.py # Cyclopts CLI root — registers all subcommands +├── cli/ # CLI command definitions (thin layer, delegates to core/) +├── core/ # Business logic: ICCS/MCCC algorithms, event/seismogram ops +│ ├── _active_event.py # Manages the single active event constraint +│ ├── _data.py # SAC ingestion entry point +│ ├── _iccs.py # ICCS alignment (wraps pysmo.tools.iccs) +│ └── _snapshot.py # Parameter state capture for rollback/comparison +├── models/ # SQLModel ORM definitions (Events, Seismograms, Stations, etc.) 
+│ └── _sqlalchemy.py # SAPandasTimestamp / SAPandasTimedelta type decorators +├── aimbat_types/ # Custom Pydantic types (PydanticTimestamp, enums for parameters) +├── io/ # File I/O — _base.py defines abstract base; _sac.py implements SAC via pysmo +├── utils/ # Shared helpers (JSON→table, UUID truncation, styling, sample data) +├── _config.py # Global Settings (pydantic-settings, env prefix AIMBAT_) +├── _lib/ # Internal mixins (EventParametersValidatorMixin) +├── _utils.py # Top-level utility helpers +├── db.py # SQLite engine singleton (foreign keys enforced via PRAGMA) +└── logger.py # Loguru-based logging +``` + +### Data Flow + +1. SAC files are ingested via `aimbat data add` → `core/_data.py` → `io/` → stored in SQLite +2. One event is set "active" at a time; all processing commands operate on the active event +3. ICCS (Iterative Cross-Correlation and Stack) aligns seismograms: `core/_iccs.py` wraps `pysmo.tools.iccs` +4. MCCC (Multi-Channel Cross-Correlation) refines arrival time picks: wraps `pysmo.tools.signal.mccc` +5. Snapshots (`core/_snapshot.py`) capture parameter state for rollback/comparison + +### Key Models + +- **AimbatEvent** — seismic event with `active` flag (only one active at a time, enforced by DB trigger) +- **AimbatSeismogram** — links to AimbatEvent + AimbatStation; stores `t0` (initial pick) and processing parameters +- **AimbatEventParameters** — per-event processing settings (window, bandpass, min_ccnorm) +- **AimbatSeismogramParameters** — per-seismogram flags (`select`, `flip`, `t1` pick) +- **SAPandasTimestamp / SAPandasTimedelta** in `models/_sqlalchemy.py` — custom SQLAlchemy type decorators storing pandas timestamps as UTC datetimes and timedeltas as nanosecond integers + +### Configuration + +Settings live in `_config.py` as a `pydantic-settings` class. All settings can be overridden via environment variables prefixed with `AIMBAT_` (e.g. `AIMBAT_LOG_LEVEL=DEBUG`) or a `.env` file. 
The default project file is `aimbat.db` in the current directory. + +## Key Conventions + +### Testing + +- **Each test gets a fresh in-memory SQLite database** via the `engine` fixture in `tests/conftest.py`; never share state between tests +- **UUID generation is seeded** (`random.Random(42)`) in tests via `mock_uuid4` autouse fixture — do not rely on random UUIDs in assertions +- **`patch_settings` fixture** resets all settings to defaults before each test; use `@pytest.mark.parametrize` with `indirect=["patch_settings"]` to override specific settings +- Test assets (SAC files) live in `tests/assets/`; use `tmp_path_factory` copies to avoid mutating them +- Mirror `src/aimbat/` directory structure under `tests/` (e.g. `tests/unit/core/`, `tests/unit/models/`) +- Matplotlib comparison tests use `--mpl` flag; baseline images live in `baseline/` + +### CLI Pattern + +Each CLI module in `cli/` creates a Cyclopts `App` instance and registers it with the root app in `app.py`. CLI functions are thin wrappers that open a `Session` from `aimbat.db.engine` and delegate to `core/` functions. 
+ +### Custom Types + +- Use `PydanticTimestamp` / `PydanticTimedelta` (from `aimbat.aimbat_types`) for pandas-compatible time fields in models +- Use `PydanticNegativeTimedelta` / `PydanticPositiveTimedelta` for constrained sign validation +- Use `SAPandasTimestamp` / `SAPandasTimedelta` (from `aimbat.models._sqlalchemy`) as the `sa_type` in SQLModel fields + ## Code Style and Standards ### General Principles diff --git a/.gitignore b/.gitignore index 33358aed..bbe2a139 100644 --- a/.gitignore +++ b/.gitignore @@ -34,3 +34,6 @@ reset_project.sh aimbat.log .env aimbat_test.log +GEMINI.md +CLAUDE.md +.claude/settings.local.json diff --git a/Makefile b/Makefile index 9a414845..1faafccd 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: help check-uv sync upgrade lint test-figs tests \ +.PHONY: help check-uv sync upgrade lint test-figs tests tests-full \ mypy docs live-docs build publish clean python \ format format-check changelog @@ -36,7 +36,10 @@ lint: check-uv ## Check formatting with black and lint code with ruff. test-figs: check-uv ## Generate baseline figures for testing (then manually move them to the test directories). uv run py.test --mpl-generate-path=baseline -tests: check-uv mypy ## Run all tests with pytest. +tests: check-uv mypy ## Run tests with pytest (excludes slow functional tests). + uv run pytest --cov --cov-report=term-missing --cov-report=html --mpl -m "not slow" + +tests-full: check-uv mypy ## Run all tests including slow functional tests. uv run pytest --cov --cov-report=term-missing --cov-report=html --mpl mypy: check-uv ## Run typing tests with pytest. 
diff --git a/flake.nix b/flake.nix index ff952bb1..d9b1ea80 100644 --- a/flake.nix +++ b/flake.nix @@ -23,13 +23,14 @@ in { default = pkgs.mkShell { nativeBuildInputs = with pkgs; [ + bashInteractive + sqlitebrowser uv ruff (python314.withPackages (ps: with ps; [tox])) python313 python312 gnumake - sqlitebrowser ]; shellHook = '' diff --git a/pyproject.toml b/pyproject.toml index 6eba5d90..d67fa695 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,14 +77,20 @@ requires = ["hatchling", "hatch-vcs"] build-backend = "hatchling.build" [tool.pytest.ini_options] -# xvfb_width = 1920 -# xvfb_height = 1080 testpaths = [ "tests", "src", ] +markers = [ + "slow: mark slow tests that may take a long time to run", + "cli: mark as command-line interface tests", + "gui: mark tests that require a GUI environment", + "mpl: mark tests that generate matplotlib figures", +] mpl-generate-summary = "html" mpl-use-full-test-name = true +# xvfb_width = 1920 +# xvfb_height = 1080 [tool.mypy] mypy_path = "src" diff --git a/src/aimbat/_config.py b/src/aimbat/_config.py index d9894bee..12227c7d 100644 --- a/src/aimbat/_config.py +++ b/src/aimbat/_config.py @@ -133,36 +133,55 @@ def set_computed_defaults(self) -> Self: def print_settings_table(pretty: bool) -> None: """Print a pretty table with AIMBAT configuration options.""" - from aimbat.utils import make_table, TABLE_STYLING - from rich.console import Console + import json + from aimbat.utils import TABLE_STYLING + from aimbat.utils._json import json_to_table env_prefix = Settings.model_config.get("env_prefix") + values: dict[str, str] = json.loads(settings.model_dump_json()) if not pretty: - for k in Settings.model_fields: - print( - f'{(env_prefix + k).upper() if env_prefix else k}="{getattr(settings, k)}"' - ) + for k, v in values.items(): + env_key = f"{env_prefix.upper()}{k.upper()}" if env_prefix else k + print(f'{env_key}="{v}"') return - table = make_table(title="AIMBAT settings") - table.add_column("Name", justify="left", 
style=TABLE_STYLING.id, no_wrap=True) - table.add_column("Value", justify="center", style=TABLE_STYLING.mine) - table.add_column("Description", justify="left", style=TABLE_STYLING.linked) - - for k, v in Settings.model_fields.items(): + rows = [] + for k, v in values.items(): + field_info = Settings.model_fields.get(k) env_var = ( - f"Environment variable: {env_prefix.upper()}{str(k).upper()}" + f"Environment variable: {env_prefix.upper()}{k.upper()}" if env_prefix else "" ) - description_with_env_var = ( - f"{v.description} " if v.description else "" - ) + env_var - table.add_row(k, str(getattr(settings, k)), description_with_env_var) + description = field_info.description if field_info else "" + description_with_env_var = (f"{description} " if description else "") + env_var + rows.append( + {"name": k, "value": str(v), "description": description_with_env_var} + ) - console = Console() - console.print(table) + json_to_table( + rows, + title="AIMBAT settings", + column_kwargs={ + "name": { + "header": "Name", + "justify": "left", + "style": TABLE_STYLING.id, + "no_wrap": True, + }, + "value": { + "header": "Value", + "justify": "center", + "style": TABLE_STYLING.mine, + }, + "description": { + "header": "Description", + "justify": "left", + "style": TABLE_STYLING.linked, + }, + }, + ) def cli_settings_list( diff --git a/src/aimbat/aimbat_types/_pydantic.py b/src/aimbat/aimbat_types/_pydantic.py index a624dd58..522be11d 100644 --- a/src/aimbat/aimbat_types/_pydantic.py +++ b/src/aimbat/aimbat_types/_pydantic.py @@ -11,7 +11,9 @@ ] -def _format_timedelta(td: Timedelta) -> float: +def _format_timedelta(td: Timedelta | None) -> float | None: + if td is None: + return None return td.total_seconds() @@ -40,6 +42,8 @@ def __get_pydantic_core_schema__( ) -> CoreSchema: # Define how to validate the input (from string, datetime, or object) def validate(value: Any) -> T: + if value is None: + raise ValueError(f"{cls.target_type.__name__} value cannot be None") if 
isinstance(value, cls.target_type): return value try: @@ -63,7 +67,7 @@ class _AnnotatedTimedelta(_PandasBaseAnnotation): type PydanticTimedelta = Annotated[ Timedelta, _AnnotatedTimedelta, - PlainSerializer(_format_timedelta, return_type=float), + PlainSerializer(_format_timedelta, return_type=float | None), ] type PydanticNegativeTimedelta = Annotated[ PydanticTimedelta, AfterValidator(_must_be_negative_pd_timedelta) diff --git a/src/aimbat/app.py b/src/aimbat/app.py index 6ebf0835..5dcc72b1 100644 --- a/src/aimbat/app.py +++ b/src/aimbat/app.py @@ -6,19 +6,6 @@ commands is available by typing `aimbat COMMAND --help`. """ -from ._config import cli_settings_list -from .cli import ( - _align, - _data, - _event, - _pick, - _plot, - _project, - _station, - _seismogram, - _snapshot, - _utils, -) from importlib import metadata from cyclopts import App from rich.console import Console @@ -32,17 +19,16 @@ console = Console() app = App(version=__version__, help=__doc__, help_format="markdown", console=console) -app.command(_align.app) -app.command(_data.app) -app.command(_event.app) -app.command(_pick.app) -app.command(_plot.app) -app.command(_project.app) -app.command(_station.app) -app.command(_seismogram.app) -app.command(cli_settings_list, name="settings") -app.command(_snapshot.app) -app.command(_utils.app) +app.command("aimbat.cli._align:app", name="align") +app.command("aimbat.cli._data:app", name="data") +app.command("aimbat.cli._event:app", name="event") +app.command("aimbat.cli._pick:app", name="pick") +app.command("aimbat.cli._plot:app", name="plot") +app.command("aimbat.cli._project:app", name="project") +app.command("aimbat.cli._station:app", name="station") +app.command("aimbat.cli._seismogram:app", name="seismogram") +app.command("aimbat.cli._snapshot:app", name="snapshot") +app.command("aimbat.cli._utils:app", name="utils") if __name__ == "__main__": diff --git a/src/aimbat/cli/_data.py b/src/aimbat/cli/_data.py index 9dcf42e0..ca20a3c6 100644 --- 
a/src/aimbat/cli/_data.py +++ b/src/aimbat/cli/_data.py @@ -13,14 +13,15 @@ @app.command(name="add") @simple_exception def cli_data_add( - seismogram_files: Annotated[ + datasources: Annotated[ list[Path], Parameter( name="files", consume_multiple=True, validator=validators.Path(exists=True) ), ], *, - filetype: DataType = DataType.SAC, + datatype: DataType = DataType.SAC, + dry_run: Annotated[bool, Parameter(name="dry-run")] = False, show_progress_bar: Annotated[bool, Parameter(name="progress")] = True, global_parameters: GlobalParameters | None = None, ) -> None: @@ -29,20 +30,22 @@ def cli_data_add( Args: seismogram_files: Seismogram files to be added. filetype: Specify type of seismogram file. + dry_run: If True, print the files that would be added without modifying the database. show_progress_bar: Display progress bar. """ from aimbat.db import engine - from aimbat.core import add_files_to_project + from aimbat.core import add_data_to_project global_parameters = global_parameters or GlobalParameters() disable_progress_bar = not show_progress_bar with Session(engine) as session: - add_files_to_project( + add_data_to_project( session, - seismogram_files, - filetype, + datasources, + datatype, + dry_run, disable_progress_bar, ) diff --git a/src/aimbat/cli/_snapshot.py b/src/aimbat/cli/_snapshot.py index 75283e4c..1001bf2b 100644 --- a/src/aimbat/cli/_snapshot.py +++ b/src/aimbat/cli/_snapshot.py @@ -92,14 +92,14 @@ def cli_snapshot_dump( all_events: Select snapshots for all events. 
""" from aimbat.db import engine - from aimbat.core import dump_snapshot_table_to_json + from aimbat.core import dump_snapshot_tables_to_json from sqlmodel import Session from rich import print_json global_parameters = global_parameters or GlobalParameters() with Session(engine) as session: - print_json(dump_snapshot_table_to_json(session, all_events, as_string=True)) + print_json(dump_snapshot_tables_to_json(session, all_events, as_string=True)) @app.command(name="list") diff --git a/src/aimbat/cli/_utils/app.py b/src/aimbat/cli/_utils/app.py index 4ab83911..28015d40 100644 --- a/src/aimbat/cli/_utils/app.py +++ b/src/aimbat/cli/_utils/app.py @@ -5,40 +5,14 @@ are not strictly part of an AIMBAT workflow. """ -from .._common import GlobalParameters, simple_exception from .sampledata import app as sampledata_app -from pathlib import Path -from typing import Annotated -from cyclopts import App, Parameter - - -@simple_exception -def _run_checks(sacfiles: list[Path]) -> None: - from aimbat.utils import run_checks - - run_checks(sacfiles) - +from aimbat._config import cli_settings_list +from cyclopts import App app = App(name="utils", help=__doc__, help_format="markdown") +app.command(cli_settings_list, name="settings") app.command(sampledata_app, name="sampledata") -@app.command(name="checkdata") -def cli_checkdata( - sacfiles: Annotated[list[Path], Parameter(name="data", consume_multiple=True)], - *, - common: GlobalParameters | None = None, -) -> None: - """Check if there are any problems with SAC files before adding them to a project. - - Args: - sacfiles: One or more SAC files. 
- """ - - common = common or GlobalParameters() - - _run_checks(sacfiles) - - if __name__ == "__main__": app() diff --git a/src/aimbat/core/__init__.py b/src/aimbat/core/__init__.py index 2c8351f0..ab8a46dd 100644 --- a/src/aimbat/core/__init__.py +++ b/src/aimbat/core/__init__.py @@ -4,6 +4,7 @@ _internal_names = set(dir()) +from ._active_event import * from ._data import * from ._event import * from ._iccs import * diff --git a/src/aimbat/core/_active_event.py b/src/aimbat/core/_active_event.py new file mode 100644 index 00000000..b745ad4d --- /dev/null +++ b/src/aimbat/core/_active_event.py @@ -0,0 +1,86 @@ +"""Get and set the active event (i.e. the one being processed).""" + +# WARNING: Do not import other modules from `aimbat.core` here to avoid circular imports +from aimbat.logger import logger +from aimbat.models import AimbatEvent +from aimbat.cli._common import HINTS +from sqlmodel import Session, select +from sqlalchemy.exc import NoResultFound +from uuid import UUID + +__all__ = [ + "get_active_event", + "set_active_event_by_id", + "set_active_event", +] + + +def get_active_event(session: Session) -> AimbatEvent: + """ + Return the currently active event (i.e. the one being processed). + + Args: + session: SQL session. + + Returns: + Active Event + + Raises + NoResultFound: When no event is active. + """ + + logger.debug("Attempting to determine active event.") + + select_active_event = select(AimbatEvent).where(AimbatEvent.active == 1) + + # NOTE: While there technically can be no active event in the database, + # we typically don't really want to go beyond this point when that is the + # case. Hence we call `one` rather than `one_or_none`. + try: + active_event = session.exec(select_active_event).one() + except NoResultFound: + raise NoResultFound(f"No active event found. 
{HINTS.ACTIVATE_EVENT}") + + logger.debug(f"Active event: {active_event.id}") + + return active_event + + +def set_active_event_by_id(session: Session, event_id: UUID) -> None: + """ + Set the currently selected event (i.e. the one being processed) by its ID. + + Args: + session: SQL session. + event_id: ID of AIMBAT Event to set as active one. + + Raises: + ValueError: If no event with the given ID is found. + """ + logger.info(f"Setting active event to event with id={event_id}.") + + if event_id not in session.exec(select(AimbatEvent.id)).all(): + raise ValueError( + f"No AimbatEvent found with id: {event_id}. {HINTS.LIST_EVENTS}" + ) + + aimbat_event = session.exec( + select(AimbatEvent).where(AimbatEvent.id == event_id) + ).one() + set_active_event(session, aimbat_event) + + +def set_active_event(session: Session, event: AimbatEvent) -> None: + """ + Set the active event (i.e. the one being processed). + + Args: + session: SQL session. + event: AIMBAT Event to set as active. + """ + + logger.info(f"Activating {event=}") + + event.active = True + session.add(event) + session.commit() diff --git a/src/aimbat/core/_data.py b/src/aimbat/core/_data.py index 15a95f37..0259748a 100644 --- a/src/aimbat/core/_data.py +++ b/src/aimbat/core/_data.py @@ -1,10 +1,12 @@ +import os +from aimbat.core import get_active_event from aimbat.logger import logger from aimbat.aimbat_types import DataType from aimbat.utils import ( uuid_shortener, - get_active_event, make_table, TABLE_STYLING, + json_to_table, ) from aimbat.io import create_seismogram, create_station, create_event from aimbat.models import ( @@ -19,10 +21,9 @@ from collections.abc import Sequence from rich.progress import track from rich.console import Console -import os __all__ = [ - "add_files_to_project", + "add_data_to_project", "get_data_for_active_event", "print_data_table", "dump_data_table_to_json", @@ -106,10 +107,92 @@ def _create_seismogram( return aimbat_seismogram -def add_files_to_project( +def 
_add_datasource( + session: Session, datasource: str | os.PathLike, datatype: DataType +) -> AimbatDataSource: + """Add a data source to the AIMBAT database, creating related station, event and seismogram if necessary.""" + aimbat_station = _create_station(session, datasource, datatype) + aimbat_event = _create_event(session, datasource, datatype) + aimbat_seismogram = _create_seismogram(session, datasource, datatype) + + # TODO: perhaps adding potentially updated station and event information should be optional? + aimbat_seismogram.station = aimbat_station + aimbat_seismogram.event = aimbat_event + + # Create AimbatDataSource instance with relationship to AimbatSeismogram + select_aimbat_data_source = select(AimbatDataSource).where( + AimbatDataSource.sourcename == str(datasource) + ) + aimbat_data_source = session.exec(select_aimbat_data_source).one_or_none() + if aimbat_data_source is None: + logger.debug(f"Adding data source {datasource} to project.") + aimbat_data_source_create = AimbatDataSourceCreate( + sourcename=str(datasource), datatype=datatype + ) + aimbat_data_source = AimbatDataSource.model_validate( + aimbat_data_source_create, + update={"seismogram": aimbat_seismogram}, + ) + + else: + logger.debug( + f"Using existing data source {datasource} instead of adding new one." 
+ ) + aimbat_data_source.seismogram = aimbat_seismogram + session.add(aimbat_data_source) + return aimbat_data_source + + +def _print_dry_run_results( + added_datasources: Sequence[AimbatDataSource], + existing_station_ids: set, + existing_event_ids: set, + existing_seismogram_ids: set, +) -> None: + """Print a summary table showing which entities were added vs skipped.""" + bool_fmt = TABLE_STYLING.bool_formatter + json_to_table( + [ + { + "Filename": str(ds.sourcename), + "Station": ds.seismogram.station_id not in existing_station_ids, + "Event": ds.seismogram.event_id not in existing_event_ids, + "Seismogram": ds.seismogram_id not in existing_seismogram_ids, + } + for ds in added_datasources + ], + title="Dry Run: Data to be added", + formatters={ + "Station": bool_fmt, + "Event": bool_fmt, + "Seismogram": bool_fmt, + }, + ) + new_stations = sum( + ds.seismogram.station_id not in existing_station_ids for ds in added_datasources + ) + new_events = sum( + ds.seismogram.event_id not in existing_event_ids for ds in added_datasources + ) + new_seismograms = sum( + ds.seismogram_id not in existing_seismogram_ids for ds in added_datasources + ) + console = Console() + console.print( + f"\n{new_stations} station(s) added, " + f"{len(added_datasources) - new_stations} skipped. " + f"{new_events} event(s) added, " + f"{len(added_datasources) - new_events} skipped. " + f"{new_seismograms} seismogram(s) added, " + f"{len(added_datasources) - new_seismograms} skipped." + ) + + +def add_data_to_project( session: Session, datasources: Sequence[str | os.PathLike], datatype: DataType, + dry_run: bool = False, disable_progress_bar: bool = True, ) -> None: """Add files to the AIMBAT database. @@ -118,81 +201,83 @@ def add_files_to_project( datasources: List of data sources to add. datatype: Type of data. disable_progress_bar: Do not display progress bar. + dry_run: If True, do not commit changes to the database. 
""" logger.info(f"Adding {len(datasources)} {datatype} files to project.") - for datasource in track( - sequence=datasources, - description="Adding files ...", - disable=disable_progress_bar, - ): - aimbat_station = _create_station(session, datasource, datatype) - aimbat_event = _create_event(session, datasource, datatype) - aimbat_seismogram = _create_seismogram(session, datasource, datatype) - - # TODO: perhaps adding potentially updated station and event information should be optional? - aimbat_seismogram.station = aimbat_station - aimbat_seismogram.event = aimbat_event - - # Create AimbatDataSource instance with relationship to AimbatSeismogram - select_aimbat_data_source = select(AimbatDataSource).where( - AimbatDataSource.sourcename == str(datasource) - ) - aimbat_data_source = session.exec(select_aimbat_data_source).one_or_none() - if aimbat_data_source is None: - logger.debug(f"Adding data source {datasource} to project.") - aimbat_data_source_create = AimbatDataSourceCreate( - sourcename=str(datasource), datatype=datatype - ) - aimbat_data_source = AimbatDataSource.model_validate( - aimbat_data_source_create, update={"seismogram": aimbat_seismogram} - ) - - else: - logger.debug( - f"Using existing data source {datasource} instead of adding new one." - ) - aimbat_data_source.seismogram = aimbat_seismogram - session.add(aimbat_data_source) - - session.commit() + try: + with session.begin_nested(): + # Snapshot existing IDs before adding so we can tell new from reused. 
+ if dry_run: + existing_station_ids = set(session.exec(select(AimbatStation.id)).all()) + existing_event_ids = set(session.exec(select(AimbatEvent.id)).all()) + existing_seismogram_ids = set( + session.exec(select(AimbatSeismogram.id)).all() + ) + + added_datasources: list[AimbatDataSource] = [] + for datasource in track( + sequence=datasources, + description="Adding data ...", + disable=disable_progress_bar, + ): + added_datasources.append(_add_datasource(session, datasource, datatype)) + + if dry_run: + logger.info("Dry run: displaying data that would be added.") + session.flush() + _print_dry_run_results( + added_datasources, + existing_station_ids, + existing_event_ids, + existing_seismogram_ids, + ) + session.rollback() + logger.info("Dry run complete. Rolling back changes.") + else: + session.commit() + logger.info("Data added successfully.") + + except Exception as e: + logger.error(f"Failed to add data. Rolling back changes. Error: {e}") + raise def get_data_for_active_event(session: Session) -> Sequence[AimbatDataSource]: - """Returns the AimbatFiles belonging to the active event. + """Returns the data sources belonging to the active event. Args: session: Database session. Returns: - List of AimbatFiles. + Sequence of AimbatDataSource objects belonging to the active event. """ - logger.info("Getting aimbatfiles in active event.") + logger.info("Getting data sources for active event.") - select_files = ( + statement = ( select(AimbatDataSource) .join(AimbatSeismogram) .join(AimbatEvent) .where(AimbatEvent.active == 1) ) - return session.exec(select_files).all() + return session.exec(statement).all() def print_data_table(session: Session, short: bool, all_events: bool = False) -> None: - """Print a pretty table with AIMBAT data. + """Print a pretty table with information about the data sources in the database. Args: short: Shorten UUIDs and format data. all_events: Print all files instead of limiting to the active event. 
""" - logger.info("Printing AIMBAT data table.") + logger.info("Printing data sources table.") if all_events: aimbat_data_sources = session.exec(select(AimbatDataSource)).all() - title = "AIMBAT data for all events" + title = "Data sources for all events" else: active_event = get_active_event(session) aimbat_data_sources = get_data_for_active_event(session) @@ -202,7 +287,7 @@ def print_data_table(session: Session, short: bool, all_events: bool = False) -> else active_event.time ) id = uuid_shortener(session, active_event) if short else active_event.id - title = f"AIMBAT data for event {time} (ID={id})" + title = f"Data sources for event {time} (ID={id})" logger.debug(f"Found {len(aimbat_data_sources)} files in total.") diff --git a/src/aimbat/core/_event.py b/src/aimbat/core/_event.py index 3145b25e..ddfdedef 100644 --- a/src/aimbat/core/_event.py +++ b/src/aimbat/core/_event.py @@ -1,11 +1,10 @@ """Module to manage and view events in AIMBAT.""" +from aimbat.core import get_active_event from aimbat.logger import logger from aimbat.cli._common import HINTS from aimbat.utils import ( uuid_shortener, - get_active_event, - make_table, json_to_table, TABLE_STYLING, ) @@ -13,6 +12,7 @@ AimbatEvent, AimbatEventParameters, AimbatEventParametersBase, + AimbatEventRead, AimbatStation, AimbatSeismogram, ) @@ -23,21 +23,16 @@ EventParameterTimedelta, ) from pydantic import TypeAdapter -from rich.console import Console from sqlmodel import select, Session from sqlalchemy.exc import NoResultFound from typing import overload, Any, Literal -from pandas import Timedelta +from pandas import Timedelta, Timestamp from collections.abc import Sequence from uuid import UUID -import aimbat.core._station as station __all__ = [ "delete_event_by_id", "delete_event", - "get_active_event", - "set_active_event_by_id", - "set_active_event", "get_completed_events", "get_events_using_station", "get_event_parameter", @@ -84,46 +79,6 @@ def delete_event(session: Session, event: AimbatEvent) -> 
None: session.commit() -def set_active_event_by_id(session: Session, event_id: UUID) -> None: - """ - Set the currently selected event (i.e. the one being processed) by its ID. - - Args: - session: SQL session. - event_id: ID of AIMBAT Event to set as active one. - - Raises: - ValueError: If no event with the given ID is found. - """ - logger.info(f"Setting active event to event with id={event_id}.") - - if event_id not in session.exec(select(AimbatEvent.id)).all(): - raise ValueError( - f"No AimbatEvent found with id: {event_id}. {HINTS.LIST_EVENTS}" - ) - - aimbat_event = session.exec( - select(AimbatEvent).where(AimbatEvent.id == event_id) - ).one() - set_active_event(session, aimbat_event) - - -def set_active_event(session: Session, event: AimbatEvent) -> None: - """ - Set the active event (i.e. the one being processed). - - Args: - session: SQL session. - event: AIMBAT Event to set as active. - """ - - logger.info(f"Activating {event=}") - - event.active = True - session.add(event) - session.commit() - - def get_completed_events(session: Session) -> Sequence[AimbatEvent]: """Get the events marked as completed. @@ -252,60 +207,32 @@ def set_event_parameter( session.commit() -def dump_event_table_to_json(session: Session) -> str: - """Dump the table data to json.""" - - logger.info("Dumping AIMBAT event table to json.") - adapter: TypeAdapter[Sequence[AimbatEvent]] = TypeAdapter(Sequence[AimbatEvent]) - aimbat_event = session.exec(select(AimbatEvent)).all() - - return adapter.dump_json(aimbat_event).decode("utf-8") +@overload +def dump_event_table_to_json( + session: Session, as_string: Literal[True] = ... +) -> str: ... -def print_event_table(session: Session, short: bool) -> None: - """Print a pretty table with AIMBAT events. +@overload +def dump_event_table_to_json( + session: Session, as_string: Literal[False] +) -> list[dict[str, Any]]: ... - Args: - session: Database session. - short: Shorten and format the output to be more human-readable. 
- """ - logger.info("Printing AIMBAT events table.") +def dump_event_table_to_json( + session: Session, as_string: bool = True +) -> str | list[dict[str, Any]]: + """Dump the table data to json.""" - table = make_table(title="AIMBAT Events") - table.add_column( - "ID (shortened)" if short else "ID", - justify="center", - style=TABLE_STYLING.id, - no_wrap=True, - ) - table.add_column("Active", justify="center", style=TABLE_STYLING.mine, no_wrap=True) - table.add_column( - "Date & Time", justify="center", style=TABLE_STYLING.mine, no_wrap=True + logger.info("Dumping AIMBAT event table to json.") + events = session.exec(select(AimbatEvent)).all() + event_reads = [AimbatEventRead.from_event(e) for e in events] + adapter: TypeAdapter[Sequence[AimbatEventRead]] = TypeAdapter( + Sequence[AimbatEventRead] ) - table.add_column("Latitude", justify="center", style=TABLE_STYLING.mine) - table.add_column("Longitude", justify="center", style=TABLE_STYLING.mine) - table.add_column("Depth", justify="center", style=TABLE_STYLING.mine) - table.add_column("Completed", justify="center", style=TABLE_STYLING.parameters) - table.add_column("# Seismograms", justify="center", style=TABLE_STYLING.linked) - table.add_column("# Stations", justify="center", style=TABLE_STYLING.linked) - - for event in session.exec(select(AimbatEvent)).all(): - logger.debug(f"Adding event with id={event.id} to the table.") - table.add_row( - uuid_shortener(session, event) if short else str(event.id), - TABLE_STYLING.bool_formatter(event.active), - TABLE_STYLING.timestamp_formatter(event.time, short), - f"{event.latitude:.3f}" if short else str(event.latitude), - f"{event.longitude:.3f}" if short else str(event.longitude), - f"{event.depth:.0f}" if short else str(event.depth), - TABLE_STYLING.bool_formatter(event.parameters.completed), - str(len(event.seismograms)), - str(len(station.get_stations_in_event(session, event))), - ) - - console = Console() - console.print(table) + if as_string: + return 
adapter.dump_json(event_reads).decode("utf-8") + return adapter.dump_python(event_reads, mode="json") @overload @@ -350,6 +277,70 @@ def dump_event_parameter_table_to_json( return active_event.parameters.model_dump(mode="json") +def print_event_table(session: Session, short: bool) -> None: + """Print a pretty table with AIMBAT events. + + Args: + session: Database session. + short: Shorten and format the output to be more human-readable. + """ + + logger.info("Printing AIMBAT events table.") + + json_to_table( + data=dump_event_table_to_json(session, as_string=False), + title="AIMBAT Events", + column_order=[ + "id", + "active", + "time", + "latitude", + "longitude", + "depth", + "completed", + "seismogram_count", + "station_count", + ], + formatters={ + "id": lambda x: ( + uuid_shortener(session, AimbatEvent, str_uuid=x) if short else x + ), + "active": TABLE_STYLING.bool_formatter, + "time": lambda x: TABLE_STYLING.timestamp_formatter(Timestamp(x), short), + "latitude": lambda x: f"{x:.3f}" if short else str(x), + "longitude": lambda x: f"{x:.3f}" if short else str(x), + "depth": lambda x: f"{x:.0f}" if short and x is not None else str(x), + "completed": TABLE_STYLING.bool_formatter, + }, + common_column_kwargs={"justify": "center"}, + column_kwargs={ + "id": { + "header": "ID (shortened)" if short else "ID", + "style": TABLE_STYLING.id, + "no_wrap": True, + }, + "active": {"style": TABLE_STYLING.mine, "no_wrap": True}, + "time": { + "header": "Date & Time", + "style": TABLE_STYLING.mine, + "no_wrap": True, + }, + "latitude": {"style": TABLE_STYLING.mine}, + "longitude": {"style": TABLE_STYLING.mine}, + "depth": {"style": TABLE_STYLING.mine}, + "completed": {"style": TABLE_STYLING.parameters}, + "seismogram_count": { + "header": "# Seismograms", + "style": TABLE_STYLING.linked, + }, + "station_count": { + "header": "# Stations", + "style": TABLE_STYLING.linked, + }, + }, + ) + + def print_event_parameter_table( session: Session, short: bool, all_events: bool ) -> 
None: diff --git a/src/aimbat/core/_iccs.py b/src/aimbat/core/_iccs.py index 5a02add1..5b6f591c 100644 --- a/src/aimbat/core/_iccs.py +++ b/src/aimbat/core/_iccs.py @@ -1,11 +1,9 @@ """Processing of data for AIMBAT.""" -from typing import cast - +from aimbat.core import get_active_event from aimbat import settings from aimbat.logger import logger from aimbat.models import AimbatSeismogram -from aimbat.utils import get_active_event from pysmo.tools.signal import mccc from pysmo.tools.iccs import ( ICCS, @@ -16,6 +14,7 @@ update_timewindow as _update_timewindow, ) from sqlmodel import Session +from typing import cast __all__ = [ "create_iccs_instance", diff --git a/src/aimbat/core/_project.py b/src/aimbat/core/_project.py index 6fdbdfed..6e6436ee 100644 --- a/src/aimbat/core/_project.py +++ b/src/aimbat/core/_project.py @@ -1,4 +1,4 @@ -from aimbat.utils import get_active_event +from aimbat.core import get_active_event from aimbat.logger import logger from aimbat.models import ( AimbatEvent, @@ -44,36 +44,52 @@ def _project_exists(engine: Engine) -> bool: def create_project(engine: Engine) -> None: - """Create a new AIMBAT project.""" + """Initializes a new AIMBAT project database schema and triggers. - # import this to create tables below + Args: + engine: The SQLAlchemy/SQLModel Engine instance connected to the target database. + + Raises: + RuntimeError: If a project schema already exists in the target database. + """ + + # Import locally to ensure SQLModel registers all table metadata before create_all() import aimbat.models # noqa: F401 - logger.info(f"Creating new project in {engine=}.") + logger.info(f"Creating new project in {engine.url}") if _project_exists(engine): raise RuntimeError( - f"Unable to create a new project: project already exists in {engine=}!" + f"Unable to create a new project: project already exists at {engine.url}!" 
) logger.debug("Creating database tables and loading defaults.") SQLModel.metadata.create_all(engine) - if engine.driver == "pysqlite": - with engine.connect() as connection: - connection.execute(text("PRAGMA foreign_keys=ON")) # for SQLite only - # This trigger ensures that only one event can be active at a time - with engine.connect() as connection: - connection.execute(text("""CREATE TRIGGER single_active_event - BEFORE UPDATE ON aimbatevent - FOR EACH ROW - WHEN NEW.active = TRUE - BEGIN - UPDATE aimbatevent SET active = NULL - WHERE active = TRUE AND id != NEW.id; - END; - """)) + if engine.name == "sqlite": + with engine.begin() as connection: + # Trigger 1: Handle updates to existing rows + connection.execute(text(""" + CREATE TRIGGER IF NOT EXISTS single_active_event_update + BEFORE UPDATE ON aimbatevent + FOR EACH ROW WHEN NEW.active = TRUE + BEGIN + UPDATE aimbatevent SET active = NULL + WHERE active = TRUE AND id != NEW.id; + END; + """)) + + # Trigger 2: Handle brand new active events being inserted + connection.execute(text(""" + CREATE TRIGGER IF NOT EXISTS single_active_event_insert + BEFORE INSERT ON aimbatevent + FOR EACH ROW WHEN NEW.active = TRUE + BEGIN + UPDATE aimbatevent SET active = NULL + WHERE active = TRUE; + END; + """)) def delete_project(engine: Engine) -> None: @@ -119,8 +135,10 @@ def print_project_info(engine: Engine) -> None: grid.add_column() grid.add_column(justify="left") if engine.driver == "pysqlite": - project = str(engine.url.database) - grid.add_row("AIMBAT Project File: ", project) + if engine.url.database == ":memory:": + grid.add_row("AIMBAT Project: ", "in-memory database") + else: + grid.add_row("AIMBAT Project File: ", str(engine.url.database)) events = len(session.exec(select(AimbatEvent)).all()) completed_events = len(event.get_completed_events(session)) diff --git a/src/aimbat/core/_seismogram.py b/src/aimbat/core/_seismogram.py index f85fc75d..547741d5 100644 --- a/src/aimbat/core/_seismogram.py +++ 
b/src/aimbat/core/_seismogram.py @@ -1,7 +1,7 @@ +from aimbat.core import get_active_event from aimbat.logger import logger from aimbat.utils import ( uuid_shortener, - get_active_event, make_table, TABLE_STYLING, json_to_table, @@ -281,6 +281,44 @@ def dump_seismogram_table_to_json(session: Session) -> str: return adapter.dump_json(aimbat_seismograms).decode("utf-8") +@overload +def dump_seismogram_parameter_table_to_json( + session: Session, all_events: bool, as_string: Literal[True] +) -> str: ... + + +@overload +def dump_seismogram_parameter_table_to_json( + session: Session, all_events: bool, as_string: Literal[False] +) -> list[dict[str, Any]]: ... + + +def dump_seismogram_parameter_table_to_json( + session: Session, all_events: bool, as_string: bool +) -> str | list[dict[str, Any]]: + """Dump the seismogram parameter table data to json.""" + + logger.info("Dumping AimbatSeismogramParameters table to json.") + + adapter: TypeAdapter[Sequence[AimbatSeismogramParameters]] = TypeAdapter( + Sequence[AimbatSeismogramParameters] + ) + + if all_events: + parameters = session.exec(select(AimbatSeismogramParameters)).all() + else: + parameters = session.exec( + select(AimbatSeismogramParameters) + .join(AimbatSeismogram) + .join(AimbatEvent) + .where(AimbatEvent.active == 1) + ).all() + + if as_string: + return adapter.dump_json(parameters).decode("utf-8") + return adapter.dump_python(parameters, mode="json") + + def print_seismogram_table( session: Session, short: bool, all_events: bool = False ) -> None: @@ -335,7 +373,7 @@ def print_seismogram_table( row = [ (uuid_shortener(session, seismogram) if short else str(seismogram.id)), TABLE_STYLING.bool_formatter(seismogram.parameters.select), - str(len(seismogram)), + str(len(seismogram.data)), str(seismogram.delta.total_seconds()), ( uuid_shortener(session, seismogram.datasource) @@ -362,44 +400,6 @@ def print_seismogram_table( console.print(table) -@overload -def dump_seismogram_parameter_table_to_json( - session: 
Session, all_events: bool, as_string: Literal[True] -) -> str: ... - - -@overload -def dump_seismogram_parameter_table_to_json( - session: Session, all_events: bool, as_string: Literal[False] -) -> list[dict[str, Any]]: ... - - -def dump_seismogram_parameter_table_to_json( - session: Session, all_events: bool, as_string: bool -) -> str | list[dict[str, Any]]: - """Dump the seismogram parameter table data to json.""" - - logger.info("Dumping AimbatSeismogramParameters table to json.") - - adapter: TypeAdapter[Sequence[AimbatSeismogramParameters]] = TypeAdapter( - Sequence[AimbatSeismogramParameters] - ) - - if all_events: - parameters = session.exec(select(AimbatSeismogramParameters)).all() - else: - parameters = session.exec( - select(AimbatSeismogramParameters) - .join(AimbatSeismogram) - .join(AimbatEvent) - .where(AimbatEvent.active == 1) - ).all() - - if as_string: - return adapter.dump_json(parameters).decode("utf-8") - return adapter.dump_python(parameters, mode="json") - - def print_seismogram_parameter_table(session: Session, short: bool) -> None: """Print a pretty table with AIMBAT seismogram parameter values for the active event. 
diff --git a/src/aimbat/core/_snapshot.py b/src/aimbat/core/_snapshot.py index 11eaccea..efe922a0 100644 --- a/src/aimbat/core/_snapshot.py +++ b/src/aimbat/core/_snapshot.py @@ -1,20 +1,23 @@ +import uuid +import json +from aimbat.core import get_active_event from aimbat.logger import logger -from aimbat.utils import uuid_shortener, get_active_event, make_table, TABLE_STYLING +from aimbat.utils import uuid_shortener, json_to_table, TABLE_STYLING from aimbat.models import ( AimbatSeismogramParametersBase, AimbatSnapshot, + AimbatSnapshotRead, AimbatEvent, AimbatEventParametersBase, - AimbatEventParameters, AimbatEventParametersSnapshot, AimbatSeismogramParametersSnapshot, ) from sqlmodel import Session, select -from rich.console import Console +from sqlalchemy import true +from pandas import Timestamp from collections.abc import Sequence from typing import overload, Literal, Any from pydantic import TypeAdapter -import uuid __all__ = [ "create_snapshot", @@ -23,7 +26,7 @@ "delete_snapshot_by_id", "delete_snapshot", "get_snapshots", - "dump_snapshot_table_to_json", + "dump_snapshot_tables_to_json", "print_snapshot_table", ] @@ -184,60 +187,83 @@ def get_snapshots( logger.info("Getting AIMBAT snapshots.") - if all_events: - logger.debug("Getting snapshots for all events.") - return session.exec(select(AimbatSnapshot)).all() - - logger.debug("Getting snapshots for active event.") - select_active_event_snapshots = ( + statement = ( select(AimbatSnapshot) - .join(AimbatEventParametersSnapshot) - .join(AimbatEventParameters) .join(AimbatEvent) - .where(AimbatEvent.active == 1) + .where(AimbatEvent.active == True if not all_events else true()) # noqa: E712 ) - return session.exec(select_active_event_snapshots).all() + + logger.debug(f"Executing statement to get snapshots: {statement}") + return session.exec(statement).all() @overload -def dump_snapshot_table_to_json( +def dump_snapshot_tables_to_json( session: Session, all_events: bool, as_string: Literal[True] ) -> str: 
... @overload -def dump_snapshot_table_to_json( +def dump_snapshot_tables_to_json( session: Session, all_events: bool, as_string: Literal[False] -) -> list[dict[str, Any]]: ... +) -> dict[str, list[dict[str, Any]]]: ... -def dump_snapshot_table_to_json( +def dump_snapshot_tables_to_json( session: Session, all_events: bool, as_string: bool -) -> str | list[dict[str, Any]]: - """Dump the `AimbatSnapshot` table data to json.""" +) -> str | dict[str, list[dict[str, Any]]]: + """Dump snapshot data as a dict of lists of dicts. + + Returns a structure with three keys: + + - ``snapshots``: flat list of snapshot metadata. + - ``event_parameters``: flat list of event parameter snapshots. + - ``seismogram_parameters``: flat list of seismogram parameter snapshots. + + Each entry includes a ``snapshot_id`` for cross-referencing. - logger.info("Dumping AimbatSeismogramtable to json.") + Args: + session: Database session. + all_events: Include snapshots for all events. + as_string: Return a JSON string when True, otherwise a dict. 
+ """ + logger.info(f"Dumping AimbatSnapshot tables to json with {all_events=}.") + + snapshots = get_snapshots(session, all_events) - adapter: TypeAdapter[Sequence[AimbatSnapshot]] = TypeAdapter( - Sequence[AimbatSnapshot] + snapshot_adapter: TypeAdapter[Sequence[AimbatSnapshotRead]] = TypeAdapter( + Sequence[AimbatSnapshotRead] + ) + event_params_adapter: TypeAdapter[Sequence[AimbatEventParametersSnapshot]] = ( + TypeAdapter(Sequence[AimbatEventParametersSnapshot]) + ) + seis_params_adapter: TypeAdapter[Sequence[AimbatSeismogramParametersSnapshot]] = ( + TypeAdapter(Sequence[AimbatSeismogramParametersSnapshot]) ) - if all_events: - parameters = session.exec(select(AimbatSnapshot)).all() - else: - parameters = session.exec( - select(AimbatSnapshot).join(AimbatEvent).where(AimbatEvent.active == 1) - ).all() + snapshot_reads = [AimbatSnapshotRead.from_snapshot(s) for s in snapshots] + event_params = [s.event_parameters_snapshot for s in snapshots] + seis_params = [sp for s in snapshots for sp in s.seismogram_parameters_snapshots] - if as_string: - return adapter.dump_json(parameters).decode("utf-8") - return adapter.dump_python(parameters, mode="json") + data: dict[str, list[dict[str, Any]]] = { + "snapshots": snapshot_adapter.dump_python(snapshot_reads, mode="json"), + "event_parameters": event_params_adapter.dump_python(event_params, mode="json"), + "seismogram_parameters": seis_params_adapter.dump_python( + seis_params, mode="json" + ), + } + + return json.dumps(data) if as_string else data def print_snapshot_table(session: Session, short: bool, all_events: bool) -> None: """Print a pretty table with AIMBAT snapshots. + Uses the ``snapshots`` portion of :func:`dump_snapshot_tables_to_json` + and renders it via :func:`~aimbat.utils.json_to_table`. + Args: + session: Database session. short: Shorten and format the output to be more human-readable. all_events: Print all snapshots instead of limiting to the active event. 
""" @@ -246,9 +272,6 @@ def print_snapshot_table(session: Session, short: bool, all_events: bool) -> Non title = "AIMBAT snapshots for all events" - snapshots = get_snapshots(session, all_events) - logger.debug(f"Found {len(snapshots)} snapshots for the table.") - if not all_events: active_event = get_active_event(session) if short: @@ -258,36 +281,49 @@ def print_snapshot_table(session: Session, short: bool, all_events: bool) -> Non f"AIMBAT snapshots for event {active_event.time} (ID={active_event.id})" ) - table = make_table(title=title) + data = dump_snapshot_tables_to_json(session, all_events, as_string=False) + snapshot_data = data["snapshots"] - table.add_column( - "ID (shortened)" if short else "ID", - justify="center", - style=TABLE_STYLING.id, - no_wrap=True, - ) - table.add_column( - "Date & Time", justify="center", style=TABLE_STYLING.mine, no_wrap=True - ) - table.add_column("Comment", justify="center", style=TABLE_STYLING.mine) - table.add_column("# Seismograms", justify="center", style=TABLE_STYLING.linked) + column_order = ["id", "date", "comment", "seismogram_count"] if all_events: - table.add_column("Event ID", justify="center", style=TABLE_STYLING.linked) - - for snapshot in snapshots: - logger.debug(f"Adding snapshot with id={snapshot.id} to the table.") - row = [ - (uuid_shortener(session, snapshot) if short else str(snapshot.id)), - TABLE_STYLING.timestamp_formatter(snapshot.date, short), - str(snapshot.comment), - str(len(snapshot.seismogram_parameters_snapshots)), - ] - if all_events: - aimbat_event = snapshot.event - row.append( - uuid_shortener(session, aimbat_event) if short else str(aimbat_event.id) - ) - table.add_row(*row) - - console = Console() - console.print(table) + column_order.append("event_id") + + skip_keys = [] if all_events else ["event_id"] + + json_to_table( + data=snapshot_data, + title=title, + column_order=column_order, + skip_keys=skip_keys, + formatters={ + "id": lambda x: ( + uuid_shortener(session, AimbatSnapshot, 
str_uuid=x) if short else x + ), + "date": lambda x: TABLE_STYLING.timestamp_formatter(Timestamp(x), short), + "event_id": lambda x: ( + uuid_shortener(session, AimbatEvent, str_uuid=x) if short else x + ), + }, + common_column_kwargs={"justify": "center"}, + column_kwargs={ + "id": { + "header": "ID (shortened)" if short else "ID", + "style": TABLE_STYLING.id, + "no_wrap": True, + }, + "date": { + "header": "Date & Time", + "style": TABLE_STYLING.mine, + "no_wrap": True, + }, + "comment": {"style": TABLE_STYLING.mine}, + "seismogram_count": { + "header": "# Seismograms", + "style": TABLE_STYLING.linked, + }, + "event_id": { + "header": "Event ID (shortened)" if short else "Event ID", + "style": TABLE_STYLING.linked, + }, + }, + ) diff --git a/src/aimbat/core/_station.py b/src/aimbat/core/_station.py index 0d026e6f..d28fee38 100644 --- a/src/aimbat/core/_station.py +++ b/src/aimbat/core/_station.py @@ -1,19 +1,23 @@ +import uuid +from aimbat.core import get_active_event from aimbat.logger import logger -from aimbat.utils import uuid_shortener, make_table, get_active_event, TABLE_STYLING +from aimbat.utils import uuid_shortener, json_to_table, TABLE_STYLING from aimbat.models import AimbatStation, AimbatSeismogram, AimbatEvent -from sqlmodel import Session, select +from typing import overload, Literal, Any +from sqlmodel import Session, select, col +from sqlalchemy import func from sqlalchemy.exc import NoResultFound -from rich.console import Console from collections.abc import Sequence from pydantic import TypeAdapter -import uuid __all__ = [ "delete_station_by_id", "delete_station", "get_stations_in_event", - "print_station_table", + "get_stations_in_active_event", + "get_stations_with_event_seismogram_count", "dump_station_table_to_json", + "print_station_table", ] @@ -50,6 +54,48 @@ def delete_station(session: Session, station: AimbatStation) -> None: session.commit() +@overload +def get_stations_in_active_event( + session: Session, as_json: Literal[False] +) -> 
Sequence[AimbatStation]: ... + + +@overload +def get_stations_in_active_event( + session: Session, as_json: Literal[True] +) -> list[dict[str, Any]]: ... + + +def get_stations_in_active_event( + session: Session, as_json: bool +) -> Sequence[AimbatStation] | list[dict[str, Any]]: + """Get the stations for the active event. + + Args: + session: Database session. + + Returns: Stations in active event. + """ + logger.info("Getting stations for active event.") + + statement = ( + select(AimbatStation) + .join(AimbatSeismogram) + .join(AimbatEvent) + .where(AimbatEvent.active == True) # noqa: E712 + ) + + logger.debug(f"Executing query: {statement}") + results = session.exec(statement).all() + + if not as_json: + return results + + adapter: TypeAdapter[Sequence[AimbatStation]] = TypeAdapter(Sequence[AimbatStation]) + + return adapter.dump_python(results, mode="json") + + def get_stations_in_event( session: Session, event: AimbatEvent ) -> Sequence[AimbatStation]: @@ -61,23 +107,91 @@ def get_stations_in_event( Returns: Stations in event. """ - logger.info(f"Getting stations for event: {event.id}.") - select_stations = ( + statement = ( select(AimbatStation) .join(AimbatSeismogram) .join(AimbatEvent) .where(AimbatEvent.id == event.id) ) - stations = session.exec(select_stations).all() - - logger.debug(f"Found {len(stations)}.") + logger.debug(f"Executing query: {statement}") + stations = session.exec(statement).all() return stations +@overload +def get_stations_with_event_seismogram_count( + session: Session, as_json: Literal[False] +) -> Sequence[tuple[AimbatStation, int, int]]: ... + + +@overload +def get_stations_with_event_seismogram_count( + session: Session, as_json: Literal[True] +) -> list[dict[str, Any]]: ... + + +def get_stations_with_event_seismogram_count( + session: Session, as_json: bool +) -> Sequence[tuple[AimbatStation, int, int]] | list[dict[str, Any]]: + """Get stations along with the count of seismograms and events they are associated with. 
+ + Args: + session: Database session. + as_json: Whether to return the result as JSON. + + Returns: A sequence of tuples containing the station, count of seismograms + and count of events, or a JSON string if as_json is True. + """ + logger.info("Getting stations with associated seismogram and event counts.") + + statement = ( + select( + AimbatStation, + func.count(col(AimbatSeismogram.id)), + func.count(func.distinct(col(AimbatEvent.id))), + ) + .select_from(AimbatStation) + .join(AimbatSeismogram, isouter=True) + .join(AimbatEvent, isouter=True) + .group_by(col(AimbatStation.id)) + ) + + logger.debug(f"Executing query: {statement}") + results = session.exec(statement).all() + + if not as_json: + return results + + formatted_results = [] + + for row in results: + # 1. Dump the station to a dict. mode="json" safely converts UUIDs/Datetimes to strings! + station_dict = row[0].model_dump(mode="json") + + # 2. Add the counts directly to the dictionary + station_dict["seismogram_count"] = row[1] + station_dict["event_count"] = row[2] + + # 3. Add to our final list + formatted_results.append(station_dict) + + return formatted_results + + +def dump_station_table_to_json(session: Session) -> str: + """Create a JSON string from the AimbatStation table data.""" + + logger.info("Dumping AIMBAT station table to json.") + + adapter: TypeAdapter[Sequence[AimbatStation]] = TypeAdapter(Sequence[AimbatStation]) + aimbat_station = session.exec(select(AimbatStation)).all() + return adapter.dump_json(aimbat_station).decode("utf-8") + + def print_station_table( session: Session, short: bool, all_events: bool = False ) -> None: @@ -88,92 +202,107 @@ def print_station_table( short: Shorten and format the output to be more human-readable. all_events: Print stations for all events. 
""" - logger.info("Printing station table.") title = "AIMBAT stations for all events" - aimbat_stations = None if all_events: logger.debug("Selecting all AIMBAT stations.") - aimbat_stations = session.exec(select(AimbatStation)).all() + data = get_stations_with_event_seismogram_count(session, as_json=True) else: - logger.debug("Selecting AIMBAT stations for active event.") + logger.debug("Selecting AIMBAT stations used by active event.") active_event = get_active_event(session) - aimbat_stations = get_stations_in_event(session, active_event) + data = get_stations_in_active_event(session, as_json=True) + if short: title = f"AIMBAT stations for event {active_event.time.strftime('%Y-%m-%d %H:%M:%S')} (ID={uuid_shortener(session, active_event)})" else: title = ( f"AIMBAT stations for event {active_event.time} (ID={active_event.id})" ) - logger.debug("Found {len(aimbat_stations)} stations for the table.") - - table = make_table(title=title) - table.add_column( - "ID (shortened)" if short else "ID", - justify="center", - style=TABLE_STYLING.id, - no_wrap=True, - ) - table.add_column( - "Name & Network", justify="center", style=TABLE_STYLING.mine, no_wrap=True - ) - table.add_column("Channel", justify="center", style=TABLE_STYLING.mine) - table.add_column("Location", justify="center", style=TABLE_STYLING.mine) - table.add_column("Latitude", justify="center", style=TABLE_STYLING.mine) - table.add_column("Longitude", justify="center", style=TABLE_STYLING.mine) - table.add_column("Elevation", justify="center", style=TABLE_STYLING.mine) + column_order = [ + "id", + "name", + "network", + "channel", + "location", + "latitude", + "longitude", + "elevation", + ] if all_events: - table.add_column("# Seismograms", justify="center", style=TABLE_STYLING.linked) - table.add_column("# Events", justify="center", style=TABLE_STYLING.linked) - - for aimbat_station in aimbat_stations: - logger.debug(f"Adding {aimbat_station.name} to the table.") - row = [ - ( - uuid_shortener(session, 
aimbat_station) - if short - else str(aimbat_station.id) - ), - f"{aimbat_station.name} - {aimbat_station.network}", - f"{aimbat_station.channel}", - f"{aimbat_station.location}", - ( - f"{aimbat_station.latitude:.3f}" - if short - else str(aimbat_station.latitude) - ), - ( - f"{aimbat_station.longitude:.3f}" - if short - else str(aimbat_station.longitude) - ), - ( - f"{aimbat_station.elevation:.0f}" - if short - else str(aimbat_station.elevation) - ), - ] - if all_events: - row.extend( - [ - str(len(aimbat_station.seismograms)), - str(len({i.event_id for i in aimbat_station.seismograms})), - ] - ) - table.add_row(*row) - - console = Console() - console.print(table) - - -def dump_station_table_to_json(session: Session) -> str: - """Create a JSON string from the AimbatStation table data.""" - - logger.info("Dumping AIMBAT station table to json.") - - adapter: TypeAdapter[Sequence[AimbatStation]] = TypeAdapter(Sequence[AimbatStation]) - aimbat_station = session.exec(select(AimbatStation)).all() - return adapter.dump_json(aimbat_station).decode("utf-8") + column_order.extend(["seismogram_count", "event_count"]) + + column_kwargs: dict[str, dict[str, Any]] = { + "id": { + "header": "ID (shortened)" if short else "ID", + "style": TABLE_STYLING.id, + "justify": "center", + "no_wrap": True, + }, + "name": { + "header": "Name", + "style": TABLE_STYLING.mine, + "justify": "center", + "no_wrap": True, + }, + "network": { + "header": "Network", + "style": TABLE_STYLING.mine, + "justify": "center", + "no_wrap": True, + }, + "channel": { + "header": "Channel", + "style": TABLE_STYLING.mine, + "justify": "center", + }, + "location": { + "header": "Location", + "style": TABLE_STYLING.mine, + "justify": "center", + }, + "latitude": { + "header": "Latitude", + "style": TABLE_STYLING.mine, + "justify": "center", + }, + "longitude": { + "header": "Longitude", + "style": TABLE_STYLING.mine, + "justify": "center", + }, + "elevation": { + "header": "Elevation", + "style": 
TABLE_STYLING.mine, + "justify": "center", + }, + "seismogram_count": { + "header": "# Seismograms", + "style": TABLE_STYLING.linked, + "justify": "center", + }, + "event_count": { + "header": "# Events", + "style": TABLE_STYLING.linked, + "justify": "center", + }, + } + + formatters = { + "id": lambda x: ( + uuid_shortener(session, AimbatStation, str_uuid=x) if short else str(x) + ), + "latitude": lambda x: f"{x:.3f}" if short else str(x), + "longitude": lambda x: f"{x:.3f}" if short else str(x), + "elevation": lambda x: f"{x:.0f}" if short else str(x), + } + + json_to_table( + data, + title=title, + column_order=column_order, + column_kwargs=column_kwargs, + formatters=formatters, + ) diff --git a/src/aimbat/db.py b/src/aimbat/db.py index 614dcef4..f74083d1 100644 --- a/src/aimbat/db.py +++ b/src/aimbat/db.py @@ -1,9 +1,25 @@ """Module to define the AIMBAT project file and create the database engine.""" +import sqlite3 from aimbat import settings from sqlmodel import create_engine +from sqlalchemy import event +from sqlalchemy.pool import ConnectionPoolEntry __all__ = ["engine"] engine = create_engine(url=settings.db_url, echo=False) """AIMBAT database engine.""" + + +# Automatically enforce foreign keys for every new connection if using SQLite +if engine.name == "sqlite": + + @event.listens_for(engine, "connect") + def set_sqlite_pragma( + dbapi_connection: sqlite3.Connection, connection_record: ConnectionPoolEntry + ) -> None: + """Enables foreign key support for SQLite connections.""" + cursor = dbapi_connection.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + cursor.close() diff --git a/src/aimbat/models/_models.py b/src/aimbat/models/_models.py index e86c70b6..d81671b7 100644 --- a/src/aimbat/models/_models.py +++ b/src/aimbat/models/_models.py @@ -4,6 +4,9 @@ as classes to use with python in AIMBAT. 
""" +import numpy as np +import os +import uuid from ._sqlalchemy import SAPandasTimestamp, SAPandasTimedelta from aimbat import settings from aimbat._lib._mixins import EventParametersValidatorMixin @@ -15,13 +18,12 @@ PydanticPositiveTimedelta, ) from datetime import timezone -from sqlmodel import Relationship, SQLModel, Field +from sqlmodel import Relationship, SQLModel, Field, col, select +from sqlalchemy import func +from sqlalchemy.orm import column_property from pydantic import computed_field -from typing import TYPE_CHECKING +from typing import Self, TYPE_CHECKING from pandas import Timestamp -import numpy as np -import os -import uuid __all__ = [ "AimbatTypes", @@ -37,67 +39,11 @@ "AimbatSeismogramParametersBase", "AimbatSeismogramParametersSnapshot", "AimbatSnapshot", + "AimbatEventRead", + "AimbatSnapshotRead", ] -class AimbatDataSourceCreate(SQLModel): - """Class to store data source information.""" - - sourcename: str | os.PathLike = Field(unique=True) - datatype: DataType = DataType.SAC - - -class AimbatDataSource(SQLModel, table=True): - """Class to store data source information.""" - - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - sourcename: str - datatype: DataType - seismogram_id: uuid.UUID = Field( - default=None, foreign_key="aimbatseismogram.id", ondelete="CASCADE" - ) - seismogram: "AimbatSeismogram" = Relationship(back_populates="datasource") - - -class AimbatEvent(SQLModel, table=True): - """Store event information.""" - - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - "Unique ID." - - active: bool | None = Field(default=None, unique=True) - "Indicates if an event is the active event." - - time: PydanticTimestamp = Field( - unique=True, sa_type=SAPandasTimestamp, allow_mutation=False - ) - "Event time." - - latitude: float - "Event latitude." - - longitude: float - "Event longitude." - - depth: float | None = None - "Event depth." 
- - seismograms: list["AimbatSeismogram"] = Relationship( - back_populates="event", cascade_delete=True - ) - "List of seismograms of this event." - - parameters: "AimbatEventParameters" = Relationship( - back_populates="event", cascade_delete=True - ) - "Event parameters." - - snapshots: list["AimbatSnapshot"] = Relationship( - back_populates="event", cascade_delete=True - ) - "List of snapshots." - - class AimbatEventParametersBase(SQLModel): """Base class that defines the event parameters used in AIMBAT. @@ -134,6 +80,74 @@ class AimbatEventParametersBase(SQLModel): "Maximum frequency for bandpass filter (ignored if `bandpass_apply` is False)." +class AimbatSeismogramParametersBase(SQLModel): + """Base class that defines the seismogram parameters used in AIMBAT.""" + + flip: bool = False + "Whether or not the seismogram should be flipped." + + select: bool = True + "Whether or not this seismogram should be used for processing." + + t1: PydanticTimestamp | None = Field(default=None, sa_type=SAPandasTimestamp) + """Working pick. + + This pick serves as working as well as output pick. It is changed by: + + 1. Picking the phase arrival in the stack. + 2. Running ICCS. + 3. Running MCCC. 
+ """ + + +class AimbatDataSourceCreate(SQLModel): + """Class to store data source information.""" + + sourcename: str | os.PathLike = Field(unique=True) + datatype: DataType = DataType.SAC + + +class AimbatDataSource(SQLModel, table=True): + """Class to store data source information.""" + + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + sourcename: str + datatype: DataType + seismogram_id: uuid.UUID = Field( + default=None, foreign_key="aimbatseismogram.id", ondelete="CASCADE" + ) + seismogram: "AimbatSeismogram" = Relationship(back_populates="datasource") + + +class AimbatSeismogramParameters(AimbatSeismogramParametersBase, table=True): + """Class to store ICCS processing parameters of a single seismogram.""" + + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + seismogram_id: uuid.UUID = Field( + default=None, foreign_key="aimbatseismogram.id", ondelete="CASCADE" + ) + seismogram: "AimbatSeismogram" = Relationship(back_populates="parameters") + snapshots: list["AimbatSeismogramParametersSnapshot"] = Relationship( + back_populates="parameters", cascade_delete=True + ) + + +class AimbatSeismogramParametersSnapshot(AimbatSeismogramParametersBase, table=True): + """Class to store a snapshot of ICCS processing parameters of a single seismogram.""" + + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + seismogram_parameters_id: uuid.UUID = Field( + foreign_key="aimbatseismogramparameters.id", ondelete="CASCADE" + ) + parameters: AimbatSeismogramParameters = Relationship(back_populates="snapshots") + snapshot_id: uuid.UUID = Field( + default=None, foreign_key="aimbatsnapshot.id", ondelete="CASCADE" + ) + snapshot: "AimbatSnapshot" = Relationship( + back_populates="seismogram_parameters_snapshots" + ) + + class AimbatEventParameters( AimbatEventParametersBase, EventParametersValidatorMixin, table=True ): @@ -147,7 +161,7 @@ class AimbatEventParameters( ) "Event ID these parameters are associated with." 
- event: AimbatEvent = Relationship(back_populates="parameters") + event: "AimbatEvent" = Relationship(back_populates="parameters") "Event these parameters are associated with." snapshots: list["AimbatEventParametersSnapshot"] = Relationship( @@ -172,37 +186,36 @@ class AimbatEventParametersSnapshot(AimbatEventParametersBase, table=True): ) -class AimbatStation(SQLModel, table=True): - """Class to store station information.""" - - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - "Unique ID." - - name: str = Field(allow_mutation=False) - "Station name." - - network: str = Field(allow_mutation=False) - "Network name." - - location: str = Field(allow_mutation=False) - "Location ID." - - channel: str = Field(allow_mutation=False) - "Channel code." - - latitude: float - "Station latitude" +class AimbatSnapshot(SQLModel, table=True): + """Class to store AIMBAT snapshots. - longitude: float - "Station longitude" + The AimbatSnapshot class does not actually save any parameter data. + It is used to keep track of the AimbatEventParametersSnapshot and + AimbatSeismogramParametersSnapshot instances. + """ - elevation: float | None = None - "Station elevation." + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + date: PydanticTimestamp = Field( + default_factory=lambda: Timestamp.now(tz=timezone.utc), + unique=True, + allow_mutation=False, + sa_type=SAPandasTimestamp, + ) + comment: str | None = None + event_parameters_snapshot: AimbatEventParametersSnapshot = Relationship( + back_populates="snapshot", cascade_delete=True + ) + seismogram_parameters_snapshots: list[AimbatSeismogramParametersSnapshot] = ( + Relationship(back_populates="snapshot", cascade_delete=True) + ) - seismograms: list["AimbatSeismogram"] = Relationship( - back_populates="station", cascade_delete=True + event_id: uuid.UUID = Field( + default=None, foreign_key="aimbatevent.id", ondelete="CASCADE" ) - "Seismograms recorded at this station." 
+ "Event ID this snapshot is associated with." + + event: "AimbatEvent" = Relationship(back_populates="snapshots") + "Event this snapshot is associated with." class AimbatSeismogram(SQLModel, table=True): @@ -226,24 +239,23 @@ class AimbatSeismogram(SQLModel, table=True): station_id: uuid.UUID = Field( default=None, foreign_key="aimbatstation.id", ondelete="CASCADE" ) - station: AimbatStation = Relationship(back_populates="seismograms") + station: "AimbatStation" = Relationship(back_populates="seismograms") event_id: uuid.UUID = Field( default=None, foreign_key="aimbatevent.id", ondelete="CASCADE" ) - event: AimbatEvent = Relationship(back_populates="seismograms") + event: "AimbatEvent" = Relationship(back_populates="seismograms") parameters: "AimbatSeismogramParameters" = Relationship( back_populates="seismogram", cascade_delete=True, ) - def __len__(self) -> int: - return np.size(self.data) - if TYPE_CHECKING: - flip: bool - select: bool - t1: Timestamp | None - data: np.ndarray + # Add same default values for type checking purposes + # as in AimbatSeismogramParametersBase + flip: bool = False + select: bool = True + t1: Timestamp | None = None + data: np.ndarray = np.array([]) @property def end_time(self) -> Timestamp: ... @@ -252,9 +264,9 @@ def end_time(self) -> Timestamp: ... @computed_field def end_time(self) -> PydanticTimestamp: - if len(self) == 0: + if len(self.data) == 0: return self.begin_time - return self.begin_time + self.delta * (len(self) - 1) + return self.begin_time + self.delta * (len(self.data) - 1) @property def flip(self) -> bool: @@ -297,85 +309,147 @@ def data(self, value: np.ndarray) -> None: ) -class AimbatSeismogramParametersBase(SQLModel): - """Base class that defines the seismogram parameters used in AIMBAT.""" +class AimbatStation(SQLModel, table=True): + """Class to store station information.""" - flip: bool = False - "Whether or not the seismogram should be flipped." 
+ id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + "Unique ID." - select: bool = True - "Whether or not this seismogram should be used for processing." + name: str = Field(allow_mutation=False) + "Station name." - t1: PydanticTimestamp | None = Field(default=None, sa_type=SAPandasTimestamp) - """Working pick. + network: str = Field(allow_mutation=False) + "Network name." - This pick serves as working as well as output pick. It is changed by: + location: str = Field(allow_mutation=False) + "Location ID." - 1. Picking the phase arrival in the stack. - 2. Running ICCS. - 3. Running MCCC. - """ + channel: str = Field(allow_mutation=False) + "Channel code." + latitude: float + "Station latitude" -class AimbatSeismogramParameters(AimbatSeismogramParametersBase, table=True): - """Class to store ICCS processing parameters of a single seismogram.""" + longitude: float + "Station longitude" - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - seismogram_id: uuid.UUID = Field( - default=None, foreign_key="aimbatseismogram.id", ondelete="CASCADE" - ) - seismogram: AimbatSeismogram = Relationship(back_populates="parameters") - snapshots: list["AimbatSeismogramParametersSnapshot"] = Relationship( - back_populates="parameters", cascade_delete=True + elevation: float | None = None + "Station elevation." + + seismograms: list[AimbatSeismogram] = Relationship( + back_populates="station", cascade_delete=True ) + "Seismograms recorded at this station." 
-class AimbatSeismogramParametersSnapshot(AimbatSeismogramParametersBase, table=True): - """Class to store a snapshot of ICCS processing parameters of a single seismogram.""" +class AimbatEvent(SQLModel, table=True): + """Store event information.""" id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - seismogram_parameters_id: uuid.UUID = Field( - foreign_key="aimbatseismogramparameters.id", ondelete="CASCADE" - ) - parameters: AimbatSeismogramParameters = Relationship(back_populates="snapshots") - snapshot_id: uuid.UUID = Field( - default=None, foreign_key="aimbatsnapshot.id", ondelete="CASCADE" - ) - snapshot: "AimbatSnapshot" = Relationship( - back_populates="seismogram_parameters_snapshots" + "Unique ID." + + active: bool | None = Field(default=None, unique=True) + "Indicates if an event is the active event." + + time: PydanticTimestamp = Field( + unique=True, sa_type=SAPandasTimestamp, allow_mutation=False ) + "Event time." + latitude: float + "Event latitude." -class AimbatSnapshot(SQLModel, table=True): - """Class to store AIMBAT snapshots. + longitude: float + "Event longitude." - The AimbatSnapshot class does not actually save any parameter data. - It is used to keep track of the AimbatEventParametersSnapshot and - AimbatSeismogramParametersSnapshot instances. - """ + depth: float | None = None + "Event depth." 
- id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - date: PydanticTimestamp = Field( - default_factory=lambda: Timestamp.now(tz=timezone.utc), - unique=True, - allow_mutation=False, - sa_type=SAPandasTimestamp, - ) - comment: str | None = None - event_parameters_snapshot: AimbatEventParametersSnapshot = Relationship( - back_populates="snapshot", cascade_delete=True + seismograms: list[AimbatSeismogram] = Relationship( + back_populates="event", cascade_delete=True ) - seismogram_parameters_snapshots: list[AimbatSeismogramParametersSnapshot] = ( - Relationship(back_populates="snapshot", cascade_delete=True) + "List of seismograms of this event." + + parameters: AimbatEventParameters = Relationship( + back_populates="event", cascade_delete=True ) + "Event parameters." - event_id: uuid.UUID = Field( - default=None, foreign_key="aimbatevent.id", ondelete="CASCADE" + snapshots: list[AimbatSnapshot] = Relationship( + back_populates="event", cascade_delete=True ) - "Event ID this snapshot is associated with." + "List of snapshots." - event: AimbatEvent = Relationship(back_populates="snapshots") - "Event this snapshot is associated with." + if TYPE_CHECKING: + seismogram_count: int = 0 + station_count: int = 0 + + +AimbatEvent.seismogram_count = column_property( # type: ignore[assignment] + select(func.count(col(AimbatSeismogram.id))) + .where(col(AimbatSeismogram.event_id) == col(AimbatEvent.id)) + .correlate_except(AimbatSeismogram) + .scalar_subquery() +) +"Number of seismograms for this event." + +AimbatEvent.station_count = column_property( # type: ignore[assignment] + select(func.count(func.distinct(col(AimbatSeismogram.station_id)))) + .where(col(AimbatSeismogram.event_id) == col(AimbatEvent.id)) + .correlate_except(AimbatSeismogram) + .scalar_subquery() +) +"Number of unique stations for this event." 
+ + +class AimbatEventRead(SQLModel): + """Read model for AimbatEvent including computed counts.""" + + id: uuid.UUID + active: bool | None + time: PydanticTimestamp + latitude: float + longitude: float + depth: float | None + completed: bool = False + seismogram_count: int + station_count: int + + @classmethod + def from_event(cls, event: AimbatEvent) -> Self: + """Create an AimbatEventRead from an AimbatEvent ORM instance.""" + return cls( + id=event.id, + active=event.active, + time=event.time, + latitude=event.latitude, + longitude=event.longitude, + depth=event.depth, + completed=event.parameters.completed, + seismogram_count=event.seismogram_count, + station_count=event.station_count, + ) + + +class AimbatSnapshotRead(SQLModel): + """Read model for AimbatSnapshot with a seismogram count.""" + + id: uuid.UUID + date: PydanticTimestamp + comment: str | None + event_id: uuid.UUID + seismogram_count: int + + @classmethod + def from_snapshot(cls, snapshot: AimbatSnapshot) -> Self: + """Create an AimbatSnapshotRead from an AimbatSnapshot ORM instance.""" + return cls( + id=snapshot.id, + date=snapshot.date, + comment=snapshot.comment, + event_id=snapshot.event_id, + seismogram_count=len(snapshot.seismogram_parameters_snapshots), + ) type AimbatTypes = ( diff --git a/src/aimbat/utils/__init__.py b/src/aimbat/utils/__init__.py index b7575f40..0e2c4a85 100644 --- a/src/aimbat/utils/__init__.py +++ b/src/aimbat/utils/__init__.py @@ -6,8 +6,6 @@ _internal_names = set(dir()) from ._json import * -from ._active_event import * -from ._checkdata import * from ._sampledata import * from ._style import * from ._uuid import * diff --git a/src/aimbat/utils/_active_event.py b/src/aimbat/utils/_active_event.py deleted file mode 100644 index f313a141..00000000 --- a/src/aimbat/utils/_active_event.py +++ /dev/null @@ -1,38 +0,0 @@ -from aimbat.logger import logger -from aimbat.models import AimbatEvent -from aimbat.cli._common import HINTS -from sqlmodel import Session, select 
-from sqlalchemy.exc import NoResultFound - -__all__ = ["get_active_event"] - - -def get_active_event(session: Session) -> AimbatEvent: - """ - Return the currently active event (i.e. the one being processed). - - Args: - session: SQL session. - - Returns: - Active Event - - Raises - NoResultFound: When no event is active. - """ - - logger.debug("Attempting to determine active event.") - - select_active_event = select(AimbatEvent).where(AimbatEvent.active == 1) - - # NOTE: While there technically can be no active event in the database, - # we typically don't really want to go beyond this point when that is the - # case. Hence we call `one` rather than `one_or_none`. - try: - active_event = session.exec(select_active_event).one() - except NoResultFound: - raise NoResultFound(f"No active event found. {HINTS.ACTIVATE_EVENT}") - - logger.debug(f"Active event: {active_event.id}") - - return active_event diff --git a/src/aimbat/utils/_checkdata.py b/src/aimbat/utils/_checkdata.py deleted file mode 100644 index 4c54a558..00000000 --- a/src/aimbat/utils/_checkdata.py +++ /dev/null @@ -1,148 +0,0 @@ -from aimbat.logger import logger -from pysmo import Station, Event, Seismogram -from pathlib import Path - -__all__ = ["run_checks"] - - -def checkdata_station(station: Station) -> list[str]: - """Check if station information is complete. - - Args: - station: station object to test. - """ - - logger.info("Checking station information.") - - issues = list() - - try: - assert station.name is not None - except (AssertionError, Exception): - issue = "No station name found in file." - issues.append(issue) - - try: - assert station.latitude is not None - except (AssertionError, Exception): - issue = "No station latitude found in file." - issues.append(issue) - - try: - assert station.longitude is not None - except (AssertionError, Exception): - issue = "No station longitude found in file." 
- issues.append(issue) - - return issues - - -def checkdata_event(event: Event) -> list[str]: - """Check if event information is complete. - - Args: - event: event object to test. - """ - - logger.info("Checking event information.") - - issues = list() - - try: - assert event.latitude is not None - except (AssertionError, Exception): - issue = "No event latitude found in file." - issues.append(issue) - - try: - assert event.longitude is not None - except (AssertionError, Exception): - issue = "No event longitude found in file." - issues.append(issue) - - try: - assert event.time is not None - except (AssertionError, Exception): - issue = "No event time found in file." - issues.append(issue) - - return issues - - -def checkdata_seismogram(seismogram: Seismogram) -> list[str]: - """Check if seismogram information is complete. - - Args: - seismogram: seismogram object to test. - """ - - logger.info("Checking seismogram information.") - - issues = list() - try: - assert seismogram.data is not None - assert len(seismogram.data) > 0 - except (AssertionError, Exception): - issue = "No seismogram data found in file." - issues.append(issue) - - return issues - - -def run_checks(sacfiles: list[Path]) -> None: - """Run all checks on one or more SAC files. - - Args: - sacfiles: SAC files to test. 
- """ - - logger.info("Running all checks.") - - from pysmo.classes import SAC - - def checkmark() -> None: - print("\N{CHECK MARK}", end="") - - def crossmark() -> None: - print("\N{BALLOT X}", end="") - - all_issues = dict() - - for sacfile in sacfiles: - issues = list() - my_sac = SAC.from_file(str(sacfile)) - print(f"\n{sacfile}: ", end="") - - station_issues = checkdata_station(my_sac.station) - if len(station_issues) == 0: - checkmark() - else: - issues.extend(station_issues) - crossmark() - - event_issues = checkdata_event(my_sac.event) - if len(event_issues) == 0: - checkmark() - else: - issues.extend(event_issues) - crossmark() - - seismogram_issues = checkdata_seismogram(my_sac.seismogram) - if len(seismogram_issues) == 0: - checkmark() - else: - issues.extend(seismogram_issues) - crossmark() - - if len(issues) > 0: - all_issues[sacfile] = issues - - if len(all_issues) == 0: - print("\n\nNo issues found!") - return - - print("\n\nPlease fix the following issues before proceeding:") - for sacfile, issues in all_issues.items(): - print(f"\n file: {sacfile}:") - for issue in issues: - print(f" - {issue}") diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/baseline/tests.test_seismogram.TestSeismogramPlot.test_lib_plotseis_mpl.png b/tests/baseline/tests.test_seismogram.TestSeismogramPlot.test_lib_plotseis_mpl.png deleted file mode 100644 index 84c02e55917ef3244b03a4f20b91bf32c7711b87..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 155122 zcmeFZWmJ`Y^ewsp1rZb!C8b0`B&AynKw3H^mG15okdRWkTafNf1rd-2>F(~5x@+ru z{`Zb?zn*bEop%g|uL65N&+l0==Uj7b|JPC?H?9+3M<5V4L|?s>MIg{95C~M|Yv}MV zv3Bn*;15iTS1Q&B1dcZH3nlAmrXd0afe?N9Ox_`WeZt=6jsi*J%#_*ZPBW*ih)Lt+ zWQycRS$xMI1E6w6OzA!h5z2>RTc{B15aZ;X}^R7Yp-|6=Wqm*Zw-Wp&&r z8>-xGr}Ga8_%<>^q^YU-;U;POkfyt9yRA~5YD=Rh+Rpxdb7yCeMx9fU$w;yAo}P{l zvwqjTAAv+KKH@X<7?l~Y5q}N~lde8D9V>rgI?9$`yDRqo{resRHg6xFgu$JJp9X9! 
z#=q%RE6jp43TP-OJV?wcJ)4`G!}3eswp>58B*sP`tuXK2>wYS2YkQ!wb(vF8Kbzu9 z_O{Uj#lgWLx!0g_qkK#%_9;92LHN*uB0=4+a6^1{TyY1Lw$o|}6yl&^j= zb$KE3?AbG~Grf`+B@IU(%ef!D8R7&sJfy@yA6h;VBpuYR#dok<)Y`87o|&Gu5MGkH zW@%|v+jAB7xf-8dXRK|Xu8fR~q*5sL{-|-m&NnP3TvF1`a1lG@ zb{a%0_hq|JoQYj|>tIN832yM)&`{KvSA>G><5jFwBEcr1lkhgIlbnrdAp&Be zqL`DDliuFmp`SlnwV!?c`gMO}ENXq-#OZLoElVctXGh1}T0z62FS*&rIznG7ZDNh#BN<#O-=0)6VuZrt80`O3*orW&xAbwRJbSnie(o^dCb9a znEU%p(OVzY8aq?3{qxf!IvgCFt69g#$4(u=_qbEz6A~gqLPB_~=Kc8V3JYJen@_5t z81;XVJoqgtEv>ZnJ5TQ6!-rzD8H{!NvRAXx)6>HuO_#b7j9P<9+jUrIGmIuPdwP0G zc(`%zy9YrQqmzLbNqyz{U|P;QGhr zY8IS%gGP0mj_lKm>_FG?5|kg52Eslcl6g@hHa1>gw6WMd2g?*W>k) zZeo*nP*{)|=(tD(ci*x-^L=M`_ghPgk2q6ZxMUp1U!2rc7Bj-EO1X^kh^%a>C84^kmo9~&TnjV?yvNtIGk=&tmWb1$P$Vfi&i7zaBHfY-2F^j`toqh93`4r z8?`f*y(H6^jd-yu0jrV3yw*~2fsoArm*Gu0U)s^43n7OI;ipfZ^usH%vjZT-F!K~U z)<;U#BB~T}6vu6c)OGarp;QI1PBv&1n6sUKATdKw(9(X@sFuo{Yo{xCz{PbOcx1`4 zWOd`7Ih-54O0hR2bVO)qXoDIVWphK+6^iG6)a($s2!XTh@4`_-2cDICUA!BHEiEnk zTT@9Z{h4#q(~au(6D`9<@7f@DBHx&C8}jZRA3OXx+z?k#xQj&_GC(v!_~E9 zE95+)r*Cd;ZB6XfVcCIr!^6dWy|w7FJKwfGUKK{5;o$jIE_1X$Cp(*v+vJy^8FX@FvvKnflYsxdvV0Kq;nMWh;s?<-3!dgp(7$9l2=rmot@o{>e#jCQiwrOEjPhz zgc`VR=;r2z2tp@>r+4S>-I0ok;Ts!ME6_F<;43eR!(v)6lHrZ;%a_fLBbgNyoHuXY zoL^d!)6fWxi+fYI;EO|!o<)G2^z|wh{%Z{lDX678J3GiHE0w@A@>GB6dzBiR?PmR{ znF+@muULM{X7?~;1T{#ivwp#IbaMJ#^91SyJ=BEhl5T-HPYklcOk_iZi+Q5ke#bcv z$|ee=z!UiI;5|+jriq5b%vHq#jZP@^qZEjhp#n{>%%AoXwgfl=#}tr(r&uCRMq;$o z648QEQaABvq|%|AA4Eolha=dRGM+#Kl9!i%Ei1blf+^UQ{!6efc;~w>&JsMIrMjcZ zceBm@hcv5Vw0ggywY!xF{Niz-C-a@}_XpnN?$ekn)bA!k7KgX*-VyORFxS{^d>b4* zFrTVu($eZles%_V`YWEhWK!!&TU(pUb`yr+_qm46>b2#Gnj_I}_JJ!DtvHXFO@^5x z8Px+C8ZI>%?F|eb`TP6dK*!*UGDaB1-gAQ(bAd{51s(ljVHcC~;v5OA&P>B)^~1M< zc{it5GL!U1iggcALx03-!h_Q;Q{8aok<#IaLm?(bWuvD4@+>wi4A0ZkbLrsAQLhNO z?#b=~HX(Cz@2$d%uf)6!_K+0ZAzHr+G@t$mA@7C)E1fBoup0Ef4DN2U)bKlm)w(OD ztel(*6e*&JHHoWNu53en{4HKH|5^NaAxd+3s^RII3NziE*%padww3rJ%g0O&XDsJ` z4&L>q3YkpS#<-pT`4kn^8gQ4+V5ImV98~sD6snQgy!2g{#-z?MD=VwvQbRmxfs+U7 
zHFmm#*$Osmzc-j7M%vo^IZQ{-GE4_VB_(wsSptbze@sdXU86)E16x&LAu~D-*)M=1 zVsGExhiChclQSyG{UU}@y?Vg7RfDtQnVQ-?OG`_d3B7kOoSYmT*<*)#dvE0o{u#PC z`{0XILVK#v$4caVK=-k*voi~A?e7meJ97~e6WcyGFfBZ@k&f+-`e9Uve)HZddHL`< zdRtvy8TCK@OJ@)vY5LuXwa|TBP8On|Q#c)Mc8-=A$M8FIVBEY%K|z7Ys!}n{#dd#) z;3g6g5rH^t{GInkrNV5&5law>4i7iR91lnIPXIcN)VuOhQc_C9u=vpBSH4FMr1geq$E$#;9kpK?=Zu=Z~>= zeSLj78JTCw%F1D!4Myv=zYl!56=G((d!wY*$&r9{Dbd;QyGOcpL&*3q zb_MQJQ5g>9lb#-KP(6Km$z)#e1Uk+7aFN&exZ2tI`C1Sor`b3cV)0jumvEY)O)x&P znHvG~B4`FAw!OVQJs0v+Z*#ng{!KYHoUU2bLZs6P05yNb99;;$r7}LKP>3aY*3@uh z^PPu^sk7XZlB-sA+pEcSUf`3uXvX=<(qgOVEka{7Sq1n1Ih*nL6-{#;9LVt3CkE-vX=T7|<<<@gGTy!r_ByIThO9G!k zwRv-*rfM}ihnP(S^V+rP-*3zx0$zqQ&+kQ0K76>qgtMSJbY~T=Lcc6gHo$qa%9nG> zC5*11_VX5Wh43bf#!AaMh~h;ECmMG4!9)#td7UKkIrt`T4H{QB8*xybgj}Xni(bh{ zRGwPZ=YzGO{(SWsbn8^;8XUuls;b1$kOedQpEw_`@2PH*^4bm187DvwOc45rj}kwm ztu0B7M;-Q6BUUzm+j_CkZbQ|qcIOAWD+eSDz-q+E$jCt-4k^!o!5&RqC(tR@pPii< z=2P|1jW07;;yU$i-s9@giadfQs^NN|fX}2cKCPgH#^Q2p0spU7z!Z%;J4?fk>p~T1}{@7FC&hKdC|rjRqgr|5>GTI#_K1ddBUv|EMF1sTJ@ebf@HJzN%1Y_%T{uC9jO0E9Jjn| z;Nt2EO>8FQ>BThQ;44?J?o2vwu@6tyI_|9xbwCUul#6xz&$<2_ZFPHN;;o+m4L2Ms zX8-_tfI45j=U6`E0m?~_j&(j~5x_s&mb*%i?CtFr78kz&InTD+9CvKD4G0JbZ)j*h z9oSvyAfYeF&X$E}RcLBz5-S+W0mPQA>2{LQYic@PDFdM^O@xhpmyj@v%xRfYK~eG0 z{$4EZC0C=Nrg$X1q0Izj)FZ)@`>3rVhw;28!jYF68PT^*|^*xlKY0si}#o?c?YvX#^b!b-nvil;=clV@Qk{9KJfV?LhTu2|eX*#WB9PBPFE@r zDA1@I0LGgCOV2v(6rPc2+7MLi_fFHE81lgWA_@u$tSl^ElaY}9gF!e?2>%&lC=`euKn~x&e@ChoMI$D) zCyQ}+@bQaH$2gots`2=N7a)-jz^p=-6D#2mGIjG4sKdXI%D{YEm?6LcIJ1Y0jKRah z!%otq_8X%@K)?(jbdJWYaR9B-D;G9SK2i89t@&MMe`G-r3Ak{@ahg-p(zZ?J^YZe> zDdwqKLI*~Y8T7UFiJB;gM;7Zv3B9iP5emdexhYAV)4|(|vlG6<;dg?a^uI`XY@j=P z0EZjd)36;>SRXC>h=y@A5v9=%u%0-9NvkhiR1G@uUsPr4b}*>K#l>aO&3`o9zwg99*BYV^=?iopjh&Ry z{ro`5tY(A5c^ieD%CoYnY6tMsg}N{8Up=w8p-YI!BU<8%ut*AkmXzx1>ZS*hP&D*` z#@8Otex#WI`oe1n0I4n+)#$@DpQpTs4<9_xDpDr?)tKAAf^0Lq_M3cBFK++6^4U}i z8FY%gHx+u&t`0t|<>u$>L9Z9=yhd&tC`=W?e@H0zMS}A2i9S|c zTI~{~0+HmH@JZ=!IG*tfTpYhMt2_9Or5@;>@sR_8vVC}%&-t{~yy09}S((suv=n6; 
z`t<`EntrG&=xb8(T(`1f*oQkhK0^1wB_R`6fj4h{yMh)nL` zJq0DD5~a5pR^#n0`DcWs@}EFGVH z-g9<)NKI`uW?GR@yj93m_K^p{JxQSHf`r1L!U^W&t^8pDK@9rI+QL|kJ(Kj)b2g~6 z8GwO60zqYV+Bd+bS8lA?sy9x;Xx3pV(rLR5O-#tafeQhY37d$;8~Q*rfJb7e;0S0} zRP7xdM8MlbJCujkvjK-A@%t7(%D1lmez~qK#|KMCXJOVZZfz(0m-F|MG_Bm?KCneS zwI|R;6q`-(B)T4|FgBc7L6ZS4g9YVVd$>>=xI|mx*74{k9t{l*1QblKPgEkK{m#%% zqo2Hs<}f9$s;VkA=zR|%1dpyAsEejKiS^>I6ljHNP!A7ddI%WRenN(I6>5L)>Ue_n zH=1|Mkx7w~k^*VU3~>fBl3#GJ-r@QP-|kN;E}q0wN}$}nNS^1i-zx&_v-hhU4b+$x zDE6{Un$WM0H!7waX8mcRS#+;&Zf*vWKTQM>c~L+X`(KnB!JYL1`UP_8DzyfX2RB1L ztpmyd01SO7M41y(P~b-Zu5m{^`&ZgN4hnJO_~B++?R9c$F5g!hm_JK&w+ilN`k&`p zaGcqn6~@0wAZZ4HY_m0)urXF4zVrlnSAOmkRhp-3>FED=2Jyf71PBI}xVh7bgCN(S z(8T(IWO0p>G1@dEV>~|kr#c3X#&Rk=L4*6ZJ>ZW@EsjVK7d-uBS5c#cVTL3ZiX00y zUoFW@MfJEI7xZ00fuNEN`AYBqz0$ReJ2BLF{NocST=FW8p5hO@aZ_nuA@0K;Z6hWe zKpuWXwuP;%YzBu3Qvoi%wnIMSkb*_wjXT=_^phWY7Zze`*D7`2>1M|(W%zy)1@gxz zW5)q8tv5t%OiHrWX&{Ny?9=Ce$5oPMQ}q*9uGS>)*`-W3qQnwmr@y-42ZA%rfA5=- zp4-}8@wxm{#68~9q(^m$X|iZ=L6?|+@6o`Dh{$*6h6NSR_2Bg>QSTDghl+F$@jHAN zA^iRJB>ugJ7_E?$)RV-fyzL;HhZ`0gDZs^Kv5>EV^6yn-spWW1P88L=;($;VDw`;? 
zvZA~SL!K6+6M-qB*!9%z!wr%TzP=ccVrp(D3x|~CA)NM`YCt-J8XBHD)g=PCWO3Zp z+1=eGo-YIN0WI>Sh)CMd+RDIJyd5;{;!pw-UI&3%Wwom}SdfusA9W$<)i zVH&pw^;;^+BPsFkGHEkeZ8o0bG;mHXHIJ51MvC5HAgKi(U%X0*o_5KI17v2^exEoJ zP(V*DUzz~Qu3l|3^Ciyg+r&gdbMs38#r$rd*MVN_larIfzXb#+n$w)je#(`T-)Ub9 zWYlr14hGN)%&J$Yo$CVwu^Fr8Z}ai<$MD)SLJtD8Fmf z);LQVOx+kLRS`f%E9!l;f}}_g3dmB1tlvOqfVIx8tzk8qR!j(k;ukI8%0s7^Te!r3 zam3^LXI~pCs0*lRhmH4HS;Ks=2zDFMaJK)f6`(O|f4hPl3Xr*rqA0in74V#oFJVQLn3V==g&>R6&oZ|6u9R&TRK(sB#d#{Sa_kn)iSr&4Bg7w$1r?djvk*F9!u-#U-BN6>))D-qAtoA?C; zhRci7wx2(>kbGzVE}bIt+vaNkm6&yY;v&@#5HwE+LZo0kGO@cGjCj4VDi$GkCnY5` zKK@lYb_*RcLZR@!Wr<-X#JG>_iFyLocgC@C$D zEqHD@)v-0*e(rba@v#_F5l|Jfbt&cyPSR&rSI0tppt3*vwgb}EwVOA~FG=z7e?Y?& zi{mg|Hw2Olx=w1Z!7BWihN5C5|J2S3%_0Lx#5yc9>+5GqIPfkP5il`AVXm&konDp@ ztR$dQTD-Nn{7TW5T`%qi&4$p4i^Mg`D+Go&em48#aoa58BB3U96(j)W-K_jhc8@UV zhtr9xi_0~Iyuw^wE@O<;!v1ph3xO!EOsY#7wdK){QUj3R|mcV%EI8}tZ+K9 z)m~xAQOMxMbv2?@5pEYD=0w6yuMM^!Odp!Zk zd(YAV0>40uV@XB^MMFd2kXo6M@soq&Gd;cQpSrrzMPcCKU74jG&Q1Z*KL^LNu-n1Z z3e#VhKotUJgzEhxT{LX{1hg7L7M*LHlMYmXh@a&Q-XgCV!Dknq4q@Mvj4@fFOB8FexvK*?1*2KLK2% zw5)o9Z|^m2Z2|c~bc(L7nd3jt^%k}7>o6iOaalbam4kHeJ}+D}V`v$(vpY#E$c{bxtwdQY zE4y!cef2ub8;FDO$;s`nV~P9qtUVbuxf9(+UQO5kks&(-vxY^a%IM*Crem*;#@GgY7!r&#zhD`rGDThS4(Q5zUAhGXuqH+C2)n zlAN6Uh3b!t;roZ6@-qCprkLX5*;q{@w9sne(8fjsL~%)&Uw9P~&7@MJKwV`_2HdM|}KzY>Gn$5(+b_{&zLDD1MR!ttx3vDR%iG)6?aM`T2XFh>_9p z^WRO0KFWB+RDfvA9vX|&M5(OudCM%Qp@DwOiN8vZRXFG1`CX78njIhfdrR#S1z8ot zoRf)`3=ISn@|}Gfil>)^x5}(N)5Tfo;bkBFec9P~1$jZe#h8UMWie)AV>eD--Z7EJ z?74*u8hB){>c|8?%-((O_c8o07VH`crR>Qi3Xe<_Fjq&+&dlmStZff@P#d3jk`BexH}FJ${x z_J#&MDr;)ww97cMP(YndtyI=73df}_fuTdQkoaH7xAcI!O7HoefGY+2!T!k_Dz1{! 
zk#L;W)hjPw%c4@teHZ|M+${7D9xgrLC%u2)h$kh<5c)NDVkxGgmKGZa$^62@TQlnt z)otFAU-#r3AeIRN|DBDf2HElH3we1-=C?)hJPm`Ycpku3IYzJ0F*6e%TXWcy$llAz z7m&_?f?%2Q7s$@W4_u{Kiz9DxGG|#?$;{6yL?(k(7N#Jv;d@XHZwi{3@gSesPb6-( z-1+O^YUXb1Kx(qX~N z@O!=g^Lt)rzfZV~*u$H_u9;miU$25FFf2nO@*_191j}g2}N=hQY(|ZlrUalVed-UG_9{uk?VdmI7 zUpF7B&93-Mf2C8Y4v@=#R~D(wrrKLoiC{B$e|35zFEcrr&;u$1Q2;0wjL-ij1DA0| zhTzMG5vp|OeEj~URkDj_%?4$OHl#UachY(T(wY!G$>>)aTU&$PzmKhd@X6`i(eV9& ztP{^hg=yOA9o@Td%NPI7H#Z^@wU=Eozq+!jceDke%4HSc(PYBdWWbihg4j7a`jVV{ zJ1Y-`8%MHGgUc!WJR;&uLrsmTZr*0@8j$hhH~;U;bUwb;v%GC*m73<>Sy+W3E=}ek zuNawf$G{j3Mw?nb-d8?F>9-i=-G5$?Is7(d-y9n|TvmSIx6AsYQ|p2J4PD)1v*E_n z{rv`0O{i$ZG=CGdL{Sb+_L*@49pZ^h*Xuk=f&O6ww+s_``GJ9G1LLJ*gIsjbz{Ag6 znB(^R_;9Q}=VLqNhfNUcUyn<=+nIcPls!YNtuD%4F@fR5{|iJ_6(U@t^fF^f-(5RX z_RusM%7PuOCL@MS#^9d#R7^z&hdV7TuT^4%UT&ySxG%j@Rw)}JUY3BhAV+ zJS;0nMUCS*@D6|ry&(Qu=Hkmf%U<I&0fF~pwEdRCo048- zyxhXV$VXR!dtI}%`{CJ8fu7#;R-3uuNzSg(2YCT=>8HvnqRC-+)ETd$QrrR2)qeg< zJykmL3q6{It%qW@o~NNq8S%Fo)M6@j;?}Y^?oUUY=oA@051IVWBFD2U!cAx_BZMD z8KmDmf7d&`HLoMRc3U1V(4^a7SpT~bLkS$W{*o|Mf1qb)=e9b!q-Yd^nN(Zm*n-#* zaa_48f}u@OXf};%^*&w^5eA<4=xCHYq$-zv#G$AqF>6Pgx>Bm6K8yjl&*MM;7b-<8 zbMiCZila^Up#1zUD(aGu)Ga3xY%yI)`2iku(mxlf ze%`O9KynbK7sbi*W zj3f9wKH)M#vJhWie{NyzQnUm<$) zTiO<#zQHRL9AmV`f?(gIr7wM3qA+$EJ)e}0uk-!Gjqu--Chj2FXVP06tqPZ$UX9we z=x}p{EhD0R?-zgeyzFO--H}WMn1`4ChLuUVzGk z{>4_Fs`-!3K4t{wZyve%0XTk1yj;L7a#tyunr@Yqr0CyH&M{oHEe>7=YYb=-PaL+j zz&;|avS>Osi>TDuc?*NEwzG=K=Rg0P$ae*CSxvEdPM=ij|XpP(vX+YDl_5xjieJhY`Uo zM6(|ny7=g-ii-c0f!`kSxm16^P+&M!&yUmyz%!HSvYHef{eDv2*Ibzp)Qyyi0;V5} zq=>>V=?Hd|Yug2@e)T1XI!9La7=@fPycXl$0X}-H>a%kzxCE`$v$NDpX}HhtN&h-#p-d9a-+;lh9RMc4d$R1{;V`%K!x7@ z=bc0cD41BrXo3JD0Z&&Nl+i%Lf>~{EZ!bu!q#oc%VUI1yejHKDXL0RX<3ijA^Xz$gMs;zV6OzT*k&qxH{;O%fR?0#1|un$8P?W!rc@LT} zz_}F_MR0nwWo{Ek-7jNh-Gop0VW!fRm;0nIJAw`q69ZvkY0>z-X+~2C&B_IV%HCNR zT!7+F$B>a(!@HrT9&?MR4QplPL3k7%IXNg|*x(3$$j!|yscr)DJgK^twC{!Lm=C zbPRymzO166p0xSg?DMy#2w4M;Fj)m57||jU1N|tzjl?X$PzRjSS=rh5!43h_ob|JN 
zp^)=}|3WMyIhz`>^P3&}z8dMh9wJh1)xkncMU@yL9GsATyXP3&yDYC?hhoLMaO{Hy zfPf-^urT}`7_qa%r#O~!<`5C_H9Yct%UF#D9=!({ztcMoy4hJH;kN7bjU_4FQsPwQ zmC+T&#Zoa$oSeaL@;9C>7PGHU6nX*)9EmEkwWKV|4iL+U*eLT94H=bM?NVhp^7Srs zIWW3O#`n{CtF9F$TPpP~+zT|pWFXoudH!n+Vu?lN=*=TVy$c$HPFCzUj0M{}a(1@2 zlXux%1rfD%{9qVFHPU-wo{Oodj<|hKtzUMwZIjK4a>(Hqwn6$>!& z!Gka~J%h4UTjB>dV6mOz`;(1WTBUxk<;$HnMoa7}?yq(iLn{v5H6Vn54C=1qu3QF26F~cB2J91@*p zo<(|B9%{arLwQx`#k#;qz54i;2?s03o4dNY3WP>U-;x9nJ1M0D-Yv3D&`p_c{p2L{-?- zb*wm(q^0|V20DLczUvRrZ#Hxpm9cqEu-ovk+I1qo*#yFlWudv847 z>@Ri|ge>?X!RK2Al1uk0J9u7Ta8?w*aJBOPCwAWu7WoR+dt| z(f-D~&Zq1X<#kgI5~lj&`%tEq51e>xSDW>dTz@ZL;NjsBf$r2YWoU;%N*cIB@oA+S6%*2A(uol?A#jLDY zx)>Sf*T+Q{t`cT*{_~W4ZL$5w2xV=_6=B6 zR6e#zxW0dD`sl+f&Wff5&cyis!BJS`2X6L=)O>Z6VL2JkA_clWyxYX4-|XsL)KA?l zh1}f2Hulu`Vn2V|6A59VZI}9GU7#}|{f$uf zz=w|omZmuK`Znb7A0&E%iE6#T6L5^~^}N7AwsJDZ`CEK^e9CoB>|i$4K@QzV`%!;I zKk;(hTeO6!cYm4)j$*!A8$-qTd*L+X;GlS^aQ3lgD{4_7@;W+H+@F^uRxheKI3T$q zE^d$4T0SdsW~pkw+MM{pmkZd$&?s@-7g+)i?89ADiZ+8%`}L=jqh)a`WOFU(AL+vQ z`IcQr!Wwa4tdM83UKtk}dg_p}>Sm{Cy3o~ripf%sTKi7(5+n-dP)JhK2LGpnkw_jlhav<8n`Z&PgP z>hA$Ckt6+La9>gpjm5Cef+9Hsel)#5`zy$hUZ1RsgBeWX0XNL3=!%qIu@krD z<$tNh92<{RrZ=Hu&^0!s6{3kTkUHDDqx2^E+o!qeI|Kb)Lqux1CX;e<)~02VGMx8w zzd-BRV?*&v(!|x(y(usMU|Q3eD&Ag=HP=*ExAHE*+$~%|6U!&O9UYhld*25K?_Q?f zFOP~CiDOd96BZLQGQJH0ibB|UP;v3bqm1S8at0l};yF~CpHey5-#U6fb(xuQAw*km zWW32NF8-XrYrQgDL51L-m;k>Id14~E-DcqT)+H;|QU{+V4GkCBkVsfc!C#}!d2V{~M=LkCOGH#_nd_g|F6)GO*EtJ|Dzlp0F3oKB!pDgTeBlQ!T&i!e6VkBI8yrt($B&m5?TUq%iJ!3?Id%(K! 
zem`bJR`%NWc2327us+_Ai7DWDrG!50@R z7FEer%V9#{w5Ev4Y5L7^f~UH21UA98YCP&#;rGT0loXR~_eh2zPA&GI&>YF@ChQCvDH^LTJbcf>s|X8f=fLNFU6-Hm~j ztB?OUI};EQnsb-HUlv#raaslEY5Yh-T6+8WLqWeo?)Yp9`5E-vKh2A7NK zx4&V{&(Ckeq}U3&=R4zBsWwfQovVOUxcK+5X>0894mWFR;}RsB49TKhU?lufNT?S) zaJ!e!_ljHPWFIld`LF#B`h0P|GBeY3aC(@~|1w4~!-4Oi&zqyI%;P%?PhZP4ZO*;l z0tY@UcutBJT&A8y($87^IC1h4}l=K%YSR zer_Bp=oM|NNZkza^_^2X<+iuB(HXC_L^c|N))0}SK_wUKac+4rOOdzt^D4|t*#rqL zZxSp#Ql<{Wd_fhm2%BdAO-3`-E@5RxP|`zIAKZXQK{cyoWuzq2m3v?y9F|kY#>#a?jyBf={AcFMO^KElS~yQm_NS`C zPvSeArk}-gNI%|DXqepVume8I@d~2pSr;;YUAJ#2g(v`-k}N-c{b) zGbF(mu5N2yO}!7wt4;hDxkMhe^LRw@6g|1dsSG^bRH^8GOh>9H<3i|N?#zjTp z;ka=TEd*6n`mSyl0j1fG?;E34Mlv!6O@=q@bjsG&%}^x~ZsOb!k{he41ql%DOOQ)TBW_DM zw5zMDcY+h3nJR?u8?@XmWXk~{(RHlGL0wQCJH*QJlkYppahdVZ>_iG0e?fgLlpg=| zf`%^?0%Vs#Bvq!D7s3rB;N5NpgQf)3q*tI1fgeDy(@ztFq)J;+Os4ne_r&KVF(3oQ z;1h`k`eHa?U_@3J`=VDqrfee9O8RPFT&x_L*abcHmzbm}TQ@{YX~@SkaTmUd^{MW_ z@4hP))-;gB%7aBz>f;ktqeD?qEG${Ys#R-^DAmY7gd0 zlhIQD4I1ptZP#Zg)ESjeC>j&X&8GzFVJ{KsJwt*n`mKiUB}`2B$f7L25G$)!EW|PN zu2vUiv86b#-a%_HK8_LGIx)uRHnT6%hmqCi_+DVsu4eA#PZkw>4fl`CS4j4O@0+ zy2r0_b01XL*~D4pM2N)&wT~t#aFA#O2Yd7I;$`>W${*_QYVkSD&##RClC~N1m^n_x zX`RqqU-y#tgN9Z_+mihr^&%>^9VClNZVHd6SWcSddjhOA9UYF- zJ(L`gk$t~ge0nb~UM?RzG8!c{XJlB|^ulm_z#ZeCk|J^a27l)H*--ch{p%TuX4G zO4mB0=gh`=Y?hHmMR8l(vbq9o0_Qj92p-!%p8U=qc4j{w^`(g@@z`c_cb#3Fqx}5o z5ERM4zAm=awedJO{!E$7bw*#{be%QPWsCnU0Ta>zh_o3YYGY&Cpfvo(C+Ppta+LrB zgQvRsH?LBWkWk>OSAu9~Jzw#nejl28f^1dJ$au`deJi)Ws3P?}NeJ(6u%*kXsfz(x0Njn!LDp?vo~RLeEJ64M{&Wb)EewjjWjE#AKZbJ;Op? 
zF1=k#^Yp;ave$2mLX~BnG2!5VdEeWy5#i+MfOP2y98aT(O*w~*)t^TaiG*0?C27_j zA*Nv+un8Qc5p+sO->inqj!>c|Nd6c&NQsOfj(u)t6#sSxKGx8K2iqc|m}%$dC+Vhd zY|3>16eT0**x6s&zK>5h0h6Qf#)Ppx)(a$n7&(Td!=)Xya6) zz|cP!rtX~M7HIH2hAl>w=ige%!+0E+Wzf;c7v_6@wD@G+r>bIj@@ZmW!6x5yl=gz3 zQ5jKeJj4iFDZ7PjGaCISE-H_Z?QRCE$ z3!3^EaLmRT1ve|xXyEQ1S!yQvRJyx=@^zVor8X>1j1%2ymOMS`e|Ku9Xt!)Jexq}R zG&^_x)4Yz`BYK_CD1wf$ip)8~3mF4;Vo-zM0m<$NCY|&D?rR7>kW_qz%Ct|uh#(N& zPc)4lGi7AFx> z<69!{u;}rzI=_E&U}g*dE=+X}RG0|25xfhAy37_?ho{Hgi1fu#(_G9*#WimYa-ZI4 z34`Sb1a`4%j4s>n@kf`Er0!48_vHYVu_LY3kI~SGik&(0u+e{e`#dokcm$^PwO~@O zTT}*S%-d#)_#7GQOKW@cI#xF$ET&AOtmI5HPkCS|q4{l*1?&=oPiRUfMknR`5JhnH9{ zNg*W{C`*C6d+#)buDTi-A1!ej|9Dr%IV7AD!HaV**?GhM2dTgRgM(GwhEM#jRoZa3 zVn1O+$@pyXJSh$qvw~s!pT~X^Tkv-$=evt(w0yT!+F+MIv&nw%q z4{eQ!m@IYeKV|IsT2hqm9h8y5M^4&VN+qIfwN7~C?LE-(u4rVr;vL56W4ai;+uz^< zDcQmk$;b@KRFMY;7HyxQ?x(C-=9(Jazj6sYhD|t-z8T=1;J!kdRY1Y`h)M4L>%wk> zaAvLJ*MPhkCia7Ykr;rj!!jCO>&pg-l9CH6R3Z*3j@Af>vCnk9e!9KJ}zw zFzM=SkK0e}^bLH?%6_^u0sa{D68-M{imre&mtcC$u1`@K@1MMT6vOq291E*zaw_TD zx2Z=A6W^2;1=1C{ch!AwmfG4bl#gOOr}<5&F}u=6=R8sKlK>@`753xd zlP!1>g5Q`|`Pjn_LP0R;ySFc|OwBLs=)Anb`RA1Nd|MkoJ#(*@d;f_*!rPJb_ApO(`(mRXEy{*bQ*67q{C?AAZ~PzJl1zKMOEN0;I?&;>PBVkUhZ+|>RZh0{ZbXWpF5pQ=G0ESs9t=9XIL z=XEyuEIhictQ<0J*PNs~r1s^Wq2Psm{>c&Jz`+yS`tAD$Ctr|13t{wBEq6cFeFxwr zQLHQuOuTV`DUbyuu;!E~>dl}A!Of)=!;_~%R~54?=I@xJJ#(UIKhGX%AXhD?PpuvI z&@fMt&Ez%RqeElg^5e5Sk7~z4qe&hda#;k=sAHb4_^A^L3JMSr zmVp+ZDI;T?UyCjh)HisdMu|hi2I>j18M1OEQWbJyNt`c>S=dleOlTR--=K!Bt{Ra! zOrwAjfc!9+Yp(OR5I2c}`1tJ^(Fq6w<-UY++cQGh&lJ+4pw9To5_2twe`M;c^q#Jl zvG3Hd)~T&4@zLhQm;nN{&}4+^_>U8-Lm|oOVNlAO3Q2=9p4{J$#l$WssP*o^0wV2Y z9LcNQd4T7aw#oKNj4C(ck0`6y9?rMFhSIu?wSVpIkbWO+J7L8Qu3^4+HEMlep(zLW4 zluNHlj4cu5r)4j6wWPm~kmQaIxylWr*ee@3xjG)1G~K@sHn6eM^i6RGL8F9W0V90! 
z4DF)H_|N9}d$K{osYaItfXHEf%$HwVOzmgdn|f8IGp=MeO4Gq7_&CiIWFYaUhW}jh3a28?~HQc3XjZcH-}^?0ZvJMi?FLczJrYUA?JEIefMs z@W$GNz2jbjy~E;4<_Y7|B%=&MSG5}f*=SGvEpt zzenhorq~f)$V~RsCAzA&*PHyvCtO*ksrvKn{G9{-Gz@#{xS+bbcheOWWOO*>jLLSO z*>R{04yw6+7{k?HdViGy^qs#(Nf7)5n8uP|;R(5e1^@aT)L)RAM@1TqV7trf3~YuA z{xpfx$WMFtnwe-+$oD_KRx{yMBZcQiRP)teltzL|{X4t5ezdkOm6nmiR*)fX=mdxo zi3hNUN6Q^kbCXO*(((xtd?XD;^4Ic_V>TM! z6_McACPmveIuAm8d^txKFC6Z|!DVE~sH#HJV!=lbISi0B6olcCmzj2SqM(dBFXMs% z>7AVo+nkrz#^qVKY%UY+3=&Qk6^2qyPM(c^?N=Fm>%hzXgrDUzt+qBjzg^(T%F6zD zk(VG1jgx}|?#y~%W*QYt=pegXN~$Jv8sAb*nfyq-lX&yy@tRF3Goiu%L)ckHRlPsq zo)Q#6Ktz<1MnFW7Zb|75krwIh4k<+e5u{6+O?QKUz@eqPyBj3Ev;S+|H}}nb;W}#x z?z8uAe`DsEXFdZ(NMbIo+A-ZHCM2J3BGt^FamSPKjC>$U9f}|{Sp7p)tQo?{IIys{ zC--^8T>|x!#txxnLcy!RKq1B41A~f+C)auALHt!}8z_=!Xr7yUT$H{oEh6!JyoZ$o zpVSz8R?!fML_0(S1xX$%s;!-wl#96xw{Q#y0uPqe&l@akr4-M1OzIKjBmqx)!R76rn{HI zecJql>|^;KFJ4yas~gc485tF4<$r}TW(@30`=KEPm4XKX0{plCY*bTG`f^GiO!Osr zPUk-!`+LE5wys2R8#nXAj$708R6=G)*vseKvC!|L8I>ltQyNxSSRWPfx~A{jGk1BMqYmba`| z7W5z88J^brpM37M#iA`4zS^2dW-K9&y5G&(G{3ni4gIFuQ^Re)Q-RbZ+1hI7;%KC0 zM605r{I{m~n>r@?+1RWQ|NV#U-H#vhS35E??=?L^3H@?6QBhQp0UB3DFV7tIB%d5u zYd(_SnR->B!Z7Q;#e(F_z+=N2Wjedj?@NL^KQ3c~3XnhCw!2-|XrKiptT`ZRq8DgoAnI#A}U>vz+xyI7*pTUH#BPi6B8*`akwKTPIunPgPt))HE9# zyYdN>N_O&@z|ASLXwn?Zf9v0C+zQyVjg8L(rsY1(ynK8^4IVDgxY9Dy8vH6%jhRAT zYN77R(;DY?D!3_l3!+&vGd&|y^oDR+`ojZRv=U3RkUTU97h{lYGBDiEcYbnfKgS|ZFZ(H~oJV)qz>-x2hcQ0@lKaQBPcMU&h6DiV4x#}Hh)Y_ry2zSCbLrelDsB* zFO}?cuL;%aH@5v4o$bkpAU7WtM{PJ3RMkp~Dt+Y3)w|L0_=Sz#cpf~k zJ!uJajAQ@YGLkn?TX*DU`{FFV*He*?SS+K6)$57pw@`<*!=}7K>QXgkOpkv&RB|Bd z1YQ2!x3ro|s@3|&RaT1pe?3-pn4KKve0c%MI@c|X`K@I!5jP%-FJI2{odZEGKR;zg zxXO=~Ml_DB45;3tp>5t`RN{?w5oJ=iQvC1goMpr3tpa~*GUiSF+(fYfI;Q{6*__;a z_wL~|En=%%&B>RvwY;imrWpPj>*TX@UO(9%UeVUZ9~s~E=Y7Yx-f1(j92! 
zMRX{kk4_OH5+)j&bg9!bxPh64Su~4NbuWNM_4C-hfrQ=k8v=pg-MD}zny$`HDS`c8 z=%?%ny;+zzk5WZMMSrvzmSoSt5z(dpA}3ukd~yg$&$*QYguJbc%W))DZy02f{uTT- zH^UN5RUk$H2|^Vr$%xcc;U6(EDMOW(oCyG%Osd@~tc$9vt3!K`3RZ)t;NW0;XXnc7 zk%sz)1~u@OjQl-V8i7o<0A4TKl5>h@r>VYP+w10hZ4>wxYP?DEX6z&!rhh0;AP7YO z-{C1Vn)JU@Qu#v=iuKD(I9>PauFDVHb*Br4t zGUnL$LshZ&V6EM}0tw|1*Kz3Okj@)ng61qF5bgxZZ+VcI=RxHt2O1OeplXePbASoZ ze7ACd2bK#BhzKZ?FlI(cbC8TAJbt{nAR0Ve{acKKg-*PH@tNuScTT(MK+u%v+E>kk(%o7R;8J3X@N4z`RQL(DD zu1KkJaC0ksmt83HM$yLDD94|OXw6d~@osus+NV&pSuW!gwz+F8Jf=A!J0^OG+<<_s z;ndftn*hI4joBUlX+2T3_w{sTdpjSguVkwBdhC`v7cAy_VMn25YU^Mja7nMCCbTW} zkTf7{MLnz8e0W*XFJDF)y}0sXAF&d>vd^&BsyofN?C#ggG<>fl@xGRX-?f9NRXUrU)hL^6iPGWmounA~BrCkC7Ci>@n$!8GHpYA>D z&5U6(d_VH;@-!E+*;`MOZJeC7Olk=g%%y zY7sMS?I?fUA_@7}RCpXSON&uQlNFn>of$xV6n*WNCPN%pR21*5{dm)onNjI?J3_&t ziB-DCc%{y}!5sjS@Sx9%N6gjUs;PBZT)T5P|MSj_Qa&T&P8H$I*SI8P$QjoRt-U_= zL&#;z1ELZ!Q&*(?uClz`XA~$zQ2@ataXOoUg?b|M+V4VuOXKHQ-hMEJ@=C=S#@O~m zI<@ze66z*2ftJS{z`MGqrpo^GlC@%twO=p~^Vz#BloQxK9RF$P>K4Kd8wL|el(KXI zGn4Y;>f?#su0n!6E82Fc!qnZRbULvH4dgzJmQrB zI<0dT91WTGG-{U5Huo3ao_ZqXaX4ob{ZE9T3XnNqI^39_kJ&BVYdj+GT%Le54>qT{ zv3=?0cuD%^Kzi_GLPFlh(Ly!vF`evkt%BN%B$p^oPIad*UsR@~rRCi%jUfa2*GkxV zJ*nr?Y%VMjN+J$GMkFj`f4&(Yw;ZulD(M6Juo)6hD#xIP23VzTD2#;_6|ugnv@`&K zCk`p+SFl)39A#!^+R0g64kATU&+|j~DQLis^osfh%LbUIJy+*P(B+FnI6$LhG@J`Z zaDxmpUqTNj(i#aUGDE2I0az5NwjrB4L9^_;a1|R$0KfvYQDz|!*xgb=rY8pn04ltP zL&p8>wLj(nT+Nrl=nTA`e_tShXRrxr=;#80R+J?h*FvPB{ThzLt&_V5(C(q-8LShR z{ZUa5fPJW0>&U2{_ailx@_F4h+hty1fz5kZI+}IPEXZ0v0AR==8$I5h_#c@O2wlyy zp8H|||3m&2bk!qmL7;R4gk}|KID@WkTJK{5@Z5cE++-@O+2xf8do+;h^P|@|E5nFJ zN@{L@C8p_cC5AC0TVp6K@GewM)=sDPXs8!?$;dWSs=#Xb3QkRM`~uY$$@7y<;0y&I zBpbj1gYcfn19GC8>FI4^Vq(aRJ15J6T?h%KpgOxZTL6zjSba1_p-_PTS6iE}{F=C1 z&1c9sQ&&A81m{FjA^N3Cg8O0! 
zIM^OXRF{q9EUlK5SO*&lVOFG-Wnd0A17Xuw?{iO*#f{9*LWTlkl_Rb?Hce&jYzOWmwX z)i?CyqL{tB8{Wu83Dxwc%*?IV)klc{GX%t;vdwcmlQn&6nv~W5A~sCf!vcb4^=#e= z3Ti||bCM?X{$3FjlohfM=mk~;2r{0)fGQWXj@?GWSOG1DWCOf%c`hU_d9*cUqoS?d z1Kc9bYFjG283#P{Xx6o(Y3-K8q>bg(_NqDWZa9;{0;XqL;lv!6nVCbZN84_`(g5gM zS{ncHMZ>IsKmyIF<+7qGuGWXom#avv^>?BeW5@ELHM-po-9_oGn{TC+;JoycC&|3_ z7yNF$DXCUd@q*y9J4q6y5-0B;mA_Df5kVye8j&&1a4jF~2XQ zBLC*@Gp{;LDVT4{YHMpP?fKA$+(T`9DSxeHpaMc~$OOQOnKcUk zM=ylFdfYDR8+u}unMBS2J$^X+eXEce}EnshBk79aX&>Kg-3oF35RsHRt4gmpNGe6ctoR0ImO#O zeX{P%_%gEWWXns<{!mp-?T1^}H9*jE0IsNVvN0sd!^_Kg5$6PyG$kc#gpJbsYrA|kyoh>b`JPen_@OL_yVW>PK5o}SqMM5hsEeDs- z6sZ9{@Jc`wRtDPVN!M_#lg0lcW}wC9!JEznvgU^hEj7phy8io55m>3pa3`pPvhY)@ zSlhAq|e4v$8nGH6H4fVze&l~C|*1RV0VzFLHq9nFC2)3o`j0*-$t;~;BcbS;FR0JS&?DDvFKk+o*e7uu~TsaYv z6uB{{%aPLJ;*6Oi@v7a?MF=S7jYPAsj9i@P0^>rMh?tmJ*bA5vUHctR%m)V2nVuIW zn9okXJ9#Xmd&_#0+XuCY6;f`e8-&U!0g5iF~~F8Tw9_BYD<__|Iil$=CdH-1MZ$e@nh5 z1t=-Ea$q?R%lIsXaz&-5r^AZPi?J?ytEN7Rg(!Ye*<9Jx<(xaNscNub(`8=9_z#zK z7@pFP#K`nNro(^5zihh|RWD+5m?{cYSWQJj`jS)QkfH+D_$H*)iqec~Z{OwuEK#+G zNMpQcNYZ6j73f@>APQ(mGoEPLmr`@8>oi)#c-7U=M`|TUrptzUnplq)zlH^afUqdH zHP{4EVL=W{G{U?)demI+Z3*^VxfH}|^`k`^%ogF7uwP4jKgw~Hm7qGs@=g^Tuo?jH zGk3Nx9c*t${K6$wJ*(#aP%$wKOnU^RpF496fq{Wc0s;-Q2M-=Rcp3e-~J4F%@|~Vp38J|Jriw z_woHmNl9TN2|c~S!jo4eEyk)$$n=$BEkq&IGmKd*rHb z-h3J~W#1gp&{LCUgfvC~)&VV?laByf&y`&^(db())54$rGaK1rZIu}f^xuCdK4SWE$T{`k? 
zgL4QGqibD~Ggn%wIu`qgh>T2oPmr04>UAgpoV|61cNkv2Jjl3=xU6W}1t}AZ1FAry z*o1FIi7q<_^O7~H^fXfHiS#d*2`pO_G+A;3+04__;*=87tEXgZTJ!q5`xYo zc%#GNb%8%pmR)stCdtF|69==ve4s@kl)@`etJYDKV<<7@yZZiK`nt1QC(dVX(DFLd zae+KN=ixK03^?bWun&Gsdx^}dm)b+~Vw~r^q@8vUQSvd_yw2J`qm&T{rgv@ays%8G zen6tBRiF+GzjuhXXqL^AWupo+l>qEJ2Z~-#T!qDLW<9&StS1Jw=;-9s!LzRlupp+U zS}oYbprH$-v!(Rl30+0K-P4zMUq)K-OTYArtg>i8b{W3AR{ z^I+Ks+~b=;Q`RcdBkW_2&d$n&47|DUrKt8NbBvV&X9^^z5}Umt1)%~dpA$9Y8FG1Z zdD15p*p?P;Y?60Xb~_$Ocsamjyf{{obo#q5tEN?})7G9AMt&)#_O4)4U+Qxg|87IV z3xkj5v-LH#&~K*-j5#=&;|kPI93~Ny3A;7Yx$%3CLM75?eg1h})_(rHG%{LXwYlR1 zlzMDMz?7)4Xq77hyCfS?sWp-lfH7j&{jefCUlw!*5NO;hr*j%6&7;i4M~M{QbI_mlj5A$m=5X*g)`8!g0U4z!|rBTSRazZy{eHn zUJ+(4o*zzfD*Wp5tZTZ)VaJW-VY*h+>^nLJg#wpn$nXP70*LZ*RdT_rD7clOlnb&` zC@B{LK=~?x0RcJX%oZi3n$Hl>n_~`$Fu2mR0J!$!n{5NUmW-MznKCpcA@lG5 zA}+?3?y}aWI`SsLqXrcx{$dHQ0m2R{>FlATW2zp|gHr+F;@!1n06^-lfdM6QZrjv> z6(Bt!K+Q%89Lw$Q7 zXL~zS^I4m3g){wpJn?wg<#oSfLhmPk(dXz`tAlkeO!O*L#;;5(up4xPb)Wd^J_$`v z$C7@VTPP@Dcc4{TI*Vz^&Fj9fG4Vv9AJx@e_hX$0SNUXnaJQPYlG+bxsZRHhFJsEu zWHQ3SOk8vM&AkQ8v$I@_yA=Mqb2uhzwq)cp1Lh~=mLr-4nW`FV`t06J&M|=b9l1-TXjgMyCGIOY{Q7QJ@1Uyp2Xl3L`~F+b+Z2 z>+d&la^~C$3Hh5^pXeqTo|b9+`{zw^@~MU%IABdkT-^sx9FNvb zD?phEhW!Fs^mv5@F{Ben6SjiIS|7IxNhqe?T;6}(mm$WztB1vUL{s5(rIa(WTeA%l zwcY|KP5*9a=p*Br{XGV=`}cZc4;}(jeC6hnZwl)cN;S+xd0TEu&`u|#`S+;5{@>_3 zq(lK0xm06}UnQ?YT?&Qb;G*n))h`QUCj?-I%7$r0pQ{HR9jp^iFPr%wndekIJaN_4 z+@OZ{f~4MooG=7P`M-ac!eLAR9TQW?x9?TC_B*m-zX^rq%HC!^U)&+n^N0wGgF}0WDrb=-I(UjhFgd9` zyeTkKhKhR88$82Sg^jhiG1{g0HhWZ2F?&X1g|C1A#yEnE(tfL;>U(r_JIISYNY$aq z`x+%D=M62(4i7bT)?ogG_Sg0Yi^{8PyQ44zDKD**KY?%ec3Dhz>O9}GXzGt2H@WWU zfd&9o&<{Y@MxxbR&UEYdTb@$WI63Z+bRn}@NC4n%V?E~B)cxq+aRfnC#P{#Y4$~F> zGUe6{u~6G5ARZ&gl!Z;BV6^5=*gW(MDt87hx7!vtRUNeH8S${5l-*H33)q+ORm68IKMjT zAJ|CLEMJ$B2sMY9Dbi*eB5bc*21}OCvdQnrUU3jVLjk%#1I~N;n}{%qq~OjdKZzxM zA3t=69;@g6d74$YCnlo(*`hf(-VXmFW1+1*ssEz9y|WVvBPE($Pno}@gN8;&xAjKO z>tR9xTEVCyshHT}@&lCd;{QPnL_U0=9vR^f<*%!CZW%k%s&Bwr*l_EVvZ+NSBg28l z**_a-b`D#rHgU?FvRRJs_+x{*nf 
zg7fL6s)otp{JdZDAG88>1+xufHa1<<{OaoROuZ()JNFvbqQ*c`uR0S&Qpp%uSxvws zgdBzi=Tkf=kcmi5(ti03m{pO4!F?E0WXY+bswyTejScfew&$8sKv1yCBY*A=h*I*rqZ%~v#cra2iRwG0duYDNU_Only^(Q#9E zSbigy@_^_81{i$%Y@gFYAqNn1o0%iG08;aOf9>lS1w}(=|G(KrT_q&~Z+ZC#8Zki0 zGGyoVgINna!kh46s95)8&@6&&dkc1oz8!F3P$DN7g&P~0jpC@OwL#P^1?jG9=bDTx zop`zFDX*FuqvLLUm_OFt5(+*{FzXIv;}s<>tVaO3(Fi*7zCL>SaU@Rzouidd&qb&8 zr-uT=GfV7=37PTA0v0tj$=ExxF66?^|9RAGEQRyfFW-hr71`_8Btf{BWRj9xg*B^o z&U>d|C9@;rcjbzR7<&20JnR}YHe(oN(F47sP`Nq+i)WHk9309!^WZr<-o-sQXf&ITESZg8ARp-hL5W<@(dr%uk>G^&2gilT6Qb z^8py1Z@e4-CLC$6)9uny^@$7&@X$H%0^36^IQtccbm8N_G!q3scQ zg=_~}n4s&CS0`mSGKD+bRnFIS{3yY-liBzqfxGOV`bi_) zsm%lFtcx3NcM1znBr!rmLJX0FM~F;7h5Pd750&NQat|&Scd3KPla|&e*=o#K-WQ!o z49Tvyz^w-j=^*A))6=&D36X)d)>%hEeUuNr`j)wWL6~C&;o*1?D87g9ZWe=p~x zw5Q3EUDFoM!X~uezz42xct2pyaRqq^Ja>W96(C?B!J|+{MD{;WxMi?{NT;AQu%}r- z=Ll$4-BR)4!C8$yDpJC& zhDaX?316QmTMJ|g!pA+{bV`*vU0Poct2+DO>Y;}@>^T0Mb?)sYyFeHMp(-u2^v9IU z*4FlQQ8@k6b$NdNi^YuzVg`nkq3wkpQ+BQ6i#Nv?DndFoFuzV08PKph-j9nb`W{j8 zsi%i)|Bxggy#EDGkg;X~QB<@w71!C_{842Hbr^Urc;Hx(Z}KPn`xl3bp5Bi}GMSp= z#EgyY;r;vePyGCWds3oC4Ux0Bdv}2|r;d=F!Ia<&6 z?L@CH9hF+#OUE36Xz3eE4vxZv@AGRLYC@j^t2DfY}bB za8@9}jIhkB`m?{s#F*^4*6oCH<>cpQIMq2Hle`N0wpEzkqc5B{kUC^)>tIU>J(M;*7KZ^%&{8^NK%wm9*qyS5d@TmYzSQx$%eBL^{YBlJUmo~CpSDCmyy{^&pv#sz?rVC&Ck{K7+`!n5MTf5 z>drmy&k;1x<2L!bFRA}v-oF8Ea1$^VL%)J`0|~M3w{IQ*sn~-- zcq7OnAK+k-hK0))BW7l0j4U%StZs`0XB9Pdfcp<(#3Qy>9Qqvvprx0QbxsQTGl3!n z{sRnnwdY9UIrwM69(pwIzg0YuOii~!U|pNb(J$cgNi;a@TU?wJkm-e>-NS!p-4Rtx zT)?aA%oZrEP%_~<_4~xnue~wjfz=V_^+xdzIrN_S^#2q+)2|?bFhneYlkfauYit|! 
zje!9blm?L@o#psGZegK~{r>#Z>FHWRf&*>}3NJgC@aBO5@<_%6*h~L)zk97aOT2!f z1Un@JwmqB_uk!@1_fHo09H1Wn%n6ITyYgTRM1pMy2na&MK33STblp`Ip^8WLt^`Xe z@blAB7TpmuzWEhwDR(a^^;y(mpwmu;s9lOCgw=!k@(oF?iVa+qgCaN=kt%$&e5_S^B25f2(|Z5 z8n7eHs2)GQ3;#H-r)TUoo=-q=F{23?dre*46ax*QuVH4wcNh!v_HEpC7869kpFVw( zSi8#|F<@$RvN?h@KYDVyJeNOOL)IB7T-T|PRD&)jbVuHDdX{w2t4D)bxcxhG3NgJ2 zHH@?Yfic=5wpPEEmX-q^7P}?zc#a1{hQOm=F*vQyz0}FOWzH6+p^kZTLnMq!Ka^6T z#(|oSn|@I2)etQ&PaAiXo0LMBPd+-%MZsritzMw6Mrl6qSSfG@82nCX_vjV$~DEhqlw@G zk_An%4J7;lQ?lE4YQhWNWe*k2;Ud8zFqJ6`6fj>99Em$7ecfS&BWt#;!CZrBZK5aE z^O4`bn?k3yKdG|MzX4-%r5;?MtyjLeY6evtPr*71HH|I7o13YFU<&TqYx9*E^G3 zZa${y^}6VSzbYg1Iy@(54eF*YgGZSoyn}@^Yh;2~7%MAvW^*^)k*h9tb_OuUga6;v z5v-U&a4GV-P@r7-jhJUBJYkWEE?}^>e5r#lf^C$SLdL z`uuy&vhd5#4nm?|mVI`d&Xml|Sf74b*#2TqurSis3+xm-<+GQGW*i_SJ?m>vaMn&R4VYtzmyP4JGQ>Rv3NunOLg$185OHH*OYJ3-c zT$}BrHmWdwEKbYzIA`-;2X%#~o7}mfu8&Tr>>KVC!Mwb?7JTF0Ib>vH!@!%#O_l|l zc=krUH`_sGCL*m?3%1jQqz4mXXZ&V-Fmn^Erpk5BGg+0^R(duK4IXEc{ac@D{H_Tf z5MBRF_AFg(fGg_6O!Bn5`(5tM-_}%4n=oZYMg@?_h@k*wZGL&$2pY3G5s{Sn46g$8 zh~s}Kxth2$0V43nk0Xd2(+1Gp5&4$kT2*~II};9a4d?Vo>jVcF6fWn^&x3!ce*qcu zCsU;5!TfP)m?oz4&GlN@n8}LX1x02&=bjw{|#J%3d-C(ch4$Yg@J$5WMx4a zQcVNNdq~hWxXn0A%AS#5k?^))|H(CmA!iZ$?BJ?@T-;$7Hgj0|5qOv z@r5m;o1=4=>L!q#goQV0ZsXOB_`Q4XSJy?YbN{xdx>LgB*A%`yyn6@E=kjtMGM>W+ z8yl;|w3+2+mx=bRBYl|8MPp6coA=>^EPrSHt5BF{o0gxC z^QFzu$6QAp^P$Wg#Wzx&DM?~ZwUq26CbC)S;OcgOv=KN3cBMqh8g2$W{v`Me3^vCX zt5GpFF!nk%3n*rBvXOTZlui57KDWD^pKHTRnKLtXz-WUcdQVu(%As&eGAv_^ysE@Z z1;4qOaI;_vF_I#OqL3JPp7N`E>wdBqXACO(O)A9p*#&M#M|DBFGfPqmDUcas#rEF9 zz?_xsj<$i&EGRvlft=SPe7&%MgbIc-=%M@u?I~P(It%7#!jntjK!;&3sF{y0?ef^UMd*9{0xc9CSSK<3CZo z_BMuc)gsK+G_@rQ3$uooB7o2d|B)*ni&nk)zaC!hb+b{+KE{6k$VgVm?Rj?21tLIP zH8gaNNW8ioj3@LtJ)O8ft3lLuf5yk7?5D3U3i7xB+fgjHouU5=EyuJqcC!Xtp+MFz%e#^N80{%^gE*y+;pW&)Puxp)+L${ z>7aO`Wsvzdhm0VaSJ3=li>S$FVUUH@(@U*WY33A+pj&H`<5QwEH>a0OsNW|g?FPHm zV`H`jh=1dU<(!dx3DH_Iz490Y!$ckqN@m~~b8 z#ccC$BWe9#z7wtu^6VsDU#?v!c|f{G{+aS@;&Y&D4;T8v;}DjWWz5&yESWQcxXE-( 
zK;c;kAViKddQmY@;sQ1?zl+^%Di5JSZ?uVy6E(8>_?jlF!`?j3n6uoPpWm%kAhcMO0etiG-olY+n zjFHfaNz{D@lc9b^G5(2WSt6u_cP1t#mikmt?N&uaW+Ng_s>FoHJ#_L1q}>D zK_z1t)TeyJT+yeWN*0qcCzz*?3Tbmz#`3$gbOvQm$CPKBn5rr(15p_5xw)N(=p`_3 zl^~RE(fTGibuvuE#jQKmUa)o5Sy;R@V;3V0HCns=)Qm4>yC3^-iF-3A{gh|ahuMm} z-`Ihv!Ce44hZUA=m*2nmJ=|{@9QWiSe=pf{ z77JOGh*FVnUa_xmwSmwcMyEvV@1v`%NJ8kelJZsFgr3?EQ^0*1vKQ7r+5IxK40EEg zJvB}SZ^`-8wj6J0X|=m!U)HTpo#`MDqowXA_?$L^{s=^<4n<0{eoo3kxjVmW-f_8x z)R>zK#(XL9P=k4a=p4~ z=^s*}qB)ueuW7$gIJ$GRdkuWhOvVdTVP0`}ipIFlG)q%C0 zUpcYQg!|JjB&|3vD~OPqIyKd|nnGanl6JeWfNr864_8t75H}(soBa-hGJ$MXfPCNx z|KE9k%elrRC@uR!iTS13V21J57nAYgCMg+@$m8Sw$6U{2ysxd-z|6D;%64k47B(JC z+9v$FhK*13qt(xvkP51u5@R=~uF017RaLV}t>lc%$$14MNPl?2 z4DvU~X+i)-eF+)7Ns^QLmk+X8nu|+CwDhZ%vh5W=UjzmqO$Q$$F; z^|rzNX`VVR72l4Zf{o#?H&34#rV|USfv@VJ|K!Yf8_fV`w`A}^u^1> zF=LuX%xV?)pQ~H2IX%UYNgudR;f-^1!viDPeh!$vVUN=yq6{VFybviwX}ns(gGu8D zR8*#ihqo$#@ zp>nGe#e*cwFWDdJVe-%P+H!eA!~DEHJBggv?Pvwxs2Jiu>5>YLyabfiTwfofW46`+ z5-=3wmQ+q;Md^3j{OufE@uIQ!V;j87sHmSlaewyp4bRQ>151PZ-}8tmY1VJl*^#I> zH0}nbb*#L7iet`(mZ~NBVTCzg>*TJVU+Wv$dcXB`hj)(#{j8N9 ztA}EWo4w(z(h}zulc;1Bh9+1j_McGTr}#G;Gt*O1h5h&DPIEjT@o3?#Pli*K17u$1 z%MX#=^5(^cC6f5~_`YphbgAeR_`du5kHy3up%8-^l&;f=8v5yM+n z6g_Hh@6S8ll`lim^!qh&INT+|u0s6K*OV0Zd8exMWSvjFXz6C8wWn9s9q&`DnCRWb z=^&bKx6T%{>?U@08dfUSe|8O-`XzuW$p`n+c+>jspuAlfFZ}U)SKK)S)hI+-d&w#7V&};JkN+CcpMX2yc zh%!~`(j5j1AQuu2~n08B^r3{g27x^bOkCd~>m(_}JLezPsH|;n1*O!mlDC816rIqgGQ( z_^;OsJYoq@1t7S8zmP%jMtb@~3R)}Gu=s}${wN25dgOK(o@Yt7zf`7Kx3;us1?azK zDJdz@?C&z1D)v;5K6hBIju=*Vg8^a~^pVFbjLcIqs;VpxVVd?#JKrWM>Khq8a?Llo zv(Dd@mH%_$w)JRurd^GnH_`>8Qr{qQ^zAht5ry(2XD77|HwkS-GL$*CVN)wBKfQe$ zC6)WqcsrJHvMg$*F0?2979KwegOE zivQ*X?e5IoQ2X+Pc?+i7a<|?<3qYGF+ox%w*Jed*I30T*^o_|=-l}f=Clo*pbumm~ z3uMer=__s#L1U0#zEhPjh`>2kJ30@j`>gk%IrTM6*~KTi^)HV}{~K382^AGe#@L&?zZyaIMNM6YW^xclQx5eBa(uo3!2k=4pg$yitGOJ@jv?S zrJV7GGH#=w|0`Cz=5NwuO%;26U|e795*;rbUmv*}&APX*0X>RJ=aP2+Vf+V;aa%L% zNzF?KFr$6)0`v9%K64TcAL;k+zs58Q8V0L&3{Flg5oS#|MFWpSMOrE_ZsBHUcj7JX 
zK=d)L>87hWep;(_w;4#8hPk~#eR)5ftzPtm8f4E(9HYF|pW^gNLW!WKQsbbIb)pCi zi(0UPabEc1;x!p@y|L^hoa!2pNl9`bp}$$5AUr(!TtGbaw57Utjz7W;6B1A2``RI!IjK&5n#U9~yn%iSfLydRAiekxdbEdywE z=}dn%_$_BhwAXkT(Nk;N0|yv{c?kfOOinImXUE8ki-V10q@zXz9o<-)IMqg3LU`z_3WqqFJzehOb-KZQLi zizDuI_Y>A{QA1br7SWWy*OMTbKk(`2a~J*A)1AM;fd=>R z7ZXqNAcdzr#X|jKt`CvC$qQ1>Urj$vbeGtAm~KtHu_pXdQ1{PQ`#*>7)S?`? z376j!2paWRtDqd|$7|7kU}cByK3YWlemKULf^@181z;xGwY4M(y%JVd_*w;#`%%e8 zBct|Wn)s|ALJZ#hAa7?rKmWRsZ7^176odz+q}GtMyXbY~3P~s%`B=16RH1=)*OXMs z0wZZU4uE!m0=6;2f57BUMS1m2pY10q>_&0naEc606;pq?mG#65OO@|Anp+wv{|p@J zCbV^WaX)`fw5vNt=Q-2<`}e);iJI%R(+iS?%RPfl)!Rp7109xG)YT*f4>_%`*6w~w z;>%n3{l2Id2rB>o8yZ>^vcTeK6J2q=}bvS zcWb6lXWl*nz`;=DERmq8nV5L``SY>a!*otz=I5!;JZzY_NUG26-Mlh;gV4U-y+==r zLgD^KS7_^*>#@?t)cr?K7c}R*nfGw-;TTz15jA*xA-GV0EHJfNlc`w8qu_xU%0bZi zIS0RnSqg`Rm?!@Y_q0>Yr@>c!-0V|@1%YqN#K?re%IDn0_`>{w=RT^oUE9yc!Cecj zRD_gJq2*UifZ3=@O1bGRx_*3=;Yzh{drqdmP?-scKh`0n|_%Tr8L=)?1!nZ+uq3IZ1o-055gY+rHj zwJYScar_rLLh@OHFsRM2L;sWSB=xogje98OdxnA;m~d%s@e|swIyBq-6NN9EgZyNivGfRK{p9w4W1rNW72H7@&$~iBtpzM(4kcDPres zUjmxn9nh8ZWS^X%u`F$TD38bm(fvn#@5f8MeLFV#;l%YttrG^8%hf9hn7%$JjO}mbc}AXS z`|(k-pw^LZ{ogdz_KPjnJI!bZzM2L1xPJzy85n5qSTHFEr^t8wo!381r#1Ow{H4|0 z$&gLVe@6bt!1%^K^WzkZ>a|sC49fa^96EYTE+`WPN)(wuTPVu$v1<+sb1MO{r2DJa z1be;o-H{;&m$VY0A7>j@PMTOA!U|5eGHx(qzrWUG85c0olRQH?iHU`&yx<(k-_qP3 zN=?oCCGFSC4%p3#F-ty*`>wxb99DiDPLGMKL@I#dz~pg#bJ;v~)7-w`TjMZ=N5hZ* zH01U-6$Pu?LzgES>mqgo^6x7`@Eg~u*N3t%@Z5s*2*-*2fLfp`U5(lM;>4!*CZ3_jfgr+U6`XJkSnF)>4XLjR}2f=pe?4@Sa@ ziuk}ygEr6a8diZ@%sepN_~Q5Phf{G69=7uqk*3$gmzJ7NTi*(0Kr^?<9p1XLEAq<| zE(nxcx9SuhuqL;Nd@1a%U3rS>x~_ z!_2BRcKK%Kj0g`+-tKgxlQiFjWjjM7xl!Z5;OpP)x6ByD-+YM{s;x@qiHY%~zGy05L>bu4jT0j0{{DT$ zv%Nh-4~s)V5i@38P^yG8=3!-lot!oFF*4+1YUKA8pKAMgejPzh%(fNdR&s&zeqjpV zQJK%5$=V*OG8%vFz}P|mgZ&YZEt{VYKy6?}CO8!Lj6f|g(8JV(&AuJxfj_RN0~C7+QEIS-cDd-_Ji6>p?vWF$RyV@&G?y*0~|<{F@_3B>Kk zt-+N3Unm2TE=SU7Pxna$2F}PUjMOpU5NhEny0OX7oC2M+@m}}uMG$uFo&K0~U4nNW zG8Sjz#t~n9e@>$=2Fnv5rXSEU)N_4=-rv6xX?^w@OYt}SAq2gLzv>sdE<5gf$;#`A 
z)l=)hPfQ%;w%@J!J>cxw>#)A6HTEENEY<;^k7sV za7;3khhC)l>70Ym_a;}oFzXB@7^#p~=XzQgc3cFug%<5KvavyOwEv?nAyp>7{49nX zva;ECKg~Nqeb;ziNjA>Zj}6gY`9Hq2u@U%_@`jlwhL->wn$?GL#+;<{{=eC#62ZtZ zxsscwH>WuK7qe!L3r@!TipZMBOU_TTMq+nzt@|F!@*xaH%R|8(G2}QnZhwW&9)eK2 z;7`s+?&}O=Wupq83BI$VF`25%uuFCE2 z`#0JsC@cF=i$2t4DM2ot_II4BOt@K~tOCwOQI~-c*mzg=C>+_mI4?+cg694jm zcm(p=0G?i1g3&5ST^$2y0pjH5Fh`SYdf}bEi-KnNJiUPN2`_wVco3J?cw1$*R(ZO~ z&32xKN?2F_uCmhkm^J!8CLUA+^+`utGQ%;QAuAxZkGiV%2kTN|jW6NSYb zEi|$$Lhv$;3Pq_N?`?^i7+;N`DMns@dGP5R*Y4RQDF0FskbQN^(xR){{vT}? z*dIQ|Jzl0hgn|z;Y9pET=h|cpIgd-g^`bKdv8e9TzG5n9Lz%j6Y1MlbISUZ>_VCcs z1f}wk{Bl_6Bkbk=;sPKOB>dzM6g11uko0r0i?t37-G|XZ^+s0#-bHoNwnJ&E- z4`(a#2l1BUL-(Zdj}1NOBvOTp(&~*l5PT}}hZA^@1t(d*hXY{N>i~@kW_ET9FvKYW zqK2~8?vaX%$Pj1k*3kQg@YE_SB6*$vRpwKhcZ_^M3n+A1u55{xj+kKXZ)&xiqGH_i ze=OL)NVgjB^KK+M*3gK{WOce~XmHMDVoRzNB&)KQ#+Gcg`VhVx6P=8z=jz(oGRHRP z)q*Y{9q*>D(LZb&A1UoriJY-!+Hg2ay-JA@8nwJ>|am?ISW$_4q zu=E(&JSP3pa=G0toC0PQ#ArkuQ1vTqTQqkkOId$68~yQ4)Qo~N~1CwIzl^daYEU$RNW5m(~Qwa2DdFGYy{3` zDr%lo0|xS0zi(cS6-17MDg8pGvs_GKVuTYR6coj%Zg0oQREGY_aTN7QQ3shkURd)w zLrzbxm)GQlPNj%@OP~V{Ka1qNrN2DL20ZDYjT65i5%U>f=J9DY4f*<+@7FIG%ez#L zaw0i&YM zN8iPbnwMR+2b?@tL&iBb#$=leue|zku(|m7s$#WDxz2M?CIx~kD78M09&w@iz@u@)292ym@;~J=j+TSwBR==Efi}(dv^h^@Ktr@ykI{b zIj^$J;ASn{I*}3M1btG|wy$s`jER)}4f*G1 zxBs@#(9!6$*Z&CWiZ%QDWhdM5CmzhSdVY=sjlaaiPco1rD(_1c)4jCgaQ&8)Y=2A~ z9d~kIEI{yBIOP}4{uWPLrp*>vRCJSga`O4%jx~z(nlLbMmwmda1~g7HtZ{N@GsY#%)PJ3LRNnQFXNCVdm8 zdlOxMA3zBWznsnZ!G!kRZk&3~AAb+B_d2#Jsq>Oz?17^1|VFJ8E!N z8QL%saapfp7p^q=~6!_9=N#o-m zrSr#_z=Sh2AOMVODS(E*2Rc*_#12NoF@dCw-J@JG&F0ILytLtnyqWv0i?<%A$f`+2 zOw#fSAHfg8vp6x924|aEPn24cNhh8~JB;-1qMq7&$xMG0s8*6;3!cRQo^->5vvfM2 zqMF#Fx>^c{Mp6)*^r?4~&&Y!SGqYL}XPbHw2rNsQRKG3zt$V8s9Ik3xLuyII#ed5t z_LG-gM<}G3ImHFnk67-IloduN315Eq77ZVK3aQ*>j{&P3H;5<&RBMbw`v;NApq8O; z<@q}3Rw#_Fbq~lydpU1r8Jfha7oz=(Y~2r3O;gy^$lL;^Ys>9r$;QqEY(V0-baJ5_rdb+yemx@2I&i(rI-q6LU->vJ5Rs5t2dWOlDZ}}I#$os>uXD@qwv41K3=H4E z+?u2yO>$>4crK)U^wWnaSUI(j);MY-mB4PA@R^z#^y`|MwY$qr+M8y~@Nj2E`S`|1 
z1LQ+Zp*|^@0kkX!0uENiKL`YDU#^!!8NXwFm`&qeBNn=|PVYV+Ch}>)(XH}t85#%m z&vkTSV}71F!_~t%{bT{^<1q%Zdn&mA#sgmh9{Db1xM4{zss8LPoomT_G~r3g?&l=4 zMH5#N^j=x16E7$3YdXd{WLX;($BGA|l9K)RY>>9>hPNIG)+meHwK1^@Yu{`F-j5Ti zR-RJ-{-^B^+%3!oO0P%^1VWB4y@fyo3ha%~f8X#~dG%d}ZSCgW85pyFrxX+*g++;k zS5+Amm1d3XP5qc*jUqb5g@q*)(AidLBIk4!dWE`DTEz6dum7ERZBUsW?Vi;$A!iEl z@#*@emug&6D?6t{OVS>5VV_V*Ex|zS&^8l&R9n@&0ieWiy z^Ud_}nDvB(Ze)8Dc&A`C8Hk_&72Gv@y+Jjz3#r9eHdE^s*7jN7WJ#` z$7{t(bS%uMi8*hk*>tOj>(yS$pKk%fqn&$%@XQiwurlea;>vqoqTtEw$@<)6gY2>R z)1({ab?p5HZa?iDWlwA>*L>Yl!ujOy&&raqGEs3fFd-r66xGu=`<%B1p=h5y4=;n@ z__egEZMwo3EGnSNaw|1KaINgjs|RUm{4GbRMjVEb2X;bBy7|Ql{`qlOr{2C7@m~|8 zN$8gnrxRA+89E$ZtKM#p+rxD&FX!yJ53HG6SYVGy&-{*1&;uCwTul}hnE(EBZdya( z_N=o{bW93CrGI5S&Bk4nm$y;R%t^TZijL88lIb-miHJ5wP;xTP)%C;&$A?qX8<7(j zF!_^9NVDnw*@?V>u=~f9bvSy|PqwNbt;c}9dO}Y)cOfO?ZZDGB)#E(W6FpQ`w36mm zL}c_me3&7Y9mWuO>gayN$7`JGJBPN+ZS>=cGbR}GbrCZHf5XE5z{` zyI(g2rnPpT>kT<{C#z0^IJHXo9v(L8jqGcjHV8Otbo#2SKEy^VT~gYHpphHcj7hi^sZO9&lW;z1q8 z$Ct#}D5tPQfX7QTt(ej$Sop!AVQF^un}RJB9MC)Ho=Y8;e+_PJD_XRhbQVCRy(G~6 zAQfEJ`g8(DpQ+w_!dcZSOBmHRa)b6{xu{4^RW=<3;cK$CT`YH;v{Z`V%tWsD5TzMm zcTb{|3O=R4B@ob`bQ30!f@48>W>W*t@^G#*R9c$u{+64aEvmAk0}TzCsyJE-C0a&m zIa*-A#fXoGQY^6Ay(}x|pD8FaKeS?P(1ZrxS5-IAaKvB_C$!%oz2ai#P0~6$7ix;8 z0WteeSM#e&KGJr4ge>&*Jf9P@*@dLBv7I3;Pu^}k$BqS{3M~KuUcj`~Fm}!PSqfNI zK{6pfBUzXdHyHZjhCowAF_6II-+yPGj$SQa_b$6gq5+e(chun1eke1EvSifV^J9+Z z+geD-=3uwrsb^P_;s?SGV7vB?83xM>TRaGwgHds{OI8ft0o0xzE-ql8frddH}^F;HDts55mTkjQ(#zkw}i<`{(>nxa? 
zT~}XzV=mdL#+8DRif2b!V&acSH6{8Ui*7GpU-07lMp9=2o(-JQNN@@G5kfn2#5)xn zF@K*DZ#!jn#^y%b_1I5MbrGR`J?^y&1L)Z(uwzl^w{L;CUzUANUAq36><;swI)(Ec z6CNi(PN7fb_v9q;nE#GPqCg2~DT9#>8wrkvei~@q>#+_QrKMwXrx7tH2J){R2nZZa zK9f)mM{PH^@id6OL5<}|n}OP7WVvmno5uNH=}6K%QU>$|IJJNAbJ(9X+nfa{B%U`W zr{*G8Y3dRGS`AXTGdn2sLXNU)7Br6N8ALPN*L~jXXM>CU2a{c0+F-=p&+I>$nB}{o zXIPg`1_jLDfGHb%-0{(wY-r)v5GQGUGE4%?N`H-4YKCnDA><2&oT-Pi9lbu+&R+Ut z2pCAQ+Z)CuCF~+>TyaoDv>#fm9Dya!lr@IcRrc864RfK;GM$N9m{U)>-RA%g;r=S0 z6BZP3=`kL)@nYkvnHtg0jB6*>tF0I^T@B;M*@J_l6W>^il->ym9c*yjQ+MlcA_Z?N&Pge@3aBO+U=j|+krAqeOS1w+ziuqS@c}xdI z|Cy*rBfPb>ab!gSeYHn;xHK%B_Y1I%Ao{?W+HOe%N^8yg(QUsdpYH2=%-fCkZ~YpP zmvBZ<^ z_{l_6^UphYXvU!7&*8Vd>FCyJB1)Y1rv73LxMf5Etm_%JIO%WeY}X-wt1O%Y9!_x> z#>s-IgKCIp*1xCsPG($|V(j$J(9@&Yn_EB@V^OrDh4@$^WL=zR!w{rjmmGiK_Hyo> zsjMuHFd7)R@jNy(+*>4V(Hx{t4O1f`W0PsajF#z~8yjm*N(K4q6P|~B>_S4OqaFyc zd5%_^1aJOydR_Mk@}mW6X#Fy7><(K0Pg#V7pf6pqq(O{25M0Jyqfa>wFn`p%O3~X0 zxay@mTmC1jP6^Yrpz*3Xy#tqnkd>cku95uAbfI7gvemF+{_EoP%6^V$^FjctQ=nxl zCC?YqH#|3oDP_Jez7;rvpMW`8-ZLqQAy-b!_j|fsQ+5MBe)DpjL(9#{sk#kITWYFH z*Bzfe9_gTv)C?u%CI=y*{qo=A%}w8o?}kQqvxk=nCUw=7jTt^w)#;T^wEHqqR23lF zA|HFnzv=QAI@V|2m%(R$oUf57qJ6aR=abo>mrY2s<+$|?WJQAJAJ!0~tP~duQ`n&U zf$?P+wtY$p=sRIIEeCL}9aoPO=z|Q}pzH5GMdEGdbB7|0R+z{5_}oIgN&kTe zObZL&Stw=ie`-9>HjT<(E)M5Y=@nBPJJAIkL0)Pu6Uv7f{4l^{%=ZGZ5W90@ySVtt zk6l~t)eVgq(Nj})#l@zePxO<65CyAX0^5vtywlv&+L#J7j)+KzX7@r0S(BRdhuAr{ zUtW1m&cd!u8mU4vZ%@JP-mcM+KL za%DMV8Bf7~Dlzt2%IK7M_@`q_EgM2)Qlx00N4y2T1I|_WfC3Xig^{$R1c zy*C@RW^xo}E-kST;b96pH!~F!X8@+Xtz7dzuef#Fo(=!-fbO^>5C{`apur&{@Hmzb zEGnSRd%|m)^&c-ZlK4nmZ8zpnV6~G2EPyM7Lx6!{Iy7O@Q-rMTGp=4qiOySRx{eMS z8b69SlvGqZazZK?_(h2G{ZDSYu0oL2NIsN3@13S%rBf|ae5#bi)RsFl&(nP5_ZV)? 
z%TdlR-WB?;T4!`<^EVK*UL*au_;=DSK_kkb5SsK+o2cht4yS55{ zEMKPcXJw^+b{vdc{c6*3?NbB5C3u;SP@Dy^m+BG9XlP962mU?L08r>dcQN@7vd|fD zhywv$cPJL!{0|bbbqz!%B5dWU@jLGI^+7+FZz+&ZsN-||v0(;)G_7?SJC2UvUjMZx z!_klOk7Z$EkUqLB8ecr+rw;oV4Sl;P`u%#{tZCg=GTtoSfB=7D-mnH84Kj~%pw>;E z?EX@iHu@dvQupsxP_$*(Wwt3K1T?X;<`g=!Rf7ZDLNT{j7n{7GMPY@`F2s~%LpO0) zV*i>_b)l)0NEjQlG6DQ8nu=`z9fa_TON1-#U5VDYwu(xoVR$W(P*pC8vF$=uGOQ%4 z7i$e=OOR4Meh6=M`u9U6=iM4zv4fddHQQ;&DL^`zgc@af8G?zjQN6Bx{0=fwRtlUi z-O0%!>4}{Vo136cXiiS)*>rLO+ag3>VGTT8A^!;mh_b@kSW*&(k)-;Qj;k$Y94tH{ z7jR{Pc3WRxU;F-LLPmyyx~%X9m?xHu@9gmS`O^~*0n_wkdU@}AYLos!#r&MwCrNjM zw~pkaGs`*e+!#H}WP+y-Eun+me+Cqmn3&p8zwTcu0kne5BKt4gkl}1Y&JsKRr_!R8 z>oeyRy)w_h4p>-y0>WOW)m1QG0GVjQvIJa0yRD*8)$$KY1^?Dp2iHsu|Bh_mC}+x@ zp05wo3vxt9(l%y0=`286Sva{^=aMM}QlP_KGDE)pXrINkXe6N|8Pr8=fV)#1r#FNgyt=ef+ z3>JrY6e|dylqhl@S~Nke4K^ZO1UU4@N64LtNT~so4;kb9UTu_+p^=cIB3GmqIa9A_c^7F7Li1)!HDt+4@Zm2 zAsc{(Y2cTQW?)x%ygMPt@LOEK-25Xrm!T7@SKi=b?0o(=2zNvUXjQnhG+xhPAGe<8 zGGoQ39nbn#_&l|3OH8a6y-I~JpsBm0{Hh1er_#?)S!CE_tFOOy1O|fb?BynMYIr}h zdY-}((>x=hrw?<2vDs#>0P$abWwz*appO$7V^{xhPZuX_~cw?SZ%jMSQe}$Oh$;qdn zZGB9L@8m1>s`}$cIqPlX zq|Z%Tdx`)19WY}#TptcL%-1@(S~I1(t=vr8&_jEAT^bq|64Lue+a;F3=~JDTpLx(` zogbfrh`x5)EOOL0u+}^Nks~sac5#`~Z}FAE&-vl243aH&lj; znQFHj0~dviwlN!q+=GBL(QC)BU@Q?zWFMl!IB#pS*TM=GoK3SK$msg@E#%Zvmjsj< zO?FgBJFfaTguP(Ju+{1Qq`3bsx=`mMAUo;BUtQ*_zbs&!A9P$vHT zxnUE{{zOB|?a>ayZBrA4{@TP&4!Mrn6k?`EN_smD3lnJytDpS+H1K`0Rah1vt`Z43l8 z4>z%Fd7M;~(6+m}R1aLixhn>pT>KjwzFJ#Qpc2x;6!AW$vjfnKYxqemI0$PDIad+k zMIRrC@bS9(e*C~V6rl8O`AU)%(8@c1|KRf9QZ*FgTqb{mD~gyl8N#46>p_`{;)6h9 zg+=`iz3{I7halNqi@Gj}GotYO+==ixNP$)C)VecStj;g1o8SY%ODB?b&2c#*I!tQuMW)(+xny+qz-QWt*zV{a8XOZrBJ<>B{?fwz&} zB1`llB4)}qsQ^DdRo!vlw^A6$5nntkd`!2VMu?u8HmGx5wTX0UYU=Sqo{*@3aC1O` z-E5RT5|XMVX-Vbazh!z-%x6A5?fk-y#2^}_d9{<+;xngiFx(hVOFXXW?oA4StKj41 zu4~Tfz3vRD>#;Ob*1#9%i*&LpQ@mLe^)_~C@)Bl5!fdapuyWyYp&)LTXH2*5?jQXM zCRjNEkMQRQ%DgC<);oo-D!LhVgC7dlYbiwQ%W8N`+!g}bJlj;>qH`SKOPkJ8y(~6n zQvQ>P_CP8AjFa1z=B)RHiGy#AzwX!QszgVuO@cfS!_>6CiH3%kovQ3Gs!2%9#rAS9 
zMTiE|@>oU>vc~pY9=`5kYI>8O7o^9-Bgik3bz@0W{AY8mC65M_GA_PvwDu0xOd_{1L;g?w;-|pZm$IMv=6ksdqTdh*enU{!GKNA$=sSr_S?5JM+FpMl5pAkQ)oCi zVm19;w9H=xwP4CilVWEs5&R@HR5UviyaPJ*3i@U& z$${mEh=QokrN!IHMqRzB+K&8P;TRViQ<%xScMG%iWyk(&C*ET-;oap}_Pl3u3Mh4q z^rx2ZU`J+#Ilhx&cZ5_{DgnB9fzE+z!yvs({BRFSX7r|}?PIGVe|i`n30Q5V&|(@O z(>2XR<^a^+sHzIEWR~viV2ns1zLFEUkS65gRT>&hJUVQ({ZEBa01&#S9OfCOOvD~k zQ$SWUT3Dirjc(pHcE)TGJ(6Db%%i8MuHe02GTn33$oF&7`-lT2 z#2dBkMlp?+NkJ!-iI^MWi3|yKKbCP&=OW@or@%@jEq)N*ge3+uMb;3G#sWGSdPTEN zSa>?4+!DBgs*jd2NN90g+hNGBVb3my*j*a5^)+CZe{nz23%Mh`dIeygm0hN*I~5i1 zA7M+laU~_WecWbylbx@MB$iMYq;(oHQ-K@#TND}vfC|WFOo{pxR>T3{N;Uzd*ssvg zbD*lyt5{{?r(bo?BOuUaC=KmM7+8V36>f|7sqx5nqTjkH>biot?yO6x0**6bxf1tg zo?-&2j=_PE`wFJJm+U3RhmjYqw>fudBou#xawT8Np&qDS?*g^t9;2LL{|FRwPE@;# zC^`$5d>Cv2AirmJbzj~z&r^(m!;VFQUsK~$TAtfCu4j_8$_{`cR48emTJkGP72yv!h|Q3t-Z%G^lH~N3I%0td{(;$5$O$sV7;_p z=KQ{eqB8n93(HMIY1H$4O=RYqH$rcPGv4}KP?h2K{4`wl4&AA((50_@EX?Uj1M~blFpOiCPS!wO1cXHAnXOrq?Tma=qP!Jr6 z2-Ww$jJfayz(#0MHzQ8TBbD)6C>v*bTBp_yJ62scLxhqWPVDB#>SNBp0P+ClTG7)l z1{0~5hhVOP0hB03H$EULVeGISN^^5(X;-CJ*E(!o^vV4#!A0=6-08?0Q24uC8+}PD zi(|`ks$xQo?Jdf9DC>}PBm7}6_UBIn94cT-P%tQeB#|@<2zVmlk4hA9u)N9O%fiF% zNc``=3tNJJS2q4)D2~eKUqj)gIIGj?JHI&rbv{bB8nG^qOO&m&GsrlAhybsvD-P>> z|M2GTzCME9q=|idKKc89wyQ1IE59=F5cO624UR;LN~!LK-(`8{)ee6PAm6+wVk!@h z-QF{3dO0i=>N2I1lOF`Fu_S>+fkPBFb-0yb&;*n2d1KiQ!-s$D}PK5%${Z* z6A}>QX<$l$`Q~7W;yI-~SSG!NG;qafN=R&A=8o4wMMlE zEYnQUDFbSY_Wsw8Sz~fQW~G&UPXr8#nk6QaOyT?f8`!Ur0J@FYA>FDH?%lz^Db@e_ z870W>Xcmrsndot(x+a&Gr-%`1mJG#R9icaskgHm5P)YIgQ~$N@DxCs?5iS{4M}X!?Hr7ctV8b-;Ztvijg`W9wDxaqKOr4q$Um-olcaWor-i^Dj!% z5+}=AM1B3^70-7Z;(kz=OnQrCUzI+oYswUKw5Rp#)uUdVUR$mA#rtaN4@ow8ys^Ht zVF>v)4kVsTT-?_JM4r@~v8?NBjbtp-*lWrR1eoLl=U!ewc!4M=?78y&DR*{o=9%D! 
zY&0~BK>kAwF1zv+44CI{ZRQKHs9RRUd71K;I?w!0gMLJuXgL zZ`Sz~D-WPXfcY7PhO*Y(ECQCLI3`I~RAcjtWQYntJfb35riIngRSg5HFYahSEj#N# zqI8^47QmhyvbU+!_I!T4CVqPbqf+x?F(HF{a20;J>u2p)moGIW0#KWYr8*wx)zM<^ z6J5IdTPOf~7^-0>Lj>LlleY|6{@HuqtvPRJJ~O?hiIP1)@d@_(Uco^+@6#V#5Z67MU(2j=9{^0DW4PIIfD&kTaxdSAxt z#O9rQ&GW@r*n%7_=IAv78OIF^WpM!j8VZ)_Y>q&wotbp>Gd0~7Si&HQ{xuvS&MiZm zb9FrO8=Z8g%?95+cKeGb{NIR}Ex^3=;ug;-SzmuWxhrhv`mC>tcp{-vZGZn(w?OS@|{ zH{qsyNz#;2es_`Xvh$=8~Vd&#Rjo;%LhLLG@pdhEZ+9z$iUt z7*d~`pd5E*ctpZ+9WX2Qq{NTKIhHe_Xl>YcKguwBSRrxS#HQ@%zDaqIkcqOs6AH+V zSq%&nG1Th*eDO|61rp6{Vgl4FN(X#`GkvJx;g#zS)90rreR9f?Y%nU5o-Ca)H{=kE zbUe{iKM%2TapS`zD~dpLyJ69RRc}~a$j%-H2ej{WTR>gvs@SYZe8xcg7nm36bua;k z1|4w=o%qje9M%O1dYy$O4-iFZgM#~=1U(DNpZj_r@1_KV7naW^Sy-}PK9n?Wvzy(* zoSWt6r(Se{1=V8`b-WNj=JJ0ymF?lXHUXa1qKC*)Yu*>7iF%ISN(FXr$(?MU! zxhu?FloTi@CZpDAOM!b@2K1`X?Vo;A|A|F`=b(=T+5%%^z|v1WH3DswyaRPDAYr7{ zL&$TzMJ*r#mSri|cUPh-y}=O*vb)oCk)J1i#mC2Y6l;{+fJlRvlxCe88+>A7Vx{E~ z_Zl#oWCCI)Um+pk;^B3@P}ti1{USGC`qhBQ(9<3WKep?7=x%iF-6C>1`3cw?SGNBL z_R~92I~XLU|LN>(SP4R9`ivI}te0oY)Sx6@^ncWTWBLl*HoSnQ^r_Bn zb&_4rlZG)A%)h)Wv9f$w7cXDm`($rCgsov*MWWrJ2XI%@lZnpDX%p|Y|JP+PUuWjl z^q;d%2ey9B*Y6o9T1Mz*NMT4DW>YwdP0stUsjMu519*>CKN6l4Rz45(qT)W1`^#P#hlP=s3U`Sny z%Vv@ZP!N#*ieBsUf_#r0Klevxkc;~gLl_r_^%DC566-y;(yeBHf5ru2u*`Vf{}MgkS}`x>WS}&;%Rk@&{nzsEO2>j$JsF@oU%n(E$uMNlsV6o1e{Dp%^BztDgDTk-fo2MWMwk2KiW}e^IgWH5C*3 zCkt>iO-(2KG03|KYd7cdT?PizNeI5FB`D7iWwkJ5vXgPy+2-BVVNCcyEjl(QuKhk;|^Qpwyz1Iroo;d zp_W8&Yy^f*vkkZXt`|e+?6!{Eo*D8WHpk$-WCRdqaG3*ndAGm(=fK=~6riS&26wc- zX3TnFZRr_U`TL832h9dhitNzlGZYvQIV}VXeH*p&ke5v2ojpq=%9qCp`Y(VSEV{R* z27+RTe}A3;aWk0pAXDz`V>a;5V^IDkQJ&eKZaa@zu(Ie#pigL43k5xO5SGIL$jxI{ z_6Niduaf3R?|tH)+TS~W^^{;ssTBY-rRTO@LLW9aKnKjJ+kYtK>8>QHz!f_d2nB<={G^C9U9u8ZZDa2l0Gkp%i8GRS!#8U&m%c(4%qlSma>t!;X8Qp8} zWWh~CrHGIc21InygU1cnJ0JgN8ZzkIOyKa26`@%~BIK~6k?<`@8}^;4V~o!+LXb5a z-f^?NVG_;8b}NtTiqqzrV=6C2%(zmT)z{Ei9xV8`Z=r2WC5}!qxr%W-e7lY*_~hBo zcY6vFUm03evb;LUX6-D2eEHBUY$N(l#YaNoj<@vEjaTZG+`GUHS67B&yK6{TObXqV 
zQ3UG=+(hReVeucGX+Xy4B98WL_HaOj1+vh~Ui2nEA5$TK3{>d>n2Ra!XKl}qu>f{; z3d(_(C0bQot+xPPwZ`9&(9XbyWEQfgrzgSezx*v^hn0{;kbMCs=b%~VrBvF^ zI2*b1cdIZuy;~dZgR;F>!*idSJo#uyu|8dHF$$tuwR8vA<>k$zi97$r(_WWob$TrK zKjulBo_Kt%7lvI!asQ}6FTe9-n9jMM0CRdyI{|^_94N@ewKDl!yF*0h=Fo2`!3vbW zzGmFzf|gI}TxlgrN}A&(VMmMN;C8~7A7;M`R&Q-5Kx&4ot-^-7()Xs zaW7i39a_8|e&>BH{ihp6f(}mZ8}D`)m8dX5WAbodbxUYPKuGWyi(ew8_7(+4+&>8< z45TC>{TrVPb(>6nl>;VFPP0*9ZbAeA^|4j;=E0X(Pqx25aKrq+X~DX@wK&(tCjn=O zEAPnmX628ngwJlt<>2BK3nq@z)zk_E_SMT@4z2uo5h`$6e*OBDEXmCXkc>f6RN^2< zQxt%qzKlY00Q`ctld(fm)U4WpyH8182NZHOSf{e=C!3-HipO2NOaVo>_o`TE#BdfC zrgL>c!_?HW9X-ZTy6l*R1#tmg*dohK0-}#_?O9)yk5}qJnlj*aR-LZjx}6xS#6}1k zufZ2ME$jZ37({|$=s zxP8Z>RV{li8m2HY%Qrdrgfnedr%NpdPJ;EE=CdeZA`N?hx z4IXJ=omz6kzw?_W;iZ>E&?AY0@r!Z~CSwGGzx~j2A(Rh*Y;B7933BsP$~0SC11IiRVs*9WUWQ9yK;e8zoQ#uSSLL4Yart#{p| zFOFIt;PL3_>8C;!`L=#fsO*p50hXj11mJ&a?dIOg$svPq`Za*PE?;)H3XhC5>j*+% ziWsLu8{F17rLVq!5A}r#`~og6K(zGxX7obrVQ-e)&T_#3Un+&YWqH7n8=nr&6ROz6 zL~23pSA9;sHQJL~c$ji&0fP_d(K5kptlvnerYE(GG2Xc_XG&Xsy{k+Wc)8oF;^=2~2*T?$;XnG(yL%YS> zEz+%2zoE*%;mQ*nAH-t5o`t<&n$nplp9u$kLvdxwnvT$%k|Iv^dD<#Va_3!gE$ZTp za3FNv(=?CH;eKG&{VP#$;Gr(I8a{;pcT73ht^ZVz2+%6Y&DJ4pZ0BtHKl#p9BN6HM zzf}t{5##8sXXPgB!uU9}0Eyd`Q+p{$I{{c$pN*u)D z>R_%^w^|Zi}`iAoNwa1 z6{!v^)5XQ4zuYMBtBKWG-JMiV*z^CrKgIBp;S{~Y56k1U))icP1Q1Z?UN+ zg4b^K6$0gq7zkfm`9=z{t~vm$hug|>K|dK8M1n{VOJE5L;u^42L{6n(z$XgOIHWJp zf=&(kFXLq3uYPZPx{G`X-;%$XUG!KU2JxreV9c=|q`wsv7q5Y*VF_GN`p*6INQXWz z!9cC*M+I|;DR=zB@wW=gkI0y&=TaEI9FG&DBu0D(xAg=R0oN#swAep?RZUA6OwdFAITR6xp zvyC|){;OF4h#zIcbJ4b)yvRKSiX$;XbW`(WyBqJ|`>#x~Wup(B`7_-Bqci^!b{ds8SO_PruXU}5o^unv;lm*{avbAvI&mmHSqOs zy9yo=ZRdpOrOddJUP@`J++QtOLl8OgaaxgZVziqbeg8$9Toca+%l;Be32hH{Do!Us z_PO2=$zq5A^=9w$O)>OnZh{*YM{g!dHB&d5a8&a!2dN-+E(#9FQ3(`S67Pf5cZ-XI z+9f_$@;K88W`E@O+*TptL!qIO2pFR%8gnw2X3Y2oYH={BB43xhx>)Z-`*?qC1>7${ z_O3GSfCC)u7i1Jb<SI2C|8XggHL&q1-oRJPLcF$RAG1|U#iCMCIEGl6G>fw#yYanyXS0tXL2S80X> zU`~>1c!s{G(#|d}FBk~$7K)s9;r{=uM>Nt1ek=p@o_Z-5_oO3hPnQ*m*Bv%8UeWkj 
z_C6Bc(gTe=iWr9Ur6?td?W()sc*M+~S;+CJxIu~+OoZF>`wE~Tf!P-wx1XMV;eFy~;Z)`yAEcbATNzdDa;SYlr))E}G z5bhOE75@ftrTvd*n~QHTz)FUf1X4J3M)4&;0)+AHxC(;=to>z5UTdru7gYJ`i%|Mt z)>pX^r)t}dh5NzOk)~;Y)a`fND*h`@facM z$f~*SzGW^artj?Bi-0Ugl=7yPq)b-QJ|yPLmq*f1oZ@ZXusB#$_f+CWU7-`NLA-;( z(^L?g3<3hTD4X3g{{n`Q|La&W6gNL-4c(5z6kSvpV&X&260rFn|CoOz2R58JH-rCv z>#_rOCBP0qpB_P=DE1y}4mb}%loZ2EhN5dUVqV@3{!-+@fDHMgZ?syb*Si-jsrC@| zH97*DJf1ow8D`(Z!R-#~1hU4*^c?nXDc_%-Zxy1(!e(~5_3iV1=~h3(b=j34f5Si@ zMt;6i?2=nw3Ibl&b5@o#V*HR_v~IyoVbJKoJn4^l+CS+*-3t$w{nj76#7_+%p*J0b z-5G(n%Exnn#LfVW#n2ZzJn)8z0q_XHroG0?H%4x#A5kxCeJf&GgIi}RX0h>n!>-I? zV!yn!@n#C5*F+JwV%$C+y!H>g7<2U=w0JQEh5o!HD4MVa=U-rm*cpmTgh0gEJfv68 zYL}6|-;;pz8*S`YnZqn?^Gf4uZ=oY90kVY})Q89J@ShAibt)D=7d#k3<-i-6qYRXC za90c+5q~C29YkmcnX7#Ofu^LRLky0t`=cgkEA@~6+X<@1$)Y$orDSnQi4=3oDpr*f z<#~!TmC?1D$_}@_?6?}2D(4OPtrRBVOPjcyS}xqe@8WVrxP|PWc#6_7DpJPn?ZuQa zmaBZ(8N`f&0q0IoIHBmQp`=_ChTzF1XTnj*r^{NEeZsJUV}WKGR0$ zv?0%d*`3$Q&;w_|!#x;ez#$;$0gPxaBD60T5B1keK00*}a==^O0*Zf^w#O+b?$vHN z*V%>=k<`?{>@)x6moMfX+x(t93x)zJ-54>(aPaen4#=X8#+@H+Kl|izsA_A3b(;hx zJYArYM=aV~Y2B8qq^^-03V2GYP)$w!zBdql4{d~;wsMuK<%2|`S7Vd^q2s6ENw20> z@qmGGaZu7@gnlJd&Sn|$IJ614eP(GzylU|d{{n6rx7QQ)aOP(l{fGR-`W42oiHsk_ zzS6#g5#4U99hNeTeoU=XWt3pUKy^=TG080`V4%Rt>_q&M!(X&Gpd2r=i;!R@RACq= zn(T&>&l3`;77Pp0B1&O|Xjvjo-};zl5qe07P%-=aTU+lS@O;K0LK|(WD;WLywY@Os z574BC{^`bu_pNG)4X1tQP+^;b9~TybO~@$zU#4yd*uB86Z|?B6brQrNfOFUtSr!yo zd1oW}zkek$8TEb4hJl^cTikh=%a{EGyM_n52DPPyEr|KrPVt4Vfl_X7#!8+>h$Z3X zhbRBhqMUE@SARG4FR%74$*SuZH=EFz2A)u{#Is{wC5D%Rcr^+Rndbz0Q1Mj_Wjg!; zxm=)~t|S&V1^q#t#l(SugG-u0Q_S0Kik+L5#snv&JgxPA5Ac=sOigxt{5w^>j=}#6 zJaPA7(5uaoR#rx&=cE66hawOXf=x(-G|G!f$6qV&*gQpDDfO=BY9v?Qr1FP#fAv;P z!_ZoHc8=TMnhn!w1POa4YHBV)OD@tYhy;|7*7^D<2Fl7+LCtFu0QM{jxpf6?QSUIP zCOhK)N7Fe*W%_<^KiimWd$KW2=7h<%n`}(B&B?a?&9?2DFx}aBu73aZyy(r$YIWCj zo%`H4_UDKqgqQXDcLWSWE@Ty`|2U|5vTxY6p0l++9aR$y6css=;L#DsO~J)|B%s^X zg}Kp_i#``GEI2=eect&vZ+ROlKtekLd}(}m1D9^++xZm$%GsS_HQ}|cP_)rNbsau2Ym1}6QK$zaOCL(=xDXISb zw@GmDQOMN|MnvCDzBUp+Yq|NVa(ZON5tL5FSeoCQzx!QD$q~fg 
zvJ`9(oVJcbIy?JkC4Y5YV}hekozABhqO{b)C0=c`=?J4d{xu;E4Httyh)@4Mfr_OVWAu~uY@Q+!Wc7YT^y-${!c^?d;5z7-b( z^GuL*_x<0o|G=}f~Fv^vN%^H+!Y9Bm}^Ep4(3Ig2a{r8&GE#yFBe2iHd z#T)=|@?T%@SJScXWo*p(%SREoIG||&bWRSLadusP;x1PQC$ZD7C~JS8=;UZdvE8ov zp+@aNQPV}eXE>Qr9bMGe*jpMCi}Uu^QCdnvyHacM(tQ#iFj2E!f9E!=3;S_%N8L=g zl5Y!~s7;ZUL;p{AmH#EZMR<#g+F)TZLYO~F>pKm>d>{c_ zAss${kW-WkAsl#untFtrIt2@`0V__IKbce&P}0zZ0?Q2?9=81<$T~p7 z`LnwA2FAXd>|Z}C7rVk@Ggz4Kl;6HlfdAri4F83f0T=hn$-M(&;HWY0>Jc4(qdSqG z8m2U+xU}369Zcz{F@? z{$EW+9T+c~i?ru5d>r6C2;}Z22OX=J>O&r^tcAA==PLe;`pn6(i3uhj2S)+(t50<78Y!VZ7w2baUm_PS|j0|PPF0T*t`nR z7=fwf6I57#$PnJbBSqETjepW&^Q}J?R+v(D1_C5(UkVcuLC^Spetk2B6?rCQ+zY+o z?;sbSS5|?CEK2_tbE)MM=$-#4bzp(DI)do)${T+GSs2C8Uv}qcrm7l>^wRSY;;?_< zAQM3ua_pR!EVGZsn2WZm?)zOW#fF`?&3gT{q~481;2KW#XOs6djffmIyrPOAY4)&L zAgi%KUr@-@HSiWn133vWUtt4?=g-GLBM9BKBO953D=s&8I*Cr@b70bL{l8$j8PFdO zm>dL16!cp}i62*y4tz^={DJ;HDOxMiNjgYQX`?{E6YxMM0#rdNn1Pcj5yR^o3<3;@ zJWm)oWBk8w-}Rd9Q|B69`nZ|xf2x49QJTR4rNrfW_4Y`URBoJf;z5_$)a92-joBY% zry_AjAbk+HW&@0K0X^Pd5)nmU)j)6%*Dj?*FLoXtSn!66TTpGA9y@*I!4EZfy6`FT z(C$A=#u%cbQx54XQ^e8S1eJUiA(@Pz(x4@ zkH{*8+19<&;M+s=ckw|<;Ew^=9p@7~EdlFh=F|#VQb4r{7y@Uxu2}*o{jUH2)OiS@ zaig!m(&VnvO37=N))yf6aJH2F#Uz##bF~4Q!M%?cz}$wLl`QzfaaRhK$?0?uEZw7oQ0n5`=mQ)onXXoQF zgNjl@MxciTknHP9E|By!HAk-x$9a#^j8C`SIyPJR**1%TH{LPr4=sX%IXHD999jV@ zv0oLDUq{Is8Y9|S{*-4`Y4bN&(ONJg2<;OFh|0B1G^b?aM@WC$_4_WNTrdY!;1@14 zEUWCX5+uob1@~AI_;Q|NxdK2_E8Pkx9tD8jRIO2J;2JGI<|*Tb`|4d4bs$4TUxTb( z(CEWVMU6(eXBSVdmiE?35P^MCR_c+v0Neh>jkic*Ztc(dhC?ALXbg-2jSD%C13!9f zZ1Q4d6>aLJj?Evsn>%1g@Vu802g4+$NWsCEcgxouT~{zVj_{rDe3kkiAnrAGHZc=q zqP4aFjG1QcA3z)8vj^pJs`|O4oa1%D3pBVzJM~yfvK(dJUI4#xDmuEd|9c-CM;tmB zt~{-QJ4eEp@i!mM5x)66y+&2AfwiSpz;Nh&bLSR98X(sqq^@#+Y9cki>pE7oMD)Yo%K%4+Z zhyq|$pQu#$lqvN-Z|twVCrJvDZrzM>bky8 z87F20#4g~EKC;M2jd7)S6k8?`g>xyn`e%!!#M;-m@k^GsV;!mLYTe4)kyOmpF1^?N zs2KEzSB>YCAWZyjG)CZ|0Ai7tOL+KeEnnZz?rY%;+jYvQf35om9{WFpb65TX)AX{E zok*7U<~OqM1N&FUxd7Gwt4BRyCNT8hQg8d~$G5t=XDB0&4L&y;SO2hu89UH?MM$O5F(-d!=f$tE zPx>xL`;UOTKW^=rlzE|0-1!Evq~ 
zEis14jYGOF(_GhWA^6PsF?>67c126PrSS$s3Q<@GJ5b?HSBqV1TG7XQJfhg(=`w%! zRIcly=D#pGk50f~Y|AWCUcM76%$M3=&=i4Vpzr-$cKfUCqWw9*8a4F$W8Vt;+*}z+AdV- z)q{1Bpn}f53v7HFc`0e0POTYJU)MMvYj?teDd61LQ}veUUeCtOP&}E#Qu=V~b=D zAAp5_^ek9qXMm2z27V1}Uta-Vc?;Q9-qQ~*pTaNRjZz&|>5+J{$GhtSfKTzfym=wR zflr}T#nJPN))%Zbz>-{~Z^BT7ff%iYpi<3Bhe%OWQ3|ZQU{z&}RAdQ68~TS_f)d3< zWC0AU(IK;}bmd{9Y|)Ska69tRhM?pXlaW8va$-#%c;rulfHII00ZV^F{I(qTv0K22 zI^wq8W9&Fr*J}MHCFP$vv?=D`!kTOIS||iOOoOF8O~!HEq2v>{z8%F;_?*d|UZx&> zZQ{85Xa9p^?jXJ20(M!DdskA2)c(^#XDhz78apMq2M%8Ohx3X>R%x#sDi=+GpZ1%TL_=A%tte?7n ztXTv1$laIomDy*uP3iqTXdxCAv!ia|7K?a7W11Y=Y1BJE)x*o;H?B6=xJZ_6+|QM3 z^W12l>An}qvUU7vCyS%!zS~Oz44Nax$O=83ICt6m;A0VJj>8E%QCPGRzr+p)=#c~z zSMTR+GHe;D!=>Suh5zXJ0cQDl+;2XblA^pHPk9^TjPd{<_Zk6XTIb%OfiHPSAmY#J zzfh}M{yO-)*4gpIw(&`eWvT&JjIJA|PoNgza)+46a-TGw&A!)@J|JH0SKReP}j zm^!gfW1-Ha>i79PG<8*9kn(HCGz3ARE62yn3z9Z90sGl|M3#h}&F*!tM_pdy$1OJT z#GBCQm7RM(z&-tXNT7SEeI9GjfHn|5Ofp>&1$l zfn#y|Jo9W?A!t<9E^2WKepPYfzrY1_4!7no2xYLT<$rY5trH7@D=(BHES+XC?aVrf zZF7LZh@+BkH{oU5Z70`OqA*ae;gCxAVn{X)lNYVPBJtcdR6vsq89b5(v%uch;CKre z8E1NmbZ@^4!c_g%BOXn-5BEkR7O-#O_sCbOjbc=?C%9Q3eTU2s9pj_GV&P0?SE ztUiGtkW<_xIyRQ!DJlLzjdrs4%6 zmZH2uA>W^6=|wq@3KJ>0|Jz-8aIJ0?r4Q)3mH~i`MV1ztH$lFU;!q;G(f*#OQa3X- z6LPG7=I6B$(DWhhN^+YC0h09f+r!UV!6ok2+wvDT!TI>4FF%<1RtUte8KHy z`Oj|1)z2$nSeriReZl+qw*XQIgc0pZ&a%78vuNa_}zd|N>;WPO(nqVJ+* z+AW(Q$5&5MZCUR5isfzwz$Z)%Y3m+3Oxm8D>uU;OaB)%$*|{B}iYoXg`Ff}|LSAd0u{j1Ls-_*1EeeMFa+cqa!ADRHa~ zKeNJs+6|P8KE!jkl%oA*3I{;K6M&h}RafHuK|7XrrdC1g2w|_c z-oyWkhf*%DNppT`?`ou7M#g05<}j5CoRw86DjMKr<-j~_(ro_5#4I9Lu-=S(NbM9z z<5e-c0N6>Z%LgWHq`pr@6IdSvb>1+!>8#Nt;x#*yxJicGo1}wj|VpuA$$E?OQI?Y*2?)j5ny;&~_yZ{LWviW>6 z|8AhL2Ny43)!q+|PSE9L$;ycqf3+D5B0Tv4riC?4_XHD}G{!f<(oeV?a8W}(h(!9zk4!C6z7qSy{X*b}H~vAP zEm)+{u@u|ee>{l8rVAEoaw&CR=+#Wk#yp+_5D%J$sC>VP1+#n<2r8?HA8_E{MBgw_ zDs(1)40E6uBvZOaGui&S?-Zj|boi=dSQYnOw+If-QOI z`+dRJnI$FTjy^BVSS9?f|D~}iQ`wx%P5GDuQq2@dHjnEA1wY_#|JCpPHSjBbvLAZ{ zdGt7-4{mchggA8_AEe^MwriI*bHdjzeZc9^)j8{Vr*E*~pf7s#6O{R%yVmkc 
zr9{Dnp2VW!0i0Sqg{g4078Oy%G@mSUSkEg&m*>Cjdlb&5)uun-Jvj*m#2k!ado=@( zIN%s+J(Y`=6n!Ywwe65)=!QqNKxi;ye|#}!NCbPX^^XCF@={JnMHyD**^7?^uXQk$ z#)bNUE<#^D|5P@B%u;;0@-|(7;NYBbFGx!2LFz>>?$1#jLU$bS(Y8De&y($yYEr@=k7*EER5uvX=15W(x2R9=tSz?D52r2wbNxO+}6PJ zbKg5qYBDD$LLLBu&0Plk|2#nptq#{+FsNH@!)JGAhaW<|ov$Tc_cprz6S*v1Uo&!5 zq9oACCS{Sh&G#kX5EBnB%yWaY`3c0#?d;}37{pr~=sj()l%ylbi6jT@!V&KKQ8Zz< zv0_SMjbdUW9^aG(;7GoTbW=!$t*R)FK3IECjf_~NJ8$84pDF=-QTIsi>2LSC27g{d zBEK{tsar?RDbsy)Sj1M-^sm$f$;{4o(o`KZ$5EK##Dnc?t2OM~`)X&~IFGGo$+kwu4={d!rCfo9cd>i5lbYNV)% z$*^KT1Ub&6@Oxb1)!v|^AudmCp3K;vZ$?<>8+0@`^=z7cwoFmDWHdfiY6>e&P2{BB zqsczcVM@Y2?GvQwa`hw;k%vKDCgE^7KP%R%`LOU2jEDQ!r$?Xx&x5G@boN7X?`7TV zO&^DzDnKmtutCB6xYZ>tQdk&PD+4y+74%!N;Lnktn;UgO`Uw(mS}HqIMNw|#D+31L6JWCbe#)qEp$-O=mI)BcFVPMz1U z4yMPayGs3~)7rf{>tEt{T7;d3zLQXJA|{l{DdaQXO$;nThp|X7CS42h40rWdB1ZdK z4|eZBCE|sKGx#$@wno!A@T%&Oy=(Ytq1e$}xCM2=W>%sA{N5VDc0rA-fb!5yILSzwUjjOm5+@R4C)Bc*PBx?Csk{F4GK1{uvvIkAD^7n^Fr@`;2 zCjk1DvCdO7x*G@jYM-OSVm@v6l2zwYp|d^BUt|0te|WhAq(8xv?JJC$^+CW`BplEr zW=1l%!}(+@uK#nrd4B;p4HLOt0&`iy^4&6L3TS91a|rEz3mYDI5~rw%6%OuS)!kZV zGav8T0>By@qXb)6=w6b_K?69rWBuaE0Yp|h9nMyj_8~-aE(?30Q2fo~zGK3_<(k1- zzNq|>!2xyTqo(BncT|)VP>*e9Yxg(iuRsp)Ag%DlSeRTig%#I?tDckMV=vNF;_Pbp z@TwnmJlJxBrwxvu&=Fj>NhS(LAF|eMJ6R8mNLtG-MuLL;tB~GjOeQCTk|bKZaqIocYJ3sR>vayU55oFc`*XyW} zrxnT2d5Ahs4uwcc9)NJB2NzoUr}j&(W#qyo);pAI%D;nSsswQFqj-wbZSKEFw~^

Y&z zFJ|_aX{dwFUjyg>Ce-2D*)T6lSXrfaduKXcFvg_AmU#8XzFNP#iPE`1DXO;_O3Dc& zyKnc`>wZKm6hz4B4wPf%3V53TfYy`WS z%y1Gp6VrDYz@)=$>d6V0mo3?C%aV_MD=V77w4O>pXUZ)a^G{}I6TnvUm^(~xP*dZA zLu*-{EYY0qT^nzE+c5TUAGT!NYDj}>aX%!dAix;&c|suI3hM=YsAevcH-l5C+}A0% z)T+5Bt@k-l?K8f9qnBP>YG(Nly!%aUsz34z=YbY35?h%g5T-gdd|JYOqY5Vt-yDr{ z{ws*xS>(Yu3tL-LDISc{9JYJjEdsJWWYN1|oC7bx=Te>6VtGtZ($+_)Wv~1v&AHD@+V+PDvdp{nO(?N{nxdgx83N?1w|(V5JaI1KXHPB<7*WXu zcnb{WqilgjFm)BGI#}y1EzBb4cKpPHfy2ZbcJHuZU=E&HSn4={zGVFNL^+y<{bnb4 z@$e|Z-Sc+#J~c8d?&IvJPxnCcCM|^zIc_xJv%%Dn_hNMEvu&((%JIciQ0$Hv)}Zg- z7jO*hMMps;B_`;w;WNW*Q}7Hjan+UB8W1EV7G=xpYsi4L{>1xEFWX6-X=D~=b-TQ!JX2zB<^ESu@MY)j-tg^t zX;4!>dg5~1v*LXI2s;fSBe?15T^}3Uk-1R=zpvrQbkew{3rtOa@IS;5RzPPE`*w$m zS|VTiQ7J(&bUl@Qoetzk8&L8<2};}9b>zcijH3VLfnv-NPf*me@fE_wqKWPP2*TvS zjy1LBQ{)w|E)&|@H$~#h?)+HNE`NNinY9&u z0Q9bHf0`VTU4VtN%jdHil0Nh@XB=N&qS(}bjNiWPF#)U0{rgJ&7e_h-h#=roL0vQujAY+*-cxm#SbwkLcIh>IX1V8Y2!D95H`G-r zsZsMX+INykM6uM(*?cK*xwB&ZV%PdjZ1Lkwe|W&ankOw6NoZdlA|_F^kV zCQY99th%*Bi&1IFQ*d3IyVE=l1qEHjCNeRkG^_0Zf1{{X#1}P^QHn_7yrCbH8q+UK zDR@M}8TQS--{m?JZP&Fv@+winaU6a^H?X|zv)?;1^4>vEa;W-t92IGyV%;2%v+`If zzl@*U8U3uW`Y0qnAh(>NR@NlY=YM9a~FV z-NZx%Tv=JS4?Z{dzcE$3@i^`T4cApRjKD#9MY$w2#%KwFj_#4e_qRcdtyU%CkNZpk zzeDURjcR?CVISk(3q+>1<|;A)02;|F?1jzp`9cog3#E*Q=npyg#28;oCbqdQQbyGw zYIe1|RX~`WsXbM9Usfa~6^HLk3i1oFm;qqo&+hIvyKm|qbZ~#I7Uu@WNIVFX9iX8o4dQ-(IPhiw!v{sy+!Yd?lYhfak<6|1`Y-$E%r@yYvq!Ey2>}I z0XAo{YTwRO%7#L=4PDS{Y(UJozQJ(iS1`9)&nVETpx9su@K-9i8Lue;mOyQXU-_~z z`ff)kY?LItNT#%TfFAgeP|MgcF(a z<8C`4LGC?&75?y2c=6%juFjNmofN9){5XVy3JWFUy59kgFLk0jN3$Jv;9Iua04)L> zTC8#`F&n{JO|H9DeZ!#7w&vArS}PCyHUP7CIF1GA%JFInEb^t*A*p%^Xik7C!z6pi?~lgtAK_70`!k`_UVGz^bhL3Uis=x-HXpy4sI*&;OT7+W zUlT70pwz*=7jyE_wqd>oLU&SCmqiw~L~f`H06@}C&b7+zQ?gMzWD z)i5~nPp1r}yg9ij0Rvfk4s0Q)~Y1RLl|<$=wAnEjfX-2--3tO{mTN|qJ>ZYiR}&-DZn@a#55{YbLQ)-piv78r z{jX(vDuGHCm*15U)OmyQDe2qo3;qB!|(sac3 z@Y+`Swym|rvx_XmcSligKXSCr-c$6S+JCj&p$`4_+zSLhU=sj|RZX>bGJCf`;I|so z{NWoxDG%0QX%dCK6yBCguOXo9N6`_~7gri7AKB}_Be}kvEAj9c9Di*;Q@`i3;%Mnn 
zU14}My1{wfuK5^-&&~dCR6lD1xo7h~b?xpxWrjP}`S~JpS=Z*)BS0Y+RRQ$iBJ*qR zz4sMnUIHa3wqh461|Rv3a9JylC*mU=#C5xyC#i9!0o$(r;~|0~gh{|;infs|C_mri zh=)Pes8Lpi()T?85ug6aIp~O&6wJ5GcL7Vf2|T-+%%ktL)~MdnAw83m zr0=y4?km&+B3c~Y0??S=F7(I^eG^y0zZO1c=EoFH8)3~Q4D8?Z#OW%f9l5DCQtfPo zSi?jHMw#`>Lz@Coauu^|tR4ur&w-VdK8Qfe&je7-`mS6-PAhojT)SZ6#>GwGl=$^t zcOGuAe-Lxw*blK^o2IO7n=U?E!5~|;GXtu6`yOlx?`sqIF7L~4@E&b{^NTSt0G}Q- zZwQkq!Y6(CoXX3;`mvvH8{W3DcfT7-gmJGpBJjDdoepNt>8)33jsIz}>i9^Q%S7N_ z>9Js~A&fFqSa3pBw1IsNTZ>E(Ra8naFvx4pnmq0B+z=Vc&5R>fby*$(hZ^*U7tr47 zJykkjONO9#k?lvKqFnq(-wnCT>w6qs7{TzC)Q}6_gpe^8!;L!IT|pU>m{cq=;5zLq z)avyu^?zuMq>iZ6=>@=d9R`e!0_J4SE9EL?Aq(#Oh@#{SC)ohfyZ(+&PR>JCD zd4;B9DK^M3%?B~~c(|@MxOXyryga;nuskh!Z}IIiHR3Ue=6g5F+5^;!#QX{~xKqZ{6$kB~v)n_GlzC!ok0PKT1va+zK>jOp%d^;yeM&EOCU3{yC_t;llQ?A{ZH>${~I4~N0X z$_ahI8sRvskRoly7$W{Xo<~rDje#w_%am)H$q%JqYUHu-N=`m|tS~1L_X{_Y@S6}D z?%5B(TDWNaGZaPMGa6mx3?8gs&;B7BV1eowkLjn7So-t#8{zUX&lIsc*LR~3=Tm;Z z4HVz4Cw3B>NB?ctw-Cvz89;S?@4ne!p* zXA(74{K=AH2D1zPf|Wvc-4uVF+1T4`(qrN&2Gi_fr0tC?&VN|BjIY-1fAE|!mffpp z(Mz{oHmBLn&JLh4@f@&MxX(=o;#kP{PqpiFwaI^b=euO>Wpx>UH+ z7ApkPqzaAh2`ZOc+^4MS@Dvse(~ysew>c0MU`{~|5P%ctB|M81GNwShTQWM9*rCbRP-Ha4UwjNJ=xm&HO{N?ZzNbIty z?v7<0Rs2JXvp_-<2pgF?Yhdx$!|~${!x0mWblBQSBFY~+v$?KIwV$Qu-t^W2S$5pl zKz79s9oZG8eM?`KpQ|tdQmtd+L-R z^fdHu_xvvq-+#lPN@=Ett!Ln)lioBGiuN8Pjov{(?aUdbIv(4j7Vthh?8u7 z(a~O2)e%aGA83_ZdT=Fr13|6(tsXkxfBzN9+1QI-HhiR zKN;tgD2mOWKfO?I3=b0sQQKah*=*1vkTOI*-`E=ieKC6bs_@vk4CNe+YbZaHw_adi ztU;I(!aW@@J53Th{=&p=#vvFh_||w1DpTQCThln~k#8w})p1=l)p4E`uhi?x5OkSG z7#$r2?11XK=~Kpbqt&mJ_se^aY7Jd9>UeEi1(=N>jHQqi21?Jobz<^q1)6T@F0%Y+ zj*61xGqFmN=hBsa#=A*T-7x2yM~YNmEtfxbA9ea^8rnwK=|s;LZRT5laJ z{;W|pU@!oR({7$OM6=tRZ(;i5^&&!7my$~)pScL8Y|c)OFz8#&B0Dh3qu7^VWAmk? 
z*poy9c(*XffT$g)c2|Iv4Iy=H(HD_3kgM1pK5?ig^#b3v|Hxr!@s)1t%3_5#c6S(x z%vyM$Oc2dT83QEo*KhZrC%YFnottq^ox7W#pHKg@kSu#sprWdTJe)!S!;e02VAP@v zZ+?HrkV#0SX!s3OoxfX!cP%`-!cu{2aHg{3BH>qq*G0UHZB=$(mRHe)CL*d-1W9%7 z_}LPH)<^rI;h;|0kLsJfyS3R9d<<>THxpjjs$jSeK3Oid1ewHuJg(M7sN?(AJp{Ui znVfwOn?8;1idX1V#QHDCj#lnpNPIZzg2Bq@*WFOyq*&qztrZfN=jN$ucBh3qm_;HY zD4(zg;Cyz#;lHzGl3D)xqS$!@`f73G^<<^5f|({Y0VDVk{P@yh5N=rLvm9DFx1izumSDt9QN zus4a`7oXFImX9SyBH+oc@Ow%(_9W_0T38wn&32F@c=1y$f6;;!e~|r$-WDmFsOZkixd+ml z-~AN7KzX^>`a`3fCre=*f;4H~!&auoH?%@}1Dv`%gYQ@b5xR9`KO<<3zMlx*5|hGH zQWnN-C7#^8)g?+(vs?P_J-i=vSL?*_x+s`UtxoG%3s#6rdS`o2LWLz}V-k(&kuqBm zj|Q`#U7#x%{0S4$?Y0p@vzzS;Gno-3HwpavcQw^|xl;Rb=y|B#=~Q9Jo^ZNRzc}0) zgfX(t>IcbMSoBSmfQThG3kim|Vu+broV;A~>o0KY=_=FJQPa-n(|wz5X2SOv5*$vZ zmxuniw(S}7)$fn|FY^_qEWT$S1TB(FScgc+Bx8lSG`s(HCiiwDHJ!|m8(Jog(sQOS zRbxvSr}#H0vQCIf8kR1Z#n(yl>gU24)ERZVuNkhD#*%sXd9}|enrM@Kw;u{rRmQq( zFHDs?q=Q<6(f$$1TTe@AMXxAWZF^m1J=5;)ZbNV;lIymINj0dhwH|h9l^ls~LGv~L zUAc_ldN*%7EbwmAQJ4m-z*iS2#Rxtyu~fgHT`R$vX4HEw*QKt{cbM7_H4zQ_us)<` z>Enx5EMKpX#9sXywfAV-9%m$MtQ~E5?X~N*$*E1(XMXQX)XpjVcWmoX2Z|J!eS?qu zE3G?alDm6Dk=t-RAp=(2nA>+w+#rv*;g82!zmx8(pwTQs^ z;6vVzpBfY*qTlI$`G4{BXuwwr-~n}9l>7N38{NMpQsiE~cFr?CdPBuPuZrR5kg~Ag z+k^1Im9x&~ED*kk)LMTXuAk^wk)|{n>a3&oL{{1iHQeA)+P`_d5%otRgk!nz#V4U5 z(K~&>P!3P3iS@HZ*M%s(6+nQ&%YGFH|K?I9>O-;`hlXys(xYE!v>5FV)d9U<7(v`BU*!!&FcTcTMUQYhWMPbvIH-mx0jkqIq)S|jOfif98tno;7($s3clu0bUraUr8 z^r#FRfw+p&7tO1-rfP_Aho=}-LaOm2M2mMe3Xb8Nn8XLABK6;T@u_- zI{T-RviEYR+(`eU@+Vy;?Gx;i7v7550uIAS&B*aQMQ*~+6!9??I{BFU^~deK6ht(F z4{nD#C&S(Uf_c_?_w${52d4&aYk(i7waN*cV8gS+DuMUoeSx=!`6z4t7|C`a(V9Pc zYWCfDRCsc$9KmmvFPgr~j>RrnuDZvrZnvACAueDO(cOAmyVGk&cc>X}+_uV|Vu||; z3oJ_`43m09^{>m*Z(KAsM?ap)UbEjm1B$@HA(?kp2Jf3ruxRlgZ3FIh=p|>`iUl=C z>KCNoa?vQDsE*ls?0Cf7!7#YXTTC+ESbFBNvt~kITF%n!ccbYsJ`c&@=~6B(loi3r z0lf*hiEoSZZ@W%@LDY-U`#khOTvaKiGnSqE!lk=0@B+Ji+_YIU3p`4}Ku?SlJn2?R z9&l!G;79C(&SNX)nzr_fuuJM#TymwIDY3&kZ-Ah2GGC7h#JrW!^AYCp&mfTVcmr={ zpFWv%ui{?zAsghSAsyBY?|e=>r#qE}?xV+L|7A*(bgs+D=)Yg8!`A;T3?HhtM*Pm3 
zg_{z;ced0<`EMlNZo?&&c7jDIbtjD`BHE>0G-)j3HP`B!of_<1JLeM$nwF(z;}#Zv zq*jy~?0yu{6&2(v$&aZ^4BWvlF(k2O17YPE*y#y--lmCAQl(zBXZy?AaQ^rI#5qnp zdA-zh)&WU1`z|6*L{c>`3CKjcucorcO;EvA;nr?7Q~c#iyjqq-SceC(-tT@TN=sIr zk>I}-);QWYZ90V3H)dBIO}}jk4u2@Dd;~M<6<%o;@HyTM#5*ce#ZI8+_)L2@wm45d=%*);7p&}|4 zFIKsFU|qZL9veH{HUb;oB*l!4!@Jayq$#^E!jUN4)h}kz{HV_2lKfToReq^y@h=je z-qszPo+)H$=U0Dr^NGm)vwo#rDS})so9xbHGKSE&n;+_C*+^8fB#JGait!9*EvjDK z4>!)w<(R_RAK@=Xp#5={U|Jy;g!7QDm~7FZj_=#HmqJ3o)kTOM^ZM%^c8lnldHUYB zx%ck-=65{Ids5+ht-fo{hIlDLIS@bt@~3GQ=tfaR?TE_hAyf^Qc z59#aX%=*M{3DG!Q^}x4A-eqAaAfXUU3^@?Av7ahn)n}?I=_0}gLkb3XZrr{>xbwOH z??TOg!7RisiC4)9te8vw@h}53zmyT#kj=htG{nkmMrf6tK}5&D;_bk0?lg71T+}mC zF~Y9Bt+X4~)6gRw*GjiJLOZ#mGsRn-5*7i%&BYA~Mm3(@5x+$}(YMFfbk*eg{Cxsj zQ8gSqp;28uf6_=18W71yYVfa~Mu;|)H-*iJhFD~jgc?TZ{L&wl> zd64amYqqmWHu%chJ5u*n=e+KbRY)lZF)%olEbx8b2xfrMsjfZkHo>`}N7ANJs(_xh zwfiXH zF0AJz1erW|M9XUW?_Wbg*~b8e!PJxaVb2Ck5|aNw9HBM;_J(PY26OS2MjeLM?9N}j$km&7~I}c87 zO;QWktU!fDe+a!Zzjw|&K<9vN1lAKORDi(Qh~sDKZ15dF=bgYIWV z*(2Z6_1b^S?zTJD2T8|qNx5Qm?YQY;61Cc2(R0w3f0kV91KDlbT;scCl4B7ago=E6 zaa83TWBpkD#QP-UIAsM#bi zdyrpda*6u2#j<4jvORIPLifjlHWi!l>xNtIqu>nzQMmB8GX+O0MI)JHWfSmZT$gH~ z??6Td-mR}b#*IzNf_)~Jt2Z)RVDxLN96PAx4venw2upNP<9_{+K7ugHS@n4PmD}l( z^x%li$xQ%NOcK47GVbmlmE5LZynXpZtjq!xNbzbD0Hd*cmuv%GozbQ*lzsb@q| z(itYHdd(6?4PeAUCdP*(v)2vjce$Cpou1DGT#Y~uftibGfA3K_qxS2k4+xyI)L;E~%S%xG09T?hJt_ zd3>$O0ETjea;yEm&$*w0Uso4X`D$)+Zq_v+EB}SEUr;gK%J{!0ivh`v?t53?p3ZPq zTPxT5{U;JunI;~}&FMqstCpB6+wlnn_VgYnp}>Ba{r&lv#zuT_M-F?~F0K2%QH7|& zvton8n+A&-A ziwr!~PyhhCaBZsFXt_k<8r(egPH8=Uosm2JxEp`j=p1*(&~Fa$tG% zfrO~?U4LkGDt$?>ib+S;jyeWGGUwkLBdUu>SS*Uqz>5RuW-{D9ZQcoG`p$O>OR1F( zH`<7Dt)V1+Fs1rO=!SfZm{qzSHR_MNpW$|CN=lc1Rj4ZUe^3B}9ooHX(_Q+cEToR^ zU%!V7Cp_!p28;T1%{TRzca-1+os?E!dHEvDXnySCSwD zL6&ZU3JYAow#Wd3cSRifc`|l#J|7iRi8mVj(*qlj?)5DlCFJPM2mZII>n!AFHEs@6 zSWbQ&84(uYVxm!sNLOuFX6w1_t_YBxBYzd_s(Es6Z!@U}BbY}YPYK^<|8bFu7rUMSqcL)lt6aH%O1SV01HyH365f_UuOf9FX zvY7kbd6xz_oD*}x_Cd4p1$c{ASkrbsDN0omyMNH~LVz=gQAse?R47t-$mu#r67w3@ 
zj~Ciz{w9cYBPg>4kE%3dQ(`u~NCZ&$hKPC>7nS|pJ$j>v8=f97hm8XAwB=7%Ux{ww zWeOSb>%#!}yZ&IEaXxWDTToEgUs=OEp6Vtkb=%L|4tTo68?U6>8&}BMEcS<4r}n-* z#}AqKrq$zfbWUwPPHy=Zuh6*T^@-ynH512eTSsa^-}^%|l-F%m9=Y30eMD?lTG4?d zsEz;Gj`;kRw17hLV}GxI=mUjW21gI?5@EC<(W7&HpD|opbYeyvq)d?tSiTLyaDlg@kF)$1kHRf2#{Q`-kdWydA!tL;1~#&c}?g&Tw@HrH4!XvVi(K`Kq&j@ZRbAc zgmwtn#6KI#`xB1x1awUhI~t8nVxNfM%kM)BOetsxhlVWmOm!N@Ca9#34&4w`F>hzR zLXP}woL^K`)Q<%NCp-7cedN!60ok;u3y8KgJ zjf5oTF)k_rX-oJ~i1az*Ty|a3Z@EmTjlA7T9T813m?HB6Gx#BN^EgepHf!C!L{-J= zi+C9kxKdfr(*5=SqvsoV;ImbD$uQ1G(V*hqakeRYYbM+LwlhN^XedmN8h~p&Vbbnb^ zD_V7^F^gO77eY-Ny>=*|Fe3%dyZh`JNpETAoBP(gogQBQz@=Tw&ks4q?){Fas3Ze| zk$4H}+1>d8F|SSK%O|uJV~Qry(A zh*6XNC5W-ruL5s^CatUp{ex&Ft5tUuN@!L(4DozqofZvTL*)Pkt_no&4Mo+r%guFB zy85l~@+mwIgfneA?E^adD@&JZf}J24V@BTvL1#yEik}HpJeaEp9?n>@2l2Z2q)1I~ z%14C~tA&BT`L0A)gBrfDzPAh9(xuay=hoUopN|OE2u63adC@*W$6JyXNyNk(t0W|; zPg(zDhn~a*TGqh7sExMf8^t;^tqY(`IV*H#{W&7+b!mrz>oFfWy?If3UiN=Zzr+X6 z2FF!OnI+}6Y#$bf94En2L-zQH2lii*-pp(_Z@-r%YJA#@$A;Zkkw|y$tW+bvrqeTw zX_zXuF|uvs&8uhf=i}zCq2H=v`k?8MP1C@|)jtC+xhp zzsOjh&b{8IsEBpJICNpmji(-Pzq-46d`=grCXY;scm&}7>mQ5^#UR~&G}|5Y<%#KU z5uSa7H9{bZyNxP*q}C0b|0?G|dZttX;)AX zfm<*s){3J^=z#-)bbic1`yl*l|@F zRAOtv44$sNdLxl9_@a(-ylvV3&|ncnkqxMwm8-z#f6uDs-}Wz}l16ONJ4PuKVz{pM zEkc@VRHq6brl%6}VGx|As{0y`iI<*_T@}8y**Iqf{*T}5~dbk%&+n(2By%j zUk~xgXT15m+45%)wbts=5d=jBhSw}zfye+go23_-lSCGvdu?ORI!7p`YR21q_NOtMO{Qk5KEC4k&4>^c))z}rrzqq%wL|cZ5zZWU|rjm@v3 zh9KqXdRb?7evoX?qk$yRRZ;(U>}^6xgp$NUEl71N>e*==a)K>2Hj>fmGtI|0AD8Q% zNj;TT1`jwZ(q>K47s#J1H_F{DUSJYC+)H%r@@sV_pXjHVWYPG@|zcO7`ATBP+9B!*>0gVYe|Gh&?r81MJ**Vt#Rr zN_3SHr7Tl`o&}Kb2!M=9$j%;pIPFwcQ^VFf5PB+B&KU(;jUeh79k7Hg@6P`&doDQ~ zE!M|$1Uwkdi8z3F#2V$IV7kXmRe^$f-}A30>K!_@Dh3$}^;7|v{@cFk944bIV$yd= z1})?d$GnE`Y9NqfT8{ntk3z)HY#;TKK5>~RriG?|Fodzlv;N6FA2}aGh{;W0@sknS zlm=$gm=wA>)hkh!m8m1QJ2EDy(ebT#VKXrwd9fdfomvkr#~UE772a=SiP7rlXu?QN z{bJdx_oVt;s6DItQ{U{XIX}~2P0_*}gn1KU{c|77(&Gu} znfgcid^vvE;56aS_8|^-3|T=$#C#YU32$)7@Fbm0C)C|K6znJ-nlm71v85L*IgzAL 
zuh}^w+_}k6w#d$W(j38c0-CVN%)``e&fc?wmuY8guj~>LLXr65e_iH8nXL2 z%>KBa>#P{DHoU#0ac3tYC^B%^$kt#MlasTBlXY;RPnPw`J4FF~V|a$ky(K;Fzk`bW ziH$4V+NBy3iGQMLTbE!XV|{&HOkbbaP}5~$Fo_P}i3cC=F75yxRpD?nN0bZ$1Hq!i>{nWnH#9L$sAS@600$h8)o|Et3<)0sAbi+FL_^EX;6eIlTz6yB z9TE4!GU_=RYtQ{Pu`+!E6`D;)yxx|kB@^IhG_)pxs!8_rgw?tFz(hv;9BiQNdDcBo zrttX#D|0x971tbBFv0M{Ub3t{i7LI2N|_on`d^dN-^uw*P}Y}(N=AYs^E_JTw*diC z!UJnTyhgzUaW!sq3YpfmrA2!3RrlW~An%i)&MNJ7@h=yFEH}m(B-&jfBl5T1CdcRvLgVIyHf!hn>j7``XN`DN2|SG-W0+F>zD zKd4BpaFwf%3=+f^xaEBU>4;3e`C(s^+ZyhM^7?S*z75@b2rvROf`pG2uTvo-7Q~Ib zsToOk$6_dG)sz~CKRg|RXCy%Py{i7?%=VrqOzv>sU2SKxG+7|0D%mJDg{sK;u|KxiJ_X$l6Rq4{cjQAxt}j1SX>TaAiY(!PE2(|M2Vi9)-g zE=QiWy)6bLiGXIkx;Ifke4}d>9mJREBrg#{C@tG4B@O0HUPS*CMEmryMz5x9Eee7m zic7L5{Usuwk`vmMdJEA2riDCN76QTg%Sxxj<%nR3Dtjt0UZ#I(Hpc)!uKeYkWP;WLt#Y{j%ndd~yI_0`9$HK?j8 zZpwm&3?1&7-YtH9N|?>9sW$pFfJt%`Zd3>^)nEj;Y!Pt@iDm$YPvvnSoGMYay1(2P z0!j77IfoxAwYH4^Iv(%sMS=VS@Eky83j5(@Dap)L?cC*gdU}GlWg?&yrW*?SoKpkF zx@A60uAin=IS3Ho_5)(?{4E(ZcYR-EeH{Jt&Z7_1Ws$Er3x~ji*G^A2T@&k$2-~20l zNq5CE8{6YJJ+*_BAA56iy{a|pENavL#D~kzt9JJf={*7p)?BmOh@}LPO|mn`AvMw+ z()(7ocQ{Lm+s98%dLGT?@?lnRbYvnQ_uJHH>vQ0uC~bn7w8@z(cWz$zftOVzDS6k_ zgD|JwGDJyflTNTzI{5MBPj6ey@Cr}L@Z_*w03)a)E0#H+If9=30|Be)m1umj(#lR4 z2dp5U!Fz-QB!URI@BIg&uBz?U0K|e=z#XyO(8*V^V8Z$Kp9vI4`B&SzpXEXqPfyI( z8>gqrmX;%E6iiGNt5;A{Eh7sh;QBrz4F36ia2n+C7qE~BuB1^JVL7Z+_YUoMhrPT4 z`tSYba8g>(g4^@x28Kpq!=TQ_2mTvDYhLayNBJHz8Q%3r+;H&>n@Op9z1eVVCg*yp zB4t2br?G0Mo=WBi`m@5fZ^oaRzbXrqGJNQr>+j^zBL=VfVsR;`cjnuWKGXy3M$5V2 z;2NpGq8BFrfK5A=xBB@~D5^uM$6}f(Tm4dLBk9~b)g>h{Iv1e!27hX*aj3e(^uvJ3 z6C_gnTXJ&V!)zCQNd~3rnh0+yj3%@M%)OWm!-`jX_t6I{pY6oH2VqcXL%`!8@;ku$ z*I_{+Udc~UhoGd@R!K^BZW6e}cDf|Up1keBODWXgNVrJAdK|B0oZ9i|=l{?YXB~78Qwuvdk zI#VE_qMGlG=RE?~T2pK5#oca>(Ok!CvIwB&ydFZkV>!FvVNu8un4v}ohR%^^P2F0% z+Qvp+=4!*nj;kuK|9dnF!8>t+kDTUBKyxZ)B7d47S{yjkpZE2kZx@1?0@@!qET0I# ze4cJXv!7oSY2N|>`TzP8H-_;CjcinTcukntAY`xHzlTTl_uW<}J-jDSb`Q#bz9>KW z+GvghcJZ+H{4O!49%*Q18v$6W@a@|ldyH0c4-EFr=8pMT>)2x9k}+!8A1ylw#I%63 
zP#X?zz1H!I32UR()?FMuZhk^r2jT!-PLylL-G$xHKjj&i4faSu)L<)YIR3Xu=m_s} zyPkKauXhymzdy(v z*Z2D}^b+*|0zO29`C)x`yZ?LPAt#9{W4PkcLS0uh-*%^Zw#$Y0nbgF<)<2U8ml`Es z*OzBdnTa#$ZWSXY=q7#Jbse3!j%o1jI2@#l6m1?Q;rWuqAP5SPQA zTFeZ1&b}rknaLTIE2S{9R_}`oc^-WV>`+>}mm&2DySv!E02o_L92|*O&*LWlI}W$q z(Kl>58(L)Al@0a+#cE~4lTF0L#0Lh|*B$9~%C9{j#$+m#OT#Y#56vjOGXw^CANnhb zbEXUaq=1MLzqb=rDBau(KL{<`EPS^ZwO01hrpPQ;>VDv_u9DnM#y-< z?aSHE6i=z^@$uf?-U%w2rAYayt-j7d!tZr|MFD^G_3=_a zsJHoiFX8gubawb;)@nB;17Lrabx0g+!QW6P{cep=m=w!HB)n`0#<9%xUZDUdvv z`b($eb_R*DPx}?l>h$t8Ez1f=aCV>yj2E$l4u!q9~9 zVW@1}WfhKw2&se*7EPZ!K5-EpJ;%l-63$T$?3HygenaiWN;tTT5v)O_NPE==t*tVX>2h?>-}^^ zXk<#n8wU~|bYlmz1Mm0s0(4*2+=k%5Vz=;oXD6-bb-dt-F)*nA(5Z{itk62!h$Ne= z+l}LQ1DAcg(aC60bEUn}X-!E*1#?1^S;frEEOF>Bv7qO$y&wP)fy#V19nTX~A3Z&i zatDQs8gt1S)&INTul!t)gk*coIT8J)`1MU_rr%2YT*)L>WhiAtPpffIZ2Tb(s@CnF zq&79YNzsAY;tt{dG3G}-Q7mSjy^&MVlj{?x5&4w_*>&KDo4$X%qYW_+`5k3w0vN{vt)c-FRyWr&yrFLDh^GH!$kd^Bnw*kf>O8Nm`xWfDT z*~;Gz_Zq9J;b+GXr;3(HqC1{resk=1E3hqfc8e^de>2TinmuZ~s%51PC%R2@T+8&E zj0U&=Z6~qWd~MPykWTZE{88EjCFXOdGiA5yHfGjiO)J&{=l zkxC1H&8Os6o7OJ}UuKFvw{QqC|0dTG(VPNnUF}8zL z3-He51Z?wS#f*rI45A!=*NbyD5MMGjQ6Tf$Nl%s+s`2vjMxc{`TfBmSxiam8J_xt7 zu&{_F;vKzQY`!F4p+{ zbJ5gWSnMNxq4BrK&}u(e`fA1PTz_L9rs3!N&Bx54XnuclfJT`p=G}D5`(ELfN5i>c zAtU!7X+A@aBxRnmGZ`mz{x_yG&wsR2Z_Fdg<9!$#iLl8X0v2JqNJ=|!e@vHur+2O9 zs`9CZt>=85T1=%>QyjjK1{vNVwX2SOLsti4L^2i@)$Qd#iRpg;jeQ~H;MA&!n?X&R zKeJOX^>*`U84?{WImLLER^oF@-;+JW-j2706ROX+J6*3ndn4#X+IAjIz^0p4@3^81 zhkSjEGji&P9ElFO5%plSU!qO74o0j4L>6S$%`Ku6U7L)&%L298B%0 z;@_NO)2)lMLwYW1d=p?T5$gWeQ8ySHDSolkDwvD09xYYvVigcElxzZJ{+HKhd#xkj z4g_*2_Ab8aDP7?xnL<8aIju&WH@e=`#s3{KIbCfBaWvn6%=YE9;~^eIp0L|YQcK4Z z52`B*TQr>HOU2OB(;NR64E>r}1>)P%=(4>(Z*)g;x|%D({v;LlWpiBS17UCdpuD;R z7SeY@-fY@c`XKjrwY4f>vBqR8DE44_Xc4Ax5r?NzE_fOTI&n~6rD-XTY1V{`fI&4S z_s6(KvvG6AY=xo+T%jlLr6mq7-0bpQx#UNSA-r1wNFNGNEjPeoq()2nyIHY0u}Pt0 zXBEr5Zvw#|K2+iLs}v^4r>2A%h)ic#kWTYF}pjc;OB3YMq8fpuh3+yFKP*Wb6r^UJ1YyFo5=L zXM210a_RT|85+Id1T&ln(FNQG-Gg>omRD|Wz2x4W$rlp{WS@jDZ_m2gj*5-BkfA5* 
z=)f@lHegG-_Gr#%Bg;#2RNF&$TC_KfQ#?##CK!fWwM-W+fT+OJKw+stak;cjUw?W) z)_W5vL2l3@O6?iChE5C!MJ?r`D!RY4y`g_U1jwr_I>%7XUhjPMg zy)VJI6e+D#UMZo_Tr`9l$rkj}lrWS@%}{D6OorhKAxT~PeamkC0a7xJEgDFeou1rf zHQ;K0D425=y6_%Zazi5T9zqv<5ITx=gyO}8u1#9NIv|x(3;bI4;AIf-E!}p#d6!h; zqsW(EU$>YsESv6`{-A~bYP+U~YX9tuwN6>}M55|7-ah@t$1xD}6$J&D6dAk)S|!ly zJG7RS-02Df!-X&R683~|X{CEn@Y8vY=kp76khN4N46(5-O>Q^MOinKkD*g>n)wq|b z=)O>=u*O8mR2yzOiI_*#P*?p3E6KqNP@VmO1#^%S`!F(|AH5K;r^8=Eb0w~|WUjP~$n`uuJ(IRMK6~zm%o`^{OG5g}* zI!f0o5ZNya{0i6onyrw(sTNKz=Hs6LEpF&td%v>3BxJtVnIEz`obFJ4%FC%!i9>07VtU8{Y2 zTXL;KU;1og%F@Mhy))xCwM5Hl{l2)+fQNB?;ZRA_$O3V$H>yU0DAjG^;mZSqktdi{ zIzi7iw$}uI-%5!GelGjIK3P;U@aO*fBNbVUY-mbK)aP$0AR3VSr7gWXPhxZJV4xd7 ze-iHJ%_zqJ$IAChS}lY{INB0NQw<$WN1lwndA#;0UlE^ zRApTSg}EvjK4$A0<;3cm-wAuE0g1xrw0-lY*-c7!=Zk3cVTbKoRKl>H)$A`k-&Eca zF*h_HvVhlMyWX|qDmw-lo@&A3n~A4;NG4Z?OI+LwUnAEWrYey`8PE?+W8%}1&g0NU zQ;tlkv6~U%P~8hy*Eimyz9=W%Syc0eGTt?W6%uu1Vn>gRpJn~vR*FYHlVr zpH`U}uabBd1-cg{Fb-AEuPTv==MH>SEi{@LMRN1P|7^a%kyogkxxQSdPQd=|vhxwO z^M$2nW4;#k@}w{CQlhK2l08=1gc6Gct#NL;nmjRTY~zdQb6pM{(D@R8fFT09vDe0F zh_zF}H$eK7$6g~Tjp4lTv`J$4+e%Pp6e3r5!&wA9D}EVlg%(8G5#$17hG|t|7i1rL z$lJt5LfGiI6m95WBoLtwWTxF=TL@vB1-Os9=TJe4SY>o?p>!F&zjV6q~%Y(x)B6YPt1d z_Y1I|R{;ZRsY)DV2CztNpBBxLqNZ`z;q4d0J{~u|Sizlw!ctH(wY6{a4%%kn+-Vf= z7JC~nq`-`ton6yL$+O75%sa`;oa)TqC$Z+sVjY6-<$tyM-Ake7k7<`Bf1!ya%EC@N zB-GvCNSU&@h7+k32&YG$(A;g!=`3pEW>KZY_yM=we-UVszDPqiJ&%%%jMz|A(J=I_ z?Ul`5M<}dOY3B%JkGrln7^GaR^e%2VuxoKdQtbZRun5r}m6tzTOyuN2v2$_%#a2kD zr%;X)PBw|1Fnqk_kfrWYHtRz_k^>>lv*G5mVa!1`i&fp&B=xjyVxT6;Q%!^ha+W}? 
zi93CXz-_dM0C?EgMpxT~9xrW|-Gm?@~Z0yf!Wj!-9Ge`Z$(j^o2k1u2OcKM&!-YX1G+SHGiCp#CV@u?1T z9WTz@23AFW#ZK%L^sXU*^E_o)HZ$~7ER=71YPfAzBx3h#G9MhN7uD}H6k-Dc>}hSl zL%M}r5#lL;4zjwP`wG3{;)Qu(6H@P)O%C2lkP>~ZVA~EagZ_D~HZY{=l z50(iNj@ZN#hwK|M=jrSH!Fk}$?=#+#cB!6kdpEdH*VOCT9#}DjMK!)G2-mu2h>0KB zwTj^|Cwq;7_y$SR55>=mRvGW6*k&rKjXs|latF~M#${kPoXjruKlG0@HSIC)!w^j^ znOc7T=Z|EE+Ue>}b)yZHnt)viDmnrAiP_aW>>B1woZ=58%+fO0<^_#b3B?@8>+_k- z3UCB53jBJzs};i|;*cQ!H*-{E<(C=2U^XG}{@yG(5FZz(pMQ7M=9{`?pS7T!n>_xJ>ZyU77r* zH7^^Ho2ek&6pvNdfQyTX5nC9>_Y$)%)nW0!cp__B&5_2n=no1W^zDuv?%d$s)Tz4I zmCCc%bQ>co1|Ks34hDST*hCtQBAd#Y)_l>;P zSBH8i5?i$za~4w{DVK2I&ufJ5d4{(WD`)CE^!GHsladYaXVy9$&xbWcH?&vATlrm) zfo0kJS-sip!jI!qo-bQYLXMbNa5L0LWzl#I+im-gG+-GSQ>NdVkqHN4Yt%J1_x+!F ziH!4nyAdKZMAF~uqz&hDDDV(`(V^k%WG$BAkK~S=kNkNF6(jzR_J6_mAjt8jq{VZj74(W83@e`Qbl)Qv z=~k@iVP+CkbaCN?>rSMiv?|v^i+BfkdVV?|K1|&#;C^StkCTW%Q|(z$0`OdW6Nnn3`s3h8RoShC z7?db&{=WZ>AWj}j=&=R+56~{Swvv=3Ie$)TP_Siti~pjn%5Xl~q0PqtQN+z|l)L`( zY#@sBUY+6l`w*SFFu~F2w{g7Cd#eWek<%LN|J7KQEo#I{Q+VeW^m>qRN*DUm&_Lex z#1Vzh>(+?j3IM66GX?&JOfrjQQe*j{mC7SQE&C8iT}*74Iy&eaGKY=Jw$POIDW7pQ zXv)(G6UxT|0=UV6vH2}5wy`PUt< zq6TrW^?U8=$FgHU%{DId=1}h5kkJEs(OaRgUfYhCST}D%-wXtpY1lHQ9dhXpuixyO z)vkdFfh@BUO+imIlr;>YA`fynriFO$Hck!^>twa9nG>rm`kx`6fG@a%A)%#Q`}2Yk zW>)x7O4?zDP8PpqdSu%wdQYI&9 zc`##RaBeDE7$jyiUPj81ca+Vua{op$@7MiMZYTMR7B?ebIV{EBgM)?M#sZ3W!9Yz$_2c)gMX);yuM02_RGkQX;3;%&J??&SruMB{{BKJb_?$o zZ$-PVk4^FRZ`OKaCnM0%{Cwq#@)xEW9P&l-{caBH65g}mZVQI?BVshin@CYeV zFv_9M;AcU;-xCI3Tdx>IHSP4WQe}C1MajBPXZ z0bRrDBTS_m_usDj*C<_G8#2R{OJ`6S5w=3nyGa&F$Qi2AaHuDT))aWmtou|&MDLq>5BT{-e>&w*rLc}w z2IX`<(NZ!_73;j0W00fhVTRLbEdN#`;6Z-uzvRNQn|T$#m%;);46y$}4|6(M^ify? 
zOwGit#&uqXp_<1;rDa31AB#K$98!`YRvKg3lr#;V)f9HWY-vtLUMvZ<9ILz|mTD&8 zUhfW&EoU`4O|~fHbwXUN%@3KseF@t%JzFQ_mgD=e9MtK(7jR!Do7Li5|9l@Zte_gq zZ=V_^mI?YH@d;dbG;9{y(a}x#rzIyv(Nh3s9z6mW9@)MKH13hD^cV%&_pWo*_!FO1 z3nV2=*iqrxQ;nKb3VGX~JTI7h3JOeK0!<1Q+FR-0JN&7w*PRezqLnWEQfb6$c|q6$ z4}SDlbEd^=Ecl!$ps9#FkIPqsD7np5L0CFpOt{{e23zhrMgA!U}&LZu>@yqQO`~9gdnw-d}1qQzta*5jid8eq2N{OqZCL5^8`& zpm%S!pqISSPMbpfV7#mOO&*9OFmULAnmA+{KO6#g51|7(LpZ|~I<3#W&18%8rdF{O z$=|pvj&W+SAH^mivdZNh7(y4ram~x`;xFm>yXIkawI`R_q0aIt-+iR%6j_Lqj+i-+E(+a&#&bZ%#J7hf-qp6y1MCJqlf? zrefJ2tf>}YVbm&%U}3ZzghaHrrD_m&sxW4Fs$2Hr=*BD%(GHaMzZ1AK2`5=J`a5cZ<4px&b0#O#+SV$!SDk@jnA*GGEQ6TxCkwqb_@02F-yc3* z8Me~Gd(8oeUd->a>*K2uVVweXx(&Rm!|L%?Ug?5FCuRSk@HUj!9U$vd+2|MyY{C|( z0#zB4jWxU+c@)sF&Of6QPXDWYxbD53l=rU@3@IZio?NXK#xbR>d7O4tG7yxa3&L6` zFKr5z^GHhtMV4&pV7=WC5=PLFzY#twa!8GR+Khw&LyNdfXZ&VLl>sxHEHINoWe)Mv zg~@Tte~COyF%!n>3{2>rtN6ttwxXl76>{r$O7O1%nwy3 z{ZH&WQyAxI`m@-|>g!?(aKD>B9%!DmR#bS8`uRW(lfP>vvCMb0r`p?p#;gMh=3DaD z)HJWkw#l5J=QMdOONE%|=ue%2!XT>p0e@_?4{hs&g%Z(Y;;>f{(08Xl_VmYm?gZE= zWu@&R zm-l*zrKqgpfcfSJua_q<6yN-C_jF?#bh8ZWwT6}@sE3cv&oAyCcov?zLn@&0NQz4> z`O)>K_W(dFr((rNW5akHOcUM|>qXJZhh}xA(yPKRs^;E6c`>PWl)GulM!(0?EYY07 zBVXiT4kYv;f!>7Pn>ZXw7_!SO6FLg7v<41Jyiv66FkVR#Xf=1u<&@(mW>CMsBjlOZ zKscXGXf&3c`P^l<`M~9}iH0V*=-o)sDnul6D)e85@@7ZRhy1P&dA-J^kYQPBY{uA= zFg_a_%_>}YEVRPAOXP#zVw8;WJ?{CzbdkZL8RXx_R@ykw?0Zt%WpT<+u&r8#m5GZX zAB^yJ0e7tgZ*EL29lD>ZtK_o*;cIn&NPRK=Hl;y!rPeVegy`Fjg!*WURHzF=zNFDSq(4zZNjjO$+)@&t7hS!<#W;>A*stgdwGW~R@q|} zbQaZkq0+*A<3kufcdO+Gmg;eC<_|iQM}GH8HMwajhiS?@WXLJSr`|&1@ejz4n(;=m~VBCbTW#88MlR8#aCJC$H$AYM}6h~nFfcYbFljOoKa%6l_O4z zQ^#XsvV)d{3hz?avcUVkOhd>Uq;LJ_+aI3}?Wq(mTff+@H+)XQPkAKp!azwCjtE#i zs^h}`O;O`~{4>MD_0|-msKUO>%Vv$%6qYI#MA`4+C=Uv>P(I9la`AI>amK_L;CJhJ zKYxFfAF(~c8=l*A$Z|$%V9-nM0%9y16NY-Z(}Y_C5#F^MN|pj(?>u|w<=0Y8z+kE! 
z)4Lp>+ObphjybeGg(VbX=i6s?(=*U#lRc{8{8TKbgF_y{0c%Lef7=4)H-;?8vvx?B z2oz$-OejtY!TRmdT6(hJ1^TC2v&f!HC5W5LpWZ0vHXS%bKkM5eCEs0y|9d(Hl1xX z8m{nrK#S_`@Z6}4{nJswk0adpc!$IygYK#M@T2qO(wR9G#)=MDyV?JW6)8U(OChF| z%MVr$S3~SC59uhoWxk}t{g9-jnC1QM)?9y}Df*y*>(zO?>0vN?(Y(9=R^w$CXMMHd z=G?p|sqN*?+;G_m_lm5#-Lf3P4zE%dLNC46L0Nfs>oVjQq!iCqP^w-ymZTgV#Osi6 z7~}vc>>KEch_n1xJsbR*xwgCv^1fZ!2;*MIeBc*#Ic;r1Aex2`)T6?V*TKVmK`3RH zIV60mUO7=sz^K2s#BLE?I+idfDc4~0sDcs{co`e$nyq}EH)#YX-f9`|U_tcNW1DL8`B953WI5wiL zyD{<&ttTiG9p@@i=zl#v`>}i9rcAuuQ}K?nD|4TNQR5O|QWb*0RSuIhcMQ0a2~2eM zDg=Kca@ID(g9eMd<+Hr_>?hZzSwyZ~=jUCY5@h@z2}W}|F`TM#Hm~5}2G(nSLmf4% z(Y|53QNMd-v~1-9KVIx^Bnz|7U=U56Yb%3fMB zRc-ah1R$NjkQ~X%2Y&zgla?o~K)^!ovp7W+M0|thdUwiME;h6)#2SO$yRHtU^nR{- z15uWn<6ZyhK4e8X3d7b1iO?SK7SRg&Pc3rGW_^r5S%7vonOl zqinBlwXP(J(GVjs(fJ>_9~sfe&rxjYN|CStTliCqpd950p*W3`-a`q|zdbX!P~w40 zJ+%lj(INiszX*;xZu6tOJ1^qdkjktuTOa;7g8P&2?%&6bR=;<1Es`#J5upnLRFu+S z%*NOJH@{I%XTc_O=rLE5X*n`#KNM7qs{SG>%0XMV)$F_H&LOXu!e?)-hO^bU%z&34vBLqc*qBpggu0$%*U}dd?ss@ z`T^nIA9-1!m|}$OR}0R}r1ZbOKcSUVk#p!MVzm^CfA(u${bA>Jj410+u3qCN-JR_`NKqp4a7)a)3MPR-dWLtvh1HS(F$e*_B)K;n96VrtoRC zpwdOivjwKg0ELRL$x^yiTRd#fr&Ls*JGZ4OdbX4*J1hPo38HD0lM1Xjif}!?BDbF71syuAhXROBxBlL>OKxA)&pBFum6^*Und(xmd%1f^ z@b-=hVKcTG?(Ho@16zxq&CXyGQ^dU5zvix88-E*;x~wadTy|5=!3)(#i1ht(;kS7j z))F}hyo(s_zQGM!WD=gYr3-~!QBha~>_3@(h@=Zb&d=-koP>}JT21Fp9dW}|zW-~D zQi>&GJweIQ6R^@uQlkz_Vt}DciP?M*$XWWd_6dkck`;5&NCiKC9>xb2;uh!5RuH`< zhf==sj&6$c-S^l5gfdbl4Oq`eHI#Sqo@5P=i+Qh<={+zY__nJQF>e3p$J_t z@X{&BsUrKPXypGi%CBJysn9MM_KnjYZk3HEEVLds5TH;~B>c~K9BWHM!8Gmb&!6hw zlDS8I>J|*L^BY6&(NL0S^uyurBhJ|a`K4R8XjIbPdi(dhn?Eo^FOMY%}0KJML~;9Z-y6|jFa1N*6_;C~1_ zX?v^ff~%{m1LhntWIikYl3;O7f*=+iEKyQ53gL&I31pzJngbpW(U?==+Rf8H!z0DD z(}&693`cL)Ys-V;vdj)olBW;p|25Ke6amo@a!dUgt3t+kw?z=?nJu+>X=ipx?5KFM zTvJhCuEoO?YWslkC`m|IDtf8OB|mTba$h{^y#XV#^20SZw}>6f;^%b4s4jZ7GOpzB zcRSJrSaGZMhR~GB0u;&7Aw|O|FcF$zk;^tC)M_R_UsF~Dv#s6n z@q4|=Z8F=UtgZCYW7#&S(I0QFyS*{TA)21~ZOwH)a^?hJK9=3>M0)`?M9F2fr5fVF 
z(mgp%R(Q?%cq**4;9exr*rjs8ZM-&C<4Oj)QSTPE*D*X@~I+e02V<$4kqZSK}Bmbp! z8>|0-x1TvBJ;fJ4he+Ah#a1zqH_Dyd$!X#=ACF2e4x}Fksi(D(K|PqLheIIk!lO@Q zg+hIX%P5{F_FU~eMQq`iH<+A6HYe@c(|;c3d~Fi=g^AqW;+ZnRXwCyHnqArp16MksZXUUsPX7sTyQM%bpWsS z&SAbu)o}qa=w!M1HDx89L=Z$@nG4w#5zg045D0|E10n|S;10YW=>gxvv*Y-mS96Ha z0`0528DN>L^v-#(&#_CVAp@TkD5=2w6>M}a-Zc~eBAdJQM2Pk29z2SM6D=58o&)a5 zd`3$Og^9$i>tzb)+QQ{ZHo>>Qcq%)61>2lr_~n%Nig>8Um+|iT03(aP|E<|8i!N*) z>DQSfSZ%XxvWaiJ5naH;x(8#j3$r>NMfjs#@#tpTZqB(I3*44~oV5Z}SoE5)f9%6kJFSH1zA zQk44-%k!JRoj!Xvkx)??STZtm*~9|XA1FUkP*Ih)R3ekZ*C4lwk;8h(1nD2twN~Y~ z=}X#kXk6nGz`C$H>f(&FU?rgVd~Dj!t(VwRGBWgpXTc=|w1EDliPn!&d%#2MO#g)0 zxqK7Dclw79-Y4a(mu8?Yw1)WIb;!r>M9=q3jI6vO&Yc6WSKJ@%VIHQg-MB93gO*<+ zy-n7|{+)N#xR4xRrHKbW{(_G{m)ge#TcP~-k(XeP+7S5d*0#4Evu_2Jkm=yV&`W(( z8Q+c?=rthX#4t)r#u}e>criR5NfBUND|edd?yTd*EY=b~6<-aQUzGrN&8GxwMpxdA zL#BfZuoa}B!NY^gP6YIw#x!c^sNBu$& zDA5nEdL%kslL@rRLJM)c$P=e-E7B8!OI`39?Y`bVMs4;(oDx)WI_F=kacKt!-%rzC z4p$Ig^yReC1@l_`hFAuzpWD|036^GG`T1wPXd#? zO{{?3L6x-Xy{PHv!xDqRvU5?8LzOf@K*pI}c3gsef))SUn!Hsg1GIi$QIZytS|W@3 zQ_@GrmL@U)TI9@qtmN=2uoDX$r$(d?UxA&10&FmM(!DsG)g0i>#{nG3`DA=b>ctH) zAU}N)@~^!+GU6;7*J9h>{rH&vk11#En*WQ3-?06i9juX=>%*YJg9Rb1HtP0D-sw5` z^p`V)48Q}DoJIQPO5@PBgs4qwH(*XFb$Gob#by5Nuj<`w4yie|-xl(a5`fw!)oh&D zG{q$t$J602;SeTVPW-N0OpT;nk*;$o*D6QMtqEHypw;~%$$JU7rCeWygyNy^?zWq) z6N*1GHl5KXfPcc6jZl_RmU6c7J0w_xQQ^&I<#Z%h2q0Fa!;bcQ#T;I!2#u?boDV9N)ghPZ$#3Bc04C@aaEGBmrkMxy^QK?##~mwuV`& zugHzi@fFARr&C8GB03wIJKKpjOG^_(;a&Lc9jNUcFlocSe^!C#Ew{){URox;xYU}= zYHT$mWmool>;sI|T!492nw-r#w6%d%cjvP>yDL`}dQi>vk#3TmhT9$a2`tcwOw211 zH{Uc9pEM&}RiUwU1-JiEY`O`!dMjr^_~uQ}1eNnI28+_NwTm^JAgfP3Y(u0xAbt#a zKwV9to%J5>W}`>~Irh1X{-4;67N^ANIysA^#b1leEF!jFrJJU!Qo!BV@A^Amit;nB zdubG$|1AZ!pI>%5*1!&t0!iFhT#1$O)bTeOY$WwXWs~RlyKgpsoEzLa^0uml+P`K- z>(lywl?CPuqw4n1Umjkr6gMyoQVhRt?8egT?B>Be=5+dq*F60_+;l5v>w7>8*mHa5 z+kQgK^Na&Z%bKfmE#Q4tg8-IhbXfWelQ}XN`P*!*6c198Q?P~rV&=SeAXSjjGZ7RL zb{k1MQ@PLG8wc@c}o3)v;J#FCLhnpi-b- z;Y`j*x%F0d5mKmTVgtdn!sJ8Te_v4zBEshkz|>LVCo;m>p8b0&Z=-7;zzzV`h59d@ 
z3veL5qH$g!HBG55udAwozn>hv$+NW+`+*)e?;HpV7)_k55B9GJMJFd%!8DSXNLJb& z;jjfgOZ8kvelxe0IxRKoaqJ#r3*p<`FMcPbd=r@b0@fSaWreSkkatw zuNQOw$TR#y`*fa~-eAMF!=vjNexUqgTpKd`xL1PUD^~R-;aO<;ugT#Pp*e@iuHTq& zSC-wmxvD&JL{>fDFLTl(y zw{>zQmFeeEu;op=@v^sU!vB9Popn@I?eq2tk?!v9?h=shF6oEvE~O2+Te?dGly0QE zySuv^-kaa|{lmq=g&y`fd(X^$&CKU2p5C8Z8i?%a{LqL0#tG)$h$Hx`t#S_EP6!Cf81+-U<{*UGY| zY0*lERJ>yJzjCZ!2(lnNOfXiCZvrwVda_@PqvPY4%es9X{NqxVKhJgBpT&mY*;+7H z6-o(NzN0mSfgNf=|l%(inwoVzTr6{gktC7*F?0Z7$eU zO^Pxc=x9NFBQOnP@=O90sO`va!6XBA>g#*sc^|IE#On^Q?3T!is+2G1vZA?lU)St*6#&on z3iV`k>@4RB?!cHwW`udv+_l5v?U?{T39xtzs23ULo-BEePDY?(c9_*&-T_b{Y|(5q zT?m7cU~FqVB9ExvE8wWo&V|uMmQl&nXhC>o;T5OsgPybYl)>Ars_Q+SA%>NoobTGZ zAP8t6TO}mQ=XMpA=Pp+^6E)5%d}G|%j0Vt+9DkQ33{PG;jEG;uPL=$62PPS<0l8$? zOQ;uAM>wZtfD_RFJ05U$I%0zx`t5YNoD1N^MgbsF-H)4Yy$t&k=^eX6&aZRsUdPjCZ|^|Zt!~(YYzx1-?}?-Cqeq0 z^h=RyJAM3XK7j*ekmhKrlQuM=z+*7iRnFKn=VFbBy?~J0lJ^Ew1^WR}!15c+!*955 zzSVYFq|fQRur2*d!meegeNa#mC%4e3AyQsoD9G#EZYE}TiM#%!8w>@Mnti9dGrNa} zT(#D_kdOe4(DLRFHoIuk*kpkdV(CaqcroPm#w)XVD{wNL>VH5gj($3aQ+~Oo>JfaE z`sLbN{znEb1_4_YKBizQTg%Aa>A#cBz3<>k!A@=xVqTu4x=P+(ofh$2+;s)x^-tBMIE`yz z3$jYrOrt3juz^@2c|o;U_&wF+}iN6 z1KjU#aG#wY!@N`<0VkHGge+9^_BTfhPw60&g?oy5?MnVlyi=X$?h5HC^;RAlNK&$W z@*f?s&{Z2WKn{ZUZV&=@PER$yDJB_Dc{rFnZN1w-l#0M>*K=&?2Ah@<$=Z|eJ$POO zmxz6NAGEs_`>nXZw0FDJKP#+Ptpf&r_hT$@)vkcqn?-V;5a^AYn?MhMgPoJ@@!;WO z!8Mfn_Cql5WNTQ|k%P^RjAekZsxVQ0`^siM+>m*gP#8Z_>Zah>bbCMXH^|wjsJ?wb z6^D04&C75U1P3eGK4mf%p(wB_YQ4VntsdVQWN)e4PmG`Wz1Z7UeGHEtDVo$g&PA7C z6`su*lpVgrwYK~2AFZYUeBj$6E;%bCr4#^=x}BeP9G5xv_K(1W2OUNrrk2D|}RKBcS|ESkP z$voHu9bmq*HAp~5ldL%PdV4fYe9~^&wDvWQIRpoVc6MtQ3xcSe#2dGYM6 zehY!@qK9sT6inQ2CM{ZL@o%v+pQO&<$K zy%|}r5qy}{tDu$$YNsLsKs9}a`X70|w^A@6hDKrSDp3b03Ge)jjkVnK-~$ey#FqubgtF4#YOfFSc#L;&j#y=nmp{4C0agM=h;)2aK!(&bVkCf%!BDo2F@=95%)tP5BQ zQKm=i$5+OYD?TM2&dwAugI^$5YC`~~FE)Y_Vi@N;gs0?kw{);Dpu_VB;9DZ+jrV!O zu7H9MLjC(_5FRmqDkf^if8ZGa`1O=!Umif07z6sAez*BMkzSB80agGH>Am5X zZS1!{fSwUM{Z}SYh6)Q8cQu!@dYp&~Bb5I67eRr3KUnkwAT+oTw6lI&+R!F~UND1# 
zRT92OGL~`jb$K6mVzfE!LgaHMz+_B{eHb> z9l)4vSadlrqSlRj>p1S*W{l=7li2@JS^baekBGfUsZzy8@1+RX38Vpp9_r3ywwlf< zngZAP{-e*feR>j~CC<)HQR59$M;iYg9-J~UCx#{bZaT%uNC(X^HzWU-FMAU(rne_l zmY*TT>3m#Kq7Sg$GbW|M!i( zx;kR0fBU3ZqXeg-X|YMqGw)aTwk4~2broDgTl)RVWy%f@a*AkBj|lPpG~2ilwMfaM+c`u(AryB@#L;wrK_L=0(Q0jSfP!_{YJM1d<>9^_NFV7_Z>0U@!M}b%c-dM zS;`X=&OM?CSAEgj+eMz)8K{_=%3wwtf}QCrV79k+36|>Q5^0X~h$Vex>;7dPHDE-j zdM7T-v-&-3<7EQd7Jl7Z%N9>8Wozj1sFU zmGSxCs9KvY1tAz1Sis6vu12a}+d}lupslV;4U$!)GFk-nz%o6_1vC>33&8BR3t9cQ zVH}>85Tu#Qvi@}#m5s~{0@c%Z?m*xdKz(NkqM|M_zvJhIlE~}N{lB3Wt`i0z=F-zY zVG4RuuCJfQr9VO0xJxN=!Af9VjopFUyWg8+QhNZy|PPgmlixWH~qY<>bjtoiMEBn0BFc6?Y+u097v;e$<> zm3{A&I#ZIjwu>if^I&u9<zm7L&RE6pr#!JCYQSy){gVr`TiO)B zzy&0ribF@Pyh|h~1Ly|N&K4wH_7IVi*F0lWh){{E>b-gOD~P*G|3 zJ!m9tJ6|g`-+iXaans5jzNK_nrX3P$M$1=g-D`w+6*(Sbam9V~IhRz;Hkg*xDPm+J z{AY$QK_z}l>fkJp`+h5-`!Eta=aBIV>l8DdKbgm1@KR((JdcuMov}RP>SKJNO?NhcJlt)$r-2!I_{$kAbU)^ zi8}^}?+2%1!50sY{mJ<))RgoT=ChP|qhHSkpHYsMrz-AtRVh`s3QpmfqDe4&l9G@7 zt`=1Pk+C{r5(}Pib2t4i{B8sb9+tc94OT@QvhH6dpza5IvuwK+{8w;Cs-|>+pynR{ zO1mUhBfnA<`_3RuA}ka>I)XaKCnIrs?Y9Y_&%b)reLS`}!H_FIDe(&&8t36{@5<=r zGHIKGpt3E1`JQ+0v3@RDm2a*&W!^bW9|g5b*YZjRz|KVA0Z{7|;z@6(0f_|wvI1SF zfqb^y29PoOS72{AY=-IHm*e%-(l5d}S$80{q!HoT!_!xrQ^t}6dIxr zK3<8EUv@hwiq)-0R~pZ?M6B{&Y5Z~P&WAJa6Ra@5^WyW0nQ>>@+@2o zR?VM-X;_`;0DWa-nf_Re2wlpr%I{sSFH4aw2!B{M(qgxo&ow2I%ieUZt%q}oklK6D zLyX+~-AXvNzp4cFjVf3ZtD~6xK&+C3=FyB>CLw*WKKsK6Fc_C`Z5{ILxCqYOnYU9= z!mSzx5pQt+9{n|>3;J=c_L>aA4mY!Z!H&lzQ!o>WH!wH~VraI;OnxGLA~|1)+CJ`5 zv^j0E4mR0zhg1$4^zr0;QRq!nkLWy98BayK$@2l6OpK-U1%q&|jOLG~7Vkvt3SiBr zntTR&i%bN~ebsFP1M-DXhmCWKf!PyOA?2HTwINb2%eFLd4}n*vQa>Yp-;LBu`zd@3 zxGsJlqH|$hlyp8!yvp?nvrx^TeR%jojV42lc+Qy=h{v9Tf@6f&M1vnJ9Ln^9m+1xf zwosV$A|!LpI_4WV0|Nz_U_vBJa70U^AiVhM*1@xO?vxyS4{j+WL;jz~w;!T)0WQ7i zCve9B5-B(@sGm0Im;yyF0&gByjnzVxOi+GK7SapM5I~(fk;_D`JElDg_*N^Cs-?BVJa04vSDqwj;a&o~YLi8f~ zdvg`aVhHU-*K;Lwy@8gEV7d_Q4NQ5K>J&8*tS8m`zky~l3NjD^uihWdG;CWA#?n#W zDP2n1#JB`MpbU&AZ;q8=luW@j2p1EHk%^Fsm*ZgxHhD^xDM^;r*kyi7qpKjGWkSr& 
zOQZfclc^`ay4L!xDP=aps^%UKDRGWbFRT^KwD)3d7gmSUBGQ`8!jd*@lF;T%Tg!uh zw&=zJn%(;`sd!qu0RUa>lvk7-zLa-+=-Q*Gl|XZ({2UjFb^jK{ERd`6*_i8K>`?wt zs1O{3^-q)-SbfAyD)qGTmxG2 ziW3zD^{!+3_Sz=3#kSCg_v#weRpY5pi3bQ|>!T&TbUryTx4~_rQ6++OznQj5IoqHq zY&1xLr%<7qcK@HV9c(4ohWF@T))vT-2?<@~@eLRxFFaCtNzKix1WO*`Pw$V+JaBII z3JQwo7#2jG&h(d-AJ_N!!y+P-%T7!^qUVd<7&4z9=igVvu4G#d6w(8KD5s0{c>PS@ z6!|JvovGgWKhC8M1yEGX%#m;GAz%~v;lqcyCheix?d@%LU>B@pXo+h;_`0~~W{!n@ z?5IX-OgJn}lelhiDit9AuWbjK6x@*CGf2z~+rA7PaD>N}NlkP#NNy6YK*Sox5~VuB z0UGfG(_!ao@=I-Frc}jxN?VpO5HBMfgi9+00_an=!7VbxMSODme`Hl$@k;$2Gn%9p^bcDuo=8D1 zNbg;X$_|<@9)5pjWYJi|+wY@plhLJRF`1G|rnMne3^m2o;;aaCf`PM=Ke?Cn zzH@rUFfw(*z<+Ly6enRsQRtLy)M4#gCALLp-mB5Zr9rA%DN$nU;egfwlru0~0dhyM zK8r;QVdaKqDd*oRM6wQXJ_$`hV|uJsoZCMwCElUekp%^5#kW>g|4{!sorKhkLE03r zT07EFSCryTNioXl`A(}uz)rxuwe`EEG3$R|sok=6M{Yj(@^Lc=uLL>yOL##+e~`b; z)Bwh%$@R>p6Pbi@P&u1-z{wdscV*d^jX)76F)bkd<6tYX8XLff1qt`C%+=)d`_-!;GM1Uer;zaxVbs&aJdj%7GPW%1HLp) zY`E=gYv)Kg%*u)>=fRw;i` z(4>REg{wRIYx4gyH=tlD1Fq@YShKOQaY8~u_9VPEzN4e#&FSi>WtOkU|LqT=<&DdD zJrofMOvR}eJvi^F(9O?_EFDZ%hP7uG{&9i@wF5hz+v|e!^#cL|qGz@iuYZQ4>x{gX}~=m;>)wFKN4)xOywU7Ytt>j-+3mEiwu)o{dzJjX_T zunAwW@$$azS((n%Xp*}jUhN8O4hIaJJsw1Ph3?4;(M52QJjYNbDZ_f~3}=x;B}p-uiy3MNybB@Kb;SF7y324?embYCqX3PA zv0@0|>IRD8nbRMMnJDJw$S2Tm<)ASe0Uj>rh)IfW22IrUF8g@NikzZs4Ma&r=VxEl187v($sE-2;Rm2GGdp~GvkBlLb`ySnjmyV2d7K|Wz) z9b{oziVE+Q;b73fOE@kis$sf6xYMq_wzmBxSb~b~o4o=TQaJD)SgEjqCPk5ky%2|n zicUZaJL>l((h>~`?VPU9i2MiNq%`K-n+5a&LWNdqL;~^`&eBtOmFxk1zuxuU5CKvQ(_VbHZ zPax?jKw0rP>v>DwlcE(G~z1Q@3|y_k`O&_?RM}Yymj3Us(*IHaA^a zaak0|d1oJnAGC6`EM1O1(JEz;41jbW5{^j0e{WtrqieP*0oD+l?wsBgDWh(W<*>Q9 z@I2-9+MiDFNmNu&k*^yoWe^1DpM)0e!c}cg z^K>$361??aKsEzudL&6+@6O~37qsN?69vL2zgTw}L~iDFl8%FMI@J}`!sgc?u(_m| zBui9tVb@(Cc8HFasMI~5US$E5U;e0NKt%LIRdl;w1i(ASHLAh}1{jMq=&KR%f-B-# zaR-DzMhsNaHmOCeQ5t>&$(D6iOTlh)Lv?XLKtN9^m0UxPoV3k z+vNJVG;p%8EQ6uO={^2Wm}HmxeyWc@WvhM_&%pxS~d`>cwm33F4^lkpnxCYpcn{V=D zjTU~O6BZWMa`b9%Z#OeDTl&UXb#ByUH5DIsfj~mi85;3~ftx%cATl$G};ZE4*oB;^%+gasd0#CLJC 
zL_Wg7`%BDvmu*FW8gOjT_Lk}?N*D4LEK<6rWjuV;^J19++v0CFZ78E36doq`eO!Ba z!RMHAv|D-ayc7*4bJ|S^eRay}G<(fr4az;sjmf9?xiWn$a}SEc_<*Qk)w~4hJ-@x|5}(th(Ge*$xbh0Q{~oup(R{cCYL90u`-X zC}~`HD4WOcY4?0JNlo(V28jN?yC1f*N$czEKj!8X`?hz4f{u=$An0wbH9;RAHqx;3 zk7j4cP1Kv11msDXTb$VZ#KZ}p^xYI4?`Gtf+%!l9aA86Avtw6Ce~|=(B}VT0tG}Q; z&OL>FkGTZCj|@@&X_U%;0m3F3gJfnPT`5k~i9-wiybaZ1{AB3?wjZ}1D`*zT85BtR ze&g2o#Yl(c*JqYA-f%$hmK%W|X8eMp$;nMa`cNg-A8C##v?IFq-m8kn0Zxu@6qvjv|Q*a#iL8EkVlBJ_CTWHY8E5l{Q+UE*=_=SXf`b-iXcX&Fg@Q z4)ai6-XS^IPL5=`tAwK(u34mgKJ!xd*>kUHiR4RDcPj{axhi+-TV|OEJ77<+pg>w- z2WIB&TqNNwefe((c5E60_i&_4+;VKZmm4OqBUmZpMqSl8b7|=sxs?^nrosSx01$tP zh|8KZAhvdPay1xUyE!YJtH3ZM9ji#Rq0#)^fFcz*d;zf?4$*t^Dpi7lTEL4R1!X;5 zL?mZ?)4JwfL(34%h-6PDaxZ*5@{*DB3JVK+2bQFWwP#Q28$I&e52N?TH!)gUZ)4*) zqQ!&7WMy@nCT%`VOyIb#cYpmX2xv7!CzqezXn^4Z$Ppbq(L*6Qxb2NpwQ|1yg0>O>AK>d=5T(XEpUSlj&0#Ty;D$@Atk>yv=N_1@}U7U`=z zl&UQ9(Ec?Shvgv@)uTMQVYpHOw2Bwtz9CkuIqb6du?h{Oa8k-B`o@Rj7C~k+P8n+u z%4!y>l`|K0+KVYPpn6ENGJd4E&HmRA5XQkC5IF+Y|_!S zl$@7(R}XTPl}VZ{*Uz9QEpA~=(;wKsLwj_9@bZEKFx6Z!aU5CTsBUKF(*f9P;{??> z(A+I@$}5gvG?I!;vS_hclsgYe>GY~LhWqc)9Z&yCNc%@OdHaXk1F1^R90I%`uxV8| zWJ7My80n=H;kkb29B2J==^P%2;jwoTx&0#=G_Dd1?pAD206=GANt??F;8o5S3$mb% zubJENUn45<;D;cwUu?Esy0b5JEC_*uE)B4WiTyZ(0I?9;zobIv(@-{Te>AM{Z>2det{H+sC7M{@TK7zso?BqO2pgbu7KcV&y7^-`{lJu5@J z<8^i{pj@mJrx_Z3)y)X{F8*VB8%(G;kei+*X|p+XL%cLzCCg&H0;|8bWtYNP&cbV8 zudzF-oZtL}#F2s-8XC#(ANflk>6wMTmD+lS!+&C}7!QbedP?oM0k>>rl|NgkCTMebdiwPOyq%BR9_6Ih zpfae0180V$}G>b@g!ZezggikT)JkVFI2fL!|ys z#_lOfMW)M*?pH_F`ziLDZ6GU($@Zg9Op0tj-G_E{2cqHPc8%w;6Y;He7RZmToJF)L zX-m0Wd}wazT{~D9wo6gikx1haQPKGly0wMNR6BjhNY4^QNpFn;i~1fEI(Uz_-cD-) zxUi@X@hSJy$QgR}p!9Y=Uhu58sdAm>j(*_Q8pE8*kQ<9dByva#{4!O0l7 znX@`?4~WIZ8`043%W`nGu?txdP|{jL_xHQPqhu;;OmW0N7fVnSXqKYxP0~jH{22*E zm~;W|fEMozJ~z6EJZaPZejRy>9W_vh4S}l|j-tRvA^@)svPS0tL~-iS5Gy;L#*d&@ zFj|g5g#n{o;WxJT0$?J1t~(w|>3iZ*J!)=^CRbjX@fE`&kviyJ?wuOEyjz(rgK#np zBq{MoKE65CSfRSZV29zulrxeNN{(HP{V>T{Tofo-Cbq7r zS!x{Go-JC$$W@?n4S(`|Bzse)CsucPtoxp1_=B8x+YQ||H>M+jB2ZoO`h5opZf@3# 
zHIi1_07R0Am=ZKv!8m4=1_iawYGy~rv6(qDZ&@9sIV>1%=Iu8xZ@r>rbCiNx%XKpz z%xCVS;t&ogy>BIql$t=4IE`zB>YA?Macl9|I>hF_%*mgtPG z`qO}-ybaaEWMklCQkU<)r`F5+hp37&i_dtG@8#s9Q*-HRz%G@u=c(ndS#bg|IkzF#Dx0Z^{@98!N7cAkogdr!*S(<1K7SL@x4-|Q9vuF zou8k#TW!s#Tk#49$hx_(u~%^rO5T=}!5s~xvAt_-Q1TWAM*^B#xV*NHZ|~;80+@bG z>*%o0FKCf=I8m06VCw1JT{&cUYVx`ks$TG+pjfGET$<-k3JK}fZ{$jgixaYVd zJS5<_0t5bMeEcjBGN?i0Y;9lX)y~n}oC6-d>g*g}`O{pQRz;cqNl^6YCvXHT%-=!- zjltsL`jB{a!KWu0N^q8B7E%)HKmt{(OmCDRv;Mb(+}YB4FL!qELO+pjo}LQF8n1Et zwnu7am}!NcO?9h(2C30stU*JOP*ZNIcXX_X(Nvv`sOkHJa9RSk%5HF;DPde32`#-W zS42X;{`nbRQ1UDth**00Q%`z7v2%dim!~IIW&sX2Sq&u_s8YlRf$)gWw*bq@Vv(+M z?%o2mRg8Y6#xUs$$oU%jSR1Woj_@r$8Gm{6(b3O^z*7xs5)kE$oCmOcvHckCLgArl z$l?cLV8SnFY+RiyQ9-tb80Kb&aGwyf{-lW6g~Lpf_Hw%GDFUb*E-z1S@pByj`ZLl_&|aveCi*?kwa{74c1={b`wvAs~tGgQK4=A&{2 zYwv$_6}-rDIcMd(sk(q^9ra(=UDq0L3j)WgG4C~3&E3!Vmb*7JmduyORfj?j+@K=6 zmih?b6p*pGvjEcSr!c&o5ydGh^BFB=2*2Qv4G>b+zeb^cvU5aEJP7Nr`NhJ`Vy45V zIqmT_^8$>n-=^TggPD6o*FQ>GzekaBZUI4L_=Y@@;p_}gPP83IOx%b^&jNH_S)(f! z%8{?vHz*#mf8#$R^oD1=;RvkTs)giI;zUk(v#y5v?uc-c9X@|q4rtiw zIsSd3SHviqt4Q-#zdcLVKQ&j%B@p${WpP}~FQg&%%%iQ>inQg=%C3R=HZ_p>CL3nD z2M<|5ssaZoYW8zgr1#ukzbeONzM#Lna5}qS)M&mb?~1s*?3=Ur)lLrWcW9mosd*AT zb{xyWK>f(ksHLK{WIj3B$zshYkKnYFB9EmD37!9;l_qVyuPP^p;(EN`@)1?$Zyjo9 zKYbDob0JV1JX>a0f+BaEK>M~>HSbfa55Ka|HI|Y8GdBeAxahIs+Bi5E7)WH?UtQ(U zci5cyk4+#ySy^Qjjq2pe%1I3^UHbg8K#hX{@SNtWd1E0sCJc}?*x z$GN$`M=PhLC}P)lW?*^igmDJ-4S?(BCJBgD9rIOnlVywirT&z8;~z!ajr1F@kQg*U z(aH&A$UyxQDdTk%79+o)u&{8qgX+;M9Z;nsx3vCTTT8v&-{noIuCAkFS_-PWCC2F; zx2oj#JV8lMUzlutHUd#YL#t3l%>^8)0^}0Kz%2(@sIjRJJnZa;&d&Q2YmdDCq*4O| zO2(G6Pz4ihlFq47ZyF1`lx?4-!7xj@aPz`s|uGIJkH0*Jub<$rO4bL!jh$T=vi1 z{~0d5Q0RWg0c2p{ZTSO2rxxkTOFh=j`CJ&xeQd0=!$G*;^Ly2L_Z{(J<1(liXrbRY zu+eAU;|=BwFI6zaG0@#L1q1{`#VmxLre_!g%quub1_BV>6gts~8(v`1w@6CkoM?}A9UOx@BImx^O0+N(?e4{NPlJ3+& zDF`jU+T`7d@7{K%fCJ)NQ&_={Hriux}ue|zbWj9fp_g+K=dx$8-XF*x7*hqOym1t`~O&_eUbg)3zf_&qWJ4OX--as zu263r20EmR^S+IHL*4DNe(Mtx5CaRVukh>_%R3hz9l)nb4X*6(-`thAy{BzYR?E9z 
z>wGdbPgA|{Mys+G6rvQd8=IV&M@Q3O#f2V2Hd0m8>j7ix0IAV4x*~CghGy*Q!3ErY z>V|KH1|kqO%V&*8f+7#sli<+;=+z6^EQas^XF&?Mg1!I#H6#QMOgw%KA1q6mt~Mqn z&H8(H_lfnz4oyjUjo%ubUPdWXS!bSJz+E7qS&QZ38g%l|FfTc!qkCNLFRCOYeNA;iCIH&6 zgv&*^h8E5G!9v>HT$W;0I+=xWMuwpCg&ApF{OOx*}X^ z(v<9h(a=@F4wOU&`GKcdeO}Rz++|6KyOmS<^;$78QqGvXOLqAGlt&jqnSfCd2X*Bl zB?|aH%j5tio z2|8fl*cl=9z5AWmraRt>RZY?`VrhlrvVDsoZa2%JkuZtey}i4H9r9a`2|oLqR#|9WH*h-jxRUTtHZvU0HLin_l~3erzsb%s>@;B@IJCf;5BbSgp05Ik?)N`1q% z!A z+IVfIOvHAEY;i^S=S*3dXM9;W8u=+2y797l%MBUfAA-Mkv@vqqK)f0KYnlNBU~CEs zoL7;X5rO8i%cIU)^gQ#F6jQ-9zM*a`wH~ZVk8Ov71Z`b>Er;0}M!SN<1_N@%0pu!T zLFXerkD(!xeINhkzeJK8?-qVPx)~*xcTIS!NbfeIK9js!+S?nc=sohy&qu>fcO4;m z`M&4pSFUtFG^b0%XInfJk71V(AwA-ktpGM zKO3;;mOY&trnYVySEaG)%yub|0ON^-^bu6B8>{luF8|U$(d6V!!f8%sv{* z-!8+sFd8g!R41)bo{95OX9-Gv)QvfFN0j7_vG4Ef8GjXM{Q-kwGH zbB}zR1lzyrE$bnLlzw%_tc?ci`#HNbot0Xm_@1u0`hN@WlS|>_t*uN|WX>O)_&ok& z3yIqI@a9MRxuJn)B$G=YTfxw}@rRN`tRy14;!rQD?}K&U^{r`DB2@crm%)4uZeO{+ z%4bMzA7_|rp=+jc{h_R?NXVY?@;$eGs~T%Jj~5ArAz8Pwz1-_~Etl0kGusXvJIA4% z+T5h#&0MyvA2^kvq5LU5J^mAmO^{VzO#0Z5kByA~4#9^Mg4+n%OhBNOT=<3PLnaFY z<7-5FjI!&cV1R<|g1y2p-`A&~K1$xFT&WMKpV<5$5iOanD!{3c$48hpG0|VDpfWB2 z3vc=G1P5kMJT}o?WPiU7_5<$b=4ejsjWf6*LqjqB&sPPj6y+8kTWKhFLu2T@(a|}_ ztuU;>`{WpKG3CTE70ccJcl56JR9oHk}4J$YAZ<1{!9G zO7&!5;rMk%;Bzaa)h7P_EnQsd9aUpXJH9LwBlTaMwyK&7jm0HaTpWu?fVb}&8$oYO z$`?{@?r2qAFBTp>d}ih>6h0(GvyUn7_a#`01`Y zg7g&|XE1_%vPDe|2ZPjKI5M&V4bzpwwAT*}?c$VH;DK-HgvQGYYSPfgHcK>5mWst; zPyf{q3xTg`F)wA=daRJ(Y#QgB>P=l8d0;HC=3j+pq311>OE2|FEio^ z-uPZU*l}>4_HOQH*WOtBD3Mm`p(wOh!cpoF7ogC0ma;K?tbmP&*VTRMF3&-*5ij_U z-!>33H!|XDi1322V<+3f$1U<$H0#c6XBAu0`hvr20C~Qzwrb6mUhJg5FM4+pUk9t> zkvHCDB8L-ONKH%Bv!oXExd>?KrPt`~e+^f|6vf#Rk4vznulrNhZphM}^6Qc1dPLxp$~ClsFr%+8HB+gM|O25p>UTzttt;gXG`|hamV( zkMOM$2u|+TNoIJh~CrJ52;aMxwHurUd4^?zI#!6|ABdpb{}Pw-p$0 zwEj5s2&6}j#5|&7yK@N`;H2?pN&7w{igsWdR(y|&I`tsfhqkJb{i%RyW&ivxfLQP) zY;-sK&zYc9v|o6BKBKJsBPT;(*zIncvWo=AZiHZPvhrvCz1E+x&b;vOxFC1ievB8( z8f}*dszqXa(X%~{}vkTWMQI;${Bw{gFh{UjafXFtQwowl`WB*3U8it8;MFx 
zE2G$PkC@)#N7drRWhwPZLMmFLePwF=ec@<=Ya*#ltAW7^0u8CEii(87LS9+rHs}Ji zXexRmeSuWeT=RwDLgC_yjrG1UAvI@>$b zLzo~$8Mx>1sKu4qMfNqwRGN5jXJ%N#T}$;i z!p!a+Q6_)2*e4NR-vT@&)1c;4tC~5GDIyu2zYBF_;v}|*bEa>x5V%cynj>|I zhiA&PRW#j={_PqVh-_ZYLm{DMO$HYYii>rSg7_cu@Q_?w@+Ez0ig3&ZGc;Nxd}+z@ zdDl&Ui7&~pI>A5lY7}C-(vM1(f^w4tq^H!>6(YWomB_wI#42lsYu2erUfu>wg+k=vX)`?f>B^YEBz484$>OKjNC13w_w~BZ@js zNXisf9e3VyOUch?B69Qj$=1dLI|M<`!ea4o)h+}9d{GOEi}!e!^4MSz@|=+{X;~;K zH(3M({W~`Isi?OJXp{{jm6BbuJx>EDWHI33cYzm|=hu;$~tirvue~wCV zXcz`e)-&9_F==RMGV)(Ds{}b>9Uauk<(6+zQPkwU8k)=BwbF31)e5zwW<(>4jdj-kConPTD4irUHTA4%$NK{k)LZ0DX3f4PVhn>99}#duxPB+$2ne>( za$JyMg{@h2{)dicABIySKtQj{B^>dShNg&J8{zpSeH0b(6Ah>cHFlID0=Y#orK5{{ zclkqT*xAvOCjwjgmE!n6ZQIkiHM;o-dw(SUK_wdzP0EUd1{bq&eDDAhZjJ(BU&~Y3 zCWIyFMtDzJ82>6ITt-@}_V>@XJ7 z3|58w>pZQcjFSxHS1h+gO_j?7fw9HE7RqpQs|kVh^=V67EkY_PrD{4VB^2M0VrHWC zRP#Nvqm{ZRcXF<))=XS;UbZ^75z3+Qe~0K7s5nnom)^}5l0t-Nxx-2n>xwv1@7Q7P zrOze3S8`}UVQrCYd1=E&z!or&`iM1Pu}Hwty&v+MQiXBKr*a#!wbZdE8!E%`EXcEI zVpf|g|A`q69U__W>LHWw0iHzgf_+8c93A57;U8RI6s$*fl(zbvJ$cG*CXyVfwc|CL z{UF!XCY7$92sudV#xgzDeLigQg}kYIy5ra`puU;>$#*V2G2u{9xDy-j!UoE%ym6|> zG>DtTo4+noa!EZbk~JGt3WZ{|4lPLW)*ZqHbo*}f5|YG{AXvWKHZ~&-X+$4R*Bknig(NL`eu^617KCuTDEMemM!1DD|xj;HeXOalnuD1m{ixoSYl)8Vs= zw2tE?i;E3!oP^2LFH$wH++C88Os7>4Bhn65<2GP^cB3cm6zzMSF_@3dhCiYbEH zYUrnG`w#fl*px3>Mv&AIR-bVQy%{Vn^8-Tj!6W;Oc%5LN+p{BL*VAhM%I%L7ID1lX zek~<8N?-8s)u#ET<+LB2*!P2wknNxOfwRwEC1t*GKYm<3DK6e%m0r^+6v|FbffW)R zBO`id=F|90KfRb_Hl7Q9zQjd{~mcmr_dMM*^6G7hO zMVSh4^9ZB)5`x8qw-({f+5#f@S1LW(d@LbPLp<#j8#km?1#Iz@<=eymMk7f+7`m(zl4*BJH9?i`-$4uzh z^DR~jF_!M3(Mg8_QqYW1l}PUtK4HMDOOv(jrV4 zoQIG#N_>@~RInMO!y97}13{mCHb1$Ub3Lhr!7AJ3Ri9X0Wr5gf?}MKD33+(5H2u>!oxth_O@@CIartb#Eue4(dR@Uqo2Mni9Lw z8CX8L|MIuvf9eV(=8Mq$-OM58^Z0SnjgHSBoWkiD6MwlCLa|X&!pmkV1p}Ul|JWi3 zJl;Y(4G8V{ybeU5_3zv_YNoT37cFcu^s_yDx#P#Z<3BCqypvP+YF6vsNkSr)*P1_$ zb?RTdsRt*M)%`U2mrI-WKTWhZrX21q&j~&u+@R?sm3Ws&ZTAn(yR_pRZF~GWDKqKJ zA#|!aF~Gf^o0}ZpucTuU&dklFJioUCNAcF#M><&y@OzDq|NWVn`A=y;Q`0kNx-c&0 
zEI4Lxd#6=KX+)iir?vg&*b|-5`#mVuf|rzZ7qZx8IoG2e&+#RX+fUHZ{;_s18Yz@l z9&vr#*sxNZ3u9`6#`^3*oe!zDE0OkFGEep&WnIjxT;?i7<$Ud$Gd1Qiab)kw*WIk( z3AOxWqxLEvtcP!jq8TxsitTctGC7@dUw;`$x=8$CI#WT)XN8$jj)){2oT)9VZ*edV zsIDHIkF%@>a}4j(^#u&x zEA9SczjD~?Zs-h6m#+Sop0NtZ=Y z(C;fxO+ORYHDNZjAf(xIj*s`QxjxLfP0(<`6VNz{ccSTZSlm6bx3yF2GZo8dguy983!KQ$UoTj* zWmNdNDhTfEhKGYvoklnM%iYnU`g*+R-tWDeImN{xz%;qa=ia5f_2FP$8*ArP=*Hfo zcDXrndQ-c~;({P7iq+)uK5gD-#X@eBx!_^_;F$#f%5|-4}?A zE?R%q&yW+043tI}GS{`a#1?St)Lo?DOA)m%UAIPm|38kdI;iTliHf|^A>G~Gozfsk zN{4iJH%Pa1cPrhEbc2*MNJ)3+{T{zR?lAYxaEIadJiBMlp0hiyExWdHDAPe1k0)Tp z3bZ06HfdkE6~@|@j>=winCjdes{iEtv2v?D=vPQG(E4$*5b%;^Zgj{#~nl*&Ddkds?ECjkoQ%qyiMA zPBuucs)se&d!aT~@V##?mVBWS^~`O%q0r+Py&c+DUUHXCOl3yZ-efd!_w42NrJHZX zMw6cE2qCBv=;@8UZQ8tOppdmnlA}ofEzciKBylU5fR@g-YKbCRTnDVQLFA;y#zR}T zt#tM*VU}VSrS7xRG6|!(qq8b=UlmmHrYWZvnllsT3M7XS;7i+xvYDY>lQPN&RMVGO z6Z@myTI(e+D;mj7V^T9`$~^bXUT8Da@bMUO#v(4I()``Q1ss+_HFD9eMdJa~(62{J z2DvEo>Js=$?g#+FE?m6!^?wu9RQb*4uThZR(hF4OU|%~%Tvr_xA8)aDps-AwanE*v zO*_u@f7$t7kQ|VUmRw#|Lq@-gr8AzA(7Q4+okd+&Wzbmv+B-7x!vXx`UZK zS4m<_*%>TZn|4Qnrph35`Jy7-u&KR*s#DA5Wnsw#PR&Yb+prbrab|aie0rZL5hQY-5=^tg@!kPybit(oD;Pgivno#N@ROmxXfENn4XPctN|T&4=Ci<%jBTNo>I*)j`R3sF7HdIG$;<8FTRA*M&(glh z+O?}o^}gBa-uAFJ+Z2nhWsZ&6)=b1bp)mL4TKEsoDGD-}K3W7s95&BO`ikG;ca{bp zcKY^qJ7qxJock>-uCah1_i^f9U9q)|6K(Mj?;%v2!|SoH&TktGnfgpS(Zc8sSt6xm@$R!w1w!q`_xPFfJ)NEn;G6FZC>$^?ld;YgH3R#Aj4 zG50{?Yh4+1em$=ms5(`ZrlQvW*x5;WQrFu^*zkw)l`Lx8Pwq#3@9TVVkqU!~ZBm&B zTz~QU)b-{5E&fISi~2vD=eKDH@|tGatmz0fOHDKLH;B}#4uK*8D%FH(izm-9*QH-Z z>Y}H%3oquS3KVOP87UQe1(%}v)fJu!ZUbEDF!9yC?~_6oJxrK|ACqqu3I5}iA5P5t zBJM+G>gDM8>%s>Y`aKdY1>L;g>7Pwj-7>UAq@e|_%p%c-3$bUWpla77FIl{_B+nhG zuwiA?5E~ZXMrfO8NmWnv9svWpH<2vriOr{nd_I#yobdgq54Jz8gwU&-Gjnq0U0&x# zQm&e6Ze}WN_$@K87CzUsec>ubl(V#dSl;;KMBA`zf{9Sr4P`=W!>v5N9#^>hVW^o1 zWaam)%+!A|MUA!xDFAtjhT-fqd}At-62q-E_V-;TtdM>scVJSIb$OLv!1wRo;gK0( zZz;GG=KzTLFS-!ciY(zZDVS_ZbV<_my5b}sR4k_`JVYC=HT3pa#aAEcguBwPv>3{s zr1g{}7%?3k%|RrL2Q3AOLivKLR+9wgkNC>VYFm;Kyw+JRj0d=GHUPQNpjeQ6Yd>v^ 
zdWVR84mQ%NYS9Ub0u5M}&*g}TIV{g0{MAMjcoGL23ThiVF~(1y>|6dj&Vpo6r-chJ z#O_!pzLaBl*PY&0uULAt1~x&$8_;YEuob?-s0KzP{ZI`TqQEW|zV_;LV@c-GPi0Qb*Pq5 z&(`cmW{(VK;WN+qeC72y_1%RjvlWWub!;K1uP=NpS`1VlH9JfqS0`4NJk}YrP+L`O zf5Hjx!icL)Nw^97x-_!?M0zO_bO*K1qIq}grL3hU_EiH$uWiykAiS!7`&j|q7@cP3 zzp`#UO8i@}zxnVpWan7YnzfNRk7^6r$tmX|7q+>bDA_7+((1Bh#G-3jDUH=-BSI;B z#QO_^dNf%muQ4GN6V`VnIgt6 z(TQH3I;d$cz8Q3hNWWNmjFf_mtUb-0q)0X0f=BY#pFrsiSbaQ+9?}bj@AKCo| zShCJ>(dwDOMc@8ijO|R6McYPv1+i~XGuMVf)W7BRfp1m%*^_)L!ATrpUQ?l571xhdm8<`k3hQD%IOy6r$$fgFz(wmvBhfS!QNftr8$);G2mWJ;&KzV1vaX>je0Rav1rECB-~92cKGVM2{r^>ilnIC$rGicnxT7N z{%N#UR=xr>%faCz+AeHrLCk!-hwN6~w)=qKM6~Dk^vENIv<4@1`dNDp+bd5W*k^s> zzy6uf>*1^d0+|H%9T%c!QfYesDD#VR6qDv-{|DW!K!|`3G)pOJ?pl~}81xbqhO-rt zCuFp5?|gH06O)ab#l>f%=eE6R_SuhPyPGhM+Rh^Vvw2YBZ$%wT$oK53-1c)94^{rqD==Wh`0u5L{aR$T?=!h(Luu^lVwr<;$om^Bj?kj!HjZQSa*4|&rEL$VpCqk z0;}aNrTn}6T^m@6+d0iTk}0#5n-xHDk3VtT4ewt95X@f%w}MT8;g|J(=6+u~x7hp` z)_BQb7{*PoCMdd{{no-DtgmY#_QyyFzjh40TnN9EPz76j_6r=of}p-Vs->kr%3)tk zwTE{Htqa1|mLe7XBOE@C#5UgptqO|gM_PKVBi7H%q$i8pi!GPD9oFh zBmTD6OY&SdG<-YSn12JcNfiqCZr0t>jg_n{&O~iVtoS4UKtAW6R=ZJ{1bd_yTEyZS zauk5+^~aAHn#EQ!5pnL7|1MlE#ul9_fC+975zz*0rwF$jBe<4ZaX+ehAJ&8UJ7Pj)z?4|?Ia*R8xR_-%ft_%m;mY1N8K z^c78anQHz}@hTQ@9r&1Wi^PfLe@LJ#8p!?iC-MA+6?ziy^;w6J!_v-=XUE*vcT3yB zUZ!B*UJ@joYplC?h?U=PgsWKZTXgi$I9)0EsDCs~$wCGUmnbFzGT_oE#@G`mYhMP) z*;;dPD`+j~IoW(yTx>FJ%PQ#k{f{LqRnYxEbwGsqA8)3M80GUXf6K{zq5}{$`oo#8 zTygCEM77f0mAM2zGtFKp8#I?;AE_)lxGTml6*o5gh8WwzBsiD4H_6w^Px4H$^spC* zTe7??Wc|xu<9*qEi+x*k+!0`h$*Cr1Hw{;Z`rNZoG0g1w{=mqjL4(7<#n^M{G4sDh7o1bjv0amH{ zq_%eBf8mSlo}y2f77T~EzGXh4a%Q@hoOAM=)j}00%mNdZa(MyZ*7B<-)I-Jchit|N z^9`hJKC?YvkiF@;QrjZUDQi4YBPq$NzcTNJs`LI&f~b0Mo zKq@}uPSig^@@-+>3Im#4G+V??ELf)Vh)2{-GPiihD>%8tKMKBHUnFvUogC9TxW^>N zs-(7jgk>|+OYi%=&{-QKp?@)Mu(}XpQg@ z|A+>#ejT6HzvnK>Urs2`@vH^u=Zbqh+78#rr|u#*Wd2q0RG^`Er*PaG%vYj2_w-4E zloQ&=rDl~n{pv?7Rgvgtw6Y@Z+guF%gwt|zdb%r9mtspZCLw?MCC7+*@7b;*1?!K1 z@^$pm$E~Rd^Rah24&3~(3bRKd_Ely$CsE5IzRWH>Zttz!QkJ&b2#)oQb4<*h7q$mp zUT#lh=p(LIf?`Zp)CrImB;tLRDxN{Irbe 
z6fIWz)2ksFoc0J|Vg8?EEh6DLI1|^Ev*o@eDT536PcO=}`(J6v^9=Xfl& z);5oydu96m(MoU+E55AC7v<^?^LwG&aitT#rOY*OvI)hmOSnL1pz7ce~%ISfcrmqy_YUq<~8p zWoGWv+Uqx=&#SJ+wRA;oVc*p!Newzs5&)9Q%BGSL z*`RTe>w6CGmr~Mw>&}Ss8o+fjn-HriD{Z>P1Ks(ec5fLuHl2S|Wh$j&$^5|4{ahGMvwKqdnkbg2bck!LU9+sy5Pw zYTw!%erjG{do2ROuz^myj(BwW=tR>@C#}H>9aJkdB@W1;Y%oTjBJxjWxovc8ieC6) zt_5b4EwJVUwfdTlnm3*Ahy4TzWXZbxb62ES{Ff~}Y6EX2PiSN0wkBgKz<;fwTsxi{ z*}{m6BKB7>e0}q?nZ}~Q>m?4QtXQ$iyJw=m5S&Lwp;>OlKCNxcb@g@}gIv_=i70xp z-LvCo>w<`dZ#SaY>33!`wMF8bHjUImp)b7qT@S4nxQNjb#Q3cSQy=D(mF4vX(zlGGmK^qB5V{%q(?tGE0~icSiQ>%x7<_$8 zQH<(*hl?CM5>ls9XX2^+`+Fm85j(2ex%j)QeTD<$E6>aWy^SbpS#fZb`m?dZJiqhp z3#0jpab|v6lo&anl3!uBl~B%pnW#bovH~N#^O+w7YJPb7?)3z&tISWhB&hUQb8|>- zudf%U*tN0i$emE-kG7`dKs``BBA=t2=|j)7=i{8H?R|o#bUD!&rz=3yyYimERm4-YJ(>0J zJD@abTWnfc3tkd*Tr~ccgbq4LgXy@`6Kg6 ziHRsN6&WBPk@bDKYH08g)mg#Wzu}XhA(kEYMgTROs*f(4b!j!|9>^Xtl~Wp|kC!C! zC_B1_EZ(|s#6x(qQc_T%f$m^RT>8pg3Aimb2$`Y_p3cIqT{LUnoOa?&^1%*FCZr$V6;+mWNu|2_0ntxrGWuEa+zrr$ku z*%8{BTrQ6^yp+Z?O)ZysR=~`9U?i$5BHYKSR)3Vt|E%O^%2%1&Y$P-uc^q%~^c~qf zk>JXO!;@tf&D-qQUR@QPdi)d|b1(vYLuhBi@D)NzS43uw;8*{mBqu>cBsn+T?b|0V zi@qnCTD7vhb=~H5O)C@-{)6EnoWz+W_^ZmPoeSUFuWsxxrg zZ9{XnWMKc5A9r2zTx zWS*`>`J+ZS;|B*999p#xj#ebZzA7QfJJ}dzvc<^+KPF9E7bU{W)J5m3O9s^<DD{V>x%pkZVI%PYk0@h`7Hvspd;_|kh#RtHfXnNzh{`#s$wa<4 zxG}~N(ODloACf%;^-Rn~HZ{5<)387Eq`t!>(hIgsh=}es$>XF=?=WMp(4b$v1%U&xEY&HVZ8io>izjm30$oTqz1 zY;3`1F7mMO(%H=h5ulkTNd!iG4I%41Kqmvvf-W-iztp3=oA);YUkw`zBUKUUJ>3!x zD5f2+w!al;Y@W=i6~T)CoR3N?lg3%d9v3aB zE7K%Ew5Fg>S)jHWX<-O@+&enZpBFm5e~D=xI;x(<<$OCsU4Tp3-EWX5l6mtEP@QR3C&)0XW&Qn5!e)pS# zFW0k#KIc8Fo_pEP2a5g(+Iu9g=dd{*Tkw-Yce*)#kCsVtR71qxjNQ+dAAhzW6L7P- zpX%Ch6DSH^{Fkch1kciSPY&9y8>Pkw7-j~5OpW$*Myk&+V{3Gwuh%h4yz)RvS;EH5 zcySi^2mb$_ur>J2$SoC{2Hlb$=&vapEEV_XV2`D_59V>6rf((!)%jmpvnJgds0<) zusVa&YxX3*7Y~ATB72wnug5sQ0>yBt89A4ZVyIHI6T_Q*71x%vf}TG)xTFb1;2Af| zD0Z(w^g&==#%bI>fYSMIACP> z`us4SD?$=A&H0I}|H_f7=Z06^afz^(>o1(^ zdy7XzL{y>QnQ^!79Vc+w5|nD-m(Ebt@>RpYH<`e$CGg+-f&y~Lqk5(GfWTQtpE#;e 
zKM<{qnx^Uv0$Yo;mlr?J24Pu~^NMY=VWalLW*AA!RwTne*{|pAlKI2qQT4;$O#9o% zwBFz)1AwS;r3sWR{IhjL$0OKwx>FRltz(k(&~1Vj>1{Ac45BT+MgUm?tU@X-LWuT< zSL=tx=9TgmyL;KoFF@3NZL@zN%_!9{UM0CtBtQ^+lJa%e@``kf{gm3>;Xg_)S50UM zj357ajFT(wt;tBlM?s#uGx@Z0ig)h}B1ZlsQ1K=X7G4Zq`W-xBa$&KVnRmb2$P!m# z+4w4FSzK*j->q+Whe@K4B~|2m;|apJLjO42zn1PnAhzPx>7WOE#58`6ZP| zW|vMvqT{WnXNwj0TW_;~U~<0?g2JV78PY+sFD6=MLk8|&gqHE^)<0HHNfbveZrdKt zxa@Q%#9dxFf74?(9q(USk9_D%l@YcrW!t;zNuY9Ab%aKw$8>l)A!k?7?HF2l=cov9 zndHh|KJt6%${;|5i@oOD(j-uZSTpnnk8Yf9<~2~e3qI_hVq4-}Tml-%ggU*Feq4*lo!0GC^l8Z(u?(DjVlI9GHhnkQX=Z{*dH88G8`{i{kbOwWLQc^+! zk)b&FWqz8&KV$z}qM5yQC7?1-sbRFkv!e7)s zDJ(3)MH_Y9U4*{yJCbhCsle@#`}W-&_6f_@RIK@OT_J7O4M{;3PNMDu*|q_{8>tnj z@@;J$77YUtCv#aO@b$ectDA`gL{ipWJFjivKJ{676~fChp*df6{V)iD$1vOYr*QcG z`A5s`vUNH?m*%PKa)YI=_eo=gZYz6#0>$<(anyHzwxU?f`a+TC=H|{%+s`h+s&Z$5 z;l6O&!w>qn7)&r67md zeUW@bXxiu`lCGq%(1YS4&CZ58&Eeh2XzDtF{+ z-`83RDoSCEQt!>x9MxyrD&wW1sc=*--w@I~4ulFM%T#1TH|T{w(M5T3~j> zhcxlf`-kO|7b$zq_9hX~okR#yI^{OA;i?~zMTu{1kit41O`n)O3EOJJOlF@1`~;j!tkQ{Ike9@Z7!BS z>t5pH+uHac-pI(vl+-Z76;KfG8vz=0WP!#PSOOi9{$L9EG>GKZr#R!mCcN>3jK1-< zba|$1xyRhuJ8L_azrN|wFCL6SN8Xz0oe8e>zm;EF4jr2036=5|N3#5P^(mEpkxol{Lj zN#S9B7YGS(FOiDYzl|0pC-N@3vHnw&nWj+fbkWWs$iJg@%AGJ(e_?dWohoiFOKnX2 zT?Yh^oc?o!n!&`qZv`Bq zQzzNQLok%nFj>CH)MV&LqGbG|K03&7;dp3CditU56B8w>Jo$BHxZNH;4jXx+a%mjW z5@=_w(!)?9_kZ;51?1CMDYgSX%RcxB%Tr_U@$qFjjneHLU&E!Ore2NEpvn4LqQ?GZ7x)*wU)gIwuBh-{8e5A+QIDE2Rtrw5b= zDR5u;*pbv}te2J~{2%*K#XBg!ojBv1Jy(G)E=_)mRP+TI3rcGv%G3-_T)OychafG} z+L13}UKyw)Bw9HtM^=s`i7E%puku-suQ@CmaZ9pbI;3<~_JCFqwo)p!q;KtbaP64- zAPzdwTs$W*=d(Bgsg*0CT=Bb1+aD02sjXsK$3N)|2qQE(4h$W<7**{k!mqgNJ{Yn5 za!5G>I40>uih`WkxzW^qglJP;a92|SL8#c$b9}xvg9J0fh+D}Te~J=+#bgA{pZq1I)Jjd!k{~c zB9`}St?^Kup-~#*3XqYSS7rjzOk_;; zqkfvUv<+FYgYML~F&@P<+CP9R$v^0ZQuRE4PI_Pz4cyRa(j=tK=5Dc64GSx^PicU3+4(A8}F zb8gkOS*#)Z)7j&3IYI+Q8O*@Nae~0ojg>%&=5Hqmp5^Arm5Q$c7n=}M!{@&>w$J!V zkt1zDr*j-XO=$^}`kW=KCjs@?*+IY!XE}qRh^c=NzvULinb0BVubtNHPWtYnA>nXC 
zGPPpgJ<4@7Od{Pqu`!|0mjK)$2WaNuqfd#*&un*2nnY04NrC5$m$V;uKRw0a5yCcGd-)4Zrf z_%;=4%OOwmkz%?1R7}8;XKC%#_x_k;I{kTJCyeCFa>K71l9!#`p8e_b5gKpXfHx9T zQPf`g;h0j@IFo{^MNqGoQQrnmhfL5rWtclLt2(j8;C6!_AdX@Z`Yq#uLv z;9)CZD20T4tMw_Ucz9B8juz@&kA4T-ZTLOef*NysC=RtXYt3#CL}*GjHnu-s1puXX z;nmpzYRcC;U^4s1dCZn7(zxy+j~ms3Gbnw2<_`fn)xe|>o^_9)_4RcJj$UB?-@9JW zi1zd4gmone7XJNV#@0CfeoVp%lJjw=C%&w3&LV-cKMSA317XoHEO3_YkN}$nTe5~{ z?Z5582+VJ2atJ^fO0Cw~`U)ol5q`Ak3V?%CXIRRmMAJx=mLG@z@I%D{3#cqXO-A$1 zrRdE5s|7Htyr2TrgqXSV)y0bW=YaZ??em#N)Fjqr-w;@5=L^pc-2OU6<+e zA@9cn@)s+2*{kA@WEA4KT=v~eGuD&BfUWt!+2Rd$j1jke6An~uB3a|J3yr~Az?)z6;?DB=XsYffN^JpyiT-B5s2{PiNv-(iTz-C_Q- zhU1*FG@Jc;N2hQJCd{T`$_t@Qosa-W3TzMLcdfdEzJe&eO~i^r?-NVE zzKsucT{3lez9^t=?j*A7P%c7W^Ew)KE87@JNZr%;dnqkyvmS{5En6bPoc*ss1Z}dE zm|>0$wUKt-Y74hsR8dMzDk@U}5MTa0I_cdk3w1qG@*wgU9PdAxA1a%J>1Pyp zklVw>@`1rv67=rDJP$8l6mUQ(nhK%2Ao}tR?nbOT+cyZioU_r+lGkP<37dEBls+$w z;es)Ub`@|)S?#7{w^O%bwVqc$TZv*56|pUwO3 zcz0F?p$ey&j->$UzDbVvDc`@R3Wi_(14bTz*Lm3nwRvxWWYj+j!1KHZ7LUsc6HrQD zPFGt$9QnN5Ed0alK<^2&pzDlm*PID?-V^!9mP2h*D`fP8IUUc9l>b0Gj)4yw3HV%} zL@~_(KK0`c-iBM|z9effy>c2W>w7(rLVF93FM=7vPtQVyX%XHDUPa$S9`>SKFn)ZN zfF|~q*Nvk{_p#exKBP)WfFURC43ZFiGZ=@Hb1HOxOs|G;z2s%16BZi))E{qvA@j|K ze;kgGe0F%Ba?mH70&J;bbd2rN9Q5zf6rW`THveNwKT+ImT>7%sCJH_HC16n#P0wQ& z%dqUhXKNK@Cd-5g>2*xsMO&tJNC!60hn?8_dnkRC4WlXq`711O!luJR8>9@E(+izZI&EKV7w zgc+h|x}Qz=9i@rsvY-)hBJqE84xC^wY#c9bwF|#`7<%~KV5zCWQTX^vYXSEm(Rt?Y zfaCRI%@k8Y6xemLHmEa1(7)IoW9cSz8lT|V`1cYlxH)j)|2G2_yrvC@Cet}+%DRUk z1U|xKNB0M%y@=)0aOr&?nc*vSuXM=A3hsI%vB*z$ucq>UAflt^HRAvUcD(iLe&JT< zOUM|N7uWN7kVjgpuF=<|1oDo+FhcZSvflmoJ~d9oX`FMv#Uz3~T1Z^J4V$9WZyd0k zt7jzts^!o#ZI%iqZOgyILOoe?QRIayFZ0POl@VFcgM~cAoiC@RM9Qv^x>UGK=e0$R zpP*c8c~LfElJ@=JJayB^7Os8}EHxDX}<6pQSH zZq&{to|{&J@#mPDe~P@4oxZEB1p%kGrS;h9Iu-(Da7C_vj_7NgU)MQ?3o$yb*qqf-Md%B?OfWi(i?A;Xkp0p6a~p%*EQPOf(`u-iplltd2>9(j;~txB8Qc_S$lJA&WaKx zmY1fbqvn2cOuiptx69hU_d9Lf%6TajIhjBUfxl|s4VVuIejw>L!&q;7_EG)?j79}5 zzc=5nmsBva)w=)Zo^m_8%6RES;-@K?cc&41d^k^D&BOc&lULs%kk+X4R$KX5$AE>} zl%$2WF&kj$7bYp 
zt);cUW~Qfd>M{Bu?Z(ZH2qEWfsi~+3o@Wdv#mnpi`zI#pN3GB8JGrPN8k>*~v zz_j#`loX?~Q9Laqj-z$iXi=Wa1cCj%)Z+X%{yoClb%tuph5iQ1f_=Y}Wg&qo0Byp8 zk-tIWT-*>?d^}svD?Y0uc#a)f#gpi1@HE8N2%vF*!UIT`RbFOa=jhbJFBiu z?=;9`kG&B#Pd|CpU`c-RcQ-X89@8xM92iRWF=^Eh3>)4`z%ra5rGA|U9>pAP)GB@) zQ=HW4Yjq4?s&?J&MCu5&!dK49wK5=aY)o7zn0HGQI3?ikKI(6pr$z#NNH~+kf0PU# zB{sm!-u;%F!fW6Hzt;^?{7E#A(qYZDZ;}I_d1D(6b0Uc>-qUC&PSuKXD9xT31Ju{T@y3H9Wtb6|-}rz4l_$^4ms6 z3t7_XdexTD5tBH4nJC}WXW-p2uO z7-s&?KTSOP2t|b-;h4KT_Sq6BsUfYcMsJ0zlG0jCOv0-Mh~vWXEA*L&$6Z|u5>HQ> z5yeg1x3&kwO*X~yeVq5Q1EoGW)unZ8xL^?awRnhvX$ls@A6X(sn$#JnXTVp9{ke%$ zqfY&JY%lgzQxSy!fG$Yaxzbc7O(Y~%$j!v^)AaO~;Z>LjT;JyNmE`8p&WM--4Y#`d z&e73;LPvv|1R*x#`!P&Rq;z%+mmKp6bd2CB5gh#m$L)S-1|E%~VM?k9|CN;xo|_-l z{pL~F$efNVuD%ZsTdupwTi%biIu}HGUghD8YtC`3m$%fr4AJ{v*EfP|TD6`DNJuz8 zVHNkz9`x*8z#E(L@cZxg3l7MS_F{4alLyeL1MR(;+JtCheLiWbE_s2)zyMUa2(V<9 zw={hPg973k58aAt3(FTv@^0$Ylgx@m6d3c+3`JfJNW85Ab`E(UT0!vr}#OoII}X1W5uJ#yLHO19e-cfQX!7A=18 ztP5kHw|R~G`GZ9xIr_tzd*F1KWK?iH8MpeXwHTXmXbY_0tLOQd9nSS5RaRDXZ?btNJTrsOe+AFl`h`ho!> z&2nKJ3W+Nyj_LTW2fGabD5(P8LC(7eb;I}@aCykynhGLuDt*Bnci{Aomb*y zt~fO^+%t_8Hhg3CP5JllMr)tWzmJeaDZlN%P2_SjalEp;N%BsE)bV^;ySs@ zUD!1uTLmmOEh?!MZ$o>@XKPZYJMjJ{mj$b5|76gE2I`|SLv86GIUOZ{!+^KUJ5A?T zsrrc57WLBN*t-epEOIf=op=fXu)RyNI_i~q8%z&|C9o@?ktO$~4YrVbU){}|1s6p3 z=>#4>#(`hgyV4=_`X++2HvV#qWh=*H)v+(`^AE{>mjjjj!6uQ13u#Bl^6YYh^AuJi zhoXoOJ5-<&qYgFoilSic-Cgl^EJk%5yCLIv233yf5D5uyRsIJb#UVoI1%vLrm1(QZ z37&zg-S0p8eVZYCDH-^e`q1G|7aI^}q@y{t+UkgiEq&Yih@;gbqLx_q+Z=d|zX4>B zP?-cu2F%C+jj}s8UkC)ldZ8*A%JXEo5!OrN>xGwTYt^TeRHnpT691%K1HbrP+ZB_g z1HoV3*wI7Id3RqRTW(`cwv~*{4ad1Rm6~K@05*=1j;WRmU()0bJGtNe+;eMZWYK3F z(=cEfv$QnI8XzAt&!HP@UL_y9TW1*afBE6D=HJZ-@DQWOa}A@&c>p7cdln-@a$Uvz z=;iZ6-E*9O_)&4EFXZ^iAIelCY2*h@6}JDXV;Ey;NhsmMg>l%k(8idr&f{0%D^K6c zxKNT^qPP<{5C5B;d8dPi;=rIdA$1>v_L_rhh0Djj-(z)CG-LePj3PHlx>ss3&q04- zJ=gM&b`;F*5=^yHQj3`r86YZn9nOj(OkEVs-X^d>89TL?=+F5|t0AH-w4YWdk}|?D z&oUz7Q<&173!!PfjFoHcc=qxfh}3P}jDcx=2>Kgvkv%4`d-kAzdY|71(Kpb_eeS&O z)P*>`+5dnX6nmAcmHBeP7fH0 
zv9&7vurqvXuGn&Kq!k;1wt`EyV8AjaE4!tQ2j9F#J?7|0K_Y_Qp~4RyL2;jZlse5qWpnw(F%_U730=H@w{0>>tGMCVomVl}*_Fca$`U;~1y1 z8O9Q?6(&Q?6{^t2`?uoiS73}hctAkXGnr^kf23p3MMdIqYJ{e*2R2R@s-#xg<9yvmwdm>Q$0`*KB8$FPo95kkIyrlx`dPQ} z6Y{x>my(wII)7$nNX-YhM+(w>glQrH!h0KGB;&Uf@F{%QU_>J#q0gcrdGG$kz3LO; zXB5!--##`&aGOJAfF|N2#5fm|N)M&l|9 z+f|;_#>w7vm{CLLXFIu}waru(oE@IvqAhHYZuMWTU1(o%Ng%_VzAuX5hLM1-C+N-uW-OB!T?p+!I z2L}`V?^^wT{@B>P61Snmk~aYdAL7Zh)Tn6TwzsM19=njF{2$(=Lgr=7Ii67+J(^Ud zUI9F9USFCDqHKUf=TqHx{}G(g86ktPqWPd-buPr}-yumoHJqKl67zqFUC2#RA)rwR zlWp=*qvVU@I-Wc%ikBL9iSlbXok7qsVI(?Wmr}Y=9St-MKJ7^!ki~LYvY`BcYi-AF z<|H?0h=EvP*|ysIFMI z54E4mk!n?hZnq%U)exjl z9{qSXIEIq_|`0kLdQ&#-ItA7K8iAFXjm(==p1CIpMO$z$}j25P}g?M+i{MjqBz@|+7Pf_+W%Q%$nL^A3EAmQR` zCk?b{xS4G$PjRwlZ%a{Q*&w8BMQWq3t6dhPy=uqSs9mNqnx6z-SRK&);A;>dc%1uW zUZimS3rqGYbmO$A&#lQUjD9YQ*TH;v?@~O@toYy6vWMy8eZGSfJ!`9iXO~Q##^rIV z=@->2KEGpxTJGQVNKVakX@EKT^AOUoR*kgJn6uC-+0O*D=YHhRpEu6YBx zmCSeQ1qp&zX(|Q+qMrPM@K|yE{bJ~tA=00x3eX5)o#yye2TD{w!8>$?`kDk@*sVy# z@N%kocFb2I{hVEcb0=bJ1))1C3u(amS>L-_bAb#Q3__W0kPak*bMz*?KS5W#PM7p5 zMHy$w=0Q_3dOLK3H8>~z+@?wqVjn+n949(~ifO<^Q9RciN}~&5Egkixz*({&R&?OG zT`lu!Lj{k?6}&mi*v*;7ZTrA0r$gyrKL55bM5IO@MNrdAi}kO z9XHBcr$9c-`PU%P^U*I=neguGC%LCoqlixO0R8dFI%F z$~a}d@E8Yv2&g&+keC|ze${3q=acw&syr+(#N|7qID0a!(-RbGuc5BarN;_kGvm6U zab;cYMjF{%r!e6=fqShbC0QD)XTFH;;CC<}1^^9wYb(%le7+qJk!+E*`?2dYvD#YL z=^$@kK@c!N+KuD06;HLC+u}XmKsQX~d^)X6HQFoe>T$9R$UZ81vs61R4eo=kn3{Fg zD)8plyGVhI;vs-}XZO!uHFPrn@zLaNz9{AcD!gO~F6V@3Z`4a)9`bBf48zP~cw|D@ z&$)C{K@$!X3%|bPQwTR=#4ZKW^1j+(8(PzxN<3wS`4yuoITC|;F!+|;@4)qYvLf|P zdY26h)ptoum-lFz*Ba|C_q)1_QrGA9YL&`T2{a?qBLIi6kpFniWfA4+_fX9)d_`l_ z`P>$A{`2`FvDoYGEjs9-HCWtv`*$L~yHH+gapntxPe(L3a*O%wb`YHF{&-2XIBCT8 zT`e5)H{p$VA&5iwIb2LD2hCre4NweU{3Q!&osGVvFYCN2sdUBg5%_2sIXDvVM2*%y zt&Cxd*6iPY;}1e_1;vl2L6ED*h?!39*rI8P>l*tO|5qlMAlHqSW?h&9ocQ0KP`%Sp z$Uw~Gb90X{9p+sj*tJYdp(Xmdy@SeT>(RK|YRrkOGu*;IxmNaEZ(}BxuUb(5$KPi# z^h+xhU~RrAXx32}3uPBYJO~v|gN6GxKDYV1Bh!+PP5C0nn4(agw0;^UJn~=By`%z8 
zqOU_NevVz*OIfkcr=PAb&{&U}?>nNWf@rqfS!$VKbDDWI;Jy*1Mm*ZEm0nGHI`fFS z(Q<^DBVCBA5+#dWvgHnfnufPHa}yUQw9tBb_Bqjuvkd`GMfS;u5C0%n0xtnk&flSp zW8%!WAc~ldNQH+N={n%{N>5fk|1THIm7N}_1M)fj*U6^umJA^ShcQ&6)~Or&{k%jA z-hCf96Y=BuEk?4z;S!f+O!4K-j!550b)NXw%C5E}9i2SS$7o*3mhB2q%1+`urxQpVF{L#z1igvHJVpJEfp!D#? zbFtH^$WK)O1aY5em(X$kUMv02Hgpm>C~M-W?^~S)*dVpL9H*PH;%5W4uSe;#lsFii z_=3Ilq(f!@{TjrqeDIw_-RQwsFDa4?NcK6#ddnU}?{a%6Y~ILhE$V(4)rMvk7`z{P zT<(TQFLD$C@;BR^8;cd}D|Q1moo~^JG?T1s#(4f`y|v6{8vQzt+r97NIMx5m7+Q_!El2TtsV`6X$*6ceZ#_L@vaH{s!=lH*k09RV1IsSB zc+Q0xZbw5RVkzD2+VQC;C4u-b>AVTZBgnFH1kK@L;@dg<=>B7rAQ<6c0^d(O`XEJ5 z^8NB=>}JJ@*kuk|oImwukLtm<%BI-5rKva}&a73B|9K7ppB+&?fFwTuPDz=_x;VNV z+&wBgJF0F~4VtOu{rz5_zCNlir;VM+ZWYr?I3XbU9TFO~)DZ3(0~Fo~iy?`#OZF=h zh2rwk&oh;|YE^j}O-mrRbssS{5M=4{m3i3QrDrm!`7}5TOdkXG|F;7g)auO>&Ua9eY;-M{;!BQQ=>0`=kdd9qpe=4pz z{0Wuszd!YP`yLC?*R9|v2`Cm&C8wfVtq&zyjke+{7mauxNO*Egm@3mzh_EX&zi-C_eb|Q^%HJ*dxPlNHAn%Tu@)Tos&qYNpBEE;9#9}Zzy1d+ula}q z>^REzT1WJw?;Dos(FDo-wSCDq2c|E6y-HbJ$vxlpi{u-s5!~3gxvE?ggH@H0GRe01 z?jdoKjW-7r(sNoBEH==z>`qSI`EBk$*sVd*k3z9Kle>q-Dv2#E+u+fVyw3@1ZXn}&(BS>1*wPhVIOZ*$4 zFnl1Rf>Wa!OD>xXnaY#Q{PyPZcaK~AtVbV8*9?b8>}KcmO(Y)RzE10>YeK_E7H z{?AH%tUqa+vUE>mx5mwt`S_)70jLNlJub~NH9;^K4l4$LU&*b_P-4+sf|a<42<*P~ z$~LNfGkw;xO#Xig{By&DWa-6T@Q8p1A+YfhQZyhjYg7unn$M~M#8TtG$W%p4*&%~F z@E9s0A1;te0&>YJ&CI2X{4%pf1AZ$^a!7Fl<=jxYsIA@tA@)-%GU>VHF<@I!i4_r#G=`(kh;JR$9Yx`hx$iKEFoj+Z?#JVT1WI z+MPDhYZZ*-`=^Og^OHEh_8+*vKc&Sm4J_FE$;!h5mq5acwvbk%`kt7nUw@I&ETwJwGt ziq10!{=4()vGEu6M1U42R2CF@k-74z8>EY}O2~k+ke&*|l#hVxyG{3~3@W#|F4jU& zrKY>IjPOF%W@jBAFY5v(V}xa5h2>Ak`KNH1by61mm)r$~$T*KjDei7QC1JhSU0Kx$ zBqxnN=swtE)J*WT<6I$Q6JkQbr~BlaA76MTuai<2rD^s7xEyrCjj=WI!}P)p1ch=? zRC-@M9wmCPzb`8}X`}!|*y0;4@;A%JwWTXBUud1um@b{0^8Dm2Xque%OOI*6^S5-w zUp$1Teqi9qscpf5TIDxjamvoMs?Q+N8q1n}ckd3ul(BBedw0$!?{t`?2Jovn9o~p! z23Q_<{E=^4SFU#Kn8&rr)EIuv9_HP3?FSCo8#$4KqFo??>4QLS`AXhO;9$sM zA{6#AQo_-jjU|5|^8?oJgZSjF0Ue<8n5Em9ETJ#$K6ssOuE$A8l1D|dVoxT56p~D? 
zl)u-R;X>3r#>K$m0&GtaZ(UL6=F%(c>~a&p!Y%e`h&ddnnJnB@`GnW|l|bU(Jv`it zA3613UI0E}J~so>5r)~SMbTjbC*TRu7^cY11?ea-W%dWZ!Aa||ukz3P`vjwLmmPZ-vnA`EhFR&pXgnya zHna32xa_;)6ACp4g+j1tU9z3*T!b&jeI)eAB);bn81vqV!!XlHu`-%bI^wFoC|{Pp z9mn)MG9+64RB%&Znj+2iaat}Dc%lI2k#Q(f15IP1ec*;IlpDHRI4JRJaPL$o{&Qsi zUWqLie0xAh&v%rg1PV#b-H8xXbT8f0Iy^H;N_ElYK;VF<7P3(5l*|pei_Er&zvHwV ziz`!+Dmz{9^E`rQp!A0~uX-t4%4+;Ir;ZyWUOQCcC+ob~3i9#(f`$ny+A$MZ2x#Hd zRA5L+#hu-PcV&>-t}3jw+bbU-nX(!j{I+j+qRg(UPI?VZnYj_Aj`=(GM03gpxW|G^ zeg;>L~0y>f^K)xQ!g}vM~Tj1m{-hQuZUF&p7DBHof(5c@WC0J1C z1_NUdB_}DS@zQ(#I#j`^CV8pYuk^KYSXf3UqV(%OS@zasKe}{|%n!Epf&|Yp@og>c zkCt=-CcM3Q!;u~CH{5Z(5wL25u`{d^kQW$`Wcindk39K>Bu5|=0Dk(13Qyn$nyhT+cnuSuW&B^Mr_ho)i2}6)Mryaarx~Ql_UftNo4`b z#8%LhddXoa<}jY&rZh0AOzk0OAe$`mNAKU0)ag4pCO{?qwDC5rC}YnM>uyE!+L!cw zZNj8OH0{4*1vLlR%V*(>bno1W!nmrj`aEtXsptU;Aai4A#WrJoVbbU_!hCrE0UY@s zHy5bkG~F4PdUEp-HE#u_QszY?zcKnv3Cz9$PGL~Rf8bsdyz=Lr!5UA|dfy3gu+cbv z5CF-EDnv;$B3jD(iX~9&l~(qG7D@dp#)^t>0KonG7AqMel_DNcx&cop-0uMw@9lLP{ume$0LH8r6}xNC5`oLjX+tvYw*OOMRVYVxQz zSKRAabz}7jAPfn;t#i@a?duzzZf5(D}J}7VF#a1Z4@$q27^Wu%o`KPzM1L!taKbc%hmA><1fA z7Xlh708U9-e`L5ZKqP5n;Jdx+KkLTaXW5s^`9ba~)C0{4UNmP(Oi?!7E>50#Hh&|; z^8xoWI9fYj%`pqR^DPh4dAx@Oc%uHD(@Cu<{EJL`P-s;sCHJeIVMfp}XngaZJf4UD z60-21A4BSoRQtlq%@3x^c%7;Mw*%N_P<=a!^ESyqqrU(zaaVgr40QCNciXtDC zLg>Kz;eMvQbg9#bF%reCEu%qs3Gx+(@Vx0G&?|WW5-l&O_TF41Wfbfzpb{gbi3@2j zD^H{Q;WjC&BfLSmZ1Us&pwBxDKk2pG`9f5QCDrYhoZ2>ICD6HmBFy&EE1;IlPRdGc z>Rb$U(WV|?H!1G1CSOc&4@n+`by;};;|wyGW%<~|Uh<)R7XR_nIWA<3oL{b!MQAGx zw0Q{Ur)4yL9#J0wA)DyK^LVr)&u}L2TLHK^!!iIN9Ve7`Grn_m`@H(>+}N~tSAuH!11CgP z>`%HAjbVmtuPcWaAUVWD956a+2n+_WtUenr>Dl7(Kxwsy@k4%;_!1Sc3v^}#cA}Y> zyXYx2;XpSca3&e3g&ro$wJh^os4iSrLzNMr6c0bTL$TLh@^1hQLjC*C7Plta_Mny;{eo0b zLRzIbHsiFR->j?u1Q}2f61|&*RzredMhbIR=Al%2A=>_zmDpd*_6I+7a_^ z*-ci{XGY182^23BO)dP#Wa3tN*gs+LMLvML#z5Li6`SLdms7MKgWe>)KhtsUT{TcM zQ;iuf&`V_jfoLtB>fRjMnq&RpMct6DsZw5|H&iWj}NB^gNfA1F&PW$4uQGW({aSDved+=(YwkS2B!p|c?8s` zeEc2~;~-GwP@j11Oa?PaCt17MMe53Og2IWpd}I&YvH_hP$r7zRc6t>e#3w1r{aN!^ 
zg}b9%^kR0CSKR-ts0HeR*Kd+>Z`3dzj6fmK)rdCOt-=KbIR}}Nob6B+xmhyD7e+FW zZ3exlkN~w2cmHQ*HS;i^2Ol64L_@386mgA?7d^`@{V@6l@bbBt7#C$jMQ1 z?4RTVyr3T2YQ*>+^=+v+5NAY&_Rv#l63`^JP;Tzq_*6b{KAqzbG=ov>N>c!d=GwN~ zR}j=(MHJ0T0ACJ>AW-$@y2nq2K{!zrE5tHK{x;5!bH$LY{lI78DfP%!(3g5>o+c5P z3O|dg`B|ME^WhLZ@QBqV$(5qfiMVh<(MtO1^a}u9EVwGsZFsU`n#IuM`kTd}Z4rFT zQq81!{bAkOk>g`e62)ddxf}zQhsD>`;f2=f)My!7^XZ{XP^t7FOq>sP`bJ&GiXRl` zRk8p?oCmE%AuXS)1;Rq9nOA#cAOVHdu7FCrABDzhsH6`7k8E#B}VuCo;X=5 zgjJm`scI}J=0s}DcPlqO(9s-3$nVVq0Xw;CdFzT-Gz!!y{^iZw^EtLwc)lgEZ%@v+ z;pMPQt0p12xmPDjCKw=);<$MOe{g5+;X>ae8^DMYTmYQR#67)xWM03hJ(%b5(!0cPSFgy;(vx z=0>?{-xSe5bno+~QCXnb_4bd&76R@yXr^ZT?)OG))gfbdRT*_LKtlsA<-er(x64t2 z+}JszGS{WY6!71GZNxY2==> z#HZmED75JmQR%IN#9=X7W+h&Rl@H9WiRa`G0PJk-?df8h=Y89t(mJ}jP6#X!)T3|2 z<`I38Q6)+unQ@yCuB5w4(nhmE2gX7A%&89m0D8w_-`oX%0v$a7dU&$ET;R7}q^y_P zE*{lo$#hW3MDGpTKz=y+?F+R88t3dWF*P|2eRZ)%m+pg2&>%vHE9DQq3y1RC{FpzT zhF-~XA#TSLs%X!`jKLiEUa~Ye9XX;P!era&C*U?dlPfoe$wB=ERL;r14?o^suJd1v zU{vb^SGZS~x&n2XRb= zr<`arP9>x;oWiujNxZSL409e?H0O}1kLG#DAsAe5lIRi4=IZP#fZ7E5_YJ>hX zfWj8GOyyKh+wutRy)76taZ3_BsnVo*#(1m($np5?ByV9e{k(wxYyJ~;x`a> z(&=B?CSn%_?r`A}f?R@1MsgnUovg+v3X1XeFDz1Jkx*ZFmJ?*Wybg@{yac-@QeALz z22kof%Yvv zySb9-jHVT-OL!lSs*i|;3_bSs%{70(B|^aBqTao0MzzP@fUV$9HL988o0FZ|ma}~x z2EN0jkl=dq=l+<5*pHj`Za8kP8Y;mP++DpfVICluW8SE65^A`Ck48{FM_mrC3tbEc zx#i#dPL?MkouW8?t{W3sQXPC|20eSi0Tz8l-V1gw*e#&4$It}ue}iP$??CYNL8-w1 zF5(V&VG}g39=}5PY2Op1eryQjW&qkFL`>}DP87bt%n2zzOeTgeYe;Z1>Q^cD@R2FZ zpm2J>7E$;+4%$JhqMBmRVgX9J=tlTF%G;XiW|nS++~golPOkVba}@BB#lYBXb;KQr zSSE;5{VgES6}CTM%AU1)g8L0xhLVA-c z71%+o06!kMN}oJSU_+E71+ZBaC`>J0U`UJT^2|a2;U}nnJ3(Y2ghTlUULK zEIKx;(r*XpL_jY;Gm5pmY?`%g_hs7bBoo;U>b!fhPLh68GGBCj=FZH;66Np04P~IG z>k=mUv#SA3Q?*y8=-Jv17GL4NR;4)wZ3)1D2c`abl?AZ?4JzC0{%uzyvjCWG=3lIg z)Y~mdFl@&i8}&X`zCYrDB?r(z6W1MtqMjjP!U6XIGD7?J?bwAngt)Nb?@ZEWet5o( zNp8hcIE>ZkKmLT8%wX>MZ4RI=Xf*mvygF^g0o3vO z*!ih5JERu7;WI{bPrC$>?ZZZz^Elat2Ep-b@{g>bBW>)@<=_5#-C4Nicv^o+aCAppI0J){;@D`k-JtTGm`JYCc_JCD0 zC_z=?rv?FNr`ecp3Tw=}A#a+4;UuC3DRY7!B)V^r 
z)B4dJAp4xIPvXwcJdxg@xj^V+1wh?^qRsB*{a28!_wSLxrjqRj-?x@+kFm_uI`rss zTynw+BRC?zZ2etKWq|W%d5Es`3s-7A`9Tnw9UxiAR$K&B)1T5Cpf7luZcs zN>DkG?PM}2h^Mg%ayI)cHtl_toVIBOdFQ_yz7&AUSQ`QQV1x^Ow9J@Oa{FO)(Df}s zCH`KVwQu0{^rH0Z|$WDWKYwww8YK>O=qxUVx#xJ&1FMs#xos@{a%{ zpLW_4{5WOVy>q7OE6$N6;%Z7_*Q@0LorXW`gcGSFAu~x(EWIVCNt>t9Qt5;(GJR52HdFKS8%!n;?xS z39$Aq$7iRX6k9us5Fo44Myjjd-r}5D_L(i&=Kq}};yfM`#mmjx8>Q91WL0w@N)M9A ztSEdu5G^bL#GNPC#&s2`V)2z>1sp|KAs!&&Ua*NVsP|y`+1BkYsX}{N^?~LlM}O@A zDY|OpRo1a}w*B|pWs5c*kjps!!J-FjVVq2kgqpz-U+G++JbGy%F2>@RaqlKP&4P&j zfVT<0nw(4aE7G8&40X&HIBr>0;FHF;h*d4d-Q~KVjE-f`y=7@rn;ei#^pv9l%~2*E z{9WQ)cK|mcKSmX4F!&gqVZI>l`aKz-CaZZ%Gk~rDwNz#Z!-*;CoSii0skWdO7)UZc zXBw7k5eyISRJbULq`!P$Lq25De?=JeZ0ak!3E%r}#G5a&BBw=?E%Z@znh3l$Ln(krxfAV379?6c0^{pn^x{MoEzUyg+n6I z7ZvpA$aq?YM)-j{0-JL!Q|n#{Uu{<(VoO9f%(-mS0(6DdV-+4?#Q5p21@oOx$j&%f zN5ry8=YEKDm8$Amjjf*8)yju+B5fuzfK6Nn1UUe zji=a#sF)YJa@p?6cF-@lK!O{>qs(JQVu_aETc5fFY!@tNZEK>Z6_s#UEXbJ!lK*Y& z9HyTdcmE`HqRHIHUB|aA4_U)P?PP~bhyIr}?S0&n!FG}8m5Sw!W^za^Xv<_wxqV-8 zgQf~_O{v?--j6f)scu7^Zp626Kf-k4mpKiF5x1g&Q4N9{nf{{AhQp>{?!5p&xAZM?9UQg4=iDzcQ(CHev)RsmMZYK4VYSr zy=p@LHf9d_vb*$J>qx}kgh^T)rwtu8EGVoBX8{pLz25!8d+xHz{akmk!}ecn^2hY^ zvR@_ldZf+IgCA$O@~>KC-|eT-GrKV7iTo745{E9wMLO${rtUFal0qdpusbWTq(@wS zVk)#uj)1caj)(@wbvd%-+e613GFZFz7HmG8TDclHCd9#xUL=_-eotE9W5|c)dmHKj~0>K&@IjE~V zkPrh?BxMxeWT6leL`>0wOA0g?_e0s|hVi8;yXts%`9;XJVrq5m&ghsSMFTeRpoD`9 z4T!3C(P^}aa=(rGO58kf00?^pD+%vM&E7K0YfJB3XGQr19LBjR0r2pQ(|#aglEH8b z`)h(n6gMAnz%HiIj|-$+{drT_pU)YBqt*T306ho*2Swr~>YP`P)wuq8vb`5CK~UNR zB;nb%`V$Xny-7R`3trda6sHLZ1}BEX63U=ufw@pxUlO`HM36gS11AE2hUQj>3!iSi zosE9I**otN>Z4x#P7{A?ObgXrw~hqkPnfQid*7k)eb6}Oc7F{Lr&eS>l4-`iMMy84 zOJLca2#uZrBu{MQ$*hxg07}X#452iFJSo?Nh`Cl0%ztUuX19kvYb&^m0!pr(ITMs(uiEP%mgMi`vZl1XWK3UH&xpZ26`pP zw(-UH%Hv3JxJAP+(A8eX>XqDQ?zUuks!{>1t`mTMS zUooM7b=ZO(n@y>}$BMUbCnaL0Aiam_?6TH|4HMJahrW---w5E6s7h#Bv^+M8-He&4 zkK7j3z;oJaPmoz0wNhZn7Qd*LC<>EH2)=Y=B3(sy+D4Gl| z)`7%@%3#Gk(5Y)+HvmQ#qjBRg41ei$W}G>RIH^rQE@v6R^%-Boa!oX7$La4i0nq9} 
z8YWku{Q*^Y27cd_KqHRJdxf|GA3m_dfTo#IPHpjj0=8bQ*!(Ou|6{qS&1*)=RI9b& zazk*v&?K~Cr1-#!6{kyADmzmGLqzTD>xKh?|CFG~FkeL9Q zv1A@SKGjGC=A@lBZxi-gC5Dm^UQ5ebWkBC$jJ&MFb1LHr!FjN*S^)PLU;kk7jx_LA zKi#)Q|44O^O8k}!Vh`s}=`tHqJ}Wn_^dpHQb^d=M#<%nTg()RG_knVEFBF8bBzVCOWQdnHbPKHps6Iy&-~v34fU~oIG0MvpBCf z1qeMX4`;7HX}QrGaL9`MPb5mM@&Z5a=>u?IzYaJm^h(s(xpe%i_m1rYZpkR}Y(tVEFtK!^lAUoQ1`%wy0M{$6#K_;%W=_YBHK64uETe*06gft$Fe4huFnji5%qzZg zSpel27Y;hK)mPm9g#Ah-AO*{Kkaq$^we521C)@KfyH8Mn&8Mfr-4aZc*dr|LR`F-a z3I2CwlvI@iS-`qE1jPRuc;(Ciw(5I5V}g&yQ>Ai%IuN9En2h)a>T}*y^i&GZYyH_d zapT5C`SC?=ID}|6&e|q7MU!b_XZP{Fwjc359RJV>Drozv?ruT|T6kl-ydUvTwI+|) zHOm(Z1;P6VKQF{cVNm<|ocupoA4UknXu}X9>SOf3Ll&mWv4#5;jDUzqrg~Oew$Rs4 z%|qq$k@kTdR&ivIb_BH5d>6ZYc-djS!40ye=0u;-s5s>`a=RE7D zK!gAwx>iR7c!Ecb4n>xV<7;~yk0HB#ee9NF69OK$+k;Dt@tu#zeRbh5pw*hs%<4{K zKl@^!S*ry14}o6n334jXKYBw?VmQfa4benPncON{AN>p7Dm0MkU8my3c&zTbuwgE!$Flzb>NtMlNrDh!NV|N zstq}GI=9JFoHI=~%iTMoo#C)XGei57c%|@}1u(zdo&+|xLAo9?7n)A2#k$L_w!lZ0 zOSbuYJO}ahV^?)hAqx76g~+1>5j8w!TqutiM6-SskhTb)i|*JRVmKj`F# zo2mpSJJW4;aNdQbC(n$VK{&IgtO+aDjuoU*}ufNk)1~4ywlURv@I6mmdkvIq3EEn+B|??jv{%2uMIn zm@AZfh(;Za9PSNLp)p~vr0(rB)g(8&oo63q$a^uM?_8OojH)t|V}0$qPrJ$O=P2!y zT)V#u29~$JA3L}O#^DRAplDNz;ece9aUA-aytTl>?%o{I@x$*6lyBUhi9u6`e0Ij& zn}#3|nWA@%z>*QYLNNP`r4RPNYQAL~cE`MBK zO_ty8sats^*&Wq&D3+})24GAnXSNep^?|U}%1Yd_M5gY^e z&#vWNM5&dYl+n8TGpo-jcM0A$vh<(G96@@taZ?D}IFrBOYTbtfoXFC^_EW|;@q^cowzj8l4I*9B) zi=KSYb*XDwhO6%^wu={V#oyBCK`v=kH`)1g{gZqqQV>2N1u3hQY+^6a|B>wB(&k;R zezNm*2f3^bi9fYj1SI8%CU|>#Ejp2Qb&O_AE>2`+? 
z@;Sk@?V$c3u>wQzN8#XG z3M;P(uU447J<)*}jEB33yq`s_8206DCeBE=hr|nfBHMzTV^4mmr!l0T(TQOgHdy~< z@=EOUir%G@@P+Y7>=Xof;5!r?D}@B*!2io?9S*VBnvoph{j%aVm_%X?Y3V?Rl+*?)YbAIuOHKE{UE83G77S^Q8xcN z^fNk{cWtg0U9OXc^4^<=M7ME_&7Q~p#s18#<)+y{pnPiF?3xwK?e#mD^ZAruBb3H{ z`BQAH_{()dOe!j-g6kP-UIu(MV}1?8{Z&g*vd?++F9}Ejd($M%S4aB$2=Pr$>|*26 zFhv8mNQVL=Q8BSpJxOJ4nb)?04+BGCo*2N91rvYvBL!6+(g0jAWDsS+XF;L{ArviW z@w-x&x8zghJv@rz`DJ3$-KD06e=X-%8@_xnKV%ou6IM$k!XAjREi9gD*B9%5PAgL% z(83^m@EgaVqN?#{7KK2{<5su=LT&A*z_6ZAoqu!jx`6BL8i1Yc*XkKPap>+^6>AsE z@Zs{9hNGc|zj?fV=*$sDf4Cw(efT%nl7w&{ab?jg@&XHMf%WM%B~^`h-)oB4CdA(2 z{if2F%daD<3{3V-&8{6i3XbJND3@SnDr1>fz4?TWO`qz@_ohFeZ+~8Ck9T`FY<`D_ zg*BPKM&IA<5y|WgY%lv`$vAJYsm^EBxa9LZPcEqij5_LeQ!B=T^ z{>UdFkgt@*52XvoWoK7h0iJsruLGI5gv9#(K8lf%k?Z~pvVi*`{e`cNjt(RuLR?f7 z=Iht56LnVVwkvJTgYzrZW=OgXw#b&2mN&PzCcO|WS$X+sTchy7D*^}Ums`r6aCJl!otA`0*D?Mn5)BupzYyQKuF&s5c1}R`4;f} zRBY5K+#f?~Kx5U%b+6oT?2M^ETd1`FCSceQgoL37?QX?Y$9&K=$ zgjvs)t^>5?&(H8krvu~x6nuQzM;@2^nm70N_}-*uOUX=HGyVumC&xC2 zGLo($-pcLXT<{ry;ny}dnj!gTlaKqsJnt2;VCG{HVSo&;=OuLgw0#$v!wP*cNi|H3o$ z1w6%KVZ+n$jXNT8I;xf&s`0?4v`(0KeX;%_Gz}u%q4G*aNy&yG3)DC;o2bDQq*Kn8 z-2Rme3j^gff*QiSyu67<2P02U&$V>RY_UupJ9eKdTel_4Y%j#i{n?(2y=e}+MQ)hD z#6+B$ni`aLB@2u1cb;7$kq3H?2FO^G?YlykgYsiRTgcApE+#vsNPSAp&!KMyJ>M|H zIHJ3}J+Q&Sinps-fHNQa`tH+AalEcC0RYjVgU2eVuTOG#c<8t_hy@xwOcyARX1vA1 z#{MGmbR%K+@A`TG+%sx>jMFboQPIG~O5^)wzv#!~k2A%pr>CdvRtI2_4{Zm59b>Qk z9AmPXp?Oe#nCt$638sq8AvS_dvlq;{wUwF2Ct^&jg*d>|G694wTDzg4q1QywM^y`O zTj0^2o}V+$WG5zy>?O7OJM@}f7pO& zE32rec`q#^L(Rex%}Y&_hh%Z`5d&sW@t2gLGuZ`M%KaFm4+#q^88=f1p3`E4ptrw2 zPqXX;0go*~TYG!8{W3rJ%|`vap#<2s;o#s-4{OIwC-bCa<>bV`hUNrj{z$eEv67OK zCqT)~?$|8WF?e4eM>}oyQ*v-%y|teC3GifbadCm5`Ey#^RT6{xM4OjOo&7RVe0+S9 z>mIi7-7a;V!zvl6fIGJfwxot`iKS?QbG(ayOjd%Fq@+g1!l6Sg1oH9_q{g0f+_QpB zI0$g?P{qW=(l|e&fPJv+>K;`4v77ayF&lM|5cAju1_i-`0Sf_pe7^h9BJFsy=b0%e z&w!nroNV&D=Yb9=n0b^`RNVze+S&yEPq&tmQc@;hJ~_I$^nk61-ECi^_D}#k6F2n1 zBmFJf?NS6}EG$*4?+}Q#H)hf}Uy3@bkZACt|992h@tm$PI1Y!#Z)c{N%#yUyVFt%3J2fJ3g 
zy7R&J@%H`iueF;Qx8wZY2t4on+GZ9c0+$;2Ps~Lmm)$8(s#ah_-_H05)h{|6I~im9 zZ7L7A+gKum?;8+yV5I}%!3%e6mK5(FBIU;uzNOki;E3;GDP!+;

Tu^EvGcpUc@$R$<c?%-v0~QfN;GY(^vfp+~^)_?> z8k-OwpWAS>(yj+H2u#jwLsS%6iD7#S&~ur#t0n4mXV=%MTU+M!M}V$}3sVC&8dD<= z@G)?rS0}5_Kkwx*9QLLPQ^>w3nsqFi^)H(KpPw5$^#hi*>QzwuG)zZF2kp_*(^FM# zZBS56l;hI`#o?c39DFuI4I__# ze@A!cE6M*XFVA*7-d~56l`(ZZUM*oS$#`>vO}q92)0XJpEE(6_>;L`pjg4h3f-JD9 zFfc)g=;FSCB5x{QT!EfJKnlA8<62-g5Ch1soM19UNkg>KZj=m3P1AX@R zI5_YD0ReS(OLGN}`?F=I*Vk+_tH0fWjHovP4ZiP*?xCv^D_M%C^(84(P=f#Y|C1=h zJmo6SiFC@QVDJe5ck=L1!KM~Gtt6lkCwTg;bO{LwX#-x*&)@JtKTlQ1ycS^)v_HWj zBn$^{%X8lT1rrF`ki7z8SXEUOD0MbqaugfF!^2?%#iuJ_ehv*G1E(Qmk&BCKb^!HB zN8f03Q;Op?T>wB|`d!D888|z0z9Qi>-ZS~l{I^WVqMYUb{AAhO;r1_i*ahMa==xVg zZ>{A)%qB2XjAw4du{*bogA`Upm1+HT{LR z(ACjWn6Ten@ncPGEfzVs!<2jDD^h-AV98&ip|$O#V?+1J8X(J|&Dg$4guO~3dtXxc zpHb4?U94x~0U^8JBgVvdjEPl7{t{@~QAUB$tn_*#IGiwLV8~UJE zS6A=;uFn6^3KS*S&}YFQ-vX&W(O{?N>EmPeyUGOm5r9C9NlY9GeFLCF7cTO$v0)@L zYKp6cxn5mNp<>>_P>#^;L!_&`3VFxw7Ch8`mX{WFY&*8`ND4e6DEpO z;I;RoNbNX}{U1mSsldjxvPj?QdQYTZ$H<5RR1e65%xT~zO;(vmC}ayZ52`yZH9E*> zXyAfb1p^wPfDH&qy+}!O9P*!~0!zZ+f7>j#>IGsfZ0Nj>zl2&Xj9@x>u)h`409%&l z^Ik!r7by8yYMB^qZ+adkAt6#PFRv7`P(?G?A3wg_a@yDdk8mB_3)m03jMpcB^K^vO zTh?B~RwDw?|G?oRliF5O-#^?E)v{rS*J8bX*)JaJ==rWqG!t z4xsSO8a#|j2PUmE{6ga|3M1j!sTp zfDQ}+A0&@4^j9H+8yPA#TU!PE!6xCeGLuHO0(6_>!5m9dOG}=7D(k*tp`3*H8+))V zscPS8$99%J~gxus=fXJ;pU1Xa7wb;`_)2Dl>&;K0g();6?& zeT+>^%tS>6#_u&*vYk$+f4jyHT}>cyY25Z_u%uKAz%EzgckeEbF={~~;;*0a)`k{r z?B}b`Pg+1LQUj5nLY6Xm1Lj8o*bK2TF)4t+#gkH{VP;kUv%&)0ouE<|qZrs~(m2cz zq00nJGT+X0=Cm1GgO86|8%OEPlK!0grwl4OISDd*2z) zb^HGPRY_?`8EH^oTSBO$q(Pa5lo_%r8Bz9ZnuYAFh>GkH$|%Yfp@GWGNMt5i_xsfE z_q(qLzx&Dkdi|gMd(h>&F1|jW^E{8^c#q@rIa0NEQx0MKt;c(n@kWxqjbf&&A1^sT zop%0SkF;(*)VjNH@`KI_1_nCHGCI1WkDJZ`y?|AfelEDu2@#}|F-#~aDT%-aB3amc zQ^>jSZi$|r9>wEvj1xVM)aA>UGe-C<6rli|6WOSet|xW1zk%k|Bc;Pob~^g{&JV z<~vOaP(=DFg74q=X~{ls)bQYNe`De*k6GJo*4EZ5R<7){?%GA98qiM%jN~Gfig2k) zcZQki=}x?-nfB#WMySK`o?Xl-ccGDxEJJ3Rn2pj}0$a7RCy(ziZ1sNr6Z}lU;!9e86 zB{Q5OIsk;L!=KskX>TH}ioJ38bsT~dJx9mydp$r9{q6}mDPr9vllzoBDH=9_R>Jk z_9R&XUQ;iC;!YGJ%FM2VB@~LH1qy0u} 
zzBT7phwe3#HFVuSbMQgkVCXRamKYit?ZQ0C0~k&r;WjJr{OZ8mp?I`GrDZ90bHN$R5TPZ@6)^qLFE^)zBO@lA;Np@Jnfv-g`np1T& z0QkAY#nmBH2w*PD)aNuH+Og(SC#$d$wc?G9gBWhu2n?O1TaX2z$15^262<>)o#p%F z(_O>ELO7FREyIYau=%-}8b#qEuC;5|jsZew8~#26Vqw&ne0m4T`Rr0omBp?z!!PPE zCxS;Z`b4Op7V0q$vNl7uI+i_39AxCV-H=ng8-%7{u!b58g%F}r@swxw@87>GL9%6J zxS-7q#)T@p*~;12)FdnO+5j&GkoJAs)YzB+V6O)JyYF&`hu4VI?6zlC?Lt2BUO?+g zk$dWTGI48Fm(q)PkQ2K)JGbK+^xWrcwel>ZT}I!qu4iW#Vir&jz~y~7@-WZse0@v^ zUYza|78V9*CaO%>g5sAcxUnS zv^fLu&pKE+95P<~!5;wEW%ov=*D`r|7fGny=egc|r z7HZ2tb5amQ*DAm_S+0dL8FlsGPc1F-*`{?|0s;!p?SEoDDlu)68=2G4%1SuaWz5@4 z#OmRXALeYL#$G6>NPc06;i}wAAK5RoNjd)_J5C0&Z1T^V(0O5(+Wb`GB$F(vsPWvN z$v401YHNcLPt}kYg{f4`3=l<-zZ;w++pfwa3#e?87N=7*vy~Hdhu*?SP@L>LJ3G6X z_3i}|6Y}MSy|35BYfx}`K^{YpJkOF)wrxXF@&QJ;I{0Den!lJy z6#y9bfdjgm_F4++yU*ofn9SZrDD&3#_F;MLZaOk!{tOr=!vwSuWF&8A_w0`YU?dln zdL&J^z%bJ_bC3U+!|LdV6&sI8{=+ z8WdP%CfPtlX)@i{gaj_+_Pa!f19u~rGq=uue%3KbJ?Y-+jUqTPhPJDh-+-KX3O|Eb zsm9=q_kzCPb%JpU20o-lR?*y7I-7s_F!?3O3u73HC9kO&E^huwW$xIxCGUx-bx6Ym zS<7&hP%uGKL*{XlH>NwffUXqgT%}>K@yq{cjlE6l`uS5>8n1(IEm!1V72dPwOl!_X zPO_M|L#v|6P-9b5pGxVH5?l~OZQT!PXP;W0cPa7TvQN#$)s@Hry&Q!hlLln-Wd%20MV5-HDx&$n@B;Y9I~_91AAz+R;=Ct%IO=u?RQ;EY)H~T#um8xo?+T~{u0nI=obSHo#KyUNe?~_zKc6bM_iQ+JW8K>yE}^c@7uY!mSHEBN?z^vn3%Wi9$u86&7V_ee!eO=3* zv<;j0+X>1n8}F;H00Ygy`+$+s)&PQ-wzFO`trf<^1>Arvyp$0P_c+&Cdh0v4-Kw;Q zTFs%r9TIsQE;IZ5Ny>CAuZxR|2;^MwvyRlf zucbI74Tvc~!KHWa-u(?YdAYZijdG~K<{PD_BoFx(b<=V&n8qEI=ZiD$j6N!lh=_n% z`t12HZ-szwX0eTZ82-i-SlYhMexOmdw6qjRDY0@06DX)|Zf-WyV zA8(9-WwAiax;$T%EN=;_@Mq7%sLwO9hi*DcCyKefTaTt$z#Rno(F-cqIb^tFyE7 zN_zTx;LSS5KRyT{exQ1MLs_u-GjS1lj}VrR>H4gJv${b+L5TogT|+~uCvvOn>Oyd$ z~n|EuP#g zJ@jnNGx&PkpFJeK_>Tgp%@$KV-cQZvrzR1qorEW8R{MNuR<%iBM^nBk=NT zBnHT8zEyVR?hM^vYN;0=l!EkdLa`Gyt_+}ZV1z+Ky_lXqFTJyQZvxDO@7J4gAjUV>&4{owx&0a7)ygvV=HlT=ZdU?(mIq@u>3Z!= zy|)lY<>fsN3uGtoL|0D_7f5Yy;k2`6flVUWO<=R+`BJe>2OKgWs*rKOvacs77KPNe zIOlbeUmL5#zd%qm?r(^T|2Sw;A0vg&JY43GBB!8m5AG<-pS+GY8+@ijDpnY!(J`w0ctsV4aGGFsZCRhO^W*t~9Q(*aL48R>Y{_*6rqPW=dGVhn^#ip$Kx 
zhf-q~V4M-x_u{9FILHkg+!AAr-PU44{SfCGp~WO=Wk0IoZ;o~QBab@sST8YA&@tRa zb}XH0H+5aM`^uJmHZoTwqy5;#9yOG_L8kczXv!@$tJXQ(G)bH77BDpe9gQ2-Yz$@# zQtR)9+WJRY9R|u=%W+VUlzgmk=;xP;fu2>L9Ip95x8~PKi;6nns5Z18?s;Z*_6B4b z(BwcU&6BP!-Q{ZvE_Ys!l5##X_$5;TUVR8gh#v{N!NUJXv(Ti51xaFlrhOrq^{7a3 zNqwxeFfJ+#Lv>Uk<0;4N5EecuC8id7K&cp5We9gE3V#W+dV>F$FkXCI%d%~+)z@Gs zWyzlj75|UwkZ4)PmN)2X`PC$t>yg(@=5C&z}++*6|p%s*|*HG97vNek9Omi=~)jP-p?*UUUZJ`8zpN7JcP1%d7t3|yop5C8{aQ9VJU_0)xR zr#2S=GGr;;E+Zj}V$9bCJPldCW|JJE>b>GQJIE6aiC#ZLGDq8cfcqNbWml1-2Psf1 zStA`xRmcX2u5RkhrV#bw1IL(ts*3F?K0o!Ng%h-hlqEuiAYCXxb>#x-WW~j!Jp&VS zO@Ga0@t`Cyb8v7Z>6a8EP06EY0j^y$TZlnAMtt=6R>t3e<&B#f#Zc-I;v6WOoAr4roV^rjRlv42o+6v2;8 zAS#nU$N3E+`*b4qVCpwZ&D?v{`44~+<;~4g2v1dm+B&(^UMoSi7+QN4Jl>1wmk`Hr zzFH1Hcciu?)rYsq%eU8aaHK!mTyMBB{hd`|*ZUjBE+__$-FNmjPa156=#^cua_88$ zU8U;s#)Vex2OWV+6R~-sS9+C@N>u!J;F|&v1({0ito?vxp}5W(h@RY_eLDr*es|IZ zt+Lx#a=K){kx^V8Brn2|$W5XLhl=^-0eVGF-m$Uz5i#@=`O}-AKG+I~(Rz*Ed4b<{ zu_Y_fiT@q>mh|*=ClMjb`wFVgWi??@d|RQCy@3&&PKg*-9d!#4p}MHjSutWvwa^P%$ug zfWDs>l@`7_u^Eu<35v%A%Ost=H1n1$*`QIK{r_4I(zbV&fAj8LW@Y`-H%0#L3v>1ap%6~)@9z(!8{DE;btzC-I$etc zreI@ZW3#>t?a4Q+h0SY}F2H0+l z2?0D7`mU3wBchjHO7laA&Kx==Vt%J!3?iUP#N3|jCw{U%^9>=9kvpKJ0DM${I3(ITw|`z$y_{B>YqNh15%52LlLG#4EONF_(#(ucLH>yT zZhC;7$u^4b>7~%xz;wBm`9jk>a58p%eIOtrlJO~Wo~^wjOjtj(HTRMtIDYevGt9Ol zmLJoivD65aca|NAD;5pDuQ!Z!8$b%t-h^;s42&;HGAds%ShO9T1N=~jjWJPM6umQ0 za{SOoRs&|n4aJrj@N*hMtDI@ zeSBgoWqP`OE~++s`uZuMB_J>T*$p@?s>s&-DDB{p72wAc~wEE+#_RtGIHFYTXx?Mf zBX{x3^V~mEFymIPUL8!&EC9AA3NCKD)*L#I3uaQ{m^sudZU{B`yMzAstRaHUPJYyf z32OFUU=rOx`67#^Z_#ln7G7>{c{HWD{AxMhH8{8fq&IZ5yCTxMCvoXGH|JB~L&Xd8 z&TP_dg~Z(XTijSTK~k4P#8KgDQmNl_JI>dJZwGXzQt#cnM}^k59^C+tz=$-cXlNMk zHJcqU@Y;KW4KQK$P}uHsi?P-uX~Y==5ks_old zd4bludV6_5CPR=P6u~V?C{opC?|i9V^4I;oVj`%vCq1on@vxjv^ZRK_XQfl_GN7ty zLURw9lYlfN`6?)DJ75Hdf}JP;HOQ(m5k`5w(H;8Z_xkdsKTx0EpQ?jo7>p2*9eT>= zvzq-tzLNXpf3{TM>&1Vac#@Ju+2=Iz1eAvPIcG0a4H?`L@V-&STE<;-zw$bwV;R^F z@IgHRR181Tq!uc4R=ekHe9iiQxb|@H7fpOc|LSD=bw<0*Q=HImk@9SFA3f|#bs*G? 
zjEtFs8fIq6Rhwg&#l=4Sv&&+!-6FBVOv3i%F z>SRw(PbHvA{M+dncm>TqT3ibPBp3;)B@7BKJv!AI&q9 zFoKPd@JxQWX6^ZBH$koDVsk-Qj^VE*$V$gaha6Eu3H<`oN`%j7)8DW?$c^xLBCcG$ z8VtUp2%QYyonZlCC#1uoNBRaagV6nMw;*+8JZG8}sP?~(n_J}!Qd!0j7F z!?w%tCeaiS?83+w>%f699XJe@Y>*qIfc0!_{A)Jv2}WOXWz@bakblshhrU@s*c3L% zeT>t6PK_|o!*D<&>^)P~{ono<(hIY=KisLB2^9GgC(f{LG=TyH#-4EG7JU}lmvc|P zqLs5Rc_%AF<>X8R8#Vb=erc*~F;61#p^o71#V_P{uxQ0*rhuoJAH2-C~&Zp9T5HvJ6=BQ>mdb%T0qrLvQ13v7>I4qCAlP4HvkXnwf^Q zF;3t46&dPzM2swUk=pzB@4sI-nWM-LKXaNi!+|1l5lF3e&V}t*QW)sU|LxRFNC?_6 zr^J)qc=!ii-rk1+8qj@_0ZMG%Uc40wamK`m`I7%~5E9p)8=$Z32R7~M>mz+%W2h%u zP7-^(|NF`$@5W1C-u0t99^F_wrI;fBR?z<}#NrnN)>^0k)k65J&g{d#dNqcO{zuvh zE03+-BC+n;wQHg5<(xn?l89wmD=RBLpX)I7q?&&rWRPuOLNpt@ptI6uB9xuN$jE$B zHJI*up6%e7JICWw(QbYYUSYWS#q}`n8ek47GNAoHyp%k9<0E_gc;u|>XxE!h=$VcC zg>DT}xd1nMx9EA}EByu_T0eHF*2bggpuCfbKFTaZ?1b3#44_Yj<9P3pp~SO>hEy=3 zol>jOhPgD22H(SIpH8zY&6l)0kX{kc(z2EoHW)D#NQK*}q<>66BW*R*@2fw*ZiN^t zFj0v5c;S6AGt7<-JhI?EH>fY+G||8G&Hz+gvMoT546uGcBUgh8YjEhp=4X<<=G8G? zFYOu6Hx+Fc8XSBR?aNT4md3=yY!oq|pv2No?!Vf51a`KbLIV(27715yEuqHtngyw>_8)qtS?I(NDMSQZrt-VP#&4%$Gfrq<=OQIec_e1U(uIY28LOIJPg&v zY8z|-SR>`gZ}gL23LVAX#z^^U;05%6e^@rY-uhU7ILwAXLO3Ew7KdRYtiv-7FjkqW zQ*n~eM&X|S(%Y;5D_Wiu?hsMj+H;s@>2f)!B+VtUkb)?3kPNu^`QXSK3tK#WLlmPIHvlfcV$|3Ni2nItI z&E4v;daL9_@k|MP37TzaN{MR?1#(E$&RtI#g8=_z2+gRsp6Nd`kH81G%_v(ydyPWZ z-&?MS!1lmOq8A$zX71t`v#)(@fC`w8q*n%JJ?D(6$TU6a!uTGZp_Z1Ge3xlak6E!5 z;B%z?54}Q3^N*Vb@NiJmjGp5lG>jubJjxA_y>si(2!+xrCnxtZ)2M>oNuOx2Am4C) zWGtEnu5LW&;t*GiPT24DsPg%a<2+~tT27IGef&q0v^f9@L*P!fzLCsksP* z<)j(eTo?AY^1P+sG2lQo;~B&O4F$<_2?b4(EXZLZ(^j}}IVGjSzQGu2cXj|vp=Nf7 z#s}v!P%9Z=)4^M~pTjySiC8Ixz1TT8xC+su{zQcaed-hs(y*!zn_vWH{B|CmV-~*1 zl>nD$e7obx&dJF5Wix62Zjp#SF{Ls@T|)WJ?fUTH!_K}wjVJoAvUxu|LEi#IPC8QR zAhO}tl^TdVK%u=Qr181L-Q91?KGGt01o;;(?XCKQGqp!Jd8t_zALiO_czFpv!jYnQ zKmhN;!yS(OLr+gXy-i$IGikTk{6D`R9_RzQ2!L>9!_qVSu>ei-&&*q{FU*uIlmYC% zx9ir5eo=(E!wwi4p8;>$*x2|b%r^)Cvq$~?y}X9~Pi3I}O%6fjfiAMI?hCF!M@&w$ zsCiSLiy?)5A_&;T{O9L_8|X^ngvSL*-nD<1E&3eLH-IO8(pq7Jdjcs(PKd>P*%ec_ 
z3tM<5uzs++`vz3{bEu@G#8lkf+S%Pb-q6iSji2cz58nyg!>=T~5BdrljLOA_)8dZM z>gHdjK`r5WJb;#|S6L=C4QPPy+qi29RQ?Lm<2L+xUy61vy`3Gy!d6WLB%W?q zRXF~3KaScVz%9~b6qzz0a#xBZdjc|&}@Sd zE{krlf8A#isE~^OZ1UL7qm!|B(Xp+btiBSFR0Qs4S-AGny=w-onS;qt8pWIxI~`G5MP zi54HG>CcesZ=o#+&-FmR{Ux`!R;PM~$D7~?ny6S>3W4o4O(`6ME6+Jw+P|wC3mLoM)=OB3+Rt9W; zqM1o$5!N#w=)^fb5Q8#9{$wvcmx?d`_W^+aKM*p{;dXLbpHEo9h=M=zvL|HHju>43 EKlk?+t^fc4 diff --git a/tests/cli/test_cli_common.py b/tests/cli/test_cli_common.py deleted file mode 100644 index b0faa08f..00000000 --- a/tests/cli/test_cli_common.py +++ /dev/null @@ -1,15 +0,0 @@ -from aimbat._config import Settings -import pytest - - -def test_simple_exception( - patch_settings: Settings, capsys: pytest.CaptureFixture -) -> None: - patch_settings.log_level = "INFO" - from aimbat.app import app - - with pytest.raises(SystemExit) as e: - app(["event", "activate", "nonexistent-uuid-str"]) - captured = capsys.readouterr() - assert "╭─ Error ────────────────────" in captured.out - assert e.value.code == 1 diff --git a/tests/conftest.py b/tests/conftest.py index b079dfe9..e8b96935 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,75 +1,74 @@ -from aimbat.aimbat_types import DataType -from pysmo.classes import SAC -from sqlmodel import Session, select -from sqlalchemy import Engine -from pathlib import Path -from collections.abc import Callable, Iterator -from dataclasses import dataclass, field -from importlib import reload -from aimbat import settings -from aimbat._config import Settings -from aimbat.logger import configure_logging -import aimbat.db as db -import aimbat.core._project as project -import aimbat.core._data as data -import aimbat.core._event as event -import random -import shutil +import aimbat.db import pytest -import matplotlib.pyplot as plt import uuid +import shutil +import matplotlib.pyplot as plt +import random +import json +import os +import subprocess +from aimbat.app import app +from aimbat.aimbat_types import DataType +from aimbat.core import 
add_data_to_project, set_active_event, create_project +from aimbat.models import AimbatEvent +from aimbat.logger import configure_logging +from dataclasses import dataclass, field +from typing import Any, Literal +from pathlib import Path +from collections.abc import Callable, Generator, Sequence +from sqlmodel import Session, select, create_engine +from sqlalchemy import Engine, event + +# --------------------------------------------------------------------------- +# Constants +# --------------------------------------------------------------------------- + +_AIMBAT_LOGFILE = "aimbat_test.log" +_AIMBAT_LOG_LEVEL: Literal["DEBUG"] = "DEBUG" + + +# --------------------------------------------------------------------------- +# Test data +# --------------------------------------------------------------------------- @dataclass class TestData: + """Container for test data paths. + + Attributes: + multi_event: A list of paths to multi-event SAC files. + sacfile_good: Path to a known good SAC file. 
+ """ + multi_event: list[Path] = field( default_factory=lambda: sorted( Path(__file__).parent.glob("assets/event_*/*.bhz") ) ) - sacfile_good = Path(__file__).parent / "assets/goodfile.sac" + sacfile_good: Path = Path(__file__).parent / "assets/goodfile.sac" TESTDATA = TestData() -# https://rednafi.com/python/patch-pydantic-settings-in-pytest/ -@pytest.fixture -def patch_settings(request: pytest.FixtureRequest) -> Iterator[Settings]: - # Make a copy of the original settings - original_settings = settings.model_copy() - - # Collect the env vars to patch - env_vars_to_patch = getattr(request, "param", {}) - - # Patch the settings to use the default values - for k, v in Settings.model_fields.items(): - setattr(settings, k, v.default) - - # Patch the settings with the parametrized env vars - for key, val in env_vars_to_patch.items(): - # Raise an error if the env var is not defined in the settings - if not hasattr(settings, key): - raise ValueError(f"Unknown setting: {key}") +# --------------------------------------------------------------------------- +# Autouse mocks +# --------------------------------------------------------------------------- - # Raise an error if the env var has an invalid type - expected_type = getattr(settings, key).__class__ - if not isinstance(val, expected_type): - raise ValueError( - f"Invalid type for {key}: {val.__class__} instead of {{expected_type}}" - ) - setattr(settings, key, val) - yield settings - - # Restore the original settings - settings.__dict__.update(original_settings.__dict__) +@pytest.fixture(autouse=True) +def patch_debug_setting(monkeypatch: pytest.MonkeyPatch) -> Generator[None, None, None]: + """Automatically patches settings to enable debug logging for tests. + Args: + monkeypatch: The pytest monkeypatch fixture. 
-@pytest.fixture(autouse=True) -def patch_debug_setting(patch_settings: Settings) -> Iterator[None]: - patch_settings.log_level = "DEBUG" - patch_settings.logfile = Path("aimbat_test.log") + Yields: + None + """ + monkeypatch.setattr(aimbat.settings, "logfile", _AIMBAT_LOGFILE) + monkeypatch.setattr(aimbat.settings, "log_level", _AIMBAT_LOG_LEVEL) configure_logging() yield @@ -77,6 +76,12 @@ def patch_debug_setting(patch_settings: Settings) -> Iterator[None]: @pytest.fixture(autouse=True) def mock_uuid4(monkeypatch: pytest.MonkeyPatch) -> None: + """Mocks uuid.uuid4 to produce deterministic UUIDs. + + Args: + monkeypatch: The pytest monkeypatch fixture. + """ + def make_generator() -> Callable[[], uuid.UUID]: rand = random.Random(42) return lambda: uuid.UUID(int=rand.getrandbits(128), version=4) @@ -86,147 +91,278 @@ def make_generator() -> Callable[[], uuid.UUID]: @pytest.fixture(autouse=True) def mock_show(monkeypatch: pytest.MonkeyPatch) -> None: + """Mocks plt.show to prevent plots from displaying during tests. + + Args: + monkeypatch: The pytest monkeypatch fixture. + """ monkeypatch.setattr(plt, "show", lambda: None) @pytest.fixture(autouse=True) -def increase_columns(monkeypatch: pytest.MonkeyPatch) -> Iterator[None]: +def increase_columns(monkeypatch: pytest.MonkeyPatch) -> Generator[None, None, None]: + """Increases the COLUMNS environment variable for wider output in tests. + + Args: + monkeypatch: The pytest monkeypatch fixture. 
+ + Yields: + None + """ monkeypatch.setenv("COLUMNS", "1000") yield -@pytest.fixture(scope="session") -def test_data_dir( - tmp_path_factory: pytest.TempPathFactory, -) -> Iterator[Path]: - tmp_dir = Path(tmp_path_factory.mktemp("test_data")) +# --------------------------------------------------------------------------- +# File fixtures +# --------------------------------------------------------------------------- - yield tmp_dir - shutil.rmtree(tmp_dir) +@pytest.fixture() +def db_path(tmp_path: Path) -> Path: + """Path for the temporary project database file (does not exist yet). + Args: + tmp_path: The pytest tmp_path fixture. -@pytest.fixture(scope="session") -def test_data(test_data_dir: Path) -> Iterator[list[Path]]: - data_list: list[Path] = [] - for orgfile in TESTDATA.multi_event: - testfile = test_data_dir / f"{uuid.uuid4()}.sac" - shutil.copy(orgfile, testfile) - data_list.append(testfile) - yield data_list + Returns: + Path to the temporary project database file. + """ + return tmp_path / "test_project.db" + + +@pytest.fixture() +def sac_file_good(tmp_path_factory: pytest.TempPathFactory) -> Path: + """Provides a path to a temporary copy of a known good SAC file. + Args: + tmp_path_factory: The pytest tmp_path_factory fixture. -@pytest.fixture(scope="session") -def test_data_string(test_data: list[Path]) -> Iterator[list[str]]: - yield [str(data) for data in test_data] + Returns: + Path to the temporary SAC file. + """ + orgfile = TESTDATA.sacfile_good + tmpdir = tmp_path_factory.mktemp("aimbat") + testfile = tmpdir / "good.sac" + shutil.copy(orgfile, testfile) + return testfile @pytest.fixture -def fixture_empty_db( - patch_settings: Settings, -) -> Iterator[tuple[Engine, Session]]: - db_url: str = r"sqlite+pysqlite:///:memory:" - patch_settings.db_url = db_url - db.engine.dispose() - reload(db) +def multi_event_data(tmp_path_factory: pytest.TempPathFactory) -> list[Path]: + """Provides a list of paths to temporary copies of multi-event SAC files. 
+ + Args: + tmp_path_factory: The pytest tmp_path_factory fixture. + + Returns: + A list of paths to the temporary SAC files. + """ + orgfiles = TESTDATA.multi_event + tmpdir = tmp_path_factory.mktemp("aimbat") + for orgfile in orgfiles: + testfile = tmpdir / orgfile.name + shutil.copy(orgfile, testfile) + return sorted(tmpdir.glob("*.bhz")) + - with Session(db.engine) as session: - yield db.engine, session - db.engine.dispose() +# --------------------------------------------------------------------------- +# Database +# --------------------------------------------------------------------------- @pytest.fixture -def fixture_session_with_project_file( - tmp_path_factory: pytest.TempPathFactory, - patch_settings: Settings, -) -> Iterator[tuple[Engine, Session, Path]]: - db_file = Path(tmp_path_factory.mktemp("test_db")) / "mock.db" - db_url: str = rf"sqlite+pysqlite:///{db_file}" +def engine_from_file( + db_path: Path, monkeypatch: pytest.MonkeyPatch +) -> Generator[Engine, None, None]: + """Creates an empty project database backed by a file. + + Args: + db_path: Path to the temporary project database file. + monkeypatch: The pytest monkeypatch fixture. + + Yields: + A SQLAlchemy Engine connected to the file database. 
+ """ + db_url: str = rf"sqlite+pysqlite:///{db_path}" + engine: Engine = create_engine( + db_url, + connect_args={"check_same_thread": False}, + ) - patch_settings.db_url = db_url - patch_settings.project = db_file + @event.listens_for(engine, "connect") + def set_sqlite_pragma(dbapi_connection: Any, connection_record: Any) -> None: + cursor = dbapi_connection.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + cursor.close() - db.engine.dispose() - reload(db) - project.create_project(db.engine) + monkeypatch.setattr(aimbat.db, "engine", engine) - with Session(db.engine) as session: - yield db.engine, session, db_file - db.engine.dispose() + yield engine + engine.dispose() @pytest.fixture -def fixture_engine_session_with_project( - patch_settings: Settings, -) -> Iterator[tuple[Engine, Session]]: - """Yield a session with a new project.""" +def engine() -> Generator[Engine, None, None]: + """Creates an in memory database with a new project. + + Yields: + A SQLAlchemy Engine connected to the in-memory database with project. 
+ """ + engine: Engine = create_engine( + "sqlite:///:memory:", + connect_args={"check_same_thread": False}, + ) - db_url: str = r"sqlite+pysqlite:///:memory:" - patch_settings.db_url = db_url + @event.listens_for(engine, "connect") + def set_sqlite_pragma(dbapi_connection: Any, connection_record: Any) -> None: + cursor = dbapi_connection.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + cursor.close() - db.engine.dispose() - reload(db) - project.create_project(db.engine) + create_project(engine) - with Session(db.engine) as session: - yield db.engine, session - db.engine.dispose() + yield engine + engine.dispose() @pytest.fixture -def fixture_session_with_data( - test_data: list[Path], patch_settings: Settings -) -> Iterator[Session]: - """Yield a session with a test data added.""" +def patched_engine( + engine: Engine, monkeypatch: pytest.MonkeyPatch +) -> Generator[Engine, None, None]: + """Monkeypatches ``aimbat.db.engine`` so CLI functions use the test database. + + Args: + engine: The SQLAlchemy Engine for the test database. + monkeypatch: The pytest monkeypatch fixture. - db_url: str = r"sqlite+pysqlite:///:memory:" - patch_settings.db_url = db_url + Yields: + The monkeypatched SQLAlchemy Engine. + """ + monkeypatch.setattr(aimbat.db, "engine", engine) + yield engine - db.engine.dispose() - reload(db) - project.create_project(db.engine) - with Session(db.engine) as session: - data.add_files_to_project(session, test_data, DataType.SAC) +@pytest.fixture() +def loaded_engine(patched_engine: Engine, multi_event_data: Sequence[Path]) -> Engine: + """A patched engine pre-populated with multi-event data and an active event. + + Args: + patched_engine: The monkeypatched SQLAlchemy Engine. + multi_event_data: Paths to temporary copies of multi-event SAC files. + + Returns: + The monkeypatched SQLAlchemy Engine with data loaded. 
+ """ + + datasources = multi_event_data + with Session(patched_engine) as session: + add_data_to_project(session, datasources, DataType.SAC) + events = session.exec(select(AimbatEvent)).all() + lengths = [len(e.seismograms) for e in events] + set_active_event(session, events[lengths.index(max(lengths))]) + return patched_engine + + +@pytest.fixture() +def patched_session(patched_engine: Engine) -> Generator[Session, None, None]: + """A session bound to the patched engine for CLI tests. + + Args: + patched_engine: The monkeypatched SQLAlchemy Engine. + + Yields: + A SQLModel Session bound to the patched engine. + """ + with Session(patched_engine) as session: yield session - db.engine.dispose() -@pytest.fixture -def fixture_engine_session_with_active_event( - patch_settings: Settings, test_data: list[Path] -) -> Iterator[tuple[Engine, Session]]: - """Yield a session with an active event.""" +@pytest.fixture() +def loaded_session(loaded_engine: Engine) -> Generator[Session, None, None]: + """A session pre-populated with multi-event data and an active event. - db_url: str = r"sqlite+pysqlite:///:memory:" - patch_settings.db_url = db_url + Args: + loaded_engine: The monkeypatched SQLAlchemy Engine with data loaded. - db.engine.dispose() - reload(db) - project.create_project(db.engine) + Yields: + A SQLModel Session with data populated. 
+ """ + with Session(loaded_engine) as session: + yield session - with Session(db.engine) as session: - data.add_files_to_project(session, test_data, DataType.SAC) - events = session.exec(select(event.AimbatEvent)).all() - lengths = [len(e.seismograms) for e in events] - event.set_active_event(session, events[lengths.index(max(lengths))]) - yield db.engine, session - db.engine.dispose() + +# --------------------------------------------------------------------------- +# CLI +# --------------------------------------------------------------------------- @pytest.fixture() -def sac_file_good(tmp_path_factory: pytest.TempPathFactory) -> Path: - orgfile = TESTDATA.sacfile_good - tmpdir = tmp_path_factory.mktemp("aimbat") - testfile = tmpdir / "good.sac" - shutil.copy(orgfile, testfile) - return testfile +def cli() -> Callable[[str], None]: + """Returns a callable that invokes ``app()`` in-process with command tokens. + + Returns: + A callable that accepts a command string and runs it via the app. + """ + + def _run(command: str) -> None: + try: + app(command) + except SystemExit as exc: + if exc.code != 0: + raise + + return _run + + +@pytest.fixture() +def cli_json(capsys: pytest.CaptureFixture[str]) -> Callable[[str], list | dict]: + """Returns a callable that runs a ``dump`` sub-command and returns parsed JSON. + + Args: + capsys: The pytest capsys fixture. + + Returns: + A callable that accepts a command string and returns the parsed JSON output. 
+ """ + + def _run(command: str) -> list | dict: + capsys.readouterr() # discard output from prior commands + try: + app(command) + except SystemExit as exc: + if exc.code != 0: + raise + captured = capsys.readouterr() + return json.loads(captured.out) + + return _run @pytest.fixture() -def sac_instance_good(sac_file_good: Path) -> Iterator[SAC]: - my_sac = SAC.from_file(sac_file_good) - try: - yield my_sac - finally: - del my_sac +def aimbat_subprocess( + db_path: Path, +) -> Callable[[Sequence[str]], subprocess.CompletedProcess[str]]: + """Returns a callable that runs ``aimbat `` as a subprocess against the test database. + + Args: + db_path: Path to the temporary project database file. + + Returns: + A callable that accepts a sequence of CLI arguments and returns the completed process. + """ + + def _run(args: Sequence[str]) -> subprocess.CompletedProcess[str]: + env = os.environ.copy() + env["AIMBAT_DB_URL"] = f"sqlite+pysqlite:///{db_path}" + env["AIMBAT_LOGFILE"] = _AIMBAT_LOGFILE + env["AIMBAT_LOG_LEVEL"] = _AIMBAT_LOG_LEVEL + env["COLUMNS"] = "1000" + return subprocess.run( + ["uv", "run", "aimbat", *args], + capture_output=True, + text=True, + env=env, + ) + + return _run diff --git a/tests/functional/test_cli_basic_ops.py b/tests/functional/test_cli_basic_ops.py new file mode 100644 index 00000000..f510d6d9 --- /dev/null +++ b/tests/functional/test_cli_basic_ops.py @@ -0,0 +1,624 @@ +"""Functional tests exercising the AIMBAT CLI. + +All commands are invoked in-process via ``app()`` with ``aimbat.db.engine`` +monkeypatched to the test fixture's in-memory database. 
+""" + +import pytest +from pathlib import Path +from collections.abc import Callable, Sequence +from sqlalchemy import Engine + +# =================================================================== +# Project lifecycle (in-memory) +# =================================================================== + + +@pytest.mark.cli +class TestProjectLifecycle: + """Tests for project commands against an in-memory database.""" + + def test_create_project_twice_fails( + self, + patched_engine: Engine, + cli: Callable[[str], None], + ) -> None: + """Verifies that creating a project when one already exists fails. + + Args: + patched_engine: The monkeypatched in-memory engine (project already created). + cli: The in-process CLI callable. + """ + with pytest.raises((SystemExit, RuntimeError)): + cli("project create") + + def test_project_info( + self, + patched_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that project info displays a panel for an in-memory database. + + Args: + patched_engine: The monkeypatched in-memory engine (project already created). + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("project info") + output = capsys.readouterr().out + assert ( + "Project Info" in output + ), "Output should contain the 'Project Info' panel title" + assert ( + "in-memory database" in output + ), "Output should indicate this is an in-memory database" + + def test_delete_project_succeeds_for_in_memory( + self, + patched_engine: Engine, + cli: Callable[[str], None], + ) -> None: + """Verifies that project delete completes without error for an in-memory database. + + Args: + patched_engine: The monkeypatched in-memory engine (project already created). + cli: The in-process CLI callable. 
+ """ + cli("project delete") # should not raise for in-memory + + +# =================================================================== +# Data management +# =================================================================== + + +@pytest.mark.cli +class TestDataManagement: + """Tests for adding and managing data.""" + + def test_add_data( + self, + patched_engine: Engine, + multi_event_data: Sequence[Path], + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that data can be added to the project.""" + files = " ".join(str(f) for f in multi_event_data) + cli(f"data add {files} --no-progress") + events = cli_json("event dump") + assert len(events) > 0 + + def test_add_data_idempotent( + self, + loaded_engine: Engine, + multi_event_data: Sequence[Path], + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Adding the same files twice does not duplicate data.""" + events_before = cli_json("event dump") + + files = " ".join(str(f) for f in multi_event_data) + cli(f"data add {files} --no-progress") + + events_after = cli_json("event dump") + assert len(events_after) == len(events_before) + + def test_data_list(self, loaded_engine: Engine, cli: Callable[[str], None]) -> None: + """Verifies that data list command runs successfully.""" + cli("data list --all") + + def test_data_dump( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that data dump returns a list of data items.""" + data = cli_json("data dump") + assert isinstance(data, list) + assert len(data) > 0 + + def test_dry_run_does_not_add( + self, + patched_engine: Engine, + multi_event_data: Sequence[Path], + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that dry-run mode does not modify the database.""" + files = " ".join(str(f) for f in multi_event_data) + cli(f"data add {files} --no-progress --dry-run") + events = 
cli_json("event dump") + assert len(events) == 0 + + +# =================================================================== +# Event operations +# =================================================================== + + +@pytest.mark.cli +class TestEventOperations: + """Tests for event-related CLI commands.""" + + def test_event_list( + self, loaded_engine: Engine, cli: Callable[[str], None] + ) -> None: + """Verifies that event list command runs successfully.""" + cli("event list") + + def test_event_dump( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that event dump returns a list of events.""" + events = cli_json("event dump") + assert len(events) > 1 + + def test_activate_event( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that an event can be activated.""" + events = cli_json("event dump") + + inactive = [e for e in events if e["active"] is None] + assert len(inactive) > 0 + target_id = inactive[0]["id"] + + cli(f"event activate {target_id}") + + events_after = cli_json("event dump") + active = [e for e in events_after if e["active"] is True] + assert len(active) == 1 + assert active[0]["id"] == target_id + + def test_activate_switches_active( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Activating a different event deactivates the previous one.""" + events = cli_json("event dump") + ids = [e["id"] for e in events] + + cli(f"event activate {ids[0]}") + cli(f"event activate {ids[1]}") + + events_after = cli_json("event dump") + active = [e for e in events_after if e["active"] is True] + assert len(active) == 1 + assert active[0]["id"] == ids[1] + + def test_delete_event( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that an event can be deleted.""" + 
events_before = cli_json("event dump") + target_id = events_before[0]["id"] + + cli(f"event delete {target_id}") + + events_after = cli_json("event dump") + remaining_ids = [e["id"] for e in events_after] + assert target_id not in remaining_ids + assert len(events_after) == len(events_before) - 1 + + def test_activate_event_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that an event can be activated using a shortened ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + events = cli_json("event dump") + inactive = [e for e in events if e["active"] is None] + assert len(inactive) > 0 + target_id = inactive[0]["id"] + short_id = target_id[:8] + + cli(f"event activate {short_id}") + + events_after = cli_json("event dump") + active = [e for e in events_after if e["active"] is True] + assert len(active) == 1 + assert active[0]["id"] == target_id + + def test_delete_event_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that an event can be deleted using a shortened ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + events_before = cli_json("event dump") + target_id = events_before[0]["id"] + short_id = target_id[:8] + + cli(f"event delete {short_id}") + + events_after = cli_json("event dump") + remaining_ids = [e["id"] for e in events_after] + assert target_id not in remaining_ids + assert len(events_after) == len(events_before) - 1 + + def test_delete_event_removes_seismograms( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that deleting an event also deletes its seismograms.""" + seis_before = cli_json("seismogram dump") + events = cli_json("event dump") + target_id = events[0]["id"] + + cli(f"event delete {target_id}") + + seis_after = cli_json("seismogram dump") + assert len(seis_after) < len(seis_before) + + +# =================================================================== +# Event parameters +# =================================================================== + + +@pytest.mark.cli +class TestEventParameters: + """Tests for event parameter CLI commands.""" + + def test_parameter_list( + self, loaded_engine: Engine, cli: Callable[[str], None] + ) -> None: + """Verifies that parameter list command runs successfully.""" + cli("event parameter list") + + def test_parameter_get_and_set( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies getting and setting event parameters.""" + cli("event parameter get completed") + assert "False" in capsys.readouterr().out + + cli("event parameter set completed true") + + cli("event parameter get completed") + assert "True" in capsys.readouterr().out + + def test_parameter_dump( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that parameter dump returns parameter data.""" + data = cli_json("event parameter dump") + assert "completed" in data + + +# 
=================================================================== +# Station operations +# =================================================================== + + +@pytest.mark.cli +class TestStationOperations: + """Tests for station-related CLI commands.""" + + def test_station_list( + self, loaded_engine: Engine, cli: Callable[[str], None] + ) -> None: + """Verifies that station list command runs successfully.""" + cli("station list --all") + + def test_station_dump( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that station dump returns a list of stations.""" + stations = cli_json("station dump") + assert len(stations) > 0 + + def test_delete_station( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a station can be deleted.""" + stations = cli_json("station dump") + target_id = stations[0]["id"] + + cli(f"station delete {target_id}") + + stations_after = cli_json("station dump") + assert len(stations_after) == len(stations) - 1 + + def test_delete_station_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a station can be deleted using a shortened ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + stations = cli_json("station dump") + target_id = stations[0]["id"] + short_id = target_id[:8] + + cli(f"station delete {short_id}") + + stations_after = cli_json("station dump") + assert len(stations_after) == len(stations) - 1 + + +# =================================================================== +# Seismogram operations +# =================================================================== + + +@pytest.mark.cli +class TestSeismogramOperations: + """Tests for seismogram-related CLI commands.""" + + def test_seismogram_list( + self, loaded_engine: Engine, cli: Callable[[str], None] + ) -> None: + """Verifies that seismogram list command runs successfully.""" + cli("seismogram list") + + def test_seismogram_dump( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that seismogram dump returns a list of seismograms.""" + data = cli_json("seismogram dump") + assert len(data) > 0 + + def test_delete_seismogram( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a seismogram can be deleted.""" + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + + cli(f"seismogram delete {target_id}") + + seis_after = cli_json("seismogram dump") + assert len(seis_after) == len(seis) - 1 + + def test_delete_seismogram_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a seismogram can be deleted using a shortened ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + short_id = target_id[:8] + + cli(f"seismogram delete {short_id}") + + seis_after = cli_json("seismogram dump") + assert len(seis_after) == len(seis) - 1 + + +# =================================================================== +# Snapshot lifecycle +# =================================================================== + + +@pytest.mark.cli +class TestSnapshotLifecycle: + """Tests for snapshot creation, deletion, and rollback.""" + + def test_create_snapshot( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a snapshot can be created.""" + cli("snapshot create initial") + data = cli_json("snapshot dump") + assert isinstance(data, dict) + snapshots = data["snapshots"] + assert len(snapshots) == 1 + assert snapshots[0]["comment"] == "initial" + assert len(data["event_parameters"]) == 1 + assert len(data["seismogram_parameters"]) > 0 + + def test_create_multiple_snapshots( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that multiple snapshots can be created.""" + cli("snapshot create first") + cli("snapshot create second") + data = cli_json("snapshot dump") + assert isinstance(data, dict) + snapshots = data["snapshots"] + assert len(snapshots) == 2 + comments = {s["comment"] for s in snapshots} + assert comments == {"first", "second"} + + def test_delete_snapshot( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a snapshot can be deleted.""" + cli("snapshot create to-delete") + data = cli_json("snapshot dump") + assert isinstance(data, dict) + snapshots = data["snapshots"] + assert len(snapshots) == 1 + + cli(f"snapshot delete {snapshots[0]['id']}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict) + assert 
len(data_after["snapshots"]) == 0 + + def test_delete_snapshot_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a snapshot can be deleted using a shortened ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create to-delete") + data = cli_json("snapshot dump") + assert isinstance(data, dict) + snapshots = data["snapshots"] + assert len(snapshots) == 1 + short_id = snapshots[0]["id"][:8] + + cli(f"snapshot delete {short_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict) + assert len(data_after["snapshots"]) == 0 + + def test_snapshot_list( + self, loaded_engine: Engine, cli: Callable[[str], None] + ) -> None: + """Verifies that snapshot list command runs successfully.""" + cli("snapshot create") + cli("snapshot list") + + def test_rollback_snapshot( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Rollback restores parameter values from a snapshot.""" + cli("snapshot create before-change") + + cli("event parameter set completed true") + cli("event parameter get completed") + assert "True" in capsys.readouterr().out + + data = cli_json("snapshot dump") + assert isinstance(data, dict) + cli(f"snapshot rollback {data['snapshots'][0]['id']}") + + cli("event parameter get completed") + assert "False" in capsys.readouterr().out + + def test_rollback_snapshot_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that rollback works with a shortened snapshot ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. 
+ cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. + """ + cli("snapshot create before-change") + + cli("event parameter set completed true") + cli("event parameter get completed") + assert "True" in capsys.readouterr().out + + data = cli_json("snapshot dump") + assert isinstance(data, dict) + short_id = data["snapshots"][0]["id"][:8] + cli(f"snapshot rollback {short_id}") + + cli("event parameter get completed") + assert "False" in capsys.readouterr().out + + +# =================================================================== +# Full workflow: add → delete → re-add +# =================================================================== + + +@pytest.mark.cli +class TestDataReaddWorkflow: + """Delete all data then add it back.""" + + def test_delete_all_events_and_readd( + self, + loaded_engine: Engine, + multi_event_data: Sequence[Path], + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that data can be re-added after deletion.""" + events_before = cli_json("event dump") + assert len(events_before) > 0 + + for event in events_before: + cli(f"event delete {event['id']}") + + events_empty = cli_json("event dump") + assert len(events_empty) == 0 + + seis_empty = cli_json("seismogram dump") + assert len(seis_empty) == 0 + + files = " ".join(str(f) for f in multi_event_data) + cli(f"data add {files} --no-progress") + + events_after = cli_json("event dump") + assert len(events_after) == len(events_before) diff --git a/tests/functional/test_cli_parameters.py b/tests/functional/test_cli_parameters.py new file mode 100644 index 00000000..618695e1 --- /dev/null +++ b/tests/functional/test_cli_parameters.py @@ -0,0 +1,793 @@ +"""Functional tests for CLI commands that read and write event and seismogram parameters. + +All commands are invoked in-process via ``app()`` with ``aimbat.db.engine`` +monkeypatched to the test fixture's in-memory database. 
The ``dump`` +sub-commands are used as the source of truth for verifying parameter changes. +""" + +import pytest +from collections.abc import Callable +from sqlalchemy import Engine + +# =================================================================== +# Event parameter — get +# =================================================================== + + +@pytest.mark.cli +class TestEventParameterGet: + """Tests for ``event parameter get``.""" + + def test_get_bool_parameter( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that getting a bool parameter prints its current value. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("event parameter get completed") + assert "False" in capsys.readouterr().out, "'completed' should default to False" + + def test_get_float_parameter( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that getting a float parameter prints a numeric value. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("event parameter get min_ccnorm") + output = capsys.readouterr().out.strip() + assert output, "Expected a non-empty output for min_ccnorm" + assert float(output) >= 0.0, "min_ccnorm should be a non-negative float" + + def test_get_timedelta_parameter( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that getting a timedelta parameter prints a value ending in 's'. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. 
+ """ + cli("event parameter get window_pre") + output = capsys.readouterr().out.strip() + assert output.endswith( + "s" + ), f"window_pre should be printed in seconds (got '{output}')" + + def test_get_bandpass_bool_parameter( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that getting bandpass_apply prints a bool value. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("event parameter get bandpass_apply") + output = capsys.readouterr().out.strip() + assert output in ( + "True", + "False", + ), f"bandpass_apply should be True or False, got '{output}'" + + +# =================================================================== +# Event parameter — set + verify via dump +# =================================================================== + + +@pytest.mark.cli +class TestEventParameterSetBool: + """Tests for setting boolean event parameters and verifying via dump.""" + + def test_set_completed_true( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting completed=true is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + before = cli_json("event parameter dump") + assert isinstance(before, dict), "Dump should return a dict for active event" + assert before["completed"] is False, "'completed' should default to False" + + cli("event parameter set completed true") + + after = cli_json("event parameter dump") + assert isinstance(after, dict), "Dump should return a dict for active event" + assert after["completed"] is True, "'completed' should be True after being set" + + def test_set_completed_false( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting completed=false is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("event parameter set completed true") + cli("event parameter set completed false") + after = cli_json("event parameter dump") + assert isinstance(after, dict), "Dump should return a dict for active event" + assert ( + after["completed"] is False + ), "'completed' should be False after being set back" + + def test_set_bandpass_apply( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting bandpass_apply is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + before = cli_json("event parameter dump") + assert isinstance(before, dict), "Dump should return a dict for active event" + original = before["bandpass_apply"] + + cli(f"event parameter set bandpass_apply {not original}".lower()) + + after = cli_json("event parameter dump") + assert isinstance(after, dict), "Dump should return a dict for active event" + assert ( + after["bandpass_apply"] is not original + ), "'bandpass_apply' should have toggled after set" + + +@pytest.mark.cli +class TestEventParameterSetFloat: + """Tests for setting float event parameters and verifying via dump.""" + + def test_set_min_ccnorm( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting min_ccnorm is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("event parameter set min_ccnorm 0.42") + after = cli_json("event parameter dump") + assert isinstance(after, dict), "Dump should return a dict for active event" + assert after["min_ccnorm"] == pytest.approx( + 0.42 + ), "'min_ccnorm' should be 0.42 after being set" + + def test_set_bandpass_fmin( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting bandpass_fmin is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + cli("event parameter set bandpass_fmin 0.1") + after = cli_json("event parameter dump") + assert isinstance(after, dict), "Dump should return a dict for active event" + assert after["bandpass_fmin"] == pytest.approx( + 0.1 + ), "'bandpass_fmin' should be 0.1 after being set" + + def test_set_bandpass_fmax( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting bandpass_fmax is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("event parameter set bandpass_fmax 2.0") + after = cli_json("event parameter dump") + assert isinstance(after, dict), "Dump should return a dict for active event" + assert after["bandpass_fmax"] == pytest.approx( + 2.0 + ), "'bandpass_fmax' should be 2.0 after being set" + + +# =================================================================== +# Event parameter — dump +# =================================================================== + + +@pytest.mark.cli +class TestEventParameterDump: + """Tests for ``event parameter dump``.""" + + def test_active_event_returns_dict( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the active-event dump returns a dict. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + data = cli_json("event parameter dump") + assert isinstance(data, dict), "Active-event dump should be a dict" + + def test_active_event_contains_all_parameter_keys( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that all expected parameter keys are present in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. 
+ """ + data = cli_json("event parameter dump") + assert isinstance(data, dict), "Active-event dump should be a dict" + for key in ( + "completed", + "min_ccnorm", + "window_pre", + "window_post", + "bandpass_apply", + "bandpass_fmin", + "bandpass_fmax", + ): + assert key in data, f"Expected key '{key}' in event parameter dump" + + def test_all_events_returns_list( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that ``--all`` returns a list of parameter dicts. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + data = cli_json("event parameter dump --all") + assert isinstance(data, list), "All-events dump should be a list" + assert len(data) > 1, "Expected parameters for more than one event" + + def test_all_events_entries_contain_parameter_keys( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that each entry in the all-events dump has the expected keys. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + data = cli_json("event parameter dump --all") + assert isinstance(data, list), "All-events dump should be a list" + for entry in data: + assert "completed" in entry, "Each entry should have 'completed' key" + assert "min_ccnorm" in entry, "Each entry should have 'min_ccnorm' key" + + def test_set_visible_in_all_events_dump( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a parameter change to the active event appears in the all-events dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + cli("event parameter set completed true") + all_data = cli_json("event parameter dump --all") + assert isinstance(all_data, list), "All-events dump should be a list" + active_entries = [e for e in all_data if e.get("completed") is True] + assert ( + len(active_entries) == 1 + ), "Exactly one event should have completed=True after setting it" + + +# =================================================================== +# Event parameter — list +# =================================================================== + + +@pytest.mark.cli +class TestEventParameterList: + """Tests for ``event parameter list``.""" + + def test_list_produces_output( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that the list command produces output. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("event parameter list") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from event parameter list" + + def test_list_short_produces_output( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that ``--short`` produces output. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("event parameter list --short") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from event parameter list --short" + + def test_list_all_events_produces_output( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that ``--all`` produces output covering all events. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. 
+ """ + cli("event parameter list --all") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from event parameter list --all" + + +# =================================================================== +# Seismogram parameter — get +# =================================================================== + + +@pytest.mark.cli +class TestSeismogramParameterGet: + """Tests for ``seismogram parameter get``.""" + + def test_get_select_with_full_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that 'select' can be retrieved using the full seismogram ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. + """ + seis = cli_json("seismogram dump") + assert ( + isinstance(seis, list) and len(seis) > 0 + ), "Expected at least one seismogram in the dump" + target_id = seis[0]["id"] + + cli(f"seismogram parameter get {target_id} select") + output = capsys.readouterr().out.strip() + assert output in ( + "True", + "False", + ), f"'select' should be True or False, got '{output}'" + + def test_get_select_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that 'select' can be retrieved using a shortened seismogram ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. 
+ """ + seis = cli_json("seismogram dump") + assert ( + isinstance(seis, list) and len(seis) > 0 + ), "Expected at least one seismogram in the dump" + short_id = seis[0]["id"][:8] + + cli(f"seismogram parameter get {short_id} select") + output = capsys.readouterr().out.strip() + assert output in ( + "True", + "False", + ), f"'select' should be True or False, got '{output}'" + + def test_get_flip_default_is_false( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that 'flip' defaults to False. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. + """ + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + + cli(f"seismogram parameter get {target_id} flip") + assert "False" in capsys.readouterr().out, "'flip' should default to False" + + def test_get_select_default_is_true( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that 'select' defaults to True. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. 
+ """ + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + + cli(f"seismogram parameter get {target_id} select") + assert "True" in capsys.readouterr().out, "'select' should default to True" + + +# =================================================================== +# Seismogram parameter — set + verify via dump +# =================================================================== + + +@pytest.mark.cli +class TestSeismogramParameterSet: + """Tests for setting seismogram parameters and verifying via dump.""" + + def test_set_select_false_with_full_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting select=false is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + seis = cli_json("seismogram dump") + assert ( + isinstance(seis, list) and len(seis) > 0 + ), "Expected at least one seismogram in the dump" + target_id = seis[0]["id"] + + cli(f"seismogram parameter set {target_id} select false") + + params = cli_json("seismogram parameter dump") + assert isinstance(params, list), "Seismogram parameter dump should be a list" + target_params = next(p for p in params if p["seismogram_id"] == target_id) + assert ( + target_params["select"] is False + ), f"'select' should be False for seismogram {target_id} after being set" + + def test_set_select_false_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting select=false via a shortened ID is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + short_id = target_id[:8] + + cli(f"seismogram parameter set {short_id} select false") + + params = cli_json("seismogram parameter dump") + assert isinstance(params, list), "Seismogram parameter dump should be a list" + target_params = next(p for p in params if p["seismogram_id"] == target_id) + assert ( + target_params["select"] is False + ), f"'select' should be False for seismogram {target_id} after being set via short ID" + + def test_set_flip_true_with_full_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting flip=true is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + + cli(f"seismogram parameter set {target_id} flip true") + + params = cli_json("seismogram parameter dump") + assert isinstance(params, list), "Seismogram parameter dump should be a list" + target_params = next(p for p in params if p["seismogram_id"] == target_id) + assert ( + target_params["flip"] is True + ), f"'flip' should be True for seismogram {target_id} after being set" + + def test_set_flip_true_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that setting flip=true via a shortened ID is reflected in the dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + seis = cli_json("seismogram dump") + target_id = seis[0]["id"] + short_id = target_id[:8] + + cli(f"seismogram parameter set {short_id} flip true") + + params = cli_json("seismogram parameter dump") + assert isinstance(params, list), "Seismogram parameter dump should be a list" + target_params = next(p for p in params if p["seismogram_id"] == target_id) + assert ( + target_params["flip"] is True + ), f"'flip' should be True for seismogram {target_id} after being set via short ID" + + def test_set_does_not_affect_other_seismograms( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that changing one seismogram's parameter does not affect others. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + params_before = cli_json("seismogram parameter dump") + assert isinstance( + params_before, list + ), "Seismogram parameter dump should be a list" + assert ( + len(params_before) > 1 + ), "Need at least two seismograms in the active event for this test" + target_id = params_before[0]["seismogram_id"] + other_id = params_before[1]["seismogram_id"] + other_select_before = params_before[1]["select"] + + cli(f"seismogram parameter set {target_id} select false") + + params_after = cli_json("seismogram parameter dump") + assert isinstance( + params_after, list + ), "Seismogram parameter dump should be a list" + other_select_after = next( + p["select"] for p in params_after if p["seismogram_id"] == other_id + ) + assert ( + other_select_after == other_select_before + ), "Changing one seismogram's 'select' should not affect another's" + + +# =================================================================== +# Seismogram parameter — dump +# =================================================================== + + +@pytest.mark.cli +class TestSeismogramParameterDump: + """Tests for 
``seismogram parameter dump``.""" + + def test_returns_list( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the dump returns a list of parameter dicts. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + data = cli_json("seismogram parameter dump") + assert isinstance(data, list), "Seismogram parameter dump should be a list" + assert len(data) > 0, "Expected at least one entry in the parameter dump" + + def test_entries_contain_expected_keys( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that each entry contains the expected parameter keys. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + data = cli_json("seismogram parameter dump") + assert isinstance(data, list), "Seismogram parameter dump should be a list" + for entry in data: + for key in ("select", "flip", "t1", "seismogram_id"): + assert ( + key in entry + ), f"Expected key '{key}' in seismogram parameter dump entry" + + def test_all_events_returns_more_entries( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that ``--all`` returns at least as many entries as the active-event dump. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. 
+ """ + active_data = cli_json("seismogram parameter dump") + all_data = cli_json("seismogram parameter dump --all") + assert isinstance( + active_data, list + ), "Active-event seismogram parameter dump should be a list" + assert isinstance( + all_data, list + ), "All-events seismogram parameter dump should be a list" + assert len(all_data) >= len( + active_data + ), "--all should return at least as many entries as the active-event dump" + + def test_count_matches_seismogram_dump( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the parameter dump entry count matches the seismogram count. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + seis = cli_json("seismogram dump") + params = cli_json("seismogram parameter dump --all") + assert isinstance(seis, list), "Seismogram dump should be a list" + assert isinstance(params, list), "Parameter dump should be a list" + assert len(params) == len( + seis + ), "One parameter entry should exist per seismogram" + + +# =================================================================== +# Seismogram parameter — list +# =================================================================== + + +@pytest.mark.cli +class TestSeismogramParameterList: + """Tests for ``seismogram parameter list``.""" + + def test_list_produces_output( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that the list command produces output. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. 
+ """ + cli("seismogram parameter list") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from seismogram parameter list" + + def test_list_short_produces_output( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that ``--short`` produces output. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("seismogram parameter list --short") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from seismogram parameter list --short" diff --git a/tests/functional/test_cli_project.py b/tests/functional/test_cli_project.py new file mode 100644 index 00000000..50107a8c --- /dev/null +++ b/tests/functional/test_cli_project.py @@ -0,0 +1,72 @@ +"""Functional tests for AIMBAT project CLI commands that require a real file. + +These tests are run via subprocess so that the engine lifecycle (create → delete) +operates on an actual database file rather than an in-memory database. 
+""" + +import subprocess +import pytest +from pathlib import Path +from collections.abc import Callable, Sequence + + +@pytest.mark.slow +@pytest.mark.cli +class TestProjectLifecycleWithFile: + """Tests for project creation and deletion against a real database file.""" + + def test_create_project( + self, + aimbat_subprocess: Callable[[Sequence[str]], subprocess.CompletedProcess[str]], + db_path: Path, + ) -> None: + """Verifies that a new project database file is created.""" + result = aimbat_subprocess(["project", "create"]) + assert result.returncode == 0, result.stderr + assert db_path.exists(), "Database file should exist after project create" + + def test_create_project_twice_fails( + self, + aimbat_subprocess: Callable[[Sequence[str]], subprocess.CompletedProcess[str]], + ) -> None: + """Verifies that creating a project when one already exists fails.""" + aimbat_subprocess(["project", "create"]) + result = aimbat_subprocess(["project", "create"]) + assert result.returncode != 0, "Second project create should fail" + + def test_project_info( + self, + aimbat_subprocess: Callable[[Sequence[str]], subprocess.CompletedProcess[str]], + ) -> None: + """Verifies that project info displays a panel after creation.""" + aimbat_subprocess(["project", "create"]) + result = aimbat_subprocess(["project", "info"]) + assert result.returncode == 0, result.stderr + assert ( + "Project Info" in result.stdout + ), "Output should contain the 'Project Info' panel title" + + def test_project_info_shows_file_path( + self, + aimbat_subprocess: Callable[[Sequence[str]], subprocess.CompletedProcess[str]], + db_path: Path, + ) -> None: + """Verifies that project info includes the database file path.""" + aimbat_subprocess(["project", "create"]) + result = aimbat_subprocess(["project", "info"]) + assert ( + db_path.name in result.stdout + ), "Output should contain the database filename" + + def test_delete_project( + self, + aimbat_subprocess: Callable[[Sequence[str]], 
subprocess.CompletedProcess[str]], + db_path: Path, + ) -> None: + """Verifies that the project database file is removed after deletion.""" + aimbat_subprocess(["project", "create"]) + result = aimbat_subprocess(["project", "delete"]) + assert result.returncode == 0, result.stderr + assert ( + not db_path.exists() + ), "Database file should be absent after project delete" diff --git a/tests/functional/test_cli_sampledata.py b/tests/functional/test_cli_sampledata.py new file mode 100644 index 00000000..f00cd48f --- /dev/null +++ b/tests/functional/test_cli_sampledata.py @@ -0,0 +1,196 @@ +"""Functional tests for the AIMBAT sampledata CLI commands. + +All commands are invoked in-process via ``app()`` with +``aimbat.settings.sampledata_dir`` monkeypatched to a temporary directory. +A retry helper re-attempts the download up to 3 times to tolerate transient +network issues. +""" + +import pytest +from collections.abc import Callable +from pathlib import Path + +import aimbat._config as _config + +_MAX_RETRIES = 3 + + +# =================================================================== +# Fixtures +# =================================================================== + + +@pytest.fixture() +def sampledata_dir( + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, +) -> Path: + """Patches ``aimbat.settings.sampledata_dir`` to a temporary directory. + + Args: + tmp_path: The pytest tmp_path fixture. + monkeypatch: The pytest monkeypatch fixture. + + Returns: + Path to the temporary sample data directory. + """ + target = tmp_path / "sample-data" + monkeypatch.setattr(_config.settings, "sampledata_dir", target) + return target + + +def _run_with_retries( + cli: Callable[[str], None], + command: str, + retries: int = _MAX_RETRIES, +) -> None: + """Runs a CLI command, retrying up to ``retries`` times on failure. + + Args: + cli: The in-process CLI callable. + command: The command string to run. + retries: Maximum number of attempts. 
+ + Raises: + Exception: If all attempts fail. + """ + last_exc: Exception | None = None + for _ in range(retries): + try: + cli(command) + return + except Exception as exc: + last_exc = exc + raise last_exc # type: ignore[misc] + + +# =================================================================== +# Download +# =================================================================== + + +@pytest.mark.slow +class TestSampledataDownload: + """Tests for ``utils sampledata download``.""" + + def test_download_creates_files( + self, + sampledata_dir: Path, + cli: Callable[[str], None], + ) -> None: + """Verifies that download creates files inside the sampledata directory. + + Args: + sampledata_dir: Path to the temporary sample data directory. + cli: The in-process CLI callable. + """ + _run_with_retries(cli, "utils sampledata download") + assert ( + sampledata_dir.exists() + ), "Sample data directory should exist after download" + assert any( + sampledata_dir.rglob("*") + ), "Sample data directory should contain at least one file after download" + + def test_download_creates_seismogram_files( + self, + sampledata_dir: Path, + cli: Callable[[str], None], + ) -> None: + """Verifies that the download includes BHZ seismogram data files. + + Args: + sampledata_dir: Path to the temporary sample data directory. + cli: The in-process CLI callable. + """ + _run_with_retries(cli, "utils sampledata download") + bhz_files = list(sampledata_dir.rglob("*BHZ")) + assert ( + len(bhz_files) > 0 + ), "Expected at least one BHZ seismogram file in the downloaded sample data" + + def test_download_twice_fails_without_force( + self, + sampledata_dir: Path, + cli: Callable[[str], None], + ) -> None: + """Verifies that a second download without --force raises FileExistsError. + + Args: + sampledata_dir: Path to the temporary sample data directory. + cli: The in-process CLI callable. 
+ """ + _run_with_retries(cli, "utils sampledata download") + assert sampledata_dir.exists(), "Directory should exist after first download" + + with pytest.raises((SystemExit, FileExistsError)): + cli("utils sampledata download") + + def test_download_force_overwrites( + self, + sampledata_dir: Path, + cli: Callable[[str], None], + ) -> None: + """Verifies that --force re-downloads and replaces existing sample data. + + Args: + sampledata_dir: Path to the temporary sample data directory. + cli: The in-process CLI callable. + """ + _run_with_retries(cli, "utils sampledata download") + assert sampledata_dir.exists(), "Directory should exist after first download" + + _run_with_retries(cli, "utils sampledata download --force") + assert ( + sampledata_dir.exists() + ), "Directory should still exist after force re-download" + assert any( + sampledata_dir.rglob("*") + ), "Directory should contain files after force re-download" + + +# =================================================================== +# Delete +# =================================================================== + + +@pytest.mark.slow +class TestSampledataDelete: + """Tests for ``utils sampledata delete``.""" + + def test_delete_removes_directory( + self, + sampledata_dir: Path, + cli: Callable[[str], None], + ) -> None: + """Verifies that the sample data directory is removed after delete. + + Args: + sampledata_dir: Path to the temporary sample data directory. + cli: The in-process CLI callable. + """ + _run_with_retries(cli, "utils sampledata download") + assert sampledata_dir.exists(), "Directory should exist before delete" + + cli("utils sampledata delete") + assert ( + not sampledata_dir.exists() + ), "Sample data directory should be absent after delete" + + def test_download_after_delete_succeeds( + self, + sampledata_dir: Path, + cli: Callable[[str], None], + ) -> None: + """Verifies that sample data can be re-downloaded after deletion. 
+ + Args: + sampledata_dir: Path to the temporary sample data directory. + cli: The in-process CLI callable. + """ + _run_with_retries(cli, "utils sampledata download") + cli("utils sampledata delete") + assert not sampledata_dir.exists(), "Directory should be absent after delete" + + _run_with_retries(cli, "utils sampledata download") + assert sampledata_dir.exists(), "Directory should exist after re-downloading" diff --git a/tests/functional/test_cli_snapshots.py b/tests/functional/test_cli_snapshots.py new file mode 100644 index 00000000..4e05ca17 --- /dev/null +++ b/tests/functional/test_cli_snapshots.py @@ -0,0 +1,574 @@ +"""Functional tests for the AIMBAT snapshot CLI commands. + +All commands are invoked in-process via ``app()`` with ``aimbat.db.engine`` +monkeypatched to the test fixture's in-memory database. The ``snapshot dump`` +JSON output is used as the ground truth for ID verification after mutations. +""" + +import pytest +from collections.abc import Callable +from sqlalchemy import Engine + +# =================================================================== +# Snapshot creation +# =================================================================== + + +@pytest.mark.cli +class TestSnapshotCreate: + """Tests for the ``snapshot create`` CLI command.""" + + def test_create_without_comment( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that a snapshot is created with a null comment by default. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + cli("snapshot create") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert len(data["snapshots"]) == 1, "Expected exactly one snapshot" + assert ( + data["snapshots"][0]["comment"] is None + ), "Comment should be None when not provided" + + def test_create_with_comment( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the comment is stored when provided. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create my-comment") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert ( + data["snapshots"][0]["comment"] == "my-comment" + ), "Comment should match the value passed to create" + + def test_create_captures_event_parameters( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that one event parameter snapshot is created per snapshot. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert ( + len(data["event_parameters"]) == 1 + ), "Expected one event parameter snapshot per snapshot" + + def test_create_captures_seismogram_parameters( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that seismogram parameter snapshots are created. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + cli("snapshot create") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert ( + len(data["seismogram_parameters"]) > 0 + ), "Expected at least one seismogram parameter snapshot" + + def test_create_multiple_snapshots( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that multiple snapshots accumulate correctly. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create first") + cli("snapshot create second") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert len(data["snapshots"]) == 2, "Expected two snapshots" + assert ( + len(data["event_parameters"]) == 2 + ), "Expected two event parameter snapshots" + comments = {s["comment"] for s in data["snapshots"]} + assert comments == { + "first", + "second", + }, "Both comments should be present in the dump" + + +# =================================================================== +# Snapshot deletion +# =================================================================== + + +@pytest.mark.cli +class TestSnapshotDelete: + """Tests for the ``snapshot delete`` CLI command. + + Uses IDs obtained from ``snapshot dump`` to verify complete removal of + the snapshot and all related child records. + """ + + def test_delete_removes_snapshot( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the snapshot ID is absent from the dump after deletion. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + cli("snapshot create") + data_before = cli_json("snapshot dump") + assert isinstance(data_before, dict), "Dump should return a dict" + snapshot_id = data_before["snapshots"][0]["id"] + + cli(f"snapshot delete {snapshot_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict), "Dump should return a dict" + remaining_ids = [s["id"] for s in data_after["snapshots"]] + assert ( + snapshot_id not in remaining_ids + ), f"Snapshot {snapshot_id} should be absent after deletion" + + def test_delete_removes_event_parameter_snapshot( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the related event parameter snapshot is removed after deletion. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data_before = cli_json("snapshot dump") + assert isinstance(data_before, dict), "Dump should return a dict" + snapshot_id = data_before["snapshots"][0]["id"] + event_param_ids = {ep["id"] for ep in data_before["event_parameters"]} + + cli(f"snapshot delete {snapshot_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict), "Dump should return a dict" + remaining_event_param_ids = {ep["id"] for ep in data_after["event_parameters"]} + assert event_param_ids.isdisjoint( + remaining_event_param_ids + ), f"Event parameter snapshot IDs {event_param_ids} should all be absent after deletion" + + def test_delete_removes_seismogram_parameter_snapshots( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that all related seismogram parameter snapshots are removed after deletion. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. 
+ cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data_before = cli_json("snapshot dump") + assert isinstance(data_before, dict), "Dump should return a dict" + snapshot_id = data_before["snapshots"][0]["id"] + seis_param_ids = {sp["id"] for sp in data_before["seismogram_parameters"]} + assert ( + len(seis_param_ids) > 0 + ), "There should be seismogram parameter snapshots before deletion" + + cli(f"snapshot delete {snapshot_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict), "Dump should return a dict" + remaining_seis_param_ids = { + sp["id"] for sp in data_after["seismogram_parameters"] + } + assert seis_param_ids.isdisjoint( + remaining_seis_param_ids + ), f"Seismogram parameter snapshot IDs {seis_param_ids} should all be absent after deletion" + + def test_delete_one_of_two_snapshots_leaves_other_intact( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that deleting one snapshot does not affect the other. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + cli("snapshot create first") + cli("snapshot create second") + data_before = cli_json("snapshot dump") + assert isinstance(data_before, dict), "Dump should return a dict" + first_id = next( + s["id"] for s in data_before["snapshots"] if s["comment"] == "first" + ) + second_id = next( + s["id"] for s in data_before["snapshots"] if s["comment"] == "second" + ) + + cli(f"snapshot delete {first_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict), "Dump should return a dict" + remaining_ids = [s["id"] for s in data_after["snapshots"]] + assert ( + first_id not in remaining_ids + ), f"Deleted snapshot {first_id} should be absent" + assert ( + second_id in remaining_ids + ), f"Surviving snapshot {second_id} should still be present" + + def test_delete_snapshot_with_short_id_removes_all_related( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies deletion via short ID removes the snapshot and all related records. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + cli("snapshot create") + data_before = cli_json("snapshot dump") + assert isinstance(data_before, dict), "Dump should return a dict" + snapshot_id = data_before["snapshots"][0]["id"] + short_id = snapshot_id[:8] + event_param_ids = {ep["id"] for ep in data_before["event_parameters"]} + seis_param_ids = {sp["id"] for sp in data_before["seismogram_parameters"]} + + cli(f"snapshot delete {short_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict), "Dump should return a dict" + remaining_snapshot_ids = [s["id"] for s in data_after["snapshots"]] + remaining_event_param_ids = {ep["id"] for ep in data_after["event_parameters"]} + remaining_seis_param_ids = { + sp["id"] for sp in data_after["seismogram_parameters"] + } + assert ( + snapshot_id not in remaining_snapshot_ids + ), f"Snapshot {snapshot_id} should be absent after deletion via short ID" + assert event_param_ids.isdisjoint( + remaining_event_param_ids + ), f"Event parameter snapshot IDs {event_param_ids} should all be absent" + assert seis_param_ids.isdisjoint( + remaining_seis_param_ids + ), f"Seismogram parameter snapshot IDs {seis_param_ids} should all be absent" + + +# =================================================================== +# Snapshot rollback +# =================================================================== + + +@pytest.mark.cli +class TestSnapshotRollback: + """Tests for the ``snapshot rollback`` CLI command.""" + + def test_rollback_restores_event_parameter( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that rollback restores a previously changed event parameter. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. 
+ """ + cli("snapshot create before-change") + + cli("event parameter set completed true") + cli("event parameter get completed") + assert ( + "True" in capsys.readouterr().out + ), "Parameter should read True after being set" + + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + snapshot_id = data["snapshots"][0]["id"] + + cli(f"snapshot rollback {snapshot_id}") + + cli("event parameter get completed") + assert ( + "False" in capsys.readouterr().out + ), "Parameter should be restored to False after rollback" + + def test_rollback_restores_event_parameter_with_short_id( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that rollback restores a parameter when given a shortened snapshot ID. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + capsys: The pytest capsys fixture. + """ + cli("snapshot create before-change") + + cli("event parameter set completed true") + cli("event parameter get completed") + assert ( + "True" in capsys.readouterr().out + ), "Parameter should read True after being set" + + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + short_id = data["snapshots"][0]["id"][:8] + + cli(f"snapshot rollback {short_id}") + + cli("event parameter get completed") + assert ( + "False" in capsys.readouterr().out + ), "Parameter should be restored to False after rollback via short ID" + + def test_rollback_does_not_delete_snapshot( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that rolling back leaves the snapshot itself in place. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. 
+ cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + data_before = cli_json("snapshot dump") + assert isinstance(data_before, dict), "Dump should return a dict" + snapshot_id = data_before["snapshots"][0]["id"] + + cli(f"snapshot rollback {snapshot_id}") + + data_after = cli_json("snapshot dump") + assert isinstance(data_after, dict), "Dump should return a dict" + remaining_ids = [s["id"] for s in data_after["snapshots"]] + assert ( + snapshot_id in remaining_ids + ), "Snapshot should still exist after rollback" + + +# =================================================================== +# Snapshot dump +# =================================================================== + + +@pytest.mark.cli +class TestSnapshotDump: + """Tests for the ``snapshot dump`` CLI command.""" + + def test_dump_empty_returns_empty_lists( + self, + loaded_engine: Engine, + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the dump is empty when no snapshots have been created. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli_json: The in-process CLI JSON dump callable. + """ + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert data["snapshots"] == [], "Snapshots list should be empty" + assert data["event_parameters"] == [], "Event parameters list should be empty" + assert ( + data["seismogram_parameters"] == [] + ), "Seismogram parameters list should be empty" + + def test_dump_contains_expected_keys( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that the dump dict contains the three expected top-level keys. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + cli("snapshot create") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + assert "snapshots" in data, "Dump should contain 'snapshots' key" + assert "event_parameters" in data, "Dump should contain 'event_parameters' key" + assert ( + "seismogram_parameters" in data + ), "Dump should contain 'seismogram_parameters' key" + + def test_dump_all_events_includes_active( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that ``--all`` includes at least the active event's snapshots. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. + """ + cli("snapshot create") + active_data = cli_json("snapshot dump") + all_data = cli_json("snapshot dump --all") + assert isinstance(active_data, dict), "Active dump should return a dict" + assert isinstance(all_data, dict), "All-events dump should return a dict" + assert len(all_data["snapshots"]) >= len( + active_data["snapshots"] + ), "--all should return at least as many snapshots as the active-event dump" + + def test_dump_snapshot_ids_are_consistent( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + cli_json: Callable[[str], list | dict], + ) -> None: + """Verifies that snapshot IDs referenced in event/seismogram params match the snapshots list. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + cli_json: The in-process CLI JSON dump callable. 
+ """ + cli("snapshot create") + data = cli_json("snapshot dump") + assert isinstance(data, dict), "Dump should return a dict" + snapshot_ids = {s["id"] for s in data["snapshots"]} + for ep in data["event_parameters"]: + assert ( + ep["snapshot_id"] in snapshot_ids + ), f"Event parameter snapshot_id {ep['snapshot_id']} not in snapshots list" + for sp in data["seismogram_parameters"]: + assert ( + sp["snapshot_id"] in snapshot_ids + ), f"Seismogram parameter snapshot_id {sp['snapshot_id']} not in snapshots list" + + +# =================================================================== +# Snapshot list +# =================================================================== + + +@pytest.mark.cli +class TestSnapshotList: + """Tests for the ``snapshot list`` CLI command.""" + + def test_list_active_event( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that the list command produces output for the active event. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("snapshot create") + cli("snapshot list") + assert len(capsys.readouterr().out) > 0, "Expected output from snapshot list" + + def test_list_all_events( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that ``--all`` produces output for all events. + + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("snapshot create") + cli("snapshot list --all") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from snapshot list --all" + + def test_list_short( + self, + loaded_engine: Engine, + cli: Callable[[str], None], + capsys: pytest.CaptureFixture[str], + ) -> None: + """Verifies that ``--short`` produces output. 
+ + Args: + loaded_engine: The monkeypatched engine with data loaded. + cli: The in-process CLI callable. + capsys: The pytest capsys fixture. + """ + cli("snapshot create") + cli("snapshot list --short") + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output from snapshot list --short" diff --git a/tests/integration/test_active_event.py b/tests/integration/test_active_event.py new file mode 100644 index 00000000..1aa52330 --- /dev/null +++ b/tests/integration/test_active_event.py @@ -0,0 +1,115 @@ +"""Integration tests for managing the active event in the database.""" + +import pytest +import uuid +from aimbat.core import set_active_event, set_active_event_by_id, get_active_event +from aimbat.models import AimbatEvent +from sqlmodel import Session, select +from sqlalchemy.exc import NoResultFound + +# ----------------------------------------------------------------------------- +# Do all tests with the session fixture that has multi_event data pre-loaded +# ----------------------------------------------------------------------------- + + +@pytest.fixture +def session(loaded_session: Session) -> Session: + """Provides a session with multi_event data pre-loaded. + + Args: + loaded_session (Session): The session fixture with data. + + Returns: + Session: The database session. + """ + return loaded_session + + +class TestActiveEvent: + """Tests for retrieving and switching the active event.""" + + def test_get(self, session: Session) -> None: + """Verifies that `get_active_event` returns the event marked as active in the DB. + + Args: + session (Session): The database session. + """ + active_event = session.exec( + select(AimbatEvent).where(AimbatEvent.active == 1) + ).one() + assert active_event == get_active_event(session) + + def test_switch(self, session: Session) -> None: + """Verifies switching the active event using an event object. + + Args: + session (Session): The database session. 
+ """ + active_event = get_active_event(session) + assert active_event is not None, "expected an active event in the test data" + + all_events = list(session.exec(select(AimbatEvent)).all()) + assert len(all_events) > 1, "expected multiple events in the test data" + + all_events.remove(active_event) + new_active_event = all_events.pop() + assert ( + new_active_event != active_event + ), "expected a different event to switch to" + + set_active_event(session, new_active_event) + assert get_active_event(session) == new_active_event + + def test_switch_by_id(self, session: Session) -> None: + """Verifies switching the active event using an event ID. + + Args: + session (Session): The database session. + """ + active_event = get_active_event(session) + event_ids = list(session.exec(select(AimbatEvent.id)).all()) + + event_ids.remove(active_event.id) + new_active_event_id = event_ids.pop() + assert ( + new_active_event_id != active_event.id + ), "expected a different event id to switch to" + + set_active_event_by_id(session, new_active_event_id) + + assert ( + get_active_event(session).id == new_active_event_id + ), "expected the active event to switch to the new event by id" + + def test_switch_by_id_invalid(self, session: Session) -> None: + """Verifies that switching the active event using an invalid event ID raises an error.""" + + new_uuid = uuid.uuid4() + assert ( + len( + session.exec( + select(AimbatEvent).where(AimbatEvent.id == new_uuid) + ).all() + ) + == 0 + ), "expected no event with the generated UUID in the test data" + + with pytest.raises(ValueError): + set_active_event_by_id(session, uuid.uuid4()) + + def test_get_active_event_no_active(self, session: Session) -> None: + """Verifies that `get_active_event` returns None if no event is marked as active. + + Args: + session (Session): The database session. 
+ """ + active_event = get_active_event(session) + assert active_event is not None, "expected an active event in the test data" + active_event.active = None + assert ( + session.exec(select(AimbatEvent).where(AimbatEvent.active == 1)).first() + is None + ), "expected no active event in the database after deactivating" + + with pytest.raises(NoResultFound): + get_active_event(session) diff --git a/tests/integration/test_data_io.py b/tests/integration/test_data_io.py new file mode 100644 index 00000000..ea100621 --- /dev/null +++ b/tests/integration/test_data_io.py @@ -0,0 +1,327 @@ +"""Integration tests for adding data (SAC files) to the project.""" + +import pytest +import json +from aimbat.core import ( + add_data_to_project, + get_data_for_active_event, + print_data_table, + dump_data_table_to_json, +) +from aimbat.aimbat_types import DataType +from aimbat.models import AimbatDataSource, AimbatEvent, AimbatSeismogram +from pysmo.classes import SAC +from pathlib import Path +from sqlmodel import Session, select +from pydantic import ValidationError +from collections.abc import Generator + + +class TestAddDataToProject: + @pytest.fixture + def session(self, patched_session: Session) -> Generator[Session, None, None]: + """Provides a database session for tests. + + Args: + patched_session (Session): A patched SQLAlchemy session fixture. + """ + yield patched_session + + def test_add_single_sac_file(self, sac_file_good: Path, session: Session) -> None: + """Verifies adding a single valid SAC file to the project. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + datasource = session.exec(select(AimbatDataSource.sourcename)).all() + assert len(datasource) == 0, "Expected no data sources before adding files." 
+ + # do this 2 times to verify we can only add the same file once and that nothing changes on the second attempt + for _ in range(2): + add_data_to_project( + session, + [sac_file_good], + datatype=DataType.SAC, + ) + seismogram_filename = session.exec( + select(AimbatDataSource.sourcename) + ).one() + assert seismogram_filename == str(sac_file_good) + + def test_add_multiple_sac_files( + self, multi_event_data: list[Path], session: Session + ) -> None: + """Verifies adding multiple SAC files to the project at once. + + Args: + multi_event_data (list[Path]): List of paths to SAC files. + session (Session): Database session. + """ + datasource = session.exec(select(AimbatDataSource.sourcename)).all() + assert len(datasource) == 0, "Expected no data sources before adding files." + + add_data_to_project( + session, + multi_event_data, + datatype=DataType.SAC, + ) + + seismogram_filenames = session.exec(select(AimbatDataSource.sourcename)).all() + assert sorted(seismogram_filenames) == sorted( + [str(path) for path in multi_event_data] + ), "Expected all files from multi_event to be added as data sources." + + def test_add_nonexistent_file(self, session: Session) -> None: + """Verifies that adding a non-existent file raises FileNotFoundError. + + Args: + session (Session): Database session. + """ + non_existent_file = Path("this_file_does_not_exist.sac") + with pytest.raises(FileNotFoundError): + add_data_to_project( + session, + [non_existent_file], + datatype=DataType.SAC, + ) + + def test_add_mixed_valid_and_invalid_files( + self, sac_file_good: Path, session: Session + ) -> None: + """Verifies that adding a mix of valid and invalid files raises an error and adds nothing. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. 
+ """ + non_existent_file = Path("this_file_does_not_exist.sac") + with pytest.raises(FileNotFoundError): + add_data_to_project( + session, + [sac_file_good, non_existent_file], + datatype=DataType.SAC, + ) + + # Verify that the valid file was not added due to the error + datasource = session.exec(select(AimbatDataSource.sourcename)).all() + assert ( + len(datasource) == 0 + ), "Expected no data sources to be added when an error occurs." + + def test_add_sac_file_with_missing_pick( + self, sac_file_good: Path, session: Session + ) -> None: + """Verifies that adding a SAC file missing required pick information raises ValidationError. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + sac = SAC.from_file(sac_file_good) + sac.timestamps.t0 = None + sac.write(sac_file_good) + with pytest.raises(ValidationError): + add_data_to_project( + session, + [sac_file_good], + datatype=DataType.SAC, + ) + + def test_dry_run_all_new( + self, + multi_event_data: list[Path], + session: Session, + capsys: pytest.CaptureFixture, + ) -> None: + """Verifies dry run behavior when all data is new. + + Args: + multi_event_data (list[Path]): List of paths to SAC files. + session (Session): Database session. + capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr. + """ + add_data_to_project( + session, + multi_event_data, + datatype=DataType.SAC, + dry_run=True, + ) + + datasource = session.exec(select(AimbatDataSource.sourcename)).all() + assert len(datasource) == 0, "Expected no data sources after dry run." 
+ + captured = capsys.readouterr() + assert "Dry Run: Data to be added" in captured.out + n = len(multi_event_data) + assert f"{n} seismogram(s) added, 0 skipped" in captured.out + assert "0 skipped" in captured.out + + def test_dry_run_all_skipped( + self, + multi_event_data: list[Path], + session: Session, + capsys: pytest.CaptureFixture, + ) -> None: + """Verifies dry run behavior when all data already exists (should be skipped). + + Args: + multi_event_data (list[Path]): List of paths to SAC files. + session (Session): Database session. + capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr. + """ + add_data_to_project( + session, + multi_event_data, + datatype=DataType.SAC, + ) + capsys.readouterr() # discard output from the real add + + add_data_to_project( + session, + multi_event_data, + datatype=DataType.SAC, + dry_run=True, + ) + + captured = capsys.readouterr() + assert "Dry Run: Data to be added" in captured.out + n = len(multi_event_data) + assert f"0 station(s) added, {n} skipped" in captured.out + assert f"0 event(s) added, {n} skipped" in captured.out + assert f"0 seismogram(s) added, {n} skipped" in captured.out + + +class TestGetDataSources: + @pytest.fixture + def session(self, loaded_session: Session) -> Generator[Session, None, None]: + """Provides a database session with pre-loaded data sources for tests. + + Args: + loaded_session (Session): A SQLAlchemy session fixture with pre-loaded data sources. + """ + yield loaded_session + + def test_get_data_sources_for_active_event(self, session: Session) -> None: + """Verifies that get_data_sources returns the expected data sources. + + Args: + session (Session): Database session. + """ + + data_sources = get_data_for_active_event(session) + assert len(data_sources) != 0, "Expected data sources for the active event." 
+        assert all(
+            isinstance(ds, AimbatDataSource) for ds in data_sources
+        ), "expected all items to be AimbatDataSource instances"
+
+    def test_dump_data_table_to_json(self, session: Session) -> None:
+        """Verifies that dump_data_table_to_json returns a JSON string with expected content.
+
+        Args:
+            session (Session): Database session.
+        """
+        json_str = dump_data_table_to_json(session)
+        json_data = json.loads(json_str)
+        assert isinstance(json_data, list), "Expected JSON data to be a list."
+
+        expected_ids = map(str, session.exec(select(AimbatDataSource.id)).all())
+        returned_ids = [item["id"] for item in json_data]
+        assert set(expected_ids) == set(returned_ids), "Expected IDs to match."
+
+    def test_print_data_table_for_all_events(
+        self, session: Session, capsys: pytest.CaptureFixture
+    ) -> None:
+        """Verifies that print_data_table prints the expected table output for all events.
+
+        Args:
+            session (Session): Database session.
+            capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr.
+        """
+        print_data_table(session, short=False, all_events=True)
+
+        expected_ids = session.exec(select(AimbatDataSource.id)).all()
+
+        captured = capsys.readouterr()
+        assert "Data sources for all events" in captured.out
+        for id in expected_ids:
+            assert (
+                str(id) in captured.out
+            ), "expected data source ID to be in the output table"
+
+    def test_print_data_table_for_all_events_short(
+        self, session: Session, capsys: pytest.CaptureFixture
+    ) -> None:
+        """Verifies that print_data_table prints the expected table output with shortened IDs.
+
+        Args:
+            session (Session): Database session.
+            capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr.
+ """ + + expected_ids = session.exec(select(AimbatDataSource.id)).all() + + print_data_table(session, short=True, all_events=True) + + captured = capsys.readouterr() + assert "Data sources for all events" in captured.out + for id in expected_ids: + assert ( + str(id)[:2] in captured.out + ), "expected data source ID to be in the output table" + + def test_print_data_table_for_active_event( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that get_data_sources prints the expected table output. + + Args: + session (Session): Database session. + capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr. + """ + + # AimbatSeismogram has external_id of datasource and event: + statement = ( + select(AimbatDataSource.id) + .join(AimbatSeismogram) + .join(AimbatEvent) + .where(AimbatEvent.active == 1) + ) + expected_ids = session.exec(statement).all() + + print_data_table(session, short=False, all_events=False) + + captured = capsys.readouterr() + assert "Data sources for event" in captured.out + for id in expected_ids: + assert ( + str(id) in captured.out + ), "expected data source ID to be in the output table" + + def test_print_data_table_for_active_event_short( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that get_data_sources prints the expected table output. + + Args: + session (Session): Database session. + capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr. 
+ """ + + # AimbatSeismogram has external_id of datasource and event: + statement = ( + select(AimbatDataSource.id) + .join(AimbatSeismogram) + .join(AimbatEvent) + .where(AimbatEvent.active == 1) + ) + expected_ids = session.exec(statement).all() + + print_data_table(session, short=True, all_events=False) + + captured = capsys.readouterr() + assert "Data sources for event" in captured.out + for id in expected_ids: + assert ( + str(id)[:2] in captured.out + ), "expected data source ID to be in the output table" diff --git a/tests/integration/test_datasource_sac.py b/tests/integration/test_datasource_sac.py new file mode 100644 index 00000000..06567927 --- /dev/null +++ b/tests/integration/test_datasource_sac.py @@ -0,0 +1,300 @@ +"""Integration tests for AIMBAT models backed by SAC files. + +Tests verify that SAC.station, SAC.event, and SAC.seismogram map correctly +to AimbatStation, AimbatEvent, and AimbatSeismogram, and that the data +property reads/writes through to the file on disk. + +Note that in production we only ever read from the SAC file once to populate +the database, and then rely on the database for all subsequent access. However, +these tests verify that the SAC → Aimbat* mapping is correct and that the data +property correctly proxies through to the file on disk. 
+""" + +import pytest +import numpy as np +from aimbat.models import ( + AimbatDataSource, + AimbatEvent, + AimbatEventParameters, + AimbatSeismogram, + AimbatSeismogramParameters, + AimbatStation, +) +from aimbat.aimbat_types import DataType +from datetime import timezone +from pathlib import Path +from pandas import Timestamp +from pysmo.classes import SAC +from sqlmodel import Session +from collections.abc import Generator + + +@pytest.fixture +def session(patched_session: Session) -> Generator[Session, None, None]: + yield patched_session + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _persist_sac(session: Session, sac_file: Path) -> AimbatSeismogram: + """Helper to build a full object graph from a SAC file and persist it. + + Args: + session (Session): The database session. + sac_file (Path): The path to the SAC file. + + Returns: + AimbatSeismogram: The persisted seismogram object. 
+ """ + sac = SAC.from_file(sac_file) + + event = AimbatEvent.model_validate( + sac.event, + update={"parameters": AimbatEventParameters()}, + ) + session.add(event) + session.flush() + + station = AimbatStation.model_validate(sac.station) + session.add(station) + session.flush() + + seismogram = AimbatSeismogram.model_validate( + sac.seismogram, + update={ + "t0": sac.timestamps.t0, + "parameters": AimbatSeismogramParameters(), + "event": event, + "station": station, + }, + ) + session.add(seismogram) + session.flush() + + datasource = AimbatDataSource( + sourcename=str(sac_file), + datatype=DataType.SAC, + seismogram=seismogram, + ) + session.add(datasource) + session.commit() + return seismogram + + +# =================================================================== +# SAC → AimbatStation +# =================================================================== + + +class TestSacStation: + """Verify SAC.station maps correctly to AimbatStation.""" + + def test_station_fields_match_sac( + self, sac_file_good: Path, session: Session + ) -> None: + """Verifies that AimbatStation fields match the source SAC file headers. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + sac = SAC.from_file(sac_file_good) + station = AimbatStation.model_validate(sac.station) + session.add(station) + session.commit() + session.refresh(station) + + assert station.name == sac.station.name + assert station.network == sac.station.network + assert station.location == sac.station.location + assert station.channel == sac.station.channel + assert station.latitude == sac.station.latitude + assert station.longitude == sac.station.longitude + assert station.elevation == sac.station.elevation + + def test_station_round_trips_through_db( + self, sac_file_good: Path, session: Session + ) -> None: + """Verifies that a Station persisted and re-fetched retains all values. + + Args: + sac_file_good (Path): Path to a valid SAC file. 
+ session (Session): Database session. + """ + sac = SAC.from_file(sac_file_good) + station = AimbatStation.model_validate(sac.station) + session.add(station) + session.commit() + + # Expire in-memory state and reload from DB. + session.expire(station) + assert station.name == sac.station.name + assert station.latitude == pytest.approx(sac.station.latitude) + assert station.longitude == pytest.approx(sac.station.longitude) + + +# =================================================================== +# SAC → AimbatEvent +# =================================================================== + + +class TestSacEvent: + """Verify SAC.event maps correctly to AimbatEvent.""" + + def test_event_fields_match_sac( + self, sac_file_good: Path, session: Session + ) -> None: + """Verifies that AimbatEvent fields match the source SAC file headers. + + Note: SAPandasTimestamp truncates to microsecond precision. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + sac = SAC.from_file(sac_file_good) + event = AimbatEvent.model_validate( + sac.event, + update={"parameters": AimbatEventParameters()}, + ) + session.add(event) + session.commit() + session.refresh(event) + + assert event.time == sac.event.time.floor("us") + assert event.latitude == sac.event.latitude + assert event.longitude == sac.event.longitude + assert event.depth == sac.event.depth + + def test_event_round_trips_through_db( + self, sac_file_good: Path, session: Session + ) -> None: + """Verifies that an Event persisted and re-fetched retains all values. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. 
+ """ + sac = SAC.from_file(sac_file_good) + event = AimbatEvent.model_validate( + sac.event, + update={"parameters": AimbatEventParameters()}, + ) + session.add(event) + session.commit() + + session.expire(event) + assert event.latitude == pytest.approx(sac.event.latitude) + assert event.longitude == pytest.approx(sac.event.longitude) + assert isinstance(event.time, Timestamp) + + +# =================================================================== +# SAC → AimbatSeismogram +# =================================================================== + + +class TestSacSeismogram: + """AimbatSeismogram backed by a real SAC file on disk.""" + + def test_metadata_matches_sac(self, sac_file_good: Path, session: Session) -> None: + """Verify that seismogram model fields correspond to the SAC file. + + SAPandasTimestamp truncates to microsecond precision when storing + in SQLite, so Timestamp comparisons use floor("us"). + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + sac = SAC.from_file(sac_file_good) + seis = _persist_sac(session, sac_file_good) + session.refresh(seis) + + assert seis.begin_time == sac.seismogram.begin_time.floor("us") + assert seis.delta == sac.seismogram.delta + assert seis.t0 == sac.timestamps.t0.floor("us") # type: ignore + + def test_read_data_from_sac(self, sac_file_good: Path, session: Session) -> None: + """Verifies that AimbatSeismogram.data returns the waveform from the SAC file. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + sac = SAC.from_file(sac_file_good) + seis = _persist_sac(session, sac_file_good) + session.refresh(seis) + + np.testing.assert_array_equal(seis.data, sac.seismogram.data) + + def test_len_matches_data(self, sac_file_good: Path, session: Session) -> None: + """Verifies that len(seismogram) equals the number of data samples. + + Args: + sac_file_good (Path): Path to a valid SAC file. 
+ session (Session): Database session. + """ + sac = SAC.from_file(sac_file_good) + seis = _persist_sac(session, sac_file_good) + session.refresh(seis) + + assert len(seis.data) == len(sac.seismogram.data) + + def test_end_time_computed(self, sac_file_good: Path, session: Session) -> None: + """Verifies that end_time is correctly computed from begin_time, delta, and npts. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + seis = _persist_sac(session, sac_file_good) + session.refresh(seis) + + expected = seis.begin_time + seis.delta * (len(seis.data) - 1) + assert seis.end_time == expected + + def test_write_data_to_sac(self, sac_file_good: Path, session: Session) -> None: + """Verifies that writing to AimbatSeismogram.data updates the SAC file on disk. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. + """ + seis = _persist_sac(session, sac_file_good) + session.refresh(seis) + + original_data = seis.data.copy() + new_data = np.zeros_like(original_data) + seis.data = new_data + + # Re-read from disk to confirm the file was updated. + reread = SAC.from_file(sac_file_good).seismogram.data + np.testing.assert_array_equal(reread, new_data) + assert not np.array_equal(reread, original_data) + + def test_proxy_properties(self, sac_file_good: Path, session: Session) -> None: + """Verifies that properties like flip, select, and t1 proxy through to parameters. + + Args: + sac_file_good (Path): Path to a valid SAC file. + session (Session): Database session. 
+ """ + seis = _persist_sac(session, sac_file_good) + session.refresh(seis) + + assert seis.select is True + seis.select = False + assert seis.parameters.select is False + + assert seis.flip is False + seis.flip = True + assert seis.parameters.flip is True + + assert seis.t1 is None + new_t1 = Timestamp("2011-09-15T19:42:25", tz=timezone.utc) + seis.t1 = new_t1 + assert seis.parameters.t1 == new_t1 diff --git a/tests/integration/test_db_operations.py b/tests/integration/test_db_operations.py new file mode 100644 index 00000000..ab7abce4 --- /dev/null +++ b/tests/integration/test_db_operations.py @@ -0,0 +1,473 @@ +"""Integration tests for ORM relationships and cascade deletes in AIMBAT models.""" + +import pytest +from aimbat.core._snapshot import create_snapshot +from aimbat.models import ( + AimbatDataSource, + AimbatEvent, + AimbatEventParameters, + AimbatEventParametersSnapshot, + AimbatSeismogram, + AimbatSeismogramParameters, + AimbatSeismogramParametersSnapshot, + AimbatSnapshot, + AimbatStation, +) +from sqlmodel import Session, select + + +@pytest.fixture +def session(loaded_session: Session) -> Session: + """Provides a session with multi-event data and an active event pre-loaded. + + Args: + loaded_session: A SQLModel Session with data populated. + + Returns: + The database session. + """ + return loaded_session + + +@pytest.fixture +def event(session: Session) -> AimbatEvent: + """Provides the first event from the database. + + Args: + session: The database session. + + Returns: + An AimbatEvent. + """ + return session.exec(select(AimbatEvent)).first() # type: ignore[return-value] + + +@pytest.fixture +def station(session: Session) -> AimbatStation: + """Provides the first station from the database. + + Args: + session: The database session. + + Returns: + An AimbatStation. 
+ """ + return session.exec(select(AimbatStation)).first() # type: ignore[return-value] + + +@pytest.fixture +def seismogram(session: Session) -> AimbatSeismogram: + """Provides the first seismogram from the database. + + Args: + session: The database session. + + Returns: + An AimbatSeismogram. + """ + return session.exec(select(AimbatSeismogram)).first() # type: ignore[return-value] + + +# --------------------------------------------------------------------------- +# Relationship traversal +# --------------------------------------------------------------------------- + + +class TestEventRelationships: + """Tests for navigating relationships on AimbatEvent.""" + + def test_event_has_parameters(self, event: AimbatEvent) -> None: + """Verifies that an event exposes its parameters via the relationship. + + Args: + event: An AimbatEvent instance. + """ + assert isinstance(event.parameters, AimbatEventParameters) + + def test_event_parameters_back_reference(self, event: AimbatEvent) -> None: + """Verifies that event parameters link back to their parent event. + + Args: + event: An AimbatEvent instance. + """ + assert event.parameters.event_id == event.id + + def test_event_has_seismograms(self, event: AimbatEvent) -> None: + """Verifies that an event exposes its seismograms via the relationship. + + Args: + event: An AimbatEvent instance. + """ + assert len(event.seismograms) > 0 + assert all(isinstance(s, AimbatSeismogram) for s in event.seismograms) + + def test_seismogram_back_reference_to_event(self, event: AimbatEvent) -> None: + """Verifies that each seismogram links back to its parent event. + + Args: + event: An AimbatEvent instance. + """ + for seis in event.seismograms: + assert seis.event_id == event.id + + def test_event_seismogram_count(self, event: AimbatEvent) -> None: + """Verifies that seismogram_count matches the number of related seismograms. + + Args: + event: An AimbatEvent instance. 
+ """ + assert event.seismogram_count == len(event.seismograms) + + def test_event_station_count(self, event: AimbatEvent) -> None: + """Verifies that station_count reflects the number of unique stations. + + Args: + event: An AimbatEvent instance. + """ + unique_stations = {s.station_id for s in event.seismograms} + assert event.station_count == len(unique_stations) + + +class TestStationRelationships: + """Tests for navigating relationships on AimbatStation.""" + + def test_station_has_seismograms(self, station: AimbatStation) -> None: + """Verifies that a station exposes its seismograms via the relationship. + + Args: + station: An AimbatStation instance. + """ + assert len(station.seismograms) > 0 + assert all(isinstance(s, AimbatSeismogram) for s in station.seismograms) + + def test_seismogram_back_reference_to_station(self, station: AimbatStation) -> None: + """Verifies that each seismogram links back to its parent station. + + Args: + station: An AimbatStation instance. + """ + for seis in station.seismograms: + assert seis.station_id == station.id + + +class TestSeismogramRelationships: + """Tests for navigating relationships on AimbatSeismogram.""" + + def test_seismogram_has_datasource(self, seismogram: AimbatSeismogram) -> None: + """Verifies that a seismogram exposes its datasource via the relationship. + + Args: + seismogram: An AimbatSeismogram instance. + """ + assert isinstance(seismogram.datasource, AimbatDataSource) + + def test_datasource_back_reference(self, seismogram: AimbatSeismogram) -> None: + """Verifies that the datasource links back to its parent seismogram. + + Args: + seismogram: An AimbatSeismogram instance. + """ + assert seismogram.datasource.seismogram_id == seismogram.id + + def test_seismogram_has_parameters(self, seismogram: AimbatSeismogram) -> None: + """Verifies that a seismogram exposes its parameters via the relationship. + + Args: + seismogram: An AimbatSeismogram instance. 
+ """ + assert isinstance(seismogram.parameters, AimbatSeismogramParameters) + + def test_seismogram_parameters_back_reference( + self, seismogram: AimbatSeismogram + ) -> None: + """Verifies that seismogram parameters link back to their parent seismogram. + + Args: + seismogram: An AimbatSeismogram instance. + """ + assert seismogram.parameters.seismogram_id == seismogram.id + + def test_seismogram_has_event(self, seismogram: AimbatSeismogram) -> None: + """Verifies that a seismogram exposes its parent event via the relationship. + + Args: + seismogram: An AimbatSeismogram instance. + """ + assert isinstance(seismogram.event, AimbatEvent) + + def test_seismogram_has_station(self, seismogram: AimbatSeismogram) -> None: + """Verifies that a seismogram exposes its parent station via the relationship. + + Args: + seismogram: An AimbatSeismogram instance. + """ + assert isinstance(seismogram.station, AimbatStation) + + +class TestSnapshotRelationships: + """Tests for navigating relationships on AimbatSnapshot.""" + + def test_snapshot_has_event_parameters_snapshot(self, session: Session) -> None: + """Verifies that a snapshot exposes its event parameter snapshot. + + Args: + session: The database session. + """ + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + assert isinstance( + snapshot.event_parameters_snapshot, AimbatEventParametersSnapshot + ) + + def test_snapshot_has_seismogram_parameter_snapshots( + self, session: Session + ) -> None: + """Verifies that a snapshot exposes its seismogram parameter snapshots. + + Args: + session: The database session. 
+ """ + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + assert len(snapshot.seismogram_parameters_snapshots) > 0 + assert all( + isinstance(s, AimbatSeismogramParametersSnapshot) + for s in snapshot.seismogram_parameters_snapshots + ) + + def test_snapshot_back_reference_to_event(self, session: Session) -> None: + """Verifies that a snapshot links back to its parent event. + + Args: + session: The database session. + """ + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + assert isinstance(snapshot.event, AimbatEvent) + + +# --------------------------------------------------------------------------- +# Cascade delete +# --------------------------------------------------------------------------- + + +class TestCascadeDeleteEvent: + """Tests that deleting an event cascades to all its dependants.""" + + def test_seismograms_deleted(self, session: Session, event: AimbatEvent) -> None: + """Verifies that deleting an event removes all its seismograms. + + Args: + session: The database session. + event: An AimbatEvent to delete. + """ + seismogram_ids = [s.id for s in event.seismograms] + assert len(seismogram_ids) > 0 + + session.delete(event) + session.commit() + + remaining = session.exec(select(AimbatSeismogram)).all() + remaining_ids = {s.id for s in remaining} + assert not any(sid in remaining_ids for sid in seismogram_ids) + + def test_event_parameters_deleted( + self, session: Session, event: AimbatEvent + ) -> None: + """Verifies that deleting an event removes its parameters. + + Args: + session: The database session. + event: An AimbatEvent to delete. + """ + parameters_id = event.parameters.id + + session.delete(event) + session.commit() + + assert session.get(AimbatEventParameters, parameters_id) is None + + def test_snapshots_deleted(self, session: Session, event: AimbatEvent) -> None: + """Verifies that deleting an event removes all its snapshots. + + Args: + session: The database session. 
+ event: An AimbatEvent to delete. + """ + create_snapshot(session) + session.refresh(event) + assert len(event.snapshots) > 0 + snapshot_ids = [s.id for s in event.snapshots] + + session.delete(event) + session.commit() + + for sid in snapshot_ids: + assert session.get(AimbatSnapshot, sid) is None + + def test_snapshot_parameter_snapshots_deleted( + self, session: Session, event: AimbatEvent + ) -> None: + """Verifies that deleting an event removes all descendant parameter snapshots. + + Args: + session: The database session. + event: An AimbatEvent to delete. + """ + create_snapshot(session) + session.refresh(event) + + session.delete(event) + session.commit() + + assert len(session.exec(select(AimbatEventParametersSnapshot)).all()) == 0 + assert len(session.exec(select(AimbatSeismogramParametersSnapshot)).all()) == 0 + + +class TestCascadeDeleteStation: + """Tests that deleting a station cascades to all its dependants.""" + + def test_seismograms_deleted( + self, session: Session, station: AimbatStation + ) -> None: + """Verifies that deleting a station removes all its seismograms. + + Args: + session: The database session. + station: An AimbatStation to delete. + """ + seismogram_ids = [s.id for s in station.seismograms] + assert len(seismogram_ids) > 0 + + session.delete(station) + session.commit() + + remaining_ids = {s.id for s in session.exec(select(AimbatSeismogram)).all()} + assert not any(sid in remaining_ids for sid in seismogram_ids) + + def test_seismogram_parameters_deleted( + self, session: Session, station: AimbatStation + ) -> None: + """Verifies that deleting a station also removes seismogram parameters. + + Args: + session: The database session. + station: An AimbatStation to delete. 
+ """ + param_ids = [s.parameters.id for s in station.seismograms] + assert len(param_ids) > 0 + + session.delete(station) + session.commit() + + for pid in param_ids: + assert session.get(AimbatSeismogramParameters, pid) is None + + def test_datasources_deleted( + self, session: Session, station: AimbatStation + ) -> None: + """Verifies that deleting a station also removes all seismogram datasources. + + Args: + session: The database session. + station: An AimbatStation to delete. + """ + datasource_ids = [s.datasource.id for s in station.seismograms] + assert len(datasource_ids) > 0 + + session.delete(station) + session.commit() + + for did in datasource_ids: + assert session.get(AimbatDataSource, did) is None + + +class TestCascadeDeleteSeismogram: + """Tests that deleting a seismogram cascades to all its dependants.""" + + def test_datasource_deleted( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that deleting a seismogram removes its datasource. + + Args: + session: The database session. + seismogram: An AimbatSeismogram to delete. + """ + datasource_id = seismogram.datasource.id + + session.delete(seismogram) + session.commit() + + assert session.get(AimbatDataSource, datasource_id) is None + + def test_parameters_deleted( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that deleting a seismogram removes its parameters. + + Args: + session: The database session. + seismogram: An AimbatSeismogram to delete. + """ + parameters_id = seismogram.parameters.id + + session.delete(seismogram) + session.commit() + + assert session.get(AimbatSeismogramParameters, parameters_id) is None + + def test_parameter_snapshots_deleted( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that deleting a seismogram removes its parameter snapshots. + + Args: + session: The database session. + seismogram: An AimbatSeismogram to delete. 
+ """ + create_snapshot(session) + parameters_id = seismogram.parameters.id + + session.delete(seismogram) + session.commit() + + assert session.get(AimbatSeismogramParameters, parameters_id) is None + remaining = session.exec(select(AimbatSeismogramParametersSnapshot)).all() + assert not any(s.seismogram_parameters_id == parameters_id for s in remaining) + + +class TestCascadeDeleteSnapshot: + """Tests that deleting a snapshot cascades to all its dependants.""" + + def test_event_parameters_snapshot_deleted(self, session: Session) -> None: + """Verifies that deleting a snapshot removes its event parameter snapshot. + + Args: + session: The database session. + """ + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + ep_snapshot_id = snapshot.event_parameters_snapshot.id + + session.delete(snapshot) + session.commit() + + assert session.get(AimbatEventParametersSnapshot, ep_snapshot_id) is None + + def test_seismogram_parameters_snapshots_deleted(self, session: Session) -> None: + """Verifies that deleting a snapshot removes all its seismogram parameter snapshots. + + Args: + session: The database session. 
+ """ + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + sp_snapshot_ids = [s.id for s in snapshot.seismogram_parameters_snapshots] + assert len(sp_snapshot_ids) > 0 + + session.delete(snapshot) + session.commit() + + for sid in sp_snapshot_ids: + assert session.get(AimbatSeismogramParametersSnapshot, sid) is None diff --git a/tests/integration/test_event.py b/tests/integration/test_event.py new file mode 100644 index 00000000..721992a1 --- /dev/null +++ b/tests/integration/test_event.py @@ -0,0 +1,351 @@ +"""Integration tests for event management functions in aimbat.core._event.""" + +import json +import uuid +import pytest +from aimbat.core._event import ( + delete_event, + delete_event_by_id, + get_completed_events, + get_events_using_station, + get_event_parameter, + set_event_parameter, + dump_event_table_to_json, + dump_event_parameter_table_to_json, + print_event_table, + print_event_parameter_table, +) +from aimbat.aimbat_types import EventParameter +from aimbat.models import AimbatEvent, AimbatStation +from pandas import Timedelta +from sqlmodel import Session, select +from sqlalchemy.exc import NoResultFound + + +@pytest.fixture +def session(loaded_session: Session) -> Session: + """Provides a session with multi-event data and an active event pre-loaded. + + Args: + loaded_session: A SQLModel Session with data populated. + + Returns: + The database session. + """ + return loaded_session + + +class TestDeleteEvent: + """Tests for deleting events from the database.""" + + def test_delete_event(self, session: Session) -> None: + """Verifies that an event is removed from the database after deletion. + + Args: + session: The database session. 
+ """ + events = session.exec(select(AimbatEvent)).all() + count_before = len(events) + non_active = next(e for e in events if not e.active) + + delete_event(session, non_active) + + remaining = session.exec(select(AimbatEvent)).all() + assert len(remaining) == count_before - 1 + assert non_active not in remaining + + def test_delete_event_by_id(self, session: Session) -> None: + """Verifies that an event is removed from the database when deleted by ID. + + Args: + session: The database session. + """ + events = session.exec(select(AimbatEvent)).all() + count_before = len(events) + non_active = next(e for e in events if not e.active) + + delete_event_by_id(session, non_active.id) + + remaining = session.exec(select(AimbatEvent)).all() + assert len(remaining) == count_before - 1 + + def test_delete_event_by_id_not_found(self, session: Session) -> None: + """Verifies that deleting a non-existent event ID raises NoResultFound. + + Args: + session: The database session. + """ + with pytest.raises(NoResultFound): + delete_event_by_id(session, uuid.uuid4()) + + +class TestGetCompletedEvents: + """Tests for retrieving events marked as completed.""" + + def test_no_completed_events(self, session: Session) -> None: + """Verifies that no events are returned when none are marked as completed. + + Args: + session: The database session. + """ + completed = get_completed_events(session) + assert len(completed) == 0 + + def test_get_completed_events(self, session: Session) -> None: + """Verifies that only events marked as completed are returned. + + Args: + session: The database session. 
+ """ + events = session.exec(select(AimbatEvent)).all() + target = events[0] + target.parameters.completed = True + session.add(target) + session.commit() + + completed = get_completed_events(session) + assert len(completed) == 1 + assert target in completed + + +class TestGetEventsUsingStation: + """Tests for retrieving events associated with a particular station.""" + + def test_get_events_using_station(self, session: Session) -> None: + """Verifies that events linked to a station are returned. + + Args: + session: The database session. + """ + station = session.exec(select(AimbatStation)).first() + assert station is not None + + events = get_events_using_station(session, station) + assert len(events) > 0 + for event in events: + station_ids = [s.station_id for s in event.seismograms] + assert station.id in station_ids + + def test_get_events_using_station_no_match(self, session: Session) -> None: + """Verifies that an empty sequence is returned for a station with no events. + + Args: + session: The database session. + """ + orphan = AimbatStation( + network="XX", + name="NONE", + location="00", + channel="BHZ", + latitude=0.0, + longitude=0.0, + ) + session.add(orphan) + session.commit() + + events = get_events_using_station(session, orphan) + assert len(events) == 0 + + +class TestGetEventParameter: + """Tests for reading parameter values from the active event.""" + + def test_get_timedelta_parameter(self, session: Session) -> None: + """Verifies that a Timedelta parameter is returned as a Timedelta. + + Args: + session: The database session. + """ + value = get_event_parameter(session, EventParameter.WINDOW_PRE) + assert isinstance(value, Timedelta) + + def test_get_float_parameter(self, session: Session) -> None: + """Verifies that a float parameter is returned as a float. + + Args: + session: The database session. 
+ """ + value = get_event_parameter(session, EventParameter.MIN_CCNORM) + assert isinstance(value, float) + + def test_get_bool_parameter(self, session: Session) -> None: + """Verifies that a bool parameter is returned as a bool. + + Args: + session: The database session. + """ + value = get_event_parameter(session, EventParameter.COMPLETED) + assert isinstance(value, bool) + + +class TestSetEventParameter: + """Tests for writing parameter values to the active event.""" + + def test_set_timedelta_parameter(self, session: Session) -> None: + """Verifies that a Timedelta parameter is persisted correctly. + + Args: + session: The database session. + """ + new_value = Timedelta(seconds=20) + set_event_parameter(session, EventParameter.WINDOW_POST, new_value) + assert get_event_parameter(session, EventParameter.WINDOW_POST) == new_value + + def test_set_float_parameter(self, session: Session) -> None: + """Verifies that a float parameter is persisted correctly. + + Args: + session: The database session. + """ + new_value = 0.75 + set_event_parameter(session, EventParameter.MIN_CCNORM, new_value) + assert get_event_parameter(session, EventParameter.MIN_CCNORM) == new_value + + def test_set_bool_parameter(self, session: Session) -> None: + """Verifies that a bool parameter is persisted correctly. + + Args: + session: The database session. + """ + set_event_parameter(session, EventParameter.COMPLETED, True) + assert get_event_parameter(session, EventParameter.COMPLETED) is True + + +class TestDumpEventTableToJson: + """Tests for serialising the event table to JSON.""" + + def test_as_string(self, session: Session) -> None: + """Verifies that a JSON string is returned when as_string=True. + + Args: + session: The database session. 
+ """ + result = dump_event_table_to_json(session, as_string=True) + assert isinstance(result, str) + parsed = json.loads(result) + assert isinstance(parsed, list) + assert len(parsed) > 0 + + def test_as_list(self, session: Session) -> None: + """Verifies that a list of dicts is returned when as_string=False. + + Args: + session: The database session. + """ + result = dump_event_table_to_json(session, as_string=False) + assert isinstance(result, list) + assert len(result) > 0 + assert "id" in result[0] + assert "active" in result[0] + + +class TestDumpEventParameterTableToJson: + """Tests for serialising the event parameter table to JSON.""" + + def test_active_event_as_string(self, session: Session) -> None: + """Verifies that a JSON string of the active event parameters is returned. + + Args: + session: The database session. + """ + result = dump_event_parameter_table_to_json( + session, all_events=False, as_string=True + ) + assert isinstance(result, str) + parsed = json.loads(result) + assert "min_ccnorm" in parsed + assert "window_pre" in parsed + assert "window_post" in parsed + + def test_active_event_as_dict(self, session: Session) -> None: + """Verifies that a dict of the active event parameters is returned. + + Args: + session: The database session. + """ + result = dump_event_parameter_table_to_json( + session, all_events=False, as_string=False + ) + assert isinstance(result, dict) + assert "min_ccnorm" in result + assert "window_pre" in result + assert "window_post" in result + + def test_all_events_as_string(self, session: Session) -> None: + """Verifies that a JSON string of all event parameters is returned. + + Args: + session: The database session. 
+ """ + result = dump_event_parameter_table_to_json( + session, all_events=True, as_string=True + ) + assert isinstance(result, str) + parsed = json.loads(result) + assert isinstance(parsed, list) + assert len(parsed) > 0 + + def test_all_events_as_list(self, session: Session) -> None: + """Verifies that a list of dicts of all event parameters is returned. + + Args: + session: The database session. + """ + result = dump_event_parameter_table_to_json( + session, all_events=True, as_string=False + ) + assert isinstance(result, list) + assert len(result) > 0 + assert "min_ccnorm" in result[0] + + +class TestPrintEventTable: + """Tests for printing the event table.""" + + def test_print_short(self, session: Session, capsys: pytest.CaptureFixture) -> None: + """Verifies that print_event_table produces output with short=True. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_event_table(session, short=True) + assert len(capsys.readouterr().out) > 0 + + def test_print_long(self, session: Session, capsys: pytest.CaptureFixture) -> None: + """Verifies that print_event_table produces output with short=False. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_event_table(session, short=False) + assert len(capsys.readouterr().out) > 0 + + +class TestPrintEventParameterTable: + """Tests for printing the event parameter table.""" + + def test_print_active_event( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that print_event_parameter_table produces output for the active event. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_event_parameter_table(session, short=False, all_events=False) + assert len(capsys.readouterr().out) > 0 + + def test_print_all_events( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that print_event_parameter_table produces output for all events. 
+ + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_event_parameter_table(session, short=False, all_events=True) + assert len(capsys.readouterr().out) > 0 diff --git a/tests/integration/test_models.py b/tests/integration/test_models.py new file mode 100644 index 00000000..e9789b48 --- /dev/null +++ b/tests/integration/test_models.py @@ -0,0 +1,668 @@ +"""Integration tests for AIMBAT SQLModel ORM classes. + +Tests cover cascade deletes, the single-active-event constraint, +type validation, and round-trip persistence of custom time types. +""" + +import pytest +from aimbat.models import ( + AimbatDataSource, + AimbatEvent, + AimbatEventParameters, + AimbatEventParametersBase, + AimbatEventParametersSnapshot, + AimbatSeismogram, + AimbatSeismogramParameters, + AimbatSeismogramParametersSnapshot, + AimbatSnapshot, + AimbatStation, +) +from aimbat.aimbat_types import DataType +from datetime import timezone +from pandas import Timedelta, Timestamp +from pydantic import ValidationError +from sqlmodel import Session, select +from collections.abc import Generator + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- +@pytest.fixture +def session(patched_session: Session) -> Generator[Session, None, None]: + yield patched_session + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _make_station(session: Session, *, name: str = "AAK") -> AimbatStation: + """Insert and return a minimal station. + + Args: + session (Session): Database session. + name (str): Station name (default: "AAK"). + + Returns: + AimbatStation: The created station. 
+ """ + sta = AimbatStation( + name=name, + network="II", + location="00", + channel="BHZ", + latitude=42.63, + longitude=74.49, + ) + session.add(sta) + session.flush() + return sta + + +def _make_event( + session: Session, + *, + time: str = "2010-02-27T06:34:14", + active: bool | None = None, +) -> AimbatEvent: + """Insert and return an event together with its mandatory parameters. + + Args: + session (Session): Database session. + time (str): Event time string (default: "2010-02-27T06:34:14"). + active (bool | None): Whether the event is active (default: None). + + Returns: + AimbatEvent: The created event. + """ + ev = AimbatEvent( + time=Timestamp(time, tz=timezone.utc), + latitude=-36.12, + longitude=-72.90, + depth=22.9, + active=active, + ) + session.add(ev) + session.flush() + + params = AimbatEventParameters(event=ev) + session.add(params) + session.flush() + return ev + + +def _make_seismogram( + session: Session, + event: AimbatEvent, + station: AimbatStation, +) -> AimbatSeismogram: + """Insert and return a seismogram (with datasource and parameters). + + Args: + session (Session): Database session. + event (AimbatEvent): The associated event. + station (AimbatStation): The associated station. + + Returns: + AimbatSeismogram: The created seismogram. 
+ """ + seis = AimbatSeismogram( + begin_time=Timestamp("2010-02-27T06:30:00", tz=timezone.utc), + delta=Timedelta(seconds=0.05), + t0=Timestamp("2010-02-27T06:40:00", tz=timezone.utc), + event=event, + station=station, + ) + session.add(seis) + session.flush() + + ds = AimbatDataSource( + sourcename="/tmp/fake.sac", + datatype=DataType.SAC, + seismogram=seis, + ) + session.add(ds) + + sp = AimbatSeismogramParameters(seismogram=seis) + session.add(sp) + + session.flush() + return seis + + +# =================================================================== +# Cascade delete tests +# =================================================================== + + +class TestCascadeDeleteEvent: + """Deleting an event must remove all related children.""" + + def test_delete_event_cascades_to_parameters(self, session: Session) -> None: + """Verifies that deleting an event also deletes its parameters. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + session.commit() + + assert session.exec(select(AimbatEventParameters)).one() is not None + + session.delete(ev) + session.commit() + + assert session.exec(select(AimbatEventParameters)).first() is None + + def test_delete_event_cascades_to_seismograms(self, session: Session) -> None: + """Verifies that deleting an event also deletes its seismograms. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + assert len(session.exec(select(AimbatSeismogram)).all()) == 1 + + session.delete(ev) + session.commit() + + assert len(session.exec(select(AimbatSeismogram)).all()) == 0 + + def test_delete_event_cascades_to_datasource(self, session: Session) -> None: + """Verifies that deleting an event cascades to delete datasources (via seismograms). + + Args: + session (Session): Database session. 
+ """ + ev = _make_event(session) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + assert session.exec(select(AimbatDataSource)).first() is not None + + session.delete(ev) + session.commit() + + assert session.exec(select(AimbatDataSource)).first() is None + + def test_delete_event_cascades_to_seismogram_parameters( + self, session: Session + ) -> None: + """Verifies that deleting an event cascades to delete seismogram parameters. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + assert session.exec(select(AimbatSeismogramParameters)).first() is not None + + session.delete(ev) + session.commit() + + assert session.exec(select(AimbatSeismogramParameters)).first() is None + + def test_delete_event_cascades_to_snapshots(self, session: Session) -> None: + """Verifies that deleting an event deletes related snapshots and their parameter copies. + + Args: + session (Session): Database session. + """ + ev = _make_event(session, active=True) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + # Create a snapshot via the core helper (uses the active event). 
+ from aimbat.core import create_snapshot + + create_snapshot(session, comment="before delete") + assert len(session.exec(select(AimbatSnapshot)).all()) == 1 + assert len(session.exec(select(AimbatEventParametersSnapshot)).all()) == 1 + assert len(session.exec(select(AimbatSeismogramParametersSnapshot)).all()) == 1 + + session.delete(ev) + session.commit() + + assert len(session.exec(select(AimbatSnapshot)).all()) == 0 + assert len(session.exec(select(AimbatEventParametersSnapshot)).all()) == 0 + assert len(session.exec(select(AimbatSeismogramParametersSnapshot)).all()) == 0 + + def test_delete_event_does_not_delete_station(self, session: Session) -> None: + """Stations are shared across events and must survive event deletion. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + session.delete(ev) + session.commit() + + remaining = session.exec(select(AimbatStation)).all() + assert len(remaining) == 1 + assert remaining[0].id == sta.id + + +class TestCascadeDeleteStation: + """Deleting a station must remove its seismograms (and their children).""" + + def test_delete_station_cascades_to_seismograms(self, session: Session) -> None: + """Verifies that deleting a station removes associated seismograms and their children. + + Args: + session (Session): Database session. 
+ """ + ev = _make_event(session) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + session.delete(sta) + session.commit() + + assert len(session.exec(select(AimbatSeismogram)).all()) == 0 + assert session.exec(select(AimbatDataSource)).first() is None + assert session.exec(select(AimbatSeismogramParameters)).first() is None + + +class TestCascadeDeleteSnapshot: + """Deleting a snapshot must remove its parameter snapshots.""" + + def test_delete_snapshot_cascades_to_parameter_snapshots( + self, session: Session + ) -> None: + """Verifies that deleting a snapshot removes its associated parameter snapshots. + + Args: + session (Session): Database session. + """ + ev = _make_event(session, active=True) + sta = _make_station(session) + _make_seismogram(session, ev, sta) + session.commit() + + from aimbat.core import create_snapshot + + create_snapshot(session) + + snapshot = session.exec(select(AimbatSnapshot)).one() + session.delete(snapshot) + session.commit() + + assert len(session.exec(select(AimbatEventParametersSnapshot)).all()) == 0 + assert len(session.exec(select(AimbatSeismogramParametersSnapshot)).all()) == 0 + + +# =================================================================== +# Single active event constraint +# =================================================================== + + +class TestSingleActiveEvent: + """The DB trigger ensures at most one event has active=True.""" + + def test_only_one_active_event_via_insert(self, session: Session) -> None: + """Inserting a new active event deactivates the previous one. + + Args: + session (Session): Database session. 
+ """ + ev1 = _make_event(session, active=True) + session.commit() + session.refresh(ev1) + assert ev1.active is True + + ev2 = _make_event(session, time="2011-03-11T05:46:24", active=True) + session.commit() + + session.refresh(ev1) + session.refresh(ev2) + assert ev1.active is None + assert ev2.active is True + + def test_only_one_active_event_via_update(self, session: Session) -> None: + """Updating an event to active deactivates the previous one. + + Args: + session (Session): Database session. + """ + ev1 = _make_event(session, active=True) + ev2 = _make_event(session, time="2011-03-11T05:46:24") + session.commit() + + ev2.active = True + session.add(ev2) + session.commit() + + session.refresh(ev1) + session.refresh(ev2) + assert ev1.active is None + assert ev2.active is True + + def test_multiple_inactive_events_allowed(self, session: Session) -> None: + """Multiple events may exist without any being active. + + Args: + session (Session): Database session. + """ + _make_event(session, time="2010-01-01T00:00:00") + _make_event(session, time="2011-01-01T00:00:00") + _make_event(session, time="2012-01-01T00:00:00") + session.commit() + + active = session.exec( + select(AimbatEvent).where(AimbatEvent.active == True) # noqa: E712 + ).all() + assert len(active) == 0 + + def test_cycling_active_through_three_events(self, session: Session) -> None: + """Verifies cycling active status through multiple events ensures only one is active at a time. + + Args: + session (Session): Database session. 
+ """ + ev1 = _make_event(session, time="2010-01-01T00:00:00", active=True) + ev2 = _make_event(session, time="2011-01-01T00:00:00") + ev3 = _make_event(session, time="2012-01-01T00:00:00") + session.commit() + + for target in [ev2, ev3, ev1]: + target.active = True + session.add(target) + session.commit() + + active = session.exec( + select(AimbatEvent).where(AimbatEvent.active == True) # noqa: E712 + ).all() + assert len(active) == 1 + session.refresh(target) + assert target.active is True + + +# =================================================================== +# Type validation +# =================================================================== + + +class TestEventValidation: + """Pydantic validation on AimbatEvent fields.""" + + def test_event_time_accepts_string(self, session: Session) -> None: + """Verifies that the event time field accepts ISO format strings and converts them to Timestamp. + + Args: + session (Session): Database session. + """ + ev = AimbatEvent( + time="2010-02-27T06:34:14+00:00", + latitude=0.0, + longitude=0.0, + ) + session.add(ev) + session.flush() + params = AimbatEventParameters(event=ev) + session.add(params) + session.commit() + + session.refresh(ev) + assert isinstance(ev.time, Timestamp) + + def test_event_rejects_invalid_time(self) -> None: + """model_validate enforces Pydantic type coercion for table models.""" + with pytest.raises(ValidationError): + AimbatEvent.model_validate( + {"time": "not-a-date", "latitude": 0.0, "longitude": 0.0} + ) + + +class TestEventParametersValidation: + """Validation rules on AimbatEventParametersBase (non-table base class). + + SQLModel table models skip Pydantic validation on __init__, so we test + via the base class and via model_validate on the table class. 
+ """ + + def test_min_ccnorm_rejects_out_of_range(self) -> None: + """Verifies that min_ccnorm rejects values > 1.0.""" + with pytest.raises(ValidationError): + AimbatEventParametersBase(min_ccnorm=1.5) + + def test_min_ccnorm_rejects_negative(self) -> None: + """Verifies that min_ccnorm rejects negative values.""" + with pytest.raises(ValidationError): + AimbatEventParametersBase(min_ccnorm=-0.1) + + def test_window_pre_must_be_negative(self) -> None: + """Verifies that window_pre must be a negative Timedelta.""" + with pytest.raises(ValidationError): + AimbatEventParametersBase(window_pre=Timedelta(seconds=5)) + + def test_window_post_must_be_positive(self) -> None: + """Verifies that window_post must be a positive Timedelta.""" + with pytest.raises(ValidationError): + AimbatEventParametersBase(window_post=Timedelta(seconds=-5)) + + def test_bandpass_fmax_must_exceed_fmin(self) -> None: + """The bandpass validator mixin is on AimbatEventParameters (table model), + so we must use model_validate to trigger it.""" + with pytest.raises(ValidationError): + AimbatEventParameters.model_validate( + {"bandpass_fmin": 2.0, "bandpass_fmax": 1.0} + ) + + def test_bandpass_fmax_must_not_equal_fmin(self) -> None: + """Verifies that bandpass_fmax cannot equal bandpass_fmin.""" + with pytest.raises(ValidationError): + AimbatEventParameters.model_validate( + {"bandpass_fmin": 1.0, "bandpass_fmax": 1.0} + ) + + def test_model_validate_enforces_rules_on_table_class(self) -> None: + """model_validate on the table class must also reject invalid values.""" + with pytest.raises(ValidationError): + AimbatEventParameters.model_validate({"min_ccnorm": 1.5}) + + def test_valid_parameters_accepted(self, session: Session) -> None: + """Verifies that valid parameters are accepted. + + Args: + session (Session): Database session. 
+ """ + ev = _make_event(session) + session.commit() + session.refresh(ev) + + params = ev.parameters + assert params.completed is False + assert params.min_ccnorm >= 0 + assert params.min_ccnorm <= 1 + assert params.window_pre.total_seconds() < 0 + assert params.window_post.total_seconds() > 0 + + +class TestSeismogramParametersValidation: + """Validation rules on seismogram-related models.""" + + def test_default_select_is_true(self, session: Session) -> None: + """Verifies that the default 'select' parameter is True. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + seis = _make_seismogram(session, ev, sta) + session.commit() + session.refresh(seis) + + assert seis.parameters.select is True + + def test_default_flip_is_false(self, session: Session) -> None: + """Verifies that the default 'flip' parameter is False. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + seis = _make_seismogram(session, ev, sta) + session.commit() + session.refresh(seis) + + assert seis.parameters.flip is False + + def test_default_t1_is_none(self, session: Session) -> None: + """Verifies that the default 't1' parameter is None. + + Args: + session (Session): Database session. 
+ """ + ev = _make_event(session) + sta = _make_station(session) + seis = _make_seismogram(session, ev, sta) + session.commit() + session.refresh(seis) + + assert seis.parameters.t1 is None + + def test_seismogram_delta_must_be_positive(self) -> None: + """model_validate enforces Pydantic type constraints for table models.""" + with pytest.raises(ValidationError): + AimbatSeismogram.model_validate( + { + "begin_time": Timestamp("2010-01-01", tz=timezone.utc), + "delta": Timedelta(seconds=-1), + "t0": Timestamp("2010-01-01", tz=timezone.utc), + } + ) + + +# =================================================================== +# Round-trip persistence of custom time types +# =================================================================== + + +class TestTimestampRoundTrip: + """Pandas Timestamp values survive a write→read cycle via SQLAlchemy.""" + + def test_event_time_round_trip(self, session: Session) -> None: + """Verifies round-trip persistence of event time as a Timestamp. + + Args: + session (Session): Database session. + """ + ts = Timestamp("2010-02-27T06:34:14", tz=timezone.utc) + ev = _make_event(session, time="2010-02-27T06:34:14") + session.commit() + + session.refresh(ev) + assert isinstance(ev.time, Timestamp) + assert ev.time == ts + + def test_seismogram_times_round_trip(self, session: Session) -> None: + """Verifies round-trip persistence of seismogram times as Timestamps. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + seis = _make_seismogram(session, ev, sta) + session.commit() + + session.refresh(seis) + assert isinstance(seis.begin_time, Timestamp) + assert isinstance(seis.t0, Timestamp) + + +class TestTimedeltaRoundTrip: + """Pandas Timedelta values survive a write→read cycle via SQLAlchemy.""" + + def test_event_parameters_window_round_trip(self, session: Session) -> None: + """Verifies round-trip persistence of event parameter windows as Timedeltas. 
+ + Args: + session (Session): Database session. + """ + ev = _make_event(session) + session.commit() + + session.refresh(ev) + params = ev.parameters + assert isinstance(params.window_pre, Timedelta) + assert isinstance(params.window_post, Timedelta) + assert params.window_pre.total_seconds() < 0 + assert params.window_post.total_seconds() > 0 + + def test_seismogram_delta_round_trip(self, session: Session) -> None: + """Verifies round-trip persistence of seismogram delta as Timedelta. + + Args: + session (Session): Database session. + """ + ev = _make_event(session) + sta = _make_station(session) + seis = _make_seismogram(session, ev, sta) + session.commit() + + session.refresh(seis) + assert isinstance(seis.delta, Timedelta) + assert seis.delta == Timedelta(seconds=0.05) + + +# =================================================================== +# Unique constraints +# =================================================================== + + +class TestUniqueConstraints: + """Verify that unique column constraints are enforced.""" + + def test_duplicate_event_time_rejected(self, session: Session) -> None: + """Verifies that creating two events with the same time raises an IntegrityError. + + Args: + session (Session): Database session. + """ + from sqlalchemy.exc import IntegrityError + + same_time = "2010-02-27T06:34:14" + _make_event(session, time=same_time) + session.commit() + + # Manually insert a second event with the same time (bypass helper flush). + ev2 = AimbatEvent( + time=Timestamp(same_time, tz=timezone.utc), + latitude=0.0, + longitude=0.0, + ) + session.add(ev2) + with pytest.raises(IntegrityError): + session.flush() + + def test_different_event_times_allowed(self, session: Session) -> None: + """Verifies that events with different times are allowed. + + Args: + session (Session): Database session. 
+ """ + _make_event(session, time="2010-01-01T00:00:00") + _make_event(session, time="2011-01-01T00:00:00") + session.commit() + + events = session.exec(select(AimbatEvent)).all() + assert len(events) == 2 diff --git a/tests/integration/test_project.py b/tests/integration/test_project.py new file mode 100644 index 00000000..43f63f68 --- /dev/null +++ b/tests/integration/test_project.py @@ -0,0 +1,131 @@ +"""Do some tests with a real file-based database. + +This is to verify that the project creation and deletion works as expected. +""" + +import pytest +from pathlib import Path +from aimbat.core import create_project, delete_project +from aimbat.core._project import _project_exists, print_project_info +from collections.abc import Generator +from sqlalchemy import Engine + + +class TestProjectLifecycle: + """Integration tests for core project management functions.""" + + @pytest.fixture + def engine(self, engine_from_file: Engine) -> Generator[Engine, None, None]: + yield engine_from_file + + def test_create(self, engine: Engine, db_path: Path) -> None: + """Verifies that a new project can be created successfully. + + This test ensures that `create_project` creates the database file and that `_project_exists` + correctly reflects the project's existence. + + Args: + engine (Engine): The SQLAlchemy engine. + project_file (Path): The path to the expected project database file. + """ + assert not db_path.exists(), "expected no project file at the start of the test" + assert ( + _project_exists(engine) is False + ), "expected _project_exists() to return False at the start of the test" + + create_project(engine) + + assert ( + db_path.exists() + ), "expected project file to be created after calling create_project()" + assert ( + _project_exists(engine) is True + ), "expected _project_exists() to return True after creating project" + + def test_create_if_one_exists(self, engine: Engine) -> None: + """Verifies that creating a project fails if one already exists. 
+ + Args: + engine (Engine): The SQLAlchemy engine. + """ + assert not _project_exists( + engine + ), "expected no project at the start of the test" + create_project(engine) + assert _project_exists( + engine + ), "expected project to exist after calling create_project()" + + with pytest.raises(RuntimeError): + create_project(engine) + + def test_delete_project(self, engine: Engine) -> None: + """Verifies that an existing project can be deleted. + + Args: + engine (Engine): The SQLAlchemy engine. + """ + assert not _project_exists( + engine + ), "expected no project at the start of the test" + create_project(engine) + assert _project_exists( + engine + ), "expected project to exist after calling create_project()" + + delete_project(engine) + assert not _project_exists( + engine + ), "expected no project after calling delete_project()" + + def test_delete_project_when_there_is_none(self, engine: Engine) -> None: + """Verifies that attempting to delete a non-existent project raises an error. + + Args: + engine (Engine): The SQLAlchemy engine. + """ + assert not _project_exists( + engine + ), "expected no project at the start of the test" + with pytest.raises(RuntimeError): + delete_project(engine) + + +class TestPrintProjectInfo: + """Tests for printing project summary information.""" + + def test_raises_when_no_project( + self, engine_from_file: Engine, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that a RuntimeError is raised when no project exists. + + Args: + engine_from_file: A SQLAlchemy Engine connected to an empty file database. + capsys: The pytest capsys fixture. + """ + with pytest.raises(RuntimeError): + print_project_info(engine_from_file) + + def test_with_empty_project( + self, patched_engine: Engine, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that output is produced for a project with no data or active event. + + Args: + patched_engine: The monkeypatched SQLAlchemy Engine. + capsys: The pytest capsys fixture. 
+ """ + print_project_info(patched_engine) + assert len(capsys.readouterr().out) > 0 + + def test_with_data_and_active_event( + self, loaded_engine: Engine, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that output is produced for a project with data and an active event. + + Args: + loaded_engine: The monkeypatched SQLAlchemy Engine with data loaded. + capsys: The pytest capsys fixture. + """ + print_project_info(loaded_engine) + assert len(capsys.readouterr().out) > 0 diff --git a/tests/integration/test_seismogram.py b/tests/integration/test_seismogram.py new file mode 100644 index 00000000..2f802ed0 --- /dev/null +++ b/tests/integration/test_seismogram.py @@ -0,0 +1,416 @@ +"""Integration tests for seismogram management functions in aimbat.core._seismogram.""" + +import json +import uuid +import pytest +from aimbat.core._seismogram import ( + delete_seismogram, + delete_seismogram_by_id, + get_seismogram_parameter, + get_seismogram_parameter_by_id, + set_seismogram_parameter, + set_seismogram_parameter_by_id, + get_selected_seismograms, + dump_seismogram_table_to_json, + dump_seismogram_parameter_table_to_json, + print_seismogram_table, + print_seismogram_parameter_table, + plot_all_seismograms, +) +from aimbat.aimbat_types import SeismogramParameter +from aimbat.models import AimbatSeismogram +from matplotlib.figure import Figure +from pandas import Timestamp +from sqlmodel import Session, select +from sqlalchemy.exc import NoResultFound + + +@pytest.fixture +def session(loaded_session: Session) -> Session: + """Provides a session with multi-event data and an active event pre-loaded. + + Args: + loaded_session: A SQLModel Session with data populated. + + Returns: + The database session. + """ + return loaded_session + + +@pytest.fixture +def seismogram(session: Session) -> AimbatSeismogram: + """Provides the first seismogram from the active event. + + Args: + session: The database session. + + Returns: + An AimbatSeismogram from the active event. 
+ """ + return session.exec(select(AimbatSeismogram)).first() # type: ignore[return-value] + + +class TestDeleteSeismogram: + """Tests for deleting seismograms from the database.""" + + def test_delete_seismogram( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that a seismogram is removed from the database after deletion. + + Args: + session: The database session. + seismogram: An AimbatSeismogram to delete. + """ + count_before = len(session.exec(select(AimbatSeismogram)).all()) + delete_seismogram(session, seismogram) + assert len(session.exec(select(AimbatSeismogram)).all()) == count_before - 1 + + def test_delete_seismogram_by_id( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that a seismogram is removed from the database when deleted by ID. + + Args: + session: The database session. + seismogram: An AimbatSeismogram whose ID is used for deletion. + """ + count_before = len(session.exec(select(AimbatSeismogram)).all()) + delete_seismogram_by_id(session, seismogram.id) + assert len(session.exec(select(AimbatSeismogram)).all()) == count_before - 1 + + def test_delete_seismogram_by_id_not_found(self, session: Session) -> None: + """Verifies that deleting a non-existent seismogram ID raises NoResultFound. + + Args: + session: The database session. + """ + with pytest.raises(NoResultFound): + delete_seismogram_by_id(session, uuid.uuid4()) + + +class TestGetSeismogramParameter: + """Tests for reading parameter values from a seismogram instance.""" + + def test_get_bool_parameter(self, seismogram: AimbatSeismogram) -> None: + """Verifies that a bool parameter is returned as a bool. + + Args: + seismogram: An AimbatSeismogram instance. 
+ """ + value = get_seismogram_parameter(seismogram, SeismogramParameter.SELECT) + assert isinstance(value, bool) + + def test_get_timestamp_parameter_default_none( + self, seismogram: AimbatSeismogram + ) -> None: + """Verifies that the t1 parameter returns None when not set. + + Args: + seismogram: An AimbatSeismogram instance. + """ + value = get_seismogram_parameter(seismogram, SeismogramParameter.T1) + assert value is None + + def test_get_timestamp_parameter_after_set( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that t1 is returned as a Timestamp after being set. + + Args: + session: The database session. + seismogram: An AimbatSeismogram instance. + """ + t1 = seismogram.t0 + set_seismogram_parameter(session, seismogram, SeismogramParameter.T1, t1) + value = get_seismogram_parameter(seismogram, SeismogramParameter.T1) + assert isinstance(value, Timestamp) + + +class TestGetSeismogramParameterById: + """Tests for reading parameter values from a seismogram by ID.""" + + def test_get_by_id(self, session: Session, seismogram: AimbatSeismogram) -> None: + """Verifies that a bool parameter is returned correctly when looked up by ID. + + Args: + session: The database session. + seismogram: An AimbatSeismogram whose ID is used for lookup. + """ + value = get_seismogram_parameter_by_id( + session, seismogram.id, SeismogramParameter.SELECT + ) + assert isinstance(value, bool) + + def test_get_by_id_not_found(self, session: Session) -> None: + """Verifies that a ValueError is raised for an unknown seismogram ID. + + Args: + session: The database session. 
+ """ + with pytest.raises(ValueError): + get_seismogram_parameter_by_id( + session, uuid.uuid4(), SeismogramParameter.SELECT + ) + + +class TestSetSeismogramParameter: + """Tests for writing parameter values to a seismogram instance.""" + + def test_set_bool_parameter( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that a bool parameter is persisted correctly. + + Args: + session: The database session. + seismogram: An AimbatSeismogram instance. + """ + original = get_seismogram_parameter(seismogram, SeismogramParameter.SELECT) + set_seismogram_parameter( + session, seismogram, SeismogramParameter.SELECT, not original + ) + assert ( + get_seismogram_parameter(seismogram, SeismogramParameter.SELECT) + is not original + ) + + def test_set_timestamp_parameter( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that a Timestamp parameter is persisted correctly. + + Args: + session: The database session. + seismogram: An AimbatSeismogram instance. + """ + t1 = seismogram.t0 + set_seismogram_parameter(session, seismogram, SeismogramParameter.T1, t1) + assert get_seismogram_parameter(seismogram, SeismogramParameter.T1) == t1 + + +class TestSetSeismogramParameterById: + """Tests for writing parameter values to a seismogram by ID.""" + + def test_set_by_id(self, session: Session, seismogram: AimbatSeismogram) -> None: + """Verifies that a bool parameter is persisted when set by seismogram ID. + + Args: + session: The database session. + seismogram: An AimbatSeismogram whose ID is used for lookup. + """ + set_seismogram_parameter_by_id( + session, seismogram.id, SeismogramParameter.FLIP, True + ) + assert get_seismogram_parameter(seismogram, SeismogramParameter.FLIP) is True + + def test_set_by_id_not_found(self, session: Session) -> None: + """Verifies that a ValueError is raised for an unknown seismogram ID. + + Args: + session: The database session. 
+ """ + with pytest.raises(ValueError): + set_seismogram_parameter_by_id( + session, uuid.uuid4(), SeismogramParameter.FLIP, True + ) + + +class TestGetSelectedSeismograms: + """Tests for retrieving selected seismograms.""" + + def test_all_selected_by_default(self, session: Session) -> None: + """Verifies that all seismograms in the active event are selected by default. + + Args: + session: The database session. + """ + selected = get_selected_seismograms(session) + assert len(selected) > 0 + + def test_after_deselecting_one( + self, session: Session, seismogram: AimbatSeismogram + ) -> None: + """Verifies that deselecting a seismogram removes it from the selected set. + + Args: + session: The database session. + seismogram: An AimbatSeismogram to deselect. + """ + count_before = len(get_selected_seismograms(session)) + set_seismogram_parameter(session, seismogram, SeismogramParameter.SELECT, False) + assert len(get_selected_seismograms(session)) == count_before - 1 + + def test_all_events(self, session: Session) -> None: + """Verifies that get_selected_seismograms returns seismograms across all events. + + Args: + session: The database session. + """ + selected_active = get_selected_seismograms(session, all_events=False) + selected_all = get_selected_seismograms(session, all_events=True) + assert len(selected_all) >= len(selected_active) + + +class TestDumpSeismogramTableToJson: + """Tests for serialising the seismogram table to JSON.""" + + def test_returns_json_string(self, session: Session) -> None: + """Verifies that the seismogram table is returned as a valid JSON string. + + Args: + session: The database session. 
+ """ + result = dump_seismogram_table_to_json(session) + assert isinstance(result, str) + parsed = json.loads(result) + assert isinstance(parsed, list) + assert len(parsed) > 0 + + +class TestDumpSeismogramParameterTableToJson: + """Tests for serialising the seismogram parameter table to JSON.""" + + def test_active_event_as_string(self, session: Session) -> None: + """Verifies that a JSON string of the active event's parameters is returned. + + Args: + session: The database session. + """ + result = dump_seismogram_parameter_table_to_json( + session, all_events=False, as_string=True + ) + assert isinstance(result, str) + parsed = json.loads(result) + assert isinstance(parsed, list) + assert len(parsed) > 0 + + def test_active_event_as_list(self, session: Session) -> None: + """Verifies that a list of dicts of the active event's parameters is returned. + + Args: + session: The database session. + """ + result = dump_seismogram_parameter_table_to_json( + session, all_events=False, as_string=False + ) + assert isinstance(result, list) + assert len(result) > 0 + assert "select" in result[0] + + def test_all_events_as_string(self, session: Session) -> None: + """Verifies that a JSON string of all events' parameters is returned. + + Args: + session: The database session. + """ + result = dump_seismogram_parameter_table_to_json( + session, all_events=True, as_string=True + ) + assert isinstance(result, str) + parsed = json.loads(result) + assert isinstance(parsed, list) + assert len(parsed) > 0 + + def test_all_events_as_list(self, session: Session) -> None: + """Verifies that a list of dicts of all events' parameters is returned. + + Args: + session: The database session. 
+ """ + result = dump_seismogram_parameter_table_to_json( + session, all_events=True, as_string=False + ) + assert isinstance(result, list) + assert len(result) > 0 + assert "select" in result[0] + + def test_all_events_returns_more_than_active_only(self, session: Session) -> None: + """Verifies that all_events=True returns more rows than active event only. + + Args: + session: The database session. + """ + active_only = dump_seismogram_parameter_table_to_json( + session, all_events=False, as_string=False + ) + all_events = dump_seismogram_parameter_table_to_json( + session, all_events=True, as_string=False + ) + assert len(all_events) >= len(active_only) + + +class TestPrintSeismogramTable: + """Tests for printing the seismogram table.""" + + def test_active_event_short( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that output is produced for the active event with short=True. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_seismogram_table(session, short=True, all_events=False) + assert len(capsys.readouterr().out) > 0 + + def test_active_event_long( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that output is produced for the active event with short=False. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_seismogram_table(session, short=False, all_events=False) + assert len(capsys.readouterr().out) > 0 + + def test_all_events(self, session: Session, capsys: pytest.CaptureFixture) -> None: + """Verifies that output is produced when printing seismograms for all events. + + Args: + session: The database session. + capsys: The pytest capsys fixture. 
+ """ + print_seismogram_table(session, short=False, all_events=True) + assert len(capsys.readouterr().out) > 0 + + +class TestPrintSeismogramParameterTable: + """Tests for printing the seismogram parameter table.""" + + def test_print_short(self, session: Session, capsys: pytest.CaptureFixture) -> None: + """Verifies that output is produced with short=True. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_seismogram_parameter_table(session, short=True) + assert len(capsys.readouterr().out) > 0 + + def test_print_long(self, session: Session, capsys: pytest.CaptureFixture) -> None: + """Verifies that output is produced with short=False. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_seismogram_parameter_table(session, short=False) + assert len(capsys.readouterr().out) > 0 + + +class TestPlotAllSeismograms: + """Tests for plotting seismograms.""" + + def test_returns_figure(self, session: Session) -> None: + """Verifies that plot_all_seismograms returns a matplotlib Figure. + + Args: + session: The database session. 
+ """ + fig = plot_all_seismograms(session) + assert isinstance(fig, Figure) diff --git a/tests/integration/test_snapshots.py b/tests/integration/test_snapshots.py new file mode 100644 index 00000000..8605bad7 --- /dev/null +++ b/tests/integration/test_snapshots.py @@ -0,0 +1,389 @@ +"""Integration tests for snapshot management functions in aimbat.core._snapshot.""" + +import json +import uuid +import pytest +from aimbat.core._snapshot import ( + create_snapshot, + delete_snapshot, + delete_snapshot_by_id, + get_snapshots, + rollback_to_snapshot, + rollback_to_snapshot_by_id, + dump_snapshot_tables_to_json, + print_snapshot_table, +) +from aimbat.core import get_active_event +from aimbat.models import AimbatSnapshot, AimbatSeismogram +from sqlmodel import Session, select + + +@pytest.fixture +def session(loaded_session: Session) -> Session: + """Provides a session with multi-event data and an active event pre-loaded. + + Args: + loaded_session: A SQLModel Session with data populated. + + Returns: + The database session. + """ + return loaded_session + + +@pytest.fixture +def snapshot(session: Session) -> AimbatSnapshot: + """Provides a snapshot of the active event's current parameters. + + Args: + session: The database session. + + Returns: + An AimbatSnapshot for the active event. + """ + create_snapshot(session) + return session.exec(select(AimbatSnapshot)).one() + + +class TestCreateSnapshot: + """Tests for creating parameter snapshots.""" + + def test_creates_snapshot(self, session: Session) -> None: + """Verifies that a snapshot is written to the database. + + Args: + session: The database session. + """ + assert len(session.exec(select(AimbatSnapshot)).all()) == 0 + create_snapshot(session) + assert len(session.exec(select(AimbatSnapshot)).all()) == 1 + + def test_snapshot_linked_to_active_event(self, session: Session) -> None: + """Verifies that the snapshot is associated with the active event. + + Args: + session: The database session. 
+ """ + active_event = get_active_event(session) + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + assert snapshot.event_id == active_event.id + + def test_snapshot_with_comment(self, session: Session) -> None: + """Verifies that the optional comment is stored on the snapshot. + + Args: + session: The database session. + """ + create_snapshot(session, comment="test comment") + snapshot = session.exec(select(AimbatSnapshot)).one() + assert snapshot.comment == "test comment" + + def test_snapshot_without_comment(self, session: Session) -> None: + """Verifies that the comment defaults to None when not provided. + + Args: + session: The database session. + """ + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + assert snapshot.comment is None + + def test_snapshot_captures_seismogram_parameters(self, session: Session) -> None: + """Verifies that the snapshot includes one entry per seismogram. + + Args: + session: The database session. + """ + active_event = get_active_event(session) + n_seismograms = len(active_event.seismograms) + + create_snapshot(session) + snapshot = session.exec(select(AimbatSnapshot)).one() + assert len(snapshot.seismogram_parameters_snapshots) == n_seismograms + + def test_snapshot_captures_event_parameters( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that the snapshot includes event parameters. + + Args: + session: The database session. + snapshot: An AimbatSnapshot for the active event. + """ + active_event = get_active_event(session) + assert ( + snapshot.event_parameters_snapshot.parameters_id + == active_event.parameters.id + ) + + +class TestDeleteSnapshot: + """Tests for deleting snapshots.""" + + def test_delete_snapshot(self, session: Session, snapshot: AimbatSnapshot) -> None: + """Verifies that a snapshot is removed from the database. + + Args: + session: The database session. + snapshot: An AimbatSnapshot to delete. 
+ """ + delete_snapshot(session, snapshot) + assert len(session.exec(select(AimbatSnapshot)).all()) == 0 + + def test_delete_snapshot_by_id( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that a snapshot is removed when deleted by ID. + + Args: + session: The database session. + snapshot: An AimbatSnapshot whose ID is used for deletion. + """ + delete_snapshot_by_id(session, snapshot.id) + assert session.get(AimbatSnapshot, snapshot.id) is None + + def test_delete_snapshot_by_id_not_found(self, session: Session) -> None: + """Verifies that deleting a non-existent snapshot ID raises ValueError. + + Args: + session: The database session. + """ + with pytest.raises(ValueError): + delete_snapshot_by_id(session, uuid.uuid4()) + + +class TestRollbackToSnapshot: + """Tests for rolling back parameters to a snapshot.""" + + def test_rollback_restores_event_parameters( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that event parameters are restored to snapshot values on rollback. + + Args: + session: The database session. + snapshot: An AimbatSnapshot capturing the original parameters. + """ + active_event = get_active_event(session) + original_min_ccnorm = snapshot.event_parameters_snapshot.min_ccnorm + + # Mutate the parameter after taking the snapshot + active_event.parameters.min_ccnorm = 0.0 + session.add(active_event) + session.commit() + assert active_event.parameters.min_ccnorm == 0.0 + + rollback_to_snapshot(session, snapshot) + session.refresh(active_event) + assert active_event.parameters.min_ccnorm == original_min_ccnorm + + def test_rollback_restores_seismogram_parameters( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that seismogram parameters are restored to snapshot values on rollback. + + Args: + session: The database session. + snapshot: An AimbatSnapshot capturing the original parameters. 
+ """ + active_event = get_active_event(session) + seismogram = active_event.seismograms[0] + original_select = snapshot.seismogram_parameters_snapshots[0].select + + # Mutate the parameter after taking the snapshot + seismogram.parameters.select = not original_select + session.add(seismogram) + session.commit() + + rollback_to_snapshot(session, snapshot) + session.refresh(seismogram) + assert seismogram.parameters.select == original_select + + def test_rollback_by_id(self, session: Session, snapshot: AimbatSnapshot) -> None: + """Verifies that rollback_to_snapshot_by_id produces the same result as rollback_to_snapshot. + + Args: + session: The database session. + snapshot: An AimbatSnapshot to roll back to. + """ + active_event = get_active_event(session) + original_min_ccnorm = snapshot.event_parameters_snapshot.min_ccnorm + + active_event.parameters.min_ccnorm = 0.0 + session.add(active_event) + session.commit() + + rollback_to_snapshot_by_id(session, snapshot.id) + session.refresh(active_event) + assert active_event.parameters.min_ccnorm == original_min_ccnorm + + def test_rollback_by_id_not_found(self, session: Session) -> None: + """Verifies that rolling back to a non-existent snapshot ID raises ValueError. + + Args: + session: The database session. + """ + with pytest.raises(ValueError): + rollback_to_snapshot_by_id(session, uuid.uuid4()) + + +class TestGetSnapshots: + """Tests for retrieving snapshots from the database.""" + + def test_no_snapshots_initially(self, session: Session) -> None: + """Verifies that no snapshots exist before any are created. + + Args: + session: The database session. + """ + assert len(get_snapshots(session)) == 0 + + def test_get_snapshots_for_active_event( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that snapshots for the active event are returned. + + Args: + session: The database session. + snapshot: An AimbatSnapshot for the active event. 
+ """ + snapshots = get_snapshots(session, all_events=False) + assert len(snapshots) == 1 + assert snapshots[0].id == snapshot.id + + def test_get_snapshots_all_events( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that get_snapshots with all_events=True includes all events. + + Args: + session: The database session. + snapshot: An AimbatSnapshot for the active event. + """ + all_snapshots = get_snapshots(session, all_events=True) + assert len(all_snapshots) >= 1 + + def test_multiple_snapshots(self, session: Session) -> None: + """Verifies that multiple snapshots can be created and retrieved. + + Args: + session: The database session. + """ + create_snapshot(session, comment="first") + create_snapshot(session, comment="second") + assert len(get_snapshots(session)) == 2 + + +class TestDumpSnapshotTablesToJson: + """Tests for serialising snapshot data to JSON.""" + + def test_as_string(self, session: Session, snapshot: AimbatSnapshot) -> None: + """Verifies that a JSON string is returned when as_string=True. + + Args: + session: The database session. + snapshot: An AimbatSnapshot to include in the dump. + """ + result = dump_snapshot_tables_to_json(session, all_events=False, as_string=True) + assert isinstance(result, str) + parsed = json.loads(result) + assert "snapshots" in parsed + assert "event_parameters" in parsed + assert "seismogram_parameters" in parsed + + def test_as_dict(self, session: Session, snapshot: AimbatSnapshot) -> None: + """Verifies that a dict is returned when as_string=False. + + Args: + session: The database session. + snapshot: An AimbatSnapshot to include in the dump. 
+ """ + result = dump_snapshot_tables_to_json( + session, all_events=False, as_string=False + ) + assert isinstance(result, dict) + assert "snapshots" in result + assert len(result["snapshots"]) == 1 + + def test_all_events_includes_more_snapshots( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that all_events=True returns at least as many snapshots as active only. + + Args: + session: The database session. + snapshot: An AimbatSnapshot to include in the dump. + """ + active_only = dump_snapshot_tables_to_json( + session, all_events=False, as_string=False + ) + all_events = dump_snapshot_tables_to_json( + session, all_events=True, as_string=False + ) + assert len(all_events["snapshots"]) >= len(active_only["snapshots"]) + + def test_seismogram_parameters_count( + self, session: Session, snapshot: AimbatSnapshot + ) -> None: + """Verifies that seismogram_parameters count matches the active event's seismograms. + + Args: + session: The database session. + snapshot: An AimbatSnapshot to include in the dump. + """ + n_seismograms = len(session.exec(select(AimbatSeismogram)).all()) + result = dump_snapshot_tables_to_json(session, all_events=True, as_string=False) + assert len(result["seismogram_parameters"]) <= n_seismograms + + +class TestPrintSnapshotTable: + """Tests for printing the snapshot table.""" + + def test_print_active_event_short( + self, + session: Session, + snapshot: AimbatSnapshot, + capsys: pytest.CaptureFixture, + ) -> None: + """Verifies that output is produced for the active event with short=True. + + Args: + session: The database session. + snapshot: An AimbatSnapshot to display. + capsys: The pytest capsys fixture. 
+ """ + print_snapshot_table(session, short=True, all_events=False) + assert len(capsys.readouterr().out) > 0 + + def test_print_active_event_long( + self, + session: Session, + snapshot: AimbatSnapshot, + capsys: pytest.CaptureFixture, + ) -> None: + """Verifies that output is produced for the active event with short=False. + + Args: + session: The database session. + snapshot: An AimbatSnapshot to display. + capsys: The pytest capsys fixture. + """ + print_snapshot_table(session, short=False, all_events=False) + assert len(capsys.readouterr().out) > 0 + + def test_print_all_events( + self, + session: Session, + snapshot: AimbatSnapshot, + capsys: pytest.CaptureFixture, + ) -> None: + """Verifies that output is produced when printing snapshots for all events. + + Args: + session: The database session. + snapshot: An AimbatSnapshot to display. + capsys: The pytest capsys fixture. + """ + print_snapshot_table(session, short=False, all_events=True) + assert len(capsys.readouterr().out) > 0 diff --git a/tests/integration/test_station.py b/tests/integration/test_station.py new file mode 100644 index 00000000..74f69b9f --- /dev/null +++ b/tests/integration/test_station.py @@ -0,0 +1,342 @@ +"""Integration tests for station management functions in aimbat.core._station.""" + +import json +import uuid +import pytest +from sqlalchemy.exc import NoResultFound +from sqlmodel import Session, select + +from aimbat.core import get_active_event +from aimbat.core._station import ( + delete_station, + delete_station_by_id, + dump_station_table_to_json, + get_stations_in_active_event, + get_stations_in_event, + get_stations_with_event_seismogram_count, + print_station_table, +) +from aimbat.models import AimbatStation + + +@pytest.fixture +def session(loaded_session: Session) -> Session: + """Provides a session with multi-event data and an active event pre-loaded. + + Args: + loaded_session: A SQLModel Session with data populated. + + Returns: + The database session. 
+ """ + return loaded_session + + +@pytest.fixture +def station(session: Session) -> AimbatStation: + """Provides the first station associated with the active event. + + Args: + session: The database session. + + Returns: + The first AimbatStation in the active event. + """ + active_event = get_active_event(session) + return active_event.seismograms[0].station + + +class TestDeleteStation: + """Tests for deleting stations from the database.""" + + def test_delete_station(self, session: Session, station: AimbatStation) -> None: + """Verifies that a station is removed from the database. + + Args: + session: The database session. + station: The station to delete. + """ + station_id = station.id + delete_station(session, station) + assert ( + session.get(AimbatStation, station_id) is None + ), "Station should be absent after deletion" + + def test_delete_station_by_id( + self, session: Session, station: AimbatStation + ) -> None: + """Verifies that a station is removed when deleted by ID. + + Args: + session: The database session. + station: The station whose ID is used for deletion. + """ + station_id = station.id + delete_station_by_id(session, station_id) + assert ( + session.get(AimbatStation, station_id) is None + ), "Station should be absent after deletion by ID" + + def test_delete_station_by_id_not_found(self, session: Session) -> None: + """Verifies that deleting a non-existent station ID raises NoResultFound. + + Args: + session: The database session. + """ + with pytest.raises(NoResultFound): + delete_station_by_id(session, uuid.uuid4()) + + +class TestGetStationsInActiveEvent: + """Tests for retrieving stations in the active event.""" + + def test_returns_stations(self, session: Session) -> None: + """Verifies that stations for the active event are returned. + + Args: + session: The database session. 
+ """ + stations = get_stations_in_active_event(session, as_json=False) + assert len(stations) > 0, "Expected at least one station for the active event" + + def test_returns_aimbat_station_instances(self, session: Session) -> None: + """Verifies that all returned items are AimbatStation instances. + + Args: + session: The database session. + """ + stations = get_stations_in_active_event(session, as_json=False) + assert all( + isinstance(s, AimbatStation) for s in stations + ), "All returned items should be AimbatStation instances" + + def test_as_json_returns_list_of_dicts(self, session: Session) -> None: + """Verifies that as_json=True returns a list of dicts. + + Args: + session: The database session. + """ + result = get_stations_in_active_event(session, as_json=True) + assert isinstance(result, list), "Expected a list when as_json=True" + assert all( + isinstance(item, dict) for item in result + ), "Each element should be a dict when as_json=True" + + def test_as_json_count_matches_objects(self, session: Session) -> None: + """Verifies that as_json=True and as_json=False return the same number of stations. + + Args: + session: The database session. + """ + objects = get_stations_in_active_event(session, as_json=False) + json_list = get_stations_in_active_event(session, as_json=True) + assert len(objects) == len( + json_list + ), "Object and JSON representations should have the same length" + + def test_stations_belong_to_active_event(self, session: Session) -> None: + """Verifies that the returned stations are associated with the active event. + + Args: + session: The database session. 
+ """ + active_event = get_active_event(session) + active_station_ids = {s.station_id for s in active_event.seismograms} + stations = get_stations_in_active_event(session, as_json=False) + returned_ids = {s.id for s in stations} + assert ( + returned_ids == active_station_ids + ), "Returned station IDs should match those linked to the active event" + + +class TestGetStationsInEvent: + """Tests for retrieving stations in a specific event.""" + + def test_returns_stations_for_event(self, session: Session) -> None: + """Verifies that stations for the given event are returned. + + Args: + session: The database session. + """ + active_event = get_active_event(session) + stations = get_stations_in_event(session, active_event) + assert len(stations) > 0, "Expected at least one station for the given event" + + def test_returns_aimbat_station_instances(self, session: Session) -> None: + """Verifies that all returned items are AimbatStation instances. + + Args: + session: The database session. + """ + active_event = get_active_event(session) + stations = get_stations_in_event(session, active_event) + assert all( + isinstance(s, AimbatStation) for s in stations + ), "All returned items should be AimbatStation instances" + + def test_station_ids_match_event_seismograms(self, session: Session) -> None: + """Verifies that station IDs match those linked to the event's seismograms. + + Args: + session: The database session. 
+ """ + active_event = get_active_event(session) + expected_ids = {s.station_id for s in active_event.seismograms} + returned_ids = {s.id for s in get_stations_in_event(session, active_event)} + assert ( + returned_ids == expected_ids + ), "Station IDs should match those linked to the event's seismograms" + + +class TestGetStationsWithEventSeismogramCount: + """Tests for retrieving stations with associated seismogram and event counts.""" + + def test_returns_all_stations(self, session: Session) -> None: + """Verifies that all stations in the database are returned. + + Args: + session: The database session. + """ + all_stations = session.exec(select(AimbatStation)).all() + results = get_stations_with_event_seismogram_count(session, as_json=False) + assert len(results) == len( + all_stations + ), "Expected one row per station in the database" + + def test_returns_tuples_with_counts(self, session: Session) -> None: + """Verifies that each result is a tuple of (AimbatStation, int, int). + + Args: + session: The database session. + """ + results = get_stations_with_event_seismogram_count(session, as_json=False) + for row in results: + station, seismogram_count, event_count = row + assert isinstance( + station, AimbatStation + ), "First element should be an AimbatStation" + assert isinstance( + seismogram_count, int + ), "Second element should be an int (seismogram count)" + assert isinstance( + event_count, int + ), "Third element should be an int (event count)" + + def test_counts_are_non_negative(self, session: Session) -> None: + """Verifies that all seismogram and event counts are non-negative. + + Args: + session: The database session. 
+ """ + results = get_stations_with_event_seismogram_count(session, as_json=False) + for _, seismogram_count, event_count in results: + assert seismogram_count >= 0, "Seismogram count should be non-negative" + assert event_count >= 0, "Event count should be non-negative" + + def test_as_json_returns_list_of_dicts(self, session: Session) -> None: + """Verifies that as_json=True returns a list of dicts with count fields. + + Args: + session: The database session. + """ + results = get_stations_with_event_seismogram_count(session, as_json=True) + assert isinstance(results, list), "Expected a list when as_json=True" + for item in results: + assert isinstance(item, dict), "Each element should be a dict" + assert "seismogram_count" in item, "Dict should contain 'seismogram_count'" + assert "event_count" in item, "Dict should contain 'event_count'" + + def test_as_json_count_matches_objects(self, session: Session) -> None: + """Verifies that both return modes yield the same number of rows. + + Args: + session: The database session. + """ + objects = get_stations_with_event_seismogram_count(session, as_json=False) + json_list = get_stations_with_event_seismogram_count(session, as_json=True) + assert len(objects) == len( + json_list + ), "Object and JSON representations should have the same number of rows" + + +class TestDumpStationTableToJson: + """Tests for dumping the full station table to JSON.""" + + def test_returns_valid_json_string(self, session: Session) -> None: + """Verifies that the result is a valid JSON string. + + Args: + session: The database session. + """ + result = dump_station_table_to_json(session) + assert isinstance(result, str), "Expected a string result" + parsed = json.loads(result) + assert isinstance(parsed, list), "Parsed JSON should be a list" + + def test_entry_count_matches_database(self, session: Session) -> None: + """Verifies that the JSON contains one entry per station in the database. + + Args: + session: The database session. 
+ """ + all_stations = session.exec(select(AimbatStation)).all() + result = json.loads(dump_station_table_to_json(session)) + assert len(result) == len( + all_stations + ), "JSON entry count should match station count in the database" + + def test_entries_contain_id_field(self, session: Session) -> None: + """Verifies that each entry in the JSON has an 'id' field. + + Args: + session: The database session. + """ + result = json.loads(dump_station_table_to_json(session)) + for entry in result: + assert "id" in entry, "Each station entry should have an 'id' field" + + +class TestPrintStationTable: + """Tests for printing the station table.""" + + def test_print_active_event_short( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that output is produced for the active event with short=True. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_station_table(session, short=True, all_events=False) + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output when printing station table (short, active event)" + + def test_print_active_event_long( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that output is produced for the active event with short=False. + + Args: + session: The database session. + capsys: The pytest capsys fixture. + """ + print_station_table(session, short=False, all_events=False) + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output when printing station table (long, active event)" + + def test_print_all_events( + self, session: Session, capsys: pytest.CaptureFixture + ) -> None: + """Verifies that output is produced when printing stations for all events. + + Args: + session: The database session. + capsys: The pytest capsys fixture. 
+ """ + print_station_table(session, short=False, all_events=True) + assert ( + len(capsys.readouterr().out) > 0 + ), "Expected output when printing station table for all events" diff --git a/tests/lib/test_lib_common.py b/tests/lib/test_lib_common.py deleted file mode 100644 index 7127216b..00000000 --- a/tests/lib/test_lib_common.py +++ /dev/null @@ -1,80 +0,0 @@ -from aimbat.models import AimbatStation -from sqlmodel import Session -from sqlalchemy import Engine -from collections.abc import Iterator -import pytest -import uuid - -UUID1 = uuid.UUID("11e6ca37-e03b-42b6-acc4-e9eaba5c1587") -UUID2 = uuid.UUID("12e6ca37-e03b-42b6-acc4-e9eaba5c1587") - - -class TestUuidFunctions: - @pytest.fixture - def session_with_stations( - self, fixture_engine_session_with_project: tuple[Engine, Session] - ) -> Iterator[Session]: - station_1 = AimbatStation( - id=UUID1, - name="TEST1", - network="TE", - channel="BHZ", - location="", - latitude=12, - longitude=12, - elevation=12, - ) - station_2 = AimbatStation( - id=UUID2, - name="TEST2", - network="TE", - channel="BHZ", - location="", - latitude=12, - longitude=12, - elevation=12, - ) - _, session = fixture_engine_session_with_project - session.add_all([station_1, station_2]) - session.commit() - yield session - - @pytest.mark.parametrize( - "uuid_str,expected", - [ - (str(UUID1)[:2], UUID1), - (str(UUID2)[:2], UUID2), - (str(UUID1), UUID1), - (str(UUID1)[:1], ValueError), - (str(UUID2)[:1], ValueError), - (str(uuid.uuid4()), ValueError), - ], - ) - def test_string_to_uuid( - self, - session_with_stations: Session, - uuid_str: str, - expected: uuid.UUID | Exception, - ) -> None: - from aimbat.utils import string_to_uuid - - if isinstance(expected, type) and issubclass(expected, Exception): - with pytest.raises(expected): - string_to_uuid(session_with_stations, uuid_str, AimbatStation) - else: - assert ( - string_to_uuid(session_with_stations, uuid_str, AimbatStation) - == expected - ) - - @pytest.mark.parametrize("test_uuid", 
[UUID1, UUID2]) - def test_uuid_shortener( - self, session_with_stations: Session, test_uuid: uuid.UUID - ) -> None: - from aimbat.utils import uuid_shortener - - aimbat_station = session_with_stations.get(AimbatStation, test_uuid) - assert aimbat_station is not None - assert ( - uuid_shortener(session_with_stations, aimbat_station) == str(test_uuid)[:2] - ) diff --git a/tests/test_data.py b/tests/test_data.py deleted file mode 100644 index ed6f5ba0..00000000 --- a/tests/test_data.py +++ /dev/null @@ -1,218 +0,0 @@ -from aimbat.app import app -from aimbat.aimbat_types import DataType -from aimbat.models import AimbatDataSource -from pysmo.classes import SAC -from sqlalchemy.exc import NoResultFound -from sqlmodel import select, Session -from sqlalchemy import Engine -from pathlib import Path -from pydantic import TypeAdapter -import aimbat.core._data as data -import pytest -import numpy as np -import json - - -class TestDataBase: - """Base class for testing the data module.""" - - -class TestDataAdd(TestDataBase): - def test_lib_add_sac_file_to_project( - self, - sac_file_good: Path, - fixture_engine_session_with_project: tuple[Engine, Session], - ) -> None: - engine, session = fixture_engine_session_with_project - - # do this 2 times to verify nothing changes - for _ in range(2): - data.add_files_to_project( - session, - [sac_file_good], - datatype=DataType.SAC, - ) - - seismogram_filename = session.exec( - select(AimbatDataSource.sourcename) - ).one() - assert seismogram_filename == str(sac_file_good) - - def test_cli_data_add( - self, - sac_file_good: Path, - fixture_engine_session_with_project: tuple[Engine, Session], - ) -> None: - sac_file_good_as_string = str(sac_file_good) - engine, session = fixture_engine_session_with_project - - with pytest.raises(SystemExit) as excinfo: - app(["data", "add", "--no-progress", sac_file_good_as_string]) - - assert excinfo.value.code == 0 - session.flush() - - seismogram_filename = 
session.exec(select(AimbatDataSource.sourcename)).one() - assert seismogram_filename == str(sac_file_good) - - -class TestDataTable(TestDataBase): - def test_lib_print_data_table_without_active_event( - self, - fixture_session_with_data: Session, - capsys: pytest.CaptureFixture, - ) -> None: - - session = fixture_session_with_data - # no event active - with pytest.raises(NoResultFound): - data.print_data_table(session, False) - - data.print_data_table(session, False, True) - captured = capsys.readouterr() - assert "AIMBAT data for all events" in captured.out - - @pytest.mark.parametrize( - "short, all_events, expected", - [ - (True, True, "AIMBAT data for all events"), - (True, False, "AIMBAT data for event 2011-09-15 19:31:04"), - (False, True, "AIMBAT data for all events"), - (True, False, "AIMBAT data for event 2011-09-15 19:31:04"), - ], - ) - def test_lib_print_data_table_with_active_event( - self, - fixture_engine_session_with_active_event: tuple[Engine, Session], - capsys: pytest.CaptureFixture, - short: bool, - all_events: bool, - expected: str, - ) -> None: - _, session = fixture_engine_session_with_active_event - data.print_data_table(session, short, all_events) - captured = capsys.readouterr() - assert expected in captured.out - - @pytest.mark.parametrize( - "cli_args,expected", - [ - (["--all", "--no-short"], "AIMBAT data for all events"), - (["--no-short"], "AIMBAT data for event 2011-09-15 19:31:04.080000+00:00"), - (["--all"], "AIMBAT data for all events"), - ([], "AIMBAT data for event 2011-09-15 19:31:04"), - ], - ) - def test_cli_data_list( - self, - fixture_engine_session_with_active_event: tuple[Engine, Session], - capsys: pytest.CaptureFixture, - cli_args: list[str], - expected: str, - ) -> None: - cmd = ["data", "list"] - cmd.extend(cli_args) - with pytest.raises(SystemExit) as excinfo: - app(cmd) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert expected in captured.out - - -class TestDataDump(TestDataBase): - def 
test_lib_dump_data( - self, - fixture_session_with_data: Session, - ) -> None: - json_data = data.dump_data_table_to_json(fixture_session_with_data) - adapter = TypeAdapter(list[AimbatDataSource]) - adapter.validate_json(json_data) - - def test_cli_dump_data( - self, - fixture_session_with_data: Session, - capsys: pytest.CaptureFixture, - ) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["data", "dump"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - loaded_json = json.loads(captured.out) - assert isinstance(loaded_json, list) - assert len(loaded_json) > 0 - for i in loaded_json: - _ = AimbatDataSource(**i) - - -class TestDataCompare(TestDataBase): - def test_compare_aimbat_seis_to_sac_seis( - self, - sac_file_good: Path, - sac_instance_good: SAC, - fixture_engine_session_with_project: tuple[Engine, Session], - ) -> None: - from aimbat.models import AimbatSeismogram - - _, session = fixture_engine_session_with_project - - data.add_files_to_project( - session, - [sac_file_good], - datatype=DataType.SAC, - ) - - sac_seismogram = sac_instance_good.seismogram - aimbat_seismogram = session.exec(select(AimbatSeismogram)).one() - - assert np.array_equal(aimbat_seismogram.data, sac_seismogram.data) - assert aimbat_seismogram.delta == sac_seismogram.delta - assert ( - pytest.approx(aimbat_seismogram.begin_time.timestamp()) - == sac_seismogram.begin_time.timestamp() - ) - assert len(aimbat_seismogram) == len(sac_seismogram) - - def test_compare_aimbat_station_to_sac_station( - self, - sac_file_good: Path, - sac_instance_good: SAC, - fixture_engine_session_with_project: tuple[Engine, Session], - ) -> None: - from aimbat.models import AimbatStation, AimbatSeismogram - - _, session = fixture_engine_session_with_project - - data.add_files_to_project(session, [sac_file_good], datatype=DataType.SAC) - - sac_station = sac_instance_good.station - aimbat_seismogram = session.exec(select(AimbatSeismogram)).one() - aimbat_station = 
session.exec(select(AimbatStation)).one() - assert aimbat_seismogram.station == aimbat_station - assert aimbat_station.name == sac_instance_good.kstnm - assert aimbat_station.latitude == sac_station.latitude - assert aimbat_station.longitude == sac_station.longitude - assert aimbat_station.elevation == sac_station.elevation - - def test_compare_aimbat_event_to_sac_event( - self, - sac_file_good: Path, - sac_instance_good: SAC, - fixture_engine_session_with_project: tuple[Engine, Session], - ) -> None: - from aimbat.models import AimbatEvent, AimbatSeismogram - - _, session = fixture_engine_session_with_project - - data.add_files_to_project(session, [sac_file_good], datatype=DataType.SAC) - - sac_event = sac_instance_good.event - aimbat_seismogram = session.exec(select(AimbatSeismogram)).one() - aimbat_event = session.exec(select(AimbatEvent)).one() - assert aimbat_seismogram.event == aimbat_event - assert aimbat_event.latitude == sac_event.latitude - assert aimbat_event.longitude == sac_event.longitude - assert aimbat_event.depth == sac_event.depth diff --git a/tests/test_event.py b/tests/test_event.py deleted file mode 100644 index 70a38127..00000000 --- a/tests/test_event.py +++ /dev/null @@ -1,344 +0,0 @@ -from aimbat import settings -from aimbat.utils import get_active_event -from aimbat.models import AimbatEvent, AimbatStation, AimbatSeismogram -from aimbat.aimbat_types import EventParameter -from pydantic_core import ValidationError -from sqlmodel import Session, select -from sqlalchemy.exc import NoResultFound -from typing import Any -from collections.abc import Generator, Sequence -from pydantic import TypeAdapter -import aimbat.core._event as event -import pytest -import random -import json - - -class TestEventBase: - @pytest.fixture - def session( - self, fixture_session_with_data: Session - ) -> Generator[Session, Any, Any]: - yield fixture_session_with_data - - def get_random_station(self, session: Session) -> AimbatStation: - stations = 
session.exec(select(AimbatStation)).all() - return random.choice(stations) - - def get_random_event(self, session: Session) -> AimbatEvent: - events = session.exec(select(AimbatEvent)).all() - return random.choice(events) - - def activate_random_event(self, session: Session) -> AimbatEvent: - random_event = self.get_random_event(session) - event.set_active_event(session, random_event) - return random_event - - -class TestDeleteEvent(TestEventBase): - def test_lib_delete_event_by_id(self, session: Session) -> None: - aimbat_event = self.get_random_event(session) - id = aimbat_event.id - event.delete_event_by_id(session, id) - assert ( - session.exec(select(AimbatEvent).where(AimbatEvent.id == id)).one_or_none() - is None - ) - - def test_cli_delete_event_by_id(self, session: Session) -> None: - from aimbat.app import app - - aimbat_event = self.get_random_event(session) - id = aimbat_event.id - - with pytest.raises(SystemExit) as excinfo: - app(["event", "delete", str(id)]) - - assert excinfo.value.code == 0 - - session.flush() - assert ( - session.exec(select(AimbatEvent).where(AimbatEvent.id == id)).one_or_none() - is None - ) - - def test_cli_delete_event_by_id_with_wrong_id(self, session: Session) -> None: - from aimbat.app import app - from uuid import uuid4 - - id = uuid4() - - with pytest.raises(NoResultFound): - app(["event", "delete", str(id)]) - - def test_cli_delete_event_by_string(self, session: Session) -> None: - from aimbat.app import app - - aimbat_event = random.choice(list(session.exec(select(AimbatEvent)))) - id = aimbat_event.id - - with pytest.raises(SystemExit) as excinfo: - app(["event", "delete", str(id)[:5]]) - - assert excinfo.value.code == 0 - - session.flush() - assert ( - session.exec(select(AimbatEvent).where(AimbatEvent.id == id)).one_or_none() - is None - ) - - -class TestGetActiveEvent(TestEventBase): - def test_get_active_event_when_none_is_active(self, session: Session) -> None: - events = session.exec(select(AimbatEvent)).all() - 
assert all(e.active is None for e in events) - - with pytest.raises(NoResultFound): - get_active_event(session) - - -class TestSetActiveEvent(TestEventBase): - def test_lib_set_active_event(self, session: Session) -> None: - events = session.exec(select(AimbatEvent)).all() - assert all(e.active is None for e in events) - aimbat_event = random.choice(events) - - event.set_active_event(session, aimbat_event) - assert aimbat_event.active is True - - def test_lib_change_active_event(self, session: Session) -> None: - events = list(session.exec(select(AimbatEvent)).all()) - assert all(e.active is None for e in events) - random.shuffle(events) - - first_active_event = events.pop() - second_active_event = events.pop() - - event.set_active_event(session, first_active_event) - assert first_active_event.active is True - - event.set_active_event(session, second_active_event) - assert first_active_event.active is None - assert second_active_event.active is True - - def test_lib_set_active_event_by_id(self, session: Session) -> None: - import uuid - - events = list(session.exec(select(AimbatEvent)).all()) - assert all(e.active is None for e in events) - aimbat_event = random.choice(events) - - event.set_active_event_by_id(session, aimbat_event.id) - assert aimbat_event.active is True - - with pytest.raises(ValueError): - event.set_active_event_by_id(session, uuid.uuid4()) - - def test_cli_event_activate(self, session: Session) -> None: - from aimbat.app import app - - event = self.get_random_event(session) - assert event.active is None - - with pytest.raises(SystemExit) as excinfo: - app(["event", "activate", str(event.id)]) - - assert excinfo.value.code == 0 - - session.refresh(event) - assert event.active is True - - def test_cli_event_activate_with_str_id(self, session: Session) -> None: - from aimbat.app import app - - event = self.get_random_event(session) - assert event.active is None - short_uuid = str(event.id)[:6] - - with pytest.raises(SystemExit) as excinfo: - 
app(["event", "activate", short_uuid]) - - assert excinfo.value.code == 0 - - session.refresh(event) - assert event.active is True - - -class TestGetCompletedEvents(TestEventBase): - def test_get_completed_events(self, session: Session) -> None: - assert len(event.get_completed_events(session)) == 0 - events = list(session.exec(select(AimbatEvent)).all()) - aimbat_event = random.choice(events) - aimbat_event.parameters.completed = True - session.commit() - assert len(event.get_completed_events(session)) == 1 - assert event.get_completed_events(session)[0].id == aimbat_event.id - - -class TestGetEvents(TestEventBase): - @pytest.fixture - def all_events( - self, session: Session - ) -> Generator[Sequence[AimbatEvent], Any, Any]: - from aimbat.models import AimbatEvent - - yield session.exec(select(AimbatEvent)).all() - - @pytest.fixture - def all_seismograms( - self, session: Session - ) -> Generator[Sequence[AimbatSeismogram], Any, Any]: - from aimbat.models import AimbatSeismogram - - yield session.exec(select(AimbatSeismogram)).all() - - def test_lib_get_events_using_station( - self, session: Session, all_seismograms: Sequence[AimbatSeismogram] - ) -> None: - station = self.get_random_station(session) - - event_set1 = set( - s.event.id for s in all_seismograms if s.station.id == station.id - ) - event_set2 = set(e.id for e in event.get_events_using_station(session, station)) - - assert event_set1 == event_set2 - - -class TestGetEventParameter(TestEventBase): - def test_lib_get_event_parameter(self, session: Session) -> None: - aimbat_event = self.activate_random_event(session) - - assert ( - event.get_event_parameter(session, EventParameter.COMPLETED) - == aimbat_event.parameters.completed - ) - assert ( - event.get_event_parameter(session, EventParameter.MIN_CCNORM) - == aimbat_event.parameters.min_ccnorm - ) - assert ( - event.get_event_parameter(session, EventParameter.WINDOW_POST) - == aimbat_event.parameters.window_post - ) - assert ( - 
event.get_event_parameter(session, EventParameter.WINDOW_PRE) - == aimbat_event.parameters.window_pre - ) - - def test_lib_set_event_parameter(self, session: Session) -> None: - _ = self.activate_random_event(session) - - assert event.get_event_parameter(session, EventParameter.COMPLETED) is False - event.set_event_parameter(session, EventParameter.COMPLETED, True) - assert event.get_event_parameter(session, EventParameter.COMPLETED) is True - with pytest.raises(ValidationError): - event.set_event_parameter(session, EventParameter.COMPLETED, "foo") - - def test_lib_print_event_table( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - _ = self.activate_random_event(session) - - event.print_event_table(session, short=True) - captured = capsys.readouterr() - assert "AIMBAT Events" in captured.out - assert "2012-01-12 19:31:04" in captured.out - event.print_event_table(session, short=False) - captured = capsys.readouterr() - assert "AIMBAT Events" in captured.out - assert "2011-09-15 19:31:04.080000+00:00" in captured.out - - def test_cli_get_event_parameter( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - from aimbat.app import app - - _ = self.activate_random_event(session) - - with pytest.raises(SystemExit) as excinfo: - app(["event", "parameter", "get", "completed"]) - - assert excinfo.value.code == 0 - assert "False" in capsys.readouterr().out - - with pytest.raises(SystemExit) as excinfo: - app(["event", "parameter", "get", "window_post"]) - - assert excinfo.value.code == 0 - assert f"{settings.window_post.total_seconds()}s" in capsys.readouterr().out - - -class TestCliEvent(TestEventBase): - def test_cli_usage(self, capsys: pytest.CaptureFixture) -> None: - from aimbat.app import app - - with pytest.raises(SystemExit) as excinfo: - app(["event", "--help"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "Usage" in captured.out - - def test_cli_set_event_parameter( - self, session: Session, 
capsys: pytest.CaptureFixture - ) -> None: - from aimbat.app import app - - _ = self.activate_random_event(session) - - with pytest.raises(SystemExit) as excinfo: - app(["event", "parameter", "get", "completed"]) - - assert excinfo.value.code == 0 - assert "False" in capsys.readouterr().out - - with pytest.raises(SystemExit) as excinfo: - app(["event", "parameter", "set", "completed", "True"]) - - assert excinfo.value.code == 0 - - with pytest.raises(SystemExit) as excinfo: - app(["event", "parameter", "get", "completed"]) - - assert excinfo.value.code == 0 - assert "True" in capsys.readouterr().out - - def test_cli_event_list( - self, - session: Session, - capsys: pytest.CaptureFixture, - ) -> None: - from aimbat.app import app - - with pytest.raises(SystemExit) as excinfo: - app(["event", "list"]) - - assert excinfo.value.code == 0 - assert "AIMBAT Events" in capsys.readouterr().out - - -class TestEventDump(TestEventBase): - def test_lib_dump_event(self, fixture_session_with_data: Session) -> None: - json_data = event.dump_event_table_to_json(fixture_session_with_data) - adapter = TypeAdapter(list[AimbatEvent]) - adapter.validate_json(json_data) - - def test_cli_dump_data( - self, fixture_session_with_data: Session, capsys: pytest.CaptureFixture - ) -> None: - from aimbat.app import app - - with pytest.raises(SystemExit) as excinfo: - app(["event", "dump"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - loaded_json = json.loads(captured.out) - assert isinstance(loaded_json, list) - assert len(loaded_json) > 0 - for i in loaded_json: - _ = AimbatEvent(**i) diff --git a/tests/test_iccs.py b/tests/test_iccs.py deleted file mode 100644 index 74b06d09..00000000 --- a/tests/test_iccs.py +++ /dev/null @@ -1,62 +0,0 @@ -from aimbat.models import AimbatSeismogram -from aimbat.aimbat_types import SeismogramParameter -from pysmo.tools.iccs import ICCSSeismogram -from sqlmodel import Session, select -from sqlalchemy import Engine -from pandas import 
Timedelta -from typing import Any -from collections.abc import Generator -import pytest -import random - - -class TestICCSBase: - @pytest.fixture - def random_aimbat_seismogram( - self, fixture_engine_session_with_active_event: tuple[Engine, Session] - ) -> Generator[AimbatSeismogram, Any, Any]: - from aimbat.models import AimbatSeismogram - - _, session = fixture_engine_session_with_active_event - - yield random.choice(list(session.exec(select(AimbatSeismogram)).all())) - - -class TestAimbatSeismogramIsICCSSeismogram(TestICCSBase): - def test_is_iccs_seismogram_instance( - self, random_aimbat_seismogram: AimbatSeismogram - ) -> None: - assert isinstance(random_aimbat_seismogram, ICCSSeismogram) - - @pytest.mark.parametrize( - "parameter, expected", - [ - (SeismogramParameter.SELECT, True), - (SeismogramParameter.FLIP, False), - (SeismogramParameter.T1, None), - ], - ) - def test_read_iccs_parameters( - self, - random_aimbat_seismogram: AimbatSeismogram, - parameter: SeismogramParameter, - expected: Any, - ) -> None: - assert getattr(random_aimbat_seismogram, parameter) == expected - - @pytest.mark.parametrize( - "parameter, new_value", - [ - (SeismogramParameter.SELECT, False), - (SeismogramParameter.FLIP, True), - (SeismogramParameter.T1, Timedelta(seconds=2)), - ], - ) - def test_write_iccs_parameters( - self, - random_aimbat_seismogram: AimbatSeismogram, - parameter: SeismogramParameter, - new_value: Any, - ) -> None: - setattr(random_aimbat_seismogram, parameter, new_value) - assert getattr(random_aimbat_seismogram, parameter) == new_value diff --git a/tests/test_io.py b/tests/test_io.py deleted file mode 100644 index 111b777b..00000000 --- a/tests/test_io.py +++ /dev/null @@ -1,78 +0,0 @@ -from aimbat.models import AimbatSeismogram -from aimbat.aimbat_types import DataType -from pysmo.classes import SAC, SacSeismogram -from pysmo import Seismogram -from sqlmodel import Session, select -from sqlalchemy import Engine -from sqlalchemy.exc import StatementError 
-from typing import Any -from collections.abc import Generator -from pathlib import Path -import aimbat.core._data as data -import numpy as np -import pytest - - -class TestSacBase: - @pytest.fixture - def aimbat_seismogram_from_sac( - self, - fixture_engine_session_with_project: tuple[Engine, Session], - sac_file_good: Path, - ) -> Generator[AimbatSeismogram, Any, Any]: - - _, session = fixture_engine_session_with_project - data.add_files_to_project(session, [sac_file_good], DataType.SAC) - aimbat_file = session.exec(select(AimbatSeismogram)).one() - yield aimbat_file - - @pytest.fixture - def sac_seismogram(self, sac_file_good: Path) -> Generator[SacSeismogram, Any, Any]: - sac_seismogram = SAC.from_file(sac_file_good).seismogram - yield sac_seismogram - - -class TestSacRead(TestSacBase): - def test_parameters_are_equal( - self, - sac_seismogram: SacSeismogram, - aimbat_seismogram_from_sac: AimbatSeismogram, - ) -> None: - assert isinstance(aimbat_seismogram_from_sac, Seismogram) - assert sac_seismogram.delta == aimbat_seismogram_from_sac.delta - assert ( - pytest.approx(sac_seismogram.begin_time.timestamp()) - == aimbat_seismogram_from_sac.begin_time.timestamp() - ) - assert ( - pytest.approx(sac_seismogram.end_time.timestamp()) - == aimbat_seismogram_from_sac.end_time.timestamp() - ) - assert len(sac_seismogram) == len(aimbat_seismogram_from_sac) - - -class TestSacWrite(TestSacBase): - def test_random_data( - self, - sac_file_good: Path, - aimbat_seismogram_from_sac: AimbatSeismogram, - ) -> None: - new_data = np.random.rand(len(aimbat_seismogram_from_sac)) - aimbat_seismogram_from_sac.data = new_data - assert np.allclose(new_data, aimbat_seismogram_from_sac.data) - sac_seismogram = SAC.from_file(sac_file_good).seismogram - assert np.allclose(sac_seismogram.data, aimbat_seismogram_from_sac.data) - - -class TestSacBadFile(TestSacBase): - def test_t0_missing( - self, - sac_file_good: Path, - fixture_engine_session_with_project: tuple[Engine, Session], - ) -> 
None: - _, session = fixture_engine_session_with_project - sac = SAC.from_file(sac_file_good) - sac.t0 = None - sac.write(sac_file_good) - with pytest.raises(StatementError): - data.add_files_to_project(session, [sac_file_good], DataType.SAC) diff --git a/tests/test_models.py b/tests/test_models.py deleted file mode 100644 index 83845e8a..00000000 --- a/tests/test_models.py +++ /dev/null @@ -1,54 +0,0 @@ -from aimbat.models import AimbatSeismogram -from aimbat.utils import get_active_event -from typing import Any -from collections.abc import Generator -from sqlmodel import Session -from sqlalchemy import Engine -import numpy as np -import pytest -import random - - -class TestModelsBase: - @pytest.fixture - def session( - self, fixture_engine_session_with_active_event: tuple[Engine, Session] - ) -> Generator[Session, Any, Any]: - session = fixture_engine_session_with_active_event[1] - yield session - - -class TestAimbatSeismogram(TestModelsBase): - @pytest.fixture - def random_seismogram( - self, session: Session - ) -> Generator[AimbatSeismogram, Any, Any]: - - yield random.choice(list(get_active_event(session).seismograms)) - - def test_lib_get_seismogram_data_with_no_datasource( - self, random_seismogram: AimbatSeismogram - ) -> None: - _ = random_seismogram.data - random_seismogram.datasource = None # type: ignore - - with pytest.raises(ValueError): - _ = random_seismogram.data - - def test_lib_set_seismogram_data_with_no_datasource( - self, random_seismogram: AimbatSeismogram - ) -> None: - _ = random_seismogram.data - random_seismogram.datasource = None # type: ignore - - with pytest.raises(ValueError): - random_seismogram.data = np.array([1, 2, 3]) - - def test_lib_get_seismogram_begin_time_with_zero_length_data( - self, - random_seismogram: AimbatSeismogram, - monkeypatch: pytest.MonkeyPatch, - ) -> None: - monkeypatch.setattr(random_seismogram, "data", np.array([], dtype=np.float32)) - - assert random_seismogram.begin_time == random_seismogram.end_time diff 
--git a/tests/test_project.py b/tests/test_project.py deleted file mode 100644 index 42c36651..00000000 --- a/tests/test_project.py +++ /dev/null @@ -1,135 +0,0 @@ -from sqlalchemy import Engine -from aimbat.app import app -from pathlib import Path -from sqlmodel import Session -import aimbat.core._project as project -import pytest - - -class TestProjectBase: - """Base class for project tests.""" - - -class TestProjectExists(TestProjectBase): - def test_lib_project_exists_if_false( - self, fixture_empty_db: tuple[Engine, Session] - ) -> None: - - engine, _ = fixture_empty_db - - assert project._project_exists(engine) is False - - def test_lib_project_exists_if_true( - self, fixture_empty_db: tuple[Engine, Session] - ) -> None: - engine, _ = fixture_empty_db - project.create_project(engine) - assert project._project_exists(engine) is True - - -class TestProjectCreate(TestProjectBase): - @pytest.mark.dependency(name="create_project") - def test_lib_create_project(self, fixture_empty_db: tuple[Engine, Session]) -> None: - engine, _ = fixture_empty_db - assert project._project_exists(engine) is False - project.create_project(engine) - assert project._project_exists(engine) is True - - def test_lib_create_project_when_one_exists( - self, fixture_empty_db: tuple[Engine, Session] - ) -> None: - engine, _ = fixture_empty_db - assert project._project_exists(engine) is False - project.create_project(engine) - assert project._project_exists(engine) is True - with pytest.raises(RuntimeError): - project.create_project(engine) - - def test_cli_create_project(self, fixture_empty_db: tuple[Engine, Session]) -> None: - engine, _ = fixture_empty_db - assert project._project_exists(engine) is False - with pytest.raises(SystemExit) as excinfo: - app(["project", "create"]) - assert excinfo.value.code == 0 - assert project._project_exists(engine) is True - - -class TestProjectDelete(TestProjectBase): - def test_lib_delete_project_file( - self, fixture_session_with_project_file: 
tuple[Engine, Session, Path] - ) -> None: - - engine, _, _ = fixture_session_with_project_file - - assert project._project_exists(engine) is True - - project.delete_project(engine) - assert project._project_exists(engine) is False - - def test_lib_delete_project( - self, fixture_engine_session_with_project: tuple[Engine, Session] - ) -> None: - engine, _ = fixture_engine_session_with_project - - assert project._project_exists(engine) is True - - project.delete_project(engine) - assert project._project_exists(engine) is False - - def test_cli_delete_project( - self, fixture_engine_session_with_project: tuple[Engine, Session] - ) -> None: - engine, _ = fixture_engine_session_with_project - assert project._project_exists(engine) is True - - with pytest.raises(SystemExit) as excinfo: - app(["project", "delete"]) - assert excinfo.value.code == 0 - assert project._project_exists(engine) is False - - -class TestProjectTable(TestProjectBase): - def test_lib_print_project_info_no_project( - self, fixture_empty_db: tuple[Engine, Session] - ) -> None: - engine, _ = fixture_empty_db - with pytest.raises(RuntimeError): - project.print_project_info(engine) - - def test_lib_print_project_info_with_empty_project( - self, - fixture_engine_session_with_project: tuple[Engine, Session], - capsys: pytest.CaptureFixture, - ) -> None: - engine, _ = fixture_engine_session_with_project - project.print_project_info(engine) - captured = capsys.readouterr() - assert "Project Info" in captured.out - assert "None" in captured.out - - def test_lib_print_project_info_with_active_event( - self, - fixture_engine_session_with_active_event: tuple[Engine, Session], - capsys: pytest.CaptureFixture, - ) -> None: - engine, _ = fixture_engine_session_with_active_event - project.print_project_info(engine) - captured = capsys.readouterr() - assert "Project Info" in captured.out - assert "(3/0)" in captured.out - - def test_cli_print_project_info_with_active_event( - self, - 
fixture_engine_session_with_active_event: tuple[Engine, Session], - capsys: pytest.CaptureFixture, - ) -> None: - engine, _ = fixture_engine_session_with_active_event - assert project._project_exists(engine) is True - - with pytest.raises(SystemExit) as excinfo: - app(["project", "info"]) - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "Project Info" in captured.out - assert "(3/0)" in captured.out diff --git a/tests/test_seismogram.py b/tests/test_seismogram.py deleted file mode 100644 index 84b5b67b..00000000 --- a/tests/test_seismogram.py +++ /dev/null @@ -1,360 +0,0 @@ -from aimbat.app import app -from aimbat.aimbat_types import SeismogramParameter -from aimbat.models import AimbatSeismogram -from sqlmodel import Session, select -from sqlalchemy import Engine -from typing import Any -from matplotlib.figure import Figure -from collections.abc import Generator -from pydantic import TypeAdapter -import aimbat.core._seismogram as seismogram -import pytest -import random -import json - - -class TestSeismogramBase: - @pytest.fixture(autouse=True) - def session( - self, fixture_engine_session_with_active_event: tuple[Engine, Session] - ) -> Generator[Session, Any, Any]: - session = fixture_engine_session_with_active_event[1] - yield session - - -class TestDeleteSeismogram(TestSeismogramBase): - def test_lib_delete_seismogram_by_id(self, session: Session) -> None: - aimbat_seismogram = random.choice(list(session.exec(select(AimbatSeismogram)))) - id = aimbat_seismogram.id - seismogram.delete_seismogram_by_id(session, id) - assert ( - session.exec( - select(AimbatSeismogram).where(AimbatSeismogram.id == id) - ).one_or_none() - is None - ) - - def test_cli_delete_seismogram_by_id(self, session: Session) -> None: - aimbat_seismogram = random.choice(list(session.exec(select(AimbatSeismogram)))) - id = aimbat_seismogram.id - - with pytest.raises(SystemExit) as excinfo: - app(["seismogram", "delete", str(id)]) - - assert excinfo.value.code == 0 - 
- session.flush() - assert ( - session.exec( - select(AimbatSeismogram).where(AimbatSeismogram.id == id) - ).one_or_none() - is None - ) - - def test_cli_delete_seismogram_by_id_with_wrong_id(self) -> None: - import uuid - - from aimbat import settings - - settings.log_level = "INFO" - - id = uuid.uuid4() - - with pytest.raises(SystemExit) as excinfo: - app(["seismogram", "delete", str(id)]) - - assert excinfo.value.code == 1 - - def test_cli_delete_seismogram_by_string(self, session: Session) -> None: - aimbat_seismogram = random.choice(list(session.exec(select(AimbatSeismogram)))) - id = aimbat_seismogram.id - - with pytest.raises(SystemExit) as excinfo: - app(["seismogram", "delete", str(id)[:5]]) - - assert excinfo.value.code == 0 - - session.flush() - assert ( - session.exec( - select(AimbatSeismogram).where(AimbatSeismogram.id == id) - ).one_or_none() - is None - ) - - -class TestGetSeismogramParameter(TestSeismogramBase): - @pytest.fixture - def random_seismogram( - self, session: Session - ) -> Generator[AimbatSeismogram, Any, Any]: - from aimbat.utils import get_active_event - - yield random.choice(list(get_active_event(session).seismograms)) - - @pytest.mark.parametrize( - "parameter, expected", - [ - (SeismogramParameter.SELECT, True), - (SeismogramParameter.FLIP, False), - (SeismogramParameter.T1, None), - ], - ) - def test_lib_get_seismogram_parameter( - self, - random_seismogram: AimbatSeismogram, - parameter: SeismogramParameter, - expected: Any, - ) -> None: - assert ( - seismogram.get_seismogram_parameter(random_seismogram, parameter) - == expected - ) - assert getattr(random_seismogram.parameters, parameter) == expected - - def test_lib_get_seismogram_parameter_by_id( - self, session: Session, random_seismogram: AimbatSeismogram - ) -> None: - import uuid - - assert ( - seismogram.get_seismogram_parameter_by_id( - session, random_seismogram.id, SeismogramParameter.SELECT - ) - is True - ) - - with pytest.raises(ValueError): - 
seismogram.get_seismogram_parameter_by_id( - session, uuid.uuid4(), SeismogramParameter.SELECT - ) - - def test_cli_get_seismogram_parameter_with_uuid( - self, random_seismogram: AimbatSeismogram, capsys: pytest.CaptureFixture - ) -> None: - with pytest.raises(SystemExit) as excinfo: - app( - [ - "seismogram", - "parameter", - "get", - str(random_seismogram.id), - SeismogramParameter.SELECT, - ] - ) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "True" in captured.out - - def test_cli_get_seismogram_parameter_with_string( - self, random_seismogram: AimbatSeismogram, capsys: pytest.CaptureFixture - ) -> None: - with pytest.raises(SystemExit) as excinfo: - app( - [ - "seismogram", - "parameter", - "get", - str(random_seismogram.id)[:6], - SeismogramParameter.SELECT, - ] - ) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "True" in captured.out - - -class TestSetSeismogramParameter(TestSeismogramBase): - @pytest.fixture - def random_seismogram( - self, session: Session - ) -> Generator[AimbatSeismogram, Any, Any]: - from aimbat.utils import get_active_event - - seismogram = random.choice(list(get_active_event(session).seismograms)) - assert seismogram.parameters.select is True - yield seismogram - - def test_lib_set_seismogram_parameter( - self, session: Session, random_seismogram: AimbatSeismogram - ) -> None: - seismogram.set_seismogram_parameter( - session, random_seismogram, SeismogramParameter.SELECT, False - ) - - assert ( - seismogram.get_seismogram_parameter( - random_seismogram, SeismogramParameter.SELECT - ) - is False - ) - - def test_lib_set_seismogram_parameter_by_id( - self, session: Session, random_seismogram: AimbatSeismogram - ) -> None: - import uuid - - seismogram.set_seismogram_parameter_by_id( - session, random_seismogram.id, SeismogramParameter.SELECT, False - ) - - assert ( - seismogram.get_seismogram_parameter( - random_seismogram, SeismogramParameter.SELECT - ) - is False - ) - - 
with pytest.raises(ValueError): - seismogram.set_seismogram_parameter_by_id( - session, uuid.uuid4(), SeismogramParameter.SELECT, False - ) - - def test_cli_set_seismogram_parameter_with_uuid( - self, random_seismogram: AimbatSeismogram, session: Session - ) -> None: - with pytest.raises(SystemExit) as excinfo: - app( - [ - "seismogram", - "parameter", - "set", - str(random_seismogram.id), - SeismogramParameter.SELECT, - "False", - ] - ) - - assert excinfo.value.code == 0 - - session.refresh(random_seismogram) - assert ( - seismogram.get_seismogram_parameter( - random_seismogram, SeismogramParameter.SELECT - ) - is False - ) - - def test_cli_set_seismogram_parameter_with_string( - self, random_seismogram: AimbatSeismogram, session: Session - ) -> None: - with pytest.raises(SystemExit) as excinfo: - app( - [ - "seismogram", - "parameter", - "set", - str(random_seismogram.id)[:6], - SeismogramParameter.SELECT, - "False", - ] - ) - - assert excinfo.value.code == 0 - - session.refresh(random_seismogram) - assert ( - seismogram.get_seismogram_parameter( - random_seismogram, SeismogramParameter.SELECT - ) - is False - ) - - -class TestGetAllSelectedSeismograms(TestSeismogramBase): - def test_lib_get_selected_seismograms_for_active_event( - self, session: Session - ) -> None: - assert len(seismogram.get_selected_seismograms(session)) == 13 - - def test_lib_get_selected_seismograms_for_all_events( - self, session: Session - ) -> None: - assert len(seismogram.get_selected_seismograms(session, all_events=True)) == 20 - - -class TestPrintSeismogramTable(TestSeismogramBase): - def test_lib_print_seismogram_table_no_short( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - seismogram.print_seismogram_table(session, short=False, all_events=False) - captured = capsys.readouterr() - assert "AIMBAT seismograms for event" in captured.out - assert "ID (shortened)" not in captured.out - - def test_lib_print_seismogram_table_short( - self, session: Session, capsys: 
pytest.CaptureFixture - ) -> None: - seismogram.print_seismogram_table(session, short=True, all_events=False) - captured = capsys.readouterr() - assert "AIMBAT seismograms for event" in captured.out - assert "ID (shortened)" in captured.out - - def test_lib_print_seismogram_table_no_short_all_events( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - seismogram.print_seismogram_table(session, short=False, all_events=True) - captured = capsys.readouterr() - assert "AIMBAT seismograms for all events" in captured.out - assert "ID (shortened)" not in captured.out - - def test_lib_print_seismogram_table_short_all_events( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - seismogram.print_seismogram_table(session, short=True, all_events=True) - captured = capsys.readouterr() - assert "AIMBAT seismograms for all events" in captured.out - assert "ID (shortened)" in captured.out - - def test_cli_print_seismogram_table(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["seismogram", "list"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "AIMBAT seismograms for event" in captured.out - assert "ID (shortened)" in captured.out - - -class TestDumpSeismogram(TestSeismogramBase): - def test_lib_dump_data(self, session: Session) -> None: - json_data = seismogram.dump_seismogram_table_to_json(session) - type_adapter = TypeAdapter(list[AimbatSeismogram]) - type_adapter.validate_json(json_data) - - def test_cli_dump_data(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["seismogram", "dump"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - loaded_json = json.loads(captured.out) - assert isinstance(loaded_json, list) - assert len(loaded_json) > 0 - for i in loaded_json: - _ = AimbatSeismogram(**i) - - -class TestSeismogramPlot(TestSeismogramBase): - @pytest.mark.mpl_image_compare - def 
test_lib_plotseis_mpl(self, session: Session) -> Figure: - return seismogram.plot_all_seismograms(session) - - @pytest.mark.skip(reason="I con't know how to test QT yet.") - def test_lib_plotseis_qt( - self, - session: Session, - ) -> None: - _ = seismogram.plot_all_seismograms(session, use_qt=True) - - def test_cli_plotseis_mpl(self) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["plot", "data"]) - - assert excinfo.value.code == 0 diff --git a/tests/test_settings.py b/tests/test_settings.py deleted file mode 100644 index 7b93fc99..00000000 --- a/tests/test_settings.py +++ /dev/null @@ -1,51 +0,0 @@ -import pytest - - -class TestConfig: - @pytest.mark.parametrize( - "pretty, expected", - [(True, "AIMBAT project file location"), (False, 'AIMBAT_PROJECT="aimbat.db"')], - ) - def test_lib_print_defaults( - self, pretty: bool, expected: str, capsys: pytest.CaptureFixture - ) -> None: - from aimbat._config import print_settings_table - - print_settings_table(pretty) - output = capsys.readouterr().out - assert expected in output - - @pytest.mark.parametrize( - "pretty, expected", - [ - ("--pretty", "AIMBAT project file location"), - ("--no-pretty", 'AIMBAT_PROJECT="aimbat.db"'), - ], - ) - def test_cli_print_defaults( - self, pretty: str, expected: str, capsys: pytest.CaptureFixture - ) -> None: - from aimbat.app import app - - with pytest.raises(SystemExit) as excinfo: - app(["settings", pretty]) - - assert excinfo.value.code == 0 - - output = capsys.readouterr().out - assert expected in output - - @pytest.mark.parametrize("pretty", [True, False]) - def test_lib_print_defaults_without_env_prefix( - self, - pretty: bool, - monkeypatch: pytest.MonkeyPatch, - capsys: pytest.CaptureFixture, - ) -> None: - from aimbat._config import Settings, print_settings_table - - monkeypatch.delitem(Settings.model_config, "env_prefix") - - print_settings_table(pretty) - output = capsys.readouterr().out - assert "AIMBAT_" not in output diff --git a/tests/test_snapshot.py 
b/tests/test_snapshot.py deleted file mode 100644 index f617f6e1..00000000 --- a/tests/test_snapshot.py +++ /dev/null @@ -1,246 +0,0 @@ -from aimbat.app import app -from sqlmodel import Session -from sqlalchemy import Engine -from typing import Any -from collections.abc import Generator -import aimbat.core._snapshot as snapshot -import pytest - -RANDOM_COMMENT = "Random comment" - - -class TestSnapshotBase: - @pytest.fixture(autouse=True) - def session( - self, fixture_engine_session_with_active_event: tuple[Engine, Session] - ) -> Generator[Session, Any, Any]: - _, session = fixture_engine_session_with_active_event - yield session - - -class TestLibSnapshotGet(TestSnapshotBase): - def test_get_snapshots_when_there_are_none(self, session: Session) -> None: - assert snapshot.get_snapshots(session, all_events=True) == [] - - -class TestLibSnapshotCreate(TestSnapshotBase): - def test_create_snapshot(self, session: Session) -> None: - assert snapshot.get_snapshots(session) == [] - snapshot.create_snapshot(session) - snapshot.create_snapshot(session, comment=RANDOM_COMMENT) - test_snapshot1, test_snapshot2, *_ = snapshot.get_snapshots(session) - assert test_snapshot1.comment is None - assert test_snapshot2.comment == RANDOM_COMMENT - - -class TestLibSnapshotDelete(TestSnapshotBase): - def test_snapshot_delete(self, session: Session) -> None: - snapshot.create_snapshot(session) - snapshot.create_snapshot(session, comment=RANDOM_COMMENT) - test_snapshot1, test_snapshot2, *_ = snapshot.get_snapshots(session) - snapshot.delete_snapshot(session, test_snapshot1) - assert len(snapshot.get_snapshots(session)) == 1 - assert test_snapshot2 == snapshot.get_snapshots(session)[0] - - def test_delete_snapshot_by_id(self, session: Session) -> None: - snapshot.create_snapshot(session) - snapshot.create_snapshot(session, comment=RANDOM_COMMENT) - test_snapshot1, test_snapshot2, *_ = snapshot.get_snapshots(session) - snapshot.delete_snapshot_by_id(session, test_snapshot1.id) - assert 
len(snapshot.get_snapshots(session)) == 1 - assert test_snapshot2 == snapshot.get_snapshots(session)[0] - - def test_delete_snapshot_by_id_raises_with_random_id( - self, session: Session - ) -> None: - import uuid - - random_id = uuid.uuid4() - with pytest.raises(ValueError): - snapshot.delete_snapshot_by_id(session, random_id) - - -class TestLibSnapshotRollback(TestSnapshotBase): - def test_snapshot_rollback(self, session: Session) -> None: - from aimbat.utils import get_active_event - - active_event = get_active_event(session) - - assert active_event.parameters.completed is False - assert active_event.seismograms[0].parameters.select is True - - snapshot.create_snapshot(session) - - active_event.parameters.completed = True - active_event.seismograms[0].parameters.select = False - session.flush() - assert active_event.parameters.completed is True - assert active_event.seismograms[0].parameters.select is False - - test_snapshot, *_ = snapshot.get_snapshots(session) - snapshot.rollback_to_snapshot(session, test_snapshot) - assert active_event.parameters.completed is False - assert active_event.seismograms[0].parameters.select is True - - def test_rollback_to_snapshot_by_id(self, session: Session) -> None: - snapshot.create_snapshot(session) - test_snapshot, *_ = snapshot.get_snapshots(session) - snapshot.rollback_to_snapshot_by_id(session, test_snapshot.id) - - def test_rollback_to_snapshot_by_id_raises_with_random_id( - self, session: Session - ) -> None: - import uuid - - random_id = uuid.uuid4() - with pytest.raises(ValueError): - snapshot.rollback_to_snapshot_by_id(session, random_id) - - -class TestLibSnapshotTable(TestSnapshotBase): - @pytest.fixture(autouse=True) - def create_snapshots(self, session: Session) -> Generator[None, Any, Any]: - assert snapshot.get_snapshots(session) == [] - snapshot.create_snapshot(session) - snapshot.create_snapshot(session, RANDOM_COMMENT) - yield - - def test_snapshot_table_no_short( - self, session: Session, capsys: 
pytest.CaptureFixture - ) -> None: - snapshot.print_snapshot_table(session, short=False, all_events=False) - captured = capsys.readouterr() - assert RANDOM_COMMENT in captured.out - assert "AIMBAT snapshots for event" in captured.out - assert "ID (shortened)" not in captured.out - - def test_snapshot_table_short( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - snapshot.print_snapshot_table(session, short=True, all_events=False) - captured = capsys.readouterr() - assert RANDOM_COMMENT in captured.out - assert "AIMBAT snapshots for event" in captured.out - assert "ID (shortened)" in captured.out - - def test_snapshot_table_no_short_all_events( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - snapshot.print_snapshot_table(session, short=False, all_events=True) - captured = capsys.readouterr() - assert RANDOM_COMMENT in captured.out - assert "AIMBAT snapshots for all events" in captured.out - assert "ID (shortened)" not in captured.out - - def test_snapshot_table_short_all_events( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - snapshot.print_snapshot_table(session, short=True, all_events=True) - captured = capsys.readouterr() - assert RANDOM_COMMENT in captured.out - assert "AIMBAT snapshots for all events" in captured.out - assert "ID (shortened)" in captured.out - - -class TestCliSnapshotUsage(TestSnapshotBase): - def test_cli_usage(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "--help"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "Usage" in captured.out - - -class TestCliSnapshotCreate(TestSnapshotBase): - def test_create_snapshot(self, session: Session) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "create", RANDOM_COMMENT]) - - assert excinfo.value.code == 0 - - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 1 - assert 
all_snapshots[0].comment == RANDOM_COMMENT - - -class TestCliSnapshotRollbackAndDelete(TestSnapshotBase): - @pytest.fixture(autouse=True) - def create_snapshots(self, session: Session) -> Generator[None, Any, Any]: - assert snapshot.get_snapshots(session) == [] - snapshot.create_snapshot(session, RANDOM_COMMENT) - session.flush() - yield - - def test_delete_snapshot_with_uuid(self, session: Session) -> None: - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 1 - snapshot_id = all_snapshots[0].id - - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "delete", str(snapshot_id)]) - - assert excinfo.value.code == 0 - - session.flush() - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 0 - - def test_delete_snapshot_with_string(self, session: Session) -> None: - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 1 - snapshot_id = str(all_snapshots[0].id)[:8] - - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "delete", str(snapshot_id)]) - - assert excinfo.value.code == 0 - - session.flush() - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 0 - - def test_rollback_to_snapshot_with_uuid(self, session: Session) -> None: - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 1 - snapshot_id = all_snapshots[0].id - - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "rollback", str(snapshot_id)]) - - assert excinfo.value.code == 0 - - session.flush() - - def test_rollback_to_snapshot_with_string(self, session: Session) -> None: - all_snapshots = snapshot.get_snapshots(session) - assert len(all_snapshots) == 1 - snapshot_id = str(all_snapshots[0].id)[:8] - - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "rollback", str(snapshot_id)]) - - assert excinfo.value.code == 0 - - session.flush() - - -class TestCliSnapshotTable(TestSnapshotBase): - @pytest.fixture(autouse=True) - def 
create_snapshots(self, session: Session) -> Generator[None, Any, Any]: - assert snapshot.get_snapshots(session) == [] - snapshot.create_snapshot(session) - snapshot.create_snapshot(session, RANDOM_COMMENT) - yield - - def test_snapshot_table_no_format(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["snapshot", "list"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert RANDOM_COMMENT in captured.out - assert "AIMBAT snapshots for event" in captured.out - assert "ID (shortened)" in captured.out diff --git a/tests/test_station.py b/tests/test_station.py deleted file mode 100644 index 33e8e7fa..00000000 --- a/tests/test_station.py +++ /dev/null @@ -1,139 +0,0 @@ -from aimbat.models import AimbatStation -from aimbat.app import app -from sqlmodel import Session, select -from sqlalchemy import Engine -from typing import Any -from collections.abc import Generator -from pydantic import TypeAdapter -import aimbat.core._station as station -import random -import pytest -import json - - -class TestStationBase: - @pytest.fixture(autouse=True) - def session( - self, fixture_engine_session_with_active_event: tuple[Engine, Session] - ) -> Generator[Session, Any, Any]: - session = fixture_engine_session_with_active_event[1] - yield session - - -class TestDeleteStation(TestStationBase): - def test_lib_delete_station_by_id(self, session: Session) -> None: - aimbat_station = random.choice(list(session.exec(select(AimbatStation)))) - id = aimbat_station.id - station.delete_station_by_id(session, id) - assert ( - session.exec( - select(AimbatStation).where(AimbatStation.id == id) - ).one_or_none() - is None - ) - - def test_cli_delete_station_by_id(self, session: Session) -> None: - seismogram = random.choice(list(session.exec(select(AimbatStation)))) - id = seismogram.id - - with pytest.raises(SystemExit) as excinfo: - app(["station", "delete", str(id)]) - - assert excinfo.value.code == 0 - - session.flush() 
- assert ( - session.exec( - select(AimbatStation).where(AimbatStation.id == id) - ).one_or_none() - is None - ) - - def test_cli_delete_station_by_id_with_wrong_id(self) -> None: - from aimbat import settings - - settings.log_level = "INFO" - - import uuid - - id = uuid.uuid4() - - with pytest.raises(SystemExit) as excinfo: - app(["station", "delete", str(id)]) - - assert excinfo.value.code == 1 - - def test_cli_delete_station_by_string(self, session: Session) -> None: - station = random.choice(list(session.exec(select(AimbatStation)))) - id = station.id - - with pytest.raises(SystemExit) as excinfo: - app(["station", "delete", str(id)[:5]]) - - assert excinfo.value.code == 0 - - session.flush() - assert ( - session.exec( - select(AimbatStation).where(AimbatStation.id == id) - ).one_or_none() - is None - ) - - -class TestLibStation(TestStationBase): - def test_sac_data(self, session: Session, capsys: pytest.CaptureFixture) -> None: - station.print_station_table(session, short=False) - assert "AIMBAT stations for event" in capsys.readouterr().out - - station.print_station_table(session, short=True) - assert "ID (shortened)" in capsys.readouterr().out - - station.print_station_table(session, short=False, all_events=True) - assert "AIMBAT stations for all events" in capsys.readouterr().out - - station.print_station_table(session, short=True, all_events=True) - assert "# Seismograms" in capsys.readouterr().out - - -class TestCliStation(TestStationBase): - def test_cli_usage(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["station", "--help"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "Usage" in captured.out - - def test_cli_station_list( - self, session: Session, capsys: pytest.CaptureFixture - ) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["station", "list", "--all"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - - assert "# Seismograms" in 
captured.out - - -class TestDumpStation(TestStationBase): - def test_lib_dump_data(self, session: Session) -> None: - json_data = station.dump_station_table_to_json(session) - type_adapter = TypeAdapter(list[AimbatStation]) - type_adapter.validate_json(json_data) - - def test_cli_dump_data(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["station", "dump"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - loaded_json = json.loads(captured.out) - assert isinstance(loaded_json, list) - assert len(loaded_json) > 0 - for i in loaded_json: - _ = AimbatStation(**i) diff --git a/tests/test_typing.py b/tests/test_typing.py deleted file mode 100644 index f0de45ed..00000000 --- a/tests/test_typing.py +++ /dev/null @@ -1,57 +0,0 @@ -from sqlmodel import SQLModel -from enum import StrEnum -from typing import get_args, TypeAliasType -from aimbat.models import AimbatEventParametersBase, AimbatSeismogramParametersBase -from aimbat.aimbat_types import ( - EventParameter, - SeismogramParameter, - EventParameterBool, - EventParameterFloat, - EventParameterTimedelta, - SeismogramParameterBool, - SeismogramParameterTimestamp, -) - - -def set_from_basemodel(obj: type[SQLModel]) -> set[str]: - """Returns a set from the basemodel fields and remove "id" from it.""" - my_set: set[str] = set(obj.model_fields) - my_set.discard("id") - - return my_set - - -def set_from_strenum(enum: type[StrEnum]) -> set[str]: - - return set([member.value for member in enum]) - - -def set_from_typealiases(*aliases: TypeAliasType) -> set[str]: - my_list = [] - for alias in aliases: - my_list.extend([v for v in get_args(alias.__value__)]) - - return set(my_list) - - -class TestLibTypes: - """Ensure Default models and types are consistent.""" - - def test_event_parameter_types(self) -> None: - assert set_from_basemodel(AimbatEventParametersBase) == set_from_strenum( - EventParameter - ) - assert set_from_strenum(EventParameter) == 
set_from_typealiases( - EventParameterBool, - EventParameterFloat, - EventParameterTimedelta, - ) - - def test_seismogram_parameter_types(self) -> None: - assert set_from_basemodel(AimbatSeismogramParametersBase) == set_from_strenum( - SeismogramParameter - ) - assert set_from_strenum(SeismogramParameter) == set_from_typealiases( - SeismogramParameterBool, - SeismogramParameterTimestamp, - ) diff --git a/tests/unit/aimbat_types/test_pydantic.py b/tests/unit/aimbat_types/test_pydantic.py new file mode 100644 index 00000000..215f7ca3 --- /dev/null +++ b/tests/unit/aimbat_types/test_pydantic.py @@ -0,0 +1,131 @@ +"""Tests for aimbat_types._pydantic custom Pydantic types.""" + +import pytest +from pydantic import BaseModel, ValidationError +from aimbat.aimbat_types import ( + PydanticTimestamp, + PydanticTimedelta, + PydanticNegativeTimedelta, + PydanticPositiveTimedelta, +) +from pandas import Timestamp, Timedelta + + +class _TimestampModel(BaseModel): + """Test model for PydanticTimestamp.""" + + value: PydanticTimestamp + + +class _OptionalTimestampModel(BaseModel): + """Test model for optional PydanticTimestamp.""" + + value: PydanticTimestamp | None = None + + +class _TimedeltaModel(BaseModel): + """Test model for PydanticTimedelta.""" + + value: PydanticTimedelta + + +class TestPydanticTimestamp: + """Tests for PydanticTimestamp custom type.""" + + def test_accepts_timestamp(self) -> None: + """Verifies that a pandas Timestamp is accepted.""" + ts = Timestamp("2020-01-01") + assert _TimestampModel(value=ts).value == ts + + def test_accepts_string(self) -> None: + """Verifies that a valid date string is accepted and converted to Timestamp.""" + m = _TimestampModel(value="2020-01-01") # type: ignore[arg-type] + assert isinstance(m.value, Timestamp) + + def test_rejects_none(self) -> None: + """Verifies that None is rejected for a required field.""" + with pytest.raises(ValidationError): + _TimestampModel(value=None) # type: ignore[arg-type] + + def 
test_optional_accepts_none(self) -> None: + """Verifies that None is accepted for an optional field.""" + assert _OptionalTimestampModel(value=None).value is None + + def test_rejects_invalid_string(self) -> None: + """Verifies that an invalid date string raises ValidationError.""" + with pytest.raises(ValidationError): + _TimestampModel(value="not-a-timestamp") # type: ignore[arg-type] + + +class TestPydanticTimedelta: + """Tests for PydanticTimedelta custom type.""" + + def test_accepts_timedelta(self) -> None: + """Verifies that a pandas Timedelta is accepted.""" + td = Timedelta(seconds=5) + assert _TimedeltaModel(value=td).value == td + + def test_rejects_none(self) -> None: + """Verifies that None is rejected.""" + with pytest.raises(ValidationError): + _TimedeltaModel(value=None) # type: ignore[arg-type] + + +class TestPydanticNegativeTimedelta: + """Tests for PydanticNegativeTimedelta custom type.""" + + def test_accepts_negative(self) -> None: + """Verifies that a negative Timedelta is accepted.""" + + class M(BaseModel): + value: PydanticNegativeTimedelta + + assert M(value=Timedelta(seconds=-1)).value == Timedelta(seconds=-1) + + def test_rejects_positive(self) -> None: + """Verifies that a positive Timedelta is rejected.""" + + class M(BaseModel): + value: PydanticNegativeTimedelta + + with pytest.raises(ValidationError): + M(value=Timedelta(seconds=1)) + + def test_rejects_zero(self) -> None: + """Verifies that a zero Timedelta is rejected.""" + + class M(BaseModel): + value: PydanticNegativeTimedelta + + with pytest.raises(ValidationError): + M(value=Timedelta(0)) + + +class TestPydanticPositiveTimedelta: + """Tests for PydanticPositiveTimedelta custom type.""" + + def test_accepts_positive(self) -> None: + """Verifies that a positive Timedelta is accepted.""" + + class M(BaseModel): + value: PydanticPositiveTimedelta + + assert M(value=Timedelta(seconds=1)).value == Timedelta(seconds=1) + + def test_rejects_negative(self) -> None: + """Verifies that 
a negative Timedelta is rejected.""" + + class M(BaseModel): + value: PydanticPositiveTimedelta + + with pytest.raises(ValidationError): + M(value=Timedelta(seconds=-1)) + + def test_rejects_zero(self) -> None: + """Verifies that a zero Timedelta is rejected.""" + + class M(BaseModel): + value: PydanticPositiveTimedelta + + with pytest.raises(ValidationError): + M(value=Timedelta(0)) diff --git a/tests/unit/cli/test_common.py b/tests/unit/cli/test_common.py new file mode 100644 index 00000000..ec390384 --- /dev/null +++ b/tests/unit/cli/test_common.py @@ -0,0 +1,198 @@ +"""Unit tests for aimbat.cli._common.""" + +import pytest +from aimbat.cli._common import ( + GlobalParameters, + PlotParameters, + IccsPlotParameters, + TableParameters, + CliHints, + HINTS, + simple_exception, +) +from aimbat import settings + + +class TestGlobalParameters: + """Tests for the GlobalParameters dataclass.""" + + def test_default_debug_is_false(self) -> None: + """Verifies that debug defaults to False.""" + params = GlobalParameters() + assert params.debug is False + + def test_debug_true_sets_log_level(self) -> None: + """Verifies that setting debug=True changes the log level to DEBUG.""" + GlobalParameters(debug=True) + assert settings.log_level == "DEBUG" + + def test_debug_false_does_not_change_log_level(self) -> None: + """Verifies that debug=False does not alter the log level.""" + original = settings.log_level + GlobalParameters(debug=False) + assert settings.log_level == original + + +class TestPlotParameters: + """Tests for the PlotParameters dataclass.""" + + def test_default_use_qt_is_false(self) -> None: + """Verifies that use_qt defaults to False.""" + params = PlotParameters() + assert params.use_qt is False + + def test_use_qt_can_be_set_true(self) -> None: + """Verifies that use_qt can be set to True.""" + params = PlotParameters(use_qt=True) + assert params.use_qt is True + + +class TestIccsPlotParameters: + """Tests for the IccsPlotParameters dataclass.""" + + def 
test_default_context_is_true(self) -> None: + """Verifies that context defaults to True.""" + params = IccsPlotParameters() + assert params.context is True + + def test_default_all_is_false(self) -> None: + """Verifies that all defaults to False.""" + params = IccsPlotParameters() + assert params.all is False + + def test_context_can_be_set_false(self) -> None: + """Verifies that context can be set to False.""" + params = IccsPlotParameters(context=False) + assert params.context is False + + def test_all_can_be_set_true(self) -> None: + """Verifies that all can be set to True.""" + params = IccsPlotParameters(all=True) + assert params.all is True + + +class TestTableParameters: + """Tests for the TableParameters dataclass.""" + + def test_default_short_is_true(self) -> None: + """Verifies that short defaults to True.""" + params = TableParameters() + assert params.short is True + + def test_short_can_be_set_false(self) -> None: + """Verifies that short can be set to False.""" + params = TableParameters(short=False) + assert params.short is False + + +class TestCliHints: + """Tests for the CliHints frozen dataclass.""" + + def test_activate_event_hint_content(self) -> None: + """Verifies that ACTIVATE_EVENT hint references the activate command.""" + assert "activate" in CliHints.ACTIVATE_EVENT + assert "aimbat event activate" in CliHints.ACTIVATE_EVENT + + def test_list_events_hint_content(self) -> None: + """Verifies that LIST_EVENTS hint references the list command.""" + assert "list" in CliHints.LIST_EVENTS + assert "aimbat event list" in CliHints.LIST_EVENTS + + def test_hints_instance_is_frozen(self) -> None: + """Verifies that the CliHints dataclass is frozen (immutable).""" + with pytest.raises((AttributeError, TypeError)): + HINTS.ACTIVATE_EVENT = "new value" + + def test_hints_singleton_values(self) -> None: + """Verifies that the HINTS singleton has the expected attribute values.""" + assert HINTS.ACTIVATE_EVENT == CliHints.ACTIVATE_EVENT + assert 
HINTS.LIST_EVENTS == CliHints.LIST_EVENTS + + +class TestSimpleException: + """Tests for the simple_exception decorator.""" + + def test_returns_value_when_no_exception(self) -> None: + """Verifies that the decorated function returns its value normally.""" + + @simple_exception + def good() -> int: + return 42 + + assert good() == 42 + + def test_passes_args_and_kwargs(self) -> None: + """Verifies that args and kwargs are forwarded to the wrapped function.""" + + @simple_exception + def add(a: int, b: int = 0) -> int: + return a + b + + assert add(3, b=4) == 7 + + def test_exits_on_exception_in_normal_mode(self) -> None: + """Verifies that an exception causes SystemExit when not in debug mode.""" + settings.log_level = "INFO" + + @simple_exception + def boom() -> None: + raise ValueError("something went wrong") + + with pytest.raises(SystemExit) as exc_info: + boom() + assert exc_info.value.code == 1 + + def test_reraises_in_debug_mode(self) -> None: + """Verifies that exceptions propagate normally when in DEBUG mode.""" + settings.log_level = "DEBUG" + + @simple_exception + def boom() -> None: + raise ValueError("debug error") + + with pytest.raises(ValueError, match="debug error"): + boom() + + def test_reraises_in_trace_mode(self) -> None: + """Verifies that exceptions propagate normally when in TRACE mode.""" + settings.log_level = "TRACE" + + @simple_exception + def boom() -> None: + raise RuntimeError("trace error") + + with pytest.raises(RuntimeError, match="trace error"): + boom() + + def test_preserves_function_name(self) -> None: + """Verifies that the decorator preserves the original function name.""" + + @simple_exception + def my_function() -> None: + pass + + assert my_function.__name__ == "my_function" + + def test_preserves_function_docstring(self) -> None: + """Verifies that the decorator preserves the original function docstring.""" + + @simple_exception + def documented() -> None: + """My docstring.""" + + assert documented.__doc__ == "My 
docstring." + + def test_exit_prints_error_panel(self, capsys: pytest.CaptureFixture[str]) -> None: + """Verifies that the exception message is printed before exiting.""" + settings.log_level = "INFO" + + @simple_exception + def boom() -> None: + raise RuntimeError("panel message") + + with pytest.raises(SystemExit): + boom() + + # Rich prints to stderr or stdout; capture via sys.stdout fallback + captured = capsys.readouterr() + assert "panel message" in captured.out or "panel message" in captured.err diff --git a/tests/unit/io/test_sac.py b/tests/unit/io/test_sac.py new file mode 100644 index 00000000..5ed1203d --- /dev/null +++ b/tests/unit/io/test_sac.py @@ -0,0 +1,283 @@ +"""Unit tests for aimbat.io._sac.""" + +from aimbat.io._sac import ( + create_event_from_sacfile, + create_seismogram_from_sacfile_and_pick_header, + create_station_from_sacfile, + read_seismogram_data_from_sacfile, + write_seismogram_data_to_sacfile, +) +from aimbat.models import AimbatEvent, AimbatSeismogram, AimbatStation +from pathlib import Path +from pandas import Timedelta, Timestamp +from pydantic import ValidationError +from pysmo.classes import SAC +import numpy as np +import pytest + +# =================================================================== +# read / write seismogram data +# =================================================================== + + +class TestReadSeismogramData: + """Tests for reading seismogram data from SAC files.""" + + def test_returns_ndarray(self, sac_file_good: Path) -> None: + """Verifies that reading data returns a numpy ndarray. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + data = read_seismogram_data_from_sacfile(sac_file_good) + assert isinstance(data, np.ndarray) + + def test_matches_pysmo_data(self, sac_file_good: Path) -> None: + """Verifies that the read data matches data read by pysmo. + + Args: + sac_file_good (Path): Path to a valid SAC file. 
+ """ + expected = SAC.from_file(sac_file_good).seismogram.data + data = read_seismogram_data_from_sacfile(sac_file_good) + np.testing.assert_array_equal(data, expected) + + def test_nonexistent_file_raises(self, tmp_path: Path) -> None: + """Verifies that reading from a non-existent file raises FileNotFoundError. + + Args: + tmp_path (Path): Temporary directory path. + """ + with pytest.raises(FileNotFoundError): + read_seismogram_data_from_sacfile(tmp_path / "missing.sac") + + +class TestWriteSeismogramData: + """Tests for writing seismogram data to SAC files.""" + + def test_overwrites_data_on_disk(self, sac_file_good: Path) -> None: + """Verifies that writing data updates the file on disk. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + original = read_seismogram_data_from_sacfile(sac_file_good) + new_data = np.ones_like(original) * 42.0 + + write_seismogram_data_to_sacfile(sac_file_good, new_data) + + reread = read_seismogram_data_from_sacfile(sac_file_good) + np.testing.assert_array_equal(reread, new_data) + + def test_preserves_length(self, sac_file_good: Path) -> None: + """Verifies that writing data preserves the number of points. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + original = read_seismogram_data_from_sacfile(sac_file_good) + write_seismogram_data_to_sacfile(sac_file_good, np.zeros_like(original)) + reread = read_seismogram_data_from_sacfile(sac_file_good) + assert len(reread) == len(original) + + def test_round_trip(self, sac_file_good: Path) -> None: + """Write then read should return the same array. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + data = np.linspace(-1.0, 1.0, 100) + # First overwrite with our data, then verify the round-trip. 
+ write_seismogram_data_to_sacfile(sac_file_good, data) + result = read_seismogram_data_from_sacfile(sac_file_good) + np.testing.assert_allclose(result, data) + + +# =================================================================== +# create_station_from_sacfile +# =================================================================== + + +class TestCreateStation: + """Tests for creating AimbatStation objects from SAC files.""" + + def test_returns_aimbat_station(self, sac_file_good: Path) -> None: + """Verifies that the function returns an AimbatStation instance. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + station = create_station_from_sacfile(sac_file_good) + assert isinstance(station, AimbatStation) + + def test_fields_match_sac(self, sac_file_good: Path) -> None: + """Verifies that station fields match the SAC header values. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + sac = SAC.from_file(sac_file_good) + station = create_station_from_sacfile(sac_file_good) + + assert station.name == sac.station.name + assert station.network == sac.station.network + assert station.location == sac.station.location + assert station.channel == sac.station.channel + assert station.latitude == sac.station.latitude + assert station.longitude == sac.station.longitude + assert station.elevation == sac.station.elevation + + def test_nonexistent_file_raises(self, tmp_path: Path) -> None: + """Verifies that creating a station from a non-existent file raises FileNotFoundError. + + Args: + tmp_path (Path): Temporary directory path. 
+ """ + with pytest.raises(FileNotFoundError): + create_station_from_sacfile(tmp_path / "missing.sac") + + +# =================================================================== +# create_event_from_sacfile +# =================================================================== + + +class TestCreateEvent: + """Tests for creating AimbatEvent objects from SAC files.""" + + def test_returns_aimbat_event(self, sac_file_good: Path) -> None: + """Verifies that the function returns an AimbatEvent instance. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + event = create_event_from_sacfile(sac_file_good) + assert isinstance(event, AimbatEvent) + + def test_fields_match_sac(self, sac_file_good: Path) -> None: + """Verifies that event fields match the SAC header values. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + sac = SAC.from_file(sac_file_good) + event = create_event_from_sacfile(sac_file_good) + + assert isinstance(event.time, Timestamp) + assert event.time == sac.event.time + assert event.latitude == sac.event.latitude + assert event.longitude == sac.event.longitude + assert event.depth == sac.event.depth + + def test_has_parameters(self, sac_file_good: Path) -> None: + """Verifies that the created event has initialized parameters. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + event = create_event_from_sacfile(sac_file_good) + assert event.parameters is not None + + def test_nonexistent_file_raises(self, tmp_path: Path) -> None: + """Verifies that creating an event from a non-existent file raises FileNotFoundError. + + Args: + tmp_path (Path): Temporary directory path. 
+ """ + with pytest.raises(FileNotFoundError): + create_event_from_sacfile(tmp_path / "missing.sac") + + +# =================================================================== +# create_seismogram_from_sacfile_and_pick_header +# =================================================================== + + +class TestCreateSeismogram: + """Tests for creating AimbatSeismogram objects from SAC files.""" + + def test_returns_aimbat_seismogram(self, sac_file_good: Path) -> None: + """Verifies that the function returns an AimbatSeismogram instance. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + seis = create_seismogram_from_sacfile_and_pick_header(sac_file_good, "t0") + assert isinstance(seis, AimbatSeismogram) + + def test_fields_match_sac(self, sac_file_good: Path) -> None: + """Verifies that seismogram fields match the SAC header values. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + sac = SAC.from_file(sac_file_good) + seis = create_seismogram_from_sacfile_and_pick_header(sac_file_good, "t0") + + assert isinstance(seis.begin_time, Timestamp) + assert seis.begin_time == sac.seismogram.begin_time + assert isinstance(seis.delta, Timedelta) + assert seis.delta == sac.seismogram.delta + + def test_t0_uses_requested_pick_header(self, sac_file_good: Path) -> None: + """Verifies that t0 is populated from the specified pick header. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + sac = SAC.from_file(sac_file_good) + + seis_t0 = create_seismogram_from_sacfile_and_pick_header(sac_file_good, "t0") + assert seis_t0.t0 == sac.timestamps.t0 + + seis_t1 = create_seismogram_from_sacfile_and_pick_header(sac_file_good, "t1") + assert seis_t1.t0 == sac.timestamps.t1 + + def test_has_parameters(self, sac_file_good: Path) -> None: + """Verifies that the created seismogram has initialized parameters. + + Args: + sac_file_good (Path): Path to a valid SAC file. 
+ """ + seis = create_seismogram_from_sacfile_and_pick_header(sac_file_good, "t0") + assert seis.parameters is not None + + def test_nonexistent_file_raises(self, tmp_path: Path) -> None: + """Verifies that creating a seismogram from a non-existent file raises FileNotFoundError. + + Args: + tmp_path (Path): Temporary directory path. + """ + with pytest.raises(FileNotFoundError): + create_seismogram_from_sacfile_and_pick_header( + tmp_path / "missing.sac", "t0" + ) + + def test_invalid_pick_header_raises(self, sac_file_good: Path) -> None: + """Verifies that requesting an invalid pick header raises AttributeError. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + with pytest.raises(AttributeError): + create_seismogram_from_sacfile_and_pick_header( + sac_file_good, "nonexistent_header" + ) + + def test_none_pick_raises(self, sac_file_good: Path) -> None: + """Verifies that if the pick header exists but is None, ValidationError is raised. + + Args: + sac_file_good (Path): Path to a valid SAC file. + """ + sac = SAC.from_file(sac_file_good) + # Find a timestamp header that is None. 
+ none_header = None + for name in ["t4", "t5", "t6", "t7", "t8", "t9"]: + if getattr(sac.timestamps, name) is None: + none_header = name + break + assert none_header is not None, "expected at least one None timestamp header" + + with pytest.raises(ValidationError): + create_seismogram_from_sacfile_and_pick_header(sac_file_good, none_header) diff --git a/tests/unit/models/test_sqlalchemy.py b/tests/unit/models/test_sqlalchemy.py new file mode 100644 index 00000000..f4854adf --- /dev/null +++ b/tests/unit/models/test_sqlalchemy.py @@ -0,0 +1,146 @@ +import pytest +import pandas as pd +from datetime import datetime, timezone +from unittest.mock import MagicMock +from sqlalchemy.engine import Dialect +from aimbat.models._sqlalchemy import SAPandasTimestamp, SAPandasTimedelta + + +@pytest.fixture +def mock_dialect() -> Dialect: + """Fixture for a mock SQLAlchemy dialect.""" + return MagicMock(spec=Dialect) + + +class TestSAPandasTimestamp: + """Tests for the SAPandasTimestamp custom SQLAlchemy type.""" + + @pytest.fixture + def sa_timestamp(self) -> SAPandasTimestamp: + """Fixture providing an instance of SAPandasTimestamp.""" + return SAPandasTimestamp() + + def test_process_bind_param_none( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that None is passed through unchanged.""" + assert sa_timestamp.process_bind_param(None, mock_dialect) is None + + def test_process_bind_param_naive_timestamp( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that a naive pandas Timestamp is converted to a UTC datetime.""" + ts_naive = pd.Timestamp("2023-01-01 12:00:00") + result = sa_timestamp.process_bind_param(ts_naive, mock_dialect) + assert isinstance(result, datetime) + assert result.tzinfo == timezone.utc + assert result.year == 2023 + assert result.hour == 12 + + def test_process_bind_param_aware_timestamp( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that a 
 timezone-aware pandas Timestamp is converted to UTC.""" + # Create a non-UTC timestamp + ts_ny = pd.Timestamp("2023-01-01 12:00:00").tz_localize("America/New_York") + result = sa_timestamp.process_bind_param(ts_ny, mock_dialect) + assert isinstance(result, datetime) + assert result.tzinfo == timezone.utc + # 12:00 NY is 17:00 UTC + assert result.hour == 17 + + def test_process_bind_param_converts_other_types( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that other datetime types are also converted correctly.""" + dt = datetime(2023, 1, 1, 12, 0, 0) + result = sa_timestamp.process_bind_param(dt, mock_dialect) + assert isinstance(result, datetime) + assert result.tzinfo == timezone.utc + + def test_process_bind_param_truncates_nanoseconds( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that nanosecond precision is truncated to microseconds.""" + # Python's datetime only supports microseconds; pandas supports nanoseconds + ts_nano = pd.Timestamp("2023-01-01 12:00:00.123456789") + result = sa_timestamp.process_bind_param(ts_nano, mock_dialect) + assert result is not None + # Should be truncated/floored to microseconds + assert result.microsecond == 123456 + # Ensure it didn't round up or do something unexpected with the extra precision + assert result.second == 0 + + def test_process_result_value_none( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that None result is passed through unchanged.""" + assert sa_timestamp.process_result_value(None, mock_dialect) is None + + def test_process_result_value_naive_datetime( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that a naive datetime from DB is converted to a UTC pandas Timestamp.""" + # SQLAlchemy might return a naive datetime (implicit UTC or from DB) + dt_naive = datetime(2023, 1, 1, 12, 0, 0) + result = sa_timestamp.process_result_value(dt_naive, mock_dialect)
+ assert isinstance(result, pd.Timestamp) + assert result.tzinfo == timezone.utc + assert result.year == 2023 + + def test_process_result_value_aware_datetime( + self, sa_timestamp: SAPandasTimestamp, mock_dialect: Dialect + ) -> None: + """Test that an aware datetime from DB is converted to a UTC pandas Timestamp.""" + dt_aware = datetime(2023, 1, 1, 12, 0, 0, tzinfo=timezone.utc) + result = sa_timestamp.process_result_value(dt_aware, mock_dialect) + assert isinstance(result, pd.Timestamp) + assert result.tzinfo == timezone.utc + + +class TestSAPandasTimedelta: + """Tests for the SAPandasTimedelta custom SQLAlchemy type.""" + + @pytest.fixture + def sa_timedelta(self) -> SAPandasTimedelta: + """Fixture providing an instance of SAPandasTimedelta.""" + return SAPandasTimedelta() + + def test_process_bind_param_none( + self, sa_timedelta: SAPandasTimedelta, mock_dialect: Dialect + ) -> None: + """Test that None is passed through unchanged.""" + assert sa_timedelta.process_bind_param(None, mock_dialect) is None + + def test_process_bind_param_timedelta( + self, sa_timedelta: SAPandasTimedelta, mock_dialect: Dialect + ) -> None: + """Test that a pandas Timedelta is converted to nanoseconds (int).""" + td = pd.Timedelta(seconds=10) + result = sa_timedelta.process_bind_param(td, mock_dialect) + assert isinstance(result, int) + assert result == 10 * 1_000_000_000 # nanoseconds + + def test_process_bind_param_converts_other_types( + self, sa_timedelta: SAPandasTimedelta, mock_dialect: Dialect + ) -> None: + """Test that other types (like strings) are converted to nanoseconds.""" + # String conversion + result = sa_timedelta.process_bind_param("1 days", mock_dialect) + assert isinstance(result, int) + assert result == 86400 * 1_000_000_000 + + def test_process_result_value_none( + self, sa_timedelta: SAPandasTimedelta, mock_dialect: Dialect + ) -> None: + """Test that None result is passed through unchanged.""" + assert sa_timedelta.process_result_value(None, mock_dialect) 
is None + + def test_process_result_value_int( + self, sa_timedelta: SAPandasTimedelta, mock_dialect: Dialect + ) -> None: + """Test that an integer (nanoseconds) from DB is converted to a pandas Timedelta.""" + ns_value = 5 * 1_000_000_000 # 5 seconds in ns + result = sa_timedelta.process_result_value(ns_value, mock_dialect) + assert isinstance(result, pd.Timedelta) + assert result.total_seconds() == 5.0 diff --git a/tests/test_app.py b/tests/unit/test_app.py similarity index 57% rename from tests/test_app.py rename to tests/unit/test_app.py index 77832dff..a7fad800 100644 --- a/tests/test_app.py +++ b/tests/unit/test_app.py @@ -1,18 +1,42 @@ +"""Unit tests for the main CLI application entry point.""" + from importlib import metadata, reload from typing import Any import pytest def mock_return_str(*args: list[Any], **kwargs: dict[str, Any]) -> str: + """Mock function that returns a fixed string version. + + Args: + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. + + Returns: + str: The string "1.2.3". + """ return "1.2.3" def mock_raise(*args: list[Any], **kwargs: dict[str, Any]) -> None: + """Mock function that raises an Exception. + + Args: + *args: Variable length argument list. + **kwargs: Arbitrary keyword arguments. + + Raises: + Exception: Always raised. + """ raise Exception def test_cli_usage(capsys: pytest.CaptureFixture) -> None: - """Test aimbat cli help output.""" + """Test aimbat cli help output. + + Args: + capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr. + """ from aimbat import app with pytest.raises(SystemExit) as excinfo: @@ -27,7 +51,12 @@ def test_cli_usage(capsys: pytest.CaptureFixture) -> None: def test_cli_version( capsys: pytest.CaptureFixture, monkeypatch: pytest.MonkeyPatch ) -> None: - """Test aimbat cli version flag.""" + """Test aimbat cli version flag. + + Args: + capsys (pytest.CaptureFixture): Fixture to capture stdout/stderr. 
+ monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ from aimbat import app monkeypatch.setattr(metadata, "version", mock_return_str) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py new file mode 100644 index 00000000..9e384980 --- /dev/null +++ b/tests/unit/test_config.py @@ -0,0 +1,227 @@ +"""Unit tests for aimbat._config.""" + +import io +from pathlib import Path +from typing import Any +import pytest +from rich.console import Console +from aimbat._config import Settings, settings, print_settings_table, cli_settings_list + + +def _capture_pretty(monkeypatch: pytest.MonkeyPatch) -> str: + """Call print_settings_table(pretty=True) and return plain rendered output. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + + Returns: + str: The captured output string. + """ + buffer = io.StringIO() + console = Console(file=buffer, highlight=False, no_color=True, width=200) + monkeypatch.setattr("aimbat.utils._json.Console", lambda: console) + print_settings_table(pretty=True) + return buffer.getvalue() + + +class TestSettings: + """Tests for the Settings class configuration.""" + + def test_default_project(self) -> None: + """Verifies the default project file name.""" + s = Settings() + assert s.project == Path("aimbat.db") + + def test_default_logfile(self) -> None: + """Verifies the default log file name.""" + s = Settings() + assert s.logfile == Path("aimbat.log") + + def test_default_log_level(self) -> None: + """Verifies the default log level is INFO.""" + s = Settings() + assert s.log_level == "INFO" + + def test_db_url_derived_from_project(self) -> None: + """Verifies that db_url is derived from the project path by default.""" + s = Settings() + assert str(s.project) in s.db_url + + def test_db_url_custom_not_overridden(self) -> None: + """Verifies that a custom db_url is preserved.""" + s = Settings(db_url="sqlite:///custom.db") + assert s.db_url == "sqlite:///custom.db" + + def 
test_env_prefix(self) -> None: + """Verifies that the environment variable prefix is 'aimbat_'.""" + assert Settings.model_config.get("env_prefix") == "aimbat_" + + def test_min_id_length_default(self) -> None: + """Verifies the default minimum ID length.""" + s = Settings() + assert s.min_id_length == 2 + + def test_bandpass_apply_default(self) -> None: + """Verifies that bandpass_apply is a boolean.""" + s = Settings() + assert isinstance(s.bandpass_apply, bool) + + def test_min_ccnorm_bounds(self) -> None: + """Verifies that min_ccnorm is within [0, 1].""" + s = Settings() + assert 0 <= float(s.min_ccnorm) <= 1 + + def test_window_pre_is_negative(self) -> None: + """Verifies that window_pre is a negative duration.""" + s = Settings() + assert s.window_pre.total_seconds() < 0 + + def test_window_post_is_positive(self) -> None: + """Verifies that window_post is a positive duration.""" + s = Settings() + assert s.window_post.total_seconds() > 0 + + def test_context_width_is_positive(self) -> None: + """Verifies that context_width is a positive duration.""" + s = Settings() + assert s.context_width.total_seconds() > 0 + + +class TestPrintSettingsTablePlain: + """Tests for print_settings_table with pretty=False.""" + + def test_contains_setting_names(self, capsys: pytest.CaptureFixture[str]) -> None: + """Verifies that output contains setting names in uppercase. + + Args: + capsys (pytest.CaptureFixture[str]): Fixture to capture stdout/stderr. + """ + import json + + print_settings_table(pretty=False) + output = capsys.readouterr().out + for k in json.loads(Settings().model_dump_json()): + assert k.upper() in output + + def test_contains_env_prefix(self, capsys: pytest.CaptureFixture[str]) -> None: + """Verifies that output contains the environment variable prefix. + + Args: + capsys (pytest.CaptureFixture[str]): Fixture to capture stdout/stderr. 
+ """ + print_settings_table(pretty=False) + output = capsys.readouterr().out + assert "AIMBAT_" in output + + def test_contains_values(self, capsys: pytest.CaptureFixture[str]) -> None: + """Verifies that output contains current setting values. + + Args: + capsys (pytest.CaptureFixture[str]): Fixture to capture stdout/stderr. + """ + print_settings_table(pretty=False) + output = capsys.readouterr().out + assert str(settings.project) in output + assert str(settings.logfile) in output + + def test_format_is_key_equals_value( + self, capsys: pytest.CaptureFixture[str] + ) -> None: + """Verifies that output lines are formatted as KEY=VALUE. + + Args: + capsys (pytest.CaptureFixture[str]): Fixture to capture stdout/stderr. + """ + print_settings_table(pretty=False) + output = capsys.readouterr().out + for line in output.strip().splitlines(): + assert "=" in line + + +class TestPrintSettingsTablePretty: + """Tests for print_settings_table with pretty=True.""" + + def test_title_present(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that the table title is present. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_pretty(monkeypatch) + assert "AIMBAT settings" in output + + def test_column_headers_present(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that column headers are present. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_pretty(monkeypatch) + assert "Name" in output + assert "Value" in output + assert "Description" in output + + def test_setting_names_present(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that all setting names are present in the table. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. 
+ """ + import json + + output = _capture_pretty(monkeypatch) + for k in json.loads(Settings().model_dump_json()): + assert k in output + + def test_setting_values_present(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that setting values are present in the table. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_pretty(monkeypatch) + assert str(settings.project) in output + assert str(settings.logfile) in output + + def test_env_var_in_description(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that environment variable names are included in descriptions. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_pretty(monkeypatch) + assert "AIMBAT_" in output + + +class TestCliSettingsList: + """Tests for the cli_settings_list function.""" + + def test_delegates_to_print_settings_table( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Verifies that the function calls print_settings_table with the correct argument. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + calls: list[dict[str, Any]] = [] + monkeypatch.setattr( + "aimbat._config.print_settings_table", + lambda pretty: calls.append({"pretty": pretty}), + ) + cli_settings_list(pretty=True) + assert calls == [{"pretty": True}] + + def test_default_pretty_is_true(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that 'pretty' defaults to True. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. 
+ """ + calls: list[dict[str, Any]] = [] + monkeypatch.setattr( + "aimbat._config.print_settings_table", + lambda pretty: calls.append({"pretty": pretty}), + ) + cli_settings_list() + assert calls[0]["pretty"] is True diff --git a/tests/unit/utils/test_json.py b/tests/unit/utils/test_json.py new file mode 100644 index 00000000..3e174025 --- /dev/null +++ b/tests/unit/utils/test_json.py @@ -0,0 +1,320 @@ +"""Unit tests for aimbat.utils._json.""" + +import io +from typing import Any, Callable +import pytest +from rich.console import Console +from aimbat.utils._json import json_to_table + + +def _capture_table( + monkeypatch: pytest.MonkeyPatch, + data: dict[str, Any] | list[dict[str, Any]], + title: str | None = None, + formatters: dict[str, Callable[[Any], str]] | None = None, + skip_keys: list[str] | None = None, + column_order: list[str] | None = None, + column_kwargs: dict[str, dict[str, Any]] | None = None, + common_column_kwargs: dict[str, Any] | None = None, +) -> str: + """Call json_to_table and return the rendered output as a plain string. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + data (dict[str, Any] | list[dict[str, Any]]): The data to render. + title (str | None): Optional table title. + formatters (dict[str, Callable[[Any], str]] | None): Optional value formatters. + skip_keys (list[str] | None): Keys to exclude from the table. + column_order (list[str] | None): Explicit order of columns. + column_kwargs (dict[str, dict[str, Any]] | None): Column-specific arguments. + common_column_kwargs (dict[str, Any] | None): Arguments applied to all columns. + + Returns: + str: The captured table output. 
+ """ + buffer = io.StringIO() + console = Console(file=buffer, highlight=False, no_color=True, width=200) + monkeypatch.setattr("aimbat.utils._json.Console", lambda: console) + json_to_table( + data, + title=title, + formatters=formatters, + skip_keys=skip_keys, + column_order=column_order, + column_kwargs=column_kwargs, + common_column_kwargs=common_column_kwargs, + ) + return buffer.getvalue() + + +class TestJsonToTableSingleDict: + """Tests json_to_table with a single dictionary input.""" + + def test_basic(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies basic key-value rendering. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table(monkeypatch, {"name": "Alice", "age": 30}) + assert "name" in output + assert "Alice" in output + assert "age" in output + assert "30" in output + + def test_title(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that the title is rendered. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table(monkeypatch, {"name": "Alice"}, title="Person") + assert "Person" in output + + def test_default_column_headers(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies default headers for dictionary input. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table(monkeypatch, {"x": "y"}) + assert "Key" in output + assert "Value" in output + + def test_formatter_applied(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that value formatters are applied. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + {"score": 0.123456}, + formatters={"score": lambda v: f"{v:.2f}"}, + ) + assert "0.12" in output + assert "0.123456" not in output + + def test_skip_keys(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that specified keys are skipped. 
+ + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + {"name": "Alice", "secret": "hidden"}, + skip_keys=["secret"], + ) + assert "name" in output + assert "secret" not in output + assert "hidden" not in output + + def test_column_order(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that column order is respected (row order for dicts). + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + {"b": "2", "a": "1"}, + column_order=["a", "b"], + ) + pos_a = output.index("a") + pos_b = output.index("b") + assert pos_a < pos_b + + def test_column_kwargs_header_override( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Verifies that column headers can be overridden via column_kwargs. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + {"name": "Alice"}, + column_kwargs={"Key": {"header": "Field"}, "Value": {"header": "Data"}}, + ) + assert "Field" in output + assert "Data" in output + assert "Key" not in output + + def test_common_column_kwargs_applied( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Verifies that common_column_kwargs are accepted. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + # Ensures no exception is raised when common_column_kwargs is provided. + output = _capture_table( + monkeypatch, + {"x": "1"}, + common_column_kwargs={"min_width": 5}, + ) + assert "x" in output + + def test_per_column_kwargs_override_common( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Verifies that column-specific kwargs override common kwargs. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. 
+ """ + output = _capture_table( + monkeypatch, + {"x": "1"}, + common_column_kwargs={"header": "Common"}, + column_kwargs={"Key": {"header": "Specific"}}, + ) + assert "Specific" in output + + +class TestJsonToTableListOfDicts: + """Tests json_to_table with a list of dictionaries.""" + + def test_basic(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies basic table rendering for list of dicts. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"id": 1, "name": "Alice"}, {"id": 2, "name": "Bob"}], + ) + assert "id" in output + assert "name" in output + assert "Alice" in output + assert "Bob" in output + assert "1" in output + assert "2" in output + + def test_empty_list(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that an empty list produces valid output (empty table). + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + # Should not raise and should print an empty table. + output = _capture_table(monkeypatch, []) + assert output is not None + + def test_title(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that the title is rendered. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, [{"description": "test item"}], title="Results" + ) + assert "Results" in output + + def test_formatter_applied(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that value formatters are applied. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"value": 3.14159}], + formatters={"value": lambda v: f"{v:.1f}"}, + ) + assert "3.1" in output + assert "3.14159" not in output + + def test_skip_keys(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that specified keys are skipped. 
+ + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"name": "Alice", "secret": "x"}], + skip_keys=["secret"], + ) + assert "name" in output + assert "secret" not in output + + def test_column_order(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Verifies that column order is respected. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"b": "2", "a": "1"}], + column_order=["a", "b"], + ) + pos_a = output.index("a") + pos_b = output.index("b") + assert pos_a < pos_b + + def test_column_order_partial(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Keys not listed in column_order should be appended after. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"c": "3", "b": "2", "a": "1"}], + column_order=["a"], + ) + pos_a = output.index("a") + pos_b = output.index("b") + pos_c = output.index("c") + assert pos_a < pos_b + assert pos_a < pos_c + + def test_column_kwargs_header_override( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Verifies that column headers can be overridden. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"id": 1}], + column_kwargs={"id": {"header": "Identifier"}}, + ) + assert "Identifier" in output + assert "id" not in output + + def test_missing_key_in_row(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Rows missing a key should render 'None' for that cell. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. 
+ """ + output = _capture_table( + monkeypatch, + [{"a": 1, "b": 2}, {"a": 3}], + ) + assert "None" in output + + def test_common_column_kwargs_applied( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Verifies that common_column_kwargs are applied to list columns. + + Args: + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + """ + output = _capture_table( + monkeypatch, + [{"x": "1"}], + common_column_kwargs={"min_width": 5}, + ) + assert "x" in output diff --git a/tests/unit/utils/test_sampledata.py b/tests/unit/utils/test_sampledata.py new file mode 100644 index 00000000..9c6ba4b1 --- /dev/null +++ b/tests/unit/utils/test_sampledata.py @@ -0,0 +1,151 @@ +"""Unit tests for aimbat.utils._sampledata.""" + +import io +import zipfile +from pathlib import Path +from unittest.mock import MagicMock, patch +import pytest +from aimbat.utils._sampledata import delete_sampledata, download_sampledata + + +def _make_zip_bytes(filenames: list[str]) -> bytes: + """Return the bytes of a ZIP archive containing empty files with the given names. + + Args: + filenames (list[str]): List of filenames to include in the ZIP. + + Returns: + bytes: The bytes of the ZIP archive. + """ + buf = io.BytesIO() + with zipfile.ZipFile(buf, mode="w") as zf: + for name in filenames: + zf.writestr(name, b"") + return buf.getvalue() + + +@pytest.fixture() +def sampledata_dir(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> Path: + """Point settings.sampledata_dir at a temp directory for each test. + + Args: + tmp_path (Path): Temporary directory path. + monkeypatch (pytest.MonkeyPatch): Fixture to mock objects/attributes. + + Returns: + Path: The temporary sample data directory. 
+ """ + d = tmp_path / "sample-data" + import aimbat + + monkeypatch.setattr(aimbat.settings, "sampledata_dir", d) + return d + + +class TestDeleteSampledata: + """Tests for the delete_sampledata function.""" + + def test_removes_directory(self, sampledata_dir: Path) -> None: + """Verifies that the sample data directory is removed. + + Args: + sampledata_dir (Path): The sample data directory. + """ + sampledata_dir.mkdir() + (sampledata_dir / "file.txt").write_text("x") + delete_sampledata() + assert not sampledata_dir.exists() + + def test_raises_if_dir_missing(self, sampledata_dir: Path) -> None: + """Verifies that FileNotFoundError is raised if the directory is missing. + + Args: + sampledata_dir (Path): The sample data directory. + """ + assert not sampledata_dir.exists() + with pytest.raises(FileNotFoundError): + delete_sampledata() + + +class TestDownloadSampledata: + """Tests for the download_sampledata function.""" + + def _mock_urlopen(self, filenames: list[str]) -> MagicMock: + """Return a context-manager mock that yields ZIP bytes for urlopen. + + Args: + filenames (list[str]): List of filenames for the mock ZIP. + + Returns: + MagicMock: A mock object behaving like urlopen's return value. + """ + zip_bytes = _make_zip_bytes(filenames) + mock_resp = MagicMock() + mock_resp.read.return_value = zip_bytes + mock_resp.__enter__ = lambda s: s + mock_resp.__exit__ = MagicMock(return_value=False) + mock_urlopen = MagicMock(return_value=mock_resp) + return mock_urlopen + + def test_extracts_files(self, sampledata_dir: Path) -> None: + """Verifies that files are extracted to the sample data directory. + + Args: + sampledata_dir (Path): The sample data directory. 
+ """ + mock_urlopen = self._mock_urlopen(["data/file1.sac", "data/file2.sac"]) + with patch("aimbat.utils._sampledata.urlopen", mock_urlopen): + download_sampledata() + assert sampledata_dir.exists() + + def test_raises_if_dir_non_empty(self, sampledata_dir: Path) -> None: + """Verifies that FileExistsError is raised if the directory is not empty. + + Args: + sampledata_dir (Path): The sample data directory. + """ + sampledata_dir.mkdir() + (sampledata_dir / "existing.txt").write_text("x") + mock_urlopen = self._mock_urlopen(["data/file.sac"]) + with patch("aimbat.utils._sampledata.urlopen", mock_urlopen): + with pytest.raises(FileExistsError): + download_sampledata() + mock_urlopen.assert_not_called() + + def test_force_overwrites_existing(self, sampledata_dir: Path) -> None: + """Verifies that existing files are overwritten when force=True. + + Args: + sampledata_dir (Path): The sample data directory. + """ + sampledata_dir.mkdir() + (sampledata_dir / "old.txt").write_text("old") + mock_urlopen = self._mock_urlopen(["data/new.sac"]) + with patch("aimbat.utils._sampledata.urlopen", mock_urlopen): + download_sampledata(force=True) + assert not (sampledata_dir / "old.txt").exists() + + def test_empty_dir_not_blocked(self, sampledata_dir: Path) -> None: + """Verifies that an existing empty directory does not block download. + + Args: + sampledata_dir (Path): The sample data directory. + """ + sampledata_dir.mkdir() + mock_urlopen = self._mock_urlopen(["data/file.sac"]) + with patch("aimbat.utils._sampledata.urlopen", mock_urlopen): + download_sampledata() + mock_urlopen.assert_called_once() + + def test_urlopen_called_with_src(self, sampledata_dir: Path) -> None: + """Verifies that urlopen is called with the configured source URL. + + Args: + sampledata_dir (Path): The sample data directory. 
+ """ + import aimbat + + mock_urlopen = self._mock_urlopen(["data/file.sac"]) + with patch("aimbat.utils._sampledata.urlopen", mock_urlopen): + download_sampledata() + mock_urlopen.assert_called_once_with(aimbat.settings.sampledata_src) diff --git a/tests/unit/utils/test_uuid.py b/tests/unit/utils/test_uuid.py new file mode 100644 index 00000000..c699e5ee --- /dev/null +++ b/tests/unit/utils/test_uuid.py @@ -0,0 +1,224 @@ +"""Unit tests for aimbat.utils._uuid.""" + +import uuid +from collections.abc import Generator + +import pandas as pd +import pytest +from sqlmodel import Session, SQLModel, create_engine + +from aimbat.models import AimbatEvent +from aimbat.utils._uuid import string_to_uuid, uuid_shortener + + +@pytest.fixture() +def session() -> Generator[Session, None, None]: + """Provide an in-memory SQLite session with all tables created. + + Yields: + Session: The database session. + """ + engine = create_engine( + "sqlite:///:memory:", connect_args={"check_same_thread": False} + ) + SQLModel.metadata.create_all(engine) + with Session(engine) as s: + yield s + engine.dispose() + + +def _make_event(uid: uuid.UUID, offset_seconds: int = 0) -> AimbatEvent: + """Helper to create an AimbatEvent with a specific UUID and time offset. + + Args: + uid (uuid.UUID): The UUID for the event. + offset_seconds (int): Time offset in seconds (default: 0). + + Returns: + AimbatEvent: The created event. + """ + return AimbatEvent( + id=uid, + time=pd.Timestamp("2000-01-01") + pd.Timedelta(seconds=offset_seconds), + latitude=0.0, + longitude=0.0, + depth=0.0, + ) + + +class TestStringToUuid: + """Tests for the string_to_uuid function.""" + + def test_resolves_full_uuid(self, session: Session) -> None: + """Verifies resolving a full UUID string. + + Args: + session (Session): Database session. 
+ """ + uid = uuid.uuid4() + session.add(_make_event(uid)) + session.commit() + result = string_to_uuid(session, str(uid), AimbatEvent) + assert result == uid + + def test_resolves_short_prefix(self, session: Session) -> None: + """Verifies resolving a UUID from a short prefix. + + Args: + session (Session): Database session. + """ + uid = uuid.uuid4() + session.add(_make_event(uid)) + session.commit() + prefix = str(uid).replace("-", "")[:6] + result = string_to_uuid(session, prefix, AimbatEvent) + assert result == uid + + def test_raises_on_no_match(self, session: Session) -> None: + """Verifies that ValueError is raised when no match is found. + + Args: + session (Session): Database session. + """ + with pytest.raises(ValueError, match="Unable to find"): + string_to_uuid(session, "000000", AimbatEvent) + + def test_raises_on_ambiguous_match(self, session: Session) -> None: + """Verifies that ValueError is raised when multiple matches are found. + + Args: + session (Session): Database session. + """ + # Force two UUIDs that share the same prefix by crafting them manually. + uid1 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000001") + uid2 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000002") + session.add(_make_event(uid1, offset_seconds=0)) + session.add(_make_event(uid2, offset_seconds=1)) + session.commit() + with pytest.raises(ValueError, match="more than one"): + string_to_uuid(session, "aaaaaaaa", AimbatEvent) + + def test_custom_error_message(self, session: Session) -> None: + """Verifies that a custom error message is used when provided. + + Args: + session (Session): Database session. + """ + with pytest.raises(ValueError, match="custom error"): + string_to_uuid(session, "000000", AimbatEvent, custom_error="custom error") + + def test_ignores_dashes_in_input(self, session: Session) -> None: + """Verifies that dashes in the input string are ignored. + + Args: + session (Session): Database session. 
+ """ + uid = uuid.UUID("abcdef12-1234-4000-8000-000000000001") + session.add(_make_event(uid)) + session.commit() + result = string_to_uuid(session, "abcdef12-1234", AimbatEvent) + assert result == uid + + +class TestUuidShortener: + """Tests for the uuid_shortener function.""" + + def test_returns_unique_prefix_for_single_entry(self, session: Session) -> None: + """Verifies getting a unique prefix for a single event. + + Args: + session (Session): Database session. + """ + uid = uuid.uuid4() + event = _make_event(uid) + session.add(event) + session.commit() + short = uuid_shortener(session, event) + assert str(uid).startswith(short) + + def test_prefix_is_shortest_unique(self, session: Session) -> None: + """Verifies that the returned prefix is the shortest possible unique prefix. + + Args: + session (Session): Database session. + """ + uid1 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000001") + uid2 = uuid.UUID("bbbbbbbb-0000-4000-8000-000000000002") + e1 = _make_event(uid1, offset_seconds=0) + e2 = _make_event(uid2, offset_seconds=1) + session.add(e1) + session.add(e2) + session.commit() + short1 = uuid_shortener(session, e1) + short2 = uuid_shortener(session, e2) + # Each prefix must uniquely identify its UUID. + assert str(uid1).startswith(short1) + assert str(uid2).startswith(short2) + assert not str(uid2).startswith(short1) + assert not str(uid1).startswith(short2) + + def test_disambiguates_shared_prefix(self, session: Session) -> None: + """Verifies disambiguation when prefixes are shared. + + Args: + session (Session): Database session. 
+ """ + uid1 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000001") + uid2 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000002") + e1 = _make_event(uid1, offset_seconds=0) + e2 = _make_event(uid2, offset_seconds=1) + session.add(e1) + session.add(e2) + session.commit() + short1 = uuid_shortener(session, e1) + short2 = uuid_shortener(session, e2) + assert short1 != short2 + assert str(uid1).startswith(short1) + assert str(uid2).startswith(short2) + + def test_class_form_with_str_uuid(self, session: Session) -> None: + """Verifies calling with class and string UUID. + + Args: + session (Session): Database session. + """ + uid = uuid.uuid4() + session.add(_make_event(uid)) + session.commit() + short = uuid_shortener(session, AimbatEvent, str_uuid=str(uid)) + assert str(uid).startswith(short) + + def test_class_form_requires_str_uuid(self, session: Session) -> None: + """Verifies that str_uuid is required when calling with a class. + + Args: + session (Session): Database session. + """ + with pytest.raises(ValueError, match="str_uuid must be provided"): + uuid_shortener(session, AimbatEvent) + + def test_raises_if_id_not_in_table(self, session: Session) -> None: + """Verifies that ValueError is raised if the ID is not in the table. + + Args: + session (Session): Database session. + """ + uid = uuid.uuid4() + # Do not add the event to the session. + with pytest.raises(ValueError, match="not found in table"): + uuid_shortener(session, AimbatEvent, str_uuid=str(uid)) + + def test_min_length_respected(self, session: Session) -> None: + """Verifies that the minimum length constraint is respected. + + Args: + session (Session): Database session. + """ + uid = uuid.uuid4() + session.add(_make_event(uid)) + session.commit() + event = session.get(AimbatEvent, uid) + assert event is not None, "expected event to exist in database" + short = uuid_shortener(session, event, min_length=4) + # Result must be at least 4 chars (excluding any trailing dash). 
+ assert len(short.replace("-", "")) >= 4 diff --git a/tests/utils/test_utils.py b/tests/utils/test_utils.py deleted file mode 100644 index 4bfdd7f4..00000000 --- a/tests/utils/test_utils.py +++ /dev/null @@ -1,184 +0,0 @@ -from aimbat._config import Settings -from aimbat.app import app -from pysmo.classes import SAC -from datetime import datetime, timezone -from typing import Any -from sqlmodel import Session -from sqlalchemy import Engine -from collections.abc import Generator -from pathlib import Path -import aimbat.utils._checkdata as _checkdata -import aimbat.utils._sampledata as _sampledata -import numpy as np -import os -import pytest - - -class TestUtilsBase: - @pytest.fixture - def session( - self, fixture_engine_session_with_active_event: tuple[Engine, Session] - ) -> Generator[Session, Any, Any]: - _, session = fixture_engine_session_with_active_event - yield session - - @pytest.fixture(autouse=True) - def download_dir( - self, - session: Session, - tmp_path_factory: pytest.TempPathFactory, - patch_settings: Settings, - ) -> Generator[Path, Any, Any]: - tmp_dir = tmp_path_factory.mktemp("download_dir") - patch_settings.sampledata_dir = tmp_dir - yield tmp_dir - - -class TestUtilsCheckData(TestUtilsBase): - def test_check_station_no_name(self, sac_instance_good: SAC) -> None: - assert sac_instance_good.station.name - _checkdata.checkdata_station(sac_instance_good.station) - sac_instance_good.kstnm = None - issues = _checkdata.checkdata_station(sac_instance_good.station) - assert "No station name" in issues[0] - - def test_check_station_no_latitude(self, sac_instance_good: SAC) -> None: - assert sac_instance_good.station.latitude - _checkdata.checkdata_station(sac_instance_good.station) - sac_instance_good.stla = None - issues = _checkdata.checkdata_station(sac_instance_good.station) - assert "No station latitude" in issues[0] - - def test_check_station_no_longitude(self, sac_instance_good: SAC) -> None: - assert sac_instance_good.station.longitude - 
_checkdata.checkdata_station(sac_instance_good.station) - sac_instance_good.stlo = None - issues = _checkdata.checkdata_station(sac_instance_good.station) - assert "No station longitude" in issues[0] - - def test_check_event_no_latitude(self, sac_instance_good: SAC) -> None: - assert sac_instance_good.event.latitude - _checkdata.checkdata_event(sac_instance_good.event) - sac_instance_good.evla = None - issues = _checkdata.checkdata_event(sac_instance_good.event) - assert "No event latitude" in issues[0] - - def test_check_event_no_longitude(self, sac_instance_good: SAC) -> None: - assert sac_instance_good.event.longitude - _checkdata.checkdata_event(sac_instance_good.event) - sac_instance_good.evlo = None - issues = _checkdata.checkdata_event(sac_instance_good.event) - assert "No event longitude" in issues[0] - - def test_check_event_no_time(self, sac_instance_good: SAC) -> None: - assert sac_instance_good.event.time - _checkdata.checkdata_event(sac_instance_good.event) - sac_instance_good.o = None - issues = _checkdata.checkdata_event(sac_instance_good.event) - assert "No event time" in issues[0] - - def test_check_seismogram_no_begin_time(self, sac_instance_good: SAC) -> None: - assert len(sac_instance_good.seismogram.data) > 0 - _checkdata.checkdata_seismogram(sac_instance_good.seismogram) - sac_instance_good.seismogram.data = np.array([]) - issues = _checkdata.checkdata_seismogram(sac_instance_good.seismogram) - assert "No seismogram data" in issues[0] - - def test_cli_usage(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["utils", "checkdata", "--help"]) - - assert excinfo.value.code == 0 - - captured = capsys.readouterr() - assert "Usage" in captured.out - - def test_cli_checkdata( - self, tmp_path_factory: pytest.TempPathFactory, capsys: pytest.CaptureFixture - ) -> None: - """Test AIMBAT cli with checkdata subcommand.""" - - testfile = str(tmp_path_factory.mktemp("checkdata")) + "/test.sac" - - sac = SAC() - 
sac.write(testfile) - - with pytest.raises(SystemExit) as excinfo: - app(["utils", "checkdata", testfile]) - assert excinfo.value.code == 0 - output = capsys.readouterr().out - for item in ["name", "latitude", "longitude"]: - assert f"No station {item} found in file" in output - for item in ["time", "latitude", "longitude"]: - assert f"No event {item} found in file" in output - assert "No seismogram data found in file" in output - - sac.station.name = "test" - sac.station.latitude = 1.1 - sac.station.longitude = -23 - sac.event.time = datetime.now(timezone.utc) - sac.event.latitude = 33 - sac.event.longitude = 19.1 - sac.seismogram.data = np.random.rand(100) - sac.write(testfile) - with pytest.raises(SystemExit) as excinfo: - app(["utils", "checkdata", testfile]) - assert excinfo.value.code == 0 - output = capsys.readouterr().out - for item in ["name", "latitude", "longitude"]: - assert f"No station {item} found in file" not in output - for item in ["time", "latitude", "longitude"]: - assert f"No event {item} found in file" not in output - assert "No seismogram data found in file" not in output - - -class TestUtilsSampleData(TestUtilsBase): - @pytest.mark.dependency(name="download_sampledata") - def test_lib_download_sampledata(self, download_dir: Path) -> None: - assert len(os.listdir(download_dir)) == 0 - _sampledata.download_sampledata() - assert len(os.listdir(download_dir)) > 0 - with pytest.raises(FileExistsError): - _sampledata.download_sampledata() - _sampledata.download_sampledata(force=True) - - @pytest.mark.dependency(depends=["download_sampledata"]) - def test_lib_delete_sampledata(self, download_dir: Path) -> None: - _sampledata.download_sampledata() - assert len(os.listdir(download_dir)) > 0 - _sampledata.delete_sampledata() - assert download_dir.exists() is False - - def test_cli_usage(self, capsys: pytest.CaptureFixture) -> None: - with pytest.raises(SystemExit) as excinfo: - app(["utils", "sampledata", "--help"]) - assert excinfo.value.code == 0 - 
assert "Usage" in capsys.readouterr().out - - def test_cli_download_sampledata(self, download_dir: Path) -> None: - assert len(os.listdir((download_dir))) == 0 - with pytest.raises(SystemExit) as excinfo: - app(["utils", "sampledata", "download"]) - assert excinfo.value.code == 0 - assert len(os.listdir((download_dir))) > 0 - - # can't download if it is already there - with pytest.raises(FileExistsError): - app(["utils", "sampledata", "download"]) - - # unless we use force - with pytest.raises(SystemExit) as excinfo: - app(["utils", "sampledata", "download", "--force"]) - assert excinfo.value.code == 0 - - def test_cli__delete_sampledata(self, download_dir: Path) -> None: - assert len(os.listdir((download_dir))) == 0 - with pytest.raises(SystemExit) as excinfo: - app(["utils", "sampledata", "download"]) - assert excinfo.value.code == 0 - assert len(os.listdir((download_dir))) > 0 - - with pytest.raises(SystemExit) as excinfo: - app(["utils", "sampledata", "delete"]) - assert excinfo.value.code == 0 - assert not download_dir.exists() diff --git a/uv.lock b/uv.lock index 607ad101..98c91744 100644 --- a/uv.lock +++ b/uv.lock @@ -368,7 +368,7 @@ wheels = [ [[package]] name = "cyclopts" -version = "4.5.4" +version = "4.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -376,9 +376,9 @@ dependencies = [ { name = "rich" }, { name = "rich-rst" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3b/d2/f37df900b163f51b4faacdb01bf4895c198906d67c5b2a85c2522de85459/cyclopts-4.5.4.tar.gz", hash = "sha256:eed4d6c76d4391aa796d8fcaabd50e5aad7793261792beb19285f62c5c456c8b", size = 162438, upload-time = "2026-02-20T00:58:46.161Z" } +sdist = { url = "https://files.pythonhosted.org/packages/49/5c/88a4068c660a096bbe87efc5b7c190080c9e86919c36ec5f092cb08d852f/cyclopts-4.6.0.tar.gz", hash = "sha256:483c4704b953ea6da742e8de15972f405d2e748d19a848a4d61595e8e5360ee5", size = 162724, upload-time = "2026-02-23T15:44:49.286Z" } wheels = [ - { 
url = "https://files.pythonhosted.org/packages/a9/0f/119fa63fa93e0a331fbedcb27162d8f88d3ba2f38eba1567e3e44307b857/cyclopts-4.5.4-py3-none-any.whl", hash = "sha256:ad001986ec403ca1dc1ed20375c439d62ac796295ea32b451dfe25d6696bc71a", size = 200225, upload-time = "2026-02-20T00:58:47.275Z" }, + { url = "https://files.pythonhosted.org/packages/8f/eb/1e8337755a70dc7d7ff10a73dc8f20e9352c9ad6c2256ed863ac95cd3539/cyclopts-4.6.0-py3-none-any.whl", hash = "sha256:0a891cb55bfd79a3cdce024db8987b33316aba11071e5258c21ac12a640ba9f2", size = 200518, upload-time = "2026-02-23T15:44:47.854Z" }, ] [[package]] @@ -1102,14 +1102,14 @@ wheels = [ [[package]] name = "optype" -version = "0.15.0" +version = "0.16.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d7/93/6b9e43138ce36fbad134bd1a50460a7bbda61105b5a964e4cf773fe4d845/optype-0.15.0.tar.gz", hash = "sha256:457d6ca9e7da19967ec16d42bdf94e240b33b5d70a56fbbf5b427e5ea39cf41e", size = 99978, upload-time = "2025-12-08T12:32:41.422Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/d3/c88bb4bd90867356275ca839499313851af4b36fce6919ebc5e1de26e7ca/optype-0.16.0.tar.gz", hash = "sha256:fa682fd629ef6b70ba656ebc9fdd6614ba06ce13f52e0416dd8014c7e691a2d1", size = 53498, upload-time = "2026-02-19T23:37:09.495Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/8b/93f6c496fc5da062fd7e7c4745b5a8dd09b7b576c626075844fe97951a7d/optype-0.15.0-py3-none-any.whl", hash = "sha256:caba40ece9ea39b499fa76c036a82e0d452a432dd4dd3e8e0d30892be2e8c76c", size = 88716, upload-time = "2025-12-08T12:32:39.669Z" }, + { url = "https://files.pythonhosted.org/packages/91/a8/fe26515203cff140f1afc31236fb7f703d4bb4bd5679d28afcb3661c8d9f/optype-0.16.0-py3-none-any.whl", hash = "sha256:c28905713f55630b4bb8948f38e027ad13a541499ebcf957501f486da54b74d2", size = 65893, upload-time = 
"2026-02-19T23:37:08.217Z" }, ] [package.optional-dependencies] @@ -1536,8 +1536,8 @@ wheels = [ [[package]] name = "pysmo" -version = "1.0.0.dev12+g1a71df1ca" -source = { git = "https://github.com/pysmo/pysmo?rev=master#1a71df1caeb3737fac7478424412f5898b73302a" } +version = "1.0.0.dev19+g8ccdd62f2" +source = { git = "https://github.com/pysmo/pysmo?rev=master#8ccdd62f20d00e619295e5ea9a1fdb8453c28284" } dependencies = [ { name = "attrs" }, { name = "attrs-strict" }, @@ -1811,75 +1811,75 @@ wheels = [ [[package]] name = "scipy" -version = "1.17.0" +version = "1.17.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/56/3e/9cca699f3486ce6bc12ff46dc2031f1ec8eb9ccc9a320fdaf925f1417426/scipy-1.17.0.tar.gz", hash = "sha256:2591060c8e648d8b96439e111ac41fd8342fdeff1876be2e19dea3fe8930454e", size = 30396830, upload-time = "2026-01-10T21:34:23.009Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0b/11/7241a63e73ba5a516f1930ac8d5b44cbbfabd35ac73a2d08ca206df007c4/scipy-1.17.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:0d5018a57c24cb1dd828bcf51d7b10e65986d549f52ef5adb6b4d1ded3e32a57", size = 31364580, upload-time = "2026-01-10T21:25:25.717Z" }, - { url = "https://files.pythonhosted.org/packages/ed/1d/5057f812d4f6adc91a20a2d6f2ebcdb517fdbc87ae3acc5633c9b97c8ba5/scipy-1.17.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:88c22af9e5d5a4f9e027e26772cc7b5922fab8bcc839edb3ae33de404feebd9e", size = 27969012, upload-time = "2026-01-10T21:25:30.921Z" }, - { url = "https://files.pythonhosted.org/packages/e3/21/f6ec556c1e3b6ec4e088da667d9987bb77cc3ab3026511f427dc8451187d/scipy-1.17.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f3cd947f20fe17013d401b64e857c6b2da83cae567adbb75b9dcba865abc66d8", size = 20140691, upload-time = "2026-01-10T21:25:34.802Z" }, - { url = 
"https://files.pythonhosted.org/packages/7a/fe/5e5ad04784964ba964a96f16c8d4676aa1b51357199014dce58ab7ec5670/scipy-1.17.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e8c0b331c2c1f531eb51f1b4fc9ba709521a712cce58f1aa627bc007421a5306", size = 22463015, upload-time = "2026-01-10T21:25:39.277Z" }, - { url = "https://files.pythonhosted.org/packages/4a/69/7c347e857224fcaf32a34a05183b9d8a7aca25f8f2d10b8a698b8388561a/scipy-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5194c445d0a1c7a6c1a4a4681b6b7c71baad98ff66d96b949097e7513c9d6742", size = 32724197, upload-time = "2026-01-10T21:25:44.084Z" }, - { url = "https://files.pythonhosted.org/packages/d1/fe/66d73b76d378ba8cc2fe605920c0c75092e3a65ae746e1e767d9d020a75a/scipy-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9eeb9b5f5997f75507814ed9d298ab23f62cf79f5a3ef90031b1ee2506abdb5b", size = 35009148, upload-time = "2026-01-10T21:25:50.591Z" }, - { url = "https://files.pythonhosted.org/packages/af/07/07dec27d9dc41c18d8c43c69e9e413431d20c53a0339c388bcf72f353c4b/scipy-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:40052543f7bbe921df4408f46003d6f01c6af109b9e2c8a66dd1cf6cf57f7d5d", size = 34798766, upload-time = "2026-01-10T21:25:59.41Z" }, - { url = "https://files.pythonhosted.org/packages/81/61/0470810c8a093cdacd4ba7504b8a218fd49ca070d79eca23a615f5d9a0b0/scipy-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0cf46c8013fec9d3694dc572f0b54100c28405d55d3e2cb15e2895b25057996e", size = 37405953, upload-time = "2026-01-10T21:26:07.75Z" }, - { url = "https://files.pythonhosted.org/packages/92/ce/672ed546f96d5d41ae78c4b9b02006cedd0b3d6f2bf5bb76ea455c320c28/scipy-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:0937a0b0d8d593a198cededd4c439a0ea216a3f36653901ea1f3e4be949056f8", size = 36328121, upload-time = "2026-01-10T21:26:16.509Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/21/38165845392cae67b61843a52c6455d47d0cc2a40dd495c89f4362944654/scipy-1.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:f603d8a5518c7426414d1d8f82e253e454471de682ce5e39c29adb0df1efb86b", size = 24314368, upload-time = "2026-01-10T21:26:23.087Z" }, - { url = "https://files.pythonhosted.org/packages/0c/51/3468fdfd49387ddefee1636f5cf6d03ce603b75205bf439bbf0e62069bfd/scipy-1.17.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:65ec32f3d32dfc48c72df4291345dae4f048749bc8d5203ee0a3f347f96c5ce6", size = 31344101, upload-time = "2026-01-10T21:26:30.25Z" }, - { url = "https://files.pythonhosted.org/packages/b2/9a/9406aec58268d437636069419e6977af953d1e246df941d42d3720b7277b/scipy-1.17.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:1f9586a58039d7229ce77b52f8472c972448cded5736eaf102d5658bbac4c269", size = 27950385, upload-time = "2026-01-10T21:26:36.801Z" }, - { url = "https://files.pythonhosted.org/packages/4f/98/e7342709e17afdfd1b26b56ae499ef4939b45a23a00e471dfb5375eea205/scipy-1.17.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9fad7d3578c877d606b1150135c2639e9de9cecd3705caa37b66862977cc3e72", size = 20122115, upload-time = "2026-01-10T21:26:42.107Z" }, - { url = "https://files.pythonhosted.org/packages/fd/0e/9eeeb5357a64fd157cbe0302c213517c541cc16b8486d82de251f3c68ede/scipy-1.17.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:423ca1f6584fc03936972b5f7c06961670dbba9f234e71676a7c7ccf938a0d61", size = 22442402, upload-time = "2026-01-10T21:26:48.029Z" }, - { url = "https://files.pythonhosted.org/packages/c9/10/be13397a0e434f98e0c79552b2b584ae5bb1c8b2be95db421533bbca5369/scipy-1.17.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe508b5690e9eaaa9467fc047f833af58f1152ae51a0d0aed67aa5801f4dd7d6", size = 32696338, upload-time = "2026-01-10T21:26:55.521Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/1e/12fbf2a3bb240161651c94bb5cdd0eae5d4e8cc6eaeceb74ab07b12a753d/scipy-1.17.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6680f2dfd4f6182e7d6db161344537da644d1cf85cf293f015c60a17ecf08752", size = 34977201, upload-time = "2026-01-10T21:27:03.501Z" }, - { url = "https://files.pythonhosted.org/packages/19/5b/1a63923e23ccd20bd32156d7dd708af5bbde410daa993aa2500c847ab2d2/scipy-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eec3842ec9ac9de5917899b277428886042a93db0b227ebbe3a333b64ec7643d", size = 34777384, upload-time = "2026-01-10T21:27:11.423Z" }, - { url = "https://files.pythonhosted.org/packages/39/22/b5da95d74edcf81e540e467202a988c50fef41bd2011f46e05f72ba07df6/scipy-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d7425fcafbc09a03731e1bc05581f5fad988e48c6a861f441b7ab729a49a55ea", size = 37379586, upload-time = "2026-01-10T21:27:20.171Z" }, - { url = "https://files.pythonhosted.org/packages/b9/b6/8ac583d6da79e7b9e520579f03007cb006f063642afd6b2eeb16b890bf93/scipy-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:87b411e42b425b84777718cc41516b8a7e0795abfa8e8e1d573bf0ef014f0812", size = 36287211, upload-time = "2026-01-10T21:28:43.122Z" }, - { url = "https://files.pythonhosted.org/packages/55/fb/7db19e0b3e52f882b420417644ec81dd57eeef1bd1705b6f689d8ff93541/scipy-1.17.0-cp313-cp313-win_arm64.whl", hash = "sha256:357ca001c6e37601066092e7c89cca2f1ce74e2a520ca78d063a6d2201101df2", size = 24312646, upload-time = "2026-01-10T21:28:49.893Z" }, - { url = "https://files.pythonhosted.org/packages/20/b6/7feaa252c21cc7aff335c6c55e1b90ab3e3306da3f048109b8b639b94648/scipy-1.17.0-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:ec0827aa4d36cb79ff1b81de898e948a51ac0b9b1c43e4a372c0508c38c0f9a3", size = 31693194, upload-time = "2026-01-10T21:27:27.454Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/bb/bbb392005abce039fb7e672cb78ac7d158700e826b0515cab6b5b60c26fb/scipy-1.17.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:819fc26862b4b3c73a60d486dbb919202f3d6d98c87cf20c223511429f2d1a97", size = 28365415, upload-time = "2026-01-10T21:27:34.26Z" }, - { url = "https://files.pythonhosted.org/packages/37/da/9d33196ecc99fba16a409c691ed464a3a283ac454a34a13a3a57c0d66f3a/scipy-1.17.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:363ad4ae2853d88ebcde3ae6ec46ccca903ea9835ee8ba543f12f575e7b07e4e", size = 20537232, upload-time = "2026-01-10T21:27:40.306Z" }, - { url = "https://files.pythonhosted.org/packages/56/9d/f4b184f6ddb28e9a5caea36a6f98e8ecd2a524f9127354087ce780885d83/scipy-1.17.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:979c3a0ff8e5ba254d45d59ebd38cde48fce4f10b5125c680c7a4bfe177aab07", size = 22791051, upload-time = "2026-01-10T21:27:46.539Z" }, - { url = "https://files.pythonhosted.org/packages/9b/9d/025cccdd738a72140efc582b1641d0dd4caf2e86c3fb127568dc80444e6e/scipy-1.17.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:130d12926ae34399d157de777472bf82e9061c60cc081372b3118edacafe1d00", size = 32815098, upload-time = "2026-01-10T21:27:54.389Z" }, - { url = "https://files.pythonhosted.org/packages/48/5f/09b879619f8bca15ce392bfc1894bd9c54377e01d1b3f2f3b595a1b4d945/scipy-1.17.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e886000eb4919eae3a44f035e63f0fd8b651234117e8f6f29bad1cd26e7bc45", size = 35031342, upload-time = "2026-01-10T21:28:03.012Z" }, - { url = "https://files.pythonhosted.org/packages/f2/9a/f0f0a9f0aa079d2f106555b984ff0fbb11a837df280f04f71f056ea9c6e4/scipy-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:13c4096ac6bc31d706018f06a49abe0485f96499deb82066b94d19b02f664209", size = 34893199, upload-time = "2026-01-10T21:28:10.832Z" }, - { url = 
"https://files.pythonhosted.org/packages/90/b8/4f0f5cf0c5ea4d7548424e6533e6b17d164f34a6e2fb2e43ffebb6697b06/scipy-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cacbaddd91fcffde703934897c5cd2c7cb0371fac195d383f4e1f1c5d3f3bd04", size = 37438061, upload-time = "2026-01-10T21:28:19.684Z" }, - { url = "https://files.pythonhosted.org/packages/f9/cc/2bd59140ed3b2fa2882fb15da0a9cb1b5a6443d67cfd0d98d4cec83a57ec/scipy-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:edce1a1cf66298cccdc48a1bdf8fb10a3bf58e8b58d6c3883dd1530e103f87c0", size = 36328593, upload-time = "2026-01-10T21:28:28.007Z" }, - { url = "https://files.pythonhosted.org/packages/13/1b/c87cc44a0d2c7aaf0f003aef2904c3d097b422a96c7e7c07f5efd9073c1b/scipy-1.17.0-cp313-cp313t-win_arm64.whl", hash = "sha256:30509da9dbec1c2ed8f168b8d8aa853bc6723fede1dbc23c7d43a56f5ab72a67", size = 24625083, upload-time = "2026-01-10T21:28:35.188Z" }, - { url = "https://files.pythonhosted.org/packages/1a/2d/51006cd369b8e7879e1c630999a19d1fbf6f8b5ed3e33374f29dc87e53b3/scipy-1.17.0-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:c17514d11b78be8f7e6331b983a65a7f5ca1fd037b95e27b280921fe5606286a", size = 31346803, upload-time = "2026-01-10T21:28:57.24Z" }, - { url = "https://files.pythonhosted.org/packages/d6/2e/2349458c3ce445f53a6c93d4386b1c4c5c0c540917304c01222ff95ff317/scipy-1.17.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:4e00562e519c09da34c31685f6acc3aa384d4d50604db0f245c14e1b4488bfa2", size = 27967182, upload-time = "2026-01-10T21:29:04.107Z" }, - { url = "https://files.pythonhosted.org/packages/5e/7c/df525fbfa77b878d1cfe625249529514dc02f4fd5f45f0f6295676a76528/scipy-1.17.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:f7df7941d71314e60a481e02d5ebcb3f0185b8d799c70d03d8258f6c80f3d467", size = 20139125, upload-time = "2026-01-10T21:29:10.179Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/11/fcf9d43a7ed1234d31765ec643b0515a85a30b58eddccc5d5a4d12b5f194/scipy-1.17.0-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:aabf057c632798832f071a8dde013c2e26284043934f53b00489f1773b33527e", size = 22443554, upload-time = "2026-01-10T21:29:15.888Z" }, - { url = "https://files.pythonhosted.org/packages/80/5c/ea5d239cda2dd3d31399424967a24d556cf409fbea7b5b21412b0fd0a44f/scipy-1.17.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a38c3337e00be6fd8a95b4ed66b5d988bac4ec888fd922c2ea9fe5fb1603dd67", size = 32757834, upload-time = "2026-01-10T21:29:23.406Z" }, - { url = "https://files.pythonhosted.org/packages/b8/7e/8c917cc573310e5dc91cbeead76f1b600d3fb17cf0969db02c9cf92e3cfa/scipy-1.17.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00fb5f8ec8398ad90215008d8b6009c9db9fa924fd4c7d6be307c6f945f9cd73", size = 34995775, upload-time = "2026-01-10T21:29:31.915Z" }, - { url = "https://files.pythonhosted.org/packages/c5/43/176c0c3c07b3f7df324e7cdd933d3e2c4898ca202b090bd5ba122f9fe270/scipy-1.17.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f2a4942b0f5f7c23c7cd641a0ca1955e2ae83dedcff537e3a0259096635e186b", size = 34841240, upload-time = "2026-01-10T21:29:39.995Z" }, - { url = "https://files.pythonhosted.org/packages/44/8c/d1f5f4b491160592e7f084d997de53a8e896a3ac01cd07e59f43ca222744/scipy-1.17.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:dbf133ced83889583156566d2bdf7a07ff89228fe0c0cb727f777de92092ec6b", size = 37394463, upload-time = "2026-01-10T21:29:48.723Z" }, - { url = "https://files.pythonhosted.org/packages/9f/ec/42a6657f8d2d087e750e9a5dde0b481fd135657f09eaf1cf5688bb23c338/scipy-1.17.0-cp314-cp314-win_amd64.whl", hash = "sha256:3625c631a7acd7cfd929e4e31d2582cf00f42fcf06011f59281271746d77e061", size = 37053015, upload-time = "2026-01-10T21:30:51.418Z" }, - { url = 
"https://files.pythonhosted.org/packages/27/58/6b89a6afd132787d89a362d443a7bddd511b8f41336a1ae47f9e4f000dc4/scipy-1.17.0-cp314-cp314-win_arm64.whl", hash = "sha256:9244608d27eafe02b20558523ba57f15c689357c85bdcfe920b1828750aa26eb", size = 24951312, upload-time = "2026-01-10T21:30:56.771Z" }, - { url = "https://files.pythonhosted.org/packages/e9/01/f58916b9d9ae0112b86d7c3b10b9e685625ce6e8248df139d0fcb17f7397/scipy-1.17.0-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:2b531f57e09c946f56ad0b4a3b2abee778789097871fc541e267d2eca081cff1", size = 31706502, upload-time = "2026-01-10T21:29:56.326Z" }, - { url = "https://files.pythonhosted.org/packages/59/8e/2912a87f94a7d1f8b38aabc0faf74b82d3b6c9e22be991c49979f0eceed8/scipy-1.17.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:13e861634a2c480bd237deb69333ac79ea1941b94568d4b0efa5db5e263d4fd1", size = 28380854, upload-time = "2026-01-10T21:30:01.554Z" }, - { url = "https://files.pythonhosted.org/packages/bd/1c/874137a52dddab7d5d595c1887089a2125d27d0601fce8c0026a24a92a0b/scipy-1.17.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:eb2651271135154aa24f6481cbae5cc8af1f0dd46e6533fb7b56aa9727b6a232", size = 20552752, upload-time = "2026-01-10T21:30:05.93Z" }, - { url = "https://files.pythonhosted.org/packages/3f/f0/7518d171cb735f6400f4576cf70f756d5b419a07fe1867da34e2c2c9c11b/scipy-1.17.0-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:c5e8647f60679790c2f5c76be17e2e9247dc6b98ad0d3b065861e082c56e078d", size = 22803972, upload-time = "2026-01-10T21:30:10.651Z" }, - { url = "https://files.pythonhosted.org/packages/7c/74/3498563a2c619e8a3ebb4d75457486c249b19b5b04a30600dfd9af06bea5/scipy-1.17.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5fb10d17e649e1446410895639f3385fd2bf4c3c7dfc9bea937bddcbc3d7b9ba", size = 32829770, upload-time = "2026-01-10T21:30:16.359Z" }, - { url = 
"https://files.pythonhosted.org/packages/48/d1/7b50cedd8c6c9d6f706b4b36fa8544d829c712a75e370f763b318e9638c1/scipy-1.17.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8547e7c57f932e7354a2319fab613981cde910631979f74c9b542bb167a8b9db", size = 35051093, upload-time = "2026-01-10T21:30:22.987Z" }, - { url = "https://files.pythonhosted.org/packages/e2/82/a2d684dfddb87ba1b3ea325df7c3293496ee9accb3a19abe9429bce94755/scipy-1.17.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33af70d040e8af9d5e7a38b5ed3b772adddd281e3062ff23fec49e49681c38cf", size = 34909905, upload-time = "2026-01-10T21:30:28.704Z" }, - { url = "https://files.pythonhosted.org/packages/ef/5e/e565bd73991d42023eb82bb99e51c5b3d9e2c588ca9d4b3e2cc1d3ca62a6/scipy-1.17.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb55bb97d00f8b7ab95cb64f873eb0bf54d9446264d9f3609130381233483f", size = 37457743, upload-time = "2026-01-10T21:30:34.819Z" }, - { url = "https://files.pythonhosted.org/packages/58/a8/a66a75c3d8f1fb2b83f66007d6455a06a6f6cf5618c3dc35bc9b69dd096e/scipy-1.17.0-cp314-cp314t-win_amd64.whl", hash = "sha256:1ff269abf702f6c7e67a4b7aad981d42871a11b9dd83c58d2d2ea624efbd1088", size = 37098574, upload-time = "2026-01-10T21:30:40.782Z" }, - { url = "https://files.pythonhosted.org/packages/56/a5/df8f46ef7da168f1bc52cd86e09a9de5c6f19cc1da04454d51b7d4f43408/scipy-1.17.0-cp314-cp314t-win_arm64.whl", hash = "sha256:031121914e295d9791319a1875444d55079885bbae5bdc9c5e0f2ee5f09d34ff", size = 25246266, upload-time = "2026-01-10T21:30:45.923Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/7a/97/5a3609c4f8d58b039179648e62dd220f89864f56f7357f5d4f45c29eb2cc/scipy-1.17.1.tar.gz", hash = "sha256:95d8e012d8cb8816c226aef832200b1d45109ed4464303e997c5b13122b297c0", size = 30573822, upload-time = "2026-02-23T00:26:24.851Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/35/48/b992b488d6f299dbe3f11a20b24d3dda3d46f1a635ede1c46b5b17a7b163/scipy-1.17.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:35c3a56d2ef83efc372eaec584314bd0ef2e2f0d2adb21c55e6ad5b344c0dcb8", size = 31610954, upload-time = "2026-02-23T00:17:49.855Z" }, + { url = "https://files.pythonhosted.org/packages/b2/02/cf107b01494c19dc100f1d0b7ac3cc08666e96ba2d64db7626066cee895e/scipy-1.17.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:fcb310ddb270a06114bb64bbe53c94926b943f5b7f0842194d585c65eb4edd76", size = 28172662, upload-time = "2026-02-23T00:18:01.64Z" }, + { url = "https://files.pythonhosted.org/packages/cf/a9/599c28631bad314d219cf9ffd40e985b24d603fc8a2f4ccc5ae8419a535b/scipy-1.17.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:cc90d2e9c7e5c7f1a482c9875007c095c3194b1cfedca3c2f3291cdc2bc7c086", size = 20344366, upload-time = "2026-02-23T00:18:12.015Z" }, + { url = "https://files.pythonhosted.org/packages/35/f5/906eda513271c8deb5af284e5ef0206d17a96239af79f9fa0aebfe0e36b4/scipy-1.17.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:c80be5ede8f3f8eded4eff73cc99a25c388ce98e555b17d31da05287015ffa5b", size = 22704017, upload-time = "2026-02-23T00:18:21.502Z" }, + { url = "https://files.pythonhosted.org/packages/da/34/16f10e3042d2f1d6b66e0428308ab52224b6a23049cb2f5c1756f713815f/scipy-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e19ebea31758fac5893a2ac360fedd00116cbb7628e650842a6691ba7ca28a21", size = 32927842, upload-time = "2026-02-23T00:18:35.367Z" }, + { url = "https://files.pythonhosted.org/packages/01/8e/1e35281b8ab6d5d72ebe9911edcdffa3f36b04ed9d51dec6dd140396e220/scipy-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02ae3b274fde71c5e92ac4d54bc06c42d80e399fec704383dcd99b301df37458", size = 35235890, upload-time = "2026-02-23T00:18:49.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/5c/9d7f4c88bea6e0d5a4f1bc0506a53a00e9fcb198de372bfe4d3652cef482/scipy-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a604bae87c6195d8b1045eddece0514d041604b14f2727bbc2b3020172045eb", size = 35003557, upload-time = "2026-02-23T00:18:54.74Z" }, + { url = "https://files.pythonhosted.org/packages/65/94/7698add8f276dbab7a9de9fb6b0e02fc13ee61d51c7c3f85ac28b65e1239/scipy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f590cd684941912d10becc07325a3eeb77886fe981415660d9265c4c418d0bea", size = 37625856, upload-time = "2026-02-23T00:19:00.307Z" }, + { url = "https://files.pythonhosted.org/packages/a2/84/dc08d77fbf3d87d3ee27f6a0c6dcce1de5829a64f2eae85a0ecc1f0daa73/scipy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:41b71f4a3a4cab9d366cd9065b288efc4d4f3c0b37a91a8e0947fb5bd7f31d87", size = 36549682, upload-time = "2026-02-23T00:19:07.67Z" }, + { url = "https://files.pythonhosted.org/packages/bc/98/fe9ae9ffb3b54b62559f52dedaebe204b408db8109a8c66fdd04869e6424/scipy-1.17.1-cp312-cp312-win_arm64.whl", hash = "sha256:f4115102802df98b2b0db3cce5cb9b92572633a1197c77b7553e5203f284a5b3", size = 24547340, upload-time = "2026-02-23T00:19:12.024Z" }, + { url = "https://files.pythonhosted.org/packages/76/27/07ee1b57b65e92645f219b37148a7e7928b82e2b5dbeccecb4dff7c64f0b/scipy-1.17.1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:5e3c5c011904115f88a39308379c17f91546f77c1667cea98739fe0fccea804c", size = 31590199, upload-time = "2026-02-23T00:19:17.192Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ae/db19f8ab842e9b724bf5dbb7db29302a91f1e55bc4d04b1025d6d605a2c5/scipy-1.17.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:6fac755ca3d2c3edcb22f479fceaa241704111414831ddd3bc6056e18516892f", size = 28154001, upload-time = "2026-02-23T00:19:22.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/58/3ce96251560107b381cbd6e8413c483bbb1228a6b919fa8652b0d4090e7f/scipy-1.17.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:7ff200bf9d24f2e4d5dc6ee8c3ac64d739d3a89e2326ba68aaf6c4a2b838fd7d", size = 20325719, upload-time = "2026-02-23T00:19:26.329Z" }, + { url = "https://files.pythonhosted.org/packages/b2/83/15087d945e0e4d48ce2377498abf5ad171ae013232ae31d06f336e64c999/scipy-1.17.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4b400bdc6f79fa02a4d86640310dde87a21fba0c979efff5248908c6f15fad1b", size = 22683595, upload-time = "2026-02-23T00:19:30.304Z" }, + { url = "https://files.pythonhosted.org/packages/b4/e0/e58fbde4a1a594c8be8114eb4aac1a55bcd6587047efc18a61eb1f5c0d30/scipy-1.17.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b64ca7d4aee0102a97f3ba22124052b4bd2152522355073580bf4845e2550b6", size = 32896429, upload-time = "2026-02-23T00:19:35.536Z" }, + { url = "https://files.pythonhosted.org/packages/f5/5f/f17563f28ff03c7b6799c50d01d5d856a1d55f2676f537ca8d28c7f627cd/scipy-1.17.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:581b2264fc0aa555f3f435a5944da7504ea3a065d7029ad60e7c3d1ae09c5464", size = 35203952, upload-time = "2026-02-23T00:19:42.259Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a5/9afd17de24f657fdfe4df9a3f1ea049b39aef7c06000c13db1530d81ccca/scipy-1.17.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:beeda3d4ae615106d7094f7e7cef6218392e4465cc95d25f900bebabfded0950", size = 34979063, upload-time = "2026-02-23T00:19:47.547Z" }, + { url = "https://files.pythonhosted.org/packages/8b/13/88b1d2384b424bf7c924f2038c1c409f8d88bb2a8d49d097861dd64a57b2/scipy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6609bc224e9568f65064cfa72edc0f24ee6655b47575954ec6339534b2798369", size = 37598449, upload-time = "2026-02-23T00:19:53.238Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/e5/d6d0e51fc888f692a35134336866341c08655d92614f492c6860dc45bb2c/scipy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:37425bc9175607b0268f493d79a292c39f9d001a357bebb6b88fdfaff13f6448", size = 36510943, upload-time = "2026-02-23T00:20:50.89Z" }, + { url = "https://files.pythonhosted.org/packages/2a/fd/3be73c564e2a01e690e19cc618811540ba5354c67c8680dce3281123fb79/scipy-1.17.1-cp313-cp313-win_arm64.whl", hash = "sha256:5cf36e801231b6a2059bf354720274b7558746f3b1a4efb43fcf557ccd484a87", size = 24545621, upload-time = "2026-02-23T00:20:55.871Z" }, + { url = "https://files.pythonhosted.org/packages/6f/6b/17787db8b8114933a66f9dcc479a8272e4b4da75fe03b0c282f7b0ade8cd/scipy-1.17.1-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:d59c30000a16d8edc7e64152e30220bfbd724c9bbb08368c054e24c651314f0a", size = 31936708, upload-time = "2026-02-23T00:19:58.694Z" }, + { url = "https://files.pythonhosted.org/packages/38/2e/524405c2b6392765ab1e2b722a41d5da33dc5c7b7278184a8ad29b6cb206/scipy-1.17.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:010f4333c96c9bb1a4516269e33cb5917b08ef2166d5556ca2fd9f082a9e6ea0", size = 28570135, upload-time = "2026-02-23T00:20:03.934Z" }, + { url = "https://files.pythonhosted.org/packages/fd/c3/5bd7199f4ea8556c0c8e39f04ccb014ac37d1468e6cfa6a95c6b3562b76e/scipy-1.17.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2ceb2d3e01c5f1d83c4189737a42d9cb2fc38a6eeed225e7515eef71ad301dce", size = 20741977, upload-time = "2026-02-23T00:20:07.935Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b8/8ccd9b766ad14c78386599708eb745f6b44f08400a5fd0ade7cf89b6fc93/scipy-1.17.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:844e165636711ef41f80b4103ed234181646b98a53c8f05da12ca5ca289134f6", size = 23029601, upload-time = "2026-02-23T00:20:12.161Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/a0/3cb6f4d2fb3e17428ad2880333cac878909ad1a89f678527b5328b93c1d4/scipy-1.17.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:158dd96d2207e21c966063e1635b1063cd7787b627b6f07305315dd73d9c679e", size = 33019667, upload-time = "2026-02-23T00:20:17.208Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c3/2d834a5ac7bf3a0c806ad1508efc02dda3c8c61472a56132d7894c312dea/scipy-1.17.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74cbb80d93260fe2ffa334efa24cb8f2f0f622a9b9febf8b483c0b865bfb3475", size = 35264159, upload-time = "2026-02-23T00:20:23.087Z" }, + { url = "https://files.pythonhosted.org/packages/4d/77/d3ed4becfdbd217c52062fafe35a72388d1bd82c2d0ba5ca19d6fcc93e11/scipy-1.17.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dbc12c9f3d185f5c737d801da555fb74b3dcfa1a50b66a1a93e09190f41fab50", size = 35102771, upload-time = "2026-02-23T00:20:28.636Z" }, + { url = "https://files.pythonhosted.org/packages/bd/12/d19da97efde68ca1ee5538bb261d5d2c062f0c055575128f11a2730e3ac1/scipy-1.17.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94055a11dfebe37c656e70317e1996dc197e1a15bbcc351bcdd4610e128fe1ca", size = 37665910, upload-time = "2026-02-23T00:20:34.743Z" }, + { url = "https://files.pythonhosted.org/packages/06/1c/1172a88d507a4baaf72c5a09bb6c018fe2ae0ab622e5830b703a46cc9e44/scipy-1.17.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e30bdeaa5deed6bc27b4cc490823cd0347d7dae09119b8803ae576ea0ce52e4c", size = 36562980, upload-time = "2026-02-23T00:20:40.575Z" }, + { url = "https://files.pythonhosted.org/packages/70/b0/eb757336e5a76dfa7911f63252e3b7d1de00935d7705cf772db5b45ec238/scipy-1.17.1-cp313-cp313t-win_arm64.whl", hash = "sha256:a720477885a9d2411f94a93d16f9d89bad0f28ca23c3f8daa521e2dcc3f44d49", size = 24856543, upload-time = "2026-02-23T00:20:45.313Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/83/333afb452af6f0fd70414dc04f898647ee1423979ce02efa75c3b0f2c28e/scipy-1.17.1-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:a48a72c77a310327f6a3a920092fa2b8fd03d7deaa60f093038f22d98e096717", size = 31584510, upload-time = "2026-02-23T00:21:01.015Z" }, + { url = "https://files.pythonhosted.org/packages/ed/a6/d05a85fd51daeb2e4ea71d102f15b34fedca8e931af02594193ae4fd25f7/scipy-1.17.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:45abad819184f07240d8a696117a7aacd39787af9e0b719d00285549ed19a1e9", size = 28170131, upload-time = "2026-02-23T00:21:05.888Z" }, + { url = "https://files.pythonhosted.org/packages/db/7b/8624a203326675d7746a254083a187398090a179335b2e4a20e2ddc46e83/scipy-1.17.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:3fd1fcdab3ea951b610dc4cef356d416d5802991e7e32b5254828d342f7b7e0b", size = 20342032, upload-time = "2026-02-23T00:21:09.904Z" }, + { url = "https://files.pythonhosted.org/packages/c9/35/2c342897c00775d688d8ff3987aced3426858fd89d5a0e26e020b660b301/scipy-1.17.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7bdf2da170b67fdf10bca777614b1c7d96ae3ca5794fd9587dce41eb2966e866", size = 22678766, upload-time = "2026-02-23T00:21:14.313Z" }, + { url = "https://files.pythonhosted.org/packages/ef/f2/7cdb8eb308a1a6ae1e19f945913c82c23c0c442a462a46480ce487fdc0ac/scipy-1.17.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:adb2642e060a6549c343603a3851ba76ef0b74cc8c079a9a58121c7ec9fe2350", size = 32957007, upload-time = "2026-02-23T00:21:19.663Z" }, + { url = "https://files.pythonhosted.org/packages/0b/2e/7eea398450457ecb54e18e9d10110993fa65561c4f3add5e8eccd2b9cd41/scipy-1.17.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee2cfda04c00a857206a4330f0c5e3e56535494e30ca445eb19ec624ae75118", size = 35221333, upload-time = "2026-02-23T00:21:25.278Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/77/5b8509d03b77f093a0d52e606d3c4f79e8b06d1d38c441dacb1e26cacf46/scipy-1.17.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d2650c1fb97e184d12d8ba010493ee7b322864f7d3d00d3f9bb97d9c21de4068", size = 35042066, upload-time = "2026-02-23T00:21:31.358Z" }, + { url = "https://files.pythonhosted.org/packages/f9/df/18f80fb99df40b4070328d5ae5c596f2f00fffb50167e31439e932f29e7d/scipy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:08b900519463543aa604a06bec02461558a6e1cef8fdbb8098f77a48a83c8118", size = 37612763, upload-time = "2026-02-23T00:21:37.247Z" }, + { url = "https://files.pythonhosted.org/packages/4b/39/f0e8ea762a764a9dc52aa7dabcfad51a354819de1f0d4652b6a1122424d6/scipy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:3877ac408e14da24a6196de0ddcace62092bfc12a83823e92e49e40747e52c19", size = 37290984, upload-time = "2026-02-23T00:22:35.023Z" }, + { url = "https://files.pythonhosted.org/packages/7c/56/fe201e3b0f93d1a8bcf75d3379affd228a63d7e2d80ab45467a74b494947/scipy-1.17.1-cp314-cp314-win_arm64.whl", hash = "sha256:f8885db0bc2bffa59d5c1b72fad7a6a92d3e80e7257f967dd81abb553a90d293", size = 25192877, upload-time = "2026-02-23T00:22:39.798Z" }, + { url = "https://files.pythonhosted.org/packages/96/ad/f8c414e121f82e02d76f310f16db9899c4fcde36710329502a6b2a3c0392/scipy-1.17.1-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:1cc682cea2ae55524432f3cdff9e9a3be743d52a7443d0cba9017c23c87ae2f6", size = 31949750, upload-time = "2026-02-23T00:21:42.289Z" }, + { url = "https://files.pythonhosted.org/packages/7c/b0/c741e8865d61b67c81e255f4f0a832846c064e426636cd7de84e74d209be/scipy-1.17.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:2040ad4d1795a0ae89bfc7e8429677f365d45aa9fd5e4587cf1ea737f927b4a1", size = 28585858, upload-time = "2026-02-23T00:21:47.706Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/1b/3985219c6177866628fa7c2595bfd23f193ceebbe472c98a08824b9466ff/scipy-1.17.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:131f5aaea57602008f9822e2115029b55d4b5f7c070287699fe45c661d051e39", size = 20757723, upload-time = "2026-02-23T00:21:52.039Z" }, + { url = "https://files.pythonhosted.org/packages/c0/19/2a04aa25050d656d6f7b9e7b685cc83d6957fb101665bfd9369ca6534563/scipy-1.17.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9cdc1a2fcfd5c52cfb3045feb399f7b3ce822abdde3a193a6b9a60b3cb5854ca", size = 23043098, upload-time = "2026-02-23T00:21:56.185Z" }, + { url = "https://files.pythonhosted.org/packages/86/f1/3383beb9b5d0dbddd030335bf8a8b32d4317185efe495374f134d8be6cce/scipy-1.17.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e3dcd57ab780c741fde8dc68619de988b966db759a3c3152e8e9142c26295ad", size = 33030397, upload-time = "2026-02-23T00:22:01.404Z" }, + { url = "https://files.pythonhosted.org/packages/41/68/8f21e8a65a5a03f25a79165ec9d2b28c00e66dc80546cf5eb803aeeff35b/scipy-1.17.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a9956e4d4f4a301ebf6cde39850333a6b6110799d470dbbb1e25326ac447f52a", size = 35281163, upload-time = "2026-02-23T00:22:07.024Z" }, + { url = "https://files.pythonhosted.org/packages/84/8d/c8a5e19479554007a5632ed7529e665c315ae7492b4f946b0deb39870e39/scipy-1.17.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:a4328d245944d09fd639771de275701ccadf5f781ba0ff092ad141e017eccda4", size = 35116291, upload-time = "2026-02-23T00:22:12.585Z" }, + { url = "https://files.pythonhosted.org/packages/52/52/e57eceff0e342a1f50e274264ed47497b59e6a4e3118808ee58ddda7b74a/scipy-1.17.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a77cbd07b940d326d39a1d1b37817e2ee4d79cb30e7338f3d0cddffae70fcaa2", size = 37682317, upload-time = "2026-02-23T00:22:18.513Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/2f/b29eafe4a3fbc3d6de9662b36e028d5f039e72d345e05c250e121a230dd4/scipy-1.17.1-cp314-cp314t-win_amd64.whl", hash = "sha256:eb092099205ef62cd1782b006658db09e2fed75bffcae7cc0d44052d8aa0f484", size = 37345327, upload-time = "2026-02-23T00:22:24.442Z" }, + { url = "https://files.pythonhosted.org/packages/07/39/338d9219c4e87f3e708f18857ecd24d22a0c3094752393319553096b98af/scipy-1.17.1-cp314-cp314t-win_arm64.whl", hash = "sha256:200e1050faffacc162be6a486a984a0497866ec54149a01270adc8a59b7c7d21", size = 25489165, upload-time = "2026-02-23T00:22:29.563Z" }, ] [[package]] name = "scipy-stubs" -version = "1.17.0.2" +version = "1.17.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "optype", extra = ["numpy"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/40/fe/5fa7da49821ea94d60629ae71277fa8d7e16eb20602f720062b6c30a644c/scipy_stubs-1.17.0.2.tar.gz", hash = "sha256:3981bd7fa4c189a8493307afadaee1a830d9a0de8e3ae2f4603f192b6260ef2a", size = 379897, upload-time = "2026-01-22T19:17:08Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/ad/413b0d18efca7bb48574d28e91253409d91ee6121e7937022d0d380dfc6a/scipy_stubs-1.17.1.0.tar.gz", hash = "sha256:5dc51c21765b145c2d132b96b63ff4f835dd5fb768006876d1554e7a59c61571", size = 381420, upload-time = "2026-02-23T10:33:04.742Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/e3/20233497e4a27956e7392c3f7879e6ee7f767f268079f24f4b089b70f563/scipy_stubs-1.17.0.2-py3-none-any.whl", hash = "sha256:99d1aa75b7d72a7ee36a68d18bcf1149f62ab577bbd1236c65c471b3b465d824", size = 586137, upload-time = "2026-01-22T19:17:05.802Z" }, + { url = "https://files.pythonhosted.org/packages/6c/ee/c6811e04ff9d5dd1d92236e8df7ebc4db6aa65c70b9938cec293348b8ec4/scipy_stubs-1.17.1.0-py3-none-any.whl", hash = "sha256:5c9c84993d36b104acb2d187b05985eb79f73491c60d83292dd738093d53d96a", size = 587059, upload-time = "2026-02-23T10:33:02.845Z" }, ] [[package]] @@ 
-1914,57 +1914,61 @@ wheels = [ [[package]] name = "sqlalchemy" -version = "2.0.46" +version = "2.0.47" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/aa/9ce0f3e7a9829ead5c8ce549392f33a12c4555a6c0609bb27d882e9c7ddf/sqlalchemy-2.0.46.tar.gz", hash = "sha256:cf36851ee7219c170bb0793dbc3da3e80c582e04a5437bc601bfe8c85c9216d7", size = 9865393, upload-time = "2026-01-21T18:03:45.119Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/35/d16bfa235c8b7caba3730bba43e20b1e376d2224f407c178fbf59559f23e/sqlalchemy-2.0.46-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a9a72b0da8387f15d5810f1facca8f879de9b85af8c645138cba61ea147968c", size = 2153405, upload-time = "2026-01-21T19:05:54.143Z" }, - { url = "https://files.pythonhosted.org/packages/06/6c/3192e24486749862f495ddc6584ed730c0c994a67550ec395d872a2ad650/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2347c3f0efc4de367ba00218e0ae5c4ba2306e47216ef80d6e31761ac97cb0b9", size = 3334702, upload-time = "2026-01-21T18:46:45.384Z" }, - { url = "https://files.pythonhosted.org/packages/ea/a2/b9f33c8d68a3747d972a0bb758c6b63691f8fb8a49014bc3379ba15d4274/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9094c8b3197db12aa6f05c51c05daaad0a92b8c9af5388569847b03b1007fb1b", size = 3347664, upload-time = "2026-01-21T18:40:09.979Z" }, - { url = "https://files.pythonhosted.org/packages/aa/d2/3e59e2a91eaec9db7e8dc6b37b91489b5caeb054f670f32c95bcba98940f/sqlalchemy-2.0.46-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:37fee2164cf21417478b6a906adc1a91d69ae9aba8f9533e67ce882f4bb1de53", size = 3277372, upload-time = "2026-01-21T18:46:47.168Z" }, - { url = "https://files.pythonhosted.org/packages/dd/dd/67bc2e368b524e2192c3927b423798deda72c003e73a1e94c21e74b20a85/sqlalchemy-2.0.46-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b1e14b2f6965a685c7128bd315e27387205429c2e339eeec55cb75ca4ab0ea2e", size = 3312425, upload-time = "2026-01-21T18:40:11.548Z" }, - { url = "https://files.pythonhosted.org/packages/43/82/0ecd68e172bfe62247e96cb47867c2d68752566811a4e8c9d8f6e7c38a65/sqlalchemy-2.0.46-cp312-cp312-win32.whl", hash = "sha256:412f26bb4ba942d52016edc8d12fb15d91d3cd46b0047ba46e424213ad407bcb", size = 2113155, upload-time = "2026-01-21T18:42:49.748Z" }, - { url = "https://files.pythonhosted.org/packages/bc/2a/2821a45742073fc0331dc132552b30de68ba9563230853437cac54b2b53e/sqlalchemy-2.0.46-cp312-cp312-win_amd64.whl", hash = "sha256:ea3cd46b6713a10216323cda3333514944e510aa691c945334713fca6b5279ff", size = 2140078, upload-time = "2026-01-21T18:42:51.197Z" }, - { url = "https://files.pythonhosted.org/packages/b3/4b/fa7838fe20bb752810feed60e45625a9a8b0102c0c09971e2d1d95362992/sqlalchemy-2.0.46-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:93a12da97cca70cea10d4b4fc602589c4511f96c1f8f6c11817620c021d21d00", size = 2150268, upload-time = "2026-01-21T19:05:56.621Z" }, - { url = "https://files.pythonhosted.org/packages/46/c1/b34dccd712e8ea846edf396e00973dda82d598cb93762e55e43e6835eba9/sqlalchemy-2.0.46-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af865c18752d416798dae13f83f38927c52f085c52e2f32b8ab0fef46fdd02c2", size = 3276511, upload-time = "2026-01-21T18:46:49.022Z" }, - { url = "https://files.pythonhosted.org/packages/96/48/a04d9c94753e5d5d096c628c82a98c4793b9c08ca0e7155c3eb7d7db9f24/sqlalchemy-2.0.46-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:8d679b5f318423eacb61f933a9a0f75535bfca7056daeadbf6bd5bcee6183aee", size = 3292881, upload-time = "2026-01-21T18:40:13.089Z" }, - { url = "https://files.pythonhosted.org/packages/be/f4/06eda6e91476f90a7d8058f74311cb65a2fb68d988171aced81707189131/sqlalchemy-2.0.46-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64901e08c33462acc9ec3bad27fc7a5c2b6491665f2aa57564e57a4f5d7c52ad", size = 3224559, upload-time = "2026-01-21T18:46:50.974Z" }, - { url = "https://files.pythonhosted.org/packages/ab/a2/d2af04095412ca6345ac22b33b89fe8d6f32a481e613ffcb2377d931d8d0/sqlalchemy-2.0.46-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e8ac45e8f4eaac0f9f8043ea0e224158855c6a4329fd4ee37c45c61e3beb518e", size = 3262728, upload-time = "2026-01-21T18:40:14.883Z" }, - { url = "https://files.pythonhosted.org/packages/31/48/1980c7caa5978a3b8225b4d230e69a2a6538a3562b8b31cea679b6933c83/sqlalchemy-2.0.46-cp313-cp313-win32.whl", hash = "sha256:8d3b44b3d0ab2f1319d71d9863d76eeb46766f8cf9e921ac293511804d39813f", size = 2111295, upload-time = "2026-01-21T18:42:52.366Z" }, - { url = "https://files.pythonhosted.org/packages/2d/54/f8d65bbde3d877617c4720f3c9f60e99bb7266df0d5d78b6e25e7c149f35/sqlalchemy-2.0.46-cp313-cp313-win_amd64.whl", hash = "sha256:77f8071d8fbcbb2dd11b7fd40dedd04e8ebe2eb80497916efedba844298065ef", size = 2137076, upload-time = "2026-01-21T18:42:53.924Z" }, - { url = "https://files.pythonhosted.org/packages/56/ba/9be4f97c7eb2b9d5544f2624adfc2853e796ed51d2bb8aec90bc94b7137e/sqlalchemy-2.0.46-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1e8cc6cc01da346dc92d9509a63033b9b1bda4fed7a7a7807ed385c7dccdc10", size = 3556533, upload-time = "2026-01-21T18:33:06.636Z" }, - { url = "https://files.pythonhosted.org/packages/20/a6/b1fc6634564dbb4415b7ed6419cdfeaadefd2c39cdab1e3aa07a5f2474c2/sqlalchemy-2.0.46-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:96c7cca1a4babaaf3bfff3e4e606e38578856917e52f0384635a95b226c87764", size = 3523208, upload-time = "2026-01-21T18:45:08.436Z" }, - { url = "https://files.pythonhosted.org/packages/a1/d8/41e0bdfc0f930ff236f86fccd12962d8fa03713f17ed57332d38af6a3782/sqlalchemy-2.0.46-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2a9f9aee38039cf4755891a1e50e1effcc42ea6ba053743f452c372c3152b1b", size = 3464292, upload-time = "2026-01-21T18:33:08.208Z" }, - { url = "https://files.pythonhosted.org/packages/f0/8b/9dcbec62d95bea85f5ecad9b8d65b78cc30fb0ffceeb3597961f3712549b/sqlalchemy-2.0.46-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:db23b1bf8cfe1f7fda19018e7207b20cdb5168f83c437ff7e95d19e39289c447", size = 3473497, upload-time = "2026-01-21T18:45:10.552Z" }, - { url = "https://files.pythonhosted.org/packages/e9/f8/5ecdfc73383ec496de038ed1614de9e740a82db9ad67e6e4514ebc0708a3/sqlalchemy-2.0.46-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:56bdd261bfd0895452006d5316cbf35739c53b9bb71a170a331fa0ea560b2ada", size = 2152079, upload-time = "2026-01-21T19:05:58.477Z" }, - { url = "https://files.pythonhosted.org/packages/e5/bf/eba3036be7663ce4d9c050bc3d63794dc29fbe01691f2bf5ccb64e048d20/sqlalchemy-2.0.46-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33e462154edb9493f6c3ad2125931e273bbd0be8ae53f3ecd1c161ea9a1dd366", size = 3272216, upload-time = "2026-01-21T18:46:52.634Z" }, - { url = "https://files.pythonhosted.org/packages/05/45/1256fb597bb83b58a01ddb600c59fe6fdf0e5afe333f0456ed75c0f8d7bd/sqlalchemy-2.0.46-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bcdce05f056622a632f1d44bb47dbdb677f58cad393612280406ce37530eb6d", size = 3277208, upload-time = "2026-01-21T18:40:16.38Z" }, - { url = "https://files.pythonhosted.org/packages/d9/a0/2053b39e4e63b5d7ceb3372cface0859a067c1ddbd575ea7e9985716f771/sqlalchemy-2.0.46-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:8e84b09a9b0f19accedcbeff5c2caf36e0dd537341a33aad8d680336152dc34e", size = 3221994, upload-time = "2026-01-21T18:46:54.622Z" }, - { url = "https://files.pythonhosted.org/packages/1e/87/97713497d9502553c68f105a1cb62786ba1ee91dea3852ae4067ed956a50/sqlalchemy-2.0.46-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4f52f7291a92381e9b4de9050b0a65ce5d6a763333406861e33906b8aa4906bf", size = 3243990, upload-time = "2026-01-21T18:40:18.253Z" }, - { url = "https://files.pythonhosted.org/packages/a8/87/5d1b23548f420ff823c236f8bea36b1a997250fd2f892e44a3838ca424f4/sqlalchemy-2.0.46-cp314-cp314-win32.whl", hash = "sha256:70ed2830b169a9960193f4d4322d22be5c0925357d82cbf485b3369893350908", size = 2114215, upload-time = "2026-01-21T18:42:55.232Z" }, - { url = "https://files.pythonhosted.org/packages/3a/20/555f39cbcf0c10cf452988b6a93c2a12495035f68b3dbd1a408531049d31/sqlalchemy-2.0.46-cp314-cp314-win_amd64.whl", hash = "sha256:3c32e993bc57be6d177f7d5d31edb93f30726d798ad86ff9066d75d9bf2e0b6b", size = 2139867, upload-time = "2026-01-21T18:42:56.474Z" }, - { url = "https://files.pythonhosted.org/packages/3e/f0/f96c8057c982d9d8a7a68f45d69c674bc6f78cad401099692fe16521640a/sqlalchemy-2.0.46-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4dafb537740eef640c4d6a7c254611dca2df87eaf6d14d6a5fca9d1f4c3fc0fa", size = 3561202, upload-time = "2026-01-21T18:33:10.337Z" }, - { url = "https://files.pythonhosted.org/packages/d7/53/3b37dda0a5b137f21ef608d8dfc77b08477bab0fe2ac9d3e0a66eaeab6fc/sqlalchemy-2.0.46-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42a1643dc5427b69aca967dae540a90b0fbf57eaf248f13a90ea5930e0966863", size = 3526296, upload-time = "2026-01-21T18:45:12.657Z" }, - { url = "https://files.pythonhosted.org/packages/33/75/f28622ba6dde79cd545055ea7bd4062dc934e0621f7b3be2891f8563f8de/sqlalchemy-2.0.46-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:ff33c6e6ad006bbc0f34f5faf941cfc62c45841c64c0a058ac38c799f15b5ede", size = 3470008, upload-time = "2026-01-21T18:33:11.725Z" }, - { url = "https://files.pythonhosted.org/packages/a9/42/4afecbbc38d5e99b18acef446453c76eec6fbd03db0a457a12a056836e22/sqlalchemy-2.0.46-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:82ec52100ec1e6ec671563bbd02d7c7c8d0b9e71a0723c72f22ecf52d1755330", size = 3476137, upload-time = "2026-01-21T18:45:15.001Z" }, - { url = "https://files.pythonhosted.org/packages/fc/a1/9c4efa03300926601c19c18582531b45aededfb961ab3c3585f1e24f120b/sqlalchemy-2.0.46-py3-none-any.whl", hash = "sha256:f9c11766e7e7c0a2767dda5acb006a118640c9fc0a4104214b96269bfb78399e", size = 1937882, upload-time = "2026-01-21T18:22:10.456Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/cd/4b/1e00561093fe2cd8eef09d406da003c8a118ff02d6548498c1ae677d68d9/sqlalchemy-2.0.47.tar.gz", hash = "sha256:e3e7feb57b267fe897e492b9721ae46d5c7de6f9e8dee58aacf105dc4e154f3d", size = 9886323, upload-time = "2026-02-24T16:34:27.947Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/88/74eb470223ff88ea6572a132c0b8de8c1d8ed7b843d3b44a8a3c77f31d39/sqlalchemy-2.0.47-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fa91b19d6b9821c04cc8f7aa2476429cc8887b9687c762815aa629f5c0edec1", size = 2155687, upload-time = "2026-02-24T17:05:46.451Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ba/1447d3d558971b036cb93b557595cb5dcdfe728f1c7ac4dec16505ef5756/sqlalchemy-2.0.47-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7c5bbbd14eff577c8c79cbfe39a0771eecd20f430f3678533476f0087138f356", size = 3336978, upload-time = "2026-02-24T17:18:04.597Z" }, + { url = "https://files.pythonhosted.org/packages/8a/07/b47472d2ffd0776826f17ccf0b4d01b224c99fbd1904aeb103dffbb4b1cc/sqlalchemy-2.0.47-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:a5a6c555da8d4280a3c4c78c5b7a3f990cee2b2884e5f934f87a226191682ff7", size = 3349939, upload-time = "2026-02-24T17:27:18.937Z" }, + { url = "https://files.pythonhosted.org/packages/bb/c6/95fa32b79b57769da3e16f054cf658d90940317b5ca0ec20eac84aa19c4f/sqlalchemy-2.0.47-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ed48a1701d24dff3bb49a5bce94d6bc84cbe33d98af2aa2d3cdcce3dea1709ec", size = 3279648, upload-time = "2026-02-24T17:18:07.038Z" }, + { url = "https://files.pythonhosted.org/packages/bb/c8/3d07e7c73928dc59a0bed40961ca4e313e797bce650b088e8d5fdd3ad939/sqlalchemy-2.0.47-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f3178c920ad98158f0b6309382194df04b14808fa6052ae07099fdde29d5602", size = 3314695, upload-time = "2026-02-24T17:27:20.93Z" }, + { url = "https://files.pythonhosted.org/packages/6b/d2/ed32b1611c1e19fdb028eee1adc5a9aa138c2952d09ae11f1670170f80ae/sqlalchemy-2.0.47-cp312-cp312-win32.whl", hash = "sha256:b9c11ac9934dd59ece9619fe42780a08abe2faab7b0543bb00d5eabea4f421b9", size = 2115502, upload-time = "2026-02-24T17:22:52.546Z" }, + { url = "https://files.pythonhosted.org/packages/fd/52/9de590356a4dd8e9ef5a881dbba64b2bbc4cbc71bf02bc68e775fb9b1899/sqlalchemy-2.0.47-cp312-cp312-win_amd64.whl", hash = "sha256:db43b72cf8274a99e089755c9c1e0b947159b71adbc2c83c3de2e38d5d607acb", size = 2142435, upload-time = "2026-02-24T17:22:54.268Z" }, + { url = "https://files.pythonhosted.org/packages/4a/e5/0af64ce7d8f60ec5328c10084e2f449e7912a9b8bdbefdcfb44454a25f49/sqlalchemy-2.0.47-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:456a135b790da5d3c6b53d0ef71ac7b7d280b7f41eb0c438986352bf03ca7143", size = 2152551, upload-time = "2026-02-24T17:05:47.675Z" }, + { url = "https://files.pythonhosted.org/packages/63/79/746b8d15f6940e2ac469ce22d7aa5b1124b1ab820bad9b046eb3000c88a6/sqlalchemy-2.0.47-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09a2f7698e44b3135433387da5d8846cf7cc7c10e5425af7c05fee609df978b6", size 
= 3278782, upload-time = "2026-02-24T17:18:10.012Z" }, + { url = "https://files.pythonhosted.org/packages/91/b1/bd793ddb34345d1ed43b13ab2d88c95d7d4eb2e28f5b5a99128b9cc2bca2/sqlalchemy-2.0.47-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0bbc72e6a177c78d724f9106aaddc0d26a2ada89c6332b5935414eccf04cbd5", size = 3295155, upload-time = "2026-02-24T17:27:22.827Z" }, + { url = "https://files.pythonhosted.org/packages/97/84/7213def33f94e5ca6f5718d259bc9f29de0363134648425aa218d4356b23/sqlalchemy-2.0.47-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:75460456b043b78b6006e41bdf5b86747ee42eafaf7fffa3b24a6e9a456a2092", size = 3226834, upload-time = "2026-02-24T17:18:11.465Z" }, + { url = "https://files.pythonhosted.org/packages/ef/06/456810204f4dc29b5f025b1b0a03b4bd6b600ebf3c1040aebd90a257fa33/sqlalchemy-2.0.47-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5d9adaa616c3bc7d80f9ded57cd84b51d6617cad6a5456621d858c9f23aaee01", size = 3265001, upload-time = "2026-02-24T17:27:24.813Z" }, + { url = "https://files.pythonhosted.org/packages/fb/20/df3920a4b2217dbd7390a5bd277c1902e0393f42baaf49f49b3c935e7328/sqlalchemy-2.0.47-cp313-cp313-win32.whl", hash = "sha256:76e09f974382a496a5ed985db9343628b1cb1ac911f27342e4cc46a8bac10476", size = 2113647, upload-time = "2026-02-24T17:22:55.747Z" }, + { url = "https://files.pythonhosted.org/packages/46/06/7873ddf69918efbfabd7211829f4bd8019739d0a719253112d305d3ba51d/sqlalchemy-2.0.47-cp313-cp313-win_amd64.whl", hash = "sha256:0664089b0bf6724a0bfb49a0cf4d4da24868a0a5c8e937cd7db356d5dcdf2c66", size = 2139425, upload-time = "2026-02-24T17:22:57.033Z" }, + { url = "https://files.pythonhosted.org/packages/54/fa/61ad9731370c90ac7ea5bf8f5eaa12c48bb4beec41c0fa0360becf4ac10d/sqlalchemy-2.0.47-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed0c967c701ae13da98eb220f9ddab3044ab63504c1ba24ad6a59b26826ad003", size = 3558809, upload-time = 
"2026-02-24T17:12:15.232Z" }, + { url = "https://files.pythonhosted.org/packages/33/d5/221fac96f0529391fe374875633804c866f2b21a9c6d3a6ca57d9c12cfd7/sqlalchemy-2.0.47-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3537943a61fd25b241e976426a0c6814434b93cf9b09d39e8e78f3c9eb9a487", size = 3525480, upload-time = "2026-02-24T17:27:59.602Z" }, + { url = "https://files.pythonhosted.org/packages/ec/55/8247d53998c3673e4a8d1958eba75c6f5cc3b39082029d400bb1f2a911ae/sqlalchemy-2.0.47-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:57f7e336a64a0dba686c66392d46b9bc7af2c57d55ce6dc1697b4ef32b043ceb", size = 3466569, upload-time = "2026-02-24T17:12:16.94Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b5/c1f0eea1bac6790845f71420a7fe2f2a0566203aa57543117d4af3b77d1c/sqlalchemy-2.0.47-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dff735a621858680217cb5142b779bad40ef7322ddbb7c12062190db6879772e", size = 3475770, upload-time = "2026-02-24T17:28:02.034Z" }, + { url = "https://files.pythonhosted.org/packages/c5/ed/2f43f92474ea0c43c204657dc47d9d002cd738b96ca2af8e6d29a9b5e42d/sqlalchemy-2.0.47-cp313-cp313t-win32.whl", hash = "sha256:3893dc096bb3cca9608ea3487372ffcea3ae9b162f40e4d3c51dd49db1d1b2dc", size = 2141300, upload-time = "2026-02-24T17:14:37.024Z" }, + { url = "https://files.pythonhosted.org/packages/cc/a9/8b73f9f1695b6e92f7aaf1711135a1e3bbeb78bca9eded35cb79180d3c6d/sqlalchemy-2.0.47-cp313-cp313t-win_amd64.whl", hash = "sha256:b5103427466f4b3e61f04833ae01f9a914b1280a2a8bcde3a9d7ab11f3755b42", size = 2173053, upload-time = "2026-02-24T17:14:38.688Z" }, + { url = "https://files.pythonhosted.org/packages/c1/30/98243209aae58ed80e090ea988d5182244ca7ab3ff59e6d850c3dfc7651e/sqlalchemy-2.0.47-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b03010a5a5dfe71676bc83f2473ebe082478e32d77e6f082c8fe15a31c3b42a6", size = 2154355, upload-time = "2026-02-24T17:05:48.959Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/62/12ca6ea92055fe486d6558a2a4efe93e194ff597463849c01f88e5adb99d/sqlalchemy-2.0.47-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8e3371aa9024520883a415a09cc20c33cfd3eeccf9e0f4f4c367f940b9cbd44", size = 3274486, upload-time = "2026-02-24T17:18:13.659Z" }, + { url = "https://files.pythonhosted.org/packages/97/88/7dfbdeaa8d42b1584e65d6cc713e9d33b6fa563e0d546d5cb87e545bb0e5/sqlalchemy-2.0.47-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9449f747e50d518c6e1b40cc379e48bfc796453c47b15e627ea901c201e48a6", size = 3279481, upload-time = "2026-02-24T17:27:26.491Z" }, + { url = "https://files.pythonhosted.org/packages/d0/b7/75e1c1970616a9dd64a8a6fd788248da2ddaf81c95f4875f2a1e8aee4128/sqlalchemy-2.0.47-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:21410f60d5cac1d6bfe360e05bd91b179be4fa0aa6eea6be46054971d277608f", size = 3224269, upload-time = "2026-02-24T17:18:15.078Z" }, + { url = "https://files.pythonhosted.org/packages/31/ac/eec1a13b891df9a8bc203334caf6e6aac60b02f61b018ef3b4124b8c4120/sqlalchemy-2.0.47-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:819841dd5bb4324c284c09e2874cf96fe6338bfb57a64548d9b81a4e39c9871f", size = 3246262, upload-time = "2026-02-24T17:27:27.986Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b0/661b0245b06421058610da39f8ceb34abcc90b49f90f256380968d761dbe/sqlalchemy-2.0.47-cp314-cp314-win32.whl", hash = "sha256:e255ee44821a7ef45649c43064cf94e74f81f61b4df70547304b97a351e9b7db", size = 2116528, upload-time = "2026-02-24T17:22:59.363Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ef/1035a90d899e61810791c052004958be622a2cf3eb3df71c3fe20778c5d0/sqlalchemy-2.0.47-cp314-cp314-win_amd64.whl", hash = "sha256:209467ff73ea1518fe1a5aaed9ba75bb9e33b2666e2553af9ccd13387bf192cb", size = 2142181, upload-time = "2026-02-24T17:23:01.001Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/bb/17a1dd09cbba91258218ceb582225f14b5364d2683f9f5a274f72f2d764f/sqlalchemy-2.0.47-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e78fd9186946afaa287f8a1fe147ead06e5d566b08c0afcb601226e9c7322a64", size = 3563477, upload-time = "2026-02-24T17:12:18.46Z" }, + { url = "https://files.pythonhosted.org/packages/66/8f/1a03d24c40cc321ef2f2231f05420d140bb06a84f7047eaa7eaa21d230ba/sqlalchemy-2.0.47-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5740e2f31b5987ed9619d6912ae5b750c03637f2078850da3002934c9532f172", size = 3528568, upload-time = "2026-02-24T17:28:03.732Z" }, + { url = "https://files.pythonhosted.org/packages/fd/53/d56a213055d6b038a5384f0db5ece7343334aca230ff3f0fa1561106f22c/sqlalchemy-2.0.47-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fb9ac00d03de93acb210e8ec7243fefe3e012515bf5fd2f0898c8dff38bc77a4", size = 3472284, upload-time = "2026-02-24T17:12:20.319Z" }, + { url = "https://files.pythonhosted.org/packages/ff/19/c235d81b9cfdd6130bf63143b7bade0dc4afa46c4b634d5d6b2a96bea233/sqlalchemy-2.0.47-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c72a0b9eb2672d70d112cb149fbaf172d466bc691014c496aaac594f1988e706", size = 3478410, upload-time = "2026-02-24T17:28:05.892Z" }, + { url = "https://files.pythonhosted.org/packages/0e/db/cafdeca5ecdaa3bb0811ba5449501da677ce0d83be8d05c5822da72d2e86/sqlalchemy-2.0.47-cp314-cp314t-win32.whl", hash = "sha256:c200db1128d72a71dc3c31c24b42eb9fd85b2b3e5a3c9ba1e751c11ac31250ff", size = 2147164, upload-time = "2026-02-24T17:14:40.783Z" }, + { url = "https://files.pythonhosted.org/packages/fc/5e/ff41a010e9e0f76418b02ad352060a4341bb15f0af66cedc924ab376c7c6/sqlalchemy-2.0.47-cp314-cp314t-win_amd64.whl", hash = "sha256:669837759b84e575407355dcff912835892058aea9b80bd1cb76d6a151cf37f7", size = 2182154, upload-time = "2026-02-24T17:14:43.205Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/9f/7c378406b592fcf1fc157248607b495a40e3202ba4a6f1372a2ba6447717/sqlalchemy-2.0.47-py3-none-any.whl", hash = "sha256:e2647043599297a1ef10e720cf310846b7f31b6c841fee093d2b09d81215eb93", size = 1940159, upload-time = "2026-02-24T17:15:07.158Z" }, ] [[package]] name = "sqlmodel" -version = "0.0.35" +version = "0.0.37" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a6/fd/6f468f52977b85f8b1af3f0d7d4396ed77804a59bf589f2f47c524383388/sqlmodel-0.0.35.tar.gz", hash = "sha256:e0079a6ec569323587ffb7326bbbc9d9a1a92e9be271b18e83f54d4a4200d6ac", size = 86087, upload-time = "2026-02-20T16:42:21.254Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/26/1d2faa0fd5a765267f49751de533adac6b9ff9366c7c6e7692df4f32230f/sqlmodel-0.0.37.tar.gz", hash = "sha256:d2c19327175794faf50b1ee31cc966764f55b1dedefc046450bc5741a3d68352", size = 85527, upload-time = "2026-02-21T16:39:47.038Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/f3/90f7b2eb86e590b74cf33e37a5313c074092684666355201afe9a1ae7ef5/sqlmodel-0.0.35-py3-none-any.whl", hash = "sha256:367c11719bc4967430d5aadc43ee1a6f7638b9c82ee7c8835401400e05ec9431", size = 27221, upload-time = "2026-02-20T16:42:20.301Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e1/7c8d18e737433f3b5bbe27b56a9072a9fcb36342b48f1bef34b6da1d61f2/sqlmodel-0.0.37-py3-none-any.whl", hash = "sha256:2137a4045ef3fd66a917a7717ada959a1ceb3630d95e1f6aaab39dd2c0aef278", size = 27224, upload-time = "2026-02-21T16:39:47.781Z" }, ] [[package]] From bd49b6de8846f30ce7a9a372865fadb44f3da9f2 Mon Sep 17 00:00:00 2001 From: Simon Lloyd Date: Tue, 24 Feb 2026 20:37:01 +0000 Subject: [PATCH 2/3] fix: use case-insensitive glob and POSIX paths in tests for Windows compatibility - Use glob('*.bhz', case_sensitive=False) in conftest to match SAC files regardless of extension casing 
on any platform - Use Path.as_posix() when joining file paths into CLI command strings to avoid shlex stripping backslashes from Windows paths - Fix print_project_info to display 'in-memory database' instead of ':memory:' --- tests/conftest.py | 2 +- tests/functional/test_cli_basic_ops.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index e8b96935..445cfe04 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -163,7 +163,7 @@ def multi_event_data(tmp_path_factory: pytest.TempPathFactory) -> list[Path]: for orgfile in orgfiles: testfile = tmpdir / orgfile.name shutil.copy(orgfile, testfile) - return sorted(tmpdir.glob("*.bhz")) + return sorted(tmpdir.glob("*.bhz", case_sensitive=False)) # --------------------------------------------------------------------------- diff --git a/tests/functional/test_cli_basic_ops.py b/tests/functional/test_cli_basic_ops.py index f510d6d9..96a44a41 100644 --- a/tests/functional/test_cli_basic_ops.py +++ b/tests/functional/test_cli_basic_ops.py @@ -85,7 +85,7 @@ def test_add_data( cli_json: Callable[[str], list | dict], ) -> None: """Verifies that data can be added to the project.""" - files = " ".join(str(f) for f in multi_event_data) + files = " ".join(f.as_posix() for f in multi_event_data) cli(f"data add {files} --no-progress") events = cli_json("event dump") assert len(events) > 0 @@ -100,7 +100,7 @@ def test_add_data_idempotent( """Adding the same files twice does not duplicate data.""" events_before = cli_json("event dump") - files = " ".join(str(f) for f in multi_event_data) + files = " ".join(f.as_posix() for f in multi_event_data) cli(f"data add {files} --no-progress") events_after = cli_json("event dump") @@ -128,7 +128,7 @@ def test_dry_run_does_not_add( cli_json: Callable[[str], list | dict], ) -> None: """Verifies that dry-run mode does not modify the database.""" - files = " ".join(str(f) for f in multi_event_data) + files = " ".join(f.as_posix() for 
f in multi_event_data) cli(f"data add {files} --no-progress --dry-run") events = cli_json("event dump") assert len(events) == 0 @@ -617,7 +617,7 @@ def test_delete_all_events_and_readd( seis_empty = cli_json("seismogram dump") assert len(seis_empty) == 0 - files = " ".join(str(f) for f in multi_event_data) + files = " ".join(f.as_posix() for f in multi_event_data) cli(f"data add {files} --no-progress") events_after = cli_json("event dump") From e2a5f0349a4425ccade6a212ae39b117fbfc9fb0 Mon Sep 17 00:00:00 2001 From: Simon Lloyd Date: Tue, 24 Feb 2026 21:17:15 +0000 Subject: [PATCH 3/3] refactor: move UUID tests to integration, fix add_data_to_project savepoint handling - Move tests/unit/utils/test_uuid.py to tests/integration/test_uuid.py; drop local session fixture in favour of conftest patched_session - Use nested.rollback() instead of session.rollback() in add_data_to_project dry-run path to avoid rolling back the outer transaction - Move session.commit() outside begin_nested() context for clarity - Snapshot existing IDs before entering the savepoint - Fix missing session arg in add_data_to_project docstring --- src/aimbat/cli/_data.py | 16 ++- src/aimbat/core/_data.py | 47 ++++--- src/aimbat/core/_station.py | 1 + tests/integration/test_data_io.py | 16 +-- tests/integration/test_uuid.py | 207 +++++++++++++++++++++++++++ tests/unit/utils/test_uuid.py | 224 ------------------------------ 6 files changed, 250 insertions(+), 261 deletions(-) create mode 100644 tests/integration/test_uuid.py delete mode 100644 tests/unit/utils/test_uuid.py diff --git a/src/aimbat/cli/_data.py b/src/aimbat/cli/_data.py index ca20a3c6..72d6c0fa 100644 --- a/src/aimbat/cli/_data.py +++ b/src/aimbat/cli/_data.py @@ -13,14 +13,16 @@ @app.command(name="add") @simple_exception def cli_data_add( - datasources: Annotated[ + data_sources: Annotated[ list[Path], Parameter( - name="files", consume_multiple=True, validator=validators.Path(exists=True) + name="sources", + consume_multiple=True, + 
validator=validators.Path(exists=True), ), ], *, - datatype: DataType = DataType.SAC, + data_type: Annotated[DataType, Parameter(name="type")] = DataType.SAC, dry_run: Annotated[bool, Parameter(name="dry-run")] = False, show_progress_bar: Annotated[bool, Parameter(name="progress")] = True, global_parameters: GlobalParameters | None = None, @@ -28,8 +30,8 @@ def cli_data_add( """Add or update data files in the AIMBAT project. Args: - seismogram_files: Seismogram files to be added. - filetype: Specify type of seismogram file. + data_sources: Data sources to be added. + data_type: Specify type of seismogram file. dry_run: If True, print the files that would be added without modifying the database. show_progress_bar: Display progress bar. """ @@ -43,8 +45,8 @@ def cli_data_add( with Session(engine) as session: add_data_to_project( session, - datasources, - datatype, + data_sources, + data_type, dry_run, disable_progress_bar, ) diff --git a/src/aimbat/core/_data.py b/src/aimbat/core/_data.py index 0259748a..1a3b5994 100644 --- a/src/aimbat/core/_data.py +++ b/src/aimbat/core/_data.py @@ -190,39 +190,41 @@ def _print_dry_run_results( def add_data_to_project( session: Session, - datasources: Sequence[str | os.PathLike], - datatype: DataType, + datas_sources: Sequence[str | os.PathLike], + data_type: DataType, dry_run: bool = False, disable_progress_bar: bool = True, ) -> None: """Add files to the AIMBAT database. Args: - datasources: List of data sources to add. - datatype: Type of data. - disable_progress_bar: Do not display progress bar. + session: The SQLModel database session. + data_sources: List of data sources to add. + data_type: Type of data. dry_run: If True, do not commit changes to the database. + disable_progress_bar: Do not display progress bar. 
""" - logger.info(f"Adding {len(datasources)} {datatype} files to project.") + logger.info(f"Adding {len(datas_sources)} {data_type} files to project.") - try: - with session.begin_nested(): - # Snapshot existing IDs before adding so we can tell new from reused. - if dry_run: - existing_station_ids = set(session.exec(select(AimbatStation.id)).all()) - existing_event_ids = set(session.exec(select(AimbatEvent.id)).all()) - existing_seismogram_ids = set( - session.exec(select(AimbatSeismogram.id)).all() - ) + # Snapshot existing IDs before entering the savepoint so we can identify + # what would be new vs reused when running a dry run. + if dry_run: + existing_station_ids = set(session.exec(select(AimbatStation.id)).all()) + existing_event_ids = set(session.exec(select(AimbatEvent.id)).all()) + existing_seismogram_ids = set(session.exec(select(AimbatSeismogram.id)).all()) - added_datasources: list[AimbatDataSource] = [] + try: + added_datasources: list[AimbatDataSource] = [] + with session.begin_nested() as nested: for datasource in track( - sequence=datasources, + sequence=datas_sources, description="Adding data ...", disable=disable_progress_bar, ): - added_datasources.append(_add_datasource(session, datasource, datatype)) + added_datasources.append( + _add_datasource(session, datasource, data_type) + ) if dry_run: logger.info("Dry run: displaying data that would be added.") @@ -233,11 +235,12 @@ def add_data_to_project( existing_event_ids, existing_seismogram_ids, ) - session.rollback() + nested.rollback() logger.info("Dry run complete. Rolling back changes.") - else: - session.commit() - logger.info("Data added successfully.") + return + + session.commit() + logger.info("Data added successfully.") except Exception as e: logger.error(f"Failed to add data. Rolling back changes. 
Error: {e}") diff --git a/src/aimbat/core/_station.py b/src/aimbat/core/_station.py index d28fee38..8739e712 100644 --- a/src/aimbat/core/_station.py +++ b/src/aimbat/core/_station.py @@ -80,6 +80,7 @@ def get_stations_in_active_event( statement = ( select(AimbatStation) + .distinct() .join(AimbatSeismogram) .join(AimbatEvent) .where(AimbatEvent.active == True) # noqa: E712 diff --git a/tests/integration/test_data_io.py b/tests/integration/test_data_io.py index ea100621..20abdc34 100644 --- a/tests/integration/test_data_io.py +++ b/tests/integration/test_data_io.py @@ -42,7 +42,7 @@ def test_add_single_sac_file(self, sac_file_good: Path, session: Session) -> Non add_data_to_project( session, [sac_file_good], - datatype=DataType.SAC, + data_type=DataType.SAC, ) seismogram_filename = session.exec( select(AimbatDataSource.sourcename) @@ -64,7 +64,7 @@ def test_add_multiple_sac_files( add_data_to_project( session, multi_event_data, - datatype=DataType.SAC, + data_type=DataType.SAC, ) seismogram_filenames = session.exec(select(AimbatDataSource.sourcename)).all() @@ -83,7 +83,7 @@ def test_add_nonexistent_file(self, session: Session) -> None: add_data_to_project( session, [non_existent_file], - datatype=DataType.SAC, + data_type=DataType.SAC, ) def test_add_mixed_valid_and_invalid_files( @@ -100,7 +100,7 @@ def test_add_mixed_valid_and_invalid_files( add_data_to_project( session, [sac_file_good, non_existent_file], - datatype=DataType.SAC, + data_type=DataType.SAC, ) # Verify that the valid file was not added due to the error @@ -125,7 +125,7 @@ def test_add_sac_file_with_missing_pick( add_data_to_project( session, [sac_file_good], - datatype=DataType.SAC, + data_type=DataType.SAC, ) def test_dry_run_all_new( @@ -144,7 +144,7 @@ def test_dry_run_all_new( add_data_to_project( session, multi_event_data, - datatype=DataType.SAC, + data_type=DataType.SAC, dry_run=True, ) @@ -173,14 +173,14 @@ def test_dry_run_all_skipped( add_data_to_project( session, multi_event_data, - 
datatype=DataType.SAC, + data_type=DataType.SAC, ) capsys.readouterr() # discard output from the real add add_data_to_project( session, multi_event_data, - datatype=DataType.SAC, + data_type=DataType.SAC, dry_run=True, ) diff --git a/tests/integration/test_uuid.py b/tests/integration/test_uuid.py new file mode 100644 index 00000000..f19cf312 --- /dev/null +++ b/tests/integration/test_uuid.py @@ -0,0 +1,207 @@ +"""Integration tests for aimbat.utils._uuid.""" + +import uuid +import pandas as pd +import pytest +from aimbat.models import AimbatEvent +from aimbat.utils._uuid import string_to_uuid, uuid_shortener +from sqlmodel import Session + + +def _make_event(uid: uuid.UUID, offset_seconds: int = 0) -> AimbatEvent: + """Helper to create an AimbatEvent with a specific UUID and time offset. + + Args: + uid: The UUID for the event. + offset_seconds: Time offset in seconds. + + Returns: + The created event. + """ + return AimbatEvent( + id=uid, + time=pd.Timestamp("2000-01-01") + pd.Timedelta(seconds=offset_seconds), + latitude=0.0, + longitude=0.0, + depth=0.0, + ) + + +class TestStringToUuid: + """Tests for the string_to_uuid function.""" + + def test_resolves_full_uuid(self, patched_session: Session) -> None: + """Verifies resolving a full UUID string. + + Args: + patched_session: The database session. + """ + uid = uuid.uuid4() + patched_session.add(_make_event(uid)) + patched_session.commit() + result = string_to_uuid(patched_session, str(uid), AimbatEvent) + assert result == uid + + def test_resolves_short_prefix(self, patched_session: Session) -> None: + """Verifies resolving a UUID from a short prefix. + + Args: + patched_session: The database session. 
+ """ + uid = uuid.uuid4() + patched_session.add(_make_event(uid)) + patched_session.commit() + prefix = str(uid).replace("-", "")[:6] + result = string_to_uuid(patched_session, prefix, AimbatEvent) + assert result == uid + + def test_raises_on_no_match(self, patched_session: Session) -> None: + """Verifies that ValueError is raised when no match is found. + + Args: + patched_session: The database session. + """ + with pytest.raises(ValueError, match="Unable to find"): + string_to_uuid(patched_session, "000000", AimbatEvent) + + def test_raises_on_ambiguous_match(self, patched_session: Session) -> None: + """Verifies that ValueError is raised when multiple matches are found. + + Args: + patched_session: The database session. + """ + uid1 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000001") + uid2 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000002") + patched_session.add(_make_event(uid1, offset_seconds=0)) + patched_session.add(_make_event(uid2, offset_seconds=1)) + patched_session.commit() + with pytest.raises(ValueError, match="more than one"): + string_to_uuid(patched_session, "aaaaaaaa", AimbatEvent) + + def test_custom_error_message(self, patched_session: Session) -> None: + """Verifies that a custom error message is used when provided. + + Args: + patched_session: The database session. + """ + with pytest.raises(ValueError, match="custom error"): + string_to_uuid( + patched_session, "000000", AimbatEvent, custom_error="custom error" + ) + + def test_ignores_dashes_in_input(self, patched_session: Session) -> None: + """Verifies that dashes in the input string are ignored. + + Args: + patched_session: The database session. 
+ """ + uid = uuid.UUID("abcdef12-1234-4000-8000-000000000001") + patched_session.add(_make_event(uid)) + patched_session.commit() + result = string_to_uuid(patched_session, "abcdef12-1234", AimbatEvent) + assert result == uid + + +class TestUuidShortener: + """Tests for the uuid_shortener function.""" + + def test_returns_unique_prefix_for_single_entry( + self, patched_session: Session + ) -> None: + """Verifies getting a unique prefix for a single event. + + Args: + patched_session: The database session. + """ + uid = uuid.uuid4() + event = _make_event(uid) + patched_session.add(event) + patched_session.commit() + short = uuid_shortener(patched_session, event) + assert str(uid).startswith(short) + + def test_prefix_is_shortest_unique(self, patched_session: Session) -> None: + """Verifies that the returned prefix is the shortest possible unique prefix. + + Args: + patched_session: The database session. + """ + uid1 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000001") + uid2 = uuid.UUID("bbbbbbbb-0000-4000-8000-000000000002") + e1 = _make_event(uid1, offset_seconds=0) + e2 = _make_event(uid2, offset_seconds=1) + patched_session.add(e1) + patched_session.add(e2) + patched_session.commit() + short1 = uuid_shortener(patched_session, e1) + short2 = uuid_shortener(patched_session, e2) + assert str(uid1).startswith(short1), "prefix should match uid1" + assert str(uid2).startswith(short2), "prefix should match uid2" + assert not str(uid2).startswith(short1), "short1 should not match uid2" + assert not str(uid1).startswith(short2), "short2 should not match uid1" + + def test_disambiguates_shared_prefix(self, patched_session: Session) -> None: + """Verifies disambiguation when two UUIDs share a long common prefix. + + Args: + patched_session: The database session. 
+ """ + uid1 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000001") + uid2 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000002") + e1 = _make_event(uid1, offset_seconds=0) + e2 = _make_event(uid2, offset_seconds=1) + patched_session.add(e1) + patched_session.add(e2) + patched_session.commit() + short1 = uuid_shortener(patched_session, e1) + short2 = uuid_shortener(patched_session, e2) + assert short1 != short2, "disambiguated prefixes should differ" + assert str(uid1).startswith(short1), "short1 should match uid1" + assert str(uid2).startswith(short2), "short2 should match uid2" + + def test_class_form_with_str_uuid(self, patched_session: Session) -> None: + """Verifies calling with a class and string UUID instead of a model instance. + + Args: + patched_session: The database session. + """ + uid = uuid.uuid4() + patched_session.add(_make_event(uid)) + patched_session.commit() + short = uuid_shortener(patched_session, AimbatEvent, str_uuid=str(uid)) + assert str(uid).startswith(short) + + def test_class_form_requires_str_uuid(self, patched_session: Session) -> None: + """Verifies that str_uuid is required when calling with a class. + + Args: + patched_session: The database session. + """ + with pytest.raises(ValueError, match="str_uuid must be provided"): + uuid_shortener(patched_session, AimbatEvent) + + def test_raises_if_id_not_in_table(self, patched_session: Session) -> None: + """Verifies that ValueError is raised if the UUID is not in the table. + + Args: + patched_session: The database session. + """ + uid = uuid.uuid4() + with pytest.raises(ValueError, match="not found in table"): + uuid_shortener(patched_session, AimbatEvent, str_uuid=str(uid)) + + def test_min_length_respected(self, patched_session: Session) -> None: + """Verifies that the minimum length constraint is respected. + + Args: + patched_session: The database session. 
+ """ + uid = uuid.uuid4() + patched_session.add(_make_event(uid)) + patched_session.commit() + event = patched_session.get(AimbatEvent, uid) + assert event is not None, "expected event to exist in database" + short = uuid_shortener(patched_session, event, min_length=4) + assert ( + len(short.replace("-", "")) >= 4 + ), "result should be at least 4 characters excluding dashes" diff --git a/tests/unit/utils/test_uuid.py b/tests/unit/utils/test_uuid.py deleted file mode 100644 index c699e5ee..00000000 --- a/tests/unit/utils/test_uuid.py +++ /dev/null @@ -1,224 +0,0 @@ -"""Unit tests for aimbat.utils._uuid.""" - -import uuid -from collections.abc import Generator - -import pandas as pd -import pytest -from sqlmodel import Session, SQLModel, create_engine - -from aimbat.models import AimbatEvent -from aimbat.utils._uuid import string_to_uuid, uuid_shortener - - -@pytest.fixture() -def session() -> Generator[Session, None, None]: - """Provide an in-memory SQLite session with all tables created. - - Yields: - Session: The database session. - """ - engine = create_engine( - "sqlite:///:memory:", connect_args={"check_same_thread": False} - ) - SQLModel.metadata.create_all(engine) - with Session(engine) as s: - yield s - engine.dispose() - - -def _make_event(uid: uuid.UUID, offset_seconds: int = 0) -> AimbatEvent: - """Helper to create an AimbatEvent with a specific UUID and time offset. - - Args: - uid (uuid.UUID): The UUID for the event. - offset_seconds (int): Time offset in seconds (default: 0). - - Returns: - AimbatEvent: The created event. - """ - return AimbatEvent( - id=uid, - time=pd.Timestamp("2000-01-01") + pd.Timedelta(seconds=offset_seconds), - latitude=0.0, - longitude=0.0, - depth=0.0, - ) - - -class TestStringToUuid: - """Tests for the string_to_uuid function.""" - - def test_resolves_full_uuid(self, session: Session) -> None: - """Verifies resolving a full UUID string. - - Args: - session (Session): Database session. 
- """ - uid = uuid.uuid4() - session.add(_make_event(uid)) - session.commit() - result = string_to_uuid(session, str(uid), AimbatEvent) - assert result == uid - - def test_resolves_short_prefix(self, session: Session) -> None: - """Verifies resolving a UUID from a short prefix. - - Args: - session (Session): Database session. - """ - uid = uuid.uuid4() - session.add(_make_event(uid)) - session.commit() - prefix = str(uid).replace("-", "")[:6] - result = string_to_uuid(session, prefix, AimbatEvent) - assert result == uid - - def test_raises_on_no_match(self, session: Session) -> None: - """Verifies that ValueError is raised when no match is found. - - Args: - session (Session): Database session. - """ - with pytest.raises(ValueError, match="Unable to find"): - string_to_uuid(session, "000000", AimbatEvent) - - def test_raises_on_ambiguous_match(self, session: Session) -> None: - """Verifies that ValueError is raised when multiple matches are found. - - Args: - session (Session): Database session. - """ - # Force two UUIDs that share the same prefix by crafting them manually. - uid1 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000001") - uid2 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000002") - session.add(_make_event(uid1, offset_seconds=0)) - session.add(_make_event(uid2, offset_seconds=1)) - session.commit() - with pytest.raises(ValueError, match="more than one"): - string_to_uuid(session, "aaaaaaaa", AimbatEvent) - - def test_custom_error_message(self, session: Session) -> None: - """Verifies that a custom error message is used when provided. - - Args: - session (Session): Database session. - """ - with pytest.raises(ValueError, match="custom error"): - string_to_uuid(session, "000000", AimbatEvent, custom_error="custom error") - - def test_ignores_dashes_in_input(self, session: Session) -> None: - """Verifies that dashes in the input string are ignored. - - Args: - session (Session): Database session. 
- """ - uid = uuid.UUID("abcdef12-1234-4000-8000-000000000001") - session.add(_make_event(uid)) - session.commit() - result = string_to_uuid(session, "abcdef12-1234", AimbatEvent) - assert result == uid - - -class TestUuidShortener: - """Tests for the uuid_shortener function.""" - - def test_returns_unique_prefix_for_single_entry(self, session: Session) -> None: - """Verifies getting a unique prefix for a single event. - - Args: - session (Session): Database session. - """ - uid = uuid.uuid4() - event = _make_event(uid) - session.add(event) - session.commit() - short = uuid_shortener(session, event) - assert str(uid).startswith(short) - - def test_prefix_is_shortest_unique(self, session: Session) -> None: - """Verifies that the returned prefix is the shortest possible unique prefix. - - Args: - session (Session): Database session. - """ - uid1 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000001") - uid2 = uuid.UUID("bbbbbbbb-0000-4000-8000-000000000002") - e1 = _make_event(uid1, offset_seconds=0) - e2 = _make_event(uid2, offset_seconds=1) - session.add(e1) - session.add(e2) - session.commit() - short1 = uuid_shortener(session, e1) - short2 = uuid_shortener(session, e2) - # Each prefix must uniquely identify its UUID. - assert str(uid1).startswith(short1) - assert str(uid2).startswith(short2) - assert not str(uid2).startswith(short1) - assert not str(uid1).startswith(short2) - - def test_disambiguates_shared_prefix(self, session: Session) -> None: - """Verifies disambiguation when prefixes are shared. - - Args: - session (Session): Database session. 
- """ - uid1 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000001") - uid2 = uuid.UUID("aaaaaaaa-0000-4000-8000-000000000002") - e1 = _make_event(uid1, offset_seconds=0) - e2 = _make_event(uid2, offset_seconds=1) - session.add(e1) - session.add(e2) - session.commit() - short1 = uuid_shortener(session, e1) - short2 = uuid_shortener(session, e2) - assert short1 != short2 - assert str(uid1).startswith(short1) - assert str(uid2).startswith(short2) - - def test_class_form_with_str_uuid(self, session: Session) -> None: - """Verifies calling with class and string UUID. - - Args: - session (Session): Database session. - """ - uid = uuid.uuid4() - session.add(_make_event(uid)) - session.commit() - short = uuid_shortener(session, AimbatEvent, str_uuid=str(uid)) - assert str(uid).startswith(short) - - def test_class_form_requires_str_uuid(self, session: Session) -> None: - """Verifies that str_uuid is required when calling with a class. - - Args: - session (Session): Database session. - """ - with pytest.raises(ValueError, match="str_uuid must be provided"): - uuid_shortener(session, AimbatEvent) - - def test_raises_if_id_not_in_table(self, session: Session) -> None: - """Verifies that ValueError is raised if the ID is not in the table. - - Args: - session (Session): Database session. - """ - uid = uuid.uuid4() - # Do not add the event to the session. - with pytest.raises(ValueError, match="not found in table"): - uuid_shortener(session, AimbatEvent, str_uuid=str(uid)) - - def test_min_length_respected(self, session: Session) -> None: - """Verifies that the minimum length constraint is respected. - - Args: - session (Session): Database session. - """ - uid = uuid.uuid4() - session.add(_make_event(uid)) - session.commit() - event = session.get(AimbatEvent, uid) - assert event is not None, "expected event to exist in database" - short = uuid_shortener(session, event, min_length=4) - # Result must be at least 4 chars (excluding any trailing dash). 
- assert len(short.replace("-", "")) >= 4