diff --git a/.cursor/rules/backend-test-conventions.mdc b/.cursor/rules/backend-test-conventions.mdc new file mode 100644 index 00000000..fb7dc504 --- /dev/null +++ b/.cursor/rules/backend-test-conventions.mdc @@ -0,0 +1,63 @@ +--- +description: Backend Python test conventions (pytest) +globs: tests/backend/**/*.py +alwaysApply: false +--- + +# Backend Test Conventions + +## File Location & Naming + +- Place tests under `tests/backend/unit/`, `tests/backend/integration/`, or `tests/backend/contract/`. +- Name files `test_<topic>.py` with descriptive snake_case names. +- Shared test data goes in `tests/backend/fixtures/`. + +## File Structure + +```python +"""One-paragraph summary of what is being tested and why. + +Background +---------- +Brief context about the feature or bug fix these tests validate. +""" +from __future__ import annotations + +import pytest + +pytestmark = [pytest.mark.backend] +``` + +## Conventions + +- Always add `pytestmark = [pytest.mark.backend]` at module level. +- Group related tests in classes prefixed with `Test` (e.g. `TestStripImageBlocks`). +- Use `pytest.mark.parametrize` for data-driven tests instead of writing repetitive cases. +- Use `pytest.fixture()` for shared setup; keep fixtures close to where they are used. +- Unit tests must **not** depend on Flask, network, or external services. +- Mock external calls with `unittest.mock.patch` / `MagicMock`; never make real API calls. +- Each test function should verify **one** behavior and have a clear name: `test_<unit>_<behavior>`. + +## Running Tests + +- When running pytest, use `-q` (quiet mode) instead of `-v` (verbose mode) for cleaner output. 
+- **Preferred:** `python -m pytest tests/backend/ -q` +- **Avoid:** `python -m pytest tests/backend/ -v 2>&1` + +## Example + +```python +# ❌ BAD – vague name, no parametrize, no marker +def test_it_works(): + assert sanitize("hello world") == "hello_world" + +# ✅ GOOD +pytestmark = [pytest.mark.backend] + +@pytest.mark.parametrize("raw,expected", [ + ("hello world", "hello_world"), + ("订单明细", "订单明细"), +]) +def test_sanitize_preserves_unicode(raw: str, expected: str) -> None: + assert sanitize(raw) == expected +``` diff --git a/.cursor/rules/error-response-safety.mdc b/.cursor/rules/error-response-safety.mdc new file mode 100644 index 00000000..f2378fbb --- /dev/null +++ b/.cursor/rules/error-response-safety.mdc @@ -0,0 +1,61 @@ +--- +description: Prevent information exposure through exception messages in HTTP responses +globs: py-src/**/*.py +alwaysApply: false +--- + +# Error Response Safety + +Never return raw exception text (`str(e)`, `f"...{e}"`) directly in HTTP responses. +Python exceptions may contain stack traces, file paths, database connection strings, +API keys, or internal IP addresses — all of which are security risks (CWE-209). + +## Rules + +1. **5xx errors** — return a fixed generic message; never expose exception details. +2. **502 errors** — return `"Upstream service unavailable"`; never include upstream error body. +3. **4xx errors** — run `sanitize_error_message(str(e))` so business-validation messages + stay useful while secrets are stripped. +4. **Logging** — always log the full exception server-side (`logger.warning` / `logger.error` + with `exc_info=True` when needed). The client never needs the stack trace. 
+ +## How To + +For Flask route `except` blocks, use `safe_error_response`: + +```python +from data_formulator.security.sanitize import safe_error_response + +except HTTPError as e: + return safe_error_response(e, 502, log_message="Upstream call failed") +except ValueError as e: + return safe_error_response(e, 400, log_message="Invalid input") +except Exception as e: + return safe_error_response(e, 500, log_message="Unexpected error") +``` + +For non-route contexts (generators, background tasks) where a Flask response +cannot be returned, use `sanitize_error_message` directly: + +```python +from data_formulator.security.sanitize import sanitize_error_message + +except Exception as exc: + logger.error("Task failed: %s", exc, exc_info=True) + payload = {"status": "error", "message": sanitize_error_message(str(exc))} +``` + +## Common Mistakes + +```python +# ❌ BAD — raw exception leaks internal details +return jsonify({"message": str(e)}), 500 +return jsonify({"message": f"Failed: {e}"}), 502 + +# ❌ BAD — manual traceback in response +import traceback +return jsonify({"message": traceback.format_exc()}), 500 + +# ✅ GOOD +return safe_error_response(e, 500, log_message="Operation failed") +``` diff --git a/.cursor/rules/frontend-test-conventions.mdc b/.cursor/rules/frontend-test-conventions.mdc new file mode 100644 index 00000000..a9cce5c8 --- /dev/null +++ b/.cursor/rules/frontend-test-conventions.mdc @@ -0,0 +1,64 @@ +--- +description: Frontend TypeScript test conventions (Vitest) +globs: tests/frontend/**/*.test.{ts,tsx} +alwaysApply: false +--- + +# Frontend Test Conventions + +## File Location & Naming + +- Place tests under `tests/frontend/unit/` mirroring the `src/` structure: + - `tests/frontend/unit/data/` → tests for `src/data/` + - `tests/frontend/unit/app/` → tests for `src/app/` + - `tests/frontend/unit/views/` → tests for `src/views/` +- Name files `<module>.test.ts` (or `<module>.test.tsx` for React rendering tests). 
+ +## File Structure + +```typescript +import { describe, it, expect } from 'vitest'; +// For React rendering tests: +// import { render } from '@testing-library/react'; + +import { myFunction } from '../../../../src/<path/to/module>'; + +describe('myFunction', () => { + it('should handle <case>', () => { + expect(myFunction(input)).toBe(expected); + }); +}); +``` + +## Conventions + +- Import `describe`, `it`, `expect` explicitly from `vitest` (globals are enabled but explicit imports improve readability). +- Use `@testing-library/react` and `@testing-library/jest-dom` for component rendering tests. +- Prefer testing **exported pure functions** over testing internal component state. +- When component logic is complex, extract it into an exported helper and test that directly. +- Group tests with `describe` blocks; use section comments (`// --- Null cases ---`) for clarity. +- One assertion per `it` block when possible; name tests as `should <expected behavior>`. +- Do **not** import from `node_modules` internals; only use public API. +- Keep tests independent — no shared mutable state between `it` blocks. 
+ +## Example + +```typescript +// ❌ BAD – no describe, vague test name +import { expect, test } from 'vitest'; +test('works', () => { expect(fn(1)).toBe(2); }); + +// ✅ GOOD +import { describe, it, expect } from 'vitest'; +import { checkIsLikelyTextOnlyModel } from '../../../../src/views/DataLoadingThread'; + +describe('checkIsLikelyTextOnlyModel', () => { + it('returns true for deepseek-chat', () => { + expect(checkIsLikelyTextOnlyModel('deepseek-chat')).toBe(true); + }); + + it('returns false for undefined', () => { + expect(checkIsLikelyTextOnlyModel(undefined)).toBe(false); + }); +}); +``` diff --git a/.cursor/rules/i18n-no-hardcoded-strings.mdc b/.cursor/rules/i18n-no-hardcoded-strings.mdc new file mode 100644 index 00000000..365b8c52 --- /dev/null +++ b/.cursor/rules/i18n-no-hardcoded-strings.mdc @@ -0,0 +1,40 @@ +--- +description: No hardcoded UI strings — use i18n translation keys +globs: src/**/*.{ts,tsx} +alwaysApply: false +--- + +# i18n: No Hardcoded UI Strings + +All user-visible text in the frontend MUST go through the i18n system. Never hardcode Chinese, English, or any other language string directly in components. + +## How to Use + +```tsx +import { useTranslation } from 'react-i18next'; + +const { t } = useTranslation(); + +// ✅ GOOD +<Button>{t('common:save')}</Button> +<TextField placeholder={t('upload:selectFile')} /> + +// ❌ BAD +<Button>保存</Button> +<TextField placeholder="Select a file" /> +``` + +## Translation Files + +- English: `src/i18n/locales/en/<namespace>.json` +- Chinese: `src/i18n/locales/zh/<namespace>.json` +- Namespaces: `common`, `upload`, `chart`, `model`, `encoding`, `messages`, `navigation` + +When adding a new key, add it to **both** `en` and `zh` locale files. Pick the namespace that fits; create a new namespace only if none applies. + +## What Counts as User-Visible + +Must use `t()`: button labels, tooltips, placeholders, error messages, dialog titles, tab names, toast notifications, table headers, empty-state text. + +May stay hardcoded: log messages (`console.log`), error messages thrown but never displayed, internal constants, CSS class names, test IDs. 
diff --git a/.cursor/rules/language-injection-conventions.mdc b/.cursor/rules/language-injection-conventions.mdc new file mode 100644 index 00000000..c9473fbc --- /dev/null +++ b/.cursor/rules/language-injection-conventions.mdc @@ -0,0 +1,21 @@ +--- +description: Language injection conventions for LLM Agent prompts +globs: py-src/data_formulator/agents/**/*.py,py-src/data_formulator/agent_routes.py +alwaysApply: false +--- + +# Language Injection Conventions + +Language flows per-request: `Frontend i18n → Accept-Language header → get_language_instruction() → system prompt`. + +## Rules + +1. **User-facing LLM output** MUST inject language via `get_language_instruction(mode=...)` in the route handler. +2. **Mode selection:** `"full"` for text-heavy agents, `"compact"` for code-generation agents and short-text endpoints. +3. **Inject into system prompt only** — append or insert before a marker, never into user messages. +4. **Do NOT inject** for non-user-facing calls (health checks, internal tool calls). +5. **Do NOT duplicate** — if upstream messages already contain language instruction, skip. +6. **Do NOT** use env vars, global interceptors, or hardcoded language strings (e.g. `"回答请使用中文"`) — always use `build_language_instruction()`. +7. **New language?** Add to `LANGUAGE_DISPLAY_NAMES` in `agents/agent_language.py` and add locale files in `src/i18n/locales/<lang>/`. + +For detailed architecture, code examples, and anti-pattern explanations, see the language-injection skill. 
diff --git a/.cursor/rules/package-manager-conventions.mdc b/.cursor/rules/package-manager-conventions.mdc new file mode 100644 index 00000000..5383e30a --- /dev/null +++ b/.cursor/rules/package-manager-conventions.mdc @@ -0,0 +1,12 @@ +--- +description: Use Yarn only, never npm/pnpm +globs: package.json, yarn.lock +alwaysApply: true +--- + +# Package Manager Rules + +- Use Yarn v1.22.22 only - never use npm or pnpm +- Never manually edit yarn.lock +- Keep yarn.lock changes minimal when adding deps +- Registry must be https://registry.yarnpkg.com diff --git a/.cursor/skills/language-injection/SKILL.md b/.cursor/skills/language-injection/SKILL.md new file mode 100644 index 00000000..a49cde7e --- /dev/null +++ b/.cursor/skills/language-injection/SKILL.md @@ -0,0 +1,72 @@ +# Language Injection for Agent Prompts + +Detailed guide for the language injection system. The short version lives in `.cursor/rules/language-injection-conventions.mdc`. + +## Architecture + +``` +Frontend i18n.language → Accept-Language header → get_language_instruction() + │ + build_language_instruction() + (agents/agent_language.py) + │ + ┌────────────┴────────────┐ + ▼ ▼ + mode="full" mode="compact" + (text-heavy agents) (code-gen agents) +``` + +### Core Modules + +| Module | Role | +|--------|------| +| `agents/agent_language.py` | `build_language_instruction(lang, mode)` — generates prompt fragments; supports 20 languages; returns `""` for English | +| `agent_routes.py` → `get_language_instruction()` | Reads `Accept-Language` header, delegates to `build_language_instruction` | +| `src/app/utils.tsx` → `fetchWithIdentity()` | Sets `Accept-Language` header on every API request from `i18n.language` | + +## Code Examples + +### Route handler — inject language + +```python +# In a Flask route handler: +lang_instruction = get_language_instruction(mode="compact") +lang_suffix = f"\n\n{lang_instruction}" if lang_instruction else "" + +messages = [ + {"role": "system", "content": "You are a helpful 
assistant." + lang_suffix}, + {"role": "user", "content": user_input}, +] +``` + +### Agent constructor — marker-based insertion + +```python +if language_instruction: + marker = "**About the execution environment:**" + idx = self.system_prompt.find(marker) + if idx > 0: + self.system_prompt = ( + self.system_prompt[:idx] + + language_instruction + "\n\n" + + self.system_prompt[idx:] + ) + else: + self.system_prompt += "\n\n" + language_instruction +``` + +## Anti-Patterns (with explanations) + +| Pattern | Why it's wrong | +|---------|---------------| +| `os.environ.get("DF_DEFAULT_LANGUAGE")` | Process-level — all users get same language; breaks multi-user | +| Global LLM client interceptor | Hidden behavior; can't distinguish full/compact mode; fragile string detection | +| New `MessageBuilder` class | Duplicates `agent_language.py`; creates parallel conflicting abstractions | +| Hardcoded `"回答请使用中文"` in prompts | Not configurable; skips the mode system; breaks for other languages | + +## Adding a New Language + +1. Add language code + display name to `LANGUAGE_DISPLAY_NAMES` in `agents/agent_language.py`. +2. Optionally add extra rules to `LANGUAGE_EXTRA_RULES` (e.g. simplified vs traditional Chinese). +3. Add frontend translations in `src/i18n/locales/<lang>/` — copy an existing locale folder as template. +4. No Agent code changes needed — the existing flow picks up new languages automatically. 
diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..1838bc1b --- /dev/null +++ b/.dockerignore @@ -0,0 +1,12 @@ +.git +.gitignore +node_modules +__pycache__ +*.pyc +.env +.env.* +*.egg-info +dist +build +.pytest_cache +.mypy_cache diff --git a/.env.template b/.env.template index ec44c133..ff6fe840 100644 --- a/.env.template +++ b/.env.template @@ -9,6 +9,37 @@ DISABLE_DISPLAY_KEYS=false # if true, API keys will not be shown in the frontend SANDBOX=local # code execution backend: 'local' (default) or 'docker' +# LOG_LEVEL=INFO # logging level for data_formulator modules (DEBUG, INFO, WARNING, ERROR) + +# --- Feature gates --- +# Disable external data connectors (MySQL, PostgreSQL, etc.). +# Recommended for multi-user anonymous deployments to prevent credential exposure. +# DISABLE_DATA_CONNECTORS=false + +# Prevent users from adding custom LLM endpoints via the UI.\n# Only server-configured models (below) will be available.\n# DISABLE_CUSTOM_MODELS=false + +# Flask session secret key — used to sign cookies and encrypt session data. +# Required for SSO and plugin auth (Superset, etc.). Generate one with: +# python -c "import secrets; print(secrets.token_hex(32))" +# FLASK_SECRET_KEY= + +# Data directory — where workspaces and user data are stored on disk. +# Useful for server deployments with large datasets or dedicated storage volumes. +# Resolution order: --data-dir CLI flag > DATA_FORMULATOR_HOME env var > ~/.data_formulator +# Directory structure: +# DATA_FORMULATOR_HOME/ +# ├── users//workspaces/ (per-user workspace data: parquet, metadata) +# ├── workspaces/ (legacy default workspace root) +# └── cache/ (local cache, only for azure_blob backend) +# DATA_FORMULATOR_HOME= + +# Available UI languages (optional, comma-separated). +# Default: en,zh — if not set, both English and Chinese are available. 
+# Supported values: en, zh (add more after creating locale files) +# Examples: +# AVAILABLE_LANGUAGES=zh # only Chinese, language switcher hidden +# AVAILABLE_LANGUAGES=en,zh,ja # three languages +# AVAILABLE_LANGUAGES= # ------------------------------------------------------------------- # LLM provider API keys @@ -19,13 +50,13 @@ SANDBOX=local # code execution backend: 'local' (default) or 'docke # OpenAI OPENAI_ENABLED=true OPENAI_API_KEY=#your-openai-api-key -OPENAI_MODELS=gpt-5.2,gpt-5.1 # comma separated list of models +OPENAI_MODELS=gpt-5.4,gpt-4.1 # comma separated list of models # Azure OpenAI AZURE_ENABLED=true AZURE_API_KEY=#your-azure-openai-api-key AZURE_API_BASE=https://your-azure-openai-endpoint.openai.azure.com/ -AZURE_MODELS=gpt-5.1 +AZURE_MODELS=gpt-5.4 # Anthropic ANTHROPIC_ENABLED=true @@ -35,23 +66,169 @@ ANTHROPIC_MODELS=claude-sonnet-4-20250514 # Ollama OLLAMA_ENABLED=true OLLAMA_API_BASE=http://localhost:11434 -OLLAMA_MODELS=deepseek-v3.1:latest # models with good code generation capabilities recommended +OLLAMA_MODELS=qwen3:32b # models with good code generation capabilities recommended # Add other LiteLLM-supported providers with PROVIDER_API_KEY, PROVIDER_MODELS, etc. # ------------------------------------------------------------------- -# Azure Blob Storage Workspace (optional) +# API base URL allowlist (SSRF protection) # ------------------------------------------------------------------- -# Set WORKSPACE_BACKEND=azure_blob to store workspace data in Azure Blob Storage -# instead of the local filesystem. +# When users add custom models via the UI, they can provide an arbitrary +# api_base URL. To prevent the server from being used as an SSRF proxy, +# set a comma-separated list of allowed URL glob patterns. +# +# Open mode (default): leave unset to allow all URLs (convenient for local dev). +# Enforce mode: set to restrict which endpoints users can target. +# +# Glob patterns use fnmatch syntax (* matches anything, case-insensitive). 
+# Empty api_base (provider defaults like OpenAI/Anthropic) is always allowed. +# Global models (configured above via env vars) bypass this check. +# +# Examples: +# DF_ALLOWED_API_BASES=https://api.openai.com*,https://*.openai.azure.com/* +# DF_ALLOWED_API_BASES=https://api.openai.com*,https://*.openai.azure.com/*,http://localhost:11434/* +# DF_ALLOWED_API_BASES= + +# ------------------------------------------------------------------- +# Authentication (SSO / OAuth2) +# ------------------------------------------------------------------- +# Single-select: only one provider can be active at a time. +# Leave unset for anonymous mode (default, no login required). +# +# AUTH_PROVIDER values: +# oidc — OIDC-compliant IdP (discovers via /.well-known/openid-configuration) +# Use this for Keycloak, Auth0, Okta, Authelia, etc. +# oauth2 — Plain OAuth2 server (discovers via /.well-known/oauth-authorization-server) +# Use this for custom SSO servers that only expose the RFC 8414 endpoint. +# github — GitHub OAuth2 +# azure_easyauth — Azure App Service built-in auth (platform headers) +# +# NOTE: If the primary discovery URL fails, the system automatically tries the +# other discovery path as a fallback, so either value usually works. +# +# --- OIDC / OAuth2 --- +# AUTH_PROVIDER=oidc +# OIDC_ISSUER_URL=https://your-idp.example.com/realms/main +# OIDC_CLIENT_ID=your-client-id +# +# Auto-discovery (recommended): +# Only the three variables above are needed. The backend fetches all endpoint +# URLs from the discovery document and passes them to the frontend. +# +# Manual endpoints (fallback when discovery is not available): +# When your IdP does NOT expose a discovery document, set the endpoint URLs +# explicitly. Manual values always take precedence over discovery. 
+# +# OIDC_AUTHORIZE_URL=https://your-idp.example.com/oauth2/authorize +# OIDC_TOKEN_URL=https://your-idp.example.com/oauth2/token +# OIDC_USERINFO_URL=https://your-idp.example.com/oauth2/userinfo +# OIDC_JWKS_URL=https://your-idp.example.com/oauth2/jwks +# +# If your IdP has no JWKS endpoint, leave OIDC_JWKS_URL unset — the backend +# will validate tokens by calling the UserInfo endpoint instead. +# +# OAuth2 scopes to request (optional — auto-selected if unset): +# OIDC_SCOPES=openid profile email +# +# SSL verification (set to false for self-signed certificates): +# OIDC_VERIFY_SSL=true +# +# Client secret (server-side only — NEVER exposed to the browser). +# The frontend uses PKCE (Public Client) for secure token exchange. +# Only set this if your IdP requires it for backend token validation or +# if you need server-to-server API calls: +# OIDC_CLIENT_SECRET= +# +# The frontend redirect URI to register in your IdP: http(s):///callback +# In your IdP, register the client as a "Public Client" and enable PKCE (S256). +# +# --- GitHub OAuth2 --- +# AUTH_PROVIDER=github +# GITHUB_CLIENT_ID=your-github-app-client-id +# GITHUB_CLIENT_SECRET=your-github-app-client-secret +# +# Create a GitHub OAuth App at https://github.com/settings/developers +# Set the callback URL to: http(s):///api/auth/github/callback +# +# --- Azure App Service EasyAuth --- +# AUTH_PROVIDER=azure_easyauth +# (No extra variables needed — Azure injects identity via platform headers) # +# --- Common options (all providers) --- +# ALLOW_ANONYMOUS=true # allow unauthenticated access as fallback (default: true) +# AUTH_DISPLAY_NAME=SSO Login # label shown on the login button + +# ------------------------------------------------------------------- +# Workspace storage backend +# ------------------------------------------------------------------- +# Controls where workspace data (tables, sessions, metadata) is persisted. 
+# Also configurable via CLI: --workspace-backend +# +# Choices: +# local — (default) stores data under DATA_FORMULATOR_HOME +# azure_blob — stores data in Azure Blob Storage (see below) +# ephemeral — temp dirs; data does NOT survive restart (if you want to prevent user data retention on server disk for privacy reasons) +# +# WORKSPACE_BACKEND=local + +# ------------------------------------------------------------------- +# Azure Blob Storage settings (only when WORKSPACE_BACKEND=azure_blob) +# ------------------------------------------------------------------- # Authentication — choose ONE of the following: # Option A: Connection string (shared key / SAS) # AZURE_BLOB_CONNECTION_STRING=DefaultEndpointsProtocol=https;AccountName=... # Option B: Entra ID (Managed Identity / az login / workload identity) # AZURE_BLOB_ACCOUNT_URL=https://.blob.core.windows.net # -# WORKSPACE_BACKEND=local +# Blob container name (default: data-formulator): +# AZURE_BLOB_CONTAINER=data-formulator +# +# CLI equivalents: +# --azure-blob-connection-string, --azure-blob-account-url, --azure-blob-container +# # AZURE_BLOB_CONNECTION_STRING= # AZURE_BLOB_ACCOUNT_URL= -# AZURE_BLOB_CONTAINER=data-formulator \ No newline at end of file +# AZURE_BLOB_CONTAINER=data-formulator + +# ------------------------------------------------------------------- +# Data source plugins +# ------------------------------------------------------------------- +# Plugins are activated by setting their required env vars. +# Each plugin uses a PLG__ prefix. +# +# --- Apache Superset --- +# PLG_SUPERSET_URL=http://superset.example.com:8088 +# +# SSO login URL (optional): +# By default DF opens {PLG_SUPERSET_URL}/df-sso-bridge/ as the SSO popup. 
+# Override this if your Superset requires going through the login page first: +# PLG_SUPERSET_SSO_LOGIN_URL=http://superset.example.com:8088/login/?next=/df-sso-bridge/ +# +# Superset-side setup: +# The Superset instance needs a small bridge endpoint at /df-sso-bridge/ +# that converts a Superset session into a JWT and posts it back to DF. +# See: superset-sso-bridge-setup.md + +# ------------------------------------------------------------------- +# Deployment profiles (quick-start presets) +# ------------------------------------------------------------------- +# See DEVELOPMENT.md "Deployment Profiles" for full documentation. +# +# Profile 1 — Single-user local (default, no changes needed): +# Just run: data_formulator +# +# Profile 2 — Multi-user anonymous demo: +# WORKSPACE_BACKEND=ephemeral +# DISABLE_DATA_CONNECTORS=true +# DISABLE_CUSTOM_MODELS=true +# DISABLE_DISPLAY_KEYS=true +# (or simply: DISABLE_DATABASE=true as shortcut) +# +# Profile 3 — Multi-user authenticated (enterprise): +# AUTH_PROVIDER=oidc +# OIDC_ISSUER_URL=https://your-idp.example.com/realms/main +# OIDC_CLIENT_ID=data-formulator +# ALLOW_ANONYMOUS=false +# DISABLE_CUSTOM_MODELS=true +# WORKSPACE_BACKEND=azure_blob +# FLASK_SECRET_KEY= \ No newline at end of file diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 00000000..26a5da2d --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,8 @@ +# Project Instructions + +## Python Environment + +- Always use `uv` instead of `pip` for installing packages (e.g. `uv pip install`, `uv pip install -e .`). +- Use `uv run` to execute Python scripts and modules (e.g. `uv run python script.py`, `uv run pytest`). +- The virtual environment is at `.venv/`. Activate it with `source .venv/bin/activate` if needed. +- The Python source is under `py-src/`. The project is installed in editable mode via `uv pip install -e .`. 
diff --git a/.gitignore b/.gitignore index f4cda9a6..f43b7bd7 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,9 @@ build/ dist/ experiment_data/ +py-src/eval_rec_ts/* +py-src/evaluation_old/* + ## Ignore Visual Studio temporary files, build results, and ## files generated by popular Visual Studio add-ons. ## @@ -336,6 +339,7 @@ paket-files/ # Python Tools for Visual Studio (PTVS) __pycache__/ *.pyc +*.egg-info/ # Cake - Uncomment if you are using it # tools/** @@ -408,6 +412,11 @@ FodyWeavers.xsd *.sln.iml venv +# Temporary documentation directory +tmp-docs/ \.\NUL -NUL \ No newline at end of file +NUL + +# Package manager lock files (using yarn) +package-lock.json \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..e1cb88e2 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,8 @@ +{ + "python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python", + "chat.tools.terminal.autoApprove": { + "npx tsc": true, + "npx vite": true, + "npx tsx": true + } +} diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index cb9ed8e3..543751d8 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -121,6 +121,45 @@ uv run data_formulator --dev # Run backend only (for frontend development) Open [http://localhost:5567](http://localhost:5567) to view it in the browser. +## Docker + +Docker is the easiest way to run Data Formulator without installing Python or Node.js locally. + +### Quick start + +1. **Copy the environment template and add your API keys:** + + ```bash + cp .env.template .env + # Edit .env and set your OPENAI_API_KEY, ANTHROPIC_API_KEY, etc. + ``` + +2. **Build and start the container:** + + ```bash + docker compose up --build + ``` + +3. Open [http://localhost:5567](http://localhost:5567) in your browser. + +To stop the container: `docker compose down` + +Workspace data (uploaded files, sessions) is persisted in a Docker volume (`data_formulator_home`) so it survives container restarts. 
+ +### Build the image manually + +```bash +docker build -t data-formulator . +docker run --rm -p 5567:5567 --env-file .env data-formulator +``` + +### Docker sandbox (`SANDBOX=docker`) is not supported inside a container + +The Docker sandbox backend works by calling `docker run -v :...` to bind-mount temporary workspace directories into child containers. When Data Formulator itself runs in a Docker container those paths refer to the *container* filesystem, not the host, so Docker daemon cannot mount them and the feature does not work. + +Use `SANDBOX=docker` only when running Data Formulator **directly on the host** (e.g. with `uv run data_formulator --sandbox docker` or `python -m data_formulator --sandbox docker`). When using the Docker image, keep the default `SANDBOX=local`. + + ## Sandbox AI-generated Python code runs inside a **sandbox** to isolate it from the main server process. Two backends are available: @@ -278,62 +317,196 @@ data-formulator/ ← container | Flag | Env var | Default | Description | |------|---------|---------|-------------| -| `--workspace-backend` | `WORKSPACE_BACKEND` | `local` | `local` or `azure_blob` | +| `--workspace-backend` | `WORKSPACE_BACKEND` | `local` | `local`, `azure_blob`, or `ephemeral` | | `--azure-blob-connection-string` | `AZURE_BLOB_CONNECTION_STRING` | — | Shared-key connection string | | `--azure-blob-account-url` | `AZURE_BLOB_ACCOUNT_URL` | — | Account URL for Entra ID auth | | `--azure-blob-container` | `AZURE_BLOB_CONTAINER` | `data-formulator` | Blob container name | -## Security Considerations for Production Deployment +## Deployment Profiles -⚠️ **IMPORTANT SECURITY WARNING FOR PRODUCTION DEPLOYMENT** +Data Formulator supports three deployment configurations. **All defaults are optimized for Profile 1 (single-user local)** — you only need to set flags when deploying as multi-user. 
-When deploying Data Formulator to production, please be aware of the following security considerations: +### Profile 1: Single-User Local (default) -### Database and Data Storage Security +A personal instance running on `localhost`. No login required, full feature access. -1. **Workspace and table data**: Table data is stored in per-identity workspaces (e.g. parquet files). DuckDB is used only in-memory per request when needed (e.g. for SQL mode); no persistent DuckDB database files are created by the app. +```bash +# Everything uses defaults — just run it: +data_formulator -2. **Identity Management**: - - Each user's data is isolated by a namespaced identity key (e.g., `user:alice@example.com` or `browser:550e8400-...`) - - Anonymous users get a browser-based UUID stored in localStorage - - Authenticated users get their verified user ID from the auth provider +# Or equivalently: +data_formulator \ + --workspace-backend local \ + --sandbox local +``` -3. **Data persistence**: User data may be written to workspace storage (e.g. parquet) on the server. In multi-tenant deployments, ensure workspace directories are isolated and access-controlled. +| Setting | Value | Why | +|---------|-------|-----| +| `AUTH_PROVIDER` | *(unset)* | Single user, no login needed | +| `WORKSPACE_BACKEND` | `local` | Persist workspaces to `~/.data_formulator/` | +| `DISABLE_DATA_CONNECTORS` | `false` | Full access to MySQL, PostgreSQL, etc. | +| `DISABLE_CUSTOM_MODELS` | `false` | User can add any LLM endpoint | +| `DISABLE_DISPLAY_KEYS` | `false` | User can see/manage their own API keys | +| Credential vault | auto-enabled | Remembers DB credentials across restarts | +| Identity | `local:` | Fixed, OS-derived — survives localStorage clear | -### Recommended Security Measures +**Security notes:** In single-user localhost mode, the server ignores the `X-Identity-Id` header entirely and uses a fixed identity derived from the OS username (e.g., `local:alice`). 
This means vault credentials and workspaces are tied to your OS account, not a random browser UUID — clearing localStorage won't orphan your data. + +### Profile 2: Multi-User Anonymous (demo / public hosting) + +A shared server (e.g., for demos, workshops, public access). No login, no server-side state, no sensitive features. + +```bash +data_formulator \ + --workspace-backend ephemeral \ + --disable-data-connectors \ + --disable-custom-models \ + --disable-display-keys +``` + +> **Shortcut:** `--disable-database` (or `DISABLE_DATABASE=true`) bundles all of the above into a single flag. + +Or via environment variables: + +```env +WORKSPACE_BACKEND=ephemeral +DISABLE_DATA_CONNECTORS=true +DISABLE_CUSTOM_MODELS=true +DISABLE_DISPLAY_KEYS=true +# Pre-configure the LLM models users can access: +OPENAI_ENABLED=true +OPENAI_API_KEY=sk-... +OPENAI_MODELS=gpt-4.1 +``` -For production deployment, consider: +| Setting | Value | Why | +|---------|-------|-----| +| `AUTH_PROVIDER` | *(unset)* | Anonymous access for demos | +| `WORKSPACE_BACKEND` | `ephemeral` | No server-side persistence — data lives only in browser IndexedDB | +| `DISABLE_DATA_CONNECTORS` | `true` | **Critical** — prevents DB credential exposure via identity spoofing | +| `DISABLE_CUSTOM_MODELS` | `true` | Prevents users from adding arbitrary LLM endpoints (SSRF risk) | +| `DISABLE_DISPLAY_KEYS` | `true` | Hides server-configured API keys from UI | +| Credential vault | N/A | No connectors → no credentials to store | +| Identity | anonymous (`browser:`) | Acceptable — no sensitive server-side state to protect | -1. **Use `--disable-database` flag** to disable table-connector routes when you do not need external or uploaded table support -2. **Implement proper authentication, authorization, and other security measures** as needed for your specific use case, for example: - - User authentication (OAuth, JWT tokens, etc.) 
- - Role-based access control - - API rate limiting - - HTTPS/TLS encryption - - Input validation and sanitization +**Security notes:** With data connectors disabled, the anonymous identity spoofing risk is eliminated — there are no DB credentials or persistent workspaces on the server to access. Each user's data lives entirely in their browser. The only server-side resource is the LLM proxy, which is locked down by `DF_ALLOWED_API_BASES`. -### Configuration for Production +### Profile 3: Multi-User Authenticated (enterprise / team) + +A shared server with SSO login. Full features, proper identity isolation. ```bash -# For stateless deployment (recommended for public hosting) -python -m data_formulator.app --disable-database +data_formulator \ + --workspace-backend azure_blob \ + --disable-display-keys +``` + +```env +AUTH_PROVIDER=oidc +OIDC_ISSUER_URL=https://your-idp.example.com/realms/main +OIDC_CLIENT_ID=data-formulator +ALLOW_ANONYMOUS=false +WORKSPACE_BACKEND=azure_blob +AZURE_BLOB_ACCOUNT_URL=https://.blob.core.windows.net +DISABLE_DISPLAY_KEYS=true +DISABLE_CUSTOM_MODELS=true +FLASK_SECRET_KEY= ``` +| Setting | Value | Why | +|---------|-------|-----| +| `AUTH_PROVIDER` | `oidc` / `github` / `azure_easyauth` | Verified identity from SSO | +| `ALLOW_ANONYMOUS` | `false` | Login required — no anonymous fallback | +| `WORKSPACE_BACKEND` | `azure_blob` or `local` | Persistent per-user workspaces | +| `DISABLE_DATA_CONNECTORS` | `false` | Safe — identity comes from auth provider, not spoofable | +| `DISABLE_CUSTOM_MODELS` | `true` | Users only use server-configured models | +| `DISABLE_DISPLAY_KEYS` | `true` | Hide server keys; users add their own | +| `FLASK_SECRET_KEY` | set explicitly | Required for stable sessions across server restarts | +| Credential vault | auto-enabled | DB credentials scoped to verified `user:` | +| Identity | `user:` from auth provider | Server-verified, cannot be spoofed | + +**Security notes:** With an auth provider, 
`get_identity_id()` returns `user:` from the IdP token — the `X-Identity-Id` header is ignored entirely. Workspaces, vault credentials, and DB connections are all scoped to the verified identity. Set `ALLOW_ANONYMOUS=false` to prevent unauthenticated access. + +### Profile Comparison + +| Feature | Profile 1 (Local) | Profile 2 (Demo) | Profile 3 (Enterprise) | +|---------|:-:|:-:|:-:| +| Login required | No | No | Yes | +| Data connectors (DB) | Yes | **No** | Yes | +| Custom LLM endpoints | Yes | **No** | Operator choice | +| Credential vault | Yes | N/A | Yes | +| Workspace persistence | Local disk | Browser only | Cloud / disk | +| Identity | `local:` (fixed) | `browser:` (client) | `user:` (SSO) | + +### CLI Flags Reference (complete) + +| Flag | Env var | Default | Description | +|------|---------|---------|-------------| +| `--workspace-backend` | `WORKSPACE_BACKEND` | `local` | `local`, `azure_blob`, or `ephemeral` | +| `--sandbox` | `SANDBOX` | `local` | Code execution backend: `local` or `docker` | +| `--disable-database` | `DISABLE_DATABASE` | `false` | **Multi-user anonymous preset**: bundles ephemeral + no connectors + no custom models + hide keys | +| `--disable-display-keys` | `DISABLE_DISPLAY_KEYS` | `false` | Hide API keys in frontend UI | +| `--disable-data-connectors` | `DISABLE_DATA_CONNECTORS` | `false` | Disable external DB connectors | +| `--disable-custom-models` | `DISABLE_CUSTOM_MODELS` | `false` | Prevent users from adding custom LLM endpoints | +| `--max-display-rows` | `MAX_DISPLAY_ROWS` | `10000` | Max rows sent to frontend | +| `--data-dir` | `DATA_FORMULATOR_HOME` | `~/.data_formulator` | Data directory | +| `--host` | `HOST` | `127.0.0.1` | Network interface to bind | +| `-p`, `--port` | — | `5567` | Port number | +| `--dev` | `DEV_MODE` | `false` | Development mode (no auto-open browser) | +| — | `AUTH_PROVIDER` | *(unset)* | `oidc`, `github`, `azure_easyauth`, or unset for anonymous | +| — | `ALLOW_ANONYMOUS` | `true` | Allow 
unauthenticated access when auth provider is set | +| — | `DF_ALLOWED_API_BASES` | *(unset, all allowed)* | Comma-separated URL globs for LLM endpoint allowlist | +| — | `FLASK_SECRET_KEY` | auto-generated | Session signing key (set explicitly for production) | +| `--azure-blob-connection-string` | `AZURE_BLOB_CONNECTION_STRING` | — | Azure Blob shared-key connection string | +| `--azure-blob-account-url` | `AZURE_BLOB_ACCOUNT_URL` | — | Azure Blob account URL for Entra ID auth | +| `--azure-blob-container` | `AZURE_BLOB_CONTAINER` | `data-formulator` | Azure Blob container name | + + +## Security Considerations for Production Deployment + +⚠️ **IMPORTANT SECURITY WARNING FOR PRODUCTION DEPLOYMENT** + +### Identity System + +Data Formulator uses a **namespaced identity** system with three tiers: +- **Local mode** (`127.0.0.1`, no auth provider): Identity is `local:`, determined by the server. The `X-Identity-Id` header is ignored. Vault and workspaces are tied to the OS user. +- **Anonymous mode** (multi-user, no auth provider): Identity is `browser:` where the UUID is generated in the browser's `localStorage`. The server trusts the client-provided `X-Identity-Id` header, but always forces the `browser:` prefix. +- **Authenticated mode** (auth provider configured): Identity is `user:` from the auth provider. The `X-Identity-Id` header is ignored entirely. + +**Key security principle**: An attacker sending `X-Identity-Id: user:alice@...` gets `browser:alice@...` — completely separate from the real `user:alice@...` that only authenticated Alice can access. + +**Anonymous spoofing risk**: In anonymous mode, if an attacker knows another user's browser UUID, they can impersonate them via the `X-Identity-Id` header. This is why **Profile 2 disables data connectors** (no DB credentials to steal) and **Profile 3 requires authentication** (header is ignored). 
+ +### Data Storage + +| Backend | Flag | Storage | Persistence | +|---------|------|---------|-------------| +| **local** (default) | `--workspace-backend local` | `~/.data_formulator/users//workspaces/` | Server filesystem | +| **azure_blob** | `--workspace-backend azure_blob` | Azure Blob container | Cloud | +| **ephemeral** | `--workspace-backend ephemeral` | Browser IndexedDB (frontend) + temp dirs (backend) | Browser session only | + +### Recommended Security Measures + +1. **Multi-user anonymous (demos)**: Use Profile 2 — `--workspace-backend ephemeral --disable-data-connectors --disable-custom-models --disable-display-keys` (or `--disable-database` as shortcut) +2. **Multi-user authenticated**: Use Profile 3 — set `AUTH_PROVIDER`, `ALLOW_ANONYMOUS=false`, and `DISABLE_CUSTOM_MODELS=true` +3. **HTTPS**: Use a reverse proxy (nginx, Azure App Gateway) with TLS termination +4. **`FLASK_SECRET_KEY`**: Set explicitly for production (auto-generated key changes on restart) + ## Authentication Architecture -Data Formulator supports a **hybrid identity system** that supports both anonymous and authenticated users. +Data Formulator supports a **hybrid identity system** with anonymous and authenticated modes. +See **Deployment Profiles** above for which mode to use in each scenario. -### Identity Flow Overview +### Identity Flow ``` ┌─────────────────────────────────────────────────────────────────────┐ │ Frontend Request │ ├─────────────────────────────────────────────────────────────────────┤ │ Headers: │ -│ X-Identity-Id: "browser:550e8400-..." (namespace sent by client) │ -│ Authorization: Bearer (if custom auth implemented) │ -│ (Azure also adds X-MS-CLIENT-PRINCIPAL-ID automatically) │ +│ X-Identity-Id: "local:alice" / "browser:550e8400-..." / ... 
│ +│ Authorization: Bearer (if auth provider configured) │ └─────────────────────────────────────────────────────────────────────┘ │ ▼ @@ -341,69 +514,33 @@ Data Formulator supports a **hybrid identity system** that supports both anonymo │ Backend Identity Resolution │ │ (auth.py: get_identity_id) │ ├─────────────────────────────────────────────────────────────────────┤ -│ Priority 1: Azure X-MS-CLIENT-PRINCIPAL-ID → "user:" │ -│ Priority 2: JWT Bearer token (if implemented) → "user:" │ -│ Priority 3: X-Identity-Id header → ALWAYS "browser:" │ -│ (client-provided namespace is IGNORED for security) │ +│ Priority 1: Auth provider (OIDC/GitHub/EasyAuth) → "user:" │ +│ Priority 2: Localhost mode (127.0.0.1) → "local:" │ +│ (ignores X-Identity-Id header) │ +│ Priority 3: X-Identity-Id header → "browser:" │ +│ (client-provided namespace prefix is IGNORED) │ └─────────────────────────────────────────────────────────────────────┘ │ ▼ ┌─────────────────────────────────────────────────────────────────────┐ │ Storage Isolation │ ├─────────────────────────────────────────────────────────────────────┤ -│ "user:alice@example.com" → alice's DuckDB file (ONLY via auth) │ -│ "browser:550e8400-..." → anonymous user's DuckDB file │ +│ "user:alice@example.com" → alice's workspace (ONLY via auth) │ +│ "local:alice" → localhost user's workspace (fixed) │ +│ "browser:550e8400-..." → anonymous user's workspace │ └─────────────────────────────────────────────────────────────────────┘ ``` -### Security Model - -**Critical Security Rule:** The backend NEVER trusts the namespace prefix from the client-provided `X-Identity-Id` header. Even if a client sends `X-Identity-Id: "user:alice@..."`, the backend strips the prefix and forces `browser:alice@...`. Only verified authentication (Azure headers or JWT) can result in a `user:` prefixed identity. 
- -The key security principle is **namespaced isolation with forced prefixing**: - -| Scenario | X-Identity-Id Sent | Backend Resolution | Storage Key | -|----------|-------------------|-------------------|-------------| -| Anonymous user | `browser:550e8400-...` | Strips prefix, forces `browser:` | `browser:550e8400-...` | -| Azure logged-in user | `browser:550e8400-...` | Uses Azure header (priority 1) | `user:alice@...` | -| Attacker spoofing | `user:alice@...` (forged) | No valid auth, strips & forces `browser:` | `browser:alice@...` | - -**Why this is secure:** An attacker sending `X-Identity-Id: user:alice@...` gets `browser:alice@...` as their storage key, which is completely separate from the real `user:alice@...` that only authenticated Alice can access. - -### Implementing Custom Authentication - -To add JWT-based authentication: +### Auth Provider Setup -1. **Backend** (`tables_routes.py`): Uncomment and configure the JWT verification code in `get_identity_id()` -2. **Frontend** (`utils.tsx`): Implement `getAuthToken()` to retrieve the JWT from your auth context -3. **Add JWT secret** to Flask config: `current_app.config['JWT_SECRET']` - -### Azure App Service Authentication - -When deployed to Azure with EasyAuth enabled: -- Azure automatically adds `X-MS-CLIENT-PRINCIPAL-ID` header to authenticated requests -- The backend reads this header first (highest priority) -- No frontend changes needed - Azure handles the auth flow - -### Frontend Identity Management - -The frontend (`src/app/identity.ts`) manages identity as follows: - -```typescript -// Identity is always initialized with browser ID -identity: { type: 'browser', id: getBrowserId() } - -// If user logs in (e.g., via Azure), it's updated to: -identity: { type: 'user', id: userInfo.userId } - -// All API requests send namespaced identity: -// X-Identity-Id: "browser:550e8400-..." or "user:alice@..." -``` +See the `AUTH_PROVIDER` section in `.env.template` for configuration details. 
-This ensures: -1. **Anonymous users**: Work immediately with localStorage-based browser ID -2. **Logged-in users**: Get their verified user ID from the auth provider -3. **Cross-tab consistency**: Browser ID is shared via localStorage across all tabs +| Provider | `AUTH_PROVIDER` | Setup | +|----------|----------------|-------| +| OIDC / OAuth2 | `oidc` | Set `OIDC_ISSUER_URL` + `OIDC_CLIENT_ID` | +| GitHub | `github` | Set `GITHUB_CLIENT_ID` + `GITHUB_CLIENT_SECRET` | +| Azure EasyAuth | `azure_easyauth` | Enable in Azure App Service (no extra env vars) | +| Anonymous only | *(unset)* | Default — no login, `browser:` identity | ## Usage See the [Usage section on the README.md page](README.md#usage). diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..d066f436 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,68 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +# --------------------------------------------------------------------------- +# Stage 1: Build the React/TypeScript frontend +# --------------------------------------------------------------------------- +FROM node:20-slim AS frontend-builder + +WORKDIR /app + +# Install dependencies +COPY package.json yarn.lock ./ +RUN yarn install --frozen-lockfile + +# Copy source and build +COPY index.html tsconfig.json vite.config.ts eslint.config.js ./ +COPY public ./public +COPY src ./src +RUN yarn build + +# --------------------------------------------------------------------------- +# Stage 2: Python runtime with the built frontend bundled in +# --------------------------------------------------------------------------- +FROM python:3.11-slim AS runtime + +# System dependencies needed by some Python packages +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + g++ \ + libpq-dev \ + unixodbc-dev \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Create a non-root user to run the application +RUN useradd -m -s /bin/bash appuser + +# Ensure Unicode 
filenames work correctly (Chinese, Japanese, etc.) +ENV LANG=C.UTF-8 + +# Set the home directory for workspace data to a deterministic path +ENV DATA_FORMULATOR_HOME=/home/appuser/.data_formulator + +WORKDIR /app + +# Copy Python package sources +COPY pyproject.toml MANIFEST.in README.md ./ +COPY py-src ./py-src + +# Copy the compiled frontend into the package's expected location +COPY --from=frontend-builder /app/py-src/data_formulator/dist ./py-src/data_formulator/dist + +# Install the package and its dependencies +RUN pip install --no-cache-dir . + +# Switch to non-root user and ensure workspace and app directories are owned by it +RUN mkdir -p "${DATA_FORMULATOR_HOME}" && chown -R appuser:appuser /app "${DATA_FORMULATOR_HOME}" +USER appuser + +EXPOSE 5567 + +HEALTHCHECK --interval=30s --timeout=5s --retries=3 \ + CMD curl -f http://localhost:5567/ || exit 1 + +# Run the app on all interfaces so Docker port-forwarding works. +# We do not pass --dev so Flask runs in production mode (no debugger/reloader). +# webbrowser.open() fails silently in a headless container, which is harmless. +ENTRYPOINT ["python", "-m", "data_formulator", "--host", "0.0.0.0", "--port", "5567"] diff --git a/README.md b/README.md index b39d7722..19b9abfc 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ https://github.com/user-attachments/assets/8ca57b68-4d7a-42cb-bcce-43f8b1681ce2 ## News 🔥🔥🔥 -[03-02-2026] **Data Formulator 0.7 (alpha)** — More charts, new experience, enterprise-ready +[03-18-2026] **Data Formulator 0.7 (alpha)** — More charts, new experience, enterprise-ready - 📊 **30 chart types** with a new semantic chart engine (area, streamgraph, candlestick, pie, radar, maps, and more). - 💬 **Hybrid chat + data thread** — chat woven into the exploration timeline with lineage, previews, and reasoning. - 🤖 **Unified `DataAgent`** replacing four separate agents, plus new recommendation and insight agents. 
@@ -42,6 +42,11 @@ https://github.com/user-attachments/assets/8ca57b68-4d7a-42cb-bcce-43f8b1681ce2 - 📦 **UV-first build** — reproducible builds via `uv.lock`; `uv sync` + `uv run data_formulator`. - 📝 Detailed writeup on the new architecture coming soon — stay tuned! +> [!TIP] +> **Are you a developer?** Join us to shape the future of AI-powered data exploration! +> We're looking for help with new agents, data connectors, chart templates, and more. +> Check out the [Developers' Guide](DEVELOPMENT.md) and our [open issues](https://github.com/microsoft/data-formulator/issues). + ## Previous Updates Here are milestones that lead to the current design: @@ -60,179 +65,63 @@ Here are milestones that lead to the current design: - **Data Extraction**: Parse data from images and text ([demo](https://github.com/microsoft/data-formulator/pull/31#issuecomment-2403652717)) - **Initial Release**: [Blog](https://www.microsoft.com/en-us/research/blog/data-formulator-exploring-how-ai-can-help-analysts-create-rich-data-visualizations/) | [Video](https://youtu.be/3ndlwt0Wi3c) -
-View detailed update history - -- [07-10-2025] Data Formulator 0.2.2: Start with an analysis goal - - Some key frontend performance updates. - - You can start your exploration with a goal, or, tab and see if the agent can recommend some good exploration ideas for you. [Demo](https://github.com/microsoft/data-formulator/pull/176) - -- [05-13-2025] Data Formulator 0.2.1.3/4: External Data Loader - - We introduced external data loader class to make import data easier. [Readme](https://github.com/microsoft/data-formulator/tree/main/py-src/data_formulator/data_loader) and [Demo](https://github.com/microsoft/data-formulator/pull/155) - - Current data loaders: MySQL, Azure Data Explorer (Kusto), Azure Blob and Amazon S3 (json, parquet, csv). - - [07-01-2025] Updated with: Postgresql, mssql. - - Call for action [link](https://github.com/microsoft/data-formulator/issues/156): - - Users: let us know which data source you'd like to load data from. - - Developers: let's build more data loaders. - -- [04-23-2025] Data Formulator 0.2: working with *large* data 📦📦📦 - - Explore large data by: - 1. Upload large data file to the local database (powered by [DuckDB](https://github.com/duckdb/duckdb)). - 2. Use drag-and-drop to specify charts, and Data Formulator dynamically fetches data from the database to create visualizations (with ⚡️⚡️⚡️ speeds). - 3. Work with AI agents: they generate SQL queries to transform the data to create rich visualizations! - 4. Anchor the result / follow up / create a new branch / join tables; let's dive deeper. - - Checkout the demos at [[https://github.com/microsoft/data-formulator/releases/tag/0.2]](https://github.com/microsoft/data-formulator/releases/tag/0.2) - - Improved overall system performance, and enjoy the updated derive concept functionality. - -- [03-20-2025] Data Formulator 0.1.7: Anchoring ⚓︎ - - Anchor an intermediate dataset, so that followup data analysis are built on top of the anchored data, not the original one. 
- - Clean a data and work with only the cleaned data; create a subset from the original data or join multiple data, and then go from there. AI agents will be less likely to get confused and work faster. ⚡️⚡️ - - Check out the demos at [[https://github.com/microsoft/data-formulator/releases/tag/0.1.7]](https://github.com/microsoft/data-formulator/releases/tag/0.1.7) - - Don't forget to update Data Formulator to test it out! - -- [02-20-2025] Data Formulator 0.1.6 released! - - Now supports working with multiple datasets at once! Tell Data Formulator which data tables you would like to use in the encoding shelf, and it will figure out how to join the tables to create a visualization to answer your question. 🪄 - - Checkout the demo at [[https://github.com/microsoft/data-formulator/releases/tag/0.1.6]](https://github.com/microsoft/data-formulator/releases/tag/0.1.6). - - Update your Data Formulator to the latest version to play with the new features. - -- [02-12-2025] More models supported now! - - Now supports OpenAI, Azure, Ollama, and Anthropic models (and more powered by [LiteLLM](https://github.com/BerriAI/litellm)); - - Models with strong code generation and instruction following capabilities are recommended (gpt-4o, claude-3-5-sonnet etc.); - - You can store API keys in `.env` to avoid typing them every time (copy `.env.template` to `.env` and fill in your keys). - - Let us know which models you have good/bad experiences with, and what models you would like to see supported! [[comment here]](https://github.com/microsoft/data-formulator/issues/49) - -- [11-07-2024] Minor fun update: data visualization challenges! - - We added a few visualization challenges with the sample datasets. Can you complete them all? [[try them out!]](https://github.com/microsoft/data-formulator/issues/53#issue-2641841252) - - Comment in the issue when you did, or share your results/questions with others! 
[[comment here]](https://github.com/microsoft/data-formulator/issues/53) - -- [10-11-2024] Data Formulator python package released! - - You can now install Data Formulator using Python and run it locally, easily. [[check it out]](#get-started). - - Our Codespaces configuration is also updated for fast start up ⚡️. [[try it now!]](https://codespaces.new/microsoft/data-formulator?quickstart=1) - - New experimental feature: load an image or a messy text, and ask AI to parse and clean it for you(!). [[demo]](https://github.com/microsoft/data-formulator/pull/31#issuecomment-2403652717) - -- [10-01-2024] Initial release of Data Formulator, check out our [[blog]](https://www.microsoft.com/en-us/research/blog/data-formulator-exploring-how-ai-can-help-analysts-create-rich-data-visualizations/) and [[video]](https://youtu.be/3ndlwt0Wi3c)! - -
- ## Overview **Data Formulator** is a Microsoft Research prototype for data exploration with visualizations powered by AI agents. -Data Formulator enables analysts to iteratively explore and visualize data. Started with data in any format (screenshot, text, csv, or database), users can work with AI agents with a novel blended interface that combines *user interface interactions (UI)* and *natural language (NL) inputs* to communicate their intents, control branching exploration directions, and create reports to share their insights. +Data Formulator enables analysts to explore data with visualizations. Starting with data in any format (screenshot, text, csv, or database), you can work with AI agents with a novel blended interface that combines *user interface interactions (UI)* and *natural language (NL) inputs* to communicate your intents, control branching exploration directions, and create reports to share your insights. ## Get Started -Play with Data Formulator with one of the following options: +Play with Data Formulator with one of the following options. - **Option 1: Install via uv (recommended)** [uv](https://docs.astral.sh/uv/) is an extremely fast Python package manager. If you have uv installed, you can run Data Formulator directly without any setup: ```bash - # Run data formulator directly (no install needed) uvx data_formulator ``` - Or install it in a project/virtual environment: - - ```bash - # Install data_formulator - uv pip install data_formulator - - # Run data formulator - python -m data_formulator - ``` - - Data Formulator will be automatically opened in the browser at [http://localhost:5567](http://localhost:5567). + Run `uvx data_formulator --help` to see all available options, such as custom port, sandboxing mode, and data storage location. - **Option 2: Install via pip** Use pip for installation (recommend: install it in a virtual environment). 
```bash - # install data_formulator - pip install data_formulator - - # Run data formulator with this command - python -m data_formulator + pip install data_formulator # install + python -m data_formulator # run ``` Data Formulator will be automatically opened in the browser at [http://localhost:5567](http://localhost:5567). - *you can specify the port number (e.g., 8080) by `python -m data_formulator --port 8080` if the default port is occupied.* - -- **Option 3: Codespaces (5 minutes)** - - You can also run Data Formulator in Codespaces; we have everything pre-configured. For more details, see [CODESPACES.md](CODESPACES.md). - - [![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/microsoft/data-formulator?quickstart=1) +- **Option 3: Run with Docker** -- **Option 4: Working in the developer mode** - - You can build Data Formulator locally if you prefer full control over your development environment and develop your own version on top. For detailed instructions, refer to [DEVELOPMENT.md](DEVELOPMENT.md). + ```bash + docker compose up --build + ``` + Open [http://localhost:5567](http://localhost:5567) in your browser. To stop, press `Ctrl+C` or run `docker compose down`. -## Using Data Formulator +- **Option 4: Codespaces** -### Load Data + You can run Data Formulator in Codespaces; we have everything pre-configured. For more details, see [CODESPACES.md](CODESPACES.md). + + [![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/microsoft/data-formulator?quickstart=1) -Besides uploading csv, tsv or xlsx files that contain structured data, you can ask Data Formulator to extract data from screenshots, text blocks or websites, or load data from databases use connectors. Then you are ready to explore. -image +- **Option 5: Working as developer** + + You can build Data Formulator locally and develop your own version. Check out details in [DEVELOPMENT.md](DEVELOPMENT.md). 
-### Explore Data -There are four levels to explore data based depending on whether you want more vibe or more control: +## Using Data Formulator -- Level 1 (most control): Create charts with UI via drag-and-drop, if all fields to be visualized are already in the data. -- Level 2: Specify chart designs with natural language + NL. Describe how new fields should be visualized in your chart, AI will automatically transform data to realize the design. -- Level 3: Get recommendations: Ask AI agents to recommend charts directly from NL descriptions, or even directly ask for exploration ideas. -- Level 4 (most vibe): In agent mode, provide a high-level goal and let AI agents automatically plan and explore data in multiple turns. Exploration threads will be created automatically. +Besides uploading csv, tsv or xlsx files that contain structured data, you can ask Data Formulator to extract data from screenshots, text blocks or websites, or load data from databases using connectors. Then you are ready to explore. Ask visualization questions, edit charts, or delegate some exploration tasks to agents. Then, create reports to share your insights. https://github.com/user-attachments/assets/164aff58-9f93-4792-b8ed-9944578fbb72 -- Level 5: In practice, leverage all of them to keep up with both vibe and control! - -### Create Reports - -Use the report builder to compose a report of the style you like, based on selected charts. Then share the reports to others! - - - -## Developers' Guide - -Follow the [developers' instructions](DEVELOPMENT.md) to build your new data analysis tools on top of Data Formulator. - -Help wanted: - -* Add more database connectors (https://github.com/microsoft/data-formulator/issues/156) -* Scaling up messy data extractor: more document types and larger files. -* Adding more chart templates (e.g., maps). -* other ideas? 
- ## Research Papers * [Data Formulator 2: Iteratively Creating Rich Visualizations with AI](https://arxiv.org/abs/2408.16119) diff --git a/design-docs/0-development-roadmap.md b/design-docs/0-development-roadmap.md new file mode 100644 index 00000000..87a0e540 --- /dev/null +++ b/design-docs/0-development-roadmap.md @@ -0,0 +1,725 @@ +# SSO + 数据源插件 开发路线图 + +> **定位**:本文档是开发实施计划,不重复设计细节。每个步骤链接到设计文档的对应章节。 +> +> **设计文档**: +> - `1-sso-plugin-architecture.md` — SSO 认证 + 统一架构(以下简称 **SSO 文档**) +> - `1-data-source-plugin-architecture.md` — 数据源插件详细设计(以下简称 **Plugin 文档**) +> - `2-external-dataloader-enhancements.md` — ExternalDataLoader 改进(独立推进,不在本路线图中) + +--- + +## 测试策略 + +### 工作流:测试先行 + +每个 Step 遵循 **测试 → 实现 → 通过** 的节奏: + +1. 先写测试 — 基于设计文档中的接口契约和预期行为 +2. 运行测试 — 确认全部失败(红) +3. 实现功能 — 写到测试通过为止(绿) +4. 重构 — 在测试保护下清理代码 + +### 测试分层与现有基础设施对齐 + +项目已有完善的测试体系(见 `tests/test_plan.md`),新增测试沿用现有分层和约定: + +| 层级 | 目录 | 运行方式 | 特征 | +|------|------|---------|------| +| 后端单元 | `tests/backend/unit/` | `pytest`(默认运行) | 纯函数、无网络、无 Docker | +| 后端安全 | `tests/backend/security/` | `pytest`(默认运行) | 认证、隔离、防伪造 | +| 后端集成 | `tests/backend/integration/` | `pytest`(默认运行) | Flask test_client、Workspace 交互 | +| 后端契约 | `tests/backend/contract/` | `pytest`(默认运行) | API 边界保证 | +| 前端单元 | `tests/frontend/unit/` | `vitest` | React 组件、工具函数 | + +新增标记(追加到 `pytest.ini`): + +```ini +markers = + ...existing... 
+ auth: authentication provider tests + plugin: data source plugin framework tests + vault: credential vault tests +``` + +### Mock 设计原则 + +**只 mock 外部边界,不 mock 自己的代码**: + +| 边界 | Mock 方式 | 说明 | +|------|----------|------| +| OIDC IdP(JWKS 端点) | 测试时生成 RSA 密钥对 → 用私钥签 JWT → 用公钥构造 JWKS 响应 | 验证真实的 JWT 验签逻辑,而不是跳过验签 | +| GitHub API | `unittest.mock.patch("requests.get")` | 返回录制的 GitHub `/user` 响应 | +| Superset REST API | `unittest.mock.patch` on `requests.Session` in SupersetClient | 返回录制的 Superset API 响应 fixture | +| Workspace 文件系统 | `tmp_path` fixture(pytest 内置) | 真实 Parquet 读写,但在临时目录 | +| SQLite(Vault) | `tmp_path` 下的临时 DB 文件 | 真实加密/解密,无需 mock | +| 前端 OIDC UserManager | vitest mock module | 模拟登录状态和 token | + +**不要 mock 的东西**: +- Workspace 内部逻辑(`write_parquet` / `list_tables`)— 用真实 temp workspace +- Fernet 加密 — 用真实密钥,验证端到端加密/解密 +- Flask 路由注册 — 用真实 `app.test_client()` + +### Superset API Mock Fixture 设计 + +Superset 插件的测试需要模拟 Superset REST API 的响应。在 `tests/backend/fixtures/superset/` 下存放录制的 JSON 响应: + +``` +tests/backend/fixtures/superset/ +├── auth_login_200.json # POST /api/v1/security/login 成功响应 +├── auth_login_401.json # 登录失败响应 +├── me_200.json # GET /api/v1/me/ 当前用户信息 +├── datasets_list_200.json # GET /api/v1/dataset/ 数据集列表 +├── dataset_detail_42.json # GET /api/v1/dataset/42 单个数据集详情 +├── dashboard_list_200.json # GET /api/v1/dashboard/ 仪表盘列表 +├── sqllab_execute_200.json # POST /api/v1/sqllab/execute/ 查询结果 +└── csrf_token_200.json # GET /api/v1/security/csrf_token/ +``` + +这些 fixture 从真实 Superset 实例录制(`curl` 输出保存),保证字段结构与实际 API 一致。测试中通过 `patch` 注入: + +```python +@pytest.fixture +def superset_responses(fixture_dir): + """加载 Superset API fixture 响应。""" + def _load(name): + return json.loads((fixture_dir / "superset" / name).read_text()) + return _load +``` + +### 什么不测 + +- **不测 IdP 本身**:Keycloak/Auth0 的行为不是我们的代码,集成测试只验证我们的对接逻辑 +- **不测前端 UI 样式**:不做截图对比或像素级验证 +- **不测第三方库内部**:不测 PyJWT 能不能解码、Fernet 加不加密——只测我们**调用**这些库的逻辑 +- **不重复已有测试**:`test_auth.py` 中已有的 
`_validate_identity_value` 测试不重复,只扩展 Provider 链部分 + +--- + +## 开发顺序与依据 + +``` +Layer 1: AuthProvider (SSO) ← 地基,确定"你是谁" + │ + ├── Layer 3: CredentialVault ← 依赖身份,按用户存取凭证 + │ + └── Layer 2: DataSourcePlugin ← 依赖 Layer 1 获取 SSO token + 依赖 Layer 3 获取已存凭证 +``` + +**先做 SSO,后做插件**。理由: + +1. **单向依赖**:插件系统的 SSO 透传、凭证保险箱、Workspace 身份隔离,全部依赖 AuthProvider 提供的用户身份([SSO 文档 § 2 架构全景](1-sso-plugin-architecture.md#2-架构全景)) +2. **插件不改 auth 代码**:先把认证层稳定下来,后续插件开发只在 `plugins/` 目录内工作,不触碰核心 +3. **渐进可验证**:每个 Phase 完成后都有独立可测试的交付物,不需要等到全部完成才能验证 + +> **注意**:Plugin 框架本身*可以*在无 SSO 时工作(匿名模式),但 SSO 透传是核心价值之一。先做 SSO 避免后期回头改 auth 代码。 + +--- + +## Phase 1:认证基础 — AuthProvider 链 + +> 对应:[SSO 文档 § 3 Layer 1](1-sso-plugin-architecture.md#3-layer-1可插拔认证体系-authprovider)、[SSO 文档 § 11 Phase 1](1-sso-plugin-architecture.md#11-实施路径) + +**目标**:将 `auth.py` 重构为可插拔 Provider,激活 OIDC + GitHub OAuth。 + +### Step 1.0 先写测试 + +在写任何实现代码之前,先创建以下测试文件。测试基于设计文档中的接口契约,此时运行应**全部失败**。 + +#### 后端测试 + +**`tests/backend/security/test_auth_provider_chain.py`** — Provider 链集成(扩展现有 `test_auth.py` 的思路) + +```python +# 要验证的行为(基于 SSO 文档 § 3.1 的优先级链): +# - AUTH_PROVIDER=oidc 时,合法 JWT → user:sub_claim +# - AUTH_PROVIDER=oidc 时,无 JWT + ALLOW_ANONYMOUS=true → browser:xxx +# - AUTH_PROVIDER=oidc 时,无 JWT + ALLOW_ANONYMOUS=false → 401 +# - AUTH_PROVIDER 未设置 → 匿名模式(与现有行为一致) +# - init_auth() 加载指定 Provider,忽略其他 +# - get_sso_token() 在 OIDC 认证后返回 access_token +# - get_sso_token() 在匿名模式下返回 None + +# mock 策略:用 cryptography 生成 RSA 密钥对, +# 用私钥签发测试 JWT,patch JWKS 端点返回对应公钥。 +``` + +**`tests/backend/unit/test_oidc_provider.py`** — OIDC Provider 单元测试 + +```python +# 要验证的行为(基于 SSO 文档 § 3.4): +# - 合法 JWT(正确 issuer + audience + 未过期)→ AuthResult(user_id=sub) +# - 过期 JWT → 抛 AuthenticationError +# - 错误 issuer → 抛 AuthenticationError +# - 错误 audience → 抛 AuthenticationError +# - 签名不匹配(用错误密钥签名)→ 抛 AuthenticationError +# - 请求无 Authorization 头 → 返回 None(此 Provider 不适用) +# - Authorization 头非 Bearer → 返回 None +# - get_auth_info() 返回 {action: "frontend", ...} 包含 
OIDC 配置 +# - enabled 属性:OIDC_ISSUER_URL 缺失时返回 False + +# mock 策略:fixture 中生成 RSA 密钥对, +# 用 PyJWT 签发各种测试 JWT,monkeypatch JWKS HTTP 请求。 +``` + +**`tests/backend/unit/test_github_oauth_provider.py`** — GitHub OAuth Provider + +```python +# 要验证的行为(基于 SSO 文档 § 3.5): +# - Flask session 中有 github_user → AuthResult(user_id=github_login) +# - Flask session 为空 → 返回 None +# - get_auth_info() 返回 {action: "redirect", url: "/api/auth/github/login"} +# - enabled 属性:GITHUB_CLIENT_ID 缺失时返回 False + +# mock 策略:Flask test_request_context + session mock,无需真实 GitHub。 +``` + +**`tests/backend/unit/test_azure_easyauth_provider.py`** — Azure EasyAuth Provider(迁移验证) + +```python +# 从现有 test_auth.py 中的 Azure 测试用例迁移验证: +# - X-MS-CLIENT-PRINCIPAL-ID 存在 → AuthResult(user_id=principal_id) +# - 头不存在 → 返回 None +# - 确保迁移后行为与原 get_identity_id() 中的 Azure 逻辑一致 +``` + +**`tests/backend/integration/test_auth_info_endpoint.py`** — `/api/auth/info` 端点 + +```python +# 要验证的行为(基于 SSO 文档 § 3.2 get_auth_info 自描述): +# - OIDC Provider 激活时,返回 {action: "frontend", authority, client_id, ...} +# - GitHub Provider 激活时,返回 {action: "redirect", url: ...} +# - 匿名模式时,返回 {action: "none"} +# - 前端据此决定登录交互方式 + +# mock 策略:Flask test_client + 环境变量 patch。 +``` + +#### 前端测试 + +**`tests/frontend/unit/app/fetchWithIdentity.test.ts`** — Bearer token 附加 + 401 重试 + +```typescript +// 要验证的行为(基于 SSO 文档 § 3.8b): +// - 有 OIDC token 时,请求携带 Authorization: Bearer +// - 无 token 时(匿名模式),只携带 X-Identity-Id(现有行为) +// - 收到 401 时,触发 token 刷新后重试一次 +// - 重试后仍 401 → 不再重试,返回错误 +// - 非 401 错误不触发重试 + +// mock 策略:vitest mock fetch,模拟各种响应状态码。 +``` + +### Step 1.1 后端 AuthProvider 框架 + +| 任务 | 产出文件 | 参考 | +|------|---------|------| +| 定义基类 `AuthProvider` + `AuthResult` | `auth_providers/base.py` | [SSO § 3.2](1-sso-plugin-architecture.md#32-authprovider-基类) | +| Provider 自动发现(`pkgutil` 扫描) | `auth_providers/__init__.py` | [SSO § 3.2b](1-sso-plugin-architecture.md#3-layer-1可插拔认证体系-authprovider) | +| 迁移 Azure EasyAuth 为 Provider | 
`auth_providers/azure_easyauth.py` | [SSO § 3.3](1-sso-plugin-architecture.md#3-layer-1可插拔认证体系-authprovider) | +| 实现 OIDC Provider(JWT 验签) | `auth_providers/oidc.py` | [SSO § 3.4](1-sso-plugin-architecture.md#3-layer-1可插拔认证体系-authprovider) | +| 实现 GitHub OAuth Provider | `auth_providers/github_oauth.py` | [SSO § 3.5](1-sso-plugin-architecture.md#3-layer-1可插拔认证体系-authprovider) | +| GitHub 授权码交换网关 | `auth_gateways/github_gateway.py` | [SSO § 3.5](1-sso-plugin-architecture.md#3-layer-1可插拔认证体系-authprovider) | +| 重构 `auth.py` — `init_auth()` + `get_sso_token()` | `auth.py` 修改 | [SSO § 3.2](1-sso-plugin-architecture.md#32-authprovider-基类) | + +**核心逻辑**:`AUTH_PROVIDER` 环境变量选择主 Provider → 匿名回退(`ALLOW_ANONYMOUS=true`)→ `get_identity_id()` 返回值格式不变(`user:xxx` / `browser:xxx`)。 + +### Step 1.2 前端 OIDC 集成 + +| 任务 | 产出文件 | 参考 | +|------|---------|------| +| OIDC 配置 + UserManager | `src/app/oidcConfig.ts` | [SSO § 3.6](1-sso-plugin-architecture.md#3-layer-1可插拔认证体系-authprovider) | +| OIDC 回调页面 | `src/app/OidcCallback.tsx` | [SSO § 3.7](1-sso-plugin-architecture.md#3-layer-1可插拔认证体系-authprovider) | +| 统一登录面板(`/api/auth/info` 驱动) | `src/app/LoginPanel.tsx` | [SSO § 3.8](1-sso-plugin-architecture.md#3-layer-1可插拔认证体系-authprovider) | +| `fetchWithIdentity` 携带 Bearer token + 401 重试 | `src/app/utils.tsx` 修改 | [SSO § 3.8b](1-sso-plugin-architecture.md#3-layer-1可插拔认证体系-authprovider) | +| `App.tsx` 统一 initAuth | `src/app/App.tsx` 修改 | [SSO § 3.8](1-sso-plugin-architecture.md#3-layer-1可插拔认证体系-authprovider) | + +**依赖安装**:`pip install PyJWT cryptography`,`npm install oidc-client-ts` + +### Step 1.3 验证 + +- [ ] 配置 Keycloak → OIDC 登录成功,`get_identity_id()` 返回 `user:sub_claim` +- [ ] 配置 GitHub OAuth → OAuth 登录成功 +- [ ] 不配置任何 Provider → 匿名模式,行为与 0.7 现版本一致 +- [ ] `get_sso_token()` 返回当前用户的 OIDC access_token + +--- + +## Phase 2:插件框架 + Superset 插件 + +> 对应:[Plugin 文档 § 5~10](1-data-source-plugin-architecture.md#5-插件架构总体设计)、[SSO 文档 § 4 Layer 
2](1-sso-plugin-architecture.md#4-layer-2数据源插件系统-datasourceplugin)、[SSO 文档 § 11 Phase 2](1-sso-plugin-architecture.md#11-实施路径) + +**目标**:建立插件框架,将 0.6 Superset 集成迁移为第一个插件。 + +### Step 2.0 先写测试 + +#### 2.0.1 插件框架测试(实现 Step 2.1 之前写) + +**`tests/backend/unit/test_plugin_discovery.py`** — 插件自动发现 + +```python +# 要验证的行为(基于 SSO 文档 § 4.4): +# - plugins/ 下有合法子包(含 plugin_class)→ 被发现并注册 +# - plugins/ 下有子包但缺 plugin_class → 跳过,记录警告 +# - plugin_class.manifest() 中 required_env 全满足 → 启用 +# - required_env 缺一个 → 跳过,记入 DISABLED_PLUGINS +# - PLUGIN_BLOCKLIST 中列出的 plugin_id → 强制跳过 +# - 导入异常(如缺依赖)→ 优雅降级,不影响其他插件 + +# mock 策略:在 tmp_path 下构造包含 __init__.py 的 dummy plugin 包, +# monkeypatch plugins 包的 __path__ 指向 tmp_path。 +``` + +**`tests/backend/unit/test_plugin_data_writer.py`** — PluginDataWriter 写入工具 + +```python +# 要验证的行为(基于 Plugin 文档 § 6.2): +# - write_dataframe(df, name, overwrite=True) → 写入 Parquet,返回正确元数据 +# - write_dataframe(df, name, overwrite=True) 第二次 → 覆盖同名表 +# - write_dataframe(df, name, overwrite=False) 同名已存在 → 自动加后缀 _1 +# - write_arrow(arrow_table, name) → 跳过 pandas 转换,直接写入 +# - write_batches → append → finish → 合并为一个 Parquet 文件 +# - source_metadata 完整写入 loader_metadata +# - 表名 sanitize(特殊字符替换) + +# mock 策略:Workspace 使用 tmp_path 下的真实临时目录,验证实际 Parquet 文件。 +# 使用 Flask test_request_context 提供 identity(PluginDataWriter 内部调用 get_identity_id)。 +``` + +**`tests/backend/integration/test_plugin_app_config.py`** — `/api/app-config` 插件字段 + +```python +# 要验证的行为(基于 Plugin 文档 § 8.1): +# - 有插件启用时,/api/app-config 响应包含 PLUGINS 字段 +# - PLUGINS 字段合并了 manifest() 和 get_frontend_config() 的内容 +# - 无插件启用时,PLUGINS 为空 dict 或不存在 +# - 插件的敏感配置(如 SUPERSET_URL 原始值)不暴露给前端 + +# mock 策略:注册一个 DummyPlugin 到 ENABLED_PLUGINS,用 Flask test_client 请求。 +``` + +#### 2.0.2 Superset 插件测试(实现 Step 2.2 之前写) + +先准备 Superset API 的 fixture 文件(从真实 Superset 录制或按 API 文档构造): + +``` +tests/backend/fixtures/superset/ +├── auth_login_200.json # {"access_token": "eyJ...", "refresh_token": "..."} +├── auth_login_401.json # {"message": 
"Invalid credentials"} +├── me_200.json # {"result": {"username": "alice", ...}} +├── datasets_list_200.json # {"result": [{"id": 42, "table_name": "sales", ...}]} +├── dataset_detail_42.json # {"result": {"id": 42, "columns": [...], ...}} +├── dashboard_list_200.json # {"result": [{"id": 7, "dashboard_title": "Sales", ...}]} +├── sqllab_execute_200.json # {"data": [{"region": "Asia", "amount": 100}, ...]} +└── csrf_token_200.json # {"result": "abc123"} +``` + +**`tests/backend/integration/test_superset_plugin.py`** — Superset 插件路由集成测试 + +```python +# 要验证的行为(基于 Plugin 文档 § 9.1 端到端流程): +# +# 认证路由: +# - POST /api/plugins/superset/auth/login {username, password} +# → mock SupersetClient 返回 auth_login_200 → 200 + session 中存入 token +# - POST /api/plugins/superset/auth/login 密码错误 +# → mock 返回 auth_login_401 → 401 +# - GET /api/plugins/superset/auth/status +# → session 无 token → {"authenticated": false} +# → session 有 token → {"authenticated": true, "user": "alice"} +# +# 目录路由: +# - GET /api/plugins/superset/catalog/datasets +# → mock 返回 datasets_list_200 → 200 + 数据集列表 +# - 未认证时访问目录 → 401 +# +# 数据加载路由: +# - POST /api/plugins/superset/data/load-dataset {dataset_id: 42} +# → mock SQL Lab 返回 sqllab_execute_200 +# → 验证 Workspace 中生成了 Parquet 文件 +# → 响应包含 {table_name, row_count, columns} +# - POST /api/plugins/superset/data/refresh +# → 用 stored load_params 重新加载 → 覆盖同名表 + +# mock 策略: +# - SupersetClient 的所有 HTTP 调用通过 patch("requests.Session.get/post") 拦截 +# - 返回 fixture 目录中对应的 JSON +# - Workspace 使用 tmp_path 真实临时目录 +# - Flask session 通过 test_client 的 session_transaction 注入 token +``` + +**`tests/backend/unit/test_superset_client.py`** — SupersetClient 单元测试 + +```python +# 要验证的行为(基于 0.6 superset_client.py 已有逻辑): +# - get_datasets() → 正确解析 /api/v1/dataset/ 响应 +# - get_dataset(42) → 正确解析单个数据集详情 +# - execute_sql() → 正确调用 SQL Lab API,返回数据行 +# - get_dashboards() → 正确解析仪表盘列表 +# - HTTP 错误(500/超时)→ 抛出有意义的异常 +# - CSRF token 在需要时自动获取 + +# mock 策略:patch requests.Session,返回 fixture JSON。 
+``` + +#### 2.0.3 前端插件框架测试(实现 Step 2.3 之前写) + +**`tests/frontend/unit/plugins/registry.test.ts`** — 插件动态加载 + +```typescript +// 要验证的行为(基于 Plugin 文档 § 7.2 + SSO 文档 § 4.4): +// - 从 /api/app-config 获取的 plugins 列表 → 动态加载对应模块 +// - 插件模块导出 manifest + PanelComponent → 注册成功 +// - 插件模块导出不完整 → 跳过,console.warn +// - 空 plugins 列表 → 返回空数组,无报错 +``` + +**`tests/frontend/unit/plugins/PluginHost.test.tsx`** — 插件容器组件 + +```tsx +// 要验证的行为(基于 Plugin 文档 § 7.2): +// - 有 2 个已注册插件 → 渲染 2 个 Tab +// - 点击 Tab → 切换到对应插件面板 +// - 0 个插件 → 不渲染插件区域 +// - 插件面板调用 onDataLoaded → 触发表列表刷新 +``` + +### Step 2.1 后端插件框架 + +| 任务 | 产出文件 | 参考 | +|------|---------|------| +| 插件基类 `DataSourcePlugin` | `plugins/base.py` | [Plugin § 6.1](1-data-source-plugin-architecture.md#61-插件基类)、[SSO § 4.2](1-sso-plugin-architecture.md#4-layer-2数据源插件系统-datasourceplugin) | +| 插件自动发现 `discover_and_register()` | `plugins/__init__.py` | [SSO § 4.4](1-sso-plugin-architecture.md#44-插件注册与发现) | +| 插件数据写入工具 `PluginDataWriter` | `plugins/data_writer.py` | [Plugin § 6.2](1-data-source-plugin-architecture.md#62-插件加载的数据怎么进入-workspace) | +| `app.py` 集成 — 调用 `discover_and_register()` | `app.py` 修改 | [SSO § 4.4](1-sso-plugin-architecture.md#44-插件注册与发现) | +| `/api/app-config` 返回 plugins 字段 | `app.py` 修改 | [Plugin § 8.1](1-data-source-plugin-architecture.md#81-apiapp-config-中的插件字段组装) | + +### Step 2.2 Superset 插件后端 + +| 任务 | 产出文件 | 参考 | +|------|---------|------| +| `SupersetPlugin` 实现(manifest + blueprint) | `plugins/superset/__init__.py` | [Plugin § 10.4](1-data-source-plugin-architecture.md#104-supersetplugin-实现) | +| 迁移 `superset_client.py` | `plugins/superset/superset_client.py` | [Plugin § 4.2](1-data-source-plugin-architecture.md#42-后端模块) | +| 迁移 `auth_bridge.py` | `plugins/superset/auth_bridge.py` | 同上 | +| 迁移 `catalog.py` | `plugins/superset/catalog.py` | 同上 | +| 迁移认证路由(+ SSO 透传) | `plugins/superset/routes/auth.py` | [SSO § 4.3](1-sso-plugin-architecture.md#43-插件与-sso-的集成模式)、[SSO § 6](1-sso-plugin-architecture.md#6-sso-token-透传机制) | 
+| 迁移目录路由 | `plugins/superset/routes/catalog.py` | [Plugin § 4.2](1-data-source-plugin-architecture.md#42-后端模块) | +| 迁移数据加载路由(DuckDB → Workspace Parquet) | `plugins/superset/routes/data.py` | [Plugin § 10.2](1-data-source-plugin-architecture.md#102-核心改动) | + +**关键改动**:`data_routes.py` 从 0.6 的 DuckDB 写入改为 0.7 的 Workspace Parquet 写入([Plugin § 10.2](1-data-source-plugin-architecture.md#102-核心改动))。 + +> **注**:0.6 Superset 集成代码在独立的定制分支中(`data-formulator-0.6`),0.7 上游代码库不含任何 Superset 残留。此处是将 0.6 代码**迁入**0.7 插件框架,无需清理。 + +### Step 2.3 前端插件框架 + +| 任务 | 产出文件 | 参考 | +|------|---------|------| +| 插件类型定义 | `src/plugins/types.ts` | [Plugin § 7.1](1-data-source-plugin-architecture.md#71-插件面板契约) | +| 插件动态加载(`import.meta.glob`) | `src/plugins/registry.ts` | [SSO § 4.4](1-sso-plugin-architecture.md#44-插件注册与发现) | +| 插件容器组件 `PluginHost` | `src/plugins/PluginHost.tsx` | [Plugin § 7.2](1-data-source-plugin-architecture.md#72-plugin-host前端插件容器) | +| `dfSlice.tsx` 增加 `plugins` 字段 | `src/app/dfSlice.tsx` 修改 | [SSO § 10.2](1-sso-plugin-architecture.md#102-前端新增文件) | +| `UnifiedDataUploadDialog.tsx` 渲染插件 Tab | 修改 | [Plugin § 7.2](1-data-source-plugin-architecture.md#72-plugin-host前端插件容器) | +| `onDataLoaded` 回调 → 刷新表列表 / loadTable | 修改 | [Plugin § 7.3](1-data-source-plugin-architecture.md#73-数据加载完成后的流程) | + +### Step 2.4 Superset 插件前端 + +| 任务 | 产出文件 | 参考 | +|------|---------|------| +| 插件入口 + manifest | `src/plugins/superset/index.ts` | [Plugin § 10.3](1-data-source-plugin-architecture.md#103-前端) | +| 迁移 SupersetPanel | `src/plugins/superset/SupersetPanel.tsx` | [Plugin § 4.3](1-data-source-plugin-architecture.md#43-前端组件) | +| 迁移 SupersetCatalog | `src/plugins/superset/SupersetCatalog.tsx` | 同上 | +| 迁移 SupersetDashboards | `src/plugins/superset/SupersetDashboards.tsx` | 同上 | +| 迁移 SupersetFilterDialog | `src/plugins/superset/SupersetFilterDialog.tsx` | 同上 | +| 迁移 SupersetLogin | `src/plugins/superset/SupersetLogin.tsx` | 同上 | +| API 封装 | `src/plugins/superset/api.ts` | 同上 | + +### Step 2.5 验证 
+ +- [ ] 设置 `SUPERSET_URL` → 前端自动出现 Superset Tab +- [ ] 手动登录 Superset → 浏览数据集 → 加载数据到 Workspace +- [ ] SSO 模式(Phase 1 已完成)→ 无需输入 Superset 密码即可访问 +- [ ] 不设置 `SUPERSET_URL` → 无任何影响,行为与现版本一致 +- [ ] 数据刷新:加载后点刷新按钮 → 重新拉取最新数据([Plugin § 7.4](1-data-source-plugin-architecture.md#74-数据刷新协议)) + +--- + +## Phase 3:凭证保险箱 + +> 对应:[SSO 文档 § 5 Layer 3](1-sso-plugin-architecture.md#5-layer-3凭证保险箱-credentialvault)、[SSO 文档 § 11 Phase 3](1-sso-plugin-architecture.md#11-实施路径) + +**目标**:服务端加密凭证存储,替代 Session 级别的临时存储。 + +### Step 3.0 先写测试 + +**`tests/backend/unit/test_credential_vault.py`** — LocalCredentialVault 单元测试 + +```python +# 要验证的行为(基于 SSO 文档 § 5.2~5.3): +# - store(user_a, "superset", {username, password}) → 成功存入 +# - retrieve(user_a, "superset") → 返回明文 {username, password} +# - retrieve(user_b, "superset") → 返回 None(用户隔离) +# - store 同一 (user, source) 两次 → 后者覆盖前者 +# - delete(user_a, "superset") → 删除后 retrieve 返回 None +# - list_sources(user_a) → ["superset"],delete 后为 [] +# - 换一个 CREDENTIAL_VAULT_KEY 实例化 → 之前存的凭证解密失败,返回 None(非崩溃) +# - 空密钥 → 初始化时报错 + +# mock 策略:全部使用 tmp_path 下的真实 SQLite 文件 + 真实 Fernet 密钥。 +# 不需要 mock 任何东西——这个模块足够独立。 +``` + +**`tests/backend/unit/test_credential_vault_factory.py`** — Vault 工厂 + +```python +# 要验证的行为(基于 SSO 文档 § 5.4): +# - CREDENTIAL_VAULT_KEY 已设置 → get_credential_vault() 返回 LocalCredentialVault 实例 +# - CREDENTIAL_VAULT_KEY 未设置 → 返回 None +# - CREDENTIAL_VAULT=local → 使用 LocalCredentialVault +# - CREDENTIAL_VAULT=unknown → 返回 None,记录警告 +# - 多次调用 get_credential_vault() → 返回同一个单例 + +# mock 策略:monkeypatch 环境变量 + tmp_path。 +``` + +**`tests/backend/integration/test_credential_routes.py`** — 凭证 API 端点 + +```python +# 要验证的行为(基于 SSO 文档 § 5.5): +# - POST /api/credentials/store → 存储成功 +# - GET /api/credentials/list → 返回已存储的 source_key 列表(不含凭证内容) +# - POST /api/credentials/delete → 删除后 list 不再包含 +# - Vault 未配置时 → /store 和 /delete 返回 503 +# - 不同用户(不同 X-Identity-Id)之间凭证隔离 + +# mock 策略:Flask test_client + 真实 tmp_path Vault + X-Identity-Id 头切换身份。 +``` + 
+**`tests/backend/integration/test_plugin_auth_with_vault.py`** — 插件认证 + Vault 联动 + +```python +# 要验证的行为(基于 SSO 文档 § 4.3 三种认证模式): +# - Vault 中有已存凭证 → 插件 auth/login 自动取出,无需用户输入 +# - Vault 中凭证已过期(外部系统密码已改)→ 返回 vault_stale 提示 +# - 用户手动输入 + remember=true → 凭证存入 Vault +# - SSO token 可用 + 插件 supports_sso_passthrough → 自动透传 + +# mock 策略:patch SupersetClient 的认证调用 + 真实 Vault。 +``` + +### Step 3.1 后端 + +| 任务 | 产出文件 | 参考 | +|------|---------|------| +| Vault 抽象接口 | `credential_vault/base.py` | [SSO § 5.2](1-sso-plugin-architecture.md#52-credentialvault-接口) | +| 本地加密实现(SQLite + Fernet) | `credential_vault/local_vault.py` | [SSO § 5.3](1-sso-plugin-architecture.md#53-本地加密实现) | +| Vault 工厂 | `credential_vault/__init__.py` | [SSO § 5.4](1-sso-plugin-architecture.md#54-vault-工厂) | +| 凭证管理 API | `credential_routes.py` | [SSO § 5.5](1-sso-plugin-architecture.md#55-凭证管理-api) | +| 插件认证路由增强 — 自动从 Vault 取凭证 | `plugins/superset/routes/auth.py` 修改 | [SSO § 4.3](1-sso-plugin-architecture.md#43-插件与-sso-的集成模式) | + +### Step 3.2 前端 + +| 任务 | 产出文件 | 参考 | +|------|---------|------| +| 凭证管理 UI | `src/plugins/CredentialManager.tsx` | [SSO § 10.2](1-sso-plugin-architecture.md#102-前端新增文件) | + +### Step 3.3 验证 + +- [ ] 设置 `CREDENTIAL_VAULT_KEY` → 用户输入 Superset 密码后加密存储 +- [ ] 换浏览器 → SSO 登录 → 已存凭证自动可用,无需重新输入 +- [ ] 不设置 `CREDENTIAL_VAULT_KEY` → 回退到 Session 存储(现有行为) + +--- + +## Phase 4:第二个插件验证 + +> 对应:[SSO 文档 § 11 Phase 4](1-sso-plugin-architecture.md#11-实施路径) + +**目标**:用 Metabase 插件验证框架通用性 — **核心代码零修改**。 + +### Step 4.0 先写测试 — 框架通用性验证 + +Phase 4 的测试本身就是核心交付物。它验证的不是 Metabase 的业务逻辑,而是**插件框架的扩展性承诺**。 + +**`tests/backend/contract/test_plugin_zero_core_change.py`** — 核心代码零修改契约 + +```python +# 这个测试在 Metabase 插件代码写完后运行: +# - 检查 plugins/__init__.py 的 git diff → 无修改 +# - 检查 app.py 的 git diff → 无修改 +# - 检查 src/plugins/registry.ts 的 git diff → 无修改 +# - Metabase plugin 仅存在于 plugins/metabase/ 和 src/plugins/metabase/ +# - discover_and_register() 能发现 Metabase 插件 +# - /api/app-config 返回的 PLUGINS 中包含 metabase +# +# 
这是一个**契约测试**:如果未来框架改动导致新增插件需要改核心代码, +# 这个测试应当失败,提醒开发者修复框架的扩展性。 +``` + +**`tests/backend/integration/test_metabase_plugin.py`** — Metabase 插件路由 + +```python +# 与 Superset 插件测试同结构,mock Metabase REST API: +# - /api/plugins/metabase/auth/login → mock Metabase session API +# - /api/plugins/metabase/catalog/questions → mock /api/card/ 列表 +# - /api/plugins/metabase/data/load-question → mock 查询结果 + 写入 Workspace +``` + +| 任务 | 产出文件 | +|------|---------| +| Metabase 插件后端 | `plugins/metabase/` | +| Metabase 插件前端 | `src/plugins/metabase/` | + +**验证标准**:仅新增目录,无需修改 `plugins/__init__.py`、`registry.ts`、`app.py` 等任何现有文件。 + +--- + +## Phase 5:完善与增强 + +> 对应:[SSO 文档 § 11 Phase 5](1-sso-plugin-architecture.md#11-实施路径)、[Plugin 文档 § 7.7](1-data-source-plugin-architecture.md#77-外部系统元数据拉取) + +| 任务 | 优先级 | 参考 | +|------|--------|------| +| 外部元数据拉取(列描述、语义类型) | P0 | [Plugin § 7.7](1-data-source-plugin-architecture.md#77-外部系统元数据拉取) | +| 多协议 SSO 支持(SAML / LDAP / CAS) | P1 | [SSO § 3.9](1-sso-plugin-architecture.md#39-多协议支持从-oidc-扩展到-saml--ldap--cas--反向代理) | +| ExternalDataLoader 改进 | P1 | [2-external-dataloader-enhancements.md](2-external-dataloader-enhancements.md) | +| 插件错误边界和降级处理 | P1 | — | +| 管理员配置 UI | P2 | — | +| 审计日志 | P2 | — | + +> 注:单元测试和集成测试已嵌入 Phase 1~4 的每个 Step 中,不再单独列为待办。 + +--- + +## 全局依赖清单 + +| 包 | 用途 | 引入阶段 | 安装 | +|----|------|---------|------| +| `PyJWT` | OIDC JWT 验签 | Phase 1 | `pip install PyJWT` | +| `cryptography` | Fernet 加密 + JWT 验签 + 测试密钥生成 | Phase 1 | `pip install cryptography` | +| `oidc-client-ts` | 前端 OIDC PKCE | Phase 1 | `npm install oidc-client-ts` | +| `requests` | 插件 HTTP 调用 | Phase 2 | 已有 | + +`cryptography` 同时用于生产代码(Fernet 加密、JWT RS256 验签)和测试(生成 RSA 密钥对签发测试 JWT),不需要额外的测试专用依赖。 + +现有 `pytest`、`vitest`、`unittest.mock`、`flask.testing` 已满足所有测试需求,**不需要引入新的测试框架或 mock 库**。 + +> 参考:[SSO 文档 附录 B](1-sso-plugin-architecture.md#附录-b关键依赖) + +--- + +## 核心代码改动范围(一次性) + +以下文件在 Phase 1~2 中需要修改。Phase 3+ 不再触碰核心代码。 + +| 文件 | 改动阶段 | 改动量 | 说明 | +|------|---------|--------|------| 
+| `py-src/.../auth.py` | Phase 1 | ~60 行 | Provider 自动发现 + `get_sso_token()` | +| `py-src/.../app.py` | Phase 1+2 | ~25 行 | `init_auth()` + `discover_and_register()` + app-config | +| `src/app/App.tsx` | Phase 1 | ~35 行 | 统一 initAuth + 登录 UI | +| `src/app/utils.tsx` | Phase 1 | ~15 行 | Bearer token + 401 重试 | +| `src/app/dfSlice.tsx` | Phase 2 | ~5 行 | `ServerConfig.plugins` | +| `src/views/UnifiedDataUploadDialog.tsx` | Phase 2 | ~20 行 | PluginHost 渲染 | + +> 参考:[SSO 文档 § 10.3](1-sso-plugin-architecture.md#103-对现有文件的改动清单) + +--- + +## 文件结构总览 + +完成全部 Phase 后的新增文件结构(含测试): + +``` +py-src/data_formulator/ +├── auth_providers/ ← Phase 1 +│ ├── base.py +│ ├── azure_easyauth.py +│ ├── oidc.py +│ └── github_oauth.py +├── auth_gateways/ ← Phase 1 +│ ├── github_gateway.py +│ └── logout.py +├── credential_vault/ ← Phase 3 +│ ├── base.py +│ └── local_vault.py +├── credential_routes.py ← Phase 3 +├── plugins/ ← Phase 2 +│ ├── base.py +│ ├── data_writer.py +│ └── superset/ +│ ├── __init__.py +│ ├── superset_client.py +│ ├── auth_bridge.py +│ ├── catalog.py +│ └── routes/ + +src/ +├── app/ +│ ├── oidcConfig.ts ← Phase 1 +│ └── OidcCallback.tsx ← Phase 1 +├── plugins/ ← Phase 2 +│ ├── types.ts +│ ├── registry.ts +│ ├── PluginHost.tsx +│ ├── CredentialManager.tsx ← Phase 3 +│ └── superset/ +│ ├── index.ts +│ ├── SupersetPanel.tsx +│ └── ... 
+ +tests/ +├── backend/ +│ ├── unit/ +│ │ ├── test_oidc_provider.py ← Phase 1 +│ │ ├── test_github_oauth_provider.py ← Phase 1 +│ │ ├── test_azure_easyauth_provider.py ← Phase 1 +│ │ ├── test_plugin_discovery.py ← Phase 2 +│ │ ├── test_plugin_data_writer.py ← Phase 2 +│ │ ├── test_superset_client.py ← Phase 2 +│ │ ├── test_credential_vault.py ← Phase 3 +│ │ └── test_credential_vault_factory.py ← Phase 3 +│ ├── security/ +│ │ └── test_auth_provider_chain.py ← Phase 1 +│ ├── integration/ +│ │ ├── test_auth_info_endpoint.py ← Phase 1 +│ │ ├── test_plugin_app_config.py ← Phase 2 +│ │ ├── test_superset_plugin.py ← Phase 2 +│ │ ├── test_credential_routes.py ← Phase 3 +│ │ ├── test_plugin_auth_with_vault.py ← Phase 3 +│ │ └── test_metabase_plugin.py ← Phase 4 +│ ├── contract/ +│ │ └── test_plugin_zero_core_change.py ← Phase 4 +│ └── fixtures/ +│ └── superset/ ← Phase 2 +│ ├── auth_login_200.json +│ ├── datasets_list_200.json +│ ├── dataset_detail_42.json +│ ├── sqllab_execute_200.json +│ └── ... 
+├── frontend/ +│ └── unit/ +│ ├── app/ +│ │ └── fetchWithIdentity.test.ts ← Phase 1 +│ └── plugins/ +│ ├── registry.test.ts ← Phase 2 +│ └── PluginHost.test.tsx ← Phase 2 +``` + +> 参考:[SSO 文档 § 10](1-sso-plugin-architecture.md#10-目录结构) + +--- + +## 文档交付要求 + +每个 Phase 完成时,除代码和测试外,还需交付或更新以下文档: + +| Phase | 必须交付的文档 | 说明 | +|-------|-------------|------| +| Phase 1 | `auth_providers/README.md` | 如何新增一个 AuthProvider:基类契约、环境变量约定、`get_auth_info()` 返回格式、测试方法 | +| Phase 2 | `plugins/README.md` | **插件开发指南**:目录约定、`plugin_class` 暴露方式、manifest 字段说明、路由前缀规则、PluginDataWriter 用法、前端 `index.ts` 导出规范、fixture 录制方法 | +| Phase 2 | `.env.template` 更新 | 新增 `SUPERSET_URL` 等插件环境变量的说明 | +| Phase 3 | `credential_vault/README.md` | Vault 配置方式、密钥生成命令、插件如何调用 Vault API | +| Phase 4 | `plugins/README.md` 更新 | 用 Metabase 插件作为实际案例补充到指南中,验证文档的可操作性 | +| 每个 Phase | `CHANGELOG.md` 追加 | 简要记录本阶段新增的能力和配置变更 | + +**核心原则**:文档写给"下一个要开发新插件的人"看。如果按照 `plugins/README.md` 的步骤无法从零完成一个新插件,说明文档不合格。Phase 4(Metabase)就是对这份文档的实战验证。 diff --git a/design-docs/1-data-source-plugin-architecture.md b/design-docs/1-data-source-plugin-architecture.md new file mode 100644 index 00000000..f883cbd6 --- /dev/null +++ b/design-docs/1-data-source-plugin-architecture.md @@ -0,0 +1,1801 @@ +# Data Formulator 数据源插件架构设计方案 + +## 目录 + +1. [背景与动机](#1-背景与动机) +2. [部署模型分析:个人工具 vs 团队平台](#2-部署模型分析个人工具-vs-团队平台) +3. [现状分析](#3-现状分析) +4. [0.6 版本 Superset 集成回顾](#4-06-版本-superset-集成回顾) +5. [插件架构总体设计](#5-插件架构总体设计) +6. [后端插件接口](#6-后端插件接口) +7. [前端插件接口](#7-前端插件接口) +8. [插件注册与发现](#8-插件注册与发现) +9. [数据流设计](#9-数据流设计) +10. [Superset 插件迁移示例](#10-superset-插件迁移示例) +11. [与现有 ExternalDataLoader 的关系](#11-与现有-externaldataloader-的关系) +12. [插件 i18n 自包含方案](#12-插件-i18n-自包含方案) +13. [目录结构](#13-目录结构) +14. [实施路径](#14-实施路径) +15. [关键设计难点:外部系统配置与用户身份](#15-关键设计难点外部系统配置与用户身份) +16. [FAQ](#16-faq) +17. [附录 A:核心代码改动清单](#附录-a核心代码改动清单) +18. [附录 B:新增插件的完整步骤](#附录-b新增插件的完整步骤零核心改动) +19. [附录 C:关联文档](#附录-c关联文档) + +--- + +## 1. 
背景与动机 + +### 1.1 核心需求 + +Data Formulator 需要对接外部 BI/报表系统(如 Apache Superset、Metabase、Power BI 等)作为数据源,让用户可以: + +- 用外部系统的**账号权限**登录 +- 浏览该用户**有权访问**的数据集、仪表盘、报表 +- 将数据拉取到 Data Formulator 中进行可视化分析 + +### 1.2 为什么需要插件机制 + +在 0.6 版本中,我们已经实现了 Superset 集成,但存在以下问题: + +| 问题 | 说明 | +|------|------| +| **对核心代码侵入较高** | 修改了 `app.py`、`dfSlice.tsx`、`App.tsx`、`utils.tsx`、`UnifiedDataUploadDialog.tsx` 等多个核心文件 | +| **不可复用** | 如果再集成一个 Metabase,需要重复修改同一批核心文件 | +| **耦合认证逻辑** | Superset 的 JWT 认证直接嵌入 Flask session,与应用认证逻辑耦合 | +| **升级困难** | 上游 Data Formulator 版本更新时,合并冲突概率高 | + +**插件机制的价值**:每个外部系统的集成代码自成一体(后端 + 前端),对核心代码的修改只需一次性地建立插件框架即可。后续新增任何 BI 系统,只需编写一个新插件,**不再需要修改核心代码**。 + +--- + +## 2. 部署模型分析:个人工具 vs 团队平台 + +### 2.1 数据存储模型 + +Data Formulator 的所有数据统一通过 **Workspace** 管理。Workspace 后端由 `WORKSPACE_BACKEND` 配置决定,支持多种部署形态: + +``` +┌────────────────────────────────────────────────────────┐ +│ Workspace 统一存储模型 │ +│ │ +│ 所有数据来源 (Upload/Paste/URL/DB/插件) │ +│ ↓ │ +│ loadTable → Workspace │ +│ ↓ │ +│ ┌──────────────────────────────────────────────────┐ │ +│ │ WORKSPACE_BACKEND = ? 
│ │ +│ │ │ │ +│ │ local → 本地磁盘 (~/.data_formulator/) │ │ +│ │ ephemeral → 仅内存(会话结束即消失) │ │ +│ │ cloud → 远程对象存储(未来) │ │ +│ └──────────────────────────────────────────────────┘ │ +│ │ +│ 前端始终只拿 sample rows + 元数据 │ +└────────────────────────────────────────────────────────┘ +``` + +> **历史说明**:早期版本中曾有 `storeOnServer` 用户开关和 `DISABLE_DATABASE` 环境变量, +> 分别用于让用户选择"浏览器临时存储 vs 磁盘持久化"和"禁用服务端存储"。 +> 现在这些概念已被 `WORKSPACE_BACKEND` 统一取代——`ephemeral` 模式等价于旧的纯浏览器模式。 + +### 2.2 接入 BI 系统后的模型变化 + +当需要集成 Superset 等 BI 系统时,部署模型发生了根本变化: + +``` +团队部署模式(插件场景的实际部署): + + ┌───────────┐ ┌──────────────┐ ┌────────────┐ + │ 用户A浏览器│────→│ │────→│ │ + │ 用户B浏览器│────→│ DF 服务器 │────→│ Superset │ + │ 用户C浏览器│────→│ (IT部署管理) │────→│ (IT管理) │ + └───────────┘ └──────────────┘ └────────────┘ + + 在这个模型下: + - "服务器"不再是用户自己的电脑 + - 数据必然经过服务器(插件后端调 Superset API) + - 隐私关注点变成了"谁控制服务器",而不是"数据在不在服务器上" + - BI 系统的连接地址是基础设施,由 IT 管理,不是用户自行添加 +``` + +### 2.3 团队部署下仍存在的差异 + +Workspace 统一存储解决了数据持久化的问题,但个人与团队部署之间仍有两个需要关注的差异: + +| 差异 | 个人模式 | 团队模式 | +|------|---------|---------| +| 数据库/BI 连接参数 | 前端填,自己用方便 | 应服务端集中管理(减少重复填写、防止 SSRF) | +| 模型 API Key | 前端填,自己的 Key | 服务端全局配置(0.7 已实现) | + +> 0.7 版本已将模型管理升级为"服务端全局配置"。插件系统沿着同样的方向继续——连接端点由服务端配置,用户只需认证。 + +### 2.4 对插件系统的设计决策 + +由于所有数据统一走 Workspace,插件系统只需关注两个插件特有的问题: + +**1. 插件配置(URL 等):只在服务端配置,不在前端添加。** + +- BI 系统的 URL 是**基础设施端点**,不是用户数据,由 IT 部门管理 +- 用户只需**认证**(登录 Superset),而不是"添加一个 Superset 连接" +- 禁止前端输入任意 URL(防止 SSRF) + +**2. 插件认证:per-user 凭据,服务端管理。** + +- 用户对 BI 系统的登录凭据存储在服务端(CredentialVault),不暴露给前端 +- 尊重 BI 系统自身的权限模型(RBAC / RLS) + +``` +┌──────────────────────────────────────────────────────────────┐ +│ Data Formulator │ +│ │ +│ 数据来源 │ +│ ┌────────────────────────┐ ┌───────────────────────────┐ │ +│ │ 内置来源 │ │ 插件来源 │ │ +│ │ Upload / Paste / URL │ │ Superset / Metabase / ... 
│ │ +│ │ Database / Extract │ │ │ │ +│ └──────────┬─────────────┘ └─────────────┬─────────────┘ │ +│ │ │ │ +│ └──────────┬───────────────────┘ │ +│ ↓ │ +│ loadTable → Workspace │ +│ (统一数据入口 → 统一存储) │ +└──────────────────────────────────────────────────────────────┘ +``` + +### 2.5 各层的配置与数据归属总结 + +| 层级 | 谁配置 | 存在哪里 | 示例 | +|------|--------|---------|------| +| **插件端点** | IT 管理员 | 服务端 `.env` | `PLG_SUPERSET_URL=http://...` | +| **用户认证** | 用户自己 | Flask Session(服务端内存) | Superset JWT Token | +| **用户数据** | 插件自动加载 | Workspace(由 `WORKSPACE_BACKEND` 决定存储位置) | 从 Superset 拉取的数据集 | +| **前端状态** | 自动管理 | Redux Store(浏览器内存) | sample rows、表元数据 | + +--- + +## 3. 现状分析 + +### 3.1 当前数据加载架构(0.7 版本) + +``` +用户操作 + ├─ Upload (本地文件) ──→ 解析 → DictTable + ├─ Paste (粘贴数据) ──→ 解析 → DictTable + ├─ URL (远程文件) ──→ fetch → 解析 → DictTable + ├─ Explore (示例数据) ──→ fetch → 解析 → DictTable + ├─ Extract (AI 提取) ──→ Agent → 解析 → DictTable + └─ Database (外部数据库) ──→ ExternalDataLoader → Arrow → Parquet + │ + 所有路径最终 → loadTable thunk → Redux Store +``` + +### 3.2 后端现有扩展点 + +**ExternalDataLoader** — 面向数据库的抽象基类: + +```python +class ExternalDataLoader(ABC): + def __init__(self, params: dict) # 连接参数 + def list_tables(...) # 列出表 + def fetch_data_as_arrow(...) # 拉取数据(→ Arrow) + def ingest_to_workspace(...) # 写入 workspace(→ Parquet) + def list_params() # 声明所需参数 + def auth_instructions() # 认证说明 +``` + +注册方式(`data_loader/__init__.py`): + +```python +_LOADER_SPECS = [ + ("mysql", "...mysql_data_loader", "MySQLDataLoader", "pymysql"), + ("postgresql", "...postgresql_data_loader", "PostgreSQLDataLoader", "psycopg2-binary"), + # ... 
共 9 种 +] +``` + +**这套机制适合数据库连接器**,但 **不适合 BI 系统集成**,因为 BI 系统需要: + +| 能力 | ExternalDataLoader | BI 系统需要 | +|------|:--:|:--:| +| 连接参数 | ✅ 简单 key-value | 需要 URL + 认证流程 | +| 认证 | ✅ 用户名/密码/密钥 | JWT / OAuth / SSO | +| 列出数据 | ✅ `list_tables()` | 数据集 + 仪表盘 + 报表 + 筛选条件 | +| 拉取数据 | ✅ `fetch_data_as_arrow()` | 通过 BI 系统的 SQL Lab / API 拉取(尊重 RBAC/RLS) | +| 前端 UI | ❌ 无(通用表单) | 需要专用的目录浏览、筛选、登录等 UI | +| 自有 API 路由 | ❌ 无 | 需要注册独立的 Blueprint | + +### 3.3 前端现有扩展点 + +数据加载入口统一在 `UnifiedDataUploadDialog.tsx`,支持 6 种 Tab: + +```typescript +type UploadTabType = 'menu' | 'upload' | 'paste' | 'url' | 'database' | 'extract' | 'explore'; +``` + +数据库入口由 `DBManagerPane` 组件处理,支持上述 9 种 ExternalDataLoader。 + +所有数据加载最终通过 `loadTable` thunk 进入 Redux Store。 + +--- + +## 4. 0.6 版本 Superset 集成回顾 + +### 4.1 架构概览 + +``` +前端 后端 Superset +┌─────────────┐ HTTP ┌────────────────┐ REST ┌──────────┐ +│ LoginView │──────────→│ auth_routes │───────────→│ JWT 登录 │ +│ SupersetPanel│──────────→│ catalog_routes │───────────→│ 数据集API│ +│ SupersetCatalog│────────→│ data_routes │───────────→│ SQL Lab │ +│ SupersetDashboards│─────→│ auth_bridge │ └──────────┘ +└─────────────┘ │ superset_client│ + │ catalog │ + └────────────────┘ + │ + ↓ + 写入 DuckDB(0.6) + / Workspace Parquet(0.7 目标) +``` + +### 4.2 后端模块 + +| 文件 | 职责 | +|------|------| +| `superset_client.py` | Superset REST API 封装(数据集列表、详情、仪表盘、SQL Lab 执行) | +| `auth_bridge.py` | JWT 登录/刷新/验证 | +| `auth_routes.py` | `/api/superset/auth/*` 认证 API | +| `catalog_routes.py` | `/api/superset/catalog/*` 数据集/仪表盘目录 API | +| `data_routes.py` | `/api/superset/data/*` 数据加载 API(含筛选条件) | +| `catalog.py` | 带 TTL 缓存的数据目录(两级:摘要/详情) | + +### 4.3 前端组件 + +| 组件 | 职责 | +|------|------| +| `LoginView.tsx` | 登录页(用户名密码 / SSO 弹窗) | +| `SupersetPanel.tsx` | Tab 容器(仪表盘 + 数据集) | +| `SupersetCatalog.tsx` | 数据集目录浏览(搜索、预览、加载) | +| `SupersetDashboards.tsx` | 仪表盘列表(展开查看数据集) | +| `SupersetDashboardFilterDialog.tsx` | 仪表盘筛选条件对话框 | + +### 4.4 对核心代码的改动 + +``` +app.py +50 行 (配置、Blueprint 注册、app-config 扩展) 
+dfSlice.tsx +3 字段 (SUPERSET_ENABLED、SSO_LOGIN_URL、AUTH_USER) +App.tsx +20 行 (登录逻辑、LoginView) +utils.tsx +6 URL (Superset API 地址) +UnifiedDataUploadDialog.tsx +30 行 (SplitDatabasePane + SupersetPanel) +DBTableManager.tsx +4 行 (监听 superset-dataset-loaded 事件) +``` + +### 4.5 可以复用的部分 + +核心的业务逻辑(SupersetClient、AuthBridge、Catalog、FilterBuilder)可以直接迁移为 Superset 插件的实现。 + +--- + +## 5. 插件架构总体设计 + +### 5.1 设计原则 + +1. **最小侵入**:核心代码只需一次性改动来建立插件框架,后续新增插件不再修改核心 +2. **自包含**:每个插件独立提供后端路由 + 前端组件 + 配置声明 +3. **自动发现**:后端通过目录扫描、前端通过 `import.meta.glob` 自动发现插件,新增插件无需修改任何注册表 +4. **可选加载**:插件通过环境变量启用,未启用的插件不加载任何代码 +5. **统一出口**:插件加载的数据最终通过现有的 `loadTable` 进入系统 +6. **权限透传**:尊重外部系统自身的权限模型(RBAC / RLS) +7. **统一范式**:与 AuthProvider(认证)、CredentialVault(凭证)共享"抽象基类 + 动态注册 + 环境变量启用"的插件设计范式(详见 `sso-plugin-architecture.md`) + +### 5.2 环境变量命名约定 + +系统中有三类环境变量驱动的自动发现机制,各自使用不同的命名空间以避免冲突: + +| 类别 | 前缀 | 发现机制 | 示例 | +|------|------|---------|------| +| **系统配置** | 无(直接命名) | 固定读取 | `LOG_LEVEL`、`FLASK_SECRET_KEY`、`WORKSPACE_BACKEND` | +| **LLM 模型** | `{PROVIDER}_`(遗留命名) | 扫描 `*_ENABLED=true` | `DEEPSEEK_ENABLED`、`DEEPSEEK_API_KEY`、`QWEN_MODELS` | +| **数据源插件** | **`PLG_`** + `{PLUGIN}_` | manifest 中 `required_env` 全部存在 | `PLG_SUPERSET_URL`、`PLG_GRAFANA_TIMEOUT` | +| **认证 Provider** | `AUTH_PROVIDER` 单选 + Provider 自有前缀 | `AUTH_PROVIDER=xxx` 指定 | `AUTH_PROVIDER=oidc`、`OIDC_ISSUER_URL` | +| **凭证保险箱** | `CREDENTIAL_VAULT_` | `CREDENTIAL_VAULT_KEY` 存在 | `CREDENTIAL_VAULT=local`、`CREDENTIAL_VAULT_KEY=...` | + +**为什么插件需要 `PLG_` 前缀?** + +LLM 模型的自动发现靠扫描所有 `*_ENABLED=true` 的环境变量。如果插件也使用裸前缀(如 `SUPERSET_SSO_ENABLED=true`),会被模型注册器误识别为 model provider。加 `PLG_` 前缀后,命名空间彻底隔离: + +```env +# ============================================================= +# LLM 模型配置(遗留命名,{PROVIDER}_ 前缀) +# ============================================================= +DEEPSEEK_ENABLED=true +DEEPSEEK_API_KEY=sk-xxx +DEEPSEEK_API_BASE=https://api.deepseek.com +DEEPSEEK_MODELS=deepseek-chat + +# 
============================================================= +# 数据源插件(PLG_{PLUGIN}_ 前缀) +# ============================================================= +PLG_SUPERSET_URL=http://superset:8088 +PLG_SUPERSET_SSO=true + +PLG_GRAFANA_URL=http://grafana.example.com:3000 +PLG_GRAFANA_TIMEOUT=10 + +# ============================================================= +# 认证(无 PLG_ 前缀,单选机制不会冲突) +# ============================================================= +AUTH_PROVIDER=oidc +OIDC_ISSUER_URL=https://login.microsoftonline.com/xxx/v2.0 +OIDC_CLIENT_ID=abc123 +``` + +> **注意**:LLM 模型的 `{PROVIDER}_` 命名是遗留约定,暂不添加 `MODEL_` 前缀以保持向后兼容。 +> 未来如需统一,可分步迁移。当前只为新增的插件系统建立 `PLG_` 前缀规范。 + +### 5.3 概念模型 + +``` +┌──────────────────────────────────────────────────────────────────────┐ +│ Data Formulator 核心 │ +│ │ +│ ┌──────────────────────────────────────────────────────────────────┐ │ +│ │ Layer 1: AuthProvider 链 (SSO / Azure EasyAuth / 浏览器 UUID) │ │ +│ │ → 确定"你是谁" │ │ +│ └──────────────────────────────────────────────────────────────────┘ │ +│ │ +│ ┌──────────────────┐ ┌───────────────┐ ┌──────────────┐ │ +│ │ 前端 Plugin Host │ │ loadTable │ │ Workspace │ │ +│ │ (渲染插件面板) │ │ (统一数据入口)│ │ (Parquet存储)│ │ +│ └────────┬─────────┘ └───────┬───────┘ └──────┬───────┘ │ +│ │ │ │ │ +│ ┌────────┴────────────────────┴──────────────────┴───────┐ │ +│ │ Layer 2: Plugin Registry (自动扫描 + 环境变量门控) │ │ +│ └────────┬──────────────┬──────────────┬─────────────────┘ │ +│ │ │ │ │ +│ ┌────────┴─────┐ ┌──────┴─────┐ ┌──────┴──────┐ │ +│ │ Superset │ │ Metabase │ │ Power BI │ ... 
│ +│ │ Plugin │ │ Plugin │ │ Plugin │ │ +│ │ │ │ │ │ │ │ +│ │ ┌──────────┐ │ │ ┌────────┐ │ │ ┌─────────┐ │ │ +│ │ │ 后端路由 │ │ │ │ 后端 │ │ │ │ 后端 │ │ │ +│ │ │ 前端面板 │ │ │ │ 前端 │ │ │ │ 前端 │ │ │ +│ │ │ 认证逻辑 │ │ │ │ 认证 │ │ │ │ 认证 │ │ │ +│ │ │ 目录缓存 │ │ │ │ 目录 │ │ │ │ 目录 │ │ │ +│ │ └──────────┘ │ │ └────────┘ │ │ └─────────┘ │ │ +│ └──────────────┘ └────────────┘ └─────────────┘ │ +│ │ +│ ┌──────────────────────────────────────────────────────────────────┐ │ +│ │ Layer 3: CredentialVault (加密凭证存储,per-user per-source) │ │ +│ │ → 插件认证时自动存/取凭证 │ │ +│ └──────────────────────────────────────────────────────────────────┘ │ +│ │ +│ → 三层统一范式: 抽象基类 + 动态注册 + 环境变量启用 │ +│ → 详见 sso-plugin-architecture.md │ +└──────────────────────────────────────────────────────────────────────┘ +``` + +### 5.3 核心概念 + +| 概念 | 说明 | +|------|------| +| **DataSourcePlugin** | 一个外部 BI 系统的完整集成,包含后端和前端 | +| **Plugin Manifest** | 插件的自我描述(ID、名称、图标、配置需求、启用条件) | +| **Plugin Backend** | Flask Blueprint + 认证 + 目录 + 数据拉取 | +| **Plugin Frontend** | React 组件(面板 UI),在 `UnifiedDataUploadDialog` 中以 Tab 形式呈现 | +| **Plugin Registry** | 后端的插件发现与注册中心 | +| **Plugin Host** | 前端的插件容器,负责渲染已启用插件的面板 | + +--- + +## 6. 后端插件接口 + +### 6.1 插件基类 + +```python +# py-src/data_formulator/plugins/base.py + +from abc import ABC, abstractmethod +from typing import Any +from flask import Blueprint + + +class DataSourcePlugin(ABC): + """外部数据源插件的基类。 + + 每个插件需要实现以下内容: + 1. manifest() — 描述插件自身的元数据 + 2. create_blueprint() — 提供 Flask Blueprint(后端 API 路由) + 3. get_frontend_config() — 声明前端需要的信息(组件标识、配置) + 4. 
on_enable() / on_disable() — 生命周期钩子 + """ + + @staticmethod + @abstractmethod + def manifest() -> dict[str, Any]: + """返回插件的自我描述。 + + manifest() 只包含后端框架需要的声明性信息。 + UI 相关配置(catalog_entry_types 等)由 get_frontend_config() 返回。 + + Returns: + { + "id": "superset", # 唯一标识符,用作路由前缀和前端标识 + "name": "Apache Superset", # 显示名称 + "icon": "superset", # 前端图标标识 + "description": "...", # 简短描述 + "version": "1.0.0", + "env_prefix": "PLG_SUPERSET", # 环境变量前缀(PLG_SUPERSET_URL, etc.) + "required_env": ["PLG_SUPERSET_URL"], # 必需的环境变量(缺失则不启用) + "optional_env": ["PLG_SUPERSET_TIMEOUT"], + "auth_modes": ["sso", "jwt", "password"], # 支持的认证方式(数组) + "capabilities": [ # 框架识别的标准能力标识 + "datasets", # 可以列出数据集 + "dashboards", # 可以列出仪表盘 + "filters", # 支持数据筛选 + "preview", # 支持预览(GET /data/preview) + "refresh", # 支持带参数刷新(POST /data/refresh) + "batch_load", # 支持分批流式加载(NDJSON) + "metadata", # 可提供列描述、表描述等外部元数据 + ], + } + """ + pass + + @abstractmethod + def create_blueprint(self) -> Blueprint: + """创建 Flask Blueprint。 + + Blueprint 的 url_prefix 应为 /api/plugins// + 插件内部路由自行组织,例如: + /api/plugins/superset/auth/login + /api/plugins/superset/catalog/datasets + /api/plugins/superset/data/load-dataset + + Returns: + 配置好路由的 Flask Blueprint + """ + pass + + @abstractmethod + def get_frontend_config(self) -> dict[str, Any]: + """返回传递给前端的配置信息。 + + 这些信息会通过 /api/app-config 的 plugins 字段下发到前端, + 前端据此决定显示哪些插件面板、如何配置。 + UI 相关的声明(如 catalog_entry_types)应放在这里而非 manifest()。 + + Returns: + { + "enabled": True, + "sso_login_url": "http://superset:8088/df-sso-bridge/", + "catalog_entry_types": [ + { + "type": "dataset", + "label": "Datasets", + "icon": "table_chart", + "supports_filters": True, + }, + { + "type": "dashboard_chart", + "label": "Dashboard Charts", + "icon": "dashboard", + "supports_filters": True, + }, + ], + # ... 
其他前端需要的配置 + } + 注意:不要返回密钥等敏感信息。 + """ + pass + + def on_enable(self, app) -> None: + """插件被启用时调用(可选)。 + + 可以用来: + - 注册 Flask extensions + - 初始化连接池或缓存 + - 设置定时任务 + - 获取 CredentialVault 引用(用于 SSO token 或用户凭证的存取) + """ + pass + + def on_disable(self) -> None: + """插件被禁用时调用(可选)。""" + pass + + def get_auth_status(self, session: dict) -> dict[str, Any] | None: + """返回当前用户的认证状态(可选)。 + + 如果插件有自己的认证逻辑,实现此方法来返回当前用户信息。 + 返回 None 表示未认证。 + """ + return None + + def supports_sso_passthrough(self) -> bool: + """此插件是否支持 SSO token 透传。 + + 如果返回 True,插件可以从 auth.get_sso_token() 获取用户的 + OIDC access token,直接用于调用外部系统 API,无需用户单独登录。 + 默认 False。子类按需覆盖。 + """ + return False +``` + +### 6.2 插件加载的数据怎么进入 Workspace + +插件从外部系统拉取到数据后,需要将数据写入 DF 的 Workspace。这个过程参考了 0.6 版本 Superset 集成中的实际经验,需要解决三个问题:**表名管理**、**大数据量写入性能**、**写入方式选择**。 + +#### 5.2.1 表名管理:覆盖 vs 新建 + +0.6 版本的 Superset 集成支持两种表名策略,这个设计在实际使用中被证明非常实用: + +| 操作 | 行为 | 使用场景 | +|------|------|---------| +| **覆盖(默认)** | 用数据集原名写入,已存在则替换 | 刷新数据,获取最新版本 | +| **新建(加后缀)** | 用户指定后缀,生成 `name_suffix` 形式的新表名 | 保留历史快照,对比不同时间点的数据 | + +0.6 前端的实现方式是在每个数据集条目上提供两个按钮: +- 下载图标 → 直接覆盖加载(使用默认表名) +- 加号图标 → 弹出后缀对话框(用户输入后缀,如日期 `20250322`,生成 `sales_20250322`) + +后端通过 `table_name` 参数控制: +- 不传 `table_name`:使用数据集原名,覆盖同名表 +- 传 `table_name`:使用指定名称写入 + +在 0.7 的 Workspace 中,这映射到现有 API: + +```python +from data_formulator.workspace_factory import get_workspace +from data_formulator.datalake.parquet_utils import sanitize_table_name +from data_formulator.auth import get_identity_id + +workspace = get_workspace(get_identity_id()) +safe_name = sanitize_table_name(table_name_override or original_name) + +# write_parquet / write_parquet_from_arrow 已有覆盖逻辑: +# 如果 safe_name 已存在 → 删除旧 parquet → 写入新 parquet +workspace.write_parquet(df, safe_name, loader_metadata={ + "loader_type": "SupersetPlugin", + "loader_params": {"dataset_id": dataset_id, "filters": filters}, + "source_table": original_name, +}) +``` + +如果需要"不覆盖、自动加后缀"的行为(类似 `tables_routes.py` 中 `create_table` 的去重逻辑),可以这样处理: + 
+```python +# 自动去重表名(确保不覆盖已有表) +base_name = sanitize_table_name(table_name) +final_name = base_name +counter = 1 +existing_tables = workspace.list_tables() +while final_name in existing_tables: + final_name = f"{base_name}_{counter}" + counter += 1 +workspace.write_parquet(df, final_name) +``` + +#### 5.2.2 大数据量写入:插件专用写入工具函数 + +0.6 版本中,Superset 数据通过 SQL Lab 查询返回 JSON → 转 DataFrame → 写入存储。对于大数据量(10 万+ 行),有两个性能瓶颈: +1. 从外部系统拉取数据时的网络传输 +2. 写入 Workspace 时的序列化开销 + +为了让所有插件都能高效写入,我们提供一个**插件专用的写入工具函数**,封装常见的写入模式: + +```python +# py-src/data_formulator/plugins/data_writer.py + +import logging +import pandas as pd +import pyarrow as pa +from typing import Any, Optional + +from data_formulator.auth import get_identity_id +from data_formulator.workspace_factory import get_workspace +from data_formulator.datalake.parquet_utils import sanitize_table_name + +logger = logging.getLogger(__name__) + + +class PluginDataWriter: + """插件专用的数据写入工具。 + + 封装了表名管理、覆盖/新建策略、大数据量写入等常用逻辑, + 让插件开发者不需要直接操作 Workspace 底层 API。 + """ + + def __init__(self, plugin_id: str): + self.plugin_id = plugin_id + + def _get_workspace(self): + return get_workspace(get_identity_id()) + + def write_dataframe( + self, + df: pd.DataFrame, + table_name: str, + *, + overwrite: bool = True, + source_metadata: Optional[dict[str, Any]] = None, + ) -> dict[str, Any]: + """将 DataFrame 写入 Workspace。 + + Args: + df: 要写入的数据 + table_name: 目标表名(会自动 sanitize) + overwrite: True=覆盖同名表, False=自动加后缀避免冲突 + source_metadata: 来源元数据(用于刷新等场景) + + Returns: + {"table_name": str, "row_count": int, "columns": list, "is_renamed": bool} + """ + workspace = self._get_workspace() + base_name = sanitize_table_name(table_name) + final_name = base_name + is_renamed = False + + if not overwrite: + counter = 1 + existing = set(workspace.list_tables()) + while final_name in existing: + final_name = f"{base_name}_{counter}" + counter += 1 + is_renamed = True + + loader_metadata = { + "loader_type": f"plugin:{self.plugin_id}", + **(source_metadata or 
{}), + } + + meta = workspace.write_parquet( + df, final_name, loader_metadata=loader_metadata + ) + + logger.info( + "Plugin '%s' wrote table '%s': %d rows, %d cols", + self.plugin_id, final_name, len(df), len(df.columns), + ) + + return { + "table_name": meta.name, + "row_count": meta.row_count, + "columns": [c.name for c in (meta.columns or [])], + "is_renamed": is_renamed, + } + + def write_arrow( + self, + table: pa.Table, + table_name: str, + *, + overwrite: bool = True, + source_metadata: Optional[dict[str, Any]] = None, + ) -> dict[str, Any]: + """将 Arrow Table 写入 Workspace(更高效,跳过 pandas 转换)。""" + workspace = self._get_workspace() + base_name = sanitize_table_name(table_name) + final_name = base_name + is_renamed = False + + if not overwrite: + counter = 1 + existing = set(workspace.list_tables()) + while final_name in existing: + final_name = f"{base_name}_{counter}" + counter += 1 + is_renamed = True + + loader_metadata = { + "loader_type": f"plugin:{self.plugin_id}", + **(source_metadata or {}), + } + + meta = workspace.write_parquet_from_arrow( + table, final_name, loader_metadata=loader_metadata + ) + + return { + "table_name": meta.name, + "row_count": meta.row_count, + "columns": [c.name for c in (meta.columns or [])], + "is_renamed": is_renamed, + } + + def write_batches( + self, + first_batch: pd.DataFrame, + table_name: str, + *, + overwrite: bool = True, + source_metadata: Optional[dict[str, Any]] = None, + ) -> "BatchWriter": + """创建一个批量写入器,适用于数据需要分批拉取的场景。 + + 用法: + writer = data_writer.write_batches(first_df, "my_table") + writer.append(second_df) + writer.append(third_df) + result = writer.finish() + + 内部机制: 先将第一批数据写入临时文件,后续批次追加, + 最后合并为一个完整的 Parquet 文件。 + """ + return BatchWriter( + self, first_batch, table_name, + overwrite=overwrite, + source_metadata=source_metadata, + ) + + +class BatchWriter: + """支持分批追加写入的写入器。 + + 适用于外部系统的数据需要分页/分批拉取的场景(如 Superset SQL Lab + 有行数限制,或网络传输需要分批)。 + + 内部使用 PyArrow 的 RecordBatch 累积数据,最终一次性写入 Parquet, + 避免多次写入的 
I/O 开销。 + """ + + def __init__( + self, + writer: PluginDataWriter, + first_batch: pd.DataFrame, + table_name: str, + *, + overwrite: bool, + source_metadata: Optional[dict[str, Any]], + ): + self._writer = writer + self._table_name = table_name + self._overwrite = overwrite + self._source_metadata = source_metadata + self._batches: list[pa.RecordBatch] = [] + self._total_rows = 0 + self.append(first_batch) + + def append(self, df: pd.DataFrame) -> int: + """追加一批数据。返回目前累积的总行数。""" + if len(df) == 0: + return self._total_rows + batch = pa.RecordBatch.from_pandas(df) + self._batches.append(batch) + self._total_rows += len(df) + return self._total_rows + + def finish(self) -> dict[str, Any]: + """将所有批次合并写入 Workspace,返回写入结果。""" + if not self._batches: + raise ValueError("No data batches to write") + + combined = pa.Table.from_batches(self._batches) + result = self._writer.write_arrow( + combined, + self._table_name, + overwrite=self._overwrite, + source_metadata=self._source_metadata, + ) + + logger.info( + "BatchWriter finished: %d batches, %d total rows → '%s'", + len(self._batches), self._total_rows, result["table_name"], + ) + self._batches.clear() + return result +``` + +#### 5.2.3 插件如何使用写入工具 + +以 Superset 插件的数据加载路由为例: + +```python +# plugins/superset/routes/data.py + +from data_formulator.plugins.data_writer import PluginDataWriter + +writer = PluginDataWriter("superset") + +@bp.route("/data/load-dataset", methods=["POST"]) +def load_dataset(): + # ... 认证、参数解析 ... 
+ + # 从 Superset SQL Lab 拉取数据 + result = superset_client.execute_sql_with_session( + sql_session, db_id, full_sql, schema, row_limit + ) + all_rows = result.get("data", []) or [] + + if not all_rows: + return jsonify({"status": "error", "message": "No data returned"}), 404 + + df = pd.DataFrame(all_rows) + + # 使用写入工具:table_name_override 支持用户自定义表名 + # overwrite=True 表示覆盖同名表(0.6 中的默认下载行为) + # + # source_metadata 结构与前端 DataProvenance 对齐, + # 框架会将其完整存入 loader_metadata,用于后续刷新。 + write_result = writer.write_dataframe( + df, + table_name=table_name_override or original_table_name, + overwrite=True, + source_metadata={ + "source_type": "dataset", + "source_id": str(dataset_id), + "source_name": original_table_name, + "load_params": { + "entry_type": "dataset", + "dataset_id": dataset_id, + "database_id": database_id, + "schema": schema, + "filters": filters, + "row_limit": row_limit, + }, + "refreshable": True, + }, + ) + + return jsonify({ + "status": "ok", + **write_result, # table_name, row_count, columns, is_renamed + }) +``` + +对于需要分批拉取的大数据场景: + +```python +@bp.route("/data/load-dataset-batched", methods=["POST"]) +def load_dataset_batched(): + """分批拉取并写入,支持流式进度报告。""" + # ... 认证、参数解析 ... 
+ + def _generate(): + batch_writer = None + offset = 0 + + while offset < row_limit: + batch_sql = f"{base_sql} LIMIT {batch_size} OFFSET {offset}" + result = superset_client.execute_sql_with_session( + sql_session, db_id, batch_sql, schema, batch_size + ) + rows = result.get("data", []) or [] + if not rows: + break + + df = pd.DataFrame(rows) + + if batch_writer is None: + batch_writer = writer.write_batches( + df, table_name, + overwrite=True, + source_metadata={...}, + ) + else: + batch_writer.append(df) + + offset += len(rows) + + # 流式报告进度 + yield json.dumps({ + "type": "progress", + "loaded_rows": offset, + "batch_size": len(rows), + }, ensure_ascii=False) + "\n" + + if len(rows) < batch_size: + break + + # 合并所有批次,写入 Parquet + if batch_writer: + result = batch_writer.finish() + yield json.dumps({ + "type": "done", + "status": "ok", + **result, + }, ensure_ascii=False) + "\n" + else: + yield json.dumps({ + "type": "done", + "status": "ok", + "row_count": 0, + }, ensure_ascii=False) + "\n" + + return Response( + stream_with_context(_generate()), + content_type="text/x-ndjson; charset=utf-8", + headers={"Cache-Control": "no-cache"}, + ) +``` + +#### 5.2.4 数据溯源描述:结构化条件 + 模板拼接 + +从外部系统加载数据时,用户通常会选择筛选条件(如地区、日期范围)。这些信息需要记录到数据上,以便后续明确"这份数据到底包含什么"。 + +**设计决策:不用 AI 生成描述,用模板拼接。** + +| | AI 生成描述 | 模板拼接 + 原始条件 | +|--|-----------|---------------------| +| 准确性 | 可能编造细节 | 100% 准确 | +| 成本 | 每次加载消耗 token | 零成本 | +| 可刷新 | 描述是文本,无法回放 | `loadParams` 原样回传即可刷新 | + +数据写入时**两层信息同时存储**,各司其职: + +``` +loader_metadata +├── loadParams ← 原始筛选条件(机器用:精确刷新) +│ {"filters": [{"col":"region","op":"==","val":"Asia"}], "row_limit": 50000} +│ +└── description ← 模板拼接的可读摘要(人/AI 用:理解数据内容) + "来源: superset · sales_data\n筛选: region = Asia, order_date >= 2025-01-01\n行数: 12,345" +``` + +`description` 会流向前端 `attachedMetadata`,AI Agent 分析数据时自动进入 prompt,帮助 AI 理解数据的子集范围。 + +**实现位置:`PluginDataWriter.write_dataframe()` 内部自动生成**,所有插件无需额外代码: + +```python +# plugins/data_writer.py — write_dataframe 内部 + +def 
_build_description(self, source_metadata: dict) -> str: + """从 source_metadata 模板拼接可读描述。""" + parts = [f"来源: {self.plugin_id} · {source_metadata.get('source_name', '')}"] + + load_params = source_metadata.get("load_params", {}) + + # 筛选条件(各插件格式不同,尝试通用提取) + filters = load_params.get("filters", []) + if filters: + filter_strs = [] + for f in filters: + if isinstance(f, dict) and "col" in f: + filter_strs.append(f"{f['col']} {f.get('op', '=')} {f['val']}") + if filter_strs: + parts.append(f"筛选: {', '.join(filter_strs)}") + + # 时间范围(常见于仪表盘数据) + time_range = load_params.get("time_range") + if time_range: + parts.append(f"时间范围: {time_range}") + + row_limit = load_params.get("row_limit") + if row_limit: + parts.append(f"行限制: {row_limit}") + + return "\n".join(parts) +``` + +调用时机:`write_dataframe()` / `write_arrow()` 在写入 Parquet 后,自动调用 `_build_description()` 将结果存入 `loader_metadata["description"]`。插件也可通过参数覆盖自动生成的描述。此描述与外部系统自带的表/列描述([§ 7.7](#77-外部系统元数据拉取) 中的 `table_description`、`column_metadata`)互不覆盖——前者记录"加载时用了什么条件",后者记录"这张表/列本身是什么含义",两者并存于 metadata 中。 + +> **各平台的 `filters` 格式差异很大**(见 [§ 7.5 各 BI 平台查询参数兼容性分析](#75-各-bi-平台查询参数兼容性分析))。 +> `_build_description()` 只做"尽力提取":能解析的条件拼成可读文本,无法解析的保留在 `loadParams` 中。 +> 插件也可覆盖 `_build_description()` 来提供更精确的描述。 + +#### 5.2.5 两种写入路径对比 + +| | 直接写入 Workspace(推荐) | 返回 JSON 由前端处理 | +|---|---|---| +| **流向** | 插件后端 → Workspace Parquet | 插件后端 → JSON → 前端 → loadTable → Workspace | +| **适用数据量** | 任意大小 | < 5 万行(受 HTTP 响应大小和前端内存限制) | +| **性能** | 高(Parquet 压缩存储,无 JSON 序列化开销) | 低(JSON 序列化 + 网络传输 + 前端解析) | +| **进度反馈** | 通过 NDJSON 流式响应 | 无(等待完整响应) | +| **前端通知** | 返回 `table_name` → 前端刷新 `list-tables` | 前端收到数据后走 `loadTable` thunk | +| **刷新支持** | 有(`loader_metadata` 记录来源信息,可用于 `refresh-table`) | 无 | + +**结论**:插件应默认使用"直接写入 Workspace"路径。仅在特殊场景(如用户只想预览但不保存、或 `WORKSPACE_BACKEND=ephemeral`)时才返回 JSON。 + +--- + +## 7. 
前端插件接口

+### 7.1 插件面板契约
+
+每个插件需要提供一个 React 组件,该组件遵循以下接口:
+
+```typescript
+// src/plugins/types.ts
+
+export interface PluginManifest {
+  id: string;                    // 与后端 manifest.id 一致
+  name: string;                  // 显示名称
+  icon: string;                  // MUI 图标名或 SVG 路径
+  description: string;
+  authType: 'jwt' | 'oauth' | 'api_key' | 'none';
+  capabilities: string[];
+}
+
+// ───── 数据溯源:记录数据从哪来、用了什么参数 ─────
+
+/**
+ * 每次插件加载数据后,随 onDataLoaded 一起返回。
+ * 核心框架会将其序列化到 loader_metadata,用于刷新/重放。
+ *
+ * 各 BI 平台的 loadParams 差异极大(见下方"兼容性分析"),
+ * 因此 loadParams 设计为 Record<string, any>——框架不解析,
+ * 只原样存储,刷新时原样回传给插件后端。
+ */
+export interface DataProvenance {
+  pluginId: string;                 // "superset" | "metabase" | ...
+  sourceType: string;               // 插件自定义的来源类型,如 "dataset" | "dashboard_chart" | "question"
+  sourceId: string;                 // 外部系统中的唯一标识(dataset_id、card_id 等)
+  sourceName: string;               // 人类可读的名称(用于 UI 显示"来自 xxx")
+  loadParams: Record<string, any>;  // 加载时使用的完整参数(过滤器、行限制、时间范围等)
+  loadedAt: string;                 // ISO 8601 时间戳
+  refreshable: boolean;             // 是否支持用同样的参数刷新
+}
+
+export interface PluginPanelProps {
+  pluginId: string;
+  config: Record<string, any>;        // 从 /api/app-config 获取的插件配置
+  onDataLoaded: (result: {            // 数据加载完成后的回调
+    tableName: string;
+    rowCount: number;
+    columns: string[];
+    source: 'workspace' | 'json';     // 数据在 workspace 中还是在响应 JSON 中
+    rows?: any[];                     // source === 'json' 时提供
+    provenance: DataProvenance;       // 数据溯源信息(用于刷新和 UI 显示)
+  }) => void;
+  onPreviewLoaded?: (result: {        // 预览数据回调(可选,框架支持但不强制)
+    columns: string[];
+    sampleRows: any[];                // 预览行(通常 50-100 行)
+    totalRowEstimate?: number;        // 总行数估计
+    provenance: DataProvenance;
+  }) => void;
+}
+
+export interface DataSourcePluginModule {
+  manifest: PluginManifest;
+
+  // 主面板组件(显示在 UnifiedDataUploadDialog 的 Tab 中)
+  PanelComponent: React.ComponentType<PluginPanelProps>;
+
+  // 登录组件(可选,如果插件有自己的认证流程)
+  LoginComponent?: React.ComponentType<{
+    config: Record<string, any>;
+    onLoginSuccess: () => void;
+  }>;
+}
+```
+
+### 7.2 Plugin Host(前端插件容器)
+
+在 `UnifiedDataUploadDialog.tsx` 中增加一个通用的插件 Tab 渲染逻辑:
+
+```typescript
+// 伪代码:插件面板的渲染
+
+// 1. 
从 /api/app-config 拿到已启用插件列表
+const enabledPlugins = serverConfig.plugins;  // [{ id: "superset", ... }, ...]
+
+// 2. 动态导入对应的插件模块
+const pluginModules = usePluginModules(enabledPlugins);
+
+// 3. 在数据加载对话框中,为每个插件渲染一个 Tab
+{pluginModules.map(plugin => (
+  <Tab key={plugin.manifest.id} label={plugin.manifest.name}>
+    <plugin.PanelComponent
+      pluginId={plugin.manifest.id}
+      config={enabledPlugins.find(p => p.id === plugin.manifest.id)}
+      onDataLoaded={handlePluginDataLoaded}
+    />
+  </Tab>
+))}
+```
+
+### 7.3 数据加载完成后的流程
+
+插件面板通过 `onDataLoaded` 回调通知宿主。宿主根据 `source` 字段决定下一步:
+
+```
+onDataLoaded({ tableName, source, provenance })
+  │
+  ├─ source === 'workspace'
+  │    → 调用 GET /api/tables/list-tables 刷新表列表
+  │    → 新表自动出现在左侧面板
+  │    → provenance 序列化到 loader_metadata(用于刷新)
+  │
+  └─ source === 'json'
+       → 构建 DictTable
+       → dispatch(loadTable({ table }))
+       → 走常规 loadTable 流程(自动写入 Workspace)
+       → provenance 同样保存
+```
+
+### 7.4 数据刷新协议
+
+当一个表的 `loader_metadata` 中包含 `provenance`(即通过插件加载的数据),前端可以在表上显示"刷新"按钮。刷新时将 `provenance` 回传给插件后端:
+
+```
+用户点击表上的「刷新」按钮
+  → 框架取出 loader_metadata.provenance
+  → POST /api/plugins/{provenance.pluginId}/data/refresh
+    Body: {
+      source_type: provenance.sourceType,
+      source_id: provenance.sourceId,
+      load_params: provenance.loadParams,   ← 完整的原始查询参数
+      table_name: 当前表名                   ← 覆盖写入
+    }
+  → 插件后端用同样的参数重新拉取数据
+  → 写入同名表(覆盖)
+  → 前端刷新表列表
+```
+
+后端实现:每个插件可选实现 `/data/refresh` 路由。由于 `load_params` 是插件自己定义和存储的,刷新时原样取出即可:
+
+```python
+# plugins/superset/routes/data.py 中的 refresh 路由
+
+@bp.route("/data/refresh", methods=["POST"])
+def refresh_dataset():
+    data = request.get_json()
+    source_type = data["source_type"]        # "dataset"
+    source_id = data["source_id"]            # "42"
+    load_params = data["load_params"]        # { "filters": [...], "row_limit": 10000, ... 
} + table_name = data["table_name"] # 覆盖同名表 + + # 用 load_params 中的参数重新执行查询——与首次加载逻辑复用 + df = _fetch_dataset(source_id, **load_params) + + return jsonify(writer.write_dataframe(df, table_name, overwrite=True, + source_metadata={"source_type": source_type, "source_id": source_id, + "loader_params": load_params})) +``` + +**关键设计决策**:`load_params` 是个**不透明的 JSON 对象**。框架只负责存储和回传,不解析其内部结构。这样每个 BI 平台都可以在里面放自己特有的参数,框架完全不需要知道各平台的参数细节。 + +### 7.5 各 BI 平台查询参数兼容性分析 + +这是选择"参数不透明"设计的核心原因——各平台的参数差异太大,无法统一: + +#### Superset 的 `load_params` 示例 + +```json +{ + "entry_type": "dataset", + "dataset_id": 42, + "database_id": 1, + "schema": "public", + "filters": [ + { "col": "region", "op": "==", "val": "Asia" }, + { "col": "order_date", "op": ">=", "val": "2025-01-01" } + ], + "row_limit": 50000, + "sql_override": null +} +``` + +从仪表盘图表加载时: + +```json +{ + "entry_type": "dashboard_chart", + "dashboard_id": 7, + "chart_id": 123, + "dataset_id": 42, + "native_filters": { + "NATIVE_FILTER-abc": { "col": "country", "op": "IN", "val": ["CN", "JP"] } + }, + "time_range": "Last 90 days", + "granularity": "P1D", + "row_limit": 10000 +} +``` + +#### Metabase 的 `load_params` 示例 + +```json +{ + "entry_type": "question", + "card_id": 156, + "parameters": [ + { "type": "date/range", "target": ["variable", ["template-tag", "date_range"]], "value": "2025-01-01~2025-03-31" }, + { "type": "category", "target": ["dimension", ["field", 23, null]], "value": ["Active"] } + ] +} +``` + +从 Dashboard 加载时: + +```json +{ + "entry_type": "dashboard", + "dashboard_id": 8, + "card_id": 156, + "dashboard_filters": { + "Status": "Active", + "Date Range": "past30days" + } +} +``` + +#### Power BI 的 `load_params` 示例 + +```json +{ + "entry_type": "report_visual", + "workspace_id": "aaa-bbb-ccc", + "report_id": "ddd-eee-fff", + "page_name": "ReportSection1", + "visual_name": "SalesChart", + "dax_query": "EVALUATE TOPN(10000, Sales, Sales[Date], DESC)", + "slicer_state": { + "Region": ["Asia", "Europe"], + "Year": [2024, 
2025] + } +} +``` + +#### Grafana 的 `load_params` 示例 + +```json +{ + "entry_type": "panel", + "dashboard_uid": "abc123", + "panel_id": 4, + "datasource_uid": "prometheus-1", + "time_range": { "from": "now-24h", "to": "now" }, + "interval": "5m", + "variables": { + "host": "server-01", + "env": "production" + }, + "max_data_points": 1000 +} +``` + +#### Looker 的 `load_params` 示例 + +```json +{ + "entry_type": "explore", + "model_name": "ecommerce", + "explore_name": "orders", + "fields": ["orders.id", "orders.total", "users.name"], + "filters": { + "orders.created_date": "90 days", + "orders.status": "complete" + }, + "sorts": ["orders.total desc"], + "limit": 5000, + "pivots": ["orders.created_month"] +} +``` + +#### 为什么不尝试统一过滤器格式 + +可以看到,每个平台的过滤器语义完全不同: + +| 平台 | 过滤器模型 | 特点 | +|------|----------|------| +| Superset | `col + op + val` + SQL WHERE | 列级过滤 + 原生 SQL | +| Metabase | Template Tag + Dimension Target | 绑定到查询模板的参数化变量 | +| Power BI | Slicer State + DAX Expression | 交互式切片器 + 查询语言 | +| Grafana | Template Variable + Ad-hoc Filter | 变量替换 + 即席过滤 | +| Looker | LookML Filter Expression | 模型驱动的过滤表达式语言 | + +如果强行定义 `UnifiedFilter = { column, operator, value }` 这样的通用格式,会导致: +1. 丢失平台特有能力(如 Metabase 的 Template Tag、Grafana 的 Variable) +2. 每个插件需要做双向转换(通用格式 ↔ 平台原生格式),增加复杂度 +3. 
通用格式不可避免地变成"最大公约数",表达力反而最弱 + +**因此,`load_params` 保持为不透明 JSON 是正确的设计**:框架只负责"存储 → 回传 → 刷新",不试图理解参数内容。 + +### 7.6 预览协议(可选能力) + +部分 BI 平台支持在全量加载前预览少量数据。插件可以通过 `onPreviewLoaded` 回调返回预览结果: + +``` +用户在 Superset 数据集上点击「预览」 + → 插件前端调用 GET /api/plugins/superset/data/preview?dataset_id=42&limit=50 + → 后端拉取 50 行样本数据 + → 返回 JSON(不写入 Workspace) + → 插件前端调用 onPreviewLoaded({ columns, sampleRows, totalRowEstimate }) + → 框架在对话框中渲染预览表格 + → 用户确认后点「加载」→ 走完整的 onDataLoaded 流程 +``` + +预览是可选能力。后端路由约定为 `/data/preview`,前端通过 `manifest.capabilities` 中是否包含 `"preview"` 来判断是否显示预览按钮。 + +### 7.7 外部系统元数据拉取 + +很多 BI 平台为数据集和字段维护了丰富的元数据(描述信息、语义标签、认证状态等)。将这些元数据随数据一起拉取到 DF 有极高价值——它们可以直接填入 DF 已有的 `semanticType` 和 `attachedMetadata` 字段,大幅提升 AI Agent 的分析质量。 + +#### 7.7.1 各 BI 平台提供的元数据对比 + +| 元数据类型 | Superset | Metabase | Power BI | Looker | +|-----------|----------|----------|----------|--------| +| **表级描述** | `dataset.description` | `table.description` | `table.description` | `explore.description` | +| **表级标签** | `dataset.owners`, `is_certified` | `table.visibility_type` | — | `explore.tags` | +| **列名** | `column.column_name` | `field.name` | `column.name` | `field.name` | +| **列描述** | `column.description` | `field.description` | `column.description` | `field.description` | +| **列数据类型** | `column.type` | `field.base_type` | `column.dataType` | `field.type` | +| **列语义类型** | `column.is_dttm`, `filterable`, `groupby` | `field.semantic_type` (如 `type/Email`, `type/FK`) | `column.formatString` | `field.tags` (如 `email`, `currency`) | +| **计算列/度量** | `metric.expression`, `metric.description` | `field.formula` | `measure.expression` | `measure.sql` | +| **值域统计** | — | `field.fingerprint` (分布统计) | — | `field.enumerations` | + +#### 7.7.2 设计:不透明 blob + 文本描述(简化方案) + +> **设计决策**:不扩展 `ColumnInfo` 和 `TableMetadata` 的结构化字段。 +> 理由与 `loadParams` 保持不透明([§ 7.5](#75-各-bi-平台查询参数兼容性分析))相同——各平台元数据格式差异极大,强行统一到 `semantic_type`、`is_metric` 等字段是有损抽象。 +> +> 外部元数据的主要消费者是 **AI prompt**(文本)和 **UI tooltip**(文本),不需要结构化查询。 +> 
如果未来确实出现结构化需求(如"列出所有 certified 的表"),再从 blob 中提取,成本很低。 + +**后端改动最小化**:仅在 `TableMetadata` 新增 1 个可选字段: + +```python +@dataclass +class TableMetadata: + # ... 现有字段全部不动 ... + description: str | None = None # ← 已有,复用 + + # 新增:来自外部系统的原始元数据(插件写入,框架不解析) + external_metadata: dict | None = None +``` + +`ColumnInfo` **不改**。`list-tables` API **不改**。前端 `DictTable` 类型 **不改**。 + +#### 7.7.3 插件如何提供元数据 + +插件在调用 `PluginDataWriter.write_dataframe()` 时,将外部系统的原始元数据塞入 `external_metadata`: + +```python +# plugins/superset/routes/data.py + +result = writer.write_dataframe( + df, table_name, overwrite=True, + source_metadata={...}, # 用于刷新的 loadParams(不变) + external_metadata={ # 外部系统的原始元数据(新增,blob) + "source": "superset", + "dataset_description": dataset_info.get("description"), + "owners": [o["username"] for o in dataset_info.get("owners", [])], + "certified": dataset_info.get("is_certified", False), + "columns": { + col["column_name"]: { + "description": col.get("description"), + "is_dttm": col.get("is_dttm"), + "filterable": col.get("filterable"), + } + for col in dataset_info.get("columns", []) + }, + "metrics": { + m["metric_name"]: { + "description": m.get("description"), + "expression": m.get("expression"), + } + for m in dataset_info.get("metrics", []) + }, + }, +) +``` + +Metabase 插件塞的结构完全不同也没关系——框架不解析它。 + +#### 7.7.4 元数据如何流向 AI + +`PluginDataWriter` 自动将 `external_metadata` 拼成可读文本,写入 `TableMetadata.description`(已有字段): + +``` +来源: superset · sales_data +描述: 公司季度销售数据,按区域和产品线划分 +所有者: alice@corp.com +筛选: region = Asia, order_date >= 2025-01-01 +列: region (销售区域), order_date (订单日期, 时间类型), amount (订单金额, SUM(amount)) +行数: 12,345 +``` + +这段文本通过现有的 `description` → `attachedMetadata` 链路自动进入 AI prompt,**无需修改任何前端代码**。 + +``` +外部 BI 系统 API 后端 前端 +───────────────── ───── ───── +dataset + columns + metrics → external_metadata (blob) + ↓ PluginDataWriter 拼接 + description (文本) → attachedMetadata → AI prompt + loader_params (结构化) → source_metadata → 刷新按钮 +``` + +#### 7.7.5 渐进增强 + +| 层次 | 场景 | 效果 | 
+|------|------|------| +| 0 | Upload/Paste/Database(现有行为) | `external_metadata=None`,AI 自行推断,完全不受影响 | +| 1 | 插件只传数据,不传元数据 | 与 Upload 行为一致 | +| 2 | 插件传了 `external_metadata` | `description` 自动丰富,AI prompt 质量提升 | + +插件开发者把能拿到的元数据原样塞进 `external_metadata` 即可。拼接逻辑在 `PluginDataWriter` 中统一处理,尽力提取可读信息;无法解析的字段静默忽略。 + +--- + +## 8. 插件注册与发现 + +> **权威定义**:插件自动发现与注册(`discover_and_register()`)的完整实现、 +> 插件约定、安全措施、发现流程图、`app.py` 集成方式、`/api/app-config` 暴露方式、 +> 以及前端 `import.meta.glob` 自动发现,均定义在 `1-sso-plugin-architecture.md` § 4.4。 +> +> 本文档不再重复这些内容。以下仅补充 Plugin 文档特有的上下文说明。 + +### 8.1 `/api/app-config` 中的插件字段组装 + +在 `get_app_config()` 中将 `manifest()` 和 `get_frontend_config()` 合并下发: + +```python +from data_formulator.plugins import ENABLED_PLUGINS + +plugins_config = {} +for plugin_id, plugin in ENABLED_PLUGINS.items(): + manifest = plugin.manifest() + frontend_config = plugin.get_frontend_config() + plugins_config[plugin_id] = { + "id": plugin_id, + "name": manifest["name"], + "icon": manifest.get("icon"), + "description": manifest.get("description"), + "auth_modes": manifest.get("auth_modes", ["none"]), + "capabilities": manifest.get("capabilities", []), + **frontend_config, # catalog_entry_types 等 UI 配置在此注入 + } +config["PLUGINS"] = plugins_config +``` + +注意 `auth_modes`(数组)取代了旧的 `auth_type`(单字符串),且 `catalog_entry_types` 由 `get_frontend_config()` 提供而非 `manifest()`。 + +--- + +## 9. 数据流设计 + +### 9.1 端到端流程 + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ 启动阶段 │ +│ │ +│ 1. 环境变量设置 PLG_SUPERSET_URL=http://superset:8088 │ +│ 2. app.py → _register_blueprints() → discover_and_register() │ +│ 3. SupersetPlugin 被实例化,Blueprint 被注册 │ +│ 4. /api/app-config 返回 PLUGINS: { superset: { enabled, ... }} │ +└─────────────────────────────────────────────────────────────────┘ + │ + ↓ +┌─────────────────────────────────────────────────────────────────┐ +│ 前端初始化 │ +│ │ +│ 1. App.tsx 请求 /api/app-config │ +│ 2. 解析 PLUGINS 字段 → 存入 Redux (serverConfig.plugins) │ +│ 3. 
动态加载对应的前端插件模块 │ +│ 4. 如果插件需要认证且未登录 → 显示登录入口 │ +└─────────────────────────────────────────────────────────────────┘ + │ + ↓ +┌─────────────────────────────────────────────────────────────────┐ +│ 认证阶段(以 Superset 为例) │ +│ │ +│ 用户点击「连接 Superset」 │ +│ → 显示 Superset 登录组件 │ +│ → POST /api/plugins/superset/auth/login │ +│ → 后端转发到 Superset JWT 登录 │ +│ → Token 存入 Flask Session │ +│ → 返回用户信息给前端 │ +└─────────────────────────────────────────────────────────────────┘ + │ + ↓ +┌─────────────────────────────────────────────────────────────────┐ +│ 浏览阶段 │ +│ │ +│ 用户打开「数据源」对话框 → 看到 Superset Tab │ +│ → 请求 /api/plugins/superset/catalog/datasets │ +│ → 后端用 JWT 调用 Superset API │ +│ → 返回用户有权限看到的数据集列表 │ +│ → 前端渲染数据集目录(搜索、预览等) │ +└─────────────────────────────────────────────────────────────────┘ + │ + ↓ +┌─────────────────────────────────────────────────────────────────┐ +│ 数据加载阶段 │ +│ │ +│ 用户选中一个数据集,点击「加载」 │ +│ → POST /api/plugins/superset/data/load-dataset │ +│ → 后端通过 Superset SQL Lab 执行查询(尊重 RBAC + RLS) │ +│ → pd.DataFrame → workspace.write_parquet() │ +│ → 返回 { table_name, row_count, columns } │ +│ │ +│ 前端收到响应: │ +│ → onDataLoaded({ tableName, source: 'workspace' }) │ +│ → 触发 list-tables 刷新 │ +│ → 新表出现在左侧面板 │ +│ → 用户可以开始数据分析 │ +└─────────────────────────────────────────────────────────────────┘ +``` + +### 9.2 认证会话管理 + +每个插件的认证信息独立存储在 Flask Session 中,以 `plugin_id` 为前缀隔离: + +```python +# 存储 +session[f"plugin_{plugin_id}_token"] = access_token +session[f"plugin_{plugin_id}_user"] = user_info + +# 读取 +token = session.get(f"plugin_{plugin_id}_token") +``` + +前端通过 `/api/plugins/{plugin_id}/auth/status` 查询当前认证状态。 + +#### 与 SSO 的集成(详见 `sso-plugin-architecture.md`) + +当系统配置了 OIDC SSO 后,插件的认证有三种模式自动协商: + +``` +场景 A: DF 有 SSO + 外部系统也接了同一 IdP + → 自动 SSO Token 透传(用户零交互) + +场景 B: DF 有 SSO + 外部系统没有接 SSO + → 用户首次输入凭证 → 存入 CredentialVault → 后续自动取出 + +场景 C: DF 无 SSO(本地匿名模式) + → 手动登录 → Token 存 Flask Session(现有行为) +``` + +--- + +## 10. 
Superset 插件迁移示例 + +将 0.6 版本的 Superset 集成迁移为插件: + +### 10.1 后端 + +``` +py-src/data_formulator/plugins/superset/ +├── __init__.py # SupersetPlugin 类(实现 DataSourcePlugin) +├── superset_client.py # ← 直接迁移自 0.6 +├── auth_bridge.py # ← 直接迁移自 0.6 +├── catalog.py # ← 直接迁移自 0.6 +├── routes/ +│ ├── __init__.py +│ ├── auth.py # ← 迁移自 0.6 auth_routes.py +│ ├── catalog.py # ← 迁移自 0.6 catalog_routes.py +│ └── data.py # ← 迁移自 0.6 data_routes.py(改用 workspace) +└── requirements.txt # requests(已内置,无额外依赖) +``` + +### 10.2 核心改动 + +**data_routes.py 的变化**:0.6 用 DuckDB,0.7 用 Workspace Parquet + +```python +# 0.6 版本:写入 DuckDB +with db_manager.connection(sid) as conn: + conn.execute(f'DROP TABLE IF EXISTS "{safe_name}"') + conn.execute(f'CREATE TABLE "{safe_name}" AS SELECT * FROM df') + +# 0.7 插件版本:写入 Workspace Parquet +from data_formulator.workspace_factory import get_workspace +from data_formulator.auth import get_identity_id + +workspace = get_workspace(get_identity_id()) +workspace.write_parquet(df, safe_name) +``` + +### 10.3 前端 + +``` +src/plugins/superset/ +├── index.ts # 导出 DataSourcePluginModule +├── SupersetPanel.tsx # ← 迁移自 0.6(Tab 容器) +├── SupersetCatalog.tsx # ← 迁移自 0.6(数据集目录) +├── SupersetDashboards.tsx # ← 迁移自 0.6(仪表盘列表) +├── SupersetFilterDialog.tsx # ← 迁移自 0.6(筛选条件对话框) +├── SupersetLogin.tsx # ← 迁移自 0.6 LoginView(Superset 部分) +└── api.ts # API 调用封装 +``` + +### 10.4 SupersetPlugin 实现 + +```python +class SupersetPlugin(DataSourcePlugin): + + @staticmethod + def manifest(): + return { + "id": "superset", + "name": "Apache Superset", + "icon": "superset", + "description": "Load data from Superset dashboards and datasets", + "version": "1.0.0", + "env_prefix": "PLG_SUPERSET", + "required_env": ["PLG_SUPERSET_URL"], + "auth_modes": ["sso", "jwt", "password"], + "capabilities": [ + "datasets", "dashboards", "filters", + "preview", "refresh", "batch_load", "metadata", + ], + } + + def create_blueprint(self): + from data_formulator.plugins.superset.routes import 
create_superset_blueprint + return create_superset_blueprint(self._client, self._catalog, self._bridge) + + def get_frontend_config(self): + url = os.environ.get("PLG_SUPERSET_URL", "") + return { + "enabled": True, + "sso_login_url": f"{url.rstrip('/')}/df-sso-bridge/" if url else None, + "catalog_entry_types": [ + { + "type": "dataset", + "label": "Datasets", + "icon": "table_chart", + "supports_filters": True, + }, + { + "type": "dashboard_chart", + "label": "Dashboard Charts", + "icon": "dashboard", + "supports_filters": True, + }, + ], + } + + def on_enable(self, app): + url = os.environ["PLG_SUPERSET_URL"] + self._client = SupersetClient(url) + self._bridge = SupersetAuthBridge(url) + self._catalog = SupersetCatalog(self._client) + app.extensions["superset_client"] = self._client + app.extensions["superset_bridge"] = self._bridge + app.extensions["superset_catalog"] = self._catalog +``` + +--- + +## 11. 与现有 ExternalDataLoader 的关系 + +### 11.1 两套机制并行 + +``` +数据源类型 │ 适用机制 │ 原因 +─────────────────┼─────────────────────┼────────────────────── +MySQL/PG/MSSQL │ ExternalDataLoader │ 标准数据库连接,无需额外认证/UI +MongoDB/BigQuery │ ExternalDataLoader │ 同上 +S3/Azure Blob │ ExternalDataLoader │ 文件存储,list+fetch 即可 +─────────────────┼─────────────────────┼────────────────────── +Superset │ DataSourcePlugin │ 需要认证流程、目录浏览、筛选、RBAC +Metabase │ DataSourcePlugin │ 同上 +Power BI │ DataSourcePlugin │ 同上 +Grafana │ DataSourcePlugin │ 同上 +``` + +### 11.2 判断依据 + +使用 **ExternalDataLoader** 的场景: +- 只需要连接参数 → 列出表 → 拉取数据 +- 前端用通用的 `DBManagerPane` 表单即可 + +使用 **DataSourcePlugin** 的场景: +- 有自己的认证体系(JWT / OAuth / SSO) +- 有自己的数据组织方式(仪表盘、报表、数据集等概念) +- 需要尊重外部系统的权限模型 +- 需要专用的 UI(目录浏览、筛选条件等) + +两套机制互不干扰,共存于系统中。 + +### 11.3 ExternalDataLoader 演进方向(已拆分) + +> ExternalDataLoader 的现有缺陷分析和三个改进方案(数据库元数据拉取 P0、SSO Token 透传 P1、凭证持久化 P2) +> 已拆分为独立文档:**`2-external-dataloader-enhancements.md`**。 +> +> 这些改进针对数据库连接器,与 DataSourcePlugin(BI 系统插件)互不干扰, +> 可以独立于插件框架按优先级逐步实施。 + +--- + +## 12. 
插件 i18n 自包含方案
+
+### 12.1 问题
+
+宿主项目的翻译文件(如 `src/i18n/locales/en/common.json`)是核心项目的一部分。如果每个插件的翻译 key 都直接写入这些文件,会导致:
+
+1. **插件开发者被迫修改宿主项目文件** — 违反"自包含"原则
+2. **多个插件的翻译 key 混杂在同一个 JSON 中** — 职责不清
+3. **上游更新时容易冲突** — 宿主 `common.json` 频繁变动
+
+### 12.2 方案:插件自带翻译 + 启动时自动合并
+
+每个插件在自己的目录下维护翻译文件,通过 `DataSourcePluginModule.locales` 字段导出,框架在启动时自动合并到 i18next 的 `translation` namespace 中。
+
+#### 目录结构
+
+```
+src/plugins/superset/
+  ├── locales/
+  │   ├── en.json        ← 插件自己的英文翻译
+  │   └── zh.json        ← 插件自己的中文翻译
+  ├── api.ts
+  ├── SupersetPanel.tsx
+  └── index.tsx          ← 通过 locales 字段导出翻译
+```
+
+#### 翻译文件格式
+
+JSON 结构保持与宿主项目相同的 key path 风格,以 `plugin.{plugin_id}.` 作为命名空间前缀:
+
+```json
+{
+  "plugin": {
+    "superset": {
+      "login": "Sign In",
+      "logout": "Sign Out",
+      "datasets": "Datasets"
+    }
+  }
+}
+```
+
+#### 插件模块导出
+
+```typescript
+// src/plugins/superset/index.tsx
+import en from './locales/en.json';
+import zh from './locales/zh.json';
+
+const supersetPlugin: DataSourcePluginModule = {
+  id: 'superset',
+  Icon: SupersetIcon,
+  Panel: SupersetPanel,
+  locales: { en, zh },
+};
+```
+
+#### 框架自动注册
+
+`src/plugins/registry.ts` 提供 `registerPluginTranslations()` 函数,在应用启动时调用。该函数遍历所有已发现的插件模块,将它们的 `locales` deep-merge 到 i18next:
+
+```typescript
+import i18n from '../i18n';
+
+export function registerPluginTranslations(): void {
+  for (const [, mod] of _modules) {
+    if (!mod.locales) continue;
+    for (const [lang, bundle] of Object.entries(mod.locales)) {
+      i18n.addResourceBundle(lang, 'translation', bundle, true, true);
+    }
+  }
+}
+```
+
+`addResourceBundle(lang, ns, bundle, deep=true, overwrite=true)` 是 i18next 内置 API,deep-merge 到已有 resources,无需重新初始化。
+
+#### 调用时机
+
+在 `src/index.tsx` 中,`import './i18n'`(初始化 i18next)之后、React 渲染之前调用:
+
+```typescript
+import './i18n';
+import { registerPluginTranslations } from './plugins/registry';
+registerPluginTranslations();
+```
+
+由于 `import.meta.glob` 使用 eager 模式,此时所有插件模块(包括其 locales JSON)已经加载完成。
+
+### 12.3 运行机制说明
+
+`locales: { en, zh }` 
是**数据声明**而非语言选择——它声明"该插件提供 en 和 zh 两套翻译"。`registerPluginTranslations()` 会将**所有语言的 bundle 都注册**到 i18next 中。实际使用哪套翻译由 i18next 的语言检测器(`LanguageDetector`)或 `i18n.changeLanguage()` 决定,与宿主项目的语言切换行为完全一致。
+
+### 12.4 不变的地方
+
+- 所有插件组件继续使用 `useTranslation()` 不带参数
+- 所有 `t('plugin.superset.xxx')` 调用不变
+- 宿主项目自己的翻译文件和加载方式完全不变
+- 语言切换自动生效,插件翻译跟随系统设置
+
+### 12.5 新增插件的 i18n 清单
+
+1. 在插件目录下创建 `locales/en.json` 和 `locales/zh.json`(或其他语言)
+2. JSON 顶层结构为 `{ "plugin": { "{plugin_id}": { ... } } }`
+3. 在插件的 `index.tsx` 中导入并通过 `locales` 字段导出
+4. 无需修改宿主项目的任何翻译文件
+
diff --git a/design-docs/1-sso-plugin-architecture.md b/design-docs/1-sso-plugin-architecture.md
new file mode 100644
index 00000000..3e0aba42
--- /dev/null
+++ b/design-docs/1-sso-plugin-architecture.md
@@ -0,0 +1,3695 @@
+# Data Formulator — SSO 认证 + 数据源插件 统一架构设计
+
+## 目录
+
+1. [概述与目标](#1-概述与目标)
+2. [架构全景](#2-架构全景)
+3. [Layer 1:可插拔认证体系 (AuthProvider)](#3-layer-1可插拔认证体系-authprovider)
+   - 3.1~3.2b — AuthProvider 基类(含 `get_auth_info()` 自描述)、Provider 自动发现
+   - 3.3~3.5 — Azure EasyAuth、OIDC(仅 2 个环境变量)、GitHub OAuth、auth.py 重构(基于自动发现)
+   - 3.6~3.8 — 前端 OIDC 流程、回调页面、登录/登出 UI
+   - 3.8b — Token 生命周期管理(静默刷新、401 重试、CORS/CSP、Phase 1 不含 refresh_token 的设计决策与局限性)
+   - [3.9 多协议支持](#39-多协议支持从-oidc-扩展到-saml--ldap--cas--反向代理)
+   - 3.9.1~3.9.2 协议全景对比、双轨模型设计
+   - 3.9.3~3.9.8 Phase 2+ 扩展摘要(反向代理 / SAML / LDAP / CAS)
+   - 3.9.9 前端适配(统一登录入口 + `/api/auth/info` 委托模式)
+   - 3.9.10~3.9.11 模型图、Token 透传差异
+   - 3.9.12~3.9.14 协议选择指南、依赖、优先级
+4. [Layer 2:数据源插件系统 (DataSourcePlugin)](#4-layer-2数据源插件系统-datasourceplugin)
+5. [Layer 3:凭证保险箱 (CredentialVault)](#5-layer-3凭证保险箱-credentialvault)
+6. [SSO Token 透传机制](#6-sso-token-透传机制)
+7. [现有 ExternalDataLoader 的演进路径](#7-现有-externaldataloader-的演进路径)
+8. [身份管理:SSO 时代的简化](#8-身份管理sso-时代的简化)
+9. [配置参考](#9-配置参考)
+10. [目录结构](#10-目录结构)
+11. [实施路径](#11-实施路径)
+12. [安全模型](#12-安全模型)
+13. [FAQ](#13-faq)
+
+---
+
+## 1. 
概述与目标 + +### 1.1 背景 + +Data Formulator 0.7 的身份体系基于两种机制: +- **Azure App Service EasyAuth** — 部署到 Azure 时由平台注入 `X-MS-CLIENT-PRINCIPAL-ID` +- **浏览器 UUID** — 本地使用时 `localStorage` 中的随机 UUID + +这套机制存在三个根本性限制: + +| 限制 | 影响 | +|------|------| +| 仅绑定 Azure 生态 | 使用 Keycloak、Okta、Auth0、Google 等 IdP 的团队无法接入 | +| 无真实用户身份 | 无法实现跨设备数据同步、审计追踪、细粒度权限 | +| 无法透传认证 | 当外部系统(Superset、Metabase)也接了同一个 IdP 时,用户仍需重复登录 | + +同时,数据源连接方面,现有的 `ExternalDataLoader` 体系面向数据库设计,无法覆盖 BI 报表系统的复杂认证、数据浏览和权限透传需求。 + +### 1.2 设计目标 + +构建一套 **SSO 认证 + 数据源插件 + 凭证管理** 的统一架构,实现: + +1. **通用 SSO 登录** — 用户通过 OIDC、SAML、LDAP、CAS 或反向代理等任意方式登录 Data Formulator +2. **插件化数据源** — BI 报表系统(Superset、Metabase、Power BI 等)以插件形式接入,新增系统不修改核心代码 +3. **SSO Token 透传** — 外部系统与 DF 共用同一 IdP 时,用户无需重复登录 +4. **凭证保险箱** — 未接 SSO 的外部系统,凭证在服务端加密存储,跨设备可用 +5. **向后兼容** — 本地个人使用场景下,匿名浏览器模式依然可用,零配置启动 + +### 1.3 设计原则 + +| 原则 | 说明 | +|------|------| +| **不自建用户管理** | 认证是 IdP 的事,DF 只做身份消费者,不管理密码和注册 | +| **插件自包含** | 每个外部系统的后端路由 + 前端 UI + 认证逻辑完全独立于核心代码 | +| **渐进式采纳** | 本地模式 → 加 SSO → 加插件 → 加凭证保险箱,每一步都可独立部署 | +| **安全纵深** | 认证链路上 OIDC JWT 验签、服务端凭证加密、Workspace 身份隔离 三层防护 | + +--- + +## 2. 
架构全景 + +``` +┌─────────────────────────────────────────────────────────────────────────────┐ +│ 用户浏览器 │ +│ │ +│ ┌──────────────┐ ┌──────────────────┐ ┌──────────────────────────────┐ │ +│ │ OIDC Login │ │ Credential │ │ Data Source Dialog │ │ +│ │ (PKCE flow) │ │ Manager UI │ │ ┌──────┐ ┌────────┐ ┌────┐ │ │ +│ │ │ │ (per-source) │ │ │Upload│ │Database│ │插件│ │ │ +│ └──────┬───────┘ └────────┬─────────┘ │ │Paste │ │(现有) │ │Tab │ │ │ +│ │ │ │ │URL │ │ │ │ │ │ │ +│ ▼ ▼ │ └──────┘ └────────┘ └──┬─┘ │ │ +│ ┌─────────────────────────────────────────────────────────────────┘ │ +│ │ fetchWithIdentity (增强) │ +│ │ X-Identity-Id: user:alice@corp.com (SSO 登录后) │ +│ │ Authorization: Bearer │ +│ └──────────────────────────┬───────────────────────────────────────────┘ +│ │ │ +└──────────────────────────────┼──────────────────────────────────────────────┘ + │ HTTPS + ▼ +┌──────────────────────────────────────────────────────────────────────────────┐ +│ Flask 后端 │ +│ │ +│ ┌─────────────────────────────────────────────────────────────────────────┐ │ +│ │ auth.py — get_identity_id() │ │ +│ │ │ │ +│ │ AUTH_PROVIDER (单选) + 匿名回退: │ │ +│ │ ┌─────────────────────────────────┐ ┌───────────────────────────┐ │ │ +│ │ │ 主 Provider (由 AUTH_PROVIDER │ │ Browser UUID │ │ │ +│ │ │ 指定: oidc / github / azure / │→ │ (ALLOW_ANONYMOUS=true时) │ │ │ +│ │ │ proxy / saml / ldap / cas) │ │ │ │ │ +│ │ └─────────────────────────────────┘ └───────────────────────────┘ │ │ +│ └────────────────────────────────┬────────────────────────────────────────┘ │ +│ │ │ +│ ┌────────────────────────────────┼────────────────────────────────────────┐ │ +│ │ Plugin Registry + Credential Vault │ │ +│ │ │ │ +│ │ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │ │ +│ │ │ Superset │ │ Metabase │ │ Power BI │ │ Grafana │ │ │ +│ │ │ Plugin │ │ Plugin │ │ Plugin │ │ Plugin │ ... 
│ │ +│ │ │ │ │ │ │ │ │ │ │ │ +│ │ │ 认证: SSO透传│ │ 认证: 用户名 │ │ 认证: OAuth │ │ 认证: API Key│ │ │ +│ │ │ 或 JWT登录 │ │ /密码+保险箱│ │ +SSO透传 │ │ +保险箱 │ │ │ +│ │ └──────┬──────┘ └──────┬──────┘ └──────┬──────┘ └──────┬──────┘ │ │ +│ │ │ │ │ │ │ │ +│ │ ▼ ▼ ▼ ▼ │ │ +│ │ ┌─────────────────────────────────────────────────────────────┐ │ │ +│ │ │ Credential Vault (加密凭证存储) │ │ │ +│ │ │ per-user, per-source 的服务端加密存储 │ │ │ +│ │ └─────────────────────────────────────────────────────────────┘ │ │ +│ └────────────────────────────────────────────────────────────────────────┘ │ +│ │ │ +│ ┌────────────────────────────────┼────────────────────────────────────────┐ │ +│ │ Data Layer (不变) │ │ +│ │ ExternalDataLoader (9种DB) + Workspace (Parquet) + Redux Store │ │ +│ └────────────────────────────────────────────────────────────────────────┘ │ +└──────────────────────────────────────────────────────────────────────────────┘ + │ + ▼ + ┌─────────────────────┐ + │ 外部系统 │ + │ Superset / Metabase │ + │ / Power BI / ... │ + └─────────────────────┘ +``` + +**三层分工**: + +| 层 | 职责 | 状态 | +|----|------|------| +| **Layer 1: AuthProvider** | 解决"谁在用 DF" — 单一认证源 + 匿名回退 | 扩展现有 `auth.py` | +| **Layer 2: DataSourcePlugin** | 解决"从哪拉数据" — 外部 BI 系统的插件化接入 | 新建插件框架 | +| **Layer 3: CredentialVault** | 解决"用什么身份访问外部系统" — 加密凭证存储 | 新建 | + +**三层分层依赖、向上服务**:Layer 1 是地基,确定用户身份(`user:xxx` 或 `browser:xxx`);Layer 3 依赖 Layer 1 的身份信息按用户存取凭证;Layer 2 同时依赖 Layer 1(获取 SSO token)和 Layer 3(获取已存凭证)来访问外部系统。依赖方向单向向下,不存在循环依赖。 + +``` +Layer 2: DataSourcePlugin ──依赖──→ Layer 1: AuthProvider (身份 + SSO token) + │ ▲ + └──依赖──→ Layer 3: CredentialVault ────┘ (按用户身份存取) +``` + +### 统一的插件范式 + +三层虽然解决不同问题,但共享同一套 **"抽象基类 + 环境变量声明依赖 + 按需自动启用"** 的设计范式: + +| 维度 | AuthProvider | DataSourcePlugin | CredentialVault | +|------|-------------|-----------------|-----------------| +| 抽象基类 | `AuthProvider(ABC)` | `DataSourcePlugin(ABC)` | `CredentialVault(ABC)` | +| 动态加载 | `importlib.import_module` | 目录自动扫描 (`pkgutil`) | 工厂函数 `get_credential_vault()` | +| 启用判定 | 
`AUTH_PROVIDER` 环境变量指定 | `manifest()` 中 `required_env`(`PLG_` 前缀)全部存在 | `CREDENTIAL_VAULT_KEY` 存在 | +| 按需启用 | 未指定则匿名模式 | 缺 `required_env` 则跳过 | 缺密钥则返回 None | +| 新增方式 | **在 `auth_providers/` 下创建 `.py` 即可**(自动发现) | **在 `plugins/` 下创建目录即可**(零改动) | 写一个 `.py` + 工厂加一个分支 | +| 生命周期钩子 | `on_configure(app)` | `on_enable(app)` | — | + +**协作模式**: + +``` +AuthProvider — 单选 + 匿名回退 (Single Provider + Fallback) + ┌──────────────────┐ ┌──────────────┐ + │ 主 Provider │ ──→ │ Browser UUID │ + │ (由 AUTH_PROVIDER │ 未命中│ (匿名回退) │ + │ 环境变量指定) │ │ │ + └──────────────────┘ └──────────────┘ + ▸ Phase 1: AUTH_PROVIDER=oidc / github / azure_easyauth + ▸ Phase 2+: proxy_header / saml / ldap / cas + ▸ 同一时间只有一个主 Provider 生效 + ▸ ALLOW_ANONYMOUS=true 时允许匿名回退,否则未认证请求被拒绝 + +DataSourcePlugin — 并行 (Registry) + ┌──────────┐ ┌──────────┐ ┌──────────┐ + │ Superset │ │ Metabase │ │ Power BI │ ... + │ Plugin │ │ Plugin │ │ Plugin │ + └──────────┘ └──────────┘ └──────────┘ + ▸ 所有已启用插件同时存在(目录自动扫描,无需注册) + ▸ 每个插件注册独立的 Blueprint 路由 + ▸ 前端为每个插件渲染一个独立 Tab + ▸ 用户可以同时连多个系统 + +CredentialVault — 单例 (Strategy) + ┌──────────┐ 或 ┌──────────────┐ 或 ┌──────────────┐ + │ Local │ │ Azure │ │ HashiCorp │ + │ (SQLite) │ │ Key Vault │ │ Vault │ ... + └──────────┘ └──────────────┘ └──────────────┘ + ▸ 同一时间只有一个实现生效 + ▸ 由 CREDENTIAL_VAULT 环境变量选择 + ▸ 所有插件和 DataLoader 共享同一个 Vault 实例 +``` + +这套统一范式意味着:未来无论是新增认证方式、新增数据源(如 Grafana、Tableau)、还是新增凭证后端(如 HashiCorp Vault),步骤都是相同的 —— **写一个实现类,配置环境变量启用**。核心代码零修改。 + +认证体系进一步细分为两轨: +- **A 类(无状态)** — 直接实现 `AuthProvider.authenticate()`,无需额外路由(如 OIDC、Azure EasyAuth、反向代理头、GitHub OAuth) +- **B 类(有状态)** — 编写 Login Gateway Blueprint + 复用通用 `SessionProvider`(如 SAML、LDAP、CAS) + +详见 [3.9 多协议支持](#39-多协议支持从-oidc-扩展到-saml--ldap--cas--反向代理)。 + +--- + +## 3. 
Layer 1:可插拔认证体系 (AuthProvider) + +### 3.1 设计思路 + +将现有 `auth.py` 中硬编码的三级优先级,重构为 **单一 AuthProvider + 匿名回退** 模型。管理员通过 `AUTH_PROVIDER` 环境变量选择一种认证方式,框架自动加载对应的 Provider。支持无状态(OIDC/Header)和有状态(SAML/LDAP/CAS via Session)两种认证模型(详见 [3.9](#39-多协议支持从-oidc-扩展到-saml--ldap--cas--反向代理))。 + +``` +请求进入 + │ + ├─ 主 Provider (由 AUTH_PROVIDER 环境变量指定,Phase 1): + │ ├─ oidc → 检查 Authorization: Bearer → 命中 → user:xxx + │ ├─ azure_easyauth → 检查 X-MS-CLIENT-PRINCIPAL-ID → 命中 → user:xxx + │ └─ github → 检查 Flask session (OAuth) → 命中 → user:xxx + │ + │ Phase 2+ 扩展: + │ ├─ proxy_header → 检查 X-Forwarded-User (可信IP) → 命中 → user:xxx + │ ├─ saml → 检查 Flask session (SAML ACS) → 命中 → user:xxx + │ ├─ ldap → 检查 Flask session (LDAP bind) → 命中 → user:xxx + │ └─ cas → 检查 Flask session (CAS ticket) → 命中 → user:xxx + │ + ├─ 匿名回退 (ALLOW_ANONYMOUS=true 时): + │ └─ BrowserIdentity → 检查 X-Identity-Id → 命中 → browser:xxx + │ + └─ 全部未命中 → 401 Unauthorized +``` + +### 3.2 AuthProvider 基类 + +```python +# py-src/data_formulator/auth_providers/base.py + +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import Any, Optional +from flask import Request + + +@dataclass +class AuthResult: + """认证结果(Phase 1 精简版,仅保留核心字段)。 + + 设计决策 — 不包含 refresh_token: + Phase 1 采用纯前端(oidc-client-ts)管理 token 刷新,后端保持无状态。 + refresh_token 仅存在于前端 UserManager 内部,不经过后端,不在此结构中出现。 + 局限性及未来扩展方向详见 3.8b 节"设计决策与局限性"。 + """ + user_id: str # 唯一标识 (sub claim / principal ID / UUID) + display_name: Optional[str] = None # 显示名称 + email: Optional[str] = None # 邮箱 + raw_token: Optional[str] = None # 原始 access_token (用于 SSO 透传,非 refresh_token) + + +class AuthProvider(ABC): + """认证提供者基类。 + + 每个 Provider 从 HTTP 请求中尝试提取并验证用户身份。 + 返回 None 表示此 Provider 不适用,交给链中的下一个。 + 抛出异常表示认证信息存在但无效(如 token 过期)。 + """ + + @property + @abstractmethod + def name(self) -> str: + """Provider 名称,用于日志和调试。""" + ... 
+ + @abstractmethod + def authenticate(self, request: Request) -> Optional[AuthResult]: + """尝试从请求中提取用户身份。 + + Returns: + AuthResult — 认证成功 + None — 此 Provider 不适用(请求中没有此 Provider 的认证信息) + + Raises: + AuthenticationError — 认证信息存在但无效(token 过期、签名错误等) + """ + ... + + @property + def enabled(self) -> bool: + """Provider 是否已正确配置(必需的环境变量等)。 + + 默认返回 True。子类可覆盖此属性,当必要配置缺失时返回 False, + init_auth() 会据此拒绝激活该 Provider 并输出日志。 + """ + return True + + def on_configure(self, app) -> None: + """Flask app 创建后调用,可用于初始化(如下载 JWKS)。""" + pass + + def get_auth_info(self) -> dict[str, Any]: + """返回前端所需的认证配置信息(供 /api/auth/info 端点使用)。 + + 每个 Provider 自描述其前端交互方式,消除 auth.py 中的 switch 语句。 + 新增 Provider 时只需实现此方法,无需修改 auth.py。 + + Returns: + { + "action": "frontend" | "redirect" | "form" | "transparent" | "none", + "label": "显示名称", + ... (Provider 特定的配置) + } + """ + return {"action": "none"} + + +class AuthenticationError(Exception): + """认证信息存在但验证失败。""" + def __init__(self, message: str, provider: str = ""): + self.provider = provider + super().__init__(message) +``` + +### 3.2b Provider 自动发现(auth_providers/__init__.py) + +`auth_providers` 包在导入时自动扫描同目录下的所有模块,发现并注册所有 `AuthProvider` 子类。 +新增 Provider 只需在 `auth_providers/` 下创建 `.py` 文件并实现 `AuthProvider` 子类,**无需修改任何注册表或配置文件**。 + +```python +# py-src/data_formulator/auth_providers/__init__.py + +import importlib +import inspect +import logging +import pkgutil +from typing import Optional + +from .base import AuthProvider + +_log = logging.getLogger(__name__) + +_PROVIDER_REGISTRY: dict[str, type[AuthProvider]] = {} + +def _discover_providers() -> None: + """扫描 auth_providers/ 目录,收集所有 AuthProvider 子类。""" + for finder, module_name, ispkg in pkgutil.iter_modules(__path__): + if module_name == "base": + continue + try: + mod = importlib.import_module(f".{module_name}", __package__) + for attr_name in dir(mod): + cls = getattr(mod, attr_name) + if (isinstance(cls, type) + and issubclass(cls, AuthProvider) + and cls is not AuthProvider): + instance = cls() 
+ _PROVIDER_REGISTRY[instance.name] = cls + _log.debug("Discovered auth provider: '%s' from %s", + instance.name, module_name) + except ImportError as e: + _log.debug("Skipped '%s' (missing dep): %s", module_name, e) + +_discover_providers() + + +def get_provider_class(name: str) -> Optional[type[AuthProvider]]: + """根据 AUTH_PROVIDER 环境变量的值获取对应的 Provider 类。""" + return _PROVIDER_REGISTRY.get(name) + + +def list_available_providers() -> list[str]: + """返回所有已发现的 Provider 名称(用于日志和错误提示)。""" + return sorted(_PROVIDER_REGISTRY.keys()) +``` + +**工作原理**: + +1. `pkgutil.iter_modules(__path__)` 扫描 `auth_providers/` 目录下的所有 `.py` 文件 +2. 跳过 `base.py`(基类不是具体 Provider) +3. 对每个模块,用 `inspect` 逻辑找出所有 `AuthProvider` 子类 +4. 实例化以获取 `name` 属性(来自子类的 `@property`),作为注册表的 key +5. 依赖缺失的模块(如 Phase 2 的 SAML 需要 `python3-saml`)会被静默跳过 + +**安全性保障**:自动发现只决定"有哪些 Provider 可用",实际激活哪个仍由 `AUTH_PROVIDER` 环境变量**单选**控制。 +未被选中的 Provider 不会执行 `on_configure()`,不会处理任何请求。 + +### 3.3 Azure EasyAuth Provider(迁移现有逻辑) + +```python +# py-src/data_formulator/auth_providers/azure_easyauth.py + +import logging +from flask import Request +from typing import Optional +from .base import AuthProvider, AuthResult + +logger = logging.getLogger(__name__) + + +class AzureEasyAuthProvider(AuthProvider): + """Azure App Service 内置认证 (EasyAuth)。 + + 当 DF 部署在 Azure App Service 并启用了身份验证时, + Azure 会在请求到达 Flask 之前验证用户身份,并注入以下头: + - X-MS-CLIENT-PRINCIPAL-ID: 用户的 Object ID + - X-MS-CLIENT-PRINCIPAL-NAME: 用户名 (可选) + + 这些头由 Azure 基础设施设置,客户端无法伪造。 + """ + + @property + def name(self) -> str: + return "azure_easyauth" + + def authenticate(self, request: Request) -> Optional[AuthResult]: + principal_id = request.headers.get("X-MS-CLIENT-PRINCIPAL-ID") + if not principal_id: + return None + + principal_name = request.headers.get("X-MS-CLIENT-PRINCIPAL-NAME", "") + logger.debug("Azure EasyAuth: principal_id=%s...", principal_id[:8]) + + return AuthResult( + user_id=principal_id.strip(), + display_name=principal_name.strip() or None, + ) +``` + 
+### 3.4 OIDC Provider(新增核心) + +```python +# py-src/data_formulator/auth_providers/oidc.py + +import logging +import os +import time +from typing import Optional + +import jwt +from jwt import PyJWKClient +from flask import Request + +from .base import AuthProvider, AuthResult, AuthenticationError + +logger = logging.getLogger(__name__) + + +class OIDCProvider(AuthProvider): + """通用 OIDC (OpenID Connect) 认证提供者。 + + 支持任何标准 OIDC 兼容的 Identity Provider: + - Keycloak + - Okta + - Auth0 + - Azure AD / Entra ID + - Google Identity Platform + - Authelia / Authentik + - Casdoor + + 工作流程: + 1. 前端通过 PKCE Authorization Code Flow 从 IdP 获取 access_token + 2. 前端将 access_token 放在 Authorization: Bearer 头中发送 + 3. 本 Provider 用 IdP 的 JWKS 公钥验证 token 签名和 claims + 4. 验证通过后提取 sub (用户唯一ID)、name、email 等信息 + + 配置(环境变量)— 仅需两项: + OIDC_ISSUER_URL — IdP 的 issuer URL + OIDC_CLIENT_ID — 注册的 client ID (同时用作 audience 校验) + + 其余信息(JWKS URI、签名算法)从 OIDC Discovery 自动获取, + claim 名称遵循 OIDC 标准 (sub / name / email),无需配置。 + """ + + def __init__(self): + self._issuer = os.environ.get("OIDC_ISSUER_URL", "").strip().rstrip("/") + self._client_id = os.environ.get("OIDC_CLIENT_ID", "").strip() + + self._jwks_client: Optional[PyJWKClient] = None + self._jwks_uri: Optional[str] = None + self._algorithms: list[str] = ["RS256"] + + @property + def name(self) -> str: + return "oidc" + + @property + def enabled(self) -> bool: + return bool(self._issuer and self._client_id) + + def on_configure(self, app) -> None: + if not self.enabled: + logger.info("OIDC provider not configured (OIDC_ISSUER_URL / OIDC_CLIENT_ID missing)") + return + + # 从 OIDC Discovery 自动获取 JWKS URI 和签名算法 + try: + import urllib.request, json + discovery_url = f"{self._issuer}/.well-known/openid-configuration" + with urllib.request.urlopen(discovery_url, timeout=10) as resp: + discovery = json.loads(resp.read()) + + self._jwks_uri = discovery["jwks_uri"] + self._jwks_client = PyJWKClient(self._jwks_uri, cache_keys=True) + + if 
"id_token_signing_alg_values_supported" in discovery: + self._algorithms = discovery["id_token_signing_alg_values_supported"] + + logger.info( + "OIDC provider configured: issuer=%s, client_id=%s", + self._issuer, self._client_id, + ) + except Exception as e: + logger.error("Failed to initialize OIDC provider: %s", e) + + def get_auth_info(self) -> dict: + """返回 OIDC 前端配置,供 /api/auth/info 端点使用。""" + return { + "action": "frontend", + "label": os.environ.get("AUTH_DISPLAY_NAME", "SSO Login"), + "oidc": { + "authority": self._issuer, + "clientId": self._client_id, + "scopes": "openid profile email", + }, + } + + def authenticate(self, request: Request) -> Optional[AuthResult]: + if not self._jwks_client: + return None + + auth_header = request.headers.get("Authorization", "") + if not auth_header.startswith("Bearer "): + return None + + token = auth_header[7:].strip() + if not token: + return None + + try: + signing_key = self._jwks_client.get_signing_key_from_jwt(token) + payload = jwt.decode( + token, + signing_key.key, + algorithms=self._algorithms, + issuer=self._issuer, + audience=self._client_id, + options={ + "verify_exp": True, + "verify_iss": True, + "verify_aud": True, + }, + ) + except jwt.ExpiredSignatureError: + raise AuthenticationError("OIDC token expired", provider=self.name) + except jwt.InvalidTokenError as e: + raise AuthenticationError(f"Invalid OIDC token: {e}", provider=self.name) + + user_id = payload.get("sub") + if not user_id: + raise AuthenticationError( + "OIDC token missing 'sub' claim", + provider=self.name, + ) + + return AuthResult( + user_id=str(user_id), + display_name=payload.get("name"), + email=payload.get("email"), + raw_token=token, + ) +``` + +#### 对接 OIDC/OAuth2 Provider 的 IdP 要求 + +> **适用范围**:任何使用 `AUTH_PROVIDER=oidc`(或别名 `oauth2`)对接的身份提供者, +> 包括 Keycloak、Auth0、Okta、自建 SSO 等。 + +**为什么既写 `oidc` 又写 `oauth2`?** + +`AUTH_PROVIDER=oidc` 和 `AUTH_PROVIDER=oauth2` 是同一个 Provider 的两个名字(别名),实际行为完全相同:后端用 JWKS 验 JWT 签名,前端用 Authorization 
Code + PKCE 流程。之所以加别名,是因为该 Provider 不要求严格的 OpenID Connect 协议——任何支持 JWT + JWKS 的 OAuth2 身份提供者都可以使用。 + +**Data Formulator 支持两种配置模式**: + +##### 模式 A:自动发现(推荐,适用于标准 OIDC IdP) + +只需 3 个环境变量,其余端点自动从 Discovery 获取: + +``` +AUTH_PROVIDER=oidc # 或 oauth2,等价 +OIDC_ISSUER_URL=https://your-idp.example.com/path +OIDC_CLIENT_ID=your-client-id +``` + +**工作原理**:启动时后端请求 `{OIDC_ISSUER_URL}/.well-known/openid-configuration`,从返回的 JSON 中自动提取 `authorization_endpoint`、`token_endpoint`、`userinfo_endpoint`、`jwks_uri` 等全部端点地址。前端 `oidc-client-ts` 也会独立请求同一个 Discovery 端点获取授权和 Token 地址。 + +**适用场景**:Keycloak、Auth0、Okta、Azure AD / Entra ID、Google、Authelia、Authentik、Casdoor 等标准 OIDC IdP 均原生支持 Discovery,无需额外配置。 + +**IdP 要求**: +- `{OIDC_ISSUER_URL}/.well-known/openid-configuration` 必须可访问且返回合法 JSON +- 返回的 `issuer` 字段值必须与 `OIDC_ISSUER_URL` **完全一致**(含协议、端口、路径,不含尾部 `/`) +- Discovery JSON 中必须包含 `authorization_endpoint`、`token_endpoint`、`jwks_uri` +- JWKS 端点返回的公钥用于后端本地验证 JWT 签名(高性能,无额外网络开销) + +**判断你的 IdP 是否支持**:在浏览器中直接访问 `{你的 ISSUER URL}/.well-known/openid-configuration`,如果能看到 JSON 响应则支持模式 A;如果返回 404 或错误页面,请使用模式 B。 + +##### 模式 B:手动端点(适用于无 Discovery 的 OAuth2 服务器) + +许多企业自建的 OAuth2 系统没有实现 OIDC Discovery。此时可直接配置端点 URL: + +``` +AUTH_PROVIDER=oidc +OIDC_ISSUER_URL=https://sso.example.com +OIDC_CLIENT_ID=your-client-id +OIDC_AUTHORIZE_URL=https://sso.example.com/oauth2/authorize +OIDC_TOKEN_URL=https://sso.example.com/oauth2/token +OIDC_USERINFO_URL=https://sso.example.com/oauth2/userinfo +# OIDC_JWKS_URL=... # 可选,如无则通过 UserInfo 端点验证 token +# OIDC_CLIENT_SECRET=... # 可选,机密客户端才需要 +``` + +**优先级**:手动配置的值 > Discovery 自动发现的值。两者可以混用。 + +##### 端点功能说明 + +| 环境变量 | 用途 | 必须? 
|
+|----------|------|--------|
+| `OIDC_ISSUER_URL` | IdP 标识 / Discovery 基础 URL | **必须** |
+| `OIDC_CLIENT_ID` | 在 IdP 注册的应用 ID | **必须** |
+| `OIDC_AUTHORIZE_URL` | 前端浏览器跳转进行用户授权 | 模式 B 必须 |
+| `OIDC_TOKEN_URL` | 前端用 authorization code 换取 access_token | 模式 B 必须 |
+| `OIDC_USERINFO_URL` | 后端验证 token(无 JWKS 时的回退方案) | 推荐 |
+| `OIDC_JWKS_URL` | 后端本地验证 JWT 签名(更高效) | 可选 |
+| `OIDC_CLIENT_SECRET` | 机密客户端的密钥 | 可选 |
+
+##### Token 验证策略
+
+后端按以下优先级验证 access_token:
+
+1. **JWKS 本地验证**(首选):用 `OIDC_JWKS_URL` 提供的公钥验证 JWT 签名,校验 `iss`、`aud`、`exp` claim。性能最优,无网络开销。
+2. **UserInfo 远程验证**(回退):向 `OIDC_USERINFO_URL` 发送 `Authorization: Bearer {access_token}` 请求。如果 IdP 返回用户信息,则 token 有效。适用于无 JWKS 的 OAuth2 服务器或使用不透明 token 的场景。
+
+##### Discovery 端点最小 JSON 格式(自建 SSO 如选择实现)
+
+```json
+{
+  "issuer": "https://your-sso.example.com/path",
+  "jwks_uri": "https://your-sso.example.com/path/jwks",
+  "authorization_endpoint": "https://your-sso.example.com/path/authorize",
+  "token_endpoint": "https://your-sso.example.com/path/token",
+  "userinfo_endpoint": "https://your-sso.example.com/path/userinfo"
+}
+```
+
+> **自建 SSO 提示**:如果不想实现 Discovery,使用模式 B 手动配置端点即可,无需修改 SSO。
+
+##### 回调地址
+
+在 IdP 中将 `http(s)://{your-df-host}/callback` 注册为合法 redirect URI。
+
+**JWT Access Token claim 要求**(使用 JWKS 验证时):
+
+| Claim | 用途 | 必须? 
|
+|-------|------|--------|
+| `sub` | 用户唯一 ID → `get_identity_id()` 返回 `user:{sub}` | **必须** |
+| `iss` | 后端验证 token 来源 | **必须** |
+| `aud` | 后端验证 token 受众 = `OIDC_CLIENT_ID` | **必须** |
+| `exp` | 后端验证 token 未过期 | **必须** |
+| `name` | 前端显示用户名 | 推荐 |
+| `email` | 前端显示邮箱 | 推荐 |
+
+> 使用 UserInfo 验证时,后端从 UserInfo 响应中提取 `sub`/`id`/`user_id`、`name`/`username`、`email`,对 JWT 内部 claim 无要求。
+
+### 3.4b GitHub OAuth Provider(新增 — 社交登录)
+
+GitHub OAuth 是纯 OAuth2(不是 OIDC,没有 `id_token`),需要后端完成授权码交换,因此属于 **B 类有状态 Provider**。
+
+```python
+# py-src/data_formulator/auth_providers/github_oauth.py
+
+import os
+import logging
+from typing import Optional
+from flask import Request, session
+
+from .base import AuthProvider, AuthResult
+
+logger = logging.getLogger(__name__)
+
+
+class GitHubOAuthProvider(AuthProvider):
+    """GitHub OAuth 2.0 认证。
+
+    配置(环境变量):
+        GITHUB_CLIENT_ID — GitHub OAuth App 的 Client ID (必需)
+        GITHUB_CLIENT_SECRET — GitHub OAuth App 的 Client Secret (必需)
+
+    工作流程(B 类有状态):
+    1. 前端重定向到 /api/auth/github/login → 302 到 GitHub 授权页
+    2. 用户授权后 GitHub 回调到 /api/auth/github/callback
+    3. 后端用 code 换取 access_token,查询 /user API 获取用户信息
+    4. 写入 Flask session
+    5. 
后续请求由本 Provider 从 session 中读取身份 + """ + + def __init__(self): + self._client_id = os.environ.get("GITHUB_CLIENT_ID", "").strip() + self._client_secret = os.environ.get("GITHUB_CLIENT_SECRET", "").strip() + + @property + def name(self) -> str: + return "github" + + @property + def enabled(self) -> bool: + return bool(self._client_id and self._client_secret) + + def authenticate(self, request: Request) -> Optional[AuthResult]: + """从 Flask session 中读取 GitHub OAuth 认证结果。""" + user_data = session.get("df_user") + if not user_data or user_data.get("provider") != "github": + return None + + return AuthResult( + user_id=user_data["user_id"], + display_name=user_data.get("display_name"), + email=user_data.get("email"), + raw_token=user_data.get("raw_token"), + ) + + def get_auth_info(self) -> dict: + """返回 GitHub OAuth 前端配置。""" + return { + "action": "redirect", + "url": "/api/auth/github/login", + "label": os.environ.get("AUTH_DISPLAY_NAME", "GitHub Login"), + } +``` + +```python +# py-src/data_formulator/auth_gateways/github_gateway.py + +import os +import logging +import urllib.parse +from flask import Blueprint, request, redirect, session, jsonify + +from data_formulator.auth_providers.base import AuthResult + +logger = logging.getLogger(__name__) + +github_bp = Blueprint("github_auth", __name__, url_prefix="/api/auth/github") + + +@github_bp.route("/login") +def github_login(): + """重定向到 GitHub 授权页。""" + client_id = os.environ.get("GITHUB_CLIENT_ID", "") + redirect_uri = request.url_root.rstrip("/") + "/api/auth/github/callback" + scope = "read:user user:email" + params = urllib.parse.urlencode({ + "client_id": client_id, + "redirect_uri": redirect_uri, + "scope": scope, + }) + return redirect(f"https://github.com/login/oauth/authorize?{params}") + + +@github_bp.route("/callback") +def github_callback(): + """GitHub OAuth 回调 — 用授权码换取 access_token 并查询用户信息。""" + import requests as http_requests + + code = request.args.get("code") + if not code: + return 
jsonify({"error": "Missing authorization code"}), 400 + + client_id = os.environ.get("GITHUB_CLIENT_ID", "") + client_secret = os.environ.get("GITHUB_CLIENT_SECRET", "") + redirect_uri = request.url_root.rstrip("/") + "/api/auth/github/callback" + + # 用 code 换 access_token + token_resp = http_requests.post( + "https://github.com/login/oauth/access_token", + json={"client_id": client_id, "client_secret": client_secret, + "code": code, "redirect_uri": redirect_uri}, + headers={"Accept": "application/json"}, + timeout=10, + ) + if not token_resp.ok: + return jsonify({"error": "Failed to exchange code for token"}), 502 + + access_token = token_resp.json().get("access_token") + if not access_token: + return jsonify({"error": "No access_token in response"}), 502 + + # 查询 GitHub 用户信息 + user_resp = http_requests.get( + "https://api.github.com/user", + headers={"Authorization": f"Bearer {access_token}", "Accept": "application/json"}, + timeout=10, + ) + if not user_resp.ok: + return jsonify({"error": "Failed to fetch GitHub user info"}), 502 + + user_info = user_resp.json() + user_id = str(user_info.get("id", "")) + login = user_info.get("login", "") + + session["df_user"] = { + "user_id": f"github:{user_id}", + "display_name": user_info.get("name") or login, + "email": user_info.get("email"), + "raw_token": access_token, + "provider": "github", + } + logger.info("GitHub login successful: user=%s (%s)", login, user_id) + + return redirect("/") +``` + +### 3.5 重构后的 auth.py + +```python +# py-src/data_formulator/auth.py (重构) + +""" +Authentication and identity management for Data Formulator. 
+ +Single AuthProvider + anonymous fallback: + AUTH_PROVIDER=oidc → OIDCProvider → user: + ALLOW_ANONYMOUS=true → Browser UUID → browser: +""" + +import logging +import re +import os +from typing import Optional +from flask import request, g, Flask + +from data_formulator.auth_providers.base import ( + AuthProvider, AuthResult, AuthenticationError, +) +from data_formulator.auth_providers import ( + get_provider_class, list_available_providers, +) + +logger = logging.getLogger(__name__) + +_MAX_IDENTITY_LENGTH = 256 +_IDENTITY_RE = re.compile(r'^[\w@.\-+/: ]+$', re.ASCII) + +# 主 Provider,由 init_auth() 初始化 +_provider: Optional[AuthProvider] = None +_allow_anonymous: bool = True + + +def _validate_identity_value(value: str, source: str) -> str: + value = value.strip() + if not value: + raise ValueError(f"Empty identity value from {source}") + if len(value) > _MAX_IDENTITY_LENGTH: + raise ValueError(f"Identity from {source} exceeds {_MAX_IDENTITY_LENGTH} chars") + if not _IDENTITY_RE.match(value): + raise ValueError(f"Identity from {source} contains disallowed characters") + return value + + +def init_auth(app: Flask) -> None: + """初始化认证。在 app 创建后调用一次。 + + 配置模型极简: + AUTH_PROVIDER=oidc ← 选一种(不设置 = 纯匿名模式) + ALLOW_ANONYMOUS=false ← 仅在需要强制登录时设置(默认 true,允许匿名回退) + """ + global _provider, _allow_anonymous + + _allow_anonymous = os.environ.get("ALLOW_ANONYMOUS", "true").lower() in ("true", "1", "yes") + provider_name = os.environ.get("AUTH_PROVIDER", "").strip().lower() + + if not provider_name or provider_name == "anonymous": + logger.info("Auth mode: anonymous only (no AUTH_PROVIDER configured)") + return + + provider_cls = get_provider_class(provider_name) + if not provider_cls: + logger.error("Unknown AUTH_PROVIDER: '%s'. Available: %s", + provider_name, ", ".join(list_available_providers())) + return + + try: + provider: AuthProvider = provider_cls() + + if not provider.enabled: + logger.error( + "AUTH_PROVIDER='%s' is set but required configuration is missing. 
" + "Provider will NOT be activated. Check environment variables.", + provider_name, + ) + return + + provider.on_configure(app) + _provider = provider + logger.info("Auth provider '%s' activated", provider_name) + except Exception as e: + logger.error("Auth provider '%s' failed to init: %s", provider_name, e) + + logger.info( + "Auth mode: %s%s", + provider_name or "anonymous", + " + anonymous fallback" if _allow_anonymous else " (login required)", + ) + + +def get_identity_id() -> str: + """获取当前请求的命名空间身份 ID。 + + 逻辑: + 1. 主 Provider 认证成功 → user: + 2. ALLOW_ANONYMOUS=true + X-Identity-Id 头 → browser: + 3. 以上均无 → 401 + """ + # 尝试主 Provider + if _provider: + try: + result = _provider.authenticate(request) + if result is not None: + validated = _validate_identity_value(result.user_id, _provider.name) + logger.debug("Authenticated via %s: user:%s...", _provider.name, validated[:8]) + g.df_auth_result = result + return f"user:{validated}" + except AuthenticationError as e: + logger.warning("Auth provider '%s' rejected request: %s", e.provider, e) + raise ValueError(f"Authentication failed: {e}") + + # 匿名回退 + if _allow_anonymous: + client_identity = request.headers.get("X-Identity-Id") + if client_identity: + if ":" in client_identity: + identity_value = client_identity.split(":", 1)[1] + else: + identity_value = client_identity + validated = _validate_identity_value(identity_value, "X-Identity-Id header") + return f"browser:{validated}" + + raise ValueError("Authentication required. 
Please log in.") + + +def get_auth_result() -> Optional[AuthResult]: + """获取当前请求的完整认证结果。 + + 仅在 get_identity_id() 通过主 Provider 认证成功后可用。 + browser 身份请求返回 None。 + + 用途: + - 获取 raw_token 用于 SSO 透传 + - 获取 display_name / email 用于 UI 显示 + """ + return getattr(g, "df_auth_result", None) + + +def get_sso_token() -> Optional[str]: + """获取当前用户的 SSO access token,用于透传给外部系统。 + + Returns: + access_token 字符串,或 None(匿名用户 / Provider 不提供 token) + """ + result = get_auth_result() + return result.raw_token if result else None +``` + +### 3.6 前端 OIDC 登录流程 + +前端使用 **PKCE (Proof Key for Code Exchange)** 流程 — 这是 SPA 的标准 OIDC 方式,不需要 client secret。 + +推荐使用 `oidc-client-ts` 库(轻量、标准兼容、维护活跃)。 + +```typescript +// src/app/oidcConfig.ts — 运行时从后端获取配置(简化版) + +import { UserManager, WebStorageStateStore, User } from "oidc-client-ts"; + +let _userManager: UserManager | null = null; +let _configPromise: Promise<{authority: string; clientId: string; redirectUri: string} | null> | null = null; + +// 从统一端点获取 OIDC 配置(无需前端编译时配置) +export async function getOidcConfig(): Promise<{authority: string; clientId: string; redirectUri: string} | null> { + if (_configPromise) return _configPromise; + + _configPromise = fetch('/api/auth/info') + .then(r => r.ok ? 
r.json() : null)
+ .then(info => {
+ if (info?.provider !== 'oidc' || !info?.oidc) return null;
+ return {
+ authority: info.oidc.authority,
+ clientId: info.oidc.clientId,
+ redirectUri: info.oidc.redirectUri || `${window.location.origin}/callback`,
+ };
+ })
+ .catch(() => null);
+
+ return _configPromise;
+}
+
+export async function getUserManager(): Promise<UserManager | null> {
+ if (_userManager) return _userManager;
+ const config = await getOidcConfig();
+ if (!config) return null;
+
+ _userManager = new UserManager({
+ authority: config.authority,
+ client_id: config.clientId,
+ redirect_uri: config.redirectUri,
+ response_type: "code",
+ scope: "openid profile email",
+ automaticSilentRenew: true,
+ userStore: new WebStorageStateStore({ store: window.localStorage }),
+ });
+
+ return _userManager;
+}
+
+export async function getAccessToken(): Promise<string | null> {
+ const mgr = await getUserManager();
+ if (!mgr) return null;
+ const user = await mgr.getUser();
+ if (!user || user.expired) return null;
+ return user.access_token;
+}
+```
+
+前端认证初始化统一通过 `/api/auth/info` 端点驱动(详见 [3.9.9 节](#399-前端适配统一登录入口)),一次请求确定认证模式,无需串行回退。
+
+修改 `fetchWithIdentity` 以自动携带 OIDC token:
+
+```typescript
+// src/app/utils.tsx — fetchWithIdentity 增强
+
+export async function fetchWithIdentity(
+ url: string | URL,
+ options: RequestInit = {}
+): Promise<Response> {
+ const urlString = typeof url === "string" ? 
url : url.toString();
+
+ if (urlString.startsWith("/api/")) {
+ const headers = new Headers(options.headers);
+
+ // 身份标识 (所有请求)
+ const namespacedIdentity = await getCurrentNamespacedIdentity();
+ headers.set("X-Identity-Id", namespacedIdentity);
+ headers.set("Accept-Language", getAgentLanguage());
+
+ // OIDC token (如果可用)
+ const accessToken = await getAccessToken(); // 从 oidcConfig.ts
+ if (accessToken) {
+ headers.set("Authorization", `Bearer ${accessToken}`);
+ }
+
+ options = { ...options, headers };
+ }
+
+ return fetch(url, options);
+}
+```
+
+### 3.7 OIDC 回调页面
+
+> **国际化约定**:所有用户可见的文本使用 `react-i18next` 的 `useTranslation()` / `t()` 获取,
+> 翻译 key 统一放在 `auth.*` 命名空间下(复用 0.6 已有的 i18n 基础设施)。
+> UI 样式对齐 0.6 `LoginView.tsx` 的 MUI Paper 居中卡片风格。
+
+```typescript
+// src/app/OidcCallback.tsx
+
+import { useEffect, useState } from "react";
+import { Box, CircularProgress, Typography, Alert, Paper, alpha, useTheme } from "@mui/material";
+import { useTranslation } from "react-i18next";
+import { getUserManager } from "./oidcConfig";
+import dfLogo from "../assets/df-logo.png";
+
+export function OidcCallback() {
+ const { t } = useTranslation();
+ const theme = useTheme();
+ const [error, setError] = useState<string | null>(null);
+
+ useEffect(() => {
+ (async () => {
+ try {
+ const mgr = await getUserManager();
+ if (mgr) {
+ await mgr.signinRedirectCallback();
+ window.location.href = "/";
+ }
+ } catch (err: any) {
+ setError(err?.message || "Unknown error");
+ }
+ })();
+ }, []);
+
+ return (
+
+
+ {error ? 
( + + {t("auth.callbackFailed", { message: error })} + + ) : ( + <> + + + {t("auth.completingLogin")} + + + )} + + + ); +} +``` + +在路由中注册回调路径(如使用 React Router)或在 `App.tsx` 中检测 URL path。 + +### 3.8 登录 / 登出 UI + +登录 UI 由 `/api/auth/info` 返回的 `action` 字段驱动(见 3.9.9), +沿用 0.6 `LoginView.tsx` 的居中 Paper 卡片布局和 Fluent 配色。 + +```typescript +// src/app/AuthButton.tsx — AppBar 中的登录/登出按钮 + +import { useTranslation } from "react-i18next"; + +function AuthButton() { + const { t } = useTranslation(); + const identity = useSelector((state: DataFormulatorState) => state.identity); + const [mgr, setMgr] = useState(null); + + useEffect(() => { getUserManager().then(setMgr); }, []); + + if (identity?.type === "user") { + return ( + + + {t("auth.connectedAs", { name: identity.displayName || identity.id })} + + mgr?.signoutRedirect()} + title={t("auth.signOut")} + > + + + + ); + } + + if (mgr) { + return ( + + ); + } + + return null; +} +``` + +#### 3.8a 新增 i18n key + +在 `src/i18n/locales/` 的 `en/common.json` 和 `zh/common.json` 的 `auth` 节点下新增(复用 0.6 现有 key,仅补充 OIDC 新增的): + +```json +// en/common.json — auth 节点新增 +{ + "auth": { + "completingLogin": "Completing login...", + "callbackFailed": "Login callback failed: {{message}}", + "oidcLogin": "SSO Login", + "oidcLoggingIn": "Logging in via SSO...", + "oidcDescription": "Login with your enterprise account via Single Sign-On", + "sessionExpired": "Session expired. Please sign in again.", + "silentRenewFailed": "Background token refresh failed. Redirecting to login..." + } +} +``` + +```json +// zh/common.json — auth 节点新增 +{ + "auth": { + "completingLogin": "正在完成登录...", + "callbackFailed": "登录回调失败:{{message}}", + "oidcLogin": "SSO 单点登录", + "oidcLoggingIn": "正在通过 SSO 登录...", + "oidcDescription": "使用企业账号通过单点登录系统认证", + "sessionExpired": "会话已过期,请重新登录。", + "silentRenewFailed": "后台令牌刷新失败,正在跳转到登录页..." 
+ } +} +``` + +> **说明**:0.6 已有的 `auth.signIn`、`auth.signOut`、`auth.connectedAs`、`auth.continueAsGuest`、 +> `auth.guestDescription`、`auth.ssoLogin`、`auth.ssoPopupBlocked` 等 key 原样复用,不重复定义。 +> 新增 key 仅覆盖 OIDC PKCE 流程特有的场景(回调页、静默刷新失败等)。 + +### 3.8b Token 生命周期管理 + +OIDC access_token 有有限的有效期(通常 5~60 分钟)。前端必须妥善处理 token 过期和刷新,否则用户会在使用过程中突然收到 401 错误。 + +#### 静默刷新(Silent Renew) + +前端 `oidc-client-ts` 配置了 `automaticSilentRenew: true`,会在 token 过期前自动通过 iframe 向 IdP 发起无感刷新: + +``` +Token 有效期: 3600s (1h) + │ + ┌───────────────┼─────────────────┐ + │ │ │ + 0s 3300s (55min) 3600s + │ │ │ + 签发 自动触发 token + signinSilent() 过期 + │ + ├─ 成功 → 无缝更新 access_token + └─ 失败 → 触发重新登录 +``` + +#### 刷新失败的处理 + +静默刷新可能因以下原因失败: +- IdP session 已过期(用户在 IdP 端登出或 session 超时) +- iframe 被 CSP 策略阻止 +- 网络错误 + +```typescript +// src/app/oidcConfig.ts — 刷新失败处理 + +const mgr = await getUserManager(); +if (mgr) { + mgr.events.addSilentRenewError(() => { + console.warn("Silent renew failed, redirecting to login..."); + mgr.signinRedirect(); + }); +} +``` + +#### 后端 401 响应与前端重试 + +当后端 `OIDCProvider` 检测到过期 token 时,抛出 `AuthenticationError`,`get_identity_id()` 将其转为 `ValueError`,API 层返回 `401`。前端 `fetchWithIdentity` 应拦截 401 并触发 token 刷新: + +```typescript +// src/app/utils.tsx — fetchWithIdentity 增强:401 自动重试 + +export async function fetchWithIdentity( + url: string | URL, + options: RequestInit = {} +): Promise { + const resp = await _doFetch(url, options); + + if (resp.status === 401) { + const mgr = await getUserManager(); + if (mgr) { + try { + await mgr.signinSilent(); + return _doFetch(url, options); // 用新 token 重试一次 + } catch { + mgr.signinRedirect(); // 静默刷新失败,跳转登录 + return resp; + } + } + } + + return resp; +} +``` + +#### CORS 和 CSP 注意事项 + +OIDC PKCE 流程涉及跨域交互,生产部署需确保: + +| 配置项 | 说明 | +|-------|------| +| **CSP `frame-src`** | 允许 IdP 域名,`signinSilent()` 使用 iframe | +| **CORS** | 如果 DF 前端和后端不同源,后端需配置 `Access-Control-Allow-Origin` | +| **IdP redirect URI** | IdP 侧注册的 callback URL 必须与实际部署域名一致 | +| **HTTPS** | 
生产环境必须全链路 HTTPS,否则 cookie / token 可能泄漏 | + +#### 设计决策与局限性 + +**Phase 1 决策:后端不持有 refresh_token,token 刷新完全由前端负责。** + +| 项目 | Phase 1 现状 | +|------|-------------| +| **access_token 存储** | 前端 `oidc-client-ts` UserManager 内存中 | +| **refresh_token 存储** | 前端 `oidc-client-ts` 内部管理,不传给后端 | +| **刷新方式** | 前端 `signinSilent()`(iframe 或 refresh_token grant) | +| **后端角色** | 纯无状态验证(每次请求校验 `Authorization: Bearer `) | +| **`AuthResult.raw_token`** | 仅存当次请求的 access_token,用于 SSO 透传到下游 API | + +**选择此方案的理由:** + +1. **架构简单** — 后端无需管理 token 存储、加密、过期清理等有状态逻辑,完全无状态可水平扩展。 +2. **安全边界清晰** — refresh_token 不经过 DF 后端,减少了服务端 token 泄漏的攻击面。SPA + PKCE 是 OIDC 推荐的公共客户端模式。 +3. **与 Data Formulator 使用场景匹配** — DF 是交互式数据分析工具,用户操作间隔较短(通常不超过 token 有效期),前端静默刷新足以覆盖绝大多数场景。 + +**已知局限性:** + +| 局限 | 影响 | 缓解措施 | +|------|------|---------| +| **长时间后台任务** | 如果 DF 未来支持长时间运行的后台任务(>token 有效期),后端持有的 access_token 会过期,下游 API 调用失败 | 目前不存在此场景;Phase 2 可引入后端 refresh_token 管理 | +| **IdP 不支持静默刷新** | 部分 IdP 禁用 iframe(X-Frame-Options)或不支持 `prompt=none`,导致前端 `signinSilent()` 失败 | 回退到 `signinRedirect()`(重新登录);或在 IdP 侧配置允许 iframe | +| **短有效期 token + 高频操作** | 如果 IdP 签发的 access_token 有效期极短(<5 分钟),频繁的静默刷新可能产生明显延迟 | 建议 IdP 配置合理的 token 有效期(≥15 分钟) | +| **多标签页 token 同步** | 用户同时打开多个 DF 标签页时,各自独立持有 token,刷新时机不同步 | `oidc-client-ts` 支持 `monitorSession` 跨标签页同步;Phase 2 可评估 | + +**Phase 2 可选扩展(仅规划,不在 Phase 1 实现):** + +如果未来出现后端需要长期持有 token 的场景(如后台定时任务、异步数据管道),可考虑: +- 后端 OIDC Confidential Client 模式(使用 `client_secret`),通过 Authorization Code Flow 获取 refresh_token +- 服务端加密存储 refresh_token(可复用 CredentialVault 基础设施) +- `AuthResult` 扩展 `refresh_token` 字段和 `token_expires_at` 时间戳 + +### 3.9 多协议支持:从 OIDC 扩展到 SAML / LDAP / CAS / 反向代理 + +#### 3.9.1 为什么不止 OIDC + +OIDC 覆盖了大部分现代 IdP(Keycloak、Okta、Auth0、Azure AD、Google),但企业环境中仍广泛存在其他认证协议: + +| 协议 | 验证模型 | 典型场景 | access_token 可透传 | +|------|---------|---------|:---:| +| **OAuth 2.0** | 无状态 (access_token per request) | 纯授权场景、旧系统 | **是** — access_token | +| **OIDC** | 无状态 (JWT per request,OAuth2 超集) | 现代 IdP、SaaS | 
**是** — access_token (同 OAuth2) | +| **Azure EasyAuth** | 无状态 (可信 Header) | Azure App Service | **是** — 通过 `/.auth/me` 获取 | +| **反向代理头** | 无状态 (可信 Header) | Authelia / Authentik / nginx / Traefik | 否 — 无 token | +| **SAML 2.0** | 有状态 (Assertion → Session) | ADFS、Shibboleth、PingFederate、OneLogin | 否 — 但可 Token Exchange 换取 | +| **LDAP / AD** | 有状态 (Bind → Session) | 无中心 IdP 的企业/高校 | 否 — 无 token | +| **CAS** | 有状态 (Ticket → Session) | 高校 (Apereo CAS) | 否 — ticket 一次性 | +| **Kerberos / SPNEGO** | 有状态 (Negotiate → Session) | Windows AD 域环境 | 否 — ticket 绑定特定服务 | + +**核心矛盾**:当前 `AuthProvider.authenticate(request)` 假设每个请求自带可验证的凭据(JWT/Header),这对 OIDC 和反向代理头完美适用。但 SAML/LDAP/CAS 需要先完成一个登录流程(浏览器重定向或表单提交),然后用服务端会话(session)识别后续请求。 + +#### 3.9.2 设计方案:双轨模型(Stateless + Session Gateway) + +解决思路是把认证协议分为两类,用不同的机制处理,但最终汇入同一条 AuthProvider 链: + +``` +┌─────────────────────────────────────────────────────────────────────┐ +│ AuthProvider 链 (per-request) │ +│ │ +│ ┌──────────────┐ ┌──────────┐ ┌──────────────┐ ┌────────────┐ │ +│ │ Azure │→ │ OIDC │→ │ Proxy Header │→ │ Session │ │ +│ │ EasyAuth │ │ (JWT) │ │ (可信头) │ │ (Cookie) │ │ +│ └──────────────┘ └──────────┘ └──────────────┘ └─────┬──────┘ │ +│ │ │ +│ A类: 无状态 A类: 无状态 B类 │ +│ (每次请求自带凭据) (每次请求自带凭据) (查session)│ +│ │ │ +│ ┌───────────────────────────────────────────────────────┘ │ +│ │ Session 中的身份从哪来? 
→ Login Gateway 在登录时写入 │ +│ │ │ +│ │ ┌──────────────────────────────────────────────────┐ │ +│ │ │ Login Gateway (Flask routes) │ │ +│ │ │ │ │ +│ │ │ /api/auth/saml/login ←→ SAML IdP │ │ +│ │ │ /api/auth/saml/acs ← SAML Assertion (POST) │ │ +│ │ │ /api/auth/ldap/login ← username + password │ │ +│ │ │ /api/auth/cas/login ←→ CAS Server │ │ +│ │ │ /api/auth/cas/callback← CAS ticket │ │ +│ │ │ │ │ +│ │ │ 验证通过 → session["df_user"] = AuthResult │ │ +│ │ └──────────────────────────────────────────────────┘ │ +│ └─────────────────────────────────────────────────────────────── │ +│ │ +│ 最终 Fallback: Browser UUID │ +└─────────────────────────────────────────────────────────────────────┘ +``` + +**A 类 — 无状态 Provider**(现有设计已覆盖): +- 每个请求自带可独立验证的凭据(JWT Bearer / 可信 Header) +- 直接在 `authenticate(request)` 中完成验证 +- 代表:OIDC、Azure EasyAuth、反向代理头 + +**B 类 — 有状态 Provider**(新增 Login Gateway + SessionProvider): +- 登录时走协议特定的流程(SAML redirect、LDAP bind、CAS redirect) +- 登录成功后在 Flask session 中存储 `AuthResult` +- 后续请求由通用的 `SessionProvider` 从 session 中读取身份 +- 代表:SAML、LDAP、CAS、Kerberos + +**核心优势:Login Gateway 是协议特定的,但 SessionProvider 是通用的。** 新增一种有状态协议只需要写一个 Login Gateway blueprint,不需要修改 AuthProvider 链。 + +#### 3.9.3 ~ 3.9.8 Phase 2+ 扩展(反向代理 / SAML / LDAP / CAS / Login Gateway) + +> **以下内容属于 Phase 2+ 规划,此处仅记录架构扩展点,不展开实现细节。** +> 具体实现代码将在需求明确时编写独立的协议扩展文档。 + +**架构预留的扩展点:** + +1. **`AuthResult` 扩展** — 在 Phase 2 引入 `groups`、`auth_protocol`、`token_expiry`、`to_session_dict()` / `from_session_dict()` 等字段和方法,支持会话序列化。 +2. **反向代理头 Provider** — A 类无状态,通过 `PROXY_TRUSTED_IPS` 校验可信 IP,从 `X-Forwarded-User` 等 header 提取身份。 +3. **SessionProvider** — B 类通用读取端,从 Flask session 中读取 Login Gateway 写入的身份。 +4. **Login Gateway Blueprint** — SAML ACS (`/api/auth/saml/acs`)、LDAP bind (`/api/auth/ldap/login`)、CAS ticket 验证 (`/api/auth/cas/callback`) 等协议特定的登录流程。 +5. **通用登出** — 清除 session + 返回协议特定的 SLO URL。 +6. 
**Gateway 注册** — `_register_login_gateways(app, provider_name)` 根据 `AUTH_PROVIDER` 值按需注册对应的 Blueprint。 + +**新增 B 类协议的步骤**: +1. 在 `auth_providers/` 下创建新的 `.py` 文件,实现 `AuthProvider` 子类(自动发现,无需修改注册表) +2. 实现 Login Gateway Blueprint(完成协议特定的登录流程,写入 `session["df_user"]`) +3. 在 Provider 类中实现 `get_auth_info()` 返回前端交互方式 + +核心代码零修改 — 自动发现机制会扫描到新 Provider,`AUTH_PROVIDER` 环境变量选择激活即可。 + +#### 3.9.9 前端适配:统一登录入口 + +前端通过单一的 `/api/auth/info` 端点获取当前认证模式和所需配置,一次请求搞定。 + +**后端统一认证信息 API — 委托 Provider 自描述(消除 switch 膨胀):** + +```python +# auth.py — 新增 + +@app.route("/api/auth/info") +def auth_info(): + """返回当前认证模式 + 前端所需的配置信息。 + + 通过调用 Provider 的 get_auth_info() 方法获取 Provider 特定配置, + 而非在此处 switch 每种协议。新增 Provider 无需修改此端点。 + """ + provider_name = os.environ.get("AUTH_PROVIDER", "anonymous").strip().lower() + + info = { + "provider": provider_name, + "allow_anonymous": _allow_anonymous, + } + + if _provider: + info.update(_provider.get_auth_info()) + else: + info["action"] = "none" + + return jsonify(info) +``` + +每个 Provider 通过实现 `get_auth_info()` 声明前端交互方式(详见 3.2 节基类定义)。例如: +- `OIDCProvider.get_auth_info()` 返回 `{"action": "frontend", "oidc": {...}}` +- `GitHubOAuthProvider.get_auth_info()` 返回 `{"action": "redirect", "url": "/api/auth/github/login"}` +- `AzureEasyAuthProvider.get_auth_info()` 返回 `{"action": "transparent"}` + +新增 Provider 只需在自己的类中实现此方法,`auth.py` 无需任何修改。 + +**前端统一登录组件:** + +沿用 0.6 `LoginView.tsx` 的 Paper 居中卡片布局、Fluent 配色和 i18n 模式。 +所有用户可见文本通过 `t('auth.*')` 获取,不硬编码任何语言。 + +```typescript +// src/app/LoginPanel.tsx — 根据 /api/auth/info 渲染对应的登录 UI + +import React, { FC, useEffect, useState } from "react"; +import { + Box, Button, TextField, Typography, Divider, + CircularProgress, Alert, Paper, alpha, useTheme, +} from "@mui/material"; +import LoginIcon from "@mui/icons-material/Login"; +import OpenInNewIcon from "@mui/icons-material/OpenInNew"; +import PersonOutlineIcon from "@mui/icons-material/PersonOutline"; +import { useTranslation } from "react-i18next"; +import { 
getUserManager } from "./oidcConfig"; +import dfLogo from "../assets/df-logo.png"; +import { toolName } from "./App"; + +interface AuthInfo { + provider: string; + allow_anonymous: boolean; + action: "frontend" | "redirect" | "form" | "transparent" | "none"; + label?: string; + url?: string; + fields?: string[]; + oidc?: { authority: string; clientId: string; redirectUri: string; scopes: string }; +} + +interface LoginPanelProps { + onGuestContinue: () => void; +} + +export const LoginPanel: FC = ({ onGuestContinue }) => { + const theme = useTheme(); + const { t } = useTranslation(); + + const [authInfo, setAuthInfo] = useState(null); + const [formData, setFormData] = useState({ username: "", password: "" }); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + + useEffect(() => { + fetch("/api/auth/info").then(r => r.json()).then(setAuthInfo).catch(() => null); + }, []); + + if (!authInfo) return null; + if (authInfo.action === "none" || authInfo.action === "transparent") return null; + + const renderAuthAction = () => { + switch (authInfo.action) { + case "frontend": + return ( + <> + + + {t("auth.oidcDescription")} + + + ); + + case "redirect": + return ( + <> + + + {t("auth.ssoDescription")} + + + ); + + case "form": + return ( + { + e.preventDefault(); + if (!formData.username || !formData.password) return; + setLoading(true); + setError(null); + try { + const resp = await fetch(authInfo.url!, { + method: "POST", + credentials: "include", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(formData), + }); + const data = await resp.json(); + if (resp.ok && data.status === "ok") { + window.location.reload(); + } else { + setError(data.message || t("auth.loginFailed", { message: "Unknown error" })); + } + } catch (err: any) { + setError(err.message || "Network error"); + } finally { + setLoading(false); + } + }} + sx={{ width: "100%", display: "flex", flexDirection: "column", gap: 2 }} + > + 
setFormData(f => ({ ...f, username: e.target.value }))} + autoComplete="username" + autoFocus + fullWidth + /> + setFormData(f => ({ ...f, password: e.target.value }))} + autoComplete="current-password" + fullWidth + /> + + + ); + + default: + return null; + } + }; + + return ( + + + + + + {toolName} + + + + + {t("auth.loginSubtitle")} + + + {error && ( + + {t("auth.loginFailed", { message: error })} + + )} + + {renderAuthAction()} + + {authInfo.allow_anonymous && ( + <> + + + {t("auth.or")} + + + + + + {t("auth.guestDescription")} + + + )} + + + ); +}; +``` + +> **说明**:组件中所有用户可见文本均通过 `t('auth.*')` 获取。 +> 复用的 0.6 已有 key:`auth.loginSubtitle`、`auth.username`、`auth.password`、`auth.signIn`、 +> `auth.signingIn`、`auth.loginFailed`、`auth.or`、`auth.continueAsGuest`、`auth.guestDescription`、 +> `auth.ssoLogin`、`auth.ssoDescription`。 +> 0.7 新增 key(已在 3.8a 中定义):`auth.oidcLogin`、`auth.oidcLoggingIn`、`auth.oidcDescription`。 + +**前端 `initAuth` — 纯 action 驱动(不按 provider 名称分支):** + +前端**只看 `action` 字段**,完全不看 `provider` 名称。这样新增 Provider 只要 +复用已有的 action 类型,前端就不用改。 + +```typescript +// src/app/App.tsx — initAuth:纯 action 驱动,零 provider 分支 + +useEffect(() => { + async function initAuth() { + const authInfo = await fetch("/api/auth/info").then(r => r.json()).catch(() => null); + if (!authInfo) { setAuthChecked(true); return; } + + switch (authInfo.action) { + case "frontend": { + // 前端管理的认证流程(OIDC PKCE 等) + // authInfo 中携带了所需配置(如 oidc.authority, oidc.clientId) + const mgr = await getUserManager(); + if (mgr) { + let user = await mgr.getUser(); + if (!user || user.expired) { + try { user = await mgr.signinSilent(); } catch { user = null; } + } + if (user) { + setUserInfo({ name: user.profile.name || "", userId: user.profile.sub }); + setAuthChecked(true); + return; + } + } + break; + } + + case "transparent": { + // 平台已完成认证(Azure EasyAuth、反向代理等),查询身份即可 + try { + const resp = await fetch("/api/auth/whoami"); + if (resp.ok) { + const data = await resp.json(); + if (data.user_id) { + 
setUserInfo({ name: data.display_name || "", userId: data.user_id }); + setAuthChecked(true); + return; + } + } + } catch { /* 未认证 */ } + break; + } + + case "redirect": + case "form": { + // 服务端管理的认证(GitHub OAuth、SAML、LDAP、CAS 等) + // 用户尚未登录时由 LoginPanel 渲染按钮/表单,已登录则从 session 读取 + try { + const resp = await fetch("/api/auth/whoami"); + if (resp.ok) { + const data = await resp.json(); + if (data.user_id) { + setUserInfo({ name: data.display_name || "", userId: data.user_id }); + setAuthChecked(true); + return; + } + } + } catch { /* 未登录 */ } + break; + } + + case "none": + default: + break; + } + + // 匿名模式(或 SSO 未登录 + 允许匿名) + setAuthChecked(true); + } + initAuth(); +}, []); +``` + +**关键设计约束**:`initAuth` 中没有任何 `authInfo.provider === "xxx"` 的判断。 +新增一个 Provider(如 SAML)时,只要其 `get_auth_info()` 返回已有的 action 类型 +(如 `"redirect"`),前端代码零修改。 + +**后端 `/api/auth/whoami` 端点:** + +```python +# auth.py — 新增 + +@app.route("/api/auth/whoami") +def whoami(): + """返回当前 session 中的用户信息(如有)。""" + user_data = session.get("df_user") + if user_data: + return jsonify({ + "user_id": user_data.get("user_id"), + "display_name": user_data.get("display_name"), + "email": user_data.get("email"), + }) + return jsonify({}), 401 +``` + +#### 3.9.10 AuthProvider 模型图 + +单一 Provider + 匿名回退的认证模型: + +``` +AUTH_PROVIDER=oidc (由管理员选一种) + + ┌──────────────────────┐ ┌──────────────┐ + │ 主 Provider │ │ Browser UUID │ + │ (OIDC / GitHub / │ ──→ │ (匿名回退) │ + │ Azure / SAML / LDAP │ 未命中│ │ + │ / CAS / proxy_header)│ │ 仅在 │ + │ │ │ ALLOW_ANONYMOUS│ + │ A类: 每次请求验证 │ │ =true 时生效 │ + │ B类: 从 session 读取 │ └──────────────┘ + └───────────┬───────────┘ + │ + B 类的 session 来自: + ┌────────────────┐ + │ Login Gateway │ + │ ├── GitHub OAuth│ + │ ├── SAML ACS │ + │ ├── LDAP login │ + │ └── CAS callback│ + └────────────────┘ +``` + +#### 3.9.11 SSO Token 透传的协议差异 + +不同协议对 "token 透传到下游数据源" 的支持差异很大。关键区分点是 **协议是否产出一个可作为 Bearer token 的 access_token**: + +> **OIDC 与 OAuth2 的关系**:OIDC 是 OAuth2 的超集(OIDC = OAuth2 + 身份层)。 +> OIDC 在 
OAuth2 的 access_token 之上额外给出一个 id_token (JWT) 以标识"谁在用"。 +> **透传给下游 API 的始终是 OAuth2 的 access_token**,与 OIDC 的 id_token 无关。 +> 因此只要是基于 OAuth2 的流程(无论是否带 OIDC),access_token 都可以透传。 + +| 协议 | 产出物 | 能否透传 | 说明 | +|------|--------|:---:|------| +| **OAuth 2.0 / OIDC** | access_token(opaque 或 JWT) | **能** | `AuthResult.raw_token` 存储 access_token,直接作为下游 API 的 Bearer token | +| **Azure EasyAuth** | 平台托管的 token | **能** | 通过 `/.auth/me` 或 `X-MS-TOKEN-*` 头获取 access_token;本质仍是 OAuth2 | +| **反向代理头** | 无 token(仅 header) | **不能** | 代理已消费了原始 token,DF 只拿到用户名等纯文本 header | +| **SAML 2.0** | XML Assertion | **不能直接用** | Assertion 是 XML 格式且有 Audience 限制;但可通过 RFC 8693 Token Exchange 或 SAML Bearer Assertion Grant (RFC 7522) 向 IdP 换取 OAuth2 access_token | +| **LDAP / AD** | 无(仅验证密码) | **不能** | 没有任何 token 产出 | +| **CAS** | Service Ticket(一次性) | **不能** | Ticket 验证后即失效,不可复用 | +| **Kerberos** | Service Ticket | **不能直接用** | Kerberos ticket 绑定到特定服务,无法代用;但 Windows 域中 Kerberos → OAuth2 的桥接方案存在 | + +**设计应对**: + +- **OAuth2/OIDC 用户**:`AuthResult.raw_token` 持有 access_token,DataSourcePlugin 和 DataLoader 可直接用它调用共享同一 IdP 的下游 API(零额外登录)。 +- **SAML 用户(高级)**:如果 IdP 同时支持 OAuth2(如 ADFS、PingFederate 通常都支持),可在 Login Gateway 中用 SAML Assertion 通过 Token Exchange 换取 OAuth2 access_token,再存入 `AuthResult.raw_token`,从而获得透传能力。这种情况下建议直接走 OIDC 而非 SAML。 +- **LDAP / CAS / 反向代理头用户**:需要走 CredentialVault 路线 —— 用户手动配置下游数据源的凭证(或 API Key),由 CredentialVault 加密存储后供 Plugin 使用。 + +这也是为什么 **Layer 3 CredentialVault 是整个架构不可缺少的一层** —— 它为无法 token 透传的认证协议提供了凭证存储的兜底方案。同时这也说明 **OIDC 是首选协议**(P0 优先级),因为它是唯一能同时解决"身份识别"和"下游透传"两个问题的方案。 + +#### 3.9.12 协议选择指南 + +为方便运维人员选择,提供以下决策树: + +``` +你的组织使用什么身份系统? + │ + ├─ Azure AD / Entra ID + │ ├─ 部署在 Azure App Service? → 用 azure_easyauth (零配置) + │ └─ 其他部署 → 用 oidc (Azure AD 支持 OIDC) + │ + ├─ Keycloak / Okta / Auth0 / Google Workspace → 用 oidc + │ + ├─ ADFS / Shibboleth / PingFederate (仅 SAML) + │ ├─ 能配 OIDC 吗? 
→ 优先 oidc (ADFS/Ping 一般都支持) + │ └─ 只有 SAML → 用 saml (session 模式) + │ + ├─ IC 卡 / 智能卡 / PKI 证书 + │ ├─ IdP 能签发 OAuth2 token? → 用 oidc/saml + Token Exchange (未来扩展,见 3.9.12) + │ └─ 否 → 暂不支持,建议升级 IdP 或使用 API Key + CredentialVault + │ + ├─ Authelia / Authentik / nginx / Traefik (反向代理已认证) + │ └─ 用 proxy_header + │ + ├─ 只有 LDAP / Active Directory (无 SSO 中心) + │ └─ 用 ldap (session 模式) + │ + ├─ CAS (高校) + │ └─ 用 cas (session 模式) + │ + └─ 无任何身份系统 + └─ 默认 Browser UUID (匿名模式) +``` + +#### 3.9.13 新增依赖说明 + +各协议的 Python 依赖作为 **可选依赖** 安装,基础安装不引入: + +```toml +# pyproject.toml — optional dependencies +[project.optional-dependencies] +oidc = ["PyJWT>=2.8", "cryptography>=41.0"] +saml = ["python3-saml>=1.16"] +ldap = ["ldap3>=2.9"] +cas = [] # 纯标准库实现,无额外依赖 + +# 快捷安装全部认证协议 +auth-all = ["PyJWT>=2.8", "cryptography>=41.0", "python3-saml>=1.16", "ldap3>=2.9"] +``` + +#### 3.9.14 优先级建议 + +| 优先级 | 协议 | 理由 | +|:---:|------|------| +| **P0** | OIDC | 覆盖面最广,现代 IdP 基本都支持,且是唯一支持 token 透传的协议 | +| **P0** | Browser UUID | 保持向后兼容的匿名模式 | +| **P1** | 反向代理头 | 自建部署最常见的方式,实现简单 | +| **P1** | LDAP | 覆盖没有 SSO 中心的传统企业/高校 | +| **P2** | SAML | 大型企业有时只提供 SAML,但 ADFS/PingFederate 通常也支持 OIDC | +| **P3** | CAS | 受众窄(主要是高校),需求出现时再实现 | + +--- + +## 4. Layer 2:数据源插件系统 (DataSourcePlugin) + +### 4.1 设计思路 + +BI 报表系统(Superset、Metabase、Power BI 等)的集成需求远超现有 `ExternalDataLoader` 的能力: + +| 能力 | ExternalDataLoader | BI 系统需要 | +|------|:--:|:--:| +| 连接参数 | 简单 key-value 表单 | URL + 认证流程 (JWT/OAuth/SSO) | +| 数据浏览 | `list_tables()` → 表名列表 | 数据集 + 仪表盘 + 报表 + 筛选条件 | +| 权限模型 | 无 (用数据库账号的权限) | 需尊重 BI 系统自身的 RBAC/RLS | +| 前端 UI | 通用字段表单 | 需要专用目录浏览、搜索、筛选等交互 | +| 独立 API 路由 | 无 | 需要注册 Blueprint | + +因此,BI 系统使用独立的 **DataSourcePlugin** 机制,与 `ExternalDataLoader` **并行存在**。 + +### 4.2 Plugin 基类 + +```python +# py-src/data_formulator/plugins/base.py + +from abc import ABC, abstractmethod +from typing import Any, Optional +from flask import Blueprint + + +class DataSourcePlugin(ABC): + """外部数据源插件基类。 + + 每个插件实现以下契约: + 1. 
manifest() — 自我描述(ID、名称、配置需求) + 2. create_blueprint() — Flask 路由(认证 + 目录 + 数据拉取) + 3. get_frontend_config() — 传给前端的非敏感配置 + 4. on_enable() / on_disable() — 生命周期钩子 + """ + + @staticmethod + @abstractmethod + def manifest() -> dict[str, Any]: + """插件元数据。 + + Returns: + { + "id": "superset", + "name": "Apache Superset", + "icon": "superset", + "description": "从 Superset 加载数据集和仪表盘数据", + "version": "1.0.0", + "env_prefix": "PLG_SUPERSET", + "required_env": ["PLG_SUPERSET_URL"], + "optional_env": ["PLG_SUPERSET_TIMEOUT"], + "auth_modes": ["sso", "jwt", "password"], + "capabilities": ["datasets", "dashboards", "filters"], + } + """ + ... + + @abstractmethod + def create_blueprint(self) -> Blueprint: + """创建 Flask Blueprint。 + + 路由前缀: /api/plugins// + 示例路由: + /api/plugins/superset/auth/login + /api/plugins/superset/auth/status + /api/plugins/superset/catalog/datasets + /api/plugins/superset/data/load-dataset + """ + ... + + @abstractmethod + def get_frontend_config(self) -> dict[str, Any]: + """返回传给前端的配置(不包含敏感信息)。 + + Returns: + { + "auth_modes": ["sso", "jwt", "password"], + "sso_login_url": "http://superset:8088/df-sso-bridge/", + "capabilities": ["datasets", "dashboards", "filters"], + } + """ + ... 
+ + def on_enable(self, app) -> None: + """插件启用时调用。可初始化连接池、缓存等。""" + pass + + def on_disable(self) -> None: + """插件禁用时调用。""" + pass + + def get_auth_status(self, session: dict) -> Optional[dict[str, Any]]: + """返回当前用户在此插件中的认证状态。 + + Returns: + {"authenticated": True, "user": "john", ...} 或 None + """ + return None + + def supports_sso_passthrough(self) -> bool: + """此插件是否支持 SSO token 透传。 + + 如果返回 True,插件可以从 auth.get_sso_token() 获取用户的 + OIDC access token,直接用于调用外部系统 API。 + """ + return False +``` + +### 4.3 插件与 SSO 的集成模式 + +每个插件可以支持多种认证方式,根据部署环境自动选择: + +``` +┌──────────────────────────────────────────────────────────────────┐ +│ 插件认证模式选择 │ +│ │ +│ 场景 A: DF 有 SSO + 外部系统也接了同一 IdP │ +│ ───────────────────────────────────────── │ +│ → 自动使用 SSO Token 透传 │ +│ → 用户无需额外登录 │ +│ → 外部系统通过 token 识别用户,应用自身 RBAC │ +│ │ +│ 场景 B: DF 有 SSO + 外部系统没有接 SSO │ +│ ───────────────────────────────────── │ +│ → 首次使用时,用户在插件 UI 中输入外部系统的账号/密码/API Key │ +│ → 凭证存入 CredentialVault(按 SSO user_id 关联) │ +│ → 后续自动从 Vault 取出,无需重复输入 │ +│ → 换设备后只要 SSO 登录,凭证自动可用 │ +│ │ +│ 场景 C: DF 无 SSO(本地匿名模式) │ +│ ────────────────────────────── │ +│ → 用户在插件 UI 中输入外部系统的账号密码 │ +│ → Token 存在 Flask Session 中(仅当次会话有效) │ +│ → 行为与 0.6 版本一致 │ +└──────────────────────────────────────────────────────────────────┘ +``` + +插件内部的认证路由应检查这三种模式: + +```python +# 插件认证路由模板 + +@bp.route("/auth/login", methods=["POST"]) +def plugin_login(): + """处理插件认证。自动选择最佳模式。""" + + # 模式 1: SSO Token 透传 + sso_token = get_sso_token() + if sso_token and plugin.supports_sso_passthrough(): + # 用 SSO token 直接调用外部系统的 token exchange / introspection + external_token = exchange_sso_token(sso_token) + if external_token: + store_plugin_session(plugin_id, external_token) + return jsonify({"status": "ok", "auth_mode": "sso"}) + + # 模式 2: 从 Credential Vault 取已存储的凭证 + vault = get_credential_vault() + identity = get_identity_id() + stored = vault.retrieve(identity, plugin_id) if vault else None + if stored: + external_token = authenticate_with_stored_credentials(stored) 
+ if external_token: + store_plugin_session(plugin_id, external_token) + return jsonify({"status": "ok", "auth_mode": "vault"}) + + # 模式 3: 用户手动输入 + data = request.get_json() + username = data.get("username") + password = data.get("password") + if username and password: + external_token = authenticate_with_credentials(username, password) + # 可选:存入 Vault 以便下次自动使用 + if vault and data.get("remember", True): + vault.store(identity, plugin_id, {"username": username, "password": password}) + store_plugin_session(plugin_id, external_token) + return jsonify({"status": "ok", "auth_mode": "credentials"}) + + return jsonify({"status": "needs_login", "available_modes": get_available_modes()}) +``` + +### 4.4 插件注册与发现 + +#### 4.4.1 注册机制方案选型 + +新增一个数据源插件时,注册表是否需要改代码?有三种方案: + +| 方案 | 新增插件要改代码吗 | 复杂度 | 安全性 | 适合场景 | +|------|:---:|:---:|:---:|------| +| **A. 硬编码列表** | 要,改注册表一行 | 最低 | 最高(只加载白名单) | 插件由同一团队开发,随主项目发布 | +| **B. 目录自动扫描** | 不要,放进目录就生效 | 低 | 中(需约定和校验) | 插件持续增加,希望"拖入即用" | +| **C. setuptools entry_points** | 不要,`pip install` 后自动注册 | 中 | 中 | 插件作为独立 pip 包发布 | + +**方案 A(硬编码列表)** 是现有 `ExternalDataLoader` 的做法(`_LOADER_SPECS` 列表),也是 0.7 系统的成熟模式。优点是简单透明,缺点是每加一个插件都要改 `__init__.py`。 + +**方案 C(entry_points)** 适合有第三方插件生态的平台(如 pytest、Flask 扩展),对当前项目来说过于重型,且前端部分无法通过 pip 安装(仍需编译到主 bundle)。 + +**选择方案 B(目录自动扫描)** — 理由: + +1. **插件会持续增长** — 未来对接的报表系统只会越来越多,每次加一个都改注册表是无意义的样板修改 +2. **插件都是内部开发** — 不需要跨包的 entry_points 机制 +3. **manifest 自描述** — 插件的 ID、必需环境变量等信息已经在 `manifest()` 中声明,不需要在注册表中重复 +4. 
**安全保底** — 通过 `PLUGIN_BLOCKLIST` 环境变量提供黑名单能力 + +> **统一范式**:AuthProvider 与 DataSourcePlugin 采用相同的目录自动扫描机制。 +> 区别仅在于激活策略 — 认证是**单选**的(`AUTH_PROVIDER` 环境变量指定唯一活跃 Provider), +> DataSourcePlugin 是**并行**的(所有已发现的插件同时存在)。详见 4.4.8 对比表。 + +#### 4.4.2 插件约定 + +每个插件是 `plugins/` 目录下的一个 Python 子包,必须满足以下约定: + +``` +plugins/superset/ +├── __init__.py ← 必须暴露 plugin_class = SupersetPlugin +├── superset_client.py +├── auth_bridge.py +├── catalog.py +└── routes/ + ├── auth.py + ├── catalog.py + └── data.py +``` + +`__init__.py` 的最低要求: + +```python +# plugins/superset/__init__.py + +from .plugin import SupersetPlugin + +# 框架通过此变量发现插件类 +plugin_class = SupersetPlugin +``` + +框架通过 `plugin_class` 变量找到插件类,再调用 `plugin_class.manifest()` 获取自描述信息(ID、必需环境变量等)。不需要在任何注册表中手动登记。 + +#### 4.4.3 自动扫描实现 + +```python +# py-src/data_formulator/plugins/__init__.py + +""" +数据源插件自动发现与注册。 + +扫描 plugins/ 目录下所有子包,查找暴露 plugin_class 变量的模块。 +通过 manifest() 中的 required_env 判断是否启用。 +通过 PLUGIN_BLOCKLIST 环境变量支持显式禁用。 + +新增插件步骤: + 1. 在 plugins/ 下创建子目录 + 2. __init__.py 中暴露 plugin_class = YourPlugin + 3. .env 中设置必需环境变量 + 4. 
重启服务 → 自动发现、自动注册 + 无需修改任何现有代码。 +""" + +import importlib +import logging +import os +import pkgutil +from typing import Any + +from data_formulator.plugins.base import DataSourcePlugin + +_log = logging.getLogger(__name__) + +ENABLED_PLUGINS: dict[str, DataSourcePlugin] = {} +DISABLED_PLUGINS: dict[str, str] = {} + +# 显式黑名单:PLUGIN_BLOCKLIST=powerbi,grafana +_BLOCKLIST = set( + p.strip() + for p in os.environ.get("PLUGIN_BLOCKLIST", "").split(",") + if p.strip() +) + + +def discover_and_register(app) -> None: + """扫描 plugins/ 子包,发现并注册所有已启用的插件。 + + 在 app.py 的 _register_blueprints() 中调用一次。 + """ + for finder, pkg_name, ispkg in pkgutil.iter_modules(__path__): + # 跳过非包(如 base.py, data_writer.py)和黑名单 + if not ispkg: + continue + if pkg_name in _BLOCKLIST: + DISABLED_PLUGINS[pkg_name] = "Blocked by PLUGIN_BLOCKLIST" + _log.info("Plugin '%s' blocked by PLUGIN_BLOCKLIST", pkg_name) + continue + + try: + mod = importlib.import_module(f"data_formulator.plugins.{pkg_name}") + except ImportError as exc: + DISABLED_PLUGINS[pkg_name] = f"Missing dependency: {exc.name}" + _log.info("Plugin '%s' disabled (import error): %s", pkg_name, exc) + continue + + # 检查是否暴露了 plugin_class + plugin_cls = getattr(mod, "plugin_class", None) + if plugin_cls is None: + continue # 不是插件目录(可能是工具模块),静默跳过 + if not (isinstance(plugin_cls, type) and issubclass(plugin_cls, DataSourcePlugin)): + _log.warning( + "Plugin '%s': plugin_class is not a DataSourcePlugin subclass, skipped", + pkg_name, + ) + continue + + # 从 manifest 获取元数据 + try: + manifest = plugin_cls.manifest() + except Exception as exc: + DISABLED_PLUGINS[pkg_name] = f"manifest() failed: {exc}" + _log.error("Plugin '%s' manifest() failed: %s", pkg_name, exc) + continue + + plugin_id = manifest["id"] + required_env = manifest.get("required_env", []) + + # 检查必需环境变量 + missing_env = [e for e in required_env if not os.environ.get(e)] + if missing_env: + DISABLED_PLUGINS[plugin_id] = f"Not configured: {', '.join(missing_env)}" + _log.info( + "Plugin 
'%s' disabled: missing env %s", + plugin_id, ", ".join(missing_env), + ) + continue + + # 实例化、注册 Blueprint、启用 + try: + plugin: DataSourcePlugin = plugin_cls() + bp = plugin.create_blueprint() + app.register_blueprint(bp) + plugin.on_enable(app) + + ENABLED_PLUGINS[plugin_id] = plugin + _log.info( + "Plugin '%s' enabled (auto-discovered from plugins/%s/)", + plugin_id, pkg_name, + ) + except Exception as exc: + DISABLED_PLUGINS[plugin_id] = str(exc) + _log.error( + "Plugin '%s' failed to initialize: %s", + plugin_id, exc, exc_info=True, + ) +``` + +#### 4.4.4 发现流程图 + +``` +plugins/ +├── __init__.py ← discover_and_register() 在这里 +├── base.py ← DataSourcePlugin 基类 (ispkg=False, 跳过) +├── data_writer.py ← 工具模块 (ispkg=False, 跳过) +├── superset/ ← ispkg=True +│ └── __init__.py → plugin_class = SupersetPlugin +│ → manifest(): required_env=["PLG_SUPERSET_URL"] +│ → os.environ["PLG_SUPERSET_URL"] 存在? +│ → 是 → 实例化 → 注册 Blueprint → ENABLED ✅ +│ → 否 → DISABLED (Not configured) +├── metabase/ ← ispkg=True +│ └── __init__.py → plugin_class = MetabasePlugin +│ → manifest(): required_env=["PLG_METABASE_URL"] +│ → os.environ["PLG_METABASE_URL"] 不存在 +│ → DISABLED (Not configured) +└── _helpers/ ← ispkg=True, 但无 plugin_class → 静默跳过 + └── __init__.py → (没有 plugin_class 变量) +``` + +#### 4.4.5 新增插件的完整步骤 + +以新增一个 Grafana 插件为例: + +**步骤 1**:创建插件目录和代码 + +``` +plugins/grafana/ +├── __init__.py # plugin_class = GrafanaPlugin +├── plugin.py # GrafanaPlugin(DataSourcePlugin) 实现 +├── grafana_client.py # Grafana REST API 封装 +└── routes/ + ├── auth.py # /api/plugins/grafana/auth/* + ├── catalog.py # /api/plugins/grafana/catalog/* + └── data.py # /api/plugins/grafana/data/* +``` + +**步骤 2**:在 `.env` 中设置环境变量 + +```bash +PLG_GRAFANA_URL=http://grafana.example.com:3000 +``` + +**步骤 3**:重启服务 + +``` + Loading data source plugins... 
+ Plugin 'grafana' enabled (auto-discovered from plugins/grafana/)
+```
+
+**核心代码改动:0 行。** 不需要修改 `__init__.py`、`app.py` 或任何其他文件。
+
+#### 4.4.6 安全措施
+
+| 措施 | 说明 |
+|------|------|
+| **类型校验** | `plugin_class` 必须是 `DataSourcePlugin` 的子类,否则跳过 |
+| **环境变量门控** | `required_env` 中的变量缺失则不启用,防止未配置的插件意外加载 |
+| **显式黑名单** | `PLUGIN_BLOCKLIST=powerbi,grafana` 可以禁用特定插件 |
+| **Blueprint 前缀隔离** | 插件路由强制在 `/api/plugins/<plugin_id>/` 下,无法覆盖核心路由 |
+| **错误隔离** | 单个插件加载失败不影响其他插件和核心系统 |
+
+#### 4.4.7 前端的对应扫描机制
+
+前端由于 Vite/Webpack 的编译时限制,无法做到运行时自动扫描。但可以用 **Vite 的 `import.meta.glob`** 实现编译时自动发现:
+
+```typescript
+// src/plugins/registry.ts
+
+import { DataSourcePluginModule } from "./types";
+
+// Vite 编译时自动扫描 src/plugins/*/index.ts
+// 返回 { "./superset/index.ts": () => import(...), "./metabase/index.ts": () => import(...) }
+const pluginModules = import.meta.glob<{ default: DataSourcePluginModule }>(
+  "./*/index.ts"
+);
+
+// 提取插件 ID → 懒加载函数的映射
+const pluginLoaders: Record<string, () => Promise<DataSourcePluginModule>> = {};
+for (const [path, loader] of Object.entries(pluginModules)) {
+  // "./superset/index.ts" → "superset"
+  const match = path.match(/^\.\/([^/]+)\/index\.ts$/);
+  if (match) {
+    const pluginId = match[1];
+    pluginLoaders[pluginId] = () => loader().then((m) => m.default);
+  }
+}
+
+export async function loadEnabledPlugins(
+  enabledPluginIds: string[]
+): Promise<DataSourcePluginModule[]> {
+  const modules: DataSourcePluginModule[] = [];
+  for (const id of enabledPluginIds) {
+    const loader = pluginLoaders[id];
+    if (loader) {
+      try {
+        modules.push(await loader());
+      } catch (e) {
+        console.warn(`Failed to load plugin: ${id}`, e);
+      }
+    }
+  }
+  return modules;
+}
+```
+
+这样前端也做到了"创建 `src/plugins/grafana/index.ts` 即自动纳入编译",不需要手动维护 `pluginLoaders` 映射表。
+
+> **后端自动扫描 + 前端 `import.meta.glob` = 全栈零注册新增插件。**
+
+#### 4.4.8 与 AuthProvider 注册机制的对比
+
+| 维度 | DataSourcePlugin | AuthProvider |
+|------|-----------------|-------------|
+| 协作模式 | 并行(所有插件同时存在) | 单选(同一时间只有一个主 Provider) |
+| 注册方式 | `plugins/` 目录自动扫描 | `auth_providers/` 目录自动扫描 |
+| 激活方式 | 
所有已发现的插件同时启用 | `AUTH_PROVIDER` 环境变量**单选**激活一个 | +| 新增方式 | 在 `plugins/` 下创建目录即可 | 在 `auth_providers/` 下创建 `.py` 即可 | +| 安全控制 | `PLUGIN_BLOCKLIST` 黑名单 | 仅被选中的 Provider 执行 `on_configure()` | + +**统一的设计哲学**:发现与激活分离 — 两者都通过目录扫描自动发现,但激活策略不同(插件全量启用,Provider 单选启用)。新增组件时核心代码零修改。 + +### 4.5 插件数据写入工具 + +插件从外部系统拉取到数据后,通过 `PluginDataWriter` 写入 Workspace: + +```python +# py-src/data_formulator/plugins/data_writer.py + +import logging +import pandas as pd +import pyarrow as pa +from typing import Any, Optional + +from data_formulator.auth import get_identity_id +from data_formulator.workspace_factory import get_workspace +from data_formulator.datalake.parquet_utils import sanitize_table_name + +logger = logging.getLogger(__name__) + + +class PluginDataWriter: + """插件专用的数据写入工具。""" + + def __init__(self, plugin_id: str): + self.plugin_id = plugin_id + + def _get_workspace(self): + return get_workspace(get_identity_id()) + + def write_dataframe( + self, + df: pd.DataFrame, + table_name: str, + *, + overwrite: bool = True, + source_metadata: Optional[dict[str, Any]] = None, + ) -> dict[str, Any]: + """将 DataFrame 写入当前用户的 Workspace。""" + workspace = self._get_workspace() + base_name = sanitize_table_name(table_name) + final_name = base_name + is_renamed = False + + if not overwrite: + counter = 1 + existing = set(workspace.list_tables()) + while final_name in existing: + final_name = f"{base_name}_{counter}" + counter += 1 + is_renamed = True + + loader_metadata = { + "loader_type": f"plugin:{self.plugin_id}", + **(source_metadata or {}), + } + + meta = workspace.write_parquet(df, final_name, loader_metadata=loader_metadata) + + logger.info( + "Plugin '%s' wrote '%s': %d rows, %d cols", + self.plugin_id, final_name, len(df), len(df.columns), + ) + + return { + "table_name": meta.name, + "row_count": meta.row_count, + "columns": [c.name for c in (meta.columns or [])], + "is_renamed": is_renamed, + } + + def write_arrow( + self, + table: pa.Table, + table_name: str, + *, + overwrite: 
bool = True, + source_metadata: Optional[dict[str, Any]] = None, + ) -> dict[str, Any]: + """将 Arrow Table 写入 Workspace(跳过 pandas 转换,更高效)。""" + workspace = self._get_workspace() + base_name = sanitize_table_name(table_name) + final_name = base_name + is_renamed = False + + if not overwrite: + counter = 1 + existing = set(workspace.list_tables()) + while final_name in existing: + final_name = f"{base_name}_{counter}" + counter += 1 + is_renamed = True + + loader_metadata = { + "loader_type": f"plugin:{self.plugin_id}", + **(source_metadata or {}), + } + + meta = workspace.write_parquet_from_arrow(table, final_name, loader_metadata=loader_metadata) + + return { + "table_name": meta.name, + "row_count": meta.row_count, + "columns": [c.name for c in (meta.columns or [])], + "is_renamed": is_renamed, + } +``` + +### 4.6 前端插件接口 + +```typescript +// src/plugins/types.ts + +export interface PluginManifest { + id: string; + name: string; + icon: string; + description: string; + authModes: Array<"sso" | "jwt" | "password" | "api_key" | "none">; + capabilities: string[]; +} + +// PluginPanelProps、DataProvenance、DataSourcePluginModule 的完整定义 +// 见 1-data-source-plugin-architecture.md § 7.1 +// +// 要点: +// - 前端组件不接收 ssoToken prop。SSO token 由插件后端通过 +// auth.get_sso_token() 从 Flask session 获取,前端无需感知。 +// - onDataLoaded 回调必须包含 DataProvenance(数据溯源), +// 以支持"用同样的参数刷新"和 UI 显示数据来源。 +// - onPreviewLoaded(可选)支持"先预览再加载"的交互。 +// - LoginComponent 不接收 ssoToken,认证流程走插件自身后端。 +``` + +前端插件注册使用 `import.meta.glob` 自动扫描(详见 4.4.7 节),此处不再重复。新增插件只需在 `src/plugins/` 下创建子目录并导出 `index.ts`,无需手动维护注册表。 + +--- + +## 5. 
Layer 3:凭证保险箱 (CredentialVault) + +### 5.1 设计思路 + +用户连接未接 SSO 的外部系统时,需要输入该系统的账号密码。这些凭证应该: + +| 需求 | 现状 (0.7) | 目标 | +|------|-----------|------| +| 持久化 | 浏览器 IndexedDB (redux-persist),换浏览器丢失 | 服务端加密存储,跟随用户身份 | +| 安全性 | 前端明文存储 | 服务端 Fernet 对称加密 | +| 跨设备 | 不支持 | SSO 登录后自动可用 | +| 按用户隔离 | 基于 browser UUID | 基于 SSO user_id 或 browser UUID | + +### 5.2 CredentialVault 接口 + +```python +# py-src/data_formulator/credential_vault/base.py + +from abc import ABC, abstractmethod +from typing import Optional + + +class CredentialVault(ABC): + """凭证保险箱抽象接口。 + + 按 (user_identity, source_key) 二元组存取加密凭证。 + - user_identity: 来自 auth.get_identity_id(),如 "user:alice@corp.com" + - source_key: 外部系统标识,如 "superset"、"metabase-prod" + """ + + @abstractmethod + def store(self, user_id: str, source_key: str, credentials: dict) -> None: + """存储凭证。已存在则覆盖。""" + ... + + @abstractmethod + def retrieve(self, user_id: str, source_key: str) -> Optional[dict]: + """取出凭证。不存在返回 None。""" + ... + + @abstractmethod + def delete(self, user_id: str, source_key: str) -> None: + """删除凭证。""" + ... + + @abstractmethod + def list_sources(self, user_id: str) -> list[str]: + """列出该用户所有已存储凭证的 source_key。""" + ... 
+``` + +### 5.3 本地加密实现 + +```python +# py-src/data_formulator/credential_vault/local_vault.py + +import json +import logging +import sqlite3 +from pathlib import Path +from typing import Optional + +from cryptography.fernet import Fernet + +from .base import CredentialVault + +logger = logging.getLogger(__name__) + + +class LocalCredentialVault(CredentialVault): + """基于 SQLite + Fernet 的本地加密凭证存储。 + + 存储位置: DATA_FORMULATOR_HOME/credentials.db + 加密密钥: CREDENTIAL_VAULT_KEY 环境变量 (Fernet key) + + 生成密钥: + python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())" + """ + + def __init__(self, db_path: str | Path, encryption_key: str): + self._db_path = str(db_path) + self._fernet = Fernet(encryption_key.encode() if isinstance(encryption_key, str) else encryption_key) + self._init_db() + + def _init_db(self): + with sqlite3.connect(self._db_path) as conn: + conn.execute(""" + CREATE TABLE IF NOT EXISTS credentials ( + user_id TEXT NOT NULL, + source_key TEXT NOT NULL, + encrypted_data BLOB NOT NULL, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (user_id, source_key) + ) + """) + + def store(self, user_id: str, source_key: str, credentials: dict) -> None: + encrypted = self._fernet.encrypt(json.dumps(credentials).encode("utf-8")) + with sqlite3.connect(self._db_path) as conn: + conn.execute( + "INSERT OR REPLACE INTO credentials (user_id, source_key, encrypted_data, updated_at) " + "VALUES (?, ?, ?, CURRENT_TIMESTAMP)", + (user_id, source_key, encrypted), + ) + logger.debug("Stored credentials for %s / %s", user_id[:16], source_key) + + def retrieve(self, user_id: str, source_key: str) -> Optional[dict]: + with sqlite3.connect(self._db_path) as conn: + row = conn.execute( + "SELECT encrypted_data FROM credentials WHERE user_id = ? 
AND source_key = ?", + (user_id, source_key), + ).fetchone() + if not row: + return None + try: + decrypted = self._fernet.decrypt(row[0]) + return json.loads(decrypted.decode("utf-8")) + except Exception as e: + logger.warning("Failed to decrypt credentials for %s / %s: %s", user_id[:16], source_key, e) + return None + + def delete(self, user_id: str, source_key: str) -> None: + with sqlite3.connect(self._db_path) as conn: + conn.execute( + "DELETE FROM credentials WHERE user_id = ? AND source_key = ?", + (user_id, source_key), + ) + + def list_sources(self, user_id: str) -> list[str]: + with sqlite3.connect(self._db_path) as conn: + rows = conn.execute( + "SELECT source_key FROM credentials WHERE user_id = ?", + (user_id,), + ).fetchall() + return [r[0] for r in rows] +``` + +### 5.4 Vault 工厂 + +```python +# py-src/data_formulator/credential_vault/__init__.py + +import os +import logging +from typing import Optional + +from .base import CredentialVault + +logger = logging.getLogger(__name__) + +_vault: Optional[CredentialVault] = None +_initialized = False + + +def get_credential_vault() -> Optional[CredentialVault]: + """获取全局 CredentialVault 实例。 + + 返回 None 表示 Vault 未配置(CREDENTIAL_VAULT_KEY 未设置)。 + 此时插件应回退到仅 Session 级别的凭证存储。 + """ + global _vault, _initialized + if _initialized: + return _vault + + _initialized = True + key = os.environ.get("CREDENTIAL_VAULT_KEY", "").strip() + if not key: + logger.info("Credential vault not configured (CREDENTIAL_VAULT_KEY not set)") + return None + + vault_type = os.environ.get("CREDENTIAL_VAULT", "local").strip().lower() + + if vault_type == "local": + from data_formulator.credential_vault.local_vault import LocalCredentialVault + from data_formulator.datalake.workspace import get_data_formulator_home + + db_path = get_data_formulator_home() / "credentials.db" + db_path.parent.mkdir(parents=True, exist_ok=True) + _vault = LocalCredentialVault(db_path, key) + logger.info("Credential vault initialized: local (%s)", db_path) + 
else: + logger.warning("Unknown credential vault type: %s", vault_type) + + return _vault +``` + +### 5.5 凭证管理 API + +```python +# py-src/data_formulator/credential_routes.py + +import flask +from flask import Blueprint, request, jsonify +from data_formulator.auth import get_identity_id +from data_formulator.credential_vault import get_credential_vault + +credential_bp = Blueprint("credentials", __name__, url_prefix="/api/credentials") + + +@credential_bp.route("/list", methods=["GET"]) +def list_credentials(): + """列出当前用户已存储凭证的外部系统。不返回凭证内容。""" + vault = get_credential_vault() + if not vault: + return jsonify({"sources": []}) + + identity = get_identity_id() + sources = vault.list_sources(identity) + return jsonify({"sources": sources}) + + +@credential_bp.route("/store", methods=["POST"]) +def store_credential(): + """存储或更新凭证。""" + vault = get_credential_vault() + if not vault: + return jsonify({"error": "Credential vault not configured"}), 503 + + data = request.get_json() + source_key = data.get("source_key") + credentials = data.get("credentials") + if not source_key or not credentials: + return jsonify({"error": "source_key and credentials required"}), 400 + + identity = get_identity_id() + vault.store(identity, source_key, credentials) + return jsonify({"status": "stored", "source_key": source_key}) + + +@credential_bp.route("/delete", methods=["POST"]) +def delete_credential(): + """删除凭证。""" + vault = get_credential_vault() + if not vault: + return jsonify({"error": "Credential vault not configured"}), 503 + + data = request.get_json() + source_key = data.get("source_key") + if not source_key: + return jsonify({"error": "source_key required"}), 400 + + identity = get_identity_id() + vault.delete(identity, source_key) + return jsonify({"status": "deleted", "source_key": source_key}) +``` + +--- + +## 6. 
SSO Token 透传机制 + +### 6.1 原理 + +当 Data Formulator 和外部 BI 系统(如 Superset)共用同一个 OIDC IdP 时,用户登录 DF 获得的 `access_token` 可以**直接用于调用外部系统的 API**,前提是外部系统信任同一个 Issuer。 + +``` + 同一个 IdP (Keycloak / Okta / ...) + │ + ┌──────────┼──────────┐ + │ │ + ▼ ▼ + Data Formulator Superset + (client_id: df) (client_id: superset) + │ │ + │ 用户的 access_token │ + │ (audience: df) │ + │ │ + └──────── ? ──────────┘ + +两种方式让 Superset 接受 DF 的 token: + +方式 A: Token Exchange (标准, 推荐) + DF 后端 → IdP token exchange endpoint + → 用 df 的 token 换取 superset audience 的 token + → 用新 token 调用 Superset API + +方式 B: 共享 Audience (简单, 适合内部系统) + IdP 中将 df 和 superset 配置为同一个 audience + → DF 的 token 直接被 Superset 接受 +``` + +### 6.2 插件中的 SSO 透传实现 + +```python +# 在 Superset 插件中 + +class SupersetPlugin(DataSourcePlugin): + + def supports_sso_passthrough(self) -> bool: + return bool(os.environ.get("PLG_SUPERSET_SSO", "").lower() == "true") + + def _get_superset_token_via_sso(self, sso_token: str) -> Optional[str]: + """用 DF 用户的 SSO token 获取 Superset 的 access token。""" + superset_url = os.environ["PLG_SUPERSET_URL"] + + # 方式 A: 如果 Superset 支持 OAuth token introspection / exchange + # 用 SSO token 调用 Superset 的 OAuth 端点换取 Superset session + try: + resp = requests.post( + f"{superset_url}/api/v1/security/login", + json={"token": sso_token, "provider": "oidc"}, + timeout=10, + ) + if resp.status_code == 200: + return resp.json().get("access_token") + except Exception as e: + logger.warning("SSO passthrough to Superset failed: %s", e) + + # 方式 B: 直接用 SSO token 作为 Bearer (如果 Superset 配置了同一 IdP) + try: + resp = requests.get( + f"{superset_url}/api/v1/me/", + headers={"Authorization": f"Bearer {sso_token}"}, + timeout=10, + ) + if resp.status_code == 200: + return sso_token # token 直接可用 + except Exception: + pass + + return None +``` + +### 6.3 认证模式自动协商 + +``` +用户打开插件面板 + │ + ▼ +前端: POST /api/plugins/superset/auth/status + │ + ▼ +后端检查: + ├─ Session 中已有有效 token? → {"authenticated": true} + │ + ├─ SSO token 可用 + 插件支持透传? 
+ │ → 尝试透传 → 成功 → {"authenticated": true, "mode": "sso"} + │ → 失败 → 继续检查 + │ + ├─ Credential Vault 中有已存凭证? + │ → 尝试登录 → 成功 → {"authenticated": true, "mode": "vault"} + │ → 失败 (密码已改) → {"authenticated": false, "vault_stale": true} + │ + └─ 以上均无 → {"authenticated": false, "available_modes": ["password", "api_key"]} + +前端根据响应: + ├─ authenticated=true → 直接显示数据目录 + ├─ authenticated=false + SSO 可用 → "正在通过 SSO 登录..." (自动重试) + └─ authenticated=false + 需手动 → 显示登录表单 +``` + +--- + +## 7. 现有 ExternalDataLoader 的演进路径 + +### 7.1 短期:两套机制并行 + +``` +数据源类型 │ 使用机制 │ 原因 +───────────────────┼───────────────────────┼───────────────────────── +MySQL/PG/MSSQL │ ExternalDataLoader │ 标准数据库,通用表单即可 +MongoDB/BigQuery │ ExternalDataLoader │ 同上 +S3/Azure Blob │ ExternalDataLoader │ 文件存储 +───────────────────┼───────────────────────┼───────────────────────── +Superset │ DataSourcePlugin │ 有认证/目录/筛选/RBAC +Metabase │ DataSourcePlugin │ 同上 +Power BI │ DataSourcePlugin │ 同上 +``` + +**判断标准**:如果只需要 `连接参数 → list_tables → fetch_data`,用 DataLoader;如果需要自己的认证流程、数据浏览 UI、权限模型,用 Plugin。 + +### 7.2 中期:DataLoader 接入 CredentialVault + +现有 DataLoader 的连接参数(数据库密码等)可以选择性地存入 CredentialVault,而不是留在浏览器 IndexedDB 中: + +```python +# tables_routes.py 增强 + +@tables_bp.route("/data-loader/connect", methods=["POST"]) +def connect_data_loader(): + data = request.get_json() + data_loader_type = data["data_loader_type"] + data_loader_params = data["data_loader_params"] + remember = data.get("remember_credentials", False) + + # 正常连接逻辑... 
+ loader = DATA_LOADERS[data_loader_type](data_loader_params) + tables = loader.list_tables() + + # 如果用户选择"记住凭证",存入 Vault + if remember: + vault = get_credential_vault() + if vault: + identity = get_identity_id() + vault.store(identity, f"dataloader:{data_loader_type}", data_loader_params) + + return jsonify({"tables": tables}) +``` + +### 7.3 长期:统一为插件体系(可选) + +如果未来需要给数据库连接器也加上专用 UI(如 schema 浏览、SQL 编辑器),可以将其包装为 Plugin。但这不是必须的 — 现有的通用表单 UI 对数据库连接器已经够用。 + +``` +未来可能的架构: + +DataSourcePlugin (统一基类) +├── BI Plugin (Superset, Metabase, ...) +│ └── 自带完整 UI + 认证流程 +├── Database Plugin (PG, MySQL, ...) ← 可选迁移 +│ └── 复用 DBManagerPane 的通用表单 +└── Storage Plugin (S3, Azure Blob, ...) ← 可选迁移 + └── 复用 DBManagerPane 的通用表单 + +ExternalDataLoader 可以作为 Database/Storage Plugin 的内部实现被保留, +外面包一层 Plugin 壳即可。 +``` + +--- + +## 8. 身份管理:SSO 时代的简化 + +### 8.1 有 SSO vs 无 SSO 的身份模型对比 + +``` +┌─────────────────────────────────────────────────────────────┐ +│ 无 SSO (现有模式) │ +│ │ +│ 电脑A: browser:aaa-111 │ +│ 电脑B: browser:bbb-222 ← 完全不同的身份,数据不通 │ +│ │ +│ 需要 IdentityStore + 身份合并 才能跨设备 (复杂) │ +└─────────────────────────────────────────────────────────────┘ + +┌─────────────────────────────────────────────────────────────┐ +│ 有 SSO (新增) │ +│ │ +│ 电脑A: user:alice@corp.com (SSO 登录) │ +│ 电脑B: user:alice@corp.com (SSO 登录) ← 同一身份! 
│
+│                                                             │
+│  天然跨设备,无需身份合并。Workspace 按 user:xxx 隔离即可。 │
+│  CredentialVault 也按 user:xxx 存取,自动跨设备可用。       │
+└─────────────────────────────────────────────────────────────┘
+```
+
+**SSO 从根本上解决了身份漫游问题**。原有文档中复杂的 `IdentityStore` + 身份链接 + 合并对话框,在 SSO 模式下完全不需要。
+
+### 8.2 身份管理策略
+
+| 部署模式 | 身份来源 | Workspace 键 | Credential Vault 键 | 跨设备 |
+|---------|---------|-------------|--------------------|----|
+| 本地匿名 | 浏览器 UUID | `browser:xxx` | `browser:xxx` (不可靠) | 不支持 |
+| SSO 登录 | OIDC sub claim | `user:alice@corp.com` | `user:alice@corp.com` | 自动支持 |
+| Azure EasyAuth | Azure Principal | `user:guid` | `user:guid` | 自动支持 |
+
+### 8.3 身份迁移(匿名 → 认证用户)
+
+当匿名用户(`browser:xxx`)首次通过 SSO 登录后,身份变为 `user:xxx`,两者分属不同 Workspace。系统自动检测此转换并提示用户选择:
+
+**检测机制**(前端 `App.tsx`):
+- 应用启动时,redux-persist 恢复上次 `identity`(`browser:uuid`)
+- Auth useEffect 解析出新身份(`user:sub`)
+- 如果 `旧.type === 'browser'` 且 `新.type === 'user'` → 触发迁移流程
+
+**迁移流程**:
+1. 前端调用 `GET /api/sessions/list?source_identity=browser:<uuid>` 检查旧匿名身份是否有 workspace 数据
+2. 如果有 → 弹出 `IdentityMigrationDialog`,提供两个选择:
+   - **导入数据**:调用 `POST /api/sessions/migrate { source_identity: "browser:<uuid>" }`,后端将旧身份的 workspace 文件夹复制到新身份下(不删除源数据,安全、幂等)
+   - **全新开始**:直接清空前端持久化状态
+3. 如果无 → 静默清空前端持久化状态,无弹窗
+4. 无论哪种选择,最后都执行 `persistor.purge()` 清除 localforage 中的旧 Redux 状态
+
+**安全约束**:
+- `source_identity` 参数仅接受 `browser:` 前缀,且调用者必须是 `user:` 身份
+- 迁移只做复制(`shutil.copytree`),不删除源数据
+- Ephemeral 模式下跳过(无服务端数据可迁移)
+
+---
+
+## 9. 
配置参考 + +### 9.1 完整 .env 配置示例 + +```bash +# ============================================================== +# Data Formulator — 完整配置示例 +# ============================================================== + +# -------------------------------------------------------------- +# 基础设置 +# -------------------------------------------------------------- +LOG_LEVEL=INFO +SANDBOX=local +DATA_FORMULATOR_HOME=/data/data-formulator + +# -------------------------------------------------------------- +# 认证设置(主认证 + 可选匿名回退) +# -------------------------------------------------------------- +# 主认证模式(选一种): +# anonymous(默认)| oidc / oauth2 | github | azure_easyauth | proxy_header | saml | ldap | cas +# 注:oidc 和 oauth2 是同一个 Provider 的别名,适用于任何 OAuth2/OIDC + JWT + JWKS 的 IdP +AUTH_PROVIDER=oidc + +# 是否允许匿名访问(默认 true,无需配置) +# 仅在需要强制登录时设为 false: +# ALLOW_ANONYMOUS=false + +# ─── GitHub OAuth 配置 ─── +# GITHUB_CLIENT_ID=xxx +# GITHUB_CLIENT_SECRET=xxx + +# ─── OIDC / OAuth2 配置 ─── +# 模式 A(自动发现):只需 OIDC_ISSUER_URL + OIDC_CLIENT_ID +# 模式 B(手动端点):额外配置 OIDC_AUTHORIZE_URL / OIDC_TOKEN_URL 等 +# 详见 § 3.4「对接 OIDC/OAuth2 Provider 的 IdP 要求」 +OIDC_ISSUER_URL=https://keycloak.example.com/realms/my-org +OIDC_CLIENT_ID=data-formulator +# OIDC_AUTHORIZE_URL=https://sso.example.com/oauth2/authorize # 模式 B +# OIDC_TOKEN_URL=https://sso.example.com/oauth2/token # 模式 B +# OIDC_USERINFO_URL=https://sso.example.com/oauth2/userinfo # 推荐 +# OIDC_JWKS_URL=https://sso.example.com/oauth2/jwks # 可选 +# OIDC_CLIENT_SECRET=xxx # 机密客户端 + +# -------------------------------------------------------------- +# 凭证保险箱 +# -------------------------------------------------------------- +# 存储类型: local (默认) +CREDENTIAL_VAULT=local +# 加密密钥 (Fernet) +# 生成: python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())" +CREDENTIAL_VAULT_KEY=your-fernet-key-here + +# -------------------------------------------------------------- +# LLM 模型配置 +# -------------------------------------------------------------- 
+DEEPSEEK_ENABLED=true +DEEPSEEK_ENDPOINT=openai +DEEPSEEK_API_KEY=sk-xxx +DEEPSEEK_API_BASE=https://api.deepseek.com +DEEPSEEK_MODELS=deepseek-chat + +QWEN_ENABLED=true +QWEN_ENDPOINT=openai +QWEN_API_KEY=sk-xxx +QWEN_API_BASE=https://dashscope.aliyuncs.com/compatible-mode/v1 +QWEN_MODELS=qwen3-omni-flash + +# -------------------------------------------------------------- +# 数据源插件 +# -------------------------------------------------------------- +# Superset (配置了 PLG_SUPERSET_URL 即自动启用) +PLG_SUPERSET_URL=http://superset.example.com:8088 +PLG_SUPERSET_SSO=true # 启用 SSO token 透传到 Superset +# PLG_SUPERSET_TIMEOUT=30 # API 超时秒数 (可选) + +# Metabase (配置了 PLG_METABASE_URL 即自动启用) +# PLG_METABASE_URL=http://metabase.example.com:3000 + +# Power BI (配置了 PLG_POWERBI_TENANT_ID 即自动启用) +# PLG_POWERBI_TENANT_ID=your-tenant-id +# PLG_POWERBI_CLIENT_ID=your-client-id + +# -------------------------------------------------------------- +# Workspace 存储 +# -------------------------------------------------------------- +# WORKSPACE_BACKEND=local +# AZURE_BLOB_CONNECTION_STRING= +# AZURE_BLOB_ACCOUNT_URL= +``` + +### 9.2 免费 IdP 方案(生产可用) + +如果组织没有 Google Workspace、Microsoft 365 或 AWS 等企业订阅,以下免费方案可用于生产环境: + +| 方案 | 费用 | 适用场景 | 特点 | +|------|------|---------|------| +| **Keycloak** | 免费(自托管) | 中大型企业 | 功能最全,支持 OIDC/SAML/LDAP,需自行运维 | +| **Authelia** | 免费(自托管) | 个人/小团队 | 轻量级,与反向代理集成好,配置简单 | +| **Authentik** | 免费(自托管) | 中小团队 | 界面友好,功能丰富,支持 OIDC/SAML/LDAP | +| **Auth0 免费版** | 免费(7,500用户限制) | 小团队/初创公司 | 托管服务,无需运维,有用户数量限制 | + +#### Keycloak 配置示例 + +```bash +# 使用 Docker 运行 Keycloak +docker run -p 8080:8080 \ + -e KEYCLOAK_ADMIN=admin \ + -e KEYCLOAK_ADMIN_PASSWORD=admin \ + quay.io/keycloak/keycloak:22.0 start-dev + +# Data Formulator 配置(仅需后端配置,前端自动获取) +OIDC_ISSUER_URL=http://localhost:8080/realms/master +OIDC_CLIENT_ID=df-client +# Keycloak 中创建 client 时获取 +OIDC_CLIENT_SECRET=xxx +``` + +#### Authelia 配置示例 + +```bash +# docker-compose.yml 示例 +version: '3' +services: + authelia: + image: 
authelia/authelia:latest + ports: + - "9091:9091" + volumes: + - ./authelia:/config + +# Data Formulator 使用反向代理头认证 +AUTH_PROVIDER=proxy_header +PROXY_HEADER_USER=Remote-User +PROXY_HEADER_EMAIL=Remote-Email +PROXY_TRUSTED_IPS=127.0.0.1,172.16.0.0/12 +``` + +#### Auth0 免费版配置示例 + +```bash +# 在 https://auth0.com/ 注册免费账号,创建 Application +# 仅需后端配置,前端自动获取 +OIDC_ISSUER_URL=https://your-tenant.auth0.com/ +OIDC_CLIENT_ID=your-client-id +OIDC_CLIENT_SECRET=your-client-secret +``` + +**注意**:Google、Microsoft、Amazon 的 OIDC 服务都需要付费的企业订阅(Workspace、M365、AWS),个人账号无法作为 IdP 使用。 + +#### 社交登录集成(无需企业账号) + +如果不想部署自托管 IdP,可以使用社交登录平台。这些平台**不需要企业账号**,个人开发者账号即可免费使用: + +| 平台 | 需要企业账号? | 费用 | 用户群体 | 推荐度 | +|------|--------------|------|---------|--------| +| **GitHub** | ❌ 不需要 | 免费 | 开发者 | ⭐⭐⭐⭐⭐ | +| **Google** | ❌ 不需要 | 免费 | 大众用户 | ⭐⭐⭐⭐ | +| **Microsoft** | ❌ 不需要 | 免费 | 企业/个人 | ⭐⭐⭐ | + +**GitHub OAuth 配置示例**(最简单): + +```bash +# 1. 在 GitHub 注册 OAuth App +# 访问 https://github.com/settings/developers +# 点击 "New OAuth App" +# 填写: +# - Application name: Data Formulator +# - Homepage URL: https://your-domain.com +# - Authorization callback URL: https://your-domain.com/api/auth/callback + +# 2. 
Data Formulator 配置(仅需后端配置,前端自动获取) +AUTH_PROVIDER=github +GITHUB_CLIENT_ID=your-github-client-id +GITHUB_CLIENT_SECRET=your-github-client-secret +``` + +**简化配置模式**(主认证 + 匿名回退): + +```bash +# 模式 1:仅匿名(本地个人使用) +AUTH_PROVIDER=anonymous + +# 模式 2:GitHub OAuth + 匿名回退 +AUTH_PROVIDER=github +GITHUB_CLIENT_ID=xxx +GITHUB_CLIENT_SECRET=xxx +# ALLOW_ANONYMOUS 默认 true,匿名用户可正常使用 + +# 模式 3:企业 SSO + 匿名回退 +AUTH_PROVIDER=oidc +OIDC_ISSUER_URL=https://keycloak.company.com/realms/main +OIDC_CLIENT_ID=data-formulator +# ALLOW_ANONYMOUS 默认 true,匿名用户可正常使用 +``` + +**说明**: +- 主认证方式(github/oidc)提供完整功能(数据同步、跨设备、SSO 透传) +- 匿名模式作为回退,方便临时使用或快速体验 +- 如需强制登录,设置 `ALLOW_ANONYMOUS=false` + +#### 一键 Docker 部署方案 + +为降低配置门槛,提供开箱即用的 Docker Compose 配置: + +**方案 A:Keycloak + Data Formulator(完整 SSO)** + +```yaml +# docker-compose.sso.yml +version: '3.8' + +services: + keycloak: + image: quay.io/keycloak/keycloak:22.0 + environment: + KEYCLOAK_ADMIN: admin + KEYCLOAK_ADMIN_PASSWORD: admin + KC_DB: postgres + KC_DB_URL: jdbc:postgresql://postgres:5432/keycloak + KC_DB_USERNAME: keycloak + KC_DB_PASSWORD: keycloak + KC_HOSTNAME: localhost + ports: + - "8080:8080" + command: start-dev + depends_on: + - postgres + + postgres: + image: postgres:15 + environment: + POSTGRES_DB: keycloak + POSTGRES_USER: keycloak + POSTGRES_PASSWORD: keycloak + volumes: + - postgres_data:/var/lib/postgresql/data + + data-formulator: + image: data-formulator:latest + environment: + # 仅需后端配置,前端运行时自动获取 + AUTH_PROVIDER: oidc + OIDC_ISSUER_URL: http://keycloak:8080/realms/master + OIDC_CLIENT_ID: df-client + OIDC_CLIENT_SECRET: ${OIDC_CLIENT_SECRET} + ALLOW_ANONYMOUS: "true" + CREDENTIAL_VAULT: local + CREDENTIAL_VAULT_KEY: ${VAULT_KEY} + ports: + - "5000:5000" + depends_on: + - keycloak + +volumes: + postgres_data: +``` + +启动命令: +```bash +# 1. 生成加密密钥 +export VAULT_KEY=$(python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())") + +# 2. 
在 Keycloak 中创建 client,获取 secret +# 访问 http://localhost:8080 (admin/admin) +# 创建 realm → 创建 client → 获取 secret + +# 3. 启动服务 +export OIDC_CLIENT_SECRET=your-client-secret +docker-compose -f docker-compose.sso.yml up +``` + +**方案 B:Authelia + Data Formulator(轻量级)** + +```yaml +# docker-compose.authelia.yml +version: '3.8' + +services: + authelia: + image: authelia/authelia:latest + ports: + - "9091:9091" + volumes: + - ./authelia:/config + environment: + AUTHELIA_JWT_SECRET_FILE: /config/jwt_secret + AUTHELIA_SESSION_SECRET_FILE: /config/session_secret + + redis: + image: redis:alpine + + data-formulator: + image: data-formulator:latest + environment: + AUTH_PROVIDER: proxy_header + PROXY_HEADER_USER: Remote-User + PROXY_HEADER_EMAIL: Remote-Email + PROXY_TRUSTED_IPS: 172.16.0.0/12 + ports: + - "5000:5000" +``` + +**方案 C:仅 Data Formulator(匿名模式,零配置)** + +```yaml +# docker-compose.minimal.yml +version: '3.8' + +services: + data-formulator: + image: data-formulator:latest + environment: + # 无 SSO 配置,自动使用匿名模式 + DEEPSEEK_ENABLED: "true" + DEEPSEEK_API_KEY: ${DEEPSEEK_API_KEY} + ports: + - "5000:5000" + volumes: + - df_data:/data/data-formulator + +volumes: + df_data: +``` + +启动命令: +```bash +export DEEPSEEK_API_KEY=sk-xxx +docker-compose -f docker-compose.minimal.yml up +``` + +### 9.3 最小配置(本地匿名使用) + +```bash +# 最小配置 — 本地使用,无 SSO,无插件 +DEEPSEEK_ENABLED=true +DEEPSEEK_ENDPOINT=openai +DEEPSEEK_API_KEY=sk-xxx +DEEPSEEK_API_BASE=https://api.deepseek.com +DEEPSEEK_MODELS=deepseek-chat +``` + +### 9.4 团队部署配置(SSO + Superset) + +```bash +# 团队部署 — SSO + Superset(精简版) +AUTH_PROVIDER=oidc +OIDC_ISSUER_URL=https://keycloak.internal:8443/realms/team +OIDC_CLIENT_ID=data-formulator + +PLG_SUPERSET_URL=http://superset.internal:8088 +PLG_SUPERSET_SSO=true + +DEEPSEEK_ENABLED=true +DEEPSEEK_API_KEY=sk-xxx +``` + +--- + +## 10. 
目录结构 + +### 10.1 后端新增文件 + +``` +py-src/data_formulator/ +├── auth.py # 重构:Provider 自动发现 + /api/auth/info 委托 +├── auth_providers/ # 新增:认证提供者(自动发现) +│ ├── __init__.py # Provider 自动扫描 + get_provider_class() API +│ ├── base.py # AuthProvider(含 get_auth_info())/ AuthResult 基类 +│ ├── azure_easyauth.py # Azure EasyAuth (迁移现有逻辑) +│ ├── oidc.py # 通用 OIDC Provider ★(仅需 2 个环境变量) +│ └── github_oauth.py # GitHub OAuth Provider +├── auth_gateways/ # 新增:有状态协议的登录网关 +│ ├── github_gateway.py # GitHub OAuth 授权码交换 +│ └── logout.py # 通用登出 +├── credential_vault/ # 新增:凭证保险箱 +│ ├── __init__.py # get_credential_vault() 工厂 +│ ├── base.py # CredentialVault 抽象接口 +│ └── local_vault.py # SQLite + Fernet 加密实现 +├── credential_routes.py # 新增:凭证管理 API +├── plugins/ # 新增:插件系统 +│ ├── __init__.py # 插件注册中心 (discover_and_register) +│ ├── base.py # DataSourcePlugin 基类 +│ ├── data_writer.py # PluginDataWriter 写入工具 +│ ├── superset/ # Superset 插件 +│ │ ├── __init__.py # SupersetPlugin 实现 +│ │ ├── superset_client.py # Superset REST API 封装 +│ │ ├── auth_bridge.py # JWT/SSO 认证桥接 +│ │ ├── catalog.py # 带缓存的数据目录 +│ │ └── routes/ +│ │ ├── __init__.py +│ │ ├── auth.py # /api/plugins/superset/auth/* +│ │ ├── catalog.py # /api/plugins/superset/catalog/* +│ │ └── data.py # /api/plugins/superset/data/* +│ └── metabase/ # Metabase 插件 (未来) +│ └── ... +├── data_loader/ # 现有 ExternalDataLoader 体系 (不变) +│ └── ... 
+└── app.py # 修改:集成 init_auth() + 插件发现 +``` + +### 10.2 前端新增文件 + +``` +src/ +├── app/ +│ ├── oidcConfig.ts # 新增:OIDC 配置和 UserManager +│ ├── OidcCallback.tsx # 新增:OIDC 回调页面 +│ ├── identity.ts # 修改:增加 setBrowserId() +│ ├── utils.tsx # 修改:fetchWithIdentity 携带 Bearer token +│ ├── dfSlice.tsx # 修改:ServerConfig 增加 plugins 字段 +│ └── App.tsx # 修改:OIDC 初始化 + 登录UI +├── plugins/ # 新增:插件前端 +│ ├── types.ts # PluginManifest, PluginPanelProps 类型 +│ ├── registry.ts # 插件动态加载 +│ ├── PluginHost.tsx # 插件容器组件 +│ ├── CredentialManager.tsx # 凭证管理 UI +│ ├── superset/ # Superset 前端插件 +│ │ ├── index.ts +│ │ ├── SupersetPanel.tsx +│ │ ├── SupersetCatalog.tsx +│ │ ├── SupersetDashboards.tsx +│ │ ├── SupersetFilterDialog.tsx +│ │ ├── SupersetLogin.tsx +│ │ └── api.ts +│ └── metabase/ # Metabase 前端插件 (未来) +│ └── ... +└── views/ + └── UnifiedDataUploadDialog.tsx # 修改:增加 PluginHost 渲染 +``` + +### 10.3 对现有文件的改动清单 + +| 文件 | 改动类型 | 改动量 | 说明 | +|------|---------|--------|------| +| `py-src/.../auth.py` | 重构 | ~60 行 | 基于自动发现的 `init_auth()` + `/api/auth/info` 委托给 Provider 自描述 | +| `py-src/.../app.py` | 修改 | ~25 行 | 调用 `init_auth()`、`discover_and_register()`、注册 credential/identity blueprint、`app-config` 返回 plugins | +| `src/app/App.tsx` | 修改 | ~35 行 | 统一 initAuth(`/api/auth/info` 驱动,纯 action 分支)、登录/登出 UI | +| `src/app/utils.tsx` | 修改 | ~15 行 | `fetchWithIdentity` 携带 Bearer token + 401 自动重试 | +| `src/app/dfSlice.tsx` | 修改 | ~5 行 | `ServerConfig` 增加 `plugins` 字段 | +| `src/app/identity.ts` | 修改 | ~5 行 | 增加 `setBrowserId()` | +| `src/views/UnifiedDataUploadDialog.tsx` | 修改 | ~20 行 | 导入 PluginHost,渲染插件 Tab | + +--- + +## 11. 
实施路径 + +### Phase 1:认证基础 (AuthProvider 链) + +**目标**:将现有 `auth.py` 重构为可插拔的 Provider 链,激活 OIDC Provider。 + +**交付物**: +- `auth_providers/__init__.py` — Provider 自动发现(`pkgutil` 扫描 + `get_provider_class()` API) +- `auth_providers/base.py` — 基类(含 `get_auth_info()` 自描述接口) +- `auth_providers/azure_easyauth.py` — 迁移现有 Azure 逻辑 +- `auth_providers/oidc.py` — 通用 OIDC 验签(仅需 `OIDC_ISSUER_URL` + `OIDC_CLIENT_ID`) +- `auth_providers/github_oauth.py` — GitHub OAuth Provider +- `auth_gateways/github_gateway.py` — GitHub 授权码交换 +- `auth.py` 重构 — 基于自动发现的 `init_auth()` + `/api/auth/info` 委托 +- `src/app/oidcConfig.ts` — 前端 OIDC 配置 +- `src/app/OidcCallback.tsx` — 回调页面 +- `src/app/LoginPanel.tsx` — 统一登录组件(由 `/api/auth/info` 驱动) +- `App.tsx` 修改 — 统一 initAuth(单端点驱动)+ 401 自动重试 +- `utils.tsx` 修改 — Bearer token + 401 重试逻辑 + +**验证标准**: +- 配置 Keycloak + OIDC 环境变量后,用户可以通过浏览器登录 +- 后端正确从 JWT 中提取 `sub` 作为 `user:xxx` 身份 +- 不配置 OIDC 时,行为与现有版本完全一致(浏览器 UUID) +- `get_sso_token()` 可以返回当前用户的 access token + +**依赖**:`pip install PyJWT cryptography`,`npm install oidc-client-ts` + +### Phase 2:插件框架 + Superset 插件 + +**目标**:建立插件框架,将 0.6 版本 Superset 集成迁移为第一个插件。 + +**交付物**: +- `plugins/__init__.py` — 插件注册中心 +- `plugins/base.py` — DataSourcePlugin 基类 +- `plugins/data_writer.py` — PluginDataWriter +- `plugins/superset/` — 完整的 Superset 插件 +- `src/plugins/` — 前端插件框架 +- `app.py` 修改 — 调用 `discover_and_register()` +- `UnifiedDataUploadDialog.tsx` 修改 — PluginHost + +**验证标准**: +- 配置 `PLG_SUPERSET_URL` 后,前端自动显示 Superset Tab +- 用户可以登录 Superset、浏览数据集、加载数据 +- SSO 模式下,用户无需再次输入 Superset 密码 +- 不配置 `PLG_SUPERSET_URL` 时,无任何影响 + +### Phase 3:凭证保险箱 + +**目标**:服务端加密凭证存储,替代浏览器 IndexedDB 的不安全存储。 + +**交付物**: +- `credential_vault/` — Vault 接口 + 本地加密实现 +- `credential_routes.py` — 凭证管理 API +- `src/plugins/CredentialManager.tsx` — 凭证管理 UI +- 插件认证路由增强 — 自动从 Vault 取凭证 + +**验证标准**: +- 凭证加密存储在服务端 SQLite +- SSO 用户换设备后,已存凭证自动可用 +- Vault 未配置时,回退到 Session 级别存储(现有行为) + +### Phase 4:第二个插件 (Metabase) + +**目标**:验证插件框架的通用性 —— 新增插件是否真的不需要修改核心代码。 + +**交付物**: 
+- `plugins/metabase/` — 完整的 Metabase 插件 + +**验证标准**: +- 仅新增 `plugins/metabase/` 目录和 `src/plugins/metabase/` 目录 +- **核心代码零修改**(目录自动扫描 + `import.meta.glob` 自动发现) + +### Phase 5:完善 + +- DataLoader 凭证接入 Vault(可选记住密码) +- 插件国际化 (i18n) +- 插件错误边界和降级处理 +- 管理员配置 UI +- 审计日志(谁在什么时候访问了哪些数据) +- 单元测试和集成测试 + +--- + +## 12. 安全模型 + +### 12.1 认证链路安全 + +``` +前端 OIDC PKCE 流程 (无 client secret 暴露) + │ + ▼ +IdP 签发 access_token (RS256 签名) + │ + ▼ +前端在 Authorization: Bearer 头中携带 + │ + ▼ +后端 OIDCProvider 用 JWKS 公钥验签 + ├─ 验证签名 (RS256) + ├─ 验证 issuer (防止跨 IdP 攻击) + ├─ 验证 audience (防止 token 被其他应用滥用) + ├─ 验证 exp (防止过期 token) + └─ 提取 sub → user:xxx +``` + +### 12.2 凭证存储安全 + +| 层次 | 措施 | +|------|------| +| 传输 | HTTPS (生产环境必须) | +| 存储加密 | Fernet 对称加密 (AES-128-CBC + HMAC-SHA256) | +| 密钥管理 | `CREDENTIAL_VAULT_KEY` 环境变量,不存在代码中 | +| 访问隔离 | 凭证按 `(user_identity, source_key)` 隔离,用户只能访问自己的 | +| 前端不触碰 | 凭证仅在服务端存取,前端只知道"有没有已存凭证",不知道内容 | + +### 12.3 插件隔离安全 + +| 风险 | 缓解 | +|------|------| +| 插件 A 访问插件 B 的 Session | Session key 按 `plugin_{id}_` 前缀隔离 | +| 插件窃取 SSO token | `get_sso_token()` 是只读的,插件不能修改;且 token 本来就是要透传的 | +| 恶意插件注册危险路由 | 插件 Blueprint 强制 prefix `/api/plugins//`,无法覆盖核心路由 | +| SSRF (前端输入任意 URL) | 插件端点 URL 在 `.env` 中由管理员配置,不接受前端输入 | + +### 12.4 匿名模式的安全限制 + +当无 SSO 时(`browser:` 身份),系统不对安全性做强保证: + +- 同一浏览器的所有 Tab 共享同一 `browser:xxx` 身份 +- 清除 localStorage 即可获得新身份 +- Credential Vault 中按 `browser:xxx` 存储的凭证仅在同一浏览器可用 +- 这与现有行为一致,且与"个人本地工具"的定位匹配 + +--- + +## 13. FAQ + +### Q1: 为什么不自建用户注册/登录系统? + +密码存储、哈希、重置、邮件验证、安全审计 —— 这些都是沉重的安全负担。Data Formulator 的核心价值是数据可视化,不是身份管理。OIDC 把这些责任交给专业的 IdP (Keycloak 一个 Docker 容器就能跑),更安全、维护成本更低。 + +### Q2: 小团队不想搭建 IdP 怎么办? + +有几个极轻量的选择: +- **Keycloak**: `docker run -p 8080:8080 quay.io/keycloak/keycloak start-dev` +- **Authelia**: 支持 OIDC 的轻量级认证网关 +- **Authentik**: 现代化的开源 IdP +- 或者直接使用 SaaS: Auth0 免费版支持 7000 月活用户 + +不搭 IdP 也没关系 —— 不配置 `OIDC_ISSUER_URL`,系统自动回退到匿名浏览器模式,与现在完全一致。 + +### Q3: 如果外部 BI 系统既没接 SSO,也不想让用户输密码? 
+ +插件支持多种认证模式,包括 **API Key**。例如 Superset 和 Metabase 都支持生成 API Token: +- 管理员在 BI 系统中为每个用户生成 long-lived API token +- 用户在 DF 中输入一次 API token,存入 Credential Vault +- 后续自动使用 + +### Q4: 如果某个外部系统的 SDK 没有 Python 包怎么办? + +所有 BI 系统都有 REST API,插件通过 `requests` 库调用即可。不需要专用 SDK。如果某个系统需要特殊的 Python 包,在 `__init__.py` 中 `import` 即可——导入失败时插件自动扫描机制会将其标记为 `DISABLED_PLUGINS`(与 `ExternalDataLoader` 的降级机制一致)。 + +### Q5: 如何开发一个新的数据源插件? + +**最小步骤**: + +1. 创建 `py-src/data_formulator/plugins/your_system/` 目录 +2. 实现 `DataSourcePlugin` 子类 (manifest + blueprint + frontend_config) +3. 在 blueprint 中实现 `auth/login`, `catalog/list`, `data/load` 三组路由 +4. 创建 `src/plugins/your_system/` 目录,导出 `index.ts` +5. 实现 `PanelComponent` (列表浏览 + 加载按钮) +6. 在 `.env` 中设置环境变量启用 +7. 重启服务 → 后端自动扫描发现,前端 `import.meta.glob` 自动编译 + +**核心代码改动:0 行。** 无需修改任何注册表或配置文件。 + +### Q6: 现有的 ExternalDataLoader (数据库连接器) 会被废弃吗? + +不会。数据库连接器的需求(连接参数 → 列表 → 拉取)与插件不同,`ExternalDataLoader` 的通用表单 UI 完全够用。两套机制长期并行。如果未来需要给某个数据库加专用 UI,可以考虑包装为 Plugin,但不是必须的。 + +### Q7: 多个 IdP 可以同时配置吗? + +当前设计每次只有一个 OIDC issuer。如果需要支持多个 IdP(如同时支持 Google 和 Okta),有两种路径: +- **推荐**:用一个 IdP (如 Keycloak) 作为联合身份代理,配置多个上游 IdP +- **扩展**:修改 `OIDCProvider` 支持多 issuer(需在 `_providers` 中注册多个实例) + +### Q8: 这套架构对上游 Data Formulator 的兼容性如何? 
+ +`auth.py` 的重构是最大的改动,但保持了 `get_identity_id()` 的签名和返回值格式(`user:xxx` / `browser:xxx`)不变。所有调用 `get_identity_id()` 的代码无需修改。插件系统和凭证保险箱是纯新增代码,不修改任何现有模块。 + +--- + +## 附录 A:开发新插件的检查清单 + +``` +□ 后端 + □ plugins/your_system/__init__.py — 暴露 plugin_class = YourPlugin + □ plugins/your_system/plugin.py — 实现 DataSourcePlugin (manifest + blueprint) + □ plugins/your_system/routes/ — auth, catalog, data 三组路由 + □ plugins/your_system/ — API client, auth bridge 等 + □ 无需修改 plugins/__init__.py(目录自动扫描) + □ 测试:启用/禁用切换正常 + +□ 前端 + □ src/plugins/your_system/index.ts — 导出 DataSourcePluginModule + □ src/plugins/your_system/*Panel.tsx — 主面板组件 + □ src/plugins/your_system/*Login.tsx — 登录组件 (如需要) + □ 无需修改 registry.ts(import.meta.glob 自动发现) + □ 测试:Tab 显示/隐藏正常 + +□ 配置 + □ .env.template 增加环境变量说明 + □ 文档更新 + +□ 认证模式 + □ SSO 透传测试 (如果外部系统支持) + □ Credential Vault 存取测试 + □ 手动登录测试 + □ Session 过期 / Token 刷新测试 +``` + +## 附录 B:关键依赖 + +| 包 | 用途 | 安装 | +|-----|------|------| +| `PyJWT` | OIDC JWT 验签 | `pip install PyJWT` | +| `cryptography` | Fernet 加密 (Vault + JWT) | `pip install cryptography` | +| `oidc-client-ts` | 前端 OIDC PKCE 流程 | `npm install oidc-client-ts` | +| `requests` | 插件 HTTP 调用 (已有) | — | + +## 附录 C:与原有插件架构文档的关系 + +本文档是 `data-source-plugin-architecture.md` 的**上层补充**。原文档详细描述了: +- 插件基类和前端接口的完整设计 +- Superset 0.6→0.7 迁移的具体方案 +- PluginDataWriter 和 BatchWriter 的完整实现 +- 身份链接表 (IdentityStore) 的详细设计 + +本文档新增的内容: +- **AuthProvider 可插拔认证层** — 原文档假设浏览器 UUID 为主要身份,本文档用 OIDC 替代 +- **CredentialVault 凭证保险箱** — 原文档中插件凭证存在 Flask Session,本文档增加持久化加密存储 +- **SSO Token 透传** — 原文档中每个插件独立认证,本文档增加 SSO 自动透传 +- **身份模型简化** — 有了 SSO,原文档中复杂的 IdentityStore + 身份合并被简化为一次性 browser→user 迁移 + +两份文档互补使用:本文档定义整体架构和集成方式,原文档提供插件内部的详细实现指导。 diff --git a/design-docs/2-external-dataloader-enhancements.md b/design-docs/2-external-dataloader-enhancements.md new file mode 100644 index 00000000..9ace229a --- /dev/null +++ b/design-docs/2-external-dataloader-enhancements.md @@ -0,0 +1,787 @@ +# ExternalDataLoader 演进方案 + +> **来源**:从 
`1-data-source-plugin-architecture.md` Section 11.3~11.6 拆分。 +> 这些改进针对现有的 ExternalDataLoader(数据库连接器),与 DataSourcePlugin(BI 系统插件)互不干扰。 + +--- + +## 目录 + +1. [现有缺陷与演进方向](#1-现有缺陷与演进方向) +2. [改进方案一:数据库元数据拉取 (P0)](#2-改进方案一数据库元数据拉取-p0) +3. [改进方案二:SSO Token 透传到数据库 (P1)](#3-改进方案二sso-token-透传到数据库-p1) +4. [改进方案三:凭证持久化 (P2)](#4-改进方案三凭证持久化-p2) + +--- + +## 1. 现有缺陷与演进方向 + +审查了全部 9 个 DataLoader 后,发现两大类可改进的问题: + +### 缺陷一:数据库元数据(注释/描述)未拉取 + +所有 DataLoader 的 `list_tables()` 只查了 `information_schema` 的列名和数据类型,**完全忽略了数据库自带的注释系统**。这些注释是 DBA 维护的宝贵业务知识: + +| DataLoader | 只查了 | 数据库有但没查的 | 查询方法 | +|---|---|---|---| +| **PostgreSQL** | `column_name`, `data_type` | 表/列注释 (`COMMENT ON`) | `SELECT obj_description(oid) FROM pg_class` + `SELECT col_description(attrelid, attnum) FROM pg_attribute` | +| **MSSQL** | `COLUMN_NAME`, `DATA_TYPE`, `IS_NULLABLE` | 扩展属性 (`MS_Description`) | `SELECT value FROM sys.extended_properties WHERE name='MS_Description'` | +| **BigQuery** | `field.name`, `field.field_type` | `table.description`, `field.description` | `table_ref.description`, `field.description`(已有 API,一行代码) | +| **MySQL** | (预估同样缺失) | `COLUMN_COMMENT` | `SELECT COLUMN_COMMENT FROM information_schema.COLUMNS` | +| **Kusto** | `Name`, `Type` | 表和列的 DocString | `.show table T schema` 返回的 `DocString` 字段 | + +**改进方案**:扩展 `list_tables()` 返回的 `columns` 结构,增加 `description` 字段。以 PostgreSQL 为例: + +```python +# postgresql_data_loader.py — list_tables() 增加注释查询 + +# 现有查询只拿了列名和类型: +columns_query = """ + SELECT column_name, data_type + FROM information_schema.columns ... 
+""" + +# 改进后同时查询列注释: +columns_query = """ + SELECT + c.column_name, + c.data_type, + pgd.description AS column_comment + FROM information_schema.columns c + LEFT JOIN pg_catalog.pg_statio_all_tables st + ON st.schemaname = c.table_schema AND st.relname = c.table_name + LEFT JOIN pg_catalog.pg_description pgd + ON pgd.objoid = st.relid AND pgd.objsubid = c.ordinal_position + WHERE c.table_schema = '{schema}' AND c.table_name = '{table_name}' + ORDER BY c.ordinal_position +""" + +# 表注释也一并查询: +table_comment_query = """ + SELECT obj_description(c.oid) AS table_comment + FROM pg_catalog.pg_class c + JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + WHERE n.nspname = '{schema}' AND c.relname = '{table_name}' +""" +``` + +BigQuery 改进更简单(已有现成属性,只是没用): + +```python +# bigquery_data_loader.py — list_tables() 中已经有 table_ref,只是没取 description + +table_ref = self.client.get_table(table.reference) + +# 现有代码: +columns = [{"name": field.name, "type": field.field_type} for field in table_ref.schema[:10]] + +# 改进后: +columns = [{ + "name": field.name, + "type": field.field_type, + "description": field.description, # ← 一行代码,BigQuery SDK 直接支持 +} for field in table_ref.schema[:10]] + +# 表描述也直接有: +table_description = table_ref.description # ← 同样一行 +``` + +**改进成本**:每个 DataLoader 改 5-20 行代码即可。注释不存在的列/表返回 `None`,与 `ColumnInfo` 扩展字段完美对齐——对前端和 AI prompt 的提升效果与插件拉取的元数据一致。 + +### 缺陷二:认证方式单一,缺少 SSO/集成认证 + +现有 DataLoader 的认证能力参差不齐,很多数据库明明支持 SSO 或集成认证,但 DataLoader 只实现了用户名/密码模式: + +| DataLoader | 现有认证 | 数据库支持但 DataLoader 未实现的 | +|---|---|---| +| **PostgreSQL** | user + password | Kerberos/GSSAPI; Azure AD token (`password=`); AWS IAM token | +| **MSSQL** | user/password 或 Windows Auth | Azure AD token (`Authentication=ActiveDirectoryAccessToken`) | +| **BigQuery** | Service Account JSON 或 ADC | OIDC 联邦身份 (`google.auth.identity_pool`); Workforce Identity | +| **Kusto** | App Key 或 `az login` | Azure AD user token (`with_aad_user_token_authentication`) | +| **Snowflake** | (未实现) | OAuth 2.0 
token (`authenticator='oauth'`, `token=`) | +| **Databricks** | (未实现) | Azure AD token / PAT | + +Kusto 和 BigQuery 已经有了 CLI 认证(`az login` / `gcloud auth`),但这要求用户**在服务器终端上手动执行命令**——在团队部署模式下不现实。 + +**改进方案**:在 `ExternalDataLoader` 基类中增加 SSO token 注入能力。 + +```python +class ExternalDataLoader(ABC): + + @staticmethod + def supported_auth_methods() -> list[str]: + """返回支持的认证方式列表。 + + 可选值: + - "credentials" — 用户名/密码(默认,所有 loader 都支持) + - "sso_token" — OIDC/OAuth access_token + - "azure_ad_token" — Azure AD access_token + - "iam_token" — AWS IAM 认证 token + - "service_account" — 服务账号 JSON key + - "cli" — 本地 CLI 认证(az login / gcloud auth) + """ + return ["credentials"] + + def set_auth_token(self, token: str, token_type: str = "bearer") -> None: + """注入来自 DF SSO 层的认证 token(可选实现)。""" + raise NotImplementedError( + f"{self.__class__.__name__} does not support token injection" + ) +``` + +### 缺陷三:凭证不持久化 + +当前 DataLoader 的连接参数(包括密码)存在**前端 Redux Store** 中,刷新页面即丢失。用户每次打开都要重新输入。接入 CredentialVault(`sso-plugin-architecture.md` 中设计)后可以提供"记住密码"能力。 + +### 综合改进路线图 + +| 改进项 | 复杂度 | 价值 | 优先级 | 前置依赖 | +|--------|:---:|:---:|:---:|------| +| **数据库注释拉取** | 低(5-20 行/loader) | 高(直接提升 AI 分析质量) | P0 | 无(现在可做) | +| **SSO Token 透传** | 中 | 高(团队部署必需) | P1 | SSO AuthProvider 上线 | +| **凭证持久化** | 中 | 中(用户体验提升) | P2 | CredentialVault 上线 | +| **升级为 Plugin** | 高 | 低 | P3 | 仅 Snowflake 等需要 | + +--- + +## 2. 改进方案一:数据库元数据拉取 (P0) + +**目标**:让 DataLoader 在 `list_tables()` 和 `ingest_to_workspace()` 时一并拉取数据库的表/列注释,写入 `ColumnInfo` 和 `TableMetadata`,最终流入前端 AI prompt。 + +**不修改基类接口**,仅在各 DataLoader 内部实现中增强查询逻辑。对前端无感,通过现有 `list-tables` API 自然下发。 + +### 2.1 基类增加可选方法 + +```python +# external_data_loader.py — 新增可选方法 + +class ExternalDataLoader(ABC): + # ... 现有方法不变 ... 
+ + def fetch_table_description(self, source_table: str) -> str | None: + """获取表的描述/注释(可选实现)。""" + return None + + def fetch_column_descriptions(self, source_table: str) -> dict[str, str]: + """获取各列的描述/注释(可选实现)。 + + Returns: + {"column_name": "列描述", ...},缺失注释的列不含在结果中。 + """ + return {} +``` + +这两个方法**不是 abstract 的**——默认返回空,对现有 loader 零影响。哪个 loader 想支持元数据就 override 即可。 + +### 2.2 各 DataLoader 的具体实现 + +**PostgreSQL**: + +```python +# postgresql_data_loader.py — 新增方法 + +def fetch_table_description(self, source_table: str) -> str | None: + schema, table = self._parse_table_name(source_table) + query = f""" + SELECT obj_description(c.oid) AS comment + FROM pg_catalog.pg_class c + JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + WHERE n.nspname = '{schema}' AND c.relname = '{table}' + """ + result = self._read_sql(query).to_pandas() + if len(result) > 0 and result.iloc[0]['comment']: + return str(result.iloc[0]['comment']) + return None + +def fetch_column_descriptions(self, source_table: str) -> dict[str, str]: + schema, table = self._parse_table_name(source_table) + query = f""" + SELECT + a.attname AS column_name, + d.description AS comment + FROM pg_catalog.pg_attribute a + JOIN pg_catalog.pg_class c ON c.oid = a.attrelid + JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + LEFT JOIN pg_catalog.pg_description d + ON d.objoid = a.attrelid AND d.objsubid = a.attnum + WHERE n.nspname = '{schema}' + AND c.relname = '{table}' + AND a.attnum > 0 + AND NOT a.attisdropped + AND d.description IS NOT NULL + """ + result = self._read_sql(query).to_pandas() + return {row['column_name']: row['comment'] for _, row in result.iterrows()} +``` + +**MSSQL**: + +```python +# mssql_data_loader.py — 新增方法 + +def fetch_table_description(self, source_table: str) -> str | None: + schema, table = self._parse_table_name(source_table) + query = f""" + SELECT CAST(ep.value AS NVARCHAR(MAX)) AS comment + FROM sys.extended_properties ep + JOIN sys.tables t ON ep.major_id = 
t.object_id + JOIN sys.schemas s ON t.schema_id = s.schema_id + WHERE s.name = '{schema}' AND t.name = '{table}' + AND ep.minor_id = 0 AND ep.name = 'MS_Description' + """ + result = self._execute_query(query).to_pandas() + if len(result) > 0 and result.iloc[0]['comment']: + return str(result.iloc[0]['comment']) + return None + +def fetch_column_descriptions(self, source_table: str) -> dict[str, str]: + schema, table = self._parse_table_name(source_table) + query = f""" + SELECT c.name AS column_name, + CAST(ep.value AS NVARCHAR(MAX)) AS comment + FROM sys.columns c + JOIN sys.tables t ON c.object_id = t.object_id + JOIN sys.schemas s ON t.schema_id = s.schema_id + LEFT JOIN sys.extended_properties ep + ON ep.major_id = c.object_id + AND ep.minor_id = c.column_id + AND ep.name = 'MS_Description' + WHERE s.name = '{schema}' AND t.name = '{table}' + AND ep.value IS NOT NULL + """ + result = self._execute_query(query).to_pandas() + return {row['column_name']: row['comment'] for _, row in result.iterrows()} +``` + +**BigQuery**(最简单——SDK 已有属性,只需取出): + +```python +# bigquery_data_loader.py — 新增方法 + +def fetch_table_description(self, source_table: str) -> str | None: + table_ref = self.client.get_table(source_table) + return table_ref.description or None + +def fetch_column_descriptions(self, source_table: str) -> dict[str, str]: + table_ref = self.client.get_table(source_table) + return { + field.name: field.description + for field in table_ref.schema + if field.description + } +``` + +**MySQL**: + +```python +# mysql_data_loader.py — 新增方法 + +def fetch_table_description(self, source_table: str) -> str | None: + schema, table = self._parse_table_name(source_table) + query = f""" + SELECT TABLE_COMMENT + FROM information_schema.TABLES + WHERE TABLE_SCHEMA = '{schema}' AND TABLE_NAME = '{table}' + """ + result = self._read_sql(query).to_pandas() + comment = result.iloc[0]['TABLE_COMMENT'] if len(result) > 0 else None + return comment if comment and comment.strip() else None 
+ +def fetch_column_descriptions(self, source_table: str) -> dict[str, str]: + schema, table = self._parse_table_name(source_table) + query = f""" + SELECT COLUMN_NAME, COLUMN_COMMENT + FROM information_schema.COLUMNS + WHERE TABLE_SCHEMA = '{schema}' AND TABLE_NAME = '{table}' + AND COLUMN_COMMENT IS NOT NULL AND COLUMN_COMMENT != '' + """ + result = self._read_sql(query).to_pandas() + return {row['COLUMN_NAME']: row['COLUMN_COMMENT'] for _, row in result.iterrows()} +``` + +### 2.3 元数据如何写入 Workspace + +在 `ingest_to_workspace()` 中调用这两个方法,将注释写入 `ColumnInfo` 和 `TableMetadata`: + +```python +# external_data_loader.py — ingest_to_workspace 增强 + +def ingest_to_workspace(self, workspace, table_name, source_table, size=1000000, + sort_columns=None, sort_order='asc'): + arrow_table = self.fetch_data_as_arrow(source_table, size, sort_columns, sort_order) + + loader_metadata = { + "loader_type": self.__class__.__name__, + "loader_params": self.get_safe_params(), + "source_table": source_table, + } + + table_metadata = workspace.write_parquet_from_arrow( + table=arrow_table, table_name=table_name, loader_metadata=loader_metadata, + ) + + # ---- 新增:拉取并写入元数据 ---- + try: + table_desc = self.fetch_table_description(source_table) + col_descs = self.fetch_column_descriptions(source_table) + + if table_desc or col_descs: + from data_formulator.datalake.metadata import ColumnInfo, update_metadata + + def _enrich(meta): + tbl = meta.get_table(table_name) + if tbl is None: + return + if table_desc: + tbl.description = table_desc + if col_descs and tbl.columns: + for col in tbl.columns: + desc = col_descs.get(col.name) + if desc: + col.description = desc + + update_metadata(workspace._workspace_path, _enrich) + logger.info("Enriched metadata for '%s': table_desc=%s, col_descs=%d", + table_name, bool(table_desc), len(col_descs)) + except Exception as e: + logger.warning("Failed to fetch metadata for '%s': %s (data is still saved)", + source_table, e) + # ---- 元数据增强结束,失败不影响数据写入 ---- + + 
return table_metadata +``` + +**关键设计决策**:元数据拉取在数据写入**之后**,用 try/except 包裹。即使元数据查询失败(权限不足、数据库不支持等),数据本身已经安全写入 workspace。 + +### 2.4 `list_tables()` 返回值增强 + +同时在 `list_tables()` 中也返回注释信息,让前端在浏览数据库表时就能看到描述: + +```python +# postgresql_data_loader.py — list_tables 增强 + +def _list_tables(self, table_filter=None): + # ... 现有的 tables 查询 ... + + for _, row in tables_df.iterrows(): + schema = row['schemaname'] + table_name = row['tablename'] + full_table_name = f"{schema}.{table_name}" + + # 列信息(现有)+ 列注释(新增) + columns_query = f""" + SELECT + c.column_name, + c.data_type, + pgd.description AS column_comment + FROM information_schema.columns c + LEFT JOIN pg_catalog.pg_statio_all_tables st + ON st.schemaname = c.table_schema AND st.relname = c.table_name + LEFT JOIN pg_catalog.pg_description pgd + ON pgd.objoid = st.relid AND pgd.objsubid = c.ordinal_position + WHERE c.table_schema = '{schema}' AND c.table_name = '{table_name}' + ORDER BY c.ordinal_position + """ + columns_df = self._read_sql(columns_query).to_pandas() + columns = [{ + 'name': r['column_name'], + 'type': r['data_type'], + 'description': r['column_comment'] or None, # ← 新增 + } for _, r in columns_df.iterrows()] + + # 表注释(新增) + table_comment = self.fetch_table_description(full_table_name) + + table_metadata = { + "row_count": int(row_count), + "columns": columns, + "sample_rows": sample_rows, + "description": table_comment, # ← 新增 + } + results.append({"name": full_table_name, "metadata": table_metadata}) +``` + +### 2.5 前端展示(自然兼容) + +现有前端 `DBManagerPane` 已经渲染了 `columns` 列表。只需小幅调整,当 `column.description` 存在时显示 tooltip: + +``` +┌─────────────────────────────────────────────────┐ +│ public.orders — 订单主表,记录所有用户订单 │ ← table description +│ │ +│ 列名 类型 描述 │ +│ ────── ──── ──── │ +│ id integer 订单唯一标识 │ +│ customer_id integer 关联客户表 (FK) │ +│ created_at timestamp 订单创建时间(UTC) │ +│ total_amount numeric 订单总金额(含税) │ ← column descriptions +│ status varchar pending/paid/shipped │ +│ │ +│ 行数: 1,234,567 [ 加载 ▾ ] │ 
+└─────────────────────────────────────────────────┘ +``` + +### 2.6 改动文件清单 + +| 文件 | 改动 | 行数估计 | +|------|------|:---:| +| `external_data_loader.py` | 新增 `fetch_table_description()`、`fetch_column_descriptions()` 默认实现;`ingest_to_workspace()` 增加元数据写入 | ~30 行 | +| `postgresql_data_loader.py` | 实现两个描述方法 + `list_tables()` 查询增强 | ~25 行 | +| `mssql_data_loader.py` | 实现两个描述方法 + `list_tables()` 查询增强 | ~30 行 | +| `bigquery_data_loader.py` | 实现两个描述方法(各 3 行)+ `list_tables()` 取 description | ~10 行 | +| `mysql_data_loader.py` | 实现两个描述方法 + `list_tables()` 取 `COLUMN_COMMENT` | ~20 行 | +| `kusto_data_loader.py` | 实现两个描述方法(从 schema JSON 取 DocString) | ~15 行 | +| `metadata.py` | `ColumnInfo` 增加 `description` 字段;`TableMetadata` 增加 `description` 字段 | ~15 行 | +| **前端 `DBManagerPane`** | columns 列表显示 description tooltip | ~10 行 | +| **总计** | | **~155 行** | + +--- + +## 3. 改进方案二:SSO Token 透传到数据库 (P1) + +**目标**:当 DF 用户通过 OIDC SSO 登录后,如果目标数据库也信任同一 IdP,DataLoader 自动使用 SSO token 连接数据库,用户无需输入数据库密码。 + +**前置条件**:SSO AuthProvider 链已上线(`sso-plugin-architecture.md` Phase 1)。 + +### 3.1 基类增加认证能力声明 + +```python +# external_data_loader.py — 新增 + +class ExternalDataLoader(ABC): + # ... 现有方法 ... 
+ + @staticmethod + def supported_auth_methods() -> list[dict]: + """声明该 loader 支持哪些认证方式。 + + 框架据此在前端渲染不同的认证 UI(密码表单 vs SSO 按钮等)。 + """ + return [ + {"method": "credentials", "label": "Username & Password", "default": True}, + ] +``` + +各 DataLoader 按实际能力 override: + +```python +# mssql_data_loader.py +@staticmethod +def supported_auth_methods(): + return [ + {"method": "credentials", "label": "SQL Server Authentication"}, + {"method": "windows_auth", "label": "Windows Integrated Authentication"}, + {"method": "azure_ad_token", "label": "Azure AD (SSO)", + "requires_sso": True, "token_audience": "https://database.windows.net/"}, + ] + +# postgresql_data_loader.py +@staticmethod +def supported_auth_methods(): + return [ + {"method": "credentials", "label": "Username & Password", "default": True}, + {"method": "azure_ad_token", "label": "Azure AD (SSO)", + "requires_sso": True, "token_audience": "https://ossrdbms-aad.database.windows.net"}, + {"method": "aws_iam_token", "label": "AWS IAM", + "requires_env": ["AWS_REGION"]}, + ] + +# bigquery_data_loader.py +@staticmethod +def supported_auth_methods(): + return [ + {"method": "service_account", "label": "Service Account JSON", "default": True}, + {"method": "cli", "label": "gcloud CLI (local dev)"}, + {"method": "oidc_federation", "label": "OIDC Federation (SSO)", + "requires_sso": True}, + ] +``` + +### 3.2 Token 注入流程 + +``` +用户通过 OIDC SSO 登录 DF + → 获得 access_token(存在 AuthResult 中) + → 用户打开数据库连接面板 + +前端渲染: + GET /api/data-loader/postgresql/auth-methods + → 返回 supported_auth_methods() + → 发现有 "azure_ad_token",且 requires_sso=true + → DF 当前有 SSO 登录 → 显示「使用 SSO 连接」按钮 + +用户点击「使用 SSO 连接」: + → 前端带上 auth_method: "azure_ad_token" + → 后端从 auth.get_sso_token() 拿到 access_token + → 如果 token_audience 不同,用 token exchange 获取目标 audience 的 token + → 将 token 注入 DataLoader 的 params + → DataLoader 用 token 连接数据库 +``` + +### 3.3 各数据库的 Token 认证实现 + +**Azure SQL / MSSQL**: + +```python +def __init__(self, params): + auth_method = 
params.get("auth_method", "credentials")
+
+    if auth_method == "azure_ad_token":
+        token = params["access_token"]
+        conn_str = f"DRIVER={{{self.driver}}};SERVER={self.server},{self.port};DATABASE={self.database};"
+
+        SQL_COPT_SS_ACCESS_TOKEN = 1256
+        token_bytes = token.encode("utf-16-le")
+        token_struct = struct.pack(
+            f'<I{len(token_bytes)}s', len(token_bytes), token_bytes
+        )
+        self.conn = pyodbc.connect(
+            conn_str, attrs_before={SQL_COPT_SS_ACCESS_TOKEN: token_struct}
+        )
+    else:
+        # 现有用户名/密码连接逻辑保持不变
+        ...
+```
+
+> NOTE(review): 原文此处的 PostgreSQL / BigQuery / Kusto token 注入示例在格式转换中丢失,待补。
+
+### 3.4 前端认证方式选择 UI
+
+```tsx
+// DBManagerPane.tsx — 根据 /auth-methods 返回值动态渲染认证方式
+const authMethods = loaderMeta.auth_methods;
+const hasSsoMethod = authMethods.some(m => m.requires_sso);
+const isSsoLoggedIn = !!serverConfig.auth_user;
+
+{authMethods.length > 1 && (
+  <RadioGroup row value={selectedAuthMethod} onChange={e => setSelectedAuthMethod(e.target.value)}>
+    {authMethods.map(m => (
+      <FormControlLabel
+        key={m.method}
+        value={m.method}
+        label={m.label}
+        disabled={m.requires_sso && !isSsoLoggedIn}
+        control={<Radio />}
+        />
+    ))}
+  </RadioGroup>
+)}
+
+{selectedAuthMethod === "credentials" && (
+  <>
+    <TextField label="Username" value={params.user} />
+    <TextField label="Password" type="password" value={params.password} />
+  </>
+)}
+{selectedAuthMethod === "azure_ad_token" && (
+  <Alert severity="info">
+    {t('db.ssoConnectInfo', { user: serverConfig.auth_user })}
+  </Alert>
+)}
+```
+
+### 3.5 改动文件清单
+
+| 文件 | 改动 | 行数估计 |
+|------|------|:---:|
+| `external_data_loader.py` | 新增 `supported_auth_methods()` 默认实现 | ~10 行 |
+| `mssql_data_loader.py` | override `supported_auth_methods()`;`__init__` 增加 `azure_ad_token` 分支 | ~25 行 |
+| `postgresql_data_loader.py` | 同上 | ~20 行 |
+| `bigquery_data_loader.py` | 同上(OIDC federation) | ~20 行 |
+| `kusto_data_loader.py` | 同上(`with_aad_user_token_authentication`) | ~15 行 |
+| `tables_routes.py` | 新增 `/api/data-loader/{type}/auth-methods` 路由 | ~15 行 |
+| **前端** `DBManagerPane.tsx` | 动态渲染认证方式选择 UI | ~40 行 |
+| **总计** | | **~145 行** |
+
+---
+
+## 4. 
改进方案三:凭证持久化 (P2) + +**目标**:用户连接数据库后,可以选择"记住连接",凭证加密存入 CredentialVault,下次打开 DF 无需重新输入。 + +**前置条件**:CredentialVault 已上线(`sso-plugin-architecture.md` Layer 3)。 + +### 4.1 用户体验流程 + +``` +首次连接: + 用户填写 host/port/user/password → 连接成功 + → 弹出「保存此连接?」提示 + → 用户确认 → 凭证加密存入 CredentialVault + 键: credential:dataloader:postgresql:{user_id}:{host}:{database} + 值: AES 加密的 {"user": "...", "password": "...", "host": "...", ...} + +再次打开 DF: + → 前端请求 GET /api/data-loader/saved-connections + → 返回已保存的连接列表(不含明文密码,只有名称和类型) + → 用户点击已保存的连接 → 一键连接 + → 后端从 CredentialVault 解密取出凭证 → 创建 DataLoader + +管理: + → 用户可以查看、删除已保存的连接 + → 删除操作同时清除 CredentialVault 中的加密凭证 +``` + +### 4.2 后端 API + +```python +# tables_routes.py — 新增路由 + +@tables_bp.route('/data-loader/saved-connections', methods=['GET']) +def list_saved_connections(): + """列出当前用户保存的数据库连接(不含明文密码)。""" + user_id = get_identity_id() + vault = get_credential_vault() + connections = vault.list_credentials(user_id, prefix="dataloader:") + return jsonify([{ + "id": conn.credential_id, + "loader_type": conn.metadata.get("loader_type"), + "display_name": conn.metadata.get("display_name"), + "host": conn.metadata.get("host"), + "database": conn.metadata.get("database"), + "saved_at": conn.metadata.get("saved_at"), + } for conn in connections]) + +@tables_bp.route('/data-loader/saved-connections', methods=['POST']) +def save_connection(): + """保存一个数据库连接(凭证加密存储)。""" + data = request.get_json() + user_id = get_identity_id() + vault = get_credential_vault() + + loader_type = data["loader_type"] + params = data["params"] + display_name = data.get("display_name", f"{loader_type}:{params.get('host','')}/{params.get('database','')}") + + credential_id = f"dataloader:{loader_type}:{params.get('host','')}:{params.get('database','')}" + + vault.store_credential( + user_id=user_id, + credential_id=credential_id, + secret_data=params, + metadata={ + "loader_type": loader_type, + "display_name": display_name, + "host": params.get("host"), + "database": 
params.get("database"),
+            "saved_at": datetime.now(timezone.utc).isoformat(),
+        },
+    )
+    return jsonify({"status": "ok", "credential_id": credential_id})
+
+@tables_bp.route('/data-loader/saved-connections/<credential_id>/connect', methods=['POST'])
+def connect_saved(credential_id):
+    """使用已保存的凭证连接数据库。"""
+    user_id = get_identity_id()
+    vault = get_credential_vault()
+
+    cred = vault.get_credential(user_id, credential_id)
+    if cred is None:
+        return jsonify({"status": "error", "message": "Connection not found"}), 404
+
+    loader_type = cred.metadata["loader_type"]
+    params = cred.secret_data  # 自动解密
+
+    loader_cls = DATA_LOADERS.get(loader_type)
+    loader = loader_cls(params)
+    # ... 后续逻辑与手动连接相同 ...
+```
+
+### 4.3 前端 UI
+
+```
+┌─────────────────────────────────────────────────────┐
+│  数据库连接                                          │
+│                                                      │
+│  ┌─── 已保存的连接 ─────────────────────────────────┐ │
+│  │ 🔗 生产 PostgreSQL (pg.company.com/analytics)    │ │
+│  │    上次连接: 2025-03-20      [ 连接 ] [ 🗑 删除 ] │ │
+│  │                                                   │ │
+│  │ 🔗 测试 MSSQL (sql-test/reporting)               │ │
+│  │    上次连接: 2025-03-18      [ 连接 ] [ 🗑 删除 ] │ │
+│  └───────────────────────────────────────────────────┘ │
+│                                                      │
+│  ┌─── 新建连接 ─────────────────────────────────────┐ │
+│  │ 类型: [PostgreSQL ▾]                             │ │
+│  │ 认证: ● 用户名密码 ○ SSO (Azure AD)              │ │
+│  │ Host: [________________]                         │ │
+│  │ ... 
│ │ +│ │ ☑ 记住此连接 │ │ +│ │ │ │ +│ │ [ 连接 ] │ │ +│ └───────────────────────────────────────────────────┘ │ +└─────────────────────────────────────────────────────┘ +``` + +### 4.4 安全性 + +| 措施 | 说明 | +|------|------| +| **加密存储** | 密码等敏感字段通过 CredentialVault 使用 Fernet (AES-128-CBC) 加密 | +| **per-user 隔离** | 每个用户只能访问自己保存的连接(通过 `user_id` 隔离) | +| **不回显密码** | `list_saved_connections` 只返回元数据,不返回明文密码 | +| **手动删除** | 用户随时可以删除已保存的连接和对应的加密凭证 | +| **SSO 优先** | 如果数据库支持 SSO 且 DF 已 SSO 登录,优先推荐 SSO(无需存密码) | + +--- + +## 关联文档 + +| 文档 | 关系 | +|------|------| +| `1-data-source-plugin-architecture.md` § 11.1~11.2 | ExternalDataLoader vs DataSourcePlugin 的分工定义 | +| `1-sso-plugin-architecture.md` | SSO AuthProvider(P1 前置)、CredentialVault(P2 前置) | diff --git a/design-docs/3-language-injection-analysis.md b/design-docs/3-language-injection-analysis.md new file mode 100644 index 00000000..dbac5628 --- /dev/null +++ b/design-docs/3-language-injection-analysis.md @@ -0,0 +1,239 @@ +# Data Formulator 多语言提示词注入分析 + +## 1. 问题概述 + +项目已接入多语言(i18n)支持,核心 Agent 提示词通过 `language_instruction` 参数注入语言指令。但仍有个别 LLM 调用点遗漏了语言注入,导致部分场景下输出语言与用户界面语言不一致。 + +--- + +## 2. 
现有语言注入架构 + +### 2.1 整体流程 + +``` +前端 i18n.language ──► Accept-Language header ──► get_language_instruction() + │ + ▼ + build_language_instruction() + (agent_language.py) + │ + ┌────────────┴────────────┐ + ▼ ▼ + mode="full" mode="compact" + (文本型 Agent) (代码生成 Agent) +``` + +**关键模块**: + +| 模块 | 职责 | +|------|------| +| `src/app/utils.tsx` → `getAgentLanguage()` | 从 `i18n.language` 提取语言代码 | +| `src/app/utils.tsx` → `fetchWithIdentity()` | 在每个 API 请求的 `Accept-Language` header 中注入语言代码 | +| `py-src/.../agents/agent_language.py` | `build_language_instruction(lang, mode)` — 根据语言代码和模式生成提示词片段 | +| `py-src/.../agent_routes.py` → `get_language_instruction()` | 从 `Accept-Language` header 解析语言,调用 `build_language_instruction` | + +### 2.2 已正确注入语言的 Agent + +#### `agent_data_rec.py` 和 `agent_data_transform.py` + +通过构造函数接收 `language_instruction`,注入到 system prompt 中: + +```python +if language_instruction: + marker = "**About the execution environment:**" + idx = self.system_prompt.find(marker) + if idx > 0: + self.system_prompt = ( + self.system_prompt[:idx] + + language_instruction + "\n\n" + + self.system_prompt[idx:] + ) + else: + self.system_prompt = self.system_prompt + "\n\n" + language_instruction +``` + +**注入位置策略**:在 `"**About the execution environment:**"` 标记之前插入,确保语言要求在技术细节之前被声明。如果找不到标记,则追加到末尾。 + +#### `data_agent.py` + +通过 `self.language_instruction` 实例属性,在 `_build_system_prompt()` 中注入: + +```python +def _build_system_prompt(self) -> str: + # ... 构建 prompt ... 
+ if self.language_instruction: + prompt = prompt + "\n\n" + self.language_instruction + return prompt +``` + +#### 其他已注入的路由 + +`agent_routes.py` 中的大部分路由处理函数都已正确调用 `get_language_instruction(mode=...)` 并传入对应 Agent 构造函数。 + +### 2.3 `agent_language.py` 的两种模式 + +| 模式 | 适用场景 | 特点 | +|------|---------|------| +| `"full"` | 文本型 Agent(ChartInsight、InteractiveExplore、ReportGen 等) | 详细的逐字段规则,区分用户可见字段和内部字段 | +| `"compact"` | 代码生成 Agent(DataRec、DataTransformation、DataLoad) | 简短 3 句话指令,避免干扰模型生成代码 | + +支持 20 种语言(en、zh、ja、ko、fr、de 等),当语言为 `"en"` 时返回空字符串(无需注入)。 + +--- + +## 3. 遗漏分析 + +### 3.1 `agent_routes.py` — 工作区命名(需修复) + +```python +# L1073-1086 +messages = [ + { + "role": "system", + "content": ( + "You are a helpful assistant. Generate a very short name (3-5 words) " + "for a data analysis workspace based on the context below. " + "Return ONLY the name, no quotes, no explanation." + ), + }, + {"role": "user", "content": context_str}, +] +``` + +**问题**:工作区名称直接展示在 UI 中,应跟随用户界面语言。当前未注入语言指令,中文用户会看到英文工作区名称。 + +**影响级别**:中 — 用户可见,体验不一致。 + +### 3.2 `agent_routes.py` — 健康检查(无需修复) + +```python +# L227-230 +messages=[ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "Respond 'I can hear you.' if you can hear me. Do not say anything other than 'I can hear you.'"}, +] +``` + +**分析**:这是 `/test-model` 的连通性测试,期望固定返回 `"I can hear you."`。不应注入语言指令,原因: + +- 返回内容不面向最终用户展示 +- 注入语言指令会增加无意义的 token 消耗 +- 如果 LLM 遵从语言指令返回中文,可能导致连通性判断逻辑异常 + +**影响级别**:无 — 无需修改。 + +### 3.3 `agent_utils.py` — 补充代码生成(无需修复) + +```python +# L243-247 +supp_resp = client.get_completion(messages=[ + *messages, + {"role": "assistant", "content": assistant_content}, + {"role": "user", "content": prompt}, +]) +``` + +**分析**:`messages` 列表从上游 Agent 传入,上游 Agent 在构造 system prompt 时已注入了 `language_instruction`。因此补充生成继承了上游的语言指令,无需重复注入。 + +**影响级别**:无 — 无需修改。 + +--- + +## 4. 
修复方案 + +### 4.1 修复工作区命名(唯一需要修改的地方) + +复用现有的 `get_language_instruction()` 函数,使用 `mode="compact"` 模式(因为工作区名称是短文本): + +```python +# agent_routes.py — workspace_summary() + +lang_instruction = get_language_instruction(mode="compact") +lang_suffix = f"\n\n{lang_instruction}" if lang_instruction else "" + +messages = [ + { + "role": "system", + "content": ( + "You are a helpful assistant. Generate a very short name (3-5 words) " + "for a data analysis workspace based on the context below. " + "Return ONLY the name, no quotes, no explanation." + + lang_suffix + ), + }, + {"role": "user", "content": context_str}, +] +``` + +**注意**:`get_language_instruction()` 在 `lang == "en"` 时返回空字符串,所以英文用户不会受影响。 + +### 4.2 不需要新建 `MessageBuilder` 或全局拦截 + +现有架构已经提供了完整的语言注入体系: + +- `agent_language.py` 管理语言模板和生成逻辑 +- `get_language_instruction()` 从请求 header 解析语言 +- 各 Agent 构造函数接收 `language_instruction` 参数 + +**不建议**引入 `MessageBuilder` 工具类或 LLMClient 全局拦截,原因: + +| 方案 | 问题 | +|------|------| +| `MessageBuilder` + 环境变量 | 语言来源从 per-request(`Accept-Language` header)退化为 per-process(环境变量),破坏多用户场景 | +| LLMClient 全局拦截 | 隐式修改 system prompt 导致调试困难;字符串检测 `"**Language Requirement:**"` 脆弱;无法区分 full/compact 模式 | + +--- + +## 5. 防止未来遗漏的建议 + +### 5.1 开发规范 + +每次新增 LLM 调用点时,开发者应检查: + +1. 该调用的输出是否面向用户展示? +2. 如果是,是否调用了 `get_language_instruction()` 并注入到 system prompt 中? +3. 选择正确的 mode:文本型用 `"full"`,短文本/代码型用 `"compact"` + +### 5.2 测试验证 + +实施完成后验证以下场景: + +- [ ] 新建工作区时生成的名称跟随 UI 语言 +- [ ] 数据推荐 Agent 的代码注释和说明跟随 UI 语言 +- [ ] 数据转换 Agent 的解释说明跟随 UI 语言 +- [ ] 数据探索 Agent 的交互内容跟随 UI 语言 +- [ ] 英文用户不受影响(`build_language_instruction("en")` 返回 `""`) + +--- + +## 6. 
相关文件 + +| 文件路径 | 说明 | +|---------|------| +| `py-src/data_formulator/agents/agent_language.py` | 语言指令构建核心模块(模板、模式、多语言支持) | +| `py-src/data_formulator/agent_routes.py` | 路由层:`get_language_instruction()` + 各端点调用 | +| `py-src/data_formulator/agents/data_agent.py` | 数据探索 Agent(已有语言注入) | +| `py-src/data_formulator/agents/agent_data_rec.py` | 数据推荐 Agent(已有语言注入) | +| `py-src/data_formulator/agents/agent_data_transform.py` | 数据转换 Agent(已有语言注入) | +| `py-src/data_formulator/agents/agent_utils.py` | 补充代码生成工具(继承上游语言指令) | +| `src/app/utils.tsx` | 前端:`getAgentLanguage()` + `fetchWithIdentity()` | +| `src/i18n/index.ts` | 前端 i18n 配置(i18next + LanguageDetector) | + +--- + +## 7. 附录:`agent_language.py` 架构说明 + +### 语言注册表 + +`LANGUAGE_DISPLAY_NAMES` 定义了 20 种语言的显示名称,用于生成提示词中的语言标识。 + +### 特定语言的额外规则 + +`LANGUAGE_EXTRA_RULES` 为特定语言提供补充说明(如中文要求使用简体中文、日文要求使用敬体)。 + +### 输出逻辑 + +- 当 `language == "en"` 时,返回空字符串(不注入任何语言指令) +- 当 `language != "en"` 时,根据 `mode` 参数返回 full 或 compact 格式的语言指令 +- full 模式详细列出哪些字段用目标语言、哪些字段保持英文 +- compact 模式仅用 3 句话说明基本规则,适合代码生成场景 diff --git a/design-docs/6-path-safety-confined-dir.md b/design-docs/6-path-safety-confined-dir.md new file mode 100644 index 00000000..fd36a23c --- /dev/null +++ b/design-docs/6-path-safety-confined-dir.md @@ -0,0 +1,607 @@ +# 服务端路径安全加固 — ConfinedDir 统一防护 + +> **来源**:CodeQL `py/path-injection` 审计 + 全量代码人工审查。 +> **目标**:将"根目录 + 用户/外部输入"拼接这一高频模式收敛到单一原语,消除散落式校验遗漏。 + +--- + +## 目录 + +1. [问题分析](#1-问题分析) +2. [现有防护盘点](#2-现有防护盘点) +3. [方案设计:ConfinedDir](#3-方案设计confineddir) +4. [改造清单](#4-改造清单) +5. [HTTP 响应头注入修复](#5-http-响应头注入修复) +6. [测试计划](#6-测试计划) +7. [实施步骤](#7-实施步骤) +8. [后续防线](#8-后续防线) + +--- + +## 1. 
问题分析 + +### 1.1 攻击面 + +服务端存在"根目录 + 不可信子路径"拼接的场景: + +| 场景 | 根目录 | 子路径来源 | 攻击方式 | +|------|--------|-----------|---------| +| 用户上传文件 | workspace `data/` | HTTP `filename` | `../../etc/passwd` | +| Azure Blob 物化到本地 | `tempfile.mkdtemp()` | Blob 名称 | blob key 含 `../` 段 | +| Session zip 导入 | 临时目录 | zip 内文件路径 | Zip-Slip | +| Cache 层本地镜像 | `~/.data_formulator/cache/` | blob 相对路径 | blob key 含 `../` 段 | +| Workspace ID → 目录名 | `workspaces//` | 用户/浏览器 ID | `../../other_user` | + +### 1.2 现有代码的核心问题 + +防护**分散在调用点**,每个开发者写新代码时必须主动记住调用清洗函数。遗漏 = 漏洞。 + +**已发现的 2 处遗漏**: + +1. `azure_blob_workspace.py` — `local_dir()` (第 576–598 行)、`save_workspace_snapshot()` (第 616–626 行) + - blob 相对路径直接拼接到临时目录,无 `..` 段检查或 `resolve()` 校验 + - 如果 blob 名称包含 `..` 段,可写出临时目录之外(任意文件写入) + +2. `tables_routes.py` — `export-table-csv` (第 667–672 行) + - `table_name` 直接拼入 `Content-Disposition` 响应头 + - 可注入 `"`、`\r\n` 等字符,造成 HTTP 响应头注入 + +### 1.3 为什么"每次手动校验"不可持续 + +- 0.7 已有 **87 个 Python 文件**,路径操作分布在 datalake、routes、sandbox、plugins 多个模块 +- 未来 Plugin 系统会引入第三方开发者写的路径代码,控制力更弱 +- CodeQL 对"先拼接后校验"模式报误报,开发者可能习惯性忽略告警 + +--- + +## 2. 现有防护盘点 + +### 2.1 已有的防护措施(保留并继续使用) + +| 措施 | 位置 | 作用 | +|------|------|------| +| `safe_data_filename()` | `parquet_utils.py:42-66` | 提取 basename,拒绝 `.` / `..`,保留 Unicode | +| `Workspace.get_file_path()` | `workspace.py:226-252` | `safe_data_filename` + `resolve().relative_to()` 双重校验 | +| `CachedAzureBlobWorkspace._cache_path()` | `cached_azure_blob_workspace.py:230-242` | `resolve()` + `is_relative_to()` | +| `Workspace._sanitize_identity_id()` | `workspace.py:205-218` | `secure_filename` 清洗用户 ID | +| `WorkspaceManager._safe_id()` | `workspace_manager.py:72-78` | `secure_filename` 清洗 workspace ID | +| `import_session_zip()` | `workspace.py:770-784` | 逐段 `secure_filename` + 跳过空段 | + +### 2.2 这些措施的共同模式 + +每处防护本质上在做同一件事: + +``` +给定 root_dir + untrusted_relative_path: + 1. 清洗 untrusted_relative_path(basename / 拒绝 .. / secure_filename) + 2. 拼接:candidate = root_dir / cleaned_path + 3. 
校验:candidate.resolve().is_relative_to(root_dir.resolve()) + 4. 否则 raise ValueError +``` + +**ConfinedDir 就是把这四步封装成一个对象。** + +--- + +## 3. 方案设计:ConfinedDir + +### 3.1 核心类 + +新增文件 `py-src/data_formulator/security/path_safety.py`: + +```python +"""Path confinement primitive — prevents path traversal at the API level. + +Usage: + jail = ConfinedDir("/tmp/workspace") + safe = jail / "data/sales.parquet" # OK + jail / "../etc/passwd" # raises ValueError + jail.write("data/out.parquet", raw_bytes) # resolve + mkdir + write +""" + +from __future__ import annotations + +import logging +import re +from pathlib import Path + +logger = logging.getLogger(__name__) + + +class ConfinedDir: + """A directory jail that prevents any path operation from escaping its root. + + All path resolution goes through this single chokepoint. If the + resolved path escapes the root, ``ValueError`` is raised immediately. + + Thread-safe: instances are immutable after construction; Path.resolve() + and is_relative_to() are OS-level and inherently safe for concurrent use. + """ + + __slots__ = ("_root",) + + def __init__(self, root: Path | str, *, mkdir: bool = True): + self._root = Path(root).resolve() + if mkdir: + self._root.mkdir(parents=True, exist_ok=True) + + # -- properties -------------------------------------------------------- + + @property + def root(self) -> Path: + """The resolved, canonical root directory.""" + return self._root + + # -- core API ---------------------------------------------------------- + + def resolve(self, relative: str, *, mkdir_parents: bool = False) -> Path: + """Resolve *relative* within this jail. + + Raises ``ValueError`` if the result would escape the root. + + Defence is layered: + 1. Reject absolute paths outright. + 2. Reject path segments equal to ``..``. + 3. Join onto root, canonicalise with ``resolve()``, and confirm + the result is still under root (catches symlink escapes). 
+ """ + if not relative: + raise ValueError("Empty relative path") + if Path(relative).is_absolute(): + raise ValueError(f"Absolute path not allowed: {relative!r}") + + parts = Path(relative).parts + if ".." in parts: + raise ValueError(f"Path traversal segment '..' in: {relative!r}") + + candidate = (self._root / relative).resolve() + if not candidate.is_relative_to(self._root): + raise ValueError( + f"Path escapes confined directory: {relative!r} " + f"resolves to {candidate}" + ) + + if mkdir_parents: + candidate.parent.mkdir(parents=True, exist_ok=True) + + return candidate + + def write(self, relative: str, data: bytes) -> Path: + """Resolve, create parent dirs, and write *data* atomically.""" + target = self.resolve(relative, mkdir_parents=True) + target.write_bytes(data) + return target + + def __truediv__(self, relative: str) -> Path: + """Operator overload: ``jail / "sub/path"`` → ``jail.resolve("sub/path")``.""" + return self.resolve(relative) + + def __repr__(self) -> str: + return f"ConfinedDir({self._root})" +``` + +### 3.2 设计要点 + +| 要点 | 说明 | +|------|------| +| **不可变** | 构造后 `_root` 不可修改,线程安全 | +| **三层防御** | 拒绝绝对路径 → 拒绝 `..` 段 → `resolve()` + `is_relative_to()` | +| **Symlink 安全** | `resolve()` 在 OS 层面展开符号链接后再检查包含关系 | +| **操作符重载** | `jail / "sub/path"` 语法糖,让调用点代码简洁 | +| **mkdir 内置** | `write()` 方法自动创建父目录,减少调用点样板代码 | + +### 3.3 与现有 API 的兼容策略 + +`ConfinedDir` 作为**底层原语**引入,不替换现有的 `safe_data_filename()` 或 `secure_filename()`。层次关系: + +``` +调用者传入的 filename / relative_path + │ + ▼ +safe_data_filename() / secure_filename() ← 第一层:输入清洗 + │ + ▼ +ConfinedDir.resolve() ← 第二层:路径约束(新增) + │ + ▼ +最终的 Path 对象 ← 安全的文件路径 +``` + +### 3.4 在 security 包中注册 + +更新 `py-src/data_formulator/security/__init__.py`: + +```python +from data_formulator.security.path_safety import ConfinedDir + +__all__ = [ + ..., + "ConfinedDir", +] +``` + +--- + +## 4. 
改造清单 + +### 4.1 [漏洞] `AzureBlobWorkspace.local_dir()` — 中等风险 + +**文件**:`py-src/data_formulator/datalake/azure_blob_workspace.py`,第 576–598 行 + +**Before**: + +```python +@contextmanager +def local_dir(self): + tmp = tempfile.mkdtemp(prefix="df_blob_ws_") + tmp_path = Path(tmp) + try: + for blob in self._container.list_blobs(name_starts_with=self._prefix): + rel = blob.name[len(self._prefix):] + if not rel or rel == METADATA_FILENAME: + continue + local_file = tmp_path / rel # ← 无校验 + local_file.parent.mkdir(parents=True, exist_ok=True) + data = self._container.download_blob(blob.name).readall() + local_file.write_bytes(data) + yield tmp_path + finally: + shutil.rmtree(tmp, ignore_errors=True) +``` + +**After**: + +```python +from data_formulator.security.path_safety import ConfinedDir + +@contextmanager +def local_dir(self): + tmp = tempfile.mkdtemp(prefix="df_blob_ws_") + tmp_path = Path(tmp) + jail = ConfinedDir(tmp_path, mkdir=False) + try: + for blob in self._container.list_blobs(name_starts_with=self._prefix): + rel = blob.name[len(self._prefix):] + if not rel or rel == METADATA_FILENAME: + continue + try: + data = self._container.download_blob(blob.name).readall() + jail.write(rel, data) + except ValueError: + logger.warning( + "Skipping blob with unsafe path: %s", blob.name, + ) + yield tmp_path + finally: + shutil.rmtree(tmp, ignore_errors=True) +``` + +### 4.2 [漏洞] `AzureBlobWorkspace.save_workspace_snapshot()` — 中等风险 + +**文件**:同上,第 616–626 行 + +**Before**: + +```python +def save_workspace_snapshot(self, dst: Path) -> None: + for blob in self._container.list_blobs(name_starts_with=self._prefix): + rel = blob.name[len(self._prefix):] + if not rel: + continue + dst.mkdir(parents=True, exist_ok=True) + local_file = dst / rel # ← 无校验 + local_file.parent.mkdir(parents=True, exist_ok=True) + data = self._container.download_blob(blob.name).readall() + local_file.write_bytes(data) +``` + +**After**: + +```python +def save_workspace_snapshot(self, dst: Path) -> 
None: + jail = ConfinedDir(dst) + for blob in self._container.list_blobs(name_starts_with=self._prefix): + rel = blob.name[len(self._prefix):] + if not rel: + continue + try: + data = self._container.download_blob(blob.name).readall() + jail.write(rel, data) + except ValueError: + logger.warning( + "Skipping blob with unsafe path in snapshot: %s", blob.name, + ) +``` + +### 4.3 [加固] `CachedAzureBlobWorkspace._cache_path()` — 替换手写逻辑 + +**文件**:`py-src/data_formulator/datalake/cached_azure_blob_workspace.py`,第 230–242 行 + +**Before**: + +```python +def _cache_path(self, filename: str) -> Path: + resolved = (self._cache_dir / filename).resolve() + if not resolved.is_relative_to(self._cache_dir.resolve()): + raise ValueError( + f"Path traversal detected: {filename!r} resolves outside " + f"the cache directory" + ) + return resolved +``` + +**After**: + +```python +def __init__(self, ...): + ... + self._cache_jail = ConfinedDir(self._cache_dir, mkdir=True) + +def _cache_path(self, filename: str) -> Path: + return self._cache_jail.resolve(filename) +``` + +收益:消除 CodeQL `py/path-injection` 在此处的告警(验证逻辑在 `ConfinedDir` 内部完成,不再是"先拼接后验证")。 + +### 4.4 [加固] `Workspace.get_file_path()` — 可选改造 + +**文件**:`py-src/data_formulator/datalake/workspace.py`,第 226–252 行 + +当前已有 `safe_data_filename` + `resolve().relative_to()` 双重防护,功能正确。可选择用 `ConfinedDir` 替换以统一风格: + +```python +def __init__(self, ...): + ... + self._data_jail = ConfinedDir(self._path / "data") + +def get_file_path(self, filename: str) -> Path: + basename = safe_data_filename(filename) + return self._data_jail.resolve(basename) +``` + +此项为**可选优化**,现有逻辑已安全。 + +--- + +## 5. HTTP 响应头注入修复 + +### 5.1 工具函数 + +新增 `py-src/data_formulator/security/http_headers.py`: + +```python +"""HTTP response header safety helpers.""" + +from werkzeug.utils import secure_filename + + +def safe_download_name(name: str, fallback: str = "export") -> str: + """Sanitize a user-provided name for Content-Disposition filename. 
+ + Strips directory components, special characters (quotes, newlines), + and falls back to *fallback* if the result is empty. + """ + safe = secure_filename(name) if name else "" + return safe or fallback +``` + +### 5.2 改造点 + +**文件**:`py-src/data_formulator/tables_routes.py`,第 667–672 行 + +**Before**: + +```python +headers={ + 'Content-Disposition': f'attachment; filename="{table_name}.{ext}"', +} +``` + +**After**: + +```python +from data_formulator.security.http_headers import safe_download_name + +safe_name = safe_download_name(table_name) +headers={ + 'Content-Disposition': f'attachment; filename="{safe_name}.{ext}"', +} +``` + +--- + +## 6. 测试计划 + +### 6.1 `ConfinedDir` 单元测试 + +**文件**:`tests/backend/security/test_path_safety.py` + +```python +# 要验证的行为: +# +# --- 正常路径 --- +# - jail.resolve("file.txt") → root/file.txt +# - jail.resolve("sub/dir/file.txt") → root/sub/dir/file.txt +# - jail / "file.txt" → 等价于 resolve +# - jail.write("out.bin", b"data") → 文件创建成功,内容正确 +# - resolve(mkdir_parents=True) → 父目录自动创建 +# +# --- 路径穿越拒绝 --- +# - jail.resolve("../etc/passwd") → raises ValueError +# - jail.resolve("sub/../../etc/passwd") → raises ValueError +# - jail.resolve("..") → raises ValueError +# - jail.resolve("/etc/passwd") → raises ValueError(绝对路径) +# - jail.resolve("") → raises ValueError(空路径) +# - jail.resolve("sub/\x00hidden") → 行为取决于 OS,至少不能逃逸 +# +# --- Symlink 逃逸 --- +# - 在 jail 内创建指向 jail 外的 symlink → resolve 后检测逃逸 → raises ValueError +# +# --- Unicode 安全 --- +# - jail.resolve("数据/报表.parquet") → 正常工作(CJK 字符保留) +# - jail.resolve("données/résumé.csv") → 正常工作(Latin 扩展字符保留) +# +# --- 边界情况 --- +# - 多层嵌套 "../../../.." 
→ raises ValueError +# - Windows 风格分隔符 "sub\\..\\..\\etc" → 仅 Windows 上 raises ValueError(POSIX 将 "\\" 视为普通文件名字符,不含 ".." 段,不逃逸即允许) +# - 以 "~" 开头的路径 "~root/.ssh/key" → 不逃逸即允许 + +# 测试策略:使用 pytest tmp_path fixture,真实文件系统操作 +``` + +### 6.2 Azure Blob 路径安全回归测试 + +**文件**:`tests/backend/security/test_blob_path_traversal.py` + +```python +# 要验证的行为: +# +# - local_dir() 遇到 blob key 含 "../" 时 → 跳过该 blob,不抛异常,日志 warning +# - local_dir() 正常 blob → 文件正确物化到 tmp 目录内 +# - save_workspace_snapshot() 同上 +# - 构造含 "../" 的 mock blob → 验证不会写出目标目录 +# +# Mock 策略: +# - Mock ContainerClient.list_blobs() 返回包含恶意 blob name 的列表 +# - Mock download_blob().readall() 返回测试数据 +# - 用 tmp_path 作为 local_dir / snapshot 目标 +# - 验证 tmp_path 之外没有被写入文件 +``` + +### 6.3 HTTP 响应头注入测试 + +**文件**:`tests/backend/security/test_http_headers.py` + +```python +# 要验证的行为: +# +# - safe_download_name("normal_name") → "normal_name" +# - safe_download_name('name"with"quotes') → 引号被移除 +# - safe_download_name("name\r\ninjection") → 换行被移除 +# - safe_download_name("") → "export"(fallback) +# - safe_download_name("数据报表") → "export"(secure_filename 清除非 ASCII) +# 注:下载文件名不需要保留 Unicode,不同于文件存储 +# - safe_download_name(None) → "export" +``` + +### 6.4 现有测试不受影响 + +`ConfinedDir` 是在现有防护之下的**新增底层防线**,不改变上层 API 的行为契约。以下现有测试应继续通过: + +- `test_safe_data_filename.py` — `safe_data_filename()` 功能不变 +- `test_workspace_manager.py` — Workspace ID 清洗不变 +- `test_workspace_source_file_ops.py` — `get_file_path()` 行为不变 +- `test_same_basename_upload.py` — 上传管道不变 + +--- + +## 7. 
实施步骤 + +### Step 1:新增 `ConfinedDir` + 测试 (安全基础) + +| 任务 | 文件 | +|------|------| +| 实现 `ConfinedDir` | `py-src/data_formulator/security/path_safety.py` | +| 注册到 `security/__init__.py` | `py-src/data_formulator/security/__init__.py` | +| 单元测试 | `tests/backend/security/test_path_safety.py` | + +**验收标准**:`pytest tests/backend/security/test_path_safety.py` 全部通过。 + +### Step 2:修复 Azure Blob 路径穿越漏洞 + +| 任务 | 文件 | +|------|------| +| `local_dir()` 使用 `ConfinedDir` | `azure_blob_workspace.py` | +| `save_workspace_snapshot()` 使用 `ConfinedDir` | `azure_blob_workspace.py` | +| 回归测试 | `tests/backend/security/test_blob_path_traversal.py` | + +**验收标准**:恶意 blob 名称被安全跳过;正常 blob 正确物化;现有 workspace 测试全部通过。 + +### Step 3:加固 Cache 层 + Workspace + +| 任务 | 文件 | +|------|------| +| `_cache_path()` 改用 `ConfinedDir` | `cached_azure_blob_workspace.py` | +| (可选)`get_file_path()` 改用 `ConfinedDir` | `workspace.py` | + +**验收标准**:现有测试全部通过;CodeQL `py/path-injection` 告警消失或减少。 + +### Step 4:修复 HTTP 响应头注入 + +| 任务 | 文件 | +|------|------| +| 实现 `safe_download_name()` | `py-src/data_formulator/security/http_headers.py` | +| 改造 `export-table-csv` | `tables_routes.py` | +| 测试 | `tests/backend/security/test_http_headers.py` | + +**验收标准**:含特殊字符的 `table_name` 不再注入响应头。 + +### Step 5:验证与清理 + +| 任务 | 说明 | +|------|------| +| 全量测试 | `pytest tests/backend/` 全部通过 | +| CodeQL 扫描 | `py/path-injection` 告警清零或仅剩已标注的可接受误报 | +| 代码审查 | 搜索 `Path(...) / variable` 模式,确认无遗漏 | + +--- + +## 8. 
后续防线 + +### 8.1 Code Review Checklist + +PR 模板中新增检查项: + +```markdown +### 安全检查 +- [ ] 文件路径操作使用了 `ConfinedDir` 或 `safe_data_filename()` +- [ ] 未直接使用 `Path(root) / user_input` 模式 +- [ ] HTTP 响应头中的用户输入已清洗 +``` + +### 8.2 Lint 规则(可选进阶) + +通过自定义 Ruff 或 Semgrep 规则,检测裸路径拼接模式: + +```yaml +# .semgrep/path-safety.yaml +rules: + - id: no-bare-path-join-with-variable + pattern: $ROOT / $USER_INPUT + message: "Use ConfinedDir instead of bare path joining with variables" + severity: WARNING + languages: [python] +``` + +### 8.3 Plugin 开发者指南 + +在 `5-plugin-development-guide.md` 中补充路径安全章节: + +- 插件代码中**禁止**直接操作 `Path`,必须通过 `ConfinedDir` 或 Workspace API +- 写入文件必须使用 `PluginDataWriter`(内部已经过 Workspace 的路径校验) +- 示例代码展示正确和错误的路径操作对比 + +### 8.4 CodeQL Annotation + +对 `ConfinedDir.resolve()` 方法添加 CodeQL 建模,告知静态分析器该方法是路径校验点: + +```python +class ConfinedDir: + def resolve(self, relative: str, ...) -> Path: + # CodeQL: this method is a path sanitizer + # See: https://codeql.github.com/docs/codeql-for-python/ + ... +``` + +具体方式是在 `.github/codeql/` 下添加自定义 query 或 `qlpack.yml` 中的 sanitizer 建模。 + +--- + +## 附录:风险矩阵 + +| 编号 | 问题 | 严重度 | 可利用性 | 修复步骤 | +|------|------|--------|---------|---------| +| V-01 | `AzureBlobWorkspace.local_dir()` 路径穿越 | 中 | 需要控制 blob 存储内容 | Step 2 | +| V-02 | `AzureBlobWorkspace.save_workspace_snapshot()` 路径穿越 | 中 | 同上 | Step 2 | +| V-03 | `export-table-csv` Content-Disposition 头注入 | 低-中 | 需要能创建含特殊字符的表名 | Step 4 | +| H-01 | `cached_azure_blob_workspace._cache_path()` CodeQL 误报 | 信息 | 已有防护,仅静态分析噪音 | Step 3 | diff --git a/design-docs/7-language-standardization-plan.md b/design-docs/7-language-standardization-plan.md new file mode 100644 index 00000000..be50d4be --- /dev/null +++ b/design-docs/7-language-standardization-plan.md @@ -0,0 +1,506 @@ +# 多语言(i18n)规范化开发计划 + +> 编号:design-doc-7 | 创建:2026-04-12 | 状态:草案 + +--- + +## 0. 
背景与动机 + +项目已建立了 `agent_language.py` 作为 LLM 提示词多语言注入的核心模块,并在 `agent_routes.py` 中通过 `get_language_instruction()` 从 `Accept-Language` header 读取用户语言。然而: + +1. **调用覆盖不完整** — 部分 Agent 路由遗漏了语言注入(如 `workspace-summary`、`sort-data`) +2. **注入方式不统一** — 各 Agent 以不同方式拼接 language_instruction(有的用 marker 定位插入、有的直接追加到末尾),没有统一的接口约束 +3. **前端只支持 en/zh** — `agent_language.py` 注册了 20 种语言,但前端 i18n 翻译文件只有 `en` 和 `zh` 两组 +4. **缺少自动化保障** — 没有 lint 规则或单元测试来防止新的 LLM 调用点遗漏语言注入 +5. **已有规范文档分散** — Cursor rule、SKILL.md、design-doc-3 分别有一些约定,但开发者容易遗漏 + +本文档定义一个系统性的规范化方案,确保多语言处理有统一的模式、完整的覆盖和可持续的质量保障。 + +--- + +## 1. 现状审计 + +### 1.1 后端 Agent 语言注入覆盖表 + +| Agent 类 | 文件 | 接收 `language_instruction` | 路由注入 | mode | 状态 | +|----------|------|:---:|:---:|------|------| +| `DataRecAgent` | `agent_data_rec.py` | ✅ | ✅ `derive-data` | compact | **正常** | +| `DataTransformationAgent` | `agent_data_transform.py` | ✅ | ✅ `derive-data` / `refine-data` | compact | **正常** | +| `DataAgent` | `data_agent.py` | ✅ | ✅ `data-agent-streaming` | full + compact(rec) | **正常** | +| `DataLoadAgent` | `agent_data_load.py` | ✅ | ✅ `process-data-on-load` | compact | **正常** | +| `DataCleanAgentStream` | `agent_data_clean_stream.py` | ✅ | ✅ `clean-data-stream` | full | **正常** | +| `CodeExplanationAgent` | `agent_code_explanation.py` | ✅ | ✅ `code-expl` | full | **正常** | +| `ChartInsightAgent` | `agent_chart_insight.py` | ✅ | ✅ `chart-insight` | full | **正常** | +| `InteractiveExploreAgent` | `agent_interactive_explore.py` | ✅ | ✅ `get-recommendation-questions` | full | **正常** | +| `ReportGenAgent` | `agent_report_gen.py` | ✅ | ✅ `generate-report-stream` | full | **正常** | +| `SortDataAgent` | `agent_sort_data.py` | ❌ | ❌ `sort-data` | — | **⚠️ 遗漏** | +| *(inline)* workspace-summary | `agent_routes.py` L992-1046 | — | ❌ | — | **⚠️ 遗漏** | +| *(inline)* test-model | `agent_routes.py` L188-227 | — | ❌ | — | **无需注入**(非用户可见) | + +### 1.2 遗漏详情 + +#### SortDataAgent(`sort-data` 路由) + +`SortDataAgent.__init__()` 不接收 `language_instruction` 
参数,路由处理也未调用 `get_language_instruction()`。虽然排序结果本身是数据值的重排列(不涉及翻译),但返回的 `reason` 字段是面向用户的自然语言文本,应该跟随 UI 语言。 + +**影响级别**:低 — `reason` 字段在 UI 中显示但不是核心功能文本。 + +#### workspace-summary 路由 + +工作区名称直接展示在侧边栏,但 system prompt 中未注入语言指令。中文用户会看到英文的工作区名称。 + +**影响级别**:中 — 用户每次打开应用都会看到。 + +### 1.3 注入方式一致性审计 + +| Agent | 注入方式 | 说明 | +|-------|---------|------| +| `DataRecAgent` | marker 定位插入 (`"You are a data scientist"` 之后) | 策略性插入到 role 声明之后 | +| `DataTransformationAgent` | marker 定位插入 (`"**About the execution environment:**"` 之前) | 策略性插入到技术细节之前 | +| `DataAgent` | `_build_system_prompt()` 末尾追加 | 动态构建 prompt,末尾追加 | +| `DataLoadAgent` | system prompt 末尾追加 | 简单追加 | +| `DataCleanAgentStream` | system prompt 末尾追加 | 简单追加 | +| `CodeExplanationAgent` | system prompt 末尾追加 | 简单追加 | +| `ChartInsightAgent` | system prompt 末尾追加 | 简单追加 | +| `InteractiveExploreAgent` | system prompt 末尾追加 | 简单追加 | +| `ReportGenAgent` | system prompt 末尾追加 | 简单追加 | + +**结论**:有两种注入策略(marker 定位 vs 末尾追加),两种都是可接受的。marker 策略适用于需要精确控制指令位置的复杂 prompt,末尾追加适用于简单场景。**当前不需要强制统一**,但需要在规范中明确这两种模式的适用条件。 + +### 1.4 前端 i18n 覆盖 + +| 层面 | 状态 | +|------|------| +| UI 翻译文件 (locales) | 仅 `en` 和 `zh` | +| `agent_language.py` 语言注册表 | 20 种语言 | +| 前端语言切换器 | 从 Redux `availableLanguages` 动态读取 | +| `fetchWithIdentity()` header | ✅ 正确注入 `Accept-Language` | +| Plugin 翻译 (Superset) | 仅 `en` 和 `zh` | + +--- + +## 2. 规范化目标 + +### P0(必须完成) +1. 补齐遗漏的语言注入点(SortDataAgent、workspace-summary) +2. 建立 Agent 基类或 Mixin,统一 `language_instruction` 的接收和注入接口 +3. 添加单元测试,确保所有 Agent 构造函数支持 `language_instruction` +4. 更新开发者文档,合并分散的约定到一个权威参考文档 + +### P1(应该完成) +5. 添加 lint 或静态检查规则,检测新增的 LLM 调用点是否注入了语言指令 +6. 创建语言注入集成测试,模拟不同语言请求验证完整链路 +7. 规范 `mode` 选择决策树,让新 Agent 开发者能快速判断 + +### P2(锦上添花) +8. 扩展前端翻译覆盖(优先添加 ja、ko、fr、de 等高需求语言) +9. 将 `agent_language.py` 中的模板抽象为配置文件,支持运行时热加载 +10. 建立翻译贡献流程(community translation) + +--- + +## 3. 
详细方案 + +### 3.1 Phase 1:补齐遗漏(P0,预计 0.5 天) + +#### 3.1.1 SortDataAgent 添加语言支持 + +```python +# agent_sort_data.py +class SortDataAgent(object): + + def __init__(self, client, language_instruction=""): + self.client = client + self.language_instruction = language_instruction + + def run(self, name, values, n=1): + system_prompt = SYSTEM_PROMPT + if self.language_instruction: + system_prompt = system_prompt + "\n\n" + self.language_instruction + + # ... 其余不变 ... +``` + +```python +# agent_routes.py — sort_data_request() +language_instruction = get_language_instruction(mode="compact") +agent = SortDataAgent(client=client, language_instruction=language_instruction) +``` + +**mode 选择**:`"compact"` — SortDataAgent 的输出是结构化 JSON,仅 `reason` 字段面向用户,用 compact 模式足够且不干扰排序逻辑。 + +#### 3.1.2 workspace-summary 添加语言注入 + +```python +# agent_routes.py — workspace_summary() +lang_instruction = get_language_instruction(mode="compact") +lang_suffix = f"\n\n{lang_instruction}" if lang_instruction else "" + +messages = [ + { + "role": "system", + "content": ( + "You are a helpful assistant. Generate a very short name (3-5 words) " + "for a data analysis workspace based on the context below. " + "Return ONLY the name, no quotes, no explanation." 
+ + lang_suffix + ), + }, + {"role": "user", "content": context_str}, +] +``` + +### 3.2 Phase 2:统一 Agent 接口约束(P0,预计 1 天) + +#### 3.2.1 定义 Agent 协议(Protocol/ABC) + +不强制所有 Agent 继承同一基类(避免大范围重构),而是采用 Python Protocol 约束: + +```python +# agents/agent_protocol.py +from typing import Protocol, runtime_checkable + +@runtime_checkable +class LanguageAwareAgent(Protocol): + """Any Agent that receives LLM language instructions must expose this attribute.""" + language_instruction: str +``` + +#### 3.2.2 统一注入辅助函数 + +抽取重复的 "追加到 system prompt" 逻辑为公共函数: + +```python +# agents/agent_language.py(在 build_language_instruction 之后添加) + +def inject_language_instruction( + system_prompt: str, + language_instruction: str, + *, + marker: str | None = None, +) -> str: + """Inject language instruction into a system prompt. + + Parameters + ---------- + system_prompt : str + The base system prompt. + language_instruction : str + The language instruction block (empty string = no-op). + marker : str | None + If provided, insert before this marker. Otherwise append. 
+ """ + if not language_instruction: + return system_prompt + + if marker: + idx = system_prompt.find(marker) + if idx > 0: + return ( + system_prompt[:idx] + + language_instruction + "\n\n" + + system_prompt[idx:] + ) + + return system_prompt + "\n\n" + language_instruction +``` + +然后各 Agent 统一调用: + +```python +from data_formulator.agents.agent_language import inject_language_instruction + +# DataTransformationAgent.__init__() +self.system_prompt = inject_language_instruction( + self.system_prompt, language_instruction, + marker="**About the execution environment:**" +) + +# ChartInsightAgent.__init__() (简单场景) +system_prompt = inject_language_instruction(system_prompt, self.language_instruction) +``` + +### 3.3 Phase 3:测试保障(P1,预计 1 天) + +#### 3.3.1 单元测试:所有 Agent 支持 language_instruction + +```python +# tests/test_language_injection.py +import pytest +from data_formulator.agents.agent_language import ( + build_language_instruction, + inject_language_instruction, + LANGUAGE_DISPLAY_NAMES, +) + +ALL_AGENTS = [ + ("DataRecAgent", "data_formulator.agents.agent_data_rec", "DataRecAgent"), + ("DataTransformationAgent", "data_formulator.agents.agent_data_transform", "DataTransformationAgent"), + ("DataAgent", "data_formulator.agents.data_agent", "DataAgent"), + ("DataLoadAgent", "data_formulator.agents.agent_data_load", "DataLoadAgent"), + ("DataCleanAgentStream", "data_formulator.agents.agent_data_clean_stream", "DataCleanAgentStream"), + ("CodeExplanationAgent", "data_formulator.agents.agent_code_explanation", "CodeExplanationAgent"), + ("ChartInsightAgent", "data_formulator.agents.agent_chart_insight", "ChartInsightAgent"), + ("InteractiveExploreAgent", "data_formulator.agents.agent_interactive_explore", "InteractiveExploreAgent"), + ("ReportGenAgent", "data_formulator.agents.agent_report_gen", "ReportGenAgent"), + ("SortDataAgent", "data_formulator.agents.agent_sort_data", "SortDataAgent"), +] + + +class TestBuildLanguageInstruction: + def 
test_english_returns_empty(self): + assert build_language_instruction("en") == "" + + def test_non_english_returns_instruction(self): + result = build_language_instruction("zh") + assert "[LANGUAGE INSTRUCTION]" in result + assert "Simplified Chinese" in result + + def test_compact_mode(self): + full = build_language_instruction("zh", mode="full") + compact = build_language_instruction("zh", mode="compact") + assert len(compact) < len(full) + + @pytest.mark.parametrize("lang", [k for k in LANGUAGE_DISPLAY_NAMES if k != "en"]) + def test_all_registered_languages(self, lang): + result = build_language_instruction(lang) + assert result != "" + assert "[LANGUAGE INSTRUCTION]" in result + + +class TestInjectLanguageInstruction: + def test_empty_instruction_noop(self): + prompt = "You are a data scientist." + assert inject_language_instruction(prompt, "") == prompt + + def test_append_without_marker(self): + prompt = "You are a data scientist." + result = inject_language_instruction(prompt, "[LANG]") + assert result.endswith("[LANG]") + + def test_insert_before_marker(self): + prompt = "Role description.\n\n**About the execution environment:**\nDetails." 
+ result = inject_language_instruction( + prompt, "[LANG]", + marker="**About the execution environment:**" + ) + assert result.index("[LANG]") < result.index("**About the execution environment:**") + + +class TestAgentLanguageParam: + """Verify each Agent constructor accepts language_instruction.""" + + @pytest.mark.parametrize("label,module_path,class_name", ALL_AGENTS) + def test_constructor_has_language_instruction(self, label, module_path, class_name): + import importlib, inspect + mod = importlib.import_module(module_path) + cls = getattr(mod, class_name) + sig = inspect.signature(cls.__init__) + params = list(sig.parameters.keys()) + assert "language_instruction" in params, ( + f"{label}.__init__() missing language_instruction parameter" + ) +``` + +#### 3.3.2 路由层集成测试 + +```python +# tests/test_route_language_injection.py +"""Verify that all user-facing agent routes call get_language_instruction().""" + +ROUTES_NEEDING_LANGUAGE = [ + "process-data-on-load", + "clean-data-stream", + "derive-data", + "refine-data", + "data-agent-streaming", + "code-expl", + "chart-insight", + "get-recommendation-questions", + "generate-report-stream", + "sort-data", + "workspace-summary", +] + +ROUTES_EXEMPT = [ + "test-model", + "list-global-models", + "check-available-models", + "refresh-derived-data", +] +``` + +### 3.4 Phase 4:开发者规范文档整合(P0,预计 0.5 天) + +将 `design-docs/3-language-injection-analysis.md`、`.cursor/rules/language-injection-conventions.mdc`、`.cursor/skills/language-injection/SKILL.md` 的核心约定整合为一个权威参考,避免信息分散。 + +#### 3.4.1 核心决策树:新增 LLM 调用点 + +``` +新增 LLM 调用 → 输出是否面向用户展示? + │ + ├── 否(健康检查、内部工具调用、日志) + │ └── ✅ 不需要注入语言指令 + │ + └── 是 + ├── 是否为独立 Agent 类? + │ ├── 是 → 构造函数添加 language_instruction="" 参数 + │ │ 使用 inject_language_instruction() 注入到 system prompt + │ │ 在路由中调用 get_language_instruction(mode=?) 
传入 + │ │ + │ └── 否(内联 LLM 调用) + │ └── 直接在路由中拼接到 system prompt + │ + └── mode 选择: + ├── 输出主要是自然语言文本 → mode="full" + │ (ChartInsight、Report、Explore、CodeExplanation、DataClean) + │ + └── 输出主要是代码/结构化 JSON → mode="compact" + (DataRec、DataTransform、DataLoad、Sort、workspace-summary) +``` + +#### 3.4.2 开发者检查清单 + +新增或修改 Agent/LLM 调用时,PR reviewer 应检查: + +- [ ] Agent 构造函数是否接收 `language_instruction` 参数? +- [ ] 路由是否调用 `get_language_instruction(mode=...)` 并传入 Agent? +- [ ] mode 选择是否正确(full vs compact)? +- [ ] 是否使用了 `inject_language_instruction()` 辅助函数? +- [ ] 是否有硬编码的语言字符串(如 `"回答请使用中文"`)? +- [ ] `agent_diagnostics.py` 是否记录了 language_instruction? +- [ ] 是否有对应的单元测试验证 language_instruction 参数存在? + +### 3.5 Phase 5:静态检查与 CI 保障(P1,预计 0.5 天) + +#### 3.5.1 自定义 lint 脚本 + +创建一个简单的 Python 脚本检测 `agent_routes.py` 中所有调用 `client.get_completion()` 或实例化 Agent 类的地方,验证上下文中是否有 `get_language_instruction` 调用: + +```python +# scripts/check_language_injection.py +"""CI check: verify all user-facing LLM calls in agent_routes.py inject language.""" + +import ast, sys + +EXEMPT_FUNCTIONS = {"test_model", "check_available_models", "list_global_models"} + +# 解析 AST,对每个路由函数检查是否包含 get_language_instruction 调用 +# ... 
+``` + +#### 3.5.2 Pre-commit hook + +```yaml +# .pre-commit-config.yaml (追加) +- repo: local + hooks: + - id: check-language-injection + name: Check language injection in agent routes + entry: python scripts/check_language_injection.py + language: python + files: agent_routes\.py$ +``` + +### 3.6 Phase 6:前端翻译扩展(P2,按需) + +#### 3.6.1 优先扩展的语言 + +根据 `agent_language.py` 注册表和用户需求,推荐优先级: + +| 优先级 | 语言 | 理由 | +|--------|------|------| +| 1 | ja (日语) | 东亚高活跃用户群 | +| 2 | ko (韩语) | 东亚高活跃用户群 | +| 3 | fr (法语) | 欧洲及非洲广泛使用 | +| 4 | de (德语) | 欧洲技术社区活跃 | +| 5 | es (西班牙语) | 全球第二大母语人口 | + +#### 3.6.2 翻译文件结构 + +每种新语言需要: + +``` +src/i18n/locales// +├── common.json +├── upload.json +├── chart.json +├── model.json +├── encoding.json +├── messages.json +├── navigation.json +└── index.ts +``` + +加上 `i18n/index.ts` 和 `i18n/locales/index.ts` 的注册。 + +--- + +## 4. 实施计划 + +| Phase | 内容 | 优先级 | 预估工期 | 前置依赖 | +|-------|------|--------|---------|---------| +| **Phase 1** | 补齐 SortDataAgent、workspace-summary 的语言注入 | P0 | 0.5 天 | 无 | +| **Phase 2** | 定义 `inject_language_instruction()` 辅助函数;重构各 Agent 统一调用 | P0 | 1 天 | Phase 1 | +| **Phase 3** | 单元测试 + 路由集成测试 | P1 | 1 天 | Phase 2 | +| **Phase 4** | 整合开发者规范文档,更新 Cursor rule 和 SKILL | P0 | 0.5 天 | Phase 2 | +| **Phase 5** | 静态检查脚本 + pre-commit hook | P1 | 0.5 天 | Phase 4 | +| **Phase 6** | 前端翻译扩展(ja、ko、fr 等) | P2 | 按需 | Phase 1 | + +**总计 Phase 1-5**:约 3.5 个开发日 + +--- + +## 5. 
反模式清单(明确禁止) + +| 反模式 | 为什么不行 | 正确做法 | +|--------|-----------|---------| +| 使用环境变量 `os.environ.get("DF_DEFAULT_LANGUAGE")` | 退化为 per-process 语言,破坏多用户场景 | 始终从 `Accept-Language` header 读取 | +| 在 LLM client 层做全局拦截注入 | 隐式行为、无法区分 full/compact mode、调试困难 | 在路由层显式注入 | +| 硬编码语言字符串 `"回答请使用中文"` | 不可配置、不支持其他语言 | 使用 `build_language_instruction()` | +| 新建 `MessageBuilder` 工具类 | 与 `agent_language.py` 形成并行抽象,增加维护成本 | 复用现有 `inject_language_instruction()` | +| 在 user message 中注入语言指令 | 与 OpenAI 最佳实践相悖(系统指令应在 system prompt) | 只在 system prompt 中注入 | +| 跳过 `get_language_instruction()` 直接调用 `build_language_instruction()` | 绕过了从 request header 读取语言的标准链路 | 在路由中使用 `get_language_instruction(mode=...)` | + +--- + +## 6. 风险与注意事项 + +| 风险 | 缓解措施 | +|------|---------| +| Phase 2 重构可能引入 system prompt 格式变化 | 通过对比测试确保重构前后生成的 prompt 内容一致 | +| 新增语言翻译质量难以保证 | 建立 community review 流程,先覆盖高需求语言 | +| compact mode 下语言指令过简导致 LLM 不遵从 | 对各语言进行 A/B 测试,必要时调整 compact 模板 | +| SortDataAgent 注入语言后 LLM 排序行为变化 | 排序测试用例覆盖中文、日文等非拉丁文字数据 | + +--- + +## 7. 相关文件索引 + +| 文件 | 角色 | +|------|------| +| `py-src/data_formulator/agents/agent_language.py` | 语言指令构建核心模块 | +| `py-src/data_formulator/agent_routes.py` | 路由层:`get_language_instruction()` + 各端点调用 | +| `py-src/data_formulator/agents/agent_sort_data.py` | **待修复**:缺少 language_instruction | +| `src/app/utils.tsx` | 前端:`getAgentLanguage()` + `fetchWithIdentity()` | +| `src/i18n/index.ts` | 前端 i18n 配置 | +| `src/i18n/locales/` | 前端翻译文件(当前仅 en/zh) | +| `.cursor/rules/language-injection-conventions.mdc` | Cursor 开发规范 | +| `.cursor/skills/language-injection/SKILL.md` | 详细架构说明 | +| `design-docs/3-language-injection-analysis.md` | 早期分析文档(本文档是其后续) | + +--- + +## 8. 
验收标准 + +Phase 1-4 完成后,以下测试全部通过: + +- [ ] `build_language_instruction()` 对所有 20 种注册语言返回非空指令 +- [ ] 所有 Agent 构造函数均接受 `language_instruction` 参数 +- [ ] 所有面向用户的路由端点均调用 `get_language_instruction()` +- [ ] SortDataAgent 返回的 `reason` 字段跟随 UI 语言 +- [ ] workspace-summary 返回的名称跟随 UI 语言 +- [ ] 英文用户不受影响(`build_language_instruction("en")` 返回 `""`) +- [ ] `inject_language_instruction()` 辅助函数被所有 Agent 使用 +- [ ] PR review 检查清单已纳入团队流程 +- [ ] 静态检查脚本能检测到新增的未注入语言的 LLM 调用点 diff --git a/design-docs/8-superset-token-passthrough-design.md b/design-docs/8-superset-token-passthrough-design.md new file mode 100644 index 00000000..3766aa33 --- /dev/null +++ b/design-docs/8-superset-token-passthrough-design.md @@ -0,0 +1,450 @@ +# 8. Superset Token 透传与 Agent 数据源授权设计 + +> 状态:设计阶段 +> 创建日期:2026-04-15 +> 关联:`1-data-source-plugin-architecture.md`、`1-sso-plugin-architecture.md` + +--- + +## 1. 背景与问题 + +### 1.1 当前架构 + +DF 与 Superset 的认证是两条独立的链路: + +``` +DF 认证链路: + 用户 → SSO (y-sso-system) → DF 获得 SSO access_token + → 用于 DF 自身 API 的身份验证 + +Superset 认证链路: + 用户 → SSO Bridge 弹窗 → Superset OAuth → Superset 签发自己的 JWT + → 用于 Superset API 的身份验证 +``` + +两个 token 是不同系统签发的、格式不同、密钥不同: + +| 属性 | SSO access_token | Superset JWT | +|------|-----------------|--------------| +| 签发方 | y-sso-system | Superset (flask_jwt_extended) | +| payload | `{ sub: "zhangsan", iss: "sso.example.com" }` | `{ identity: "42", fresh: true }` | +| 签名密钥 | SSO 的密钥 | Superset 的 `SECRET_KEY` | +| 用途 | DF API 认证 | Superset API 认证 | + +**核心限制**:Superset 原生不支持外部 token 的直接使用。这是 OAuth2/OIDC 协议的标准设计——每个资源服务器只信任自己签发的凭证。Google、Microsoft 等所有系统都遵循同样的原则。 + +### 1.2 为什么需要 Token 透传 + +DF 未来将引入 **Agent 自动化数据分析** 功能: + +``` +用户提出分析需求 + → Agent 自动扫描所有数据源(包括 Superset 插件) + → Agent 自动判断哪些数据集与分析目标相关 + → Agent 自动拉取数据 + → Agent 完成分析并生成可视化 +``` + +在当前的 Bridge 弹窗方案下,Agent 无法自主完成 Superset 数据的读取: + +``` +Agent 要读 Superset 数据 + → DF 后端发现没有 Superset JWT + → 返回 "未授权" + → 前端需要弹窗让用户手动授权 ← Agent 被阻断 +``` + +**目标**:设计一套机制,使 Agent 能够在最少人工干预的情况下访问 Superset 数据源。 + +--- + +## 
2. 方案设计 + +采用**渐进式策略**:先实现"按需授权"(Phase 1),待 Agent 功能成熟后升级为"自动换票"(Phase 2)。两个阶段不冲突,Phase 2 是 Phase 1 的超集。 + +### 2.1 Phase 1:Agent 按需授权(推荐先实现) + +#### 核心思路 + +Agent 运行时检测数据源授权状态,对未授权的数据源暂停并提示用户一次性授权,授权完成后继续执行。 + +#### 交互流程 + +``` +用户:帮我分析上个月的销售数据 + +Agent:正在扫描可用数据源... + ┌──────────────────────────────────────────┐ + │ Superset 数据源需要您的授权 │ + │ │ + │ Agent 需要读取 Superset 中的数据 │ + │ 来完成本次分析。 │ + │ 授权后本次会话内不再需要重复操作。 │ + │ │ + │ [ 点击授权 ] │ + └──────────────────────────────────────────┘ + +用户:(点击 → SSO 弹窗秒过 → 关闭) + +Agent:已获取 Superset 授权,继续分析... + 发现以下相关数据集: + - sales_monthly(月度销售汇总) + - product_catalog(产品目录) + 正在拉取数据... +``` + +#### 技术架构 + +``` +┌─────────────────────────────────────────────────────────┐ +│ DF 前端 │ +│ │ +│ ┌──────────┐ ┌──────────────┐ ┌───────────────┐ │ +│ │ Agent │───→│ DataSource │───→│ Superset SSO │ │ +│ │ Runtime │ │ AuthGate │ │ Bridge Popup │ │ +│ │ │←───│ (新增) │←───│ (已有) │ │ +│ └──────────┘ └──────────────┘ └───────────────┘ │ +│ │ │ │ +│ ▼ ▼ │ +│ ┌──────────────────────────────────────────────────┐ │ +│ │ fetchWithIdentity → DF Backend API │ │ +│ └──────────────────────────────────────────────────┘ │ +└─────────────────────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────┐ +│ DF 后端 │ +│ │ +│ ┌──────────────┐ ┌───────────────┐ │ +│ │ Plugin Auth │───→│ Superset │ │ +│ │ require_auth │ │ Client │ │ +│ │ (已有) │ │ Bearer JWT │ │ +│ └──────────────┘ └───────────────┘ │ +│ │ │ │ +│ ▼ ▼ │ +│ ┌──────────────┐ ┌───────────────┐ │ +│ │ Flask Session │ │ Superset API │ │ +│ │ JWT 存储 │ │ (外部) │ │ +│ └──────────────┘ └───────────────┘ │ +└─────────────────────────────────────────────────────────┘ +``` + +#### 需要改动的代码 + +**前端新增:DataSource AuthGate 组件** + +Agent 执行前,检查所有目标数据源的授权状态: + +```typescript +interface DataSourceAuthStatus { + pluginId: string; // e.g. "superset" + name: string; // e.g. 
"Superset (生产)" + authorized: boolean; + authMethod: "sso" | "password" | "guest"; +} + +// Agent 运行时调用 +async function ensureDataSourcesAuthorized( + requiredSources: string[] +): Promise<{ allAuthorized: boolean; pending: DataSourceAuthStatus[] }> { + const statuses = await Promise.all( + requiredSources.map(id => checkPluginAuthStatus(id)) + ); + const pending = statuses.filter(s => !s.authorized); + return { allAuthorized: pending.length === 0, pending }; +} +``` + +**后端新增:批量授权状态查询端点** + +```python +# GET /api/plugins/auth-status +# 返回所有已启用插件的授权状态 +@app.route("/api/plugins/auth-status") +def plugins_auth_status(): + results = {} + for plugin_id, plugin in enabled_plugins.items(): + token, user = plugin.require_auth() + results[plugin_id] = { + "authorized": token is not None, + "user": user, + "expires_soon": is_token_expiring_soon(token), + } + return jsonify(results) +``` + +**Agent 层面:授权中断与恢复** + +```python +class AgentDataSourceResolver: + async def resolve_sources(self, query: str) -> list[DataSource]: + sources = self.scan_available_sources() + unauthorized = [s for s in sources if not s.is_authorized] + + if unauthorized: + # 向前端发送授权请求,Agent 暂停 + await self.request_user_authorization(unauthorized) + # 用户完成授权后,前端通知 Agent 恢复 + sources = self.scan_available_sources() + + return sources +``` + +#### 用户体验优化 + +1. **SSO 同源快速授权**:用户已登录 DF SSO,弹出 Superset OAuth 窗口时,由于同一 SSO,可能直接跳过登录页秒级完成 +2. **Session 持久化**:一次授权后 JWT 存入 session(含 refresh_token),整个会话期间无需重复授权 +3. **预检提示**:Agent 开始前就检测授权状态,而非执行到一半再中断 +4. 
**批量授权**:如果有多个数据源未授权,一次性列出让用户逐个点击,不会反复中断 + +--- + +### 2.2 Phase 2:自动 Token Exchange(后续升级) + +#### 核心思路 + +在 Superset 侧新增一个自定义端点,接受 SSO access_token 并返回 Superset JWT。DF 后端在用户登录时自动完成换票,Agent 全程无感知。 + +#### 交互流程 + +``` +用户登录 DF (SSO) + → DF 后端检测到用户有 SSO access_token + → 自动调用 Superset 的 /api/v1/df-token-exchange/ + → Superset 验证 SSO token → 签发 Superset JWT → 返回 + → DF 后端存入 session + → Agent 直接使用,无任何用户交互 +``` + +#### Superset 侧:Token Exchange 端点 + +```python +# 部署在 Superset 的 oauth_config.py 中 +from flask_appbuilder import BaseView, expose +from flask import request, jsonify, current_app +from flask_jwt_extended import create_access_token, create_refresh_token +import requests + +class TokenExchangeView(BaseView): + """ + 接受外部 SSO access_token,验证后签发 Superset JWT。 + 仅供受信任的内部系统(如 DF)调用。 + """ + route_base = "/api/v1/df-token-exchange" + + @expose("/", methods=["POST"]) + def exchange(self): + data = request.get_json(force=True) + sso_token = data.get("sso_access_token") + if not sso_token: + return jsonify({"error": "missing_token"}), 400 + + # 1. 用 SSO token 获取用户信息 + sso_userinfo_url = current_app.config.get("SSO_USERINFO_URL") + try: + resp = requests.get( + sso_userinfo_url, + headers={"Authorization": f"Bearer {sso_token}"}, + timeout=5, + ) + resp.raise_for_status() + user_info = resp.json() + except Exception: + return jsonify({"error": "sso_token_invalid"}), 401 + + username = user_info.get("preferred_username") or user_info.get("username") + if not username: + return jsonify({"error": "no_username_in_token"}), 401 + + # 2. 在 Superset 中查找用户 + sm = current_app.appbuilder.sm + user = sm.find_user(username=username) + if not user or not user.is_active: + return jsonify({"error": "user_not_in_superset"}), 403 + + # 3. 
签发 Superset JWT + access_token = create_access_token( + identity=str(user.id), fresh=True + ) + refresh_token = create_refresh_token(identity=str(user.id)) + + return jsonify({ + "access_token": access_token, + "refresh_token": refresh_token, + "user": { + "id": user.id, + "username": user.username, + "first_name": user.first_name, + "last_name": user.last_name, + }, + }) +``` + +注册方式(在 `FLASK_APP_MUTATOR` 中): + +```python +def mutator(app): + with app.app_context(): + appbuilder = app.extensions["appbuilder"] + appbuilder.add_view_no_menu(TokenExchangeView()) +``` + +#### DF 侧:自动换票逻辑 + +```python +# auth_bridge.py 新增 +class SupersetAuthBridge: + def exchange_sso_token(self, sso_access_token: str) -> dict: + """用 SSO access_token 换取 Superset JWT。""" + resp = requests.post( + f"{self.superset_url}/api/v1/df-token-exchange/", + json={"sso_access_token": sso_access_token}, + timeout=10, + ) + resp.raise_for_status() + return resp.json() +``` + +```python +# session_helpers.py 中 require_auth() 增加优先级 +def require_auth() -> tuple[Optional[str], Optional[dict]]: + token = get_token() + user = get_user() + + # 优先级 1:已有有效 Superset JWT + if token and not is_token_expired(token): + return token, user + + # 优先级 2:JWT 过期,尝试 refresh + if token and is_token_expired(token): + token = try_refresh() + if token: + return token, get_user() + + # 优先级 3(Phase 2 新增):用 OIDC token 自动换票 + auth_header = request.headers.get("Authorization", "") + if auth_header.startswith("Bearer "): + sso_token = auth_header[7:] + try: + result = _bridge.exchange_sso_token(sso_token) + superset_token = result["access_token"] + user_info = result.get("user", {}) + save_session( + superset_token, user_info, result.get("refresh_token") + ) + return superset_token, user_info + except Exception: + pass # 换票失败,降级到下一优先级 + + # 优先级 4:访客模式 + return None, user +``` + +#### 安全措施 + +| 安全关注点 | 应对措施 | +|-----------|---------| +| Token Exchange 端点被外部调用 | 配置 IP 白名单,只允许 DF 服务器 IP 访问 | +| SSO token 泄露导致 Superset 越权 | token 
有效期短(通常 5-30 分钟),且权限受限于该用户在 Superset 中的角色 | +| 中间人攻击 | DF → Superset 通信走内网或 HTTPS | +| 用户在 Superset 中不存在 | 返回 403,降级为 Phase 1 的弹窗授权流程 | + +--- + +## 3. 数据源授权优先级(完整策略) + +``` +┌──────────────────────────────────────────────────────┐ +│ DF 访问 Superset 数据策略 │ +├──────────────────────────────────────────────────────┤ +│ │ +│ 优先级 1:Session 中已有有效 Superset JWT │ +│ → 直接使用(当前已实现) │ +│ │ +│ 优先级 2:JWT 过期 + 存在 refresh_token │ +│ → 调 Superset /api/v1/security/refresh 续期 │ +│ → 更新 Session(当前已实现) │ +│ │ +│ 优先级 3:用户有 SSO Token(Phase 2) │ +│ → 后端自动调 Token Exchange 端点 │ +│ → 换取 Superset JWT,存入 Session │ +│ │ +│ 优先级 4:上述均无 → 提示用户授权(Phase 1) │ +│ → Agent 暂停,前端弹出授权提示 │ +│ → 用户点击 → Bridge 弹窗 → 拿到 JWT │ +│ → Agent 恢复执行 │ +│ │ +│ 优先级 5:用户拒绝授权或无账号 │ +│ → 访客模式(仅公开数据) │ +│ → Agent 在受限范围内尽力分析 │ +│ │ +└──────────────────────────────────────────────────────┘ +``` + +--- + +## 4. 实施路线图 + +### Phase 1(与 Agent 功能同步开发) + +| 任务 | 改动范围 | 工作量 | +|------|---------|--------| +| 新增 `/api/plugins/auth-status` 端点 | DF 后端 | 小 | +| Agent 前置授权检查 (`ensureDataSourcesAuthorized`) | DF 前端 | 中 | +| Agent 暂停/恢复机制 | Agent Runtime | 中 | +| 授权提示 UI 组件 | DF 前端 | 小 | + +**改动文件**: +- `py-src/data_formulator/plugins/superset/routes/auth.py` — 新增 auth-status +- `src/plugins/superset/SupersetLogin.tsx` — 供 Agent 调用的授权入口 +- Agent Runtime(待开发)— 暂停/恢复逻辑 + +**不需要改动 Superset**。 + +### Phase 2(Agent 功能成熟后按需升级) + +| 任务 | 改动范围 | 工作量 | +|------|---------|--------| +| Superset 新增 `TokenExchangeView` | Superset oauth_config.py | 中 | +| DF `auth_bridge.py` 新增 `exchange_sso_token` | DF 后端 | 小 | +| DF `session_helpers.py` 增加优先级 3 逻辑 | DF 后端 | 小 | +| 安全配置(IP 白名单等) | Superset 部署配置 | 小 | + +**需要改动 Superset 配置**(`oauth_config.py`、`FLASK_APP_MUTATOR`)。 + +--- + +## 5. 
对其他数据源插件的通用性
+
+此设计模式不限于 Superset,可推广到任何需要独立认证的数据源插件:
+
+```typescript
+interface DataSourcePlugin {
+  id: string;
+  checkAuthStatus(): Promise<DataSourceAuthStatus>;
+
+  // Phase 1: 交互式授权
+  requestInteractiveAuth(): Promise<void>;
+
+  // Phase 2: 自动换票(可选实现)
+  exchangeToken?(ssoToken: string): Promise<string>;
+}
+```
+
+未来如果接入 Grafana、Metabase、PowerBI 等数据源,同样可以复用这套:
+
+1. Agent 前置检查所有数据源的授权状态
+2. 未授权的先暂停提示用户点击
+3. 如果该数据源支持 Token Exchange,则自动完成
+
+---
+
+## 6. 决策记录
+
+| 决策 | 选择 | 理由 |
+|------|------|------|
+| 是否用 Service Account 全局访问 | 否 | 多用户环境需要尊重个人数据权限 |
+| Phase 1 vs 直接做 Phase 2 | 先 Phase 1 | Agent 功能尚未开发,过早做 Token Exchange 意义不大;Phase 1 零 Superset 改动成本 |
+| 按需授权是否阻断 Agent | 仅暂停 | Agent 暂停等待授权后自动恢复,不终止整个分析流程 |
+| Token Exchange 是否标准协议 | 自定义端点 | OAuth2 Token Exchange (RFC 8693) Superset 不支持,自建端点更可控 |
+| 授权提示时机 | Agent 执行前预检 | 避免分析进行到一半再中断,体验更好 |
diff --git a/design-docs/8-unified-data-source-panel.md b/design-docs/8-unified-data-source-panel.md
new file mode 100644
index 00000000..f6c07d32
--- /dev/null
+++ b/design-docs/8-unified-data-source-panel.md
@@ -0,0 +1,384 @@
+# Unified Data Source Panel — File-Directory Approach
+
+## Status: Draft / Discussion
+
+## 1. Problem
+
+The current Superset plugin uses a two-tab layout (Dashboards tab + Datasets tab) that ultimately does the same thing: load a dataset into the workspace. As we add more data plugins (Superset, Metabase, databases, file uploads, etc.), users need a single, intuitive way to browse and import data from all sources.
+
+Additionally, there's no clear starting experience — before any data is loaded, the user sees a landing page with upload options and demos, but no persistent "data browser" that encourages exploration.
+
+## 2. Proposal
+
+### 2.1 File-Directory Panel on Left Side of Data Thread
+
+Add a collapsible **data source browser** on the left side, styled like a file system tree. Users can expand/collapse sources, browse their contents, and import data into the workspace with a single click.
+ +``` +DATA SOURCES (collapsible sidebar) +───────────────────────────────── +▸ 📂 Local Files + upload.csv + paste-data.tsv + +▾ 📂 Superset (connected) + ▾ 📊 Q3 Sales Dashboard + orders_fact (150k rows) [⊕] + product_dim (2k rows) [⊕] + region_hierarchy (500 rows) [⊕] + ▸ 📊 Customer Analytics + ▸ 📁 Ungrouped Datasets + raw_events (1M rows) [⊕] + +▸ 📂 MySQL — analytics-db + schema: public + ▸ users + ▸ events + +▸ 📂 Metabase (not connected) +``` + +**[⊕] = one click to import** into workspace (adds table to data thread). + +### 2.2 Hierarchy Design + +``` +Plugin (data source) + └─ Group (optional: dashboard, schema, folder) + └─ Table / Dataset +``` + +**Open question: should groups nest deeper?** + +| Approach | Example | Pros | Cons | +|----------|---------|------|------| +| **Flat (2 levels)** | Plugin → Tables | Simple, fast to scan | Databases with many schemas may be overwhelming | +| **Grouped (3 levels)** | Plugin → Group → Tables | Natural for dashboards, schemas | Deeper nesting = more clicks | +| **Plugin-defined** | Plugins define their own depth (Superset uses groups, file upload is flat) | Each plugin presents data naturally; respects the source's native structure | Slightly inconsistent tree depth | + +**Recommendation: Plugin-defined hierarchy.** The tree renders whatever structure the plugin provides — Data Formulator doesn't impose or flatten it. Each plugin knows its data best: Superset naturally groups by dashboard, a database plugin exposes schema → table, and file uploads are flat. This respects the source system's native organization and avoids lossy abstraction. + +### 2.3 Interaction Model + +| Action | Behavior | +|--------|----------| +| **Expand plugin** | If not connected, show login/connect prompt inline. If connected, fetch and show contents. | +| **Expand group (dashboard)** | Fetch datasets in that group. Shows row count and column info. | +| **Click [⊕] on a table** | If dataset fits within row limit → import directly. 
If it exceeds the limit → pop up a filter/column-selection dialog (see §2.5). | +| **Right-click / long-press** | Context menu: Force open filter dialog, custom table name. | +| **Drag table** | (Future) Drag into data thread to position in a specific chain. | +| **Search** | Filter tree by name across all sources. | +| **Switch plugin** | All plugins visible at once — no switching needed. Collapse ones you don't use. | + +### 2.4 Plugin Switching + +Since all plugins appear as top-level folders in one tree, there's **no need to switch** between them. Users just expand the source they want. This is better than tabs/dropdowns: +- No "which tab am I on?" confusion +- Easy to pull data from multiple sources in one session +- Collapsed plugins take minimal space + +### 2.5 Progressive Import: Auto-Filter for Large Datasets + +One button [⊕] handles both small and large datasets: + +**Small dataset (within row limit):** Import happens immediately, no extra steps. + +**Large dataset (exceeds row limit):** A filter dialog pops up automatically: + +``` +┌─ Import: orders_fact (1.2M rows) ──────────────────┐ +│ │ +│ This dataset exceeds the row limit (50,000). │ +│ Select columns and filters to narrow the data. 
│ +│ │ +│ Columns (12 available): │ +│ ☑ order_id ☑ customer_id ☑ amount │ +│ ☑ region ☐ internal_id ☐ updated_at │ +│ ☑ order_date ☐ raw_payload ☑ status │ +│ │ +│ Filters: │ +│ ┌─────────────┬────┬──────────────────────┐ │ +│ │ region │ = │ US, EU │ │ +│ │ order_date │ >= │ 2025-01-01 │ │ +│ │ │ │ [+ Add filter] │ │ +│ └─────────────┴────┴──────────────────────┘ │ +│ │ +│ Estimated rows after filter: ~38,000 │ +│ │ +│ [Cancel] [Import] │ +└─────────────────────────────────────────────────────┘ +``` + +**Design rationale:** +- **One button for everything** — no upfront decision about "raw vs filtered" +- **Zero friction for small data** — most imports are instant +- **Progressive disclosure** — filter UI only appears when actually needed +- **Column selection** — users can drop columns they don't need, reducing data size +- **Server-side filtering** — filters are applied as SQL WHERE clauses before download, so only the relevant subset crosses the wire +- Users can also right-click any dataset to force-open the filter dialog even for small datasets + +### 2.6 Views vs Tables + +Data sources can expose both **tables** (raw data) and **views** (pre-filtered/transformed data). The tree doesn't distinguish between them at the interaction level — both are leaf nodes with [⊕] to import. The difference is just metadata. 
+ +- A Superset dashboard's filtered dataset = a **view** +- A MySQL `CREATE VIEW` = a **view** +- A raw database table = a **table** + +The plugin labels each leaf node with its type, and optionally shows the view definition as a code snippet: + +``` +▾ 📂 Superset + ▾ 📊 Q3 Sales Dashboard + orders_fact (view) (150k rows) [⊕] + WHERE region IN ('US','EU') AND order_date >= '2025-01-01' + product_dim (table) (2k rows) [⊕] + +▾ 📂 MySQL — analytics-db + ▾ 📁 public + users (table) (500k rows) [⊕] + active_users (view) (50k rows) [⊕] + SELECT * FROM users WHERE status = 'active' +``` + +The `TreeNode` supports this simply: + +```typescript +interface TreeNode { + // ... existing fields ... + metadata?: { + rowCount?: number; + columnCount?: number; + nodeKind?: 'table' | 'view'; // Displayed as label + viewDefinition?: string; // Shown as code snippet if present + }; +} +``` + +Users click [⊕] on either — the import flow is identical. The view definition is informational so users understand what data they're getting. + +## 3. Starting Panel (Empty State) + +Before the user loads any data, the current landing page shows upload options + demo sessions. The question: **how should the data source panel appear here?** + +### Landing Page (Before Data is Loaded) + +The existing landing page is preserved — it shows quick-start actions, example sessions, and recent workspaces. The data source browser is embedded as a section within the landing page, giving users a preview of available sources and encouraging them to connect before entering the editor. 
+ +``` +┌─────────────────────────────────────────────────────────┐ +│ DATA FORMULATOR │ +│ AI-powered data visualization │ +│ │ +│ ┌─ Quick Start ───────┐ ┌─ Data Sources ──────────┐ │ +│ │ 📎 Upload CSV │ │ ▸ Superset (Connect →) │ │ +│ │ 📋 Paste data │ │ ▸ MySQL (Connect →) │ │ +│ │ 🔗 From URL │ │ ▸ Metabase (Connect →) │ │ +│ └─────────────────────┘ └─────────────────────────┘ │ +│ │ +│ ┌─ Examples ──────────────────────────────────────┐ │ +│ │ [Stock Prices] [Gas Prices] [Movies] [...] │ │ +│ └─────────────────────────────────────────────────┘ │ +│ │ +│ Recent Workspaces │ +│ ┌──────────┐ ┌──────────┐ ┌──────────┐ │ +│ │ Sales Q3 │ │ Customer │ │ Survey │ │ +│ │ 3 tables │ │ 5 tables │ │ 2 tables │ │ +│ └──────────┘ └──────────┘ └──────────┘ │ +└─────────────────────────────────────────────────────────┘ +``` + +- The "Data Sources" card lists configured plugins as collapsed entries +- Clicking "Connect →" opens the plugin's auth flow inline or in a dialog +- Once connected, the entry expands to show top-level groups/tables right on the landing page +- Clicking a dataset or uploading a file transitions into the **editor layout** + +### Editor Layout (After Data is Loaded) + +Once the user imports data, the UI transitions to the editor with the file-tree data source panel on the left: + +``` +┌─────────────────┬───────────────────────────────────────┐ +│ DATA SOURCES │ │ +│ │ Data Thread / Visualization │ +│ ▸ Upload Files │ │ +│ ▾ Superset │ (editor content) │ +│ ▾ Q3 Sales │ │ +│ orders_fact │ │ +│ product_dim │ │ +│ ▸ Analytics │ │ +│ ▸ MySQL │ │ +│ ───────────────│ │ +│ WORKSPACE │ │ +│ orders_fact ✓ │ │ +│ my_upload.csv │ │ +└─────────────────┴───────────────────────────────────────┘ +``` + +- The file-tree panel is collapsible to save space +- Already-imported tables show a ✓ badge in the source tree +- The WORKSPACE section below shows tables currently in the workspace + +## 4. Plugin-Provided Metadata Contract + +The UI is **entirely metadata-driven**. 
Plugins have **no custom frontend code** — there are no per-plugin React components, no `SupersetPanel.tsx` or `MySQLPanel.tsx`. Instead, the frontend reads structured metadata from the plugin's backend API and renders one generic tree component for all plugins. + +The flow: +1. Frontend calls `GET /api/plugins/` → gets list of registered plugins + their descriptors +2. Frontend calls `GET /api/plugins/{id}/children?parentId=...` → gets tree nodes +3. Frontend renders everything using the same generic tree component + +### 4.1 Plugin Descriptor (Static Metadata) + +Returned by the backend at plugin registration / discovery time. Tells the UI what this plugin looks like and what it can do: + +```typescript +interface DataSourcePluginDescriptor { + id: string; // e.g. "superset", "mysql" + displayName: string; // e.g. "Superset" + icon: string; // Icon identifier or URL + + // Authentication + requiresAuth: boolean; + authType?: 'sso' | 'credentials' | 'connection-string'; + + // Hierarchy declaration — tells the UI what levels to expect + hierarchy: HierarchyLevel[]; + + // Capabilities — tells the UI what actions to offer + capabilities: { + search?: boolean; // Can this plugin handle server-side search? + preview?: boolean; // Can tables be previewed before import? + serverSideFilter?: boolean; // Can the plugin apply WHERE clauses before download? + rowLimitOptions?: number[]; // e.g. [20000, 50000, 100000] + }; +} + +// Each level describes one tier of the tree +interface HierarchyLevel { + type: string; // e.g. "dashboard", "schema", "table" + label: string; // Display name for this level, e.g. "Dashboards" + icon?: string; // Default icon for nodes at this level + expandable: boolean; // Does this level have children? + isLeaf?: boolean; // Is this the importable data level? 
+} +``` + +**Example descriptors:** + +```typescript +// Superset: 2 levels (dashboard → dataset) +{ + id: 'superset', + hierarchy: [ + { type: 'dashboard', label: 'Dashboards', icon: '📊', expandable: true }, + { type: 'dataset', label: 'Datasets', icon: '📄', expandable: false, isLeaf: true } + ], + capabilities: { search: true, serverSideFilter: true, rowLimitOptions: [20000, 50000, 100000] } +} + +// MySQL: 2 levels (schema → table) +{ + id: 'mysql', + hierarchy: [ + { type: 'schema', label: 'Schemas', icon: '📁', expandable: true }, + { type: 'table', label: 'Tables', icon: '📄', expandable: false, isLeaf: true } + ], + capabilities: { search: true, preview: true, rowLimitOptions: [10000, 50000, 200000] } +} + +// File upload: flat (just tables) +{ + id: 'local-files', + hierarchy: [ + { type: 'file', label: 'Files', icon: '📄', expandable: false, isLeaf: true } + ], + capabilities: { search: false } +} +``` + +### 4.2 Backend API Endpoints (Dynamic Metadata) + +Each plugin backend exposes a standard set of REST endpoints. The frontend fetches tree content lazily as the user expands nodes — all through the same generic API shape: + +``` +# All plugins expose the same endpoint pattern: +GET /api/plugins/{plugin_id}/auth/status +POST /api/plugins/{plugin_id}/auth/login +GET /api/plugins/{plugin_id}/children?parentId= +POST /api/plugins/{plugin_id}/load +``` + +The backend plugin implements a standard Python interface: + +```python +class DataSourcePlugin: + descriptor: DataSourcePluginDescriptor + + def get_auth_status(self, session) -> AuthStatus: ... + def authenticate(self, session, credentials) -> AuthResult: ... + + # Tree content — generic node fetching + # parent_id=None → root-level nodes (dashboards, schemas, etc.) + # parent_id= → children of that node + def get_children(self, session, parent_id: str | None) -> list[TreeNode]: ... 
+
+    # Import a leaf node into workspace
+    # options may include column selection, filters, row limit (from the generic filter dialog)
+    def load_table(self, session, node_id: str, options: LoadOptions) -> LoadResult: ...
+```
+
+```typescript
+interface TreeNode {
+  id: string;
+  name: string;
+  type: string;           // Matches a HierarchyLevel.type
+  icon?: string;          // Override default icon
+  metadata?: {            // Displayed as secondary info
+    rowCount?: number;
+    columnCount?: number;
+    [key: string]: any;   // Plugin can add custom display fields
+  };
+  hasChildren: boolean;   // Whether expand arrow is shown
+}
+```
+
+### 4.3 How the UI Uses This
+
+The tree renderer is **one generic React component** (`<DataSourceTree />`) shared across all plugins:
+
+1. On startup, fetches `GET /api/plugins/` → gets all plugin descriptors
+2. Reads `descriptor.hierarchy` to know what levels to expect, what icons/labels to use
+3. Calls `GET /api/plugins/{id}/children` (no `parentId`) to populate root nodes when plugin is expanded
+4. Calls the same endpoint with `parentId=<nodeId>` when a non-leaf node is expanded
+5. Shows [⊕] import button on leaf nodes (`isLeaf: true`)
+6. On [⊕] click: if dataset fits within row limit, imports directly; if it exceeds the limit, opens the generic filter/column-selection dialog (§2.5)
+7. If plugin declares `serverSideFilter: true`, the filter dialog sends column/filter selections to the plugin backend for server-side execution
+
+**No plugin-specific frontend code exists.** Adding a new data source means writing only a backend plugin that implements the standard Python interface — the UI picks it up automatically.
+
+## 5. 
Migration from Current Design
+
+| Current | New |
+|---------|-----|
+| SupersetPanel with 2 tabs | Single tree under "Superset" folder, datasets grouped by dashboard |
+| DataLoadMenu (upload/paste/URL) | "Local Files" / "Upload" top-level folder in tree |
+| Separate plugin panels | All plugins in one tree |
+| Landing page with demos | Landing page preserved, with a "Data Sources" card embedded (§3) |
+
+## 6. Design Decisions
+
+1. **Search scope**: Global search across all plugins with source badges.
+2. **Lazy loading**: Load on expand, cache aggressively.
+3. **Workspace section in tree**: A "Recently Imported Tables" section appears in the tree, showing tables the user has previously imported across sessions. This makes it easy to reuse the same data for new analysis. For v1, this can simply display tables from existing sessions rather than maintaining a separate copy.
+4. **Multi-instance plugins**: Supported. Users can connect multiple instances of the same plugin type (e.g., two MySQL databases). Each instance gets a unique plugin instance ID and appears as a separate top-level folder.
+5. **Drag-and-drop**: Click-to-import only for v1. Drag-and-drop from source tree to data thread is a future enhancement.
+
+## 7. Open Questions
+
+(None — all resolved.)
+
+## 8. 
Related Docs + +- [1-data-source-plugin-architecture.md](1-data-source-plugin-architecture.md) — Plugin system design +- [1-sso-plugin-architecture.md](1-sso-plugin-architecture.md) — SSO authentication +- [2-external-dataloader-enhancements.md](2-external-dataloader-enhancements.md) — Data loading improvements diff --git a/design-docs/9-generalized-data-source-plugins.md b/design-docs/9-generalized-data-source-plugins.md new file mode 100644 index 00000000..ed3e0fa7 --- /dev/null +++ b/design-docs/9-generalized-data-source-plugins.md @@ -0,0 +1,1744 @@ +# Generalized Data Source Plugins — Unifying DataLoader + Plugin into a Lifecycle-Managed Connection + +## Status: Phase 3 complete (legacy plugins removed, backend restructured) + +## 1. Problem + +We have **two separate abstractions** for loading external data: + +| Abstraction | Example | Auth | Catalog Browsing | Refresh | Session Lifecycle | +|-------------|---------|------|-------------------|---------|-------------------| +| **ExternalDataLoader** | MySQL, PostgreSQL, Kusto, BigQuery, S3 | One-shot (params in request) | `list_tables()` per request | Manual re-import | None — stateless | +| **DataSourcePlugin** | Superset | Full (login/session/vault) | Rich catalog with caching | Not implemented | Full — session, token refresh | + +This split causes problems: + +1. **No persistent connections for databases.** A user who connects to PostgreSQL to browse tables must re-send credentials every time. There's no "logged into Postgres" state. +2. **No refresh.** Once a table is imported from MySQL, there's no way to re-pull the latest data without manually re-entering connection details. +3. **The Superset plugin is over-specialized.** It hard-codes dashboard/dataset concepts. Meanwhile, Kusto, PostgreSQL, MySQL all need the same pattern (auth → browse catalog → filter → import → refresh) but don't have it. +4. 
**Plugin naming is BI-centric.** `DataSourcePlugin` was designed for BI platforms (Superset, Metabase), but the real need is broader: any system you can authenticate into and continuously pull data from. + +### The Key Insight + +A DataLoader already knows *how* to talk to a data source (connect, list tables, fetch data). A Plugin knows *how* to manage a session (login, persist auth, browse, present UI). **Combining them gives us a lifecycle-managed data connection** — which is what users actually want. + +## 2. Proposal: `DataConnector` — A Generalized Plugin Built from a DataLoader + +### 2.1 Core Idea + +Define a **generic plugin factory** that takes any `ExternalDataLoader` class and automatically wraps it with: + +- **Session management** — persistent connection state (logged in / not) +- **Catalog browsing** — `list_tables()` exposed as a browsable tree +- **Filtered import** — column selection + row limits +- **Refresh** — re-fetch a previously imported table with the same parameters +- **Auto-discovery** — same env-var gating as existing plugins + +This means: to add "PostgreSQL as a connected data source," you write **zero new plugin code**. The existing `PostgreSQLDataLoader` is automatically promoted to a full plugin with auth, catalog, refresh, and UI. 
+ +### 2.2 Architecture + +``` +┌─────────────────────────────────────────────────────────────┐ +│ DataConnector │ +│ (generic plugin framework) │ +│ │ +│ ┌──────────────┐ ┌──────────────┐ ┌───────────────────┐ │ +│ │ Auth Layer │ │ Catalog Layer│ │ Data Layer │ │ +│ │ │ │ │ │ │ │ +│ │ • login() │ │ • list() │ │ • load() │ │ +│ │ • logout() │ │ • detail() │ │ • refresh() │ │ +│ │ • status() │ │ • search() │ │ • preview() │ │ +│ │ • refresh() │ │ • tree() │ │ │ │ +│ └──────┬───────┘ └──────┬───────┘ └────────┬──────────┘ │ +│ │ │ │ │ +│ └─────────────────┼────────────────────┘ │ +│ │ │ +│ ┌───────▼────────┐ │ +│ │ ExternalData │ │ +│ │ Loader │ │ +│ │ (existing) │ │ +│ └────────────────┘ │ +└─────────────────────────────────────────────────────────────┘ +``` + +### 2.3 The Unification: Databases and BI Tools Are Both Hierarchical Data Sources + +From DF's perspective, **every external data source is the same thing**: an authenticated system with a hierarchical catalog whose leaf nodes are importable tables. The only difference is what the intermediate levels are called: + +| Source Type | Hierarchy | Leaf Node | +|-------------|-----------|----------| +| MySQL | `server → database → table` | table | +| PostgreSQL | `server → database → schema → table` | table / view | +| BigQuery | `project → dataset → table` | table / view | +| Kusto | `cluster → database → table` | table | +| S3 | `bucket → prefix → object` | CSV/Parquet file | +| **Superset** | `instance → dashboard → dataset` | dataset (= filtered table) | +| **Metabase** | `instance → collection → question` | question (= query result) | +| **Grafana** | `instance → datasource → query` | query result | + +The core user loop is always: **connect → browse tree → pick leaf → import → refresh.** + +This means we don't need separate abstractions for "BI plugin" vs. "database plugin." 
We unify them: + +| Component | Change | +|-----------|--------| +| `ExternalDataLoader` | **Evolves** into the universal data protocol. Gains `catalog_hierarchy()` + `ls()` + `effective_hierarchy()` for tree browsing with scope pinning. | +| `DataSourcePlugin` | **Stays** as the abstract base, but now primarily implemented via `DataConnector`. | +| **New: `DataConnector`** | Generic `DataSourcePlugin` subclass that wraps any `ExternalDataLoader`. Auto-generates auth/catalog/data routes. | +| **New: `DataConnectorPanel`** | Generic React component for all connected data sources (login → tree browser → import). | +| `SupersetPlugin` | **Migrates** to a `DataConnector` backed by a `SupersetLoader`. Dashboards are `"namespace"` nodes, datasets are `"table"` nodes — hierarchy labels provide the UI terminology. | + +## 3. API Design + +### 3.1 Backend: `DataConnector` Base + +```python +class DataConnector(DataSourcePlugin): + """A DataSourcePlugin auto-generated from an ExternalDataLoader. + + Provides lifecycle management: connection persistence, catalog browsing, + filtered import, and refresh — all driven by the underlying loader. 
+ """ + + # Subclass must set these (or override manifest()) + LOADER_CLASS: type[ExternalDataLoader] # e.g., PostgreSQLDataLoader + SOURCE_ID: str # e.g., "postgresql" + SOURCE_NAME: str # e.g., "PostgreSQL" + + # ----- Auto-generated manifest from loader metadata ----- + + @staticmethod + def manifest() -> dict: + """Built from LOADER_CLASS.list_params() + SOURCE_ID.""" + return { + "id": cls.SOURCE_ID, + "name": cls.SOURCE_NAME, + "env_prefix": f"PLG_{cls.SOURCE_ID.upper()}", + "required_env": [], # DB plugins enabled by default (user provides creds at runtime) + "auth_modes": ["password"], + "capabilities": ["tables", "refresh"], + } + + # ----- Auth Routes (auto-generated) ----- + # POST /api/plugins/{id}/auth/connect — validate & persist connection + # POST /api/plugins/{id}/auth/disconnect — tear down connection + # GET /api/plugins/{id}/auth/status — is connection alive? + + # ----- Catalog Routes (auto-generated) ----- + # POST /api/plugins/{id}/catalog/ls — list children at a path (lazy) + # POST /api/plugins/{id}/catalog/metadata — get metadata for one node + + # ----- Data Routes (auto-generated) ----- + # POST /api/plugins/{id}/data/import — fetch & import to workspace + # POST /api/plugins/{id}/data/refresh — re-import with stored params + # POST /api/plugins/{id}/data/preview — fetch first N rows for preview +``` + +### 3.2 The Full API Surface + +#### 3.2.1 Auth / Connection Management + +``` +POST /api/plugins/{id}/auth/connect + Body: { params: { host, port, user, password, database, ... } } + Response: { status: "connected", user: "...", server: "...", database: "..." } + Side-effect: Validates connection, stores params in session (+ vault if available) + +POST /api/plugins/{id}/auth/disconnect + Response: { status: "disconnected" } + Side-effect: Clears session + vault + +GET /api/plugins/{id}/auth/status + Response: { + connected: true/false, + user: "...", + server: "...", + database: "...", + params_form: [...] 
// list_params() for the login form if not connected + } + Side-effect: If session empty but vault has creds → auto-reconnect +``` + +**Note on auth diversity:** "Connecting" means different things for different sources. For traditional databases it's validating host/user/password (e.g., `SELECT 1`). For cloud databases it may be OAuth (Azure AD for Kusto, IAM for AWS RDS). For BI tools it's obtaining a JWT. The framework doesn't care — the loader's `list_params()` declares what it needs, and the `auth_mode()` (see §6.3) tells the framework whether to persist a connection object or a token. The generic connection form renders whatever params the loader declares (password fields, file pickers for service account keys, OAuth redirect buttons, etc.). + +#### 3.2.2 Catalog Browsing (Tree-Based) + +The catalog is a **lazy tree** that mirrors the data source's natural hierarchy (see §3.4 for full design). Each expand in the UI triggers one API call. + +We use **POST** for catalog APIs (not GET) because: +- `path` is structured data (JSON array) that may contain special characters (dots, spaces in dashboard names) +- The request body will grow as we add filters, pagination, and import context +- Catalog results are not cacheable — the source data changes + +``` +POST /api/plugins/{id}/catalog/ls + Body: { + path: [], // JSON array: [] = root, ["mydb"], ["mydb","public"] + filter: "...", // optional name filter + } + Response: { + hierarchy: ["database", "schema", "table"], // source's level labels (from catalog_hierarchy) + effective_hierarchy: ["schema", "table"], // browsable levels (pinned levels removed) + path: [], + nodes: [ + { name: "analytics", node_type: "namespace", path: ["analytics"], + metadata: { table_count: 42 } }, + { name: "production", node_type: "namespace", path: ["production"], + metadata: { table_count: 15 } }, + ... 
+ ] + } + +POST /api/plugins/{id}/catalog/ls + Body: { path: ["production", "public"] } + Response: { + hierarchy: ["database", "schema", "table"], + effective_hierarchy: ["schema", "table"], + path: ["production", "public"], + nodes: [ + { name: "users", node_type: "table", path: ["production","public","users"], + metadata: { row_count: 150000, columns: [...] } }, + ... + ] + } + +POST /api/plugins/{id}/catalog/metadata + Body: { path: ["production", "public", "users"] } + Response: { + name: "users", + path: ["production", "public", "users"], + node_type: "table", + columns: [...], // full column detail + row_count: 150000, + sample_rows: [...], // first 5 rows for preview + description: "...", // table comment if available + } +``` + +**How this maps to `ExternalDataLoader`:** The `ls(path)` method (§3.4) drives every tree expansion. `DataConnector` adds caching (per-session, with TTL) on top. + +#### 3.2.3 Data Loading + Refresh + +``` +POST /api/plugins/{id}/data/import + Body: { + source_table: "public.users", + table_name: "users", // name in workspace (optional, auto-generated) + size: 50000, // row limit + sort_columns: ["created_at"], + sort_order: "desc", + columns: ["id", "email", "name"], // column selection (optional) + } + Response: { + table_id: "tbl_abc123", + table_name: "users", + row_count: 50000, + columns: [...], + refreshable: true, + refresh_params: { ... } // stored for later refresh + } + +POST /api/plugins/{id}/data/refresh + Body: { + table_id: "tbl_abc123", // workspace table to refresh + } + Response: { + table_id: "tbl_abc123", + row_count: 52000, // may differ from original + refreshed_at: "2026-04-13T10:30:00Z" + } + Side-effect: Re-runs the same fetch with stored params, overwrites parquet + +POST /api/plugins/{id}/data/preview + Body: { + source_table: "public.users", + columns: ["id", "email"], // optional column selection + size: 10 // small preview + } + Response: { + columns: [...], + rows: [...] 
// first N rows + } +``` + +### 3.3 Refresh Mechanism + +Refresh is a first-class concept. When a table is imported via a `DataConnector`, the workspace metadata stores: + +```python +{ + "table_id": "tbl_abc123", + "table_name": "users", + "source": { + "plugin_id": "postgresql", # which plugin + "source_table": "public.users", # what was fetched + "size": 50000, + "sort_columns": ["created_at"], + "sort_order": "desc", + "columns": ["id", "email", "name"], # column selection + "fetched_at": "2026-04-13T10:00:00Z" + }, + "refreshable": True +} +``` + +On refresh: +1. Check if the plugin connection is still alive (auto-reconnect via vault if needed) +2. Re-run `loader.fetch_data_as_arrow()` with stored params +3. Overwrite the parquet file in workspace +4. Update `fetched_at` timestamp +5. Notify frontend of updated row count / schema changes + +### 3.4 Hierarchical Catalog Exploration + +#### The Problem with Single-Database Loaders + +Current loaders are scoped to a single database at init time: + +| Loader | Init Scope | `list_tables()` Sees | Natural Full Hierarchy | +|--------|-----------|---------------------|------------------------| +| MySQL | `host + database` | Tables in that one DB | `server → database → table` | +| PostgreSQL | `host + database` | Schemas + tables in one DB | `server → database → schema → table` | +| MSSQL | `server + database` | Schemas + tables in one DB | `server → database → schema → table` | +| Kusto | `cluster + database` | Tables in that one DB | `cluster → database → table` | +| BigQuery | `project (+ dataset)` | Datasets + tables | `project → dataset → table` | +| MongoDB | `host + database` | Collections in one DB | `server → database → collection` | +| S3 | `bucket` | Keys in that bucket | `bucket → prefix → object` | + +This means a user exploring a MySQL server with 10 databases must disconnect and reconnect 10 times. That's friction we should eliminate. 
+ +#### Proposed: Tree-Based Catalog Model + +Instead of the flat `list_tables()` → `[table, table, ...]` model, introduce a **tree-based catalog** where loaders declare their hierarchy and support lazy expansion at each level: + +```python +@dataclass +class CatalogNode: + """A node in the data source's catalog tree. + + Only two kinds of node: + - "namespace" — expandable container (database, schema, bucket, dashboard, …). + The hierarchy's "label" tells the UI what to call it. + - "table" — importable leaf (table, file, dataset, …). + + The *level name* (e.g. "Database", "Schema") comes from + catalog_hierarchy(), not from the node itself. + """ + name: str # Display name ("public", "users", "events") + node_type: str # "namespace" or "table" + path: list[str] # Full path from root: ["mydb", "public", "users"] + metadata: dict | None = None # Row count, column info, description, etc. +``` + +This follows the **Iceberg REST / Unity Catalog convention**: every container is a `namespace`, every importable unit is a `table`. The hierarchy labels (what to call each level in the UI) come from `catalog_hierarchy()`, keeping the node model itself minimal and universal. + +Each data source declares its hierarchy as a sequence of **level descriptors** — each with a type key and the display label users see: + +```python +class ExternalDataLoader(ABC): + + @staticmethod + def catalog_hierarchy() -> list[dict[str, str]]: + """Declare the levels in this source's catalog tree. + + Returns ordered list from root to leaf. Each entry has: + - "key": internal identifier (used in params, APIs) + - "label": user-facing display name + + The last level is always the importable unit (table/file/dataset). 
+ + Examples: + MySQL: + [{"key": "database", "label": "Database"}, + {"key": "table", "label": "Table"}] + + PostgreSQL: + [{"key": "database", "label": "Database"}, + {"key": "schema", "label": "Schema"}, + {"key": "table", "label": "Table"}] + + BigQuery: + [{"key": "project", "label": "Project"}, + {"key": "dataset", "label": "Dataset"}, + {"key": "table", "label": "Table"}] + + Superset: + [{"key": "dashboard", "label": "Dashboard"}, + {"key": "dataset", "label": "Dataset"}] + + S3: + [{"key": "bucket", "label": "Bucket"}, + {"key": "prefix", "label": "Folder"}, + {"key": "object", "label": "File"}] + + Default: [{"key": "table", "label": "Table"}] (flat). + """ + return [{"key": "table", "label": "Table"}] +``` + +The keys serve double duty: they match the parameter names in `list_params()` (see §3.4.2 Scope Pinning), and the labels are what users see in the tree UI — so each source presents its own natural terminology. + +#### Lazy Expansion API + +Browsing happens **one level at a time**, like expanding directories in a file browser. The loader only fetches children when the user expands a node: + +```python +class ExternalDataLoader(ABC): + + def ls( + self, + path: list[str] | None = None, + filter: str | None = None, + ) -> list[CatalogNode]: + """List children at a catalog path (like `ls` in a filesystem). + + path is relative to the *effective* (unpinned) hierarchy. + + * path=[] — list nodes at the first browsable level. + * path=["public"] — expand that node one level deeper. + + Nodes are either "namespace" (expandable) or "table" (importable leaf). + The hierarchy's label tells the UI what to call each level. + + Args: + path: Path to list, as a list of names at each level. + None or [] = root level. + filter: Optional name filter (substring match). + + Returns: + Children at the given path. 
+ + Examples: + MySQL (database not pinned): + ls([]) → [CatalogNode("mydb", "namespace", ["mydb"])] + ls(["mydb"]) → [CatalogNode("users", "table", ["mydb","users"])] + + PostgreSQL (database not pinned): + ls([]) → [CatalogNode("analytics", "namespace", ["analytics"])] + ls(["analytics"]) → [CatalogNode("public", "namespace", ["analytics","public"])] + ls(["analytics","public"])→ [CatalogNode("users", "table", ["analytics","public","users"])] + + PostgreSQL (database="analytics" pinned → effective hierarchy is schema→table): + ls([]) → [CatalogNode("public", "namespace", ["public"])] + ls(["public"]) → [CatalogNode("users", "table", ["public","users"])] + + BigQuery (project pinned): + ls([]) → [CatalogNode("sales", "namespace", ["sales"])] + ls(["sales"]) → [CatalogNode("orders", "table", ["sales","orders"])] + """ + pass +``` + +#### Scope Pinning: Pre-Configuring the Starting Level + +Not every user should browse from the top. An admin might restrict a deployment to one database, or a user might only care about one schema. **Scope pinning** lets connection params fix one or more hierarchy levels, so the tree starts deeper: + +``` +Full hierarchy (MySQL): server → database → table +Pinned to database="mydb": server → table (user sees tables directly) + +Full hierarchy (PostgreSQL): server → database → schema → table +Pinned to database="prod": server → schema → table +Pinned to db+schema: server → table +``` + +This works naturally because hierarchy level keys match parameter names in `list_params()`. When a connection param matches a hierarchy level key, that level is pinned and hidden from browsing: + +```python +# MySQL — no pinning: user browses databases → tables +MySQLDataLoader({"host": "db.example.com", "user": "me", "password": "..."}) +# ls([]) → [CatalogNode("mydb", "namespace", ["mydb"]), CatalogNode("other", "namespace", ["other"])] +# ls(["mydb"]) → [CatalogNode("users", "table", ["mydb","users"]), ...] 
+ +# MySQL — database pinned: user sees tables directly +MySQLDataLoader({"host": "db.example.com", "user": "me", "password": "...", "database": "mydb"}) +# ls([]) → [CatalogNode("users", "table", ["users"]), ...] (database level skipped) + +# PostgreSQL — database pinned, schema free: user browses schemas → tables +PostgreSQLDataLoader({"host": "...", "user": "...", "password": "...", "database": "prod"}) +# ls([]) → [CatalogNode("public", "namespace", ["public"]), CatalogNode("analytics", "namespace", ["analytics"])] +# ls(["public"]) → [CatalogNode("users", "table", ["public","users"]), ...] + +# BigQuery — project pinned: user browses datasets → tables +BigQueryDataLoader({"project": "my-gcp-project"}) +# ls([]) → [CatalogNode("sales", "namespace", ["sales"]), ...] +``` + +The loader determines the **effective hierarchy** at connection time: + +```python +class ExternalDataLoader(ABC): + def effective_hierarchy(self) -> list[dict[str, str]]: + """Remove pinned levels from the catalog hierarchy. + + A level is pinned when the user provided a non-empty value for its + key in the connection params (e.g., database="prod" pins the database level). + """ + params = getattr(self, "params", {}) or {} + full = self.catalog_hierarchy() + return [level for level in full if not params.get(level["key"])] + + def pinned_scope(self) -> dict[str, str]: + """Return {level_key: value} for every pinned hierarchy level.""" + params = getattr(self, "params", {}) or {} + return { + level["key"]: params[level["key"]] + for level in self.catalog_hierarchy() + if params.get(level["key"]) + } +``` + +**How pinning is configured:** + +| Who | How | Example | +|-----|-----|---------| +| **Admin (env vars)** | Pre-fill params via `PLG_{ID}_{PARAM}` env vars. User never sees these fields. 
| `PLG_MYSQL_HOST=db.internal PLG_MYSQL_DATABASE=analytics` → users only see tables in `analytics` | +| **Admin (connection form)** | Mark params as `hidden` in `list_params()` when env var provides the value | Same as above, but the form shows remaining fields only | +| **User (connection form)** | Fill in or leave blank optional scope params | Leave `database` empty → browse all; fill it in → pinned to that DB | + +#### How `list_params()` Supports Scope Pinning + +```python +@staticmethod +def list_params() -> list[dict[str, Any]]: + return [ + {"name": "host", "type": "string", "required": True, "description": "Database host"}, + {"name": "port", "type": "number", "required": True, "default": 3306}, + {"name": "user", "type": "string", "required": True}, + {"name": "password", "type": "password", "required": True}, + # Scope params: match hierarchy level keys. Optional = user can browse that level. + {"name": "database", "type": "string", "required": False, + "scope_level": True, # <-- marks this as a hierarchy scope param + "description": "Database (leave empty to browse all databases)"}, + ] +``` + +The `scope_level: True` flag tells the framework this param corresponds to a catalog hierarchy level. When provided, it pins that level. When empty, the user browses it. + +#### Catalog API Endpoints (Revised) + +All catalog endpoints use **POST** with JSON body (see §3.2.2 for rationale): + +``` +POST /api/plugins/{id}/catalog/ls + Body: { path: ["mydb", "public"], filter: "..." } + Response: { + hierarchy: ["database", "schema", "table"], // from catalog_hierarchy() + effective_hierarchy: ["schema", "table"], // browsable levels (pinned removed) + path: ["mydb", "public"], + nodes: [ + { + name: "users", + node_type: "table", + path: ["mydb", "public", "users"], + metadata: { row_count: 150000, columns: [...] } + }, + { + name: "orders", + node_type: "table", + path: ["mydb", "public", "orders"], + metadata: { row_count: 1200000, columns: [...] 
} + } + ] + } +``` + +#### Tree Rendering with Scope Pinning + +The same source looks different depending on what's pinned: + +**Unpinned (user browses full hierarchy):** +``` +▾ 📂 MySQL — db.example.com (connected) + ▸ 📁 analytics ← database level + ▾ 📁 production ← database level (expanded) + users (150k rows) [⊕] ← table level (leaf) + orders (1.2M rows) [⊕] + products (5k rows) [⊕] + ▸ 📁 staging +``` + +**Pinned to `database=production` (admin or user pre-configured):** +``` +▾ 📂 MySQL — db.example.com / production (connected) + users (150k rows) [⊕] ← table level (leaf, top-level) + orders (1.2M rows) [⊕] + products (5k rows) [⊕] +``` + +**PostgreSQL — pinned to `database=reporting`, schema browsable:** +``` +▾ 📂 PostgreSQL — warehouse.corp / reporting (connected) + ▾ 📁 public ← schema level (now top-level) + monthly_revenue (3k rows) [⊕] + customer_ltv (50k rows) [⊕] + ▸ 📁 internal +``` + +**BigQuery — unpinned:** +``` +▾ 📂 BigQuery — my-gcp-project (connected) + ▾ 📁 sales_dataset ← dataset level + transactions (10M rows) [⊕] + returns (500k rows) [⊕] + ▸ 📁 analytics_dataset +``` + +**Superset — unpinned:** +``` +▾ 📂 Superset — bi.company.com (connected) + ▾ 📊 Q3 Sales Dashboard ← dashboard level + orders_fact (150k rows) [⊕] + product_dim (2k rows) [⊕] + ▸ 📊 Customer Analytics + ▸ 📁 Ungrouped Datasets +``` + +Each expand click triggers a lazy `ls(path)` call — no upfront loading of the entire catalog. The framework computes `effective_hierarchy()` at connection time to know how many levels to render. + +### 3.5 Revised `ExternalDataLoader` Interface + +The full loader interface after the redesign. The catalog API methods (`catalog_hierarchy`, `ls`, `get_metadata`, `test_connection`) have **default implementations** on the base class so loaders can be upgraded incrementally — un-upgraded loaders still work via fallback to `list_tables()`. + +```python +class ExternalDataLoader(ABC): + """Universal data source driver. 
+ + Required interface for all data sources (databases, BI tools, cloud storage). + """ + + # ----- Connection ----- + + @abstractmethod + def __init__(self, params: dict[str, Any]): + """Initialize with connection parameters.""" + pass + + def test_connection(self) -> bool: + """Validate the connection is alive. Used by auth/status. + Default: tries list_tables(). Subclasses should override with + something cheaper (e.g. SELECT 1).""" + ... + + def get_safe_params(self) -> dict[str, Any]: + """Connection params with secrets removed. For metadata storage.""" + ... # existing implementation + + # ----- Catalog (new — all have defaults for backward compat) ----- + + @staticmethod + def catalog_hierarchy() -> list[dict[str, str]]: + """Declare the *full* hierarchy of this data source. + + Each entry: {"key": "database", "label": "Database"} + Last level is always the importable leaf (table/dataset/file). + Default: [{"key": "table", "label": "Table"}] (flat). + """ + return [{"key": "table", "label": "Table"}] + + def effective_hierarchy(self) -> list[dict[str, str]]: + """Browsable hierarchy — full minus pinned levels. + A level is pinned when its key matches a non-empty connection param.""" + ... + + def pinned_scope(self) -> dict[str, str]: + """Return {level_key: value} for every pinned hierarchy level.""" + ... + + def ls( + self, + path: list[str] | None = None, + filter: str | None = None, + ) -> list[CatalogNode]: + """List children at a catalog path (like `ls` in a filesystem). + + path is relative to the effective (unpinned) hierarchy. + Returns CatalogNode with node_type "namespace" or "table". + Default: falls back to list_tables() at the root level. + """ + ... + + def get_metadata(self, path: list[str]) -> dict[str, Any]: + """Get detailed metadata for a node (columns, row count, sample rows). + Default: finds the node via ls() and returns its metadata dict.""" + ... 
+ + # ----- Flat listing (always available) ----- + + @abstractmethod + def list_tables(self, table_filter: str | None = None) -> list[dict[str, Any]]: + """List all accessible tables within the pinned scope (flat/eager). + + The simple, complete way to see everything the user can access. + Potentially slow for large catalogs — ls() is the lazy alternative. + Both coexist permanently; ls() falls back to this by default.""" + pass + + # ----- Data Fetching ----- + + @abstractmethod + def fetch_data_as_arrow( + self, + source_table: str, + import_options: dict | None = None, + ) -> pa.Table: + """Fetch data from the external source as a PyArrow Table. + + import_options is a single extensible dict: + - size (int): row limit (default: 1000000) + - columns (list[str]): column projection + - sort_columns (list[str]): ordering + - sort_order (str): 'asc' or 'desc' + - filters (list[dict]): standard SPJ filters + - source_filters (dict): BI-tool-specific filters (from CatalogNode.metadata) + """ + pass + + def fetch_preview( + self, + source_table: str, + import_options: dict | None = None, + ) -> pa.Table: + """Fetch a small preview. Default: delegates to fetch_data_as_arrow. + + Loaders can override for efficiency (e.g., TABLESAMPLE). + """ + opts = {"size": 10, **(import_options or {})} + return self.fetch_data_as_arrow( + source_table=source_table, import_options=opts + ) + + def fetch_data_as_dataframe(self, source_table: str, import_options: dict | None = None) -> pd.DataFrame: + """Convenience wrapper. Calls fetch_data_as_arrow().to_pandas().""" + return self.fetch_data_as_arrow(source_table=source_table, import_options=import_options).to_pandas() + + def ingest_to_workspace(self, workspace, table_name, source_table, import_options=None): + """Fetch → Arrow → Parquet in workspace.""" + ... 
# existing implementation + + # ----- Metadata / Config ----- + + @staticmethod + @abstractmethod + def list_params() -> list[dict[str, Any]]: + """Connection parameters (for auto-generated connection form).""" + pass + + @staticmethod + @abstractmethod + def auth_instructions() -> str: + """Human-readable setup guide (markdown).""" + pass + + @staticmethod + def auth_mode() -> str: + """'connection' (default) or 'token'. See §6.3.""" + return "connection" + + @staticmethod + def rate_limit() -> dict | None: + """Optional rate limit hints. See §6.3.""" + return None + + @staticmethod + def import_options(table_metadata: dict) -> list[dict] | None: + """Optional import-time options for the import dialog. See §6.3.""" + return None +``` + +**Key design decisions:** +- **`CatalogNode.node_type`** uses `"namespace"` / `"table"` (following the Iceberg REST / Unity Catalog convention), not per-source types like `"database"`, `"schema"`. The hierarchy labels provide the per-source terminology. +- **`list_tables()` is kept permanently** as the flat/eager complement to `ls()`. It returns every importable table in the pinned scope — simple and complete, but potentially slow. `ls()` is the lazy/hierarchical alternative. The default `ls()` falls back to `list_tables()` for loaders that haven't implemented hierarchical browsing. +- **`effective_hierarchy()` and `pinned_scope()`** live on the loader itself (not on `DataConnector`), since the loader has access to its own `params`. +- **`test_connection()`** has a default implementation, but loaders should override with something lightweight. +- **`import_options`** is a single extensible dict replacing the old scattered `size`/`sort_columns`/`sort_order`/`columns`/`import_context` params. All data-shaping options go through one bag: `size`, `columns`, `sort_columns`, `sort_order`, `filters`, `source_filters`. Loaders extract what they need; unknown keys are ignored. + +## 4. 
Plugin Registration: Config-Driven, Zero Code + +### 4.1 The Insight + +Since every `ExternalDataLoader` is fully self-describing — `list_params()`, `catalog_hierarchy()`, `auth_instructions()`, `auth_mode()` — the framework can auto-register any installed loader as a plugin with **zero Python code**. Users and admins just need to say "enable this loader" and optionally pre-fill some connection params. + +No one should need to touch DF's source code to add a data source. + +### 4.2 Configuration Sources (Priority Order) + +The framework reads plugin config from multiple sources, merged in priority order (higher overrides lower): + +| Priority | Source | Who Uses It | Format | +|----------|--------|-------------|--------| +| 1 (highest) | **Environment variables** | Docker/K8s admins, CI | `DF_SOURCES__{id}__{key}=value` | +| 2 | **Config file** (`data-sources.yml`) | Admins, power users | YAML in project or `~/.data-formulator/` | +| 3 | **UI settings panel** | End users | Saved to workspace config | +| 4 (lowest) | **Auto-discovery** | Default | Any installed loader with deps available | + +### 4.3 Config File: `data-sources.yml` + +A single YAML file declares which data sources are available and how they're pre-configured: + +```yaml +# ~/.data-formulator/data-sources.yml (user-level) +# or ./data-sources.yml (project-level) +# or /etc/data-formulator/data-sources.yml (system-level) + +sources: + # Minimal: just enable a loader by its registry key + - type: postgresql + + # With pre-filled connection params (scope pinning) + - type: mysql + name: "Analytics DB" # custom display name (optional) + icon: mysql # icon key (optional, defaults from loader) + params: + host: db.internal.corp + port: 3306 + database: analytics # pinned — user only sees tables in this DB + + # Multiple instances of the same loader type + - type: postgresql + name: "Production Warehouse" + params: + host: warehouse.corp + port: 5432 + database: prod + + - type: postgresql + name: "Staging" 
+ params: + host: staging.corp + database: staging + + # BI tool + - type: superset + name: "Company Superset" + params: + url: https://bi.company.com + + # Cloud + - type: bigquery + params: + project: my-gcp-project + + # Kusto with Azure AD + - type: kusto + name: "Telemetry Cluster" + params: + kusto_cluster: https://telemetry.kusto.windows.net + +# Optional: disable auto-discovery (only show explicitly configured sources) +auto_discover: false +``` + +**Key design decisions:** +- `type` maps to the loader registry key (e.g., `"postgresql"` → `PostgreSQLDataLoader`) +- Same `type` can appear multiple times → solves the multi-instance problem (Q2) +- `params` pre-fills connection fields — the user only sees what's left +- Sensitive params (`password`, `token`) should use env var references: `password: ${PG_PASSWORD}` + +### 4.4 Environment Variables + +For Docker / CI / Kubernetes deployments where YAML isn't convenient: + +```bash +# Enable PostgreSQL with pre-configured host +DF_SOURCES__pg_prod__type=postgresql +DF_SOURCES__pg_prod__name="Production DB" +DF_SOURCES__pg_prod__params__host=db.internal.corp +DF_SOURCES__pg_prod__params__database=analytics +DF_SOURCES__pg_prod__params__port=5432 + +# Enable Superset +DF_SOURCES__superset__type=superset +DF_SOURCES__superset__params__url=https://bi.company.com + +# Disable auto-discovery +DF_AUTO_DISCOVER_SOURCES=false +``` + +Convention: `DF_SOURCES__{instance_id}__{key}` with `__` as separator (avoids conflict with dots/dashes in names). 
+ +### 4.5 Auto-Discovery (Default Behavior) + +When no config file or env vars are set, the framework **auto-discovers** all installed loaders: + +```python +def discover_sources(app): + """Auto-register every installed ExternalDataLoader as a DataConnector plugin.""" + for key, loader_class in DATA_LOADERS.items(): + # DATA_LOADERS is the existing registry from data_loader/__init__.py + # Only contains loaders whose pip dependencies are installed + plugin = DataConnector.from_loader(loader_class, source_id=key) + register_plugin(app, plugin) + + # Log disabled loaders (missing deps) + for key, reason in DISABLED_LOADERS.items(): + logger.info(f"Source '{key}' not available: {reason}") +``` + +With auto-discovery, a fresh DF install with `pymysql` installed automatically shows "MySQL" in the data source panel — no config needed. The user fills in host/user/password at connect time. + +### 4.6 Auth: Admin-Configured vs. User-Provided + +The config `params` and the loader's `list_params()` together determine what the user sees at connect time. Each param falls into one of three categories: + +| Category | Where it comes from | User sees it? | Example | +|----------|-------------------|--------------|---------| +| **Admin-fixed** | YAML `params` or env var | No — hidden, pre-filled | `host: db.internal.corp` | +| **Admin-defaulted** | YAML `params` with `user_editable: true` | Yes — pre-filled but editable | `port: 5432` | +| **User-provided** | Not in config; loader declares it in `list_params()` | Yes — empty, must fill in | `user`, `password` | + +#### Scenario 1: Admin provides infra, user provides credentials + +The most common enterprise setup. 
Admin locks down the server, user brings their own identity: + +```yaml +# data-sources.yml +sources: + - type: postgresql + name: "Analytics DB" + params: + host: warehouse.corp + port: 5432 + database: analytics +``` + +The user's connect form only shows what's **not** in config: + +``` +┌─ Connect to Analytics DB ──────────────────┐ +│ │ +│ ⓘ Server: warehouse.corp:5432/analytics │ ← info only, not editable +│ │ +│ Username: [ ] │ ← user fills in +│ Password: [•••••••• ] │ ← user fills in +│ │ +│ [Cancel] [Connect] │ +└─────────────────────────────────────────────┘ +``` + +#### Scenario 2: Admin provides everything (shared service account) + +For read-only dashboards or demo deployments. No user interaction needed: + +```yaml +sources: + - type: postgresql + name: "Analytics DB" + auto_connect: true # connect on first access, no form + params: + host: warehouse.corp + database: analytics + user: readonly_svc + password: ${ANALYTICS_DB_PASSWORD} # env var reference — not stored in YAML +``` + +The user clicks "Analytics DB" in the tree → auto-connects immediately. No connect form shown. The password is resolved from the `ANALYTICS_DB_PASSWORD` environment variable at startup. + +#### Scenario 3: User provides everything (auto-discovered) + +No config file. 
The user sees the full connection form: + +``` +┌─ Connect to PostgreSQL ────────────────────┐ +│ Host: [ ] │ +│ Port: [5432 ] │ +│ Username: [ ] │ +│ Password: [•••••••• ] │ +│ Database: [ ] (optional) │ +│ │ +│ [Cancel] [Connect] │ +└─────────────────────────────────────────────┘ +``` + +#### Scenario 4: Token / OAuth sources + +For Kusto (Azure AD), BigQuery (service account), Superset (JWT): + +```yaml +sources: + - type: kusto + name: "Telemetry Cluster" + params: + kusto_cluster: https://telemetry.kusto.windows.net + # No user/password — Kusto uses Azure AD +``` + +The connect form shows whatever the loader's `list_params()` declares — for Kusto that might be an "Authenticate with Azure AD" button that triggers an OAuth redirect. + +#### How `list_params()` drives the form + +The framework computes the connect form at startup: + +```python +def compute_connect_form(loader_class, config_params): + """Determine which params the user needs to fill in.""" + all_params = loader_class.list_params() + form_fields = [] + pinned = {} + + for param in all_params: + if param["name"] in config_params: + # Admin provided this — don't show in form + pinned[param["name"]] = config_params[param["name"]] + else: + # User must provide this + form_fields.append(param) + + return form_fields, pinned +``` + +The result goes into `/api/app-config`: +- `params_form` — fields the user fills in (rendered as the connect form) +- `pinned_params` — values the user can see (as info) but not edit + +#### Credential & Connection Persistence + +Two-level storage, no in-memory tricks: + +| Scope | Where | What | Who manages | +|-------|-------|------|-------------| +| **User connections** | Workspace directory (`workspace/connections/`) | Per-user saved connection params (encrypted) | User, via connect/disconnect | +| **Admin connections** | DF home (`~/.data-formulator/data-sources.yml` or `/etc/data-formulator/`) | Shared/pre-configured sources | Admin, via config file or env vars | + 
+**User connections live in the workspace.** When a user connects to a source, their params (host, user, encrypted password) are saved to `workspace/connections/{source_id}.json`. On next session, the framework reads this file → re-instantiates the loader → user is auto-connected. No vault service, no in-memory pool, no Flask sessions. + +``` +workspace/ + connections/ + pg_prod.json # {"type": "postgresql", "params": {"host": "...", "user": "...", "password": ""}} + superset.json # {"type": "superset", "params": {"url": "...", "username": "...", "token": ""}} + tables/ + users.parquet + orders.parquet + metadata.json +``` + +**Admin connections live in DF home.** The `data-sources.yml` file (§4.3) is read-only for users. Admin-provided params are merged with user-provided params at connect time. + +**Flow:** +1. User submits credentials via connect form +2. Framework validates (instantiate loader, call `test_connection()`) +3. On success: save encrypted params to `workspace/connections/{source_id}.json`, keep loader instance alive for the current process +4. On next session/restart: read saved connections → re-instantiate loaders on first access (lazy) +5. On disconnect: delete the connection file, close loader + +**Loader instances** are created on-demand and cached in-process for the duration of the server process — this is just normal Python object lifecycle, not a special pool. If the process restarts, the saved connection file lets us recreate the loader transparently. + +**Encryption:** Passwords and tokens are encrypted at rest using a per-workspace key (or a key derived from the user's session secret). The framework decrypts on read, never exposes in API responses. + +For **admin-provided credentials** (`auto_connect: true`), the connection file is pre-populated from config at startup — the user never needs to connect manually. 
+ +### 4.7 UI Settings Panel (Future) + +End users can add/remove sources from the DF UI: + +``` +┌─ Settings → Data Sources ───────────────────────────┐ +│ │ +│ Configured Sources: │ +│ ┌──────────────────────┬────────────┬─────────┐ │ +│ │ Name │ Type │ Status │ │ +│ ├──────────────────────┼────────────┼─────────┤ │ +│ │ Production DB │ PostgreSQL │ ● Ready │ │ +│ │ Company Superset │ Superset │ ● Ready │ │ +│ │ Telemetry Cluster │ Kusto │ ○ No dep│ │ +│ └──────────────────────┴────────────┴─────────┘ │ +│ │ +│ [+ Add Source] │ +│ │ +│ Available Source Types: │ +│ PostgreSQL, MySQL, BigQuery, Kusto, S3, MongoDB, │ +│ MSSQL, Azure Blob, Superset │ +│ │ +└──────────────────────────────────────────────────────┘ +``` + +### 4.8 How It Works Internally + +At startup, the framework: + +1. **Scan** `DATA_LOADERS` registry → all installed loader classes +2. **Read** config sources (env vars → YAML → UI settings) → merge +3. **For each configured source** (or auto-discovered loader): + - Resolve the `ExternalDataLoader` class from `type` + - Create a `DataConnector` instance with pre-filled `params` + - Generate Flask Blueprint with auth/catalog/data routes + - Register frontend module (generic `DataConnectorPanel`) +4. 
**Serve** `/api/app-config` with the list of enabled sources
+
+```python
+# Internal — no user code needed
+def register_sources(app):
+    config = load_source_config()  # merge env + yaml + UI settings
+
+    for source_spec in config.sources:
+        loader_class = DATA_LOADERS.get(source_spec.type)
+        if not loader_class:
+            logger.warning(f"Unknown source type: {source_spec.type}")
+            continue
+
+        plugin = DataConnector.from_loader(
+            loader_class,
+            source_id=source_spec.id,  # auto-generated or from config
+            display_name=source_spec.name,  # optional custom name
+            default_params=source_spec.params,  # pre-filled connection params
+            icon=source_spec.icon,
+        )
+        register_plugin(app, plugin)
+```
+
+### 4.9 Frontend: No Per-Source Registration Needed
+
+Since all `DataConnector` plugins use the same generic `DataConnectorPanel`, the frontend doesn't need per-source modules either. The backend's `/api/app-config` tells the frontend what sources are available:
+
+```json
+{
+  "SOURCES": [
+    {
+      "id": "pg_prod",
+      "type": "postgresql",
+      "name": "Production DB",
+      "icon": "postgresql",
+      "params_form": [
+        {"name": "user", "type": "string", "required": true},
+        {"name": "password", "type": "password", "required": true}
+      ],
+      "pinned_params": {"host": "db.internal.corp", "database": "analytics"},
+      "hierarchy": [{"key": "schema", "label": "Schema"}, {"key": "table", "label": "Table"}]
+    },
+    {
+      "id": "superset",
+      "type": "superset",
+      "name": "Company Superset",
+      "icon": "superset",
+      "params_form": [
+        {"name": "username", "type": "string", "required": true},
+        {"name": "password", "type": "password", "required": true}
+      ],
+      "pinned_params": {"url": "https://bi.company.com"},
+      "hierarchy": [{"key": "dashboard", "label": "Dashboard"}, {"key": "dataset", "label": "Dataset"}]
+    }
+  ]
+}
+```
+
+The frontend renders one `DataConnectorPanel` per source in the `SOURCES` list — each with its own connection form, tree hierarchy, and icon. **Zero frontend code per source.**
+
+## 5. 
Frontend: Generic `DataConnectorPanel` + +### 5.1 Shared UI for All Database-Type Sources + +Instead of writing a custom React panel per data source, `DataConnector` plugins share a single generic panel: + +```typescript +// src/plugins/_shared/DataConnectorPanel.tsx + +interface DataConnectorPanelProps { + pluginId: string; + config: PluginConfig; + callbacks: PluginHostCallbacks; +} + +function DataConnectorPanel({ pluginId, config, callbacks }: DataConnectorPanelProps) { + // State machine: disconnected → connecting → connected → browsing → importing + + // 1. If not connected: show connection form (auto-generated from list_params) + // 2. If connected: show table browser (tree view with groups/schemas) + // 3. On table select: show detail + preview + import button + // 4. On import: optional filter dialog (if large) → load → notify host +} +``` + +### 5.2 Auto-Generated Connection Form + +The connection form is generated from `ExternalDataLoader.list_params()`: + +```typescript +// list_params() returns: +[ + { name: "host", type: "string", required: true, default: "localhost", description: "Database host" }, + { name: "port", type: "number", required: true, default: 5432, description: "Port" }, + { name: "user", type: "string", required: true, description: "Username" }, + { name: "password", type: "password", required: true, description: "Password" }, + { name: "database", type: "string", required: true, description: "Database name" }, +] + +// Renders as: +┌─ Connect to PostgreSQL ────────────────┐ +│ Host: [localhost ] │ +│ Port: [5432 ] │ +│ User: [ ] │ +│ Password: [•••••••• ] │ +│ Database: [ ] │ +│ │ +│ [Cancel] [Connect] │ +└─────────────────────────────────────────┘ +``` + +### 5.3 Table Browser + +Once connected, the table browser uses the unified tree from [design-doc #8](8-unified-data-source-panel.md): + +``` +▾ 📂 PostgreSQL — analytics-db (connected) + ▾ 📁 public + users (150k rows) [⊕] [↻] + orders (1.2M rows) [⊕] [↻] + products (5k rows) [⊕] [↻] 
+ ▸ 📁 staging + ▸ 📁 analytics +``` + +- **[⊕]** = Import to workspace +- **[↻]** = Refresh (only shown for already-imported tables) + +### 5.4 Frontend Plugin Registration + +No per-source frontend code needed. The backend's `/api/app-config` response (see §4.9) tells the frontend what sources exist and what their connection forms / hierarchy look like. One generic `DataConnectorPanel` handles all of them. + +The frontend factory is only needed once, in the shared module: + +```typescript +// src/plugins/_shared/DataConnectorPanel.tsx +// Handles ALL connected data sources — databases, BI tools, cloud storage +// Reads source config from /api/app-config → SOURCES[] +// Renders: connection form (from params_form) → tree browser (from hierarchy) → import +``` + +## 6. Full Unification: BI Tools as Data Loaders + +Since DF only **consumes** data, both databases and BI tools serve the same role: hierarchical sources of importable tables. We unify them under the same `DataConnector` model. + +### 6.1 Architecture (Unified) + +``` + DataConnector (generic lifecycle wrapper) + | + ┌────────────┼────────────────┐ + │ │ │ + Database Loaders Cloud Loaders BI Tool Loaders + ┌────┬────┐ ┌────┬────┐ ┌─────────┬──────────┐ + MySQL PG MSSQL BQ Kusto S3 Superset Metabase Grafana +``` + +**Everything is a loader.** Superset becomes a `SupersetLoader(ExternalDataLoader)` that: +- Connects via JWT instead of host/password +- Exposes `catalog_hierarchy() → [{"key":"dashboard","label":"Dashboard"}, {"key":"dataset","label":"Dataset"}]` +- Returns `CatalogNode(node_type="namespace", ...)` for dashboards (expandable containers) +- Returns datasets as `CatalogNode(node_type="table", ...)` leaf nodes with optional pre-applied filters + +### 6.2 How Superset Migrates + +```python +class SupersetLoader(ExternalDataLoader): + """Treats Superset as a hierarchical data source.""" + + @staticmethod + def catalog_hierarchy() -> list[dict[str, str]]: + return [ + {"key": "dashboard", "label": 
"Dashboard"}, + {"key": "dataset", "label": "Dataset"}, + ] + + def ls(self, path=None, filter=None) -> list[CatalogNode]: + path = path or [] + if not path: # root → list dashboards + "Ungrouped Datasets" + dashboards = self.client.list_dashboards(self.token) + return [ + CatalogNode(name=d["title"], node_type="namespace", + path=[d["title"]]) + for d in dashboards + ] + [CatalogNode(name="Ungrouped Datasets", node_type="namespace", + path=["Ungrouped Datasets"])] + + if len(path) == 1: # dashboard → list its datasets + datasets = self.client.get_dashboard_datasets(self.token, path[0]) + return [ + CatalogNode( + name=ds["name"], node_type="table", + path=[path[0], ds["name"]], + metadata={"row_count": ds["count"], "filters": ds.get("filters")}, + ) + for ds in datasets + ] + return [] + + def fetch_data_as_arrow(self, source_table, size=100000, **kwargs) -> pa.Table: + # source_table = dataset ID; executes SQL via Superset's SQL Lab + return self.client.execute_sql_as_arrow(self.token, source_table, size) + + @staticmethod + def list_params() -> list[dict]: + return [ + {"name": "url", "type": "string", "required": True, "description": "Superset URL"}, + {"name": "username", "type": "string", "required": True}, + {"name": "password", "type": "password", "required": True}, + ] + +# Plugin registration — same one-liner as databases: +plugin_class = create_connected_data_source(SupersetLoader, "superset", "Superset", icon="superset") +``` + +The rich Superset-specific features (dashboard filters, column metadata, etc.) are expressed as **metadata on `CatalogNode`** rather than as a separate plugin architecture. + +### 6.3 Critical Differences to Be Aware Of + +Unification is the right call, but these differences must be handled in the `DataConnector` framework: + +#### 1. 
Auth Model Diversity + +| Source Type | Auth Mechanism | Token Lifecycle | +|-------------|---------------|------------------| +| MySQL, PG, MSSQL | Connection params (host/user/password) | Connection object — alive until closed | +| Kusto, BigQuery | OAuth / service account token | Expires, needs refresh | +| Superset, Metabase | JWT (username/password → token) | Expires, needs refresh | +| Grafana | API key | Long-lived, no refresh | + +**Solution:** The `DataConnector` auth layer must support both: +- **Persistent connection** mode (databases): store connection object in session, reconnect on failure +- **Token** mode (BI tools, cloud): store token in session, auto-refresh on expiry + +The loader declares which mode it uses: +```python +class ExternalDataLoader(ABC): + @staticmethod + def auth_mode() -> str: + """'connection' (default) or 'token'.""" + return "connection" +``` + +#### 2. Catalog Node Semantics: Import vs. Import-With-Context + +Database tables are **context-free** — `SELECT * FROM users` means the same thing regardless of how you navigated to it. But BI tool datasets can carry **context from their parent**: + +``` +Superset: + 📊 Q3 Sales Dashboard + orders_fact → import with dashboard's date filter pre-applied + 📁 Ungrouped Datasets + orders_fact → import raw, no filters +``` + +The same leaf ("orders_fact") means different things depending on which parent you expanded from. + +**Solution:** `CatalogNode.metadata` carries the context: +```python +@dataclass +class CatalogNode: + name: str + node_type: str # "namespace" or "table" + path: list[str] + metadata: dict | None = None # <-- includes import_context + # e.g., metadata = { + # "filters": [{"column": "date", "op": ">=", "value": "2025-07-01"}], + # "description": "Filtered by Q3 Sales Dashboard", + # } +``` + +When importing, the framework passes `metadata` to the loader, which can apply filters server-side. Databases ignore this (no filters in metadata). BI tools use it. + +#### 3. 
Data Freshness & Caching + +| Source | Data Freshness | Caching Behavior | +|--------|---------------|------------------| +| Database | Live — query returns current state | No source-side cache; DF caches catalog metadata only | +| BI tool | May have source-side cache (Superset caches query results) | Catalog may be stale; need cache-bust option | + +**Solution:** `CatalogNode.metadata` can include `cached_at` / `cache_ttl` hints. The tree UI shows a staleness indicator and offers a "refresh catalog" action per source. + +#### 4. Rate Limiting & Quotas + +BI tools often have API rate limits (Superset: N requests/minute). Databases have connection limits but no per-query throttling. + +**Solution:** Loaders can declare rate limit hints: +```python +class ExternalDataLoader(ABC): + @staticmethod + def rate_limit() -> dict | None: + """Optional rate limit hints. None = no limit.""" + return None # or {"requests_per_minute": 60, "concurrent": 5} +``` +The `DataConnector` framework uses this to throttle catalog expansion and data loads. + +#### 5. Import Filtering: Standard SPJ + Source-Defined Filters + +Large datasets need filtering before import. 
There are two layers: + +**Layer 1: Standard SPJ (Select-Project-Join) — all sources get this for free** + +The framework provides a built-in filter UI for every data source, regardless of type: + +``` +┌─ Import: orders (1.2M rows) ───────────────────────────┐ +│ │ +│ Columns (select): │ +│ ☑ order_id ☑ customer_id ☑ amount │ +│ ☑ region ☐ internal_id ☐ updated_at │ +│ │ +│ Filters (where): │ +│ ┌──────────────┬─────┬──────────────────────┐ │ +│ │ region │ IN │ [US, EU] │ │ +│ │ amount │ >= │ [100] │ │ +│ │ order_date │ >= │ [2025-01-01] │ │ +│ │ │ │ [+ Add filter] │ │ +│ └──────────────┴─────┴──────────────────────┘ │ +│ │ +│ Sort by: [order_date ▾] [desc ▾] │ +│ Row limit: [50000 ] │ +│ │ +│ Estimated rows: ~38,000 │ +│ │ +│ [Cancel] [Import] │ +└─────────────────────────────────────────────────────────┘ +``` + +This UI is **auto-generated from column metadata** (`get_metadata()` returns column names and types). The framework builds the SQL WHERE clause server-side via a safe, parameterized filter DSL — no raw SQL from the user. + +The filter DSL: +```python +# Sent in data/import request body +{ + "source_table": ["production", "public", "orders"], + "columns": ["order_id", "customer_id", "amount", "region", "order_date"], + "filters": [ + {"column": "region", "op": "in", "value": ["US", "EU"]}, + {"column": "amount", "op": ">=", "value": 100}, + {"column": "order_date", "op": ">=", "value": "2025-01-01"} + ], + "sort_columns": ["order_date"], + "sort_order": "desc", + "size": 50000 +} +``` + +The loader receives this in `import_context` and translates to the source's query language (SQL WHERE, Kusto where, S3 Select, etc.). Each loader handles its own dialect safely. + +**Layer 2: Source-defined filters — for BI tools and curated datasets** + +Some sources provide **pre-defined filter sets** created by the data source owner (e.g., Superset dashboard native filters, Metabase question parameters). 
These appear as additional interactive controls above the standard SPJ filters: + +``` +┌─ Import: orders_fact (from Q3 Sales Dashboard) ────────┐ +│ │ +│ Dashboard Filters (pre-defined by source): │ +│ ┌──────────────────────────────────────────────┐ │ +│ │ Quarter: [Q3 2025 ▾] │ │ +│ │ Region: [☑ US ☑ EU ☐ APAC ☐ LATAM] │ │ +│ │ Product: [All ▾] │ │ +│ └──────────────────────────────────────────────┘ │ +│ │ +│ Additional Filters (standard): │ +│ ┌──────────────┬─────┬──────────────────────┐ │ +│ │ amount │ >= │ [100] │ │ +│ │ │ │ [+ Add filter] │ │ +│ └──────────────┴─────┴──────────────────────┘ │ +│ │ +│ Columns: ☑ order_id ☑ customer_id ☑ amount ... │ +│ Row limit: [50000] │ +│ │ +│ [Cancel] [Import] │ +└─────────────────────────────────────────────────────────┘ +``` + +Source-defined filters come from `CatalogNode.metadata`: +```python +# CatalogNode for "orders_fact" under "Q3 Sales Dashboard" +CatalogNode( + name="orders_fact", + node_type="table", + path=["Q3 Sales Dashboard", "orders_fact"], + metadata={ + "row_count": 150000, + "source_filters": [ + { + "name": "Quarter", "column": "quarter", + "type": "select", "options": ["Q1 2025", "Q2 2025", "Q3 2025", "Q4 2025"], + "default": "Q3 2025" + }, + { + "name": "Region", "column": "region", + "type": "multi_select", "options": ["US", "EU", "APAC", "LATAM"], + "default": ["US", "EU"] + }, + { + "name": "Product", "column": "product_category", + "type": "select", "options_endpoint": "/filter-values", # lazy-loaded + "default": "All" + } + ] + } +) +``` + +The import dialog renders both layers. 
The combined request: +```python +{ + "source_table": ["Q3 Sales Dashboard", "orders_fact"], + "columns": ["order_id", "customer_id", "amount"], + "filters": [ # standard SPJ filters (layer 1) + {"column": "amount", "op": ">=", "value": 100} + ], + "import_context": { # source-defined filters (layer 2) + "source_filters": [ + {"column": "quarter", "value": "Q3 2025"}, + {"column": "region", "value": ["US", "EU"]}, + {"column": "product_category", "value": "All"} + ] + }, + "size": 50000 +} +``` + +The loader applies both: source-defined filters first (they define the base dataset), then standard SPJ filters on top (user refinement). + +**How loaders declare filter support:** + +```python +class ExternalDataLoader(ABC): + @staticmethod + def supports_standard_filters() -> bool: + """Whether this loader can apply SPJ filters server-side. + + True → framework sends filters in import_context, loader builds WHERE clause + False → framework fetches all data, applies filters client-side (slower) + Default: True for SQL databases, loaders can override. + """ + return True +``` + +For sources that can't filter server-side (e.g., some REST APIs), the framework falls back to client-side filtering after fetch — less efficient but still works. + +## 7. Migration Plan + +### Phase 1: Core Framework + Loader Upgrade + +1. ✅ Add `CatalogNode` dataclass (`"namespace"` / `"table"`) and new base methods on `ExternalDataLoader`: `catalog_hierarchy()`, `effective_hierarchy()`, `pinned_scope()`, `ls()`, `get_metadata()`, `test_connection()`, `auth_mode()`, `rate_limit()` — all with default implementations so existing loaders keep working +2. 
✅ **Upgrade all 9 loaders** to override the new methods: + - MySQL, PostgreSQL, MSSQL: `catalog_hierarchy()`, `ls()`, `get_metadata()`, `test_connection()` — database param made optional for scope pinning + - BigQuery: project always pinned (required), dataset_id optional — 3-level hierarchy + - Kusto: kusto_database made optional — 2-level hierarchy + - Athena: database already optional — 2-level hierarchy + - MongoDB: database required, collection is scope param — 2-level hierarchy + - S3, Azure Blob: bucket/container required (can't list safely) — 2-level hierarchy +3. ✅ Unify `fetch_data_as_arrow()` signature: replace `size`/`sort_columns`/`sort_order` positional params with single `import_options: dict` — extensible for `columns`, `filters`, `source_filters`. All 9 loaders, callers, and tests updated. Renamed `loader_metadata` → `source_info`. Removed pandas from PG/MySQL/MSSQL query path (cursor + `pa.table()` directly). `import_options` stored in workspace metadata for refresh replay. +4. ✅ Implement `DataConnector` base class with generic auth/catalog/data routes — auto-registers all 10 loaders at startup (90 routes under `/api/connectors/{id}/`), exposes `SOURCES` in `/api/app-config` +5. ✅ Implement `SupersetLoader(ExternalDataLoader)` — JWT-based auth (`auth_mode="token"`), dashboard→dataset hierarchy, SQL Lab data fetch. Registered as 10th loader, auto-wrapped by `DataConnector` with 9 routes. +6. ✅ Implement config-driven registration — `data-sources.yml` (searched in `DATA_FORMULATOR_HOME`, cwd, `~/.data-formulator/`, `/etc/`), env vars (`DF_SOURCES__id__key`), `${ENV_REF}` resolution, `auto_discover: false` to restrict to configured sources only. Multiple instances of same type supported. +7. ✅ Integrate `DataConnector` into frontend — `SOURCES` from `/api/app-config` rendered in `DBManagerPane` sidebar alongside legacy loaders. `DataLoaderForm` accepts optional `connectorId` to route through `/api/connectors/{id}/*`. 
`loadTable` thunk updated to support connected source import. Zero new components — reuses existing form/table UI.
+
+### Phase 2: Integration Testing
+
+8. ✅ Test database loaders end-to-end: PostgreSQL, MySQL via auto-discovery and `data-sources.yml` config
+   - Connect → browse hierarchy → scope pinning → import with SPJ filters → refresh → disconnect → reconnect from saved credentials
+   - 40 unit tests for DataConnector framework (mock loader), 17 config tests, E2E route tests for PG + MySQL (Docker-gated)
+9. ✅ Test `SupersetLoader` end-to-end: dashboard → dataset hierarchy, source-defined filters, SSO auth
+   - 16 integration tests with mocked Superset API (JWT auth, catalog browsing, data preview/import, token refresh)
+10. ✅ Deprecate old hand-written `SupersetPlugin(DataSourcePlugin)` — deprecation warnings added, docstrings updated
+11. ✅ Verify remaining loaders via auto-discovery: Kusto, BigQuery, MSSQL, MongoDB, S3, Azure Blob
+    - 16 verification tests confirm catalog_hierarchy, effective_hierarchy, scope pinning, auth_mode, list_params, blueprint generation for all 10 loaders
+    - Also found and fixed operator-precedence bug in `_build_source_specs` YAML ID assignment
+
+### Phase 3: Cleanup + Unified Panel ✅
+
+#### 3a: Legacy route removal ✅
+- ✅ Removed 8 legacy `/api/tables/data-loader/*` backend routes from `tables_routes.py`
+- ✅ Removed 9 `DATA_LOADER_*` URL constants from frontend `utils.tsx`
+- ✅ `DBTableManager` now uses only `serverConfig.SOURCES` (DataConnector) for data source discovery
+- ✅ `DataLoaderForm` uses only connected source auth/catalog/import routes (no legacy branches)
+- ✅ `loadTable` thunk uses only connected source routes for both store-on-server and ephemeral paths
+- ✅ `useDataRefresh` uses connected source `DATA_REFRESH` endpoint (requires active connection)
+- ✅ Added `connectorId` to `DataSourceConfig` so tables remember their source
+- ✅ Added `DISABLED_SOURCES` to app-config for greyed-out UI entries
+- ✅ Enhanced 
`data/preview` route to support full `import_options` (sort, limit) + +#### 3b: Connection model (doc 9.1) ✅ +- ✅ Vault-based credential persistence wired into `DataConnector` (`_vault_store`, `_vault_retrieve`, `_vault_delete`) +- ✅ Auto-reconnect from vault on server restart (lazy, on first `/get-status` or catalog/data call) +- ✅ Disconnect preserves vault credentials (fast reconnect), Delete clears them +- ✅ Multi-user isolation via `(user_identity, connector_id)` composite key +- ✅ Centralized `credentials.db` at `DATA_FORMULATOR_HOME/` with Fernet encryption + +#### 3c: Promoted data source cards (doc 9.3) ✅ +- ✅ Single shared `connectors_bp` blueprint — all action routes take `connector_id` in JSON body +- ✅ `GET /api/data-loaders`, `GET /api/connectors`, `POST /api/connectors`, `DELETE /api/connectors/{id}` +- ✅ Action routes: `/connect`, `/disconnect`, `/get-status`, `/get-catalog`, `/get-catalog-tree`, `/preview-data`, `/import-data`, `/import-group`, `/refresh-data` +- ✅ Connected sources promoted as top-level cards on Load Data menu +- ✅ "Add Connection" card with type picker + param form +- ✅ Removed legacy "Database" tab from UI + +#### 3d: Remove legacy plugin system ✅ +- ✅ Relocated `SupersetClient` + `SupersetAuthBridge` from `plugins/superset/` to `data_loader/` (used by `SupersetLoader`) +- ✅ Deleted `py-src/data_formulator/plugins/` directory (base classes, discovery engine, Superset plugin, all routes) +- ✅ Deleted `src/plugins/` directory (frontend plugin host, registry, Superset UI components) +- ✅ Removed plugin registration from `app.py` (`discover_and_register`, `ENABLED_PLUGINS`) +- ✅ Removed frontend plugin imports (`getEnabledPlugins`, `PluginHost`, `registerPluginTranslations`) +- ✅ Deleted legacy plugin tests + +#### 3e: Backend restructuring ✅ +- ✅ Created `auth/` package — merged `security/auth.py` → `auth/identity.py`, `auth_providers/` → `auth/providers/`, `auth_gateways/` → `auth/gateways/`, `credential_vault/` → `auth/vault/` +- 
✅ Created `routes/` package — moved `tables_routes.py` → `routes/tables.py`, `agent_routes.py` → `routes/agents.py`, `session_routes.py` → `routes/sessions.py`, `credential_routes.py` → `routes/credentials.py`, `demo_stream_routes.py` → `routes/demo_stream.py` +- ✅ `security/` kept for non-auth concerns: `code_signing.py`, `sanitize.py`, `url_allowlist.py` +- ✅ Updated all import paths + patch targets across ~30 files +- ✅ Improved `_sanitize_error()` to preserve actionable detail in connector error messages +- ✅ Moved Docker-gated integration tests to `tests/database-dockers/` (mysql, postgres, bigquery, mongodb, superset) +- ✅ Fixed `test_auth_provider_chain` missing `_localhost_identity` reset +- [ ] Integrate with unified data source panel ([doc #8](8-unified-data-source-panel.md)) + +#### Post-restructuring backend layout + +``` +py-src/data_formulator/ +├── app.py ← Flask app + bootstrap +├── __main__.py ← CLI entry point +├── data_connector.py ← DataConnector framework + shared routes +├── workspace_factory.py ← Workspace resolution +├── model_registry.py ← AI model config +├── example_datasets_config.py ← Sample dataset config +│ +├── auth/ ← Identity, providers, gateways, vault +│ ├── identity.py ← init_auth, get_identity_id, get_active_provider +│ ├── providers/ ← AuthProvider subclasses (github, oidc, azure) +│ ├── gateways/ ← OAuth callback routes (github) +│ └── vault/ ← Fernet-encrypted credential storage +│ +├── routes/ ← Flask blueprints +│ ├── tables.py ← Table CRUD, file upload, parsing +│ ├── agents.py ← AI agent endpoints +│ ├── sessions.py ← Workspace session management +│ ├── credentials.py ← Vault API routes +│ └── demo_stream.py ← ISS demo + streaming +│ +├── security/ ← Non-auth security utilities +│ ├── code_signing.py ← HMAC signing for AI-generated code +│ ├── sanitize.py ← Error message scrubbing +│ └── url_allowlist.py ← API base URL validation +│ +├── agents/ ← AI agent implementations +├── data_loader/ ← ExternalDataLoader drivers 
(10 sources) +├── datalake/ ← Workspace storage layer +├── sandbox/ ← Code execution sandboxes +└── workflows/ ← Chart/viz generation +``` + +### Sub-doc Summary (9.1–9.3) + +| Doc | Title | Status | Key Deliverables | +|-----|-------|--------|------------------| +| [9.1](9.1-data-source-connection-model.md) | Connection Model | Complete | Vault credential persistence, auto-reconnect, multi-user isolation, centralized `credentials.db` | +| [9.2](9.2-table-group-bundle-loading.md) | TableGroup Bundle Loading | Draft (design only) | `table_group` node type for BI dashboards, source filters, group load API — not yet implemented | +| [9.3](9.3-promoted-data-source-cards.md) | Promoted Data Source Cards | Complete | Single shared blueprint API, promoted cards UI, "Add Connection" flow, legacy "Database" tab removed | + +### Phase 4: Advanced Features + +13. Scheduled refresh (periodic re-fetch) +14. Incremental refresh (append-only for time-series data) +15. Connection sharing in team deployments (admin-managed connections) +16. Cross-database queries (join tables from different databases in tree) +17. Metabase / Grafana loaders + +## 8. Open Questions + +### Q1: What happens to `DataSourcePlugin` and the `plugins/` directory? + +**Done.** The entire `plugins/` directory and `DataSourcePlugin` base class have been removed (Phase 3d). The architecture is now: + +- **`data_loader/`** — all `ExternalDataLoader` subclasses (the driver layer), including `SupersetLoader` with its `superset_client.py` and `superset_auth_bridge.py` +- **`data_connector.py`** — generic lifecycle wrapper with shared routes (the framework layer) +- **`auth/`** — identity, providers, gateways, credential vault (the auth layer) +- **`routes/`** — all Flask blueprints + +There are no "plugins" anymore — just loaders, the connector framework, and config-driven registration. + +### Q2: Multiple connections to the same source type? 
+ +**Solved by config.** Users list multiple entries with the same `type` in `data-sources.yml`: + +```yaml +sources: + - type: postgresql + name: "Production" + params: { host: prod.corp, database: prod } + - type: postgresql + name: "Staging" + params: { host: staging.corp, database: staging } +``` + +Each becomes a separate entry in the data source tree. No code changes needed. + +### Q3: How deep should hierarchical browsing go? + +Different sources have different depths: + +| Source | Levels | Example | +|--------|--------|---------| +| MySQL | 2 | `database → table` | +| PostgreSQL | 3 | `database → schema → table` | +| BigQuery | 3 | `project → dataset → table` | +| Kusto | 2 | `database → table` | +| S3 | 2+ | `bucket → prefix → ... → object` (variable depth) | + +**Recommendation:** Each loader declares its hierarchy via `catalog_hierarchy()`. The tree UI renders whatever depth the loader declares. S3-style variable depth can be handled by repeating level types (e.g., `["bucket", "prefix", "prefix", "object"]` or a special "recursive" marker). + +### Q4: How do column selection and filtering interact with the loader? + +The current `fetch_data_as_arrow(source_table, size, ...)` doesn't support column selection or arbitrary WHERE clauses. Options: + +- **Column selection:** Add `columns` param to `fetch_data_as_arrow()` — loaders build `SELECT col1, col2 FROM ...` +- **Server-side filtering:** More complex. Would need a filter DSL or raw SQL passthrough. + +**Recommendation:** Phase 1 supports column selection + size limit only. Server-side filtering (like Superset has) is Phase 4 for database plugins — it requires building SQL WHERE clauses safely, which varies per database dialect. + +### Q5: What about token-based auth (Kusto, BigQuery)? + +Some data sources use OAuth/service accounts, not username/password. The `list_params()` already handles this — BigQuery asks for a service account JSON, Kusto uses Azure AD tokens. 
+ +The `DataConnector` auth layer should support: +- **Password mode** (MySQL, PostgreSQL, MSSQL): user/password fields +- **Token/key mode** (BigQuery, Kusto): API key or token file +- **OAuth mode** (future): redirect-based auth flow + +`list_params()` already declares the param types — the generic connection form renders whatever the loader needs. + +### Q6: Should the old `db-manager` endpoints remain? + +**Done.** The legacy `/api/tables/data-loader/*` endpoints were removed in Phase 3a. All data loading flows through `/api/connectors/*` now. + +## 9. Summary + +**The generalized plugin library unifies databases and BI tools into one model:** + +``` +ExternalDataLoader (data protocol: how to connect, browse, fetch) + + +DataConnector (lifecycle mgmt: session, caching, refresh, UI) + = +A full plugin — for databases AND BI tools — for free +``` + +All data sources are **hierarchical trees of `namespace` → `table` nodes**: +- MySQL: `database (namespace) → table` +- PostgreSQL: `database (namespace) → schema (namespace) → table` +- Superset: `dashboard (namespace) → dataset (table)` +- S3: `bucket (namespace) → file (table)` + +The hierarchy labels (what to call each namespace level) come from `catalog_hierarchy()`. **Scope pinning** lets users skip levels they don't need to browse — if you provide `database="prod"` in your connection params, that level is hidden and browsing starts at the next level. + +The five critical differences between databases and BI tools (auth model, contextual import, caching, rate limits, import options) are handled as **optional capabilities** on `ExternalDataLoader` and `CatalogNode.metadata` — not as separate plugin architectures. + +**What plugin authors / admins write:** + +| Scenario | What to do | +|----------|------------| +| Enable an already-installed loader | Add one entry to `data-sources.yml` or set env var | +| Pre-configure a database for all users | Add entry with `params` (host, database, etc.) 
in YAML or env | +| Multiple connections to same DB type | Add multiple entries with same `type`, different `name` and `params` | +| New loader not yet in DF | Implement `ExternalDataLoader` subclass (~100 lines), `pip install` it | +| BI platform with custom hierarchy | Same as above, implement `ls()` with custom hierarchy (~200 lines) | + +**What users get:** + +- Log into PostgreSQL / Kusto / MySQL / BigQuery / **Superset / Metabase** once → browse hierarchy → import → refresh +- All data sources visible in one unified tree panel +- Consistent experience: same connect → browse → import → refresh loop everywhere +- No re-entering credentials for every data pull diff --git a/design-docs/9.1-data-source-connection-model.md b/design-docs/9.1-data-source-connection-model.md new file mode 100644 index 00000000..8fa04487 --- /dev/null +++ b/design-docs/9.1-data-source-connection-model.md @@ -0,0 +1,315 @@ +# Data Source Connection Model — Auth, Persistence, and Multi-User Isolation + +## Status: Complete (Phase A + B done, Phase C deferred to doc 9 Phase 4) + +Parent: [9-generalized-data-source-plugins.md](9-generalized-data-source-plugins.md) + +## 1. Problem + +After Phase 3 of the generalized plugin migration, all external data sources flow through `DataConnector`. But the **connection lifecycle** has gaps: + +1. **Connections are ephemeral.** `DataConnector._loaders` is an in-memory dict. Server restart = all connections lost. Users must re-enter credentials every session. +2. **No "already connected" state.** The data loader panel shows all sources as "Available" with a connect form. There's no way to show "you're already connected to Kusto — here are your tables." +3. **Credential storage exists but isn't wired.** `CredentialVault` (Fernet-encrypted SQLite) exists and works for the Superset plugin, but `DataConnector` doesn't use it. +4. 
**Multi-user isolation works but has no persistence.** Two users hitting `/api/connectors/kusto/auth/connect` get separate loaders (keyed by identity), but neither survives a restart. + +## 2. Desired UX + +The data loader panel should present two categories: + +### 2.1 Connected Sources (user has active/stored credentials) + +``` +┌──────────────────────────────────┐ +│ ● PostgreSQL (prod) Connected │ ← vault has credentials +│ ● Kusto (corp) Connected │ ← vault has credentials +│ ○ BigQuery (analytics) Session │ ← in-memory only, this session +└──────────────────────────────────┘ +``` + +**Behavior:** User clicks → jumps directly to catalog/table browser. No credential form needed. + +- **Vault-backed (●):** Credentials encrypted in `credentials.db`. Auto-reconnect on server restart. +- **Session-only (○):** In-memory only. Connected this session but credentials not persisted. Lost on restart. + +### 2.2 Available Sources (registered but no credentials) + +``` +┌──────────────────────────────────┐ +│ MySQL │ ← installed, no connection yet +│ S3 │ ← installed, no connection yet +│ MongoDB │ ← installed, no connection yet +│ ───────────────────────────── │ +│ Athena (install) │ ← missing deps +│ MSSQL (install) │ ← missing deps +└──────────────────────────────────┘ +``` + +**Behavior:** User clicks → shown credential form → connect → source moves to "Connected" category. 
+ +### 2.3 Multi-User Isolation + +Same route path, different state per identity: + +``` +Route: /api/connectors/kusto/auth/connect +Alice → _loaders["user:alice@corp.com"] = KustoLoader(cluster="alice-cluster") +Bob → _loaders["user:bob@corp.com"] = KustoLoader(cluster="bob-cluster") +``` + +The admin can also pin shared params via config: + +```yaml +# data-sources.yml +sources: + - type: kusto + id: kusto_corp + name: "Corp Kusto" + params: + cluster: "https://corp.kusto.windows.net" # pinned — hidden from user form + # Users only see: database, token +``` + +In this scenario, both Alice and Bob connect to the same cluster but provide their own database and token. Their loaders are still separate. + +## 3. Credential Persistence Design + +### 3.1 Existing Infrastructure + +| Component | Location | Status | +|-----------|----------|--------| +| `CredentialVault` (abstract) | `credential_vault/base.py` | ✅ Working | +| `LocalCredentialVault` (Fernet + SQLite) | `credential_vault/local_vault.py` | ✅ Working | +| Key auto-generation | `credential_vault/__init__.py` | ✅ Zero-config for local mode | +| API endpoints | `credential_routes.py` | ✅ `/api/credentials/store\|list\|delete` | +| Vault integration | `plugins/superset/` only | ⚠️ Only wired for Superset | + +### 3.2 What Needs to Happen + +Wire `DataConnector` into `CredentialVault`: + +``` +Connect flow: + 1. User submits params via /auth/connect + 2. DataConnector._connect() creates loader, tests connection + 3. If success AND vault available: + → vault.store(identity, source_id, {user_params + safe metadata}) + 4. Loader cached in _loaders[identity] + +Auto-reconnect flow (on /auth/status or first catalog/data call): + 1. _loaders[identity] is empty + 2. Check vault.retrieve(identity, source_id) + 3. If credentials found → _connect(stored_params) → test connection + 4. If test fails → delete stale vault entry, return "not connected" + 5. 
If test succeeds → loader ready, return "connected" + +Disconnect flow: + 1. User calls /auth/disconnect + 2. _loaders.pop(identity) + 3. vault.delete(identity, source_id) +``` + +### 3.3 Storage Architecture: Centralized Vault + +**Decision: Single centralized `credentials.db` at `DATA_FORMULATOR_HOME/`.** All users' credentials in one Fernet-encrypted SQLite file, keyed by `(user_id, source_key)`. + +Considered and rejected: per-user storage at `users/{id}/credentials.db`. + +**Rationale:** + +| Concern | Centralized | Per-user dirs | +|---------|-------------|---------------| +| Security boundary | Server process holds the Fernet key and can decrypt all entries regardless of file layout | Same — server still needs all keys | +| Operational simplicity | One file, one volume mount, one backup | N directories, must manage creation/cleanup/permissions | +| User data deletion (GDPR) | `DELETE WHERE user_id = ?` | Delete user dir | +| Concurrent access | SQLite handles fine (rare writes) | No contention but N DB connections | +| Backend swap (e.g., Azure Key Vault) | One interface to replace | N stores to replace | + +The logical separation is in the composite key `(user_id, source_key)`, not the physical file layout. Admin-configured credentials don't go in the vault at all — they live in `data-sources.yml` with `auto_connect: true`. + +### 3.4 What Gets Stored in the Vault + +```json +{ + "user_params": { + "host": "db.corp.com", + "port": "5432", + "database": "analytics", + "password": "hunter2" + }, + "connected_at": "2026-04-14T10:30:00Z", + "source_id": "postgresql" +} +``` + +The vault encrypts the **entire blob** with Fernet (AES-128-CBC + HMAC-SHA256). The encryption key: +- **Local mode:** Auto-generated, stored at `DATA_FORMULATOR_HOME/.vault_key` +- **Server mode:** Set via `CREDENTIAL_VAULT_KEY` env var + +### 3.5 What Gets Stored in Workspace Metadata (Unchanged) + +Workspace YAML only stores **non-sensitive** params (via `get_safe_params()`). 
This is already the case — passwords, tokens, and secrets are filtered out. No change needed. + +### 3.6 Connection State Summary + +| Scenario | _loaders dict | Vault | Survives restart? | +|----------|--------------|-------|-------------------| +| Just connected | ✅ has loader | ✅ encrypted | Yes | +| Reconnected from vault | ✅ has loader | ✅ encrypted | Yes | +| Vault disabled / not available | ✅ has loader | ❌ nothing | No | +| Disconnected | ❌ removed | ❌ deleted | — | +| Server restarted, vault has creds | ❌ empty | ✅ encrypted | Yes (auto-reconnect on next access) | + +## 4. Deployment Scenarios + +### 4.1 Local Mode (single user, `WORKSPACE_BACKEND=local`) + +- User IS the admin +- All auto-discovered sources appear as "Available" +- User connects → credentials stored in vault (zero-config, key auto-generated) +- Server restart → auto-reconnect from vault +- No multi-user concerns + +### 4.2 Centrally Managed (multi-user, auth provider configured) + +- Admin configures shared sources in `data-sources.yml` with pinned params +- Each user provides their own credentials (password/token) for the unpinned params +- Vault keyed by `(user_identity, source_id)` — full isolation +- Two users connecting to the same source_id with different params = two separate vault entries, two separate loaders + +Example: + +```yaml +# Admin config: data-sources.yml +sources: + - type: kusto + id: kusto_corp + name: "Corp Kusto" + params: + cluster: "https://corp.kusto.windows.net" +``` + +``` +Alice connects: vault["user:alice", "kusto_corp"] = {database: "sales", token: "aaa"} +Bob connects: vault["user:bob", "kusto_corp"] = {database: "eng", token: "bbb"} + +Same route: /api/connectors/kusto_corp/auth/connect +Different credentials, different catalog results. 
+``` + +### 4.3 SSO / Token Forwarding (future) + +When the app's auth provider (OIDC/Azure) issues tokens that the data source also accepts: + +``` +User logs in via OIDC → gets access_token +DataConnector sees auth_mode = "token_forward" + → auto-connect using the user's OIDC token (no credential form) + → no vault storage needed (token comes from auth session) +``` + +This is how the Superset SSO bridge already works. Generalizing it to DataConnector is a future enhancement. + +### 4.4 Ephemeral Mode (`WORKSPACE_BACKEND=ephemeral`) + +- No vault (no persistent storage) +- Connections are session-only (in-memory `_loaders` dict) +- Credentials typed each time +- This is fine — ephemeral mode is for demos/public instances where no state should persist + +## 5. Frontend Changes + +### 5.1 `/api/app-config` Enhancement + +Add `CONNECTED_CONNECTORS` to the config response — the list of source_ids where the current user has vault credentials: + +```json +{ + "CONNECTORS": [...], + "DISABLED_SOURCES": {...}, + "CONNECTED_CONNECTORS": ["postgresql", "kusto_corp"] +} +``` + +This lets the frontend immediately render the "Connected / Available" split on mount without calling `/auth/status` for each source. + +### 5.2 Data Loader Panel States + +```typescript +// Derived from serverConfig.CONNECTORS + serverConfig.CONNECTED_CONNECTORS +const connectedSources = sources.filter(s => connectedIds.includes(s.source_id)); +const availableSources = sources.filter(s => !connectedIds.includes(s.source_id)); +``` + +**Connected source row:** +``` +[●] PostgreSQL (prod) [Browse Tables] [Disconnect] +``` + +**Available source row:** +``` +[ ] MySQL [Connect...] +``` + +### 5.3 Connect Flow UI Change + +After successful connect, the source moves from "Available" to "Connected": +1. Frontend sends `{ params, persist }` to `/auth/connect` (30s AbortController timeout) +2. Backend creates loader → tests connection → persists if requested +3. 
Backend returns `{ status: "connected", persisted: true/false }` +4. Frontend checks `status === "connected"` before calling `onConnected()` +5. Source re-renders in "Connected" category with catalog browser +6. If timeout or error → source stays in "Available", error message shown + +### 5.4 Persist Credentials Toggle + +The connect form includes a "Remember credentials" checkbox (default: checked). +When unchecked, `persist: false` is sent to the backend, and credentials are +session-only (in-memory). The toggle is only shown when there are param fields. + +## 6. Implementation Plan + +### Phase A: Vault Integration in DataConnector + +1. ✅ Add `_vault_store()`, `_vault_retrieve()`, `_vault_delete()`, `_persist_credentials()` helpers +2. ✅ `_connect()` creates loader in-memory; vault persistence is separate via `_persist_credentials()` +3. ✅ Wire into `_disconnect()` → delete from vault +4. ✅ Add auto-reconnect in `_require_loader()` → try vault before raising +5. ✅ Add `CONNECTED_CONNECTORS` to `/api/app-config` +6. ✅ Tests: vault store/retrieve/disconnect/auto-reconnect/persist-flag (21 tests) + +### Phase B: Frontend Two-Panel UX + +7. ✅ Parse `CONNECTED_CONNECTORS` from server config +8. ✅ Split data loader panel into Connected / Available sections +9. ✅ Auto-open catalog browser for connected sources (auto-reconnect from vault) +10. ✅ "Remember credentials" checkbox (default: on), sends `persist` flag to backend +11. ✅ Connection timeout (30s AbortController), verified `status === "connected"` before state transition + +### Phase C: Token Forwarding (deferred) + +12. Add `auth_mode: "token_forward"` to DataConnector +13. Auto-connect using the user's auth session token +14. No credential form needed — just catalog browser + +## 7. Design Decisions (Resolved) + +### D1: Credential persistence — opt-out (default: persist) + +Local users expect "remember me" behavior. They can disconnect to clear. 
Server admins can disable the vault entirely by not setting `CREDENTIAL_VAULT_KEY` (though local mode auto-generates a key, so it's always available unless explicitly blocked). + +### D2: Credential rotation / expiry — lazy invalidation + +Vault entries don't expire. Auto-reconnect tests the connection — if the password has changed, the stale entry is deleted and the user is prompted to reconnect. Token-based connections (OAuth) would need refresh token support (Phase C). + +### D3: Vault scope — global, not per-workspace + +A user who connects to PostgreSQL in workspace A should see it connected in workspace B too. The vault key is `(user_id, source_key)` with no workspace dimension. + +### D4: Admin-provided credentials — config file, not vault + +Use `auto_connect: true` in `data-sources.yml`. The admin provides full credentials (with `${ENV_VAR}` refs), and all users auto-connect without entering anything. These never enter the per-user vault. + +### D5: Storage architecture — single centralized vault + +One `credentials.db` at `DATA_FORMULATOR_HOME/`, keyed by `(user_id, source_key)`. Not per-user files. The trust boundary is the server process (which holds the Fernet key), so physical file separation adds operational complexity without security benefit. Admin credentials stay in config; user credentials stay in vault. User data deletion is `DELETE WHERE user_id = ?`. diff --git a/design-docs/9.2-table-group-bundle-loading.md b/design-docs/9.2-table-group-bundle-loading.md new file mode 100644 index 00000000..99c2b114 --- /dev/null +++ b/design-docs/9.2-table-group-bundle-loading.md @@ -0,0 +1,244 @@ +# TableGroup — Bundle Loading for BI Dashboards + +## Status: Draft + +Parent: [9-generalized-data-source-plugins.md](9-generalized-data-source-plugins.md) + +## 1. Problem + +BI dashboards (Superset, Power BI, Metabase, Tableau) organize multiple datasets under a single dashboard with **shared filter context**. 
Today, Data Formulator treats dashboards as simple folders: the user must manually browse into each dataset and load them one at a time. This loses the relationship between datasets and discards admin-defined filters. + +## 2. Concept: `table_group` + +A **table_group** is a loadable bundle — a catalog node that contains multiple tables and optional shared filters defined by the BI tool's admin. + +| Platform | Group Unit | Tables | Shared Filters | +|----------|-----------|---------------|----------------| +| Superset | Dashboard | Datasets | Native filters | +| Power BI | Report | Dataset tables | Slicers / parameters | +| Metabase | Dashboard | Questions (SQL) | Dashboard parameters | +| Tableau | Workbook | Worksheets / data sources | Workbook filters | +| Database | *(none)* | Tables are standalone | N/A | + +For databases, everything stays as-is — no grouping. The `table_group` concept only appears when the source provides it. + +## 3. Node Type Extension + +Current: `node_type: 'namespace' | 'table'` + +Extended: `node_type: 'namespace' | 'table' | 'table_group'` + +### 3.1 Backend — CatalogNode + +```python +CatalogNode( + name="Sales Dashboard", + node_type="table_group", + path=["workspace", "Sales Dashboard"], + metadata={ + "tables": [ + {"name": "orders", "dataset_id": 42, "row_count": 100000, + "columns": ["order_id", "region", "order_date", "amount", ...]}, + {"name": "customers", "dataset_id": 99, "row_count": 5000, + "columns": ["customer_id", "name", "region", "segment", ...]}, + {"name": "products", "dataset_id": 101, "row_count": 200, + "columns": ["product_id", "category", "price", ...]}, + ], + "source_filters": [ + { + "name": "Region", + "column": "region", + "input_type": "select", # select | numeric | time | text + "column_type": "STRING", # STRING | NUMERIC | TEMPORAL + "multi": True, + "required": False, + "default_value": ["APAC"], + "applies_to": [42, 99], # dataset IDs this filter targets + }, + { + "name": "Year", + "column": 
"order_year", + "input_type": "select", + "column_type": "NUMERIC", + "multi": False, + "required": False, + "default_value": None, + "applies_to": [42], + }, + ], + }, + children=[] # no children in tree; tables listed in metadata +) +``` + +A `table_group` node is a **leaf** in the catalog tree — not expandable. Member tables are shown in the right panel's group load UI. + +### 3.2 Frontend — CatalogTreeNode + +```typescript +interface CatalogTreeNode { + name: string; + node_type: 'namespace' | 'table' | 'table_group'; + path: string[]; + metadata: Record<string, unknown> | null; + children?: CatalogTreeNode[]; +} +``` + +### 3.3 Source Filter Definition + +```typescript +interface SourceFilter { + name: string; // Display label + column: string; // Physical column name + input_type: 'select' | 'numeric' | 'time' | 'text'; + column_type: 'STRING' | 'NUMERIC' | 'TEMPORAL' | 'BOOLEAN'; + multi: boolean; + required: boolean; + default_value?: unknown; + applies_to?: number[]; // dataset IDs; omit = applies to all + options?: string[]; // Pre-fetched for small cardinality; omit = lazy-load +} +``` + +## 4. Tree Display + +`table_group` nodes use a **distinct icon** — dashboard/grid icon rather than a folder icon — to visually distinguish them from namespace folders. + +``` +📁 My Workspace ← namespace (folder icon) + 📊 Sales Dashboard (3 tables) ← table_group (dashboard icon, leaf) + 📊 Marketing Dashboard (1 table) ← table_group (leaf) + 📁 SQL Lab ← namespace + 📋 ad_hoc_query_1 ← table +``` + +`table_group` nodes are **not expandable** — tables are listed in the right panel's group load UI instead. + +## 5. Load Flow + +### 5.1 Load All (Group-Level) + +When the user selects a `table_group` node and clicks **Load Dashboard**: + +1. All member datasets are fetched in parallel (each respecting the per-table row limit and sort settings). +2. Shared `source_filters` are applied as WHERE clauses to each dataset whose `applies_to` list includes that dataset. +3. 
All tables appear in the Data Formulator workspace, tagged with their group origin. +4. If the user wants to remove some tables afterward, they can do so from the workspace. + +### 5.2 Re-Filter + +If the user changes filter values and loads again, it's a fresh load. No incremental update — just re-query with new filter values. + +## 6. Right Panel — Group Load UI + +When a `table_group` is selected: + +``` +┌─────────────────────────────────────────┐ +│ 📊 Sales Dashboard │ +│ │ +│ ── Tables ──────────────────────────── │ +│ 📋 orders 100,000 rows │ +│ ▸ 12 columns │ +│ 📋 customers 5,000 rows │ +│ ▸ 8 columns │ +│ 📋 products 200 rows │ +│ ▸ 5 columns │ +│ │ +│ (click ▸ to expand column list) │ +│ │ +│ 📋 orders 100,000 rows │ +│ ▾ 12 columns │ +│ order_id, region, order_date, │ +│ amount, customer_id, product_id, │ +│ status, ship_date, ... │ +│ │ +│ ── Source Filters ──────────────────── │ +│ Region [APAC ▾] multi │ +│ Year [ ▾] │ +│ │ +│ ── Load Settings ───────────────────── │ +│ Row limit per table [All ▾] │ +│ │ +│ [Load Dashboard] │ +└─────────────────────────────────────────┘ +``` + +When a child `table` under the group is selected, the standard per-table load panel appears (row limit, sort, etc.). + +## 7. Backend Data Flow + +### 7.1 `ls()` — Attach Filters to Group Metadata + +During `ls()` for a dashboard-level path, the loader: + +1. Fetches the dashboard detail (contains `native_filter_configuration` or equivalent). +2. Parses filter definitions into the `source_filters` format. +3. Attaches them to the `table_group` node's `metadata`. +4. Filter option values are included in `metadata.source_filters[].options` (admin-defined filters are typically low cardinality). 
+ +### 7.2 `fetch_data_as_arrow()` — Apply Source Filters + +`import_options` gains a `source_filters` key: + +```python +import_options = { + "size": 50000, + "sort_by": "order_date", + "sort_order": "DESC", + "source_filters": [ + {"column": "region", "operator": "IN", "value": ["APAC", "EMEA"]}, + {"column": "order_year", "operator": "EQ", "value": 2025}, + ] +} +``` + +`fetch_data_as_arrow()` builds WHERE clauses from `source_filters` and appends them to the base SQL. + +### 7.3 Group Load API + +Load all tables with shared filters: + +``` +POST /api/connectors/{loader}/load-group +{ + "group_path": ["workspace", "Sales Dashboard"], + "row_limit": -1, + "source_filters": [ + {"column": "region", "operator": "IN", "value": ["APAC"]}, + {"column": "order_year", "operator": "EQ", "value": 2025} + ] +} +``` + +Response: streams Arrow IPC batches for each table, or returns them sequentially. + +## 8. Frontend State — Tables in Workspace + +Loaded tables from a group are stored as separate `DFTable` entries — no group tracking needed. Each table is independently usable in Data Formulator's analysis pipeline. The table name carries the dashboard context: + +```typescript +{ + id: "conn_superset_orders_1713200000", + tableName: "Sales Dashboard / orders", + ... +} +``` + +If the user wants to re-load with different filters, they go back to the `table_group` node in the catalog tree and load again. + +## 9. 
Implementation Phases + +### Phase A: Backend — `table_group` in CatalogNode + filter extraction +- Add `table_group` to `CatalogNode.node_type` +- SupersetLoader: extract native filters during `ls()`, attach to `metadata.source_filters` +- SupersetLoader: honor `source_filters` in `fetch_data_as_arrow()` + +### Phase B: Frontend — Tree + Group Load Panel +- Extend `CatalogTreeNode` type with `table_group` +- Dashboard icon for `table_group` nodes +- Group load panel: shows tables, source filter controls, "Load Dashboard" button +- Wire load action to call backend for each table with filters + + diff --git a/design-docs/9.3-promoted-data-source-cards.md b/design-docs/9.3-promoted-data-source-cards.md new file mode 100644 index 00000000..82fef66a --- /dev/null +++ b/design-docs/9.3-promoted-data-source-cards.md @@ -0,0 +1,356 @@ +# Promoted Data Source Cards + +## Status: Complete (Phase A + B done, Phase C merged into doc 9 Phase 3 cleanup) + +Parent: [9-generalized-data-source-plugins.md](9-generalized-data-source-plugins.md) + +### Implementation Summary + +**Phase A — Backend API redesign (complete):** +- [x] Single shared `connectors_bp` blueprint — all action routes accept `connector_id` in JSON body +- [x] `GET /api/data-loaders` — lists available loader types with param definitions +- [x] `GET /api/connectors` — lists registered instances with connection status +- [x] `POST /api/connectors` — creates user connector instance, auto-connects +- [x] `DELETE /api/connectors/{id}` — tears down instance, clears vault +- [x] Action routes: `/connect`, `/disconnect`, `/get-status`, `/get-catalog`, `/get-catalog-tree`, `/preview-data`, `/import-data`, `/import-group`, `/refresh-data` +- [x] No per-instance Flask blueprints — eliminated `create_blueprint()` and dynamic registration +- [x] Side-effect-free `/get-status`; auto-reconnect moved to `/connect` +- [x] Admin-provisioned connectors from `connectors.yaml` + `DF_SOURCES__*` env vars + +**Phase B — Frontend 
promoted cards (complete):** +- [x] Connected sources promoted as top-level cards on Load Data menu +- [x] "Add Connection" card with left/right layout: pick type → fill params → Add & Connect +- [x] Each card click → `DataLoaderForm` (browse-only when connected) +- [x] Removed legacy "Database" tab from UI +- [x] `DBTableManager` uses only `serverConfig.SOURCES` (DataConnector) for source discovery + +**Phase C — Cleanup:** +- [x] Unify Superset plugin into `/api/connectors/` flow (done via SupersetLoader) +- [x] Disconnect / Delete actions on each card + +> **Note:** `dataLoaderConnectParams` stays in Redux — it manages transient form field state (partially filled connection forms). Registered connection metadata lives server-side via the connectors API. + +## 1. Problem + +Today, all external data sources (MySQL, PostgreSQL, Superset, …) are crammed behind a single "Database" card on the Load Data menu. The user clicks "Database" → picks a source from a list → fills connection params → connects → browses tables. This is: + +- **Deep**: 4 levels of nesting before the user sees any data. +- **Mixed concerns**: The connection form and the data browser share the same panel, wasting space on param fields the user doesn't need after connecting. +- **Inconsistent**: Superset (plugin-based SSO) already gets its own top-level card, while generic connectors are hidden inside the "Database" section. + +## 2. Proposal + +### 2.1 Connected sources become top-level cards + +Once a data source is registered / connected, it appears as its own card on the Load Data menu page — at the same level as "Upload File", "Load from URL", "Sample Datasets", etc. 
+ +**Before:** +``` +Local data + [Sample Datasets] [Upload File] + [Paste Data] [Extract Unstructured] + +Connect to a data source + [Load from URL] [Database] ← everything hidden in here + [Apache Superset] +``` + +**After:** +``` +Local data + [Sample Datasets] [Upload File] + [Paste Data] [Extract Unstructured] + +Data sources + [Load from URL] + [MySQL · mydb] ← promoted, one card per connection + [PostgreSQL · analytics] + [Superset · prod] + [+ Add Connection] ← register a new source +``` + +### 2.2 "Add Connection" card + +A card on the menu page (styled similarly to "add new session" on the front page) that opens the connection registration flow: + +1. User picks a source type (MySQL, PostgreSQL, Superset, …). +2. User fills connection params (host, port, credentials, etc.). +3. On success, a new card appears on the menu page. + +This replaces the current "Database" tab's multi-step flow. + +### 2.3 DataConnectorPane — browse-only + +Clicking a connected source card opens a **DataConnectorPane**: just tree + preview + load controls. No connection params, no source picker — those were handled at registration time. + +``` +┌─────────────────────────────────────────────────────┐ +│ MySQL · mydb [⚙] [Disconnect]│ +├──────────┬──────────────────────────────────────────┤ +│ 🔍 filter│ orders │ +│ ─────────│ 1,234 rows × 8 columns │ +│ 📁 public│ ┌──────────────────────────────────┐ │ +│ 📄 users│ │ id │ name │ email │ created_at │ │ +│ 📄 orders│ │ 1 │ ... │ ... │ ... │ │ +│ 📄 items │ │ 2 │ ... │ ... │ ... │ │ +│ 📁 staging│ └──────────────────────────────────┘ │ +│ │ │ +│ │ Row limit: [2,000,000 ▾] [Load Table] │ +└──────────┴───────────────────────────────────────────┘ +``` + +### 2.4 Disconnect vs Delete + +| Action | Effect | +|------------|---------------------------------------------------------------| +| Disconnect | Drops the active session / token. Card stays on the menu (grayed or with "reconnect" badge). Clicking it triggers re-auth. 
| +| Delete | Removes the card entirely. Clears saved credentials (vault entries, tokens, cookies). | + +Disconnect is the default quick action (e.g., session expired, user switches accounts). Delete is a deliberate destructive action (confirmation required). + +## 3. Card Data Model + +Each registered connection is persisted as a **DataSourceEntry**: + +```typescript +interface DataSourceEntry { +  /** Unique ID for this connection instance */ +  id: string; +  /** Connector type key (e.g. "mysql", "superset") */ +  source_type: string; +  /** User-facing label, e.g. "MySQL · mydb" */ +  display_name: string; +  /** Connection parameters (host, port, database, etc.) */ +  params: Record<string, any>; +  /** Whether this connection is currently authenticated / active */ +  connected: boolean; +  /** Timestamp of last successful connection */ +  last_connected?: number; +} +``` + +This replaces the current flat `dataLoaderConnectParams` map (which stores params by loader type, limiting support to one connection per type). + +## 4. Key Design Decisions + +### 4.1 Multiple connections of the same type + +A user may have two MySQL connections (e.g. "MySQL · prod" and "MySQL · staging"). Each gets its own card. The `DataSourceEntry.id` distinguishes them, not the `source_type`. + +### 4.2 Where entries are stored + +- **Local mode**: In Redux state, persisted to localStorage (same as current `dataLoaderConnectParams`). +- **Azure/ephemeral mode**: In workspace session on the server side, with credentials in vault. + +### 4.3 Legacy "Database" tab + +Removed. All its functionality is absorbed by: +- "Add Connection" card → connection registration +- Per-source cards → browsing / loading + +### 4.4 Re-auth flow + +When a connection's `connected` is false (session expired, token revoked): +- The card appears with a visual indicator (dimmed icon, "reconnect" label). 
+- Clicking it opens a lightweight re-auth prompt (just the credential fields, not the full registration form), since host/port/database are already known. + +## 5. API Redesign + +### 5.1 Current Problems + +| # | Issue | Detail | +|---|-------|--------| +| 1 | **Ghost endpoints** | `register_data_connectors()` pre-creates Flask blueprints for every discovered loader at startup. `/api/connectors/mysql/...` exists even if no MySQL connection was ever created. | +| 2 | ~~POST for reads~~ | Kept POST — params are JSON bodies (paths, filters, options). Common industry pattern (Elasticsearch, GraphQL). Not worth the migration cost for marginal REST-purity benefit. | +| 3 | **snake_case URLs** | `/catalog/list_tables` (snake) vs `/data/load-group` (kebab) vs `/auth/token-connect` (kebab). Inconsistent. | +| 4 | **Status has side effects** | `/auth/status` calls `_try_auto_reconnect()` — creates loaders, hits vault. Should be side-effect-free. | +| 5 | **Dual namespaces** | Connectors: `/api/connectors/{id}/`. Plugins: `/api/plugins/{id}/`. Same concept, different URL trees. | +| 6 | **Single-instance** | `DATA_CONNECTORS` is keyed by loader type. Can't have two MySQL connections. | +| 7 | **Auth ↔ catalog coupling** | `/auth/connect` response includes `hierarchy`, `effective_hierarchy`, `pinned_scope` — catalog data bundled into auth. | +| 8 | **Inconsistent param names** | `filter` vs `table_filter` vs `source_table` vs `table_name` vs `size` vs `row_limit`. | + +### 5.2 New API Surface + +Principle: **one shared blueprint, no per-instance routes**. All action routes accept `connector_id` in the JSON body. This avoids Flask's limitation that blueprints can't be registered after the first request, and eliminates ghost endpoints entirely. 
+ +#### Discovery + CRUD (always available) + +``` +GET /api/data-loaders → list available loader types + param definitions +GET /api/connectors → list registered connector instances + status +POST /api/connectors → create instance (type + display_name + params → auto-connect) +DELETE /api/connectors/{id} → delete instance, clear vault credentials +``` + +#### Action routes (shared — connector_id in body) + +All action routes are `POST` and accept `{"connector_id": "mysql:prod", ...}` in the JSON body. +The handler resolves the `DataConnector` from `DATA_CONNECTORS[connector_id]`. + +``` +# Connection +POST /api/connectors/connect → {connector_id, params?, mode?, persist?} +POST /api/connectors/disconnect → {connector_id} +POST /api/connectors/get-status → {connector_id} (no side effects) + +# Catalog +POST /api/connectors/get-catalog → {connector_id, path?, filter?} +POST /api/connectors/get-catalog-tree → {connector_id, filter?} + +# Data +POST /api/connectors/preview-data → {connector_id, source_table, limit?} +POST /api/connectors/import-data → {connector_id, source_table, table_name?, import_options?} +POST /api/connectors/import-group → {connector_id, tables, row_limit?, source_filters?, group_name?} +POST /api/connectors/refresh-data → {connector_id, table_name} +``` + +#### Why not `/api/connectors/{id}/action`? + +Flask's `register_blueprint()` cannot be called after the app handles its first request. +Per-instance blueprints require dynamic registration at runtime (when user creates a new connection). +Putting `connector_id` in the body instead of the URL means all routes live on a single blueprint registered once at startup. +This is the same pattern used by GraphQL, Elasticsearch, and other APIs that dispatch to resources via payload rather than URL path. 
 + +#### Implementation + +Four layers of storage, merged at runtime: + +``` +┌───────────────────────────────────────────────────────────┐ +│ Admin config (global, read-only for users)                │ +│ DATA_FORMULATOR_HOME/connectors.yaml                      │ +│   + DF_SOURCES__* env vars                                │ +│ Shared across all users                                   │ +├───────────────────────────────────────────────────────────┤ +│ User config (per-user, cross-workspace)                   │ +│ DATA_FORMULATOR_HOME/users/<user_id>/connectors.yaml      │ +│ CRUD by the user                                          │ +├───────────────────────────────────────────────────────────┤ +│ Credentials (per-identity + connector_id)                 │ +│ DATA_FORMULATOR_HOME/credentials.db (encrypted)           │ +│ Vault — both admin & user connectors                      │ +├───────────────────────────────────────────────────────────┤ +│ In-memory (transient, union of admin + user)              │ +│ connector_id → DataConnector w/ live loader               │ +└───────────────────────────────────────────────────────────┘ +``` + +Same file name (`connectors.yaml`), same format, two scopes. `data-sources.yml` is retired. + +**Admin config** (`DATA_FORMULATOR_HOME/connectors.yaml`, `DF_SOURCES__*` env vars): +- Global connectors shared across all users. Read-only for users. +- Loaded at startup and merged into memory. Cannot be deleted by users. + +**User config** (`DATA_FORMULATOR_HOME/users/<user_id>/connectors.yaml`): +- Per-user connector definitions: `{connector_id, loader_type, display_name, icon, default_params}`. +- Same `users/<user_id>/` directory tree used by workspace storage. +- Created via `POST /api/connectors`, deleted via `DELETE /api/connectors/{id}`. +- Survives server restarts, available across all workspaces for that user. + +**Vault** (per-identity + connector_id): +- Encrypted credentials (passwords, tokens) for both admin and user connectors. +- Written on `connect` (with `persist: true`), cleared on `delete`. +- `disconnect` keeps vault credentials by default (so "reconnect" is fast). + +**In-memory cache** (`DATA_CONNECTORS` dict): +- Union of admin + user connectors, lazily hydrated. 
+- Holds live `ExternalDataLoader` instances per identity. +- Transient — rebuilt from user config + vault on server restart / first request. + +**`GET /api/connectors`** returns the merged list with a `source` field: +```json +[ + {"id": "mysql:prod", "source": "admin", "deletable": false, ...}, + {"id": "mysql:my-local", "source": "user", "deletable": true, ...} +] +``` + +A single `connectors_bp` Flask Blueprint handles everything: +- Helper `_resolve_connector(data)` extracts `connector_id` from the JSON body and looks up `DATA_CONNECTORS[connector_id]`, returning 404 if not found (after attempting lazy load from user config). +- `DataConnector` is a plain Python object with methods like `_connect()`, `_disconnect()`, `_require_loader()`, etc. +- At startup, `register_data_connectors(app)` registers `connectors_bp` once and populates `DATA_CONNECTORS` from admin config + user config. +- At runtime, `POST /api/connectors` (create) writes to user config + adds to memory. +- `DELETE /api/connectors/{id}` removes from user config + vault + memory (blocked for admin connectors). 
+ +**Disconnect vs Delete:** + +| Action | In-memory loader | User config | Vault creds | Card visible | +|--------|-----------------|-------------|-------------|-------------| +| Disconnect | Cleared | Kept | Kept (reconnect fast) | Yes, shows "reconnect" | +| Delete | Cleared | Removed | Cleared | No | + +#### What changed + +| Before | After | Why | +|--------|-------|-----| +| `/api/connectors/{id}/connect` | `POST /api/connectors/connect` `{connector_id}` | No per-instance routes; connector_id in body | +| `/api/connectors/{id}/disconnect` | `POST /api/connectors/disconnect` `{connector_id}` | Same | +| `/api/connectors/{id}/status` | `POST /api/connectors/get-status` `{connector_id}` | Same; verb-based name | +| `/api/connectors/{id}/catalog` | `POST /api/connectors/get-catalog` `{connector_id}` | Same; verb-based name | +| `/api/connectors/{id}/catalog/tree` | `POST /api/connectors/get-catalog-tree` `{connector_id}` | Same; flat path | +| `/api/connectors/{id}/preview` | `POST /api/connectors/preview-data` `{connector_id}` | Same; verb-based name | +| `/api/connectors/{id}/import` | `POST /api/connectors/import-data` `{connector_id}` | Same; verb-based name | +| `/api/connectors/{id}/import-group` | `POST /api/connectors/import-group` `{connector_id}` | Same | +| `/api/connectors/{id}/refresh` | `POST /api/connectors/refresh-data` `{connector_id}` | Same; verb-based name | +| Per-instance Flask Blueprint | None | Eliminated — single shared blueprint, no dynamic registration | + +#### Dropped + +| Endpoint | Reason | +|----------|--------| +| `create_blueprint()` | No per-instance blueprints. `DataConnector` is a plain object. | +| `/catalog/metadata` | Merged into `/catalog` (done in Phase A) | +| `/catalog/list_tables` | Frontend never uses it. 
`get-catalog-tree` covers the use case | +| `/auth/token-connect` | Absorbed into `/connect` with a `mode` field (done in Phase A) | + +### 5.3 Instance ID scheme + +Each connector instance gets a stable ID: `{loader_type}:{user_label}`, e.g. `mysql:prod`, `superset:analytics`. + +- The `loader_type` portion maps to a `DATA_LOADERS` key for instantiation. +- The `user_label` is a slug provided at creation (defaulting to the database name or host). +- URL-safe: only lowercase alphanumeric + hyphen + colon. + +For admin-provisioned connectors (YAML/env config), instances are pre-created at startup with their configured IDs — they behave identically to user-created ones. + +### 5.4 Migration path + +The old per-instance blueprint routes (`/api/connectors/{id}/connect`, etc.) have never shipped. +No backward compatibility needed — we replace them with shared routes that take `connector_id` in the body. +`create_blueprint()` and `_register_*_routes()` methods are deleted. +`register_data_connectors()` is simplified: register `connectors_bp` once, populate `DATA_CONNECTORS` dict. + +## 6. Scope & Phases + +### Phase A — Single-blueprint backend + multi-instance support + +Existing infrastructure that stays as-is: +- `ExternalDataLoader` base class and all loader implementations +- Vault-based credential storage +- `_loaders` in-memory cache pattern + +New work: +- [x] `GET /api/data-loaders` — returns loader types from `DATA_LOADERS` registry with param definitions. +- [x] `GET /api/connectors` — lists registered instances with connection status. +- [x] `POST /api/connectors` — creates a `DataConnector` in `DATA_CONNECTORS` dict (no blueprint registration). Auto-connects if params provided. +- [x] `DELETE /api/connectors/{id}` — tears down instance, clears vault. +- [x] Move all per-instance route handlers to shared routes on `connectors_bp` (`/api/connectors/connect`, `/get-status`, `/get-catalog`, `/preview-data`, `/import-data`, `/refresh-data`, etc.) 
that accept `connector_id` in JSON body. +- [x] Delete `create_blueprint()`, `_register_connection_routes()`, `_register_catalog_routes()`, `_register_data_routes()`. +- [x] Make `/status` side-effect-free (move auto-reconnect logic to `/connect`). +- [x] Merge `/auth/token-connect` into `/connect` with `mode` field. +- [x] Simplify `register_data_connectors()` — register `connectors_bp` once, populate `DATA_CONNECTORS` from config (no per-instance blueprint). +- [x] Admin-provisioned connectors (YAML/env) auto-create instances at startup. + +### Phase B — Frontend: menu page cards + generic URLs + +- [x] Replace `getConnectorUrls(id)` with static `CONNECTOR_ACTION_URLS` constants (no ID in URL path). +- [x] Update all frontend call sites to send `connector_id` in POST body instead of URL path. +- [x] Render connected connectors as promoted cards on the Load Data menu page. +- [x] "Add Connection" card → left/right layout: pick type, fill params + display name, "Add & Connect" → `POST /api/connectors`. +- [x] Each card click → opens `DataLoaderForm` (browse-only when connected). +- [x] Disconnect / Delete actions on each card. + +### Phase C — Cleanup + +- [x] Unify Superset plugin (`/api/plugins/superset/`) into the `/api/connectors/` flow. +- [x] Remove legacy "Database" tab. diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..b0eb02f4 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,27 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +# Docker Compose configuration for Data Formulator. +# +# Quick start: +# 1. Copy .env.template to .env and fill in your API keys. +# 2. docker compose up --build +# 3. Open http://localhost:5567 in your browser. + +services: + data-formulator: + build: + context: . + dockerfile: Dockerfile + image: data-formulator:latest + ports: + - "5567:5567" + env_file: + - .env + volumes: + # Persist workspace data (uploaded files, sessions, etc.) across container restarts. 
+ - data_formulator_home:/home/appuser/.data_formulator + restart: unless-stopped + +volumes: + data_formulator_home: diff --git a/docs-cn/5-datasource_plugin-development-guide.md b/docs-cn/5-datasource_plugin-development-guide.md new file mode 100644 index 00000000..f243dc60 --- /dev/null +++ b/docs-cn/5-datasource_plugin-development-guide.md @@ -0,0 +1,595 @@ +# 数据源插件开发指南 + +> **目标读者**:要为 Data Formulator 开发新数据源插件的开发者。 +> +> **前置阅读**:[1-data-source-plugin-architecture.md](1-data-source-plugin-architecture.md)(设计原理)、[1-sso-plugin-architecture.md](1-sso-plugin-architecture.md)(SSO + 凭证架构)。 +> +> **参考实现**:`plugins/superset/`(后端)+ `src/plugins/superset/`(前端)。 + +--- + +## 目录 + +1. [快速上手:新增一个插件](#1-快速上手新增一个插件) +2. [后端:目录结构与约定](#2-后端目录结构与约定) +3. [后端:基类契约](#3-后端基类契约) +4. [后端:路由设计](#4-后端路由设计) +5. [后端:PluginDataWriter](#5-后端plugindatawriter) +6. [后端:认证路由规范(三模式协商)](#6-后端认证路由规范三模式协商) +7. [后端:CredentialVault 集成](#7-后端credentialvault-集成) +8. [前端:目录结构与约定](#8-前端目录结构与约定) +9. [前端:模块契约](#9-前端模块契约) +10. [前端:国际化](#10-前端国际化) +11. [环境变量命名规范](#11-环境变量命名规范) +12. [测试规范](#12-测试规范) +13. [核心代码零修改原则](#13-核心代码零修改原则) +14. [Checklist:插件上线前检查](#14-checklist插件上线前检查) + +--- + +## 1. 
快速上手:新增一个插件 + +假设要接入 Metabase,只需两步: + +**后端**:在 `py-src/data_formulator/plugins/` 下创建 `metabase/` 目录。 + +``` +plugins/ +├── base.py # 框架,不要修改 +├── data_writer.py # 框架,不要修改 +├── __init__.py # 框架,不要修改 +└── metabase/ # ← 新增 + ├── __init__.py # plugin_class = MetabasePlugin + ├── metabase_client.py + └── routes/ + ├── __init__.py + ├── auth.py + ├── catalog.py + └── data.py +``` + +**前端**:在 `src/plugins/` 下创建 `metabase/` 目录。 + +``` +src/plugins/ +├── types.ts # 框架,不要修改 +├── registry.ts # 框架,不要修改 +├── PluginHost.tsx # 框架,不要修改 +├── index.ts # 框架,不要修改 +└── metabase/ # ← 新增 + ├── index.ts # default export: DataSourcePluginModule + ├── MetabasePanel.tsx + └── locales/ + ├── en.json + └── zh.json +``` + +**配置**:在 `.env` 中设置必须环境变量: + +```bash +PLG_METABASE_URL=https://metabase.example.com +``` + +**启动**:重启 Data Formulator → 框架自动发现并启用 → 前端数据上传对话框中出现 Metabase Tab。 + +**核心原则:不修改任何框架文件。** 如果需要改 `plugins/__init__.py`、`registry.ts`、`app.py` 才能让新插件工作,说明框架有 bug,应该修框架。 + +--- + +## 2. 后端:目录结构与约定 + +``` +plugins// +├── __init__.py # 必须:暴露 plugin_class 属性 +├── _client.py # 建议:封装外部系统 HTTP API +├── auth_bridge.py # 可选:SSO 透传桥接 +├── catalog.py # 可选:目录浏览逻辑 +├── session_helpers.py # 建议:Plugin-namespaced session 操作 +└── routes/ + ├── __init__.py + ├── auth.py # 必须:认证路由 + ├── catalog.py # 建议:目录路由 + └── data.py # 必须:数据加载路由 +``` + +### 关键约定 + +- `__init__.py` 必须有一个模块级属性 `plugin_class`,指向 `DataSourcePlugin` 的具体子类。 +- 如果插件有重量级依赖(如某个 SDK),应在 `__init__.py` 的顶层 import 中引入。框架会 `try/except ImportError`,缺依赖时优雅跳过并记录原因。 +- Session key 必须用 `plugin__` 前缀隔离(如 `plugin_superset_token`、`plugin_metabase_token`),防止多插件间状态冲突。 + +--- + +## 3. 
后端:基类契约 + +```python +from data_formulator.plugins.base import DataSourcePlugin + +class MetabasePlugin(DataSourcePlugin): + + @staticmethod + def manifest() -> dict: + return { + # ── 必须 ── + "id": "metabase", # 全局唯一 slug + "name": "Metabase", # 显示名 + "env_prefix": "PLG_METABASE", # 环境变量前缀 + "required_env": ["PLG_METABASE_URL"], # 全部存在才启用 + + # ── 可选 ── + "icon": "metabase", + "description": "Connect to Metabase to browse and load questions.", + "auth_modes": ["password", "sso"], # 支持的认证方式 + "capabilities": ["questions", "dashboards"], + } + + def create_blueprint(self) -> Blueprint: + """组装路由。url_prefix 必须是 /api/plugins//""" + ... + + def get_frontend_config(self) -> dict: + """返回给前端的非敏感配置。绝对不能包含密钥。""" + ... + + def on_enable(self, app) -> None: + """初始化共享服务(client、catalog 等),存到 app.extensions。""" + ... + + def supports_sso_passthrough(self) -> bool: + """外部系统与 DF 共享 IdP 时返回 True。""" + return False +``` + +### manifest 字段说明 + +| 字段 | 类型 | 必须 | 说明 | +|------|------|------|------| +| `id` | `str` | 是 | 全局唯一 slug,用作路由前缀、session key 前缀、前端匹配 key | +| `name` | `str` | 是 | 人类可读名称,显示在 UI | +| `env_prefix` | `str` | 是 | 环境变量命名前缀(如 `PLG_METABASE`) | +| `required_env` | `list[str]` | 是 | 必须环境变量列表,缺任一则插件不启用 | +| `icon` | `str` | 否 | 图标标识,前端 Icon 组件使用 | +| `description` | `str` | 否 | 简短描述 | +| `auth_modes` | `list[str]` | 否 | 支持的认证方式:`"password"` / `"sso"` / `"api_key"` | +| `capabilities` | `list[str]` | 否 | 能力声明,前端可据此决定 UI | +| `version` | `str` | 否 | 插件版本号 | +| `optional_env` | `list[str]` | 否 | 可选环境变量(缺失不影响启用) | + +--- + +## 4. 后端:路由设计 + +### URL 前缀 + +所有路由都在 `/api/plugins//` 下: + +``` +/api/plugins//auth/login POST 登录 +/api/plugins//auth/status GET 认证状态 +/api/plugins//auth/logout POST 登出 +/api/plugins//catalog/... 
GET 数据目录 +/api/plugins//data/load-* POST 数据加载 +``` + +### 响应格式 + +统一 JSON 格式: + +```json +// 成功 +{"status": "ok", ...payload} + +// 错误 +{"status": "error", "message": "Human-readable error description"} +``` + +HTTP 状态码语义: +- `200` — 成功 +- `400` — 请求参数错误(插件负责校验) +- `401` — 未认证或认证过期 +- `502` — 外部系统不可达或返回错误 +- `503` — 插件依赖的服务不可用(如 Vault 未配置) + +--- + +## 5. 后端:PluginDataWriter + +插件加载的数据通过 `PluginDataWriter` 写入用户 Workspace: + +```python +from data_formulator.plugins.data_writer import PluginDataWriter + +writer = PluginDataWriter("metabase") # plugin_id + +result = writer.write_dataframe( + df, # pandas DataFrame + "sales_data", # 表名 + overwrite=True, # True=覆盖同名, False=自动加后缀 + source_metadata={ # 记录来源,供刷新使用 + "plugin": "metabase", + "question_id": 42, + }, +) +# result = {"table_name": "sales_data", "row_count": 1234, "columns": [...], "is_renamed": False} +``` + +**不要直接调用 `workspace.write_parquet()`**。`PluginDataWriter` 封装了身份解析、表名清洗、元数据打标等逻辑。 + +--- + +## 6. 后端:认证路由规范(三模式协商) + +**这是插件开发中最重要的规范。** + +> **代码层面强制约束**:所有插件必须继承 `PluginAuthHandler` 基类(`plugins/auth_base.py`)。 +> 基类自动生成 `/login`、`/logout`、`/status`、`/me` 标准路由,内置 Vault 生命周期管理。 +> **插件作者无需手动处理 Vault 存储、清除、自动登录逻辑**——基类全部代劳。 + +### 三模式协商流程 + +``` +用户打开插件面板 → GET /api/plugins//auth/status + │ + ▼ +后端按优先级依次尝试: + │ + ├─ ① Session 中已有有效 token? + │ → {"authenticated": true, "mode": "session"} + │ + ├─ ② SSO token 可用 + 插件 supports_sso_passthrough()? + │ → 尝试 SSO 透传登录 + │ → 成功 → {"authenticated": true, "mode": "sso"} + │ → 失败 → 继续 + │ + ├─ ③ CredentialVault 中有已存凭证? 
+ │ → 尝试用已存凭证登录外部系统 + │ → 成功 → {"authenticated": true, "mode": "vault"} + │ → 失败 → {"authenticated": false, "vault_stale": true} + │ + └─ 全部未命中 + → {"authenticated": false, "available_modes": ["password", "api_key"]} + +前端根据响应: + ├─ authenticated=true → 直接显示数据目录 + ├─ vault_stale=true → 显示登录表单 + 提示"已保存的凭证已失效" + └─ authenticated=false → 显示登录表单 +``` + +### 使用 PluginAuthHandler(必须) + +```python +# routes/auth.py + +from data_formulator.plugins.auth_base import PluginAuthHandler + +class MetabaseAuthHandler(PluginAuthHandler): + """插件作者只需实现以下 4 个方法。""" + + def do_login(self, username: str, password: str) -> dict: + """与外部系统认证,成功后写入 Flask session。 + 返回 {"user": {"id": ..., "username": ..., ...}} + 失败时抛异常。""" + result = _bridge().login(username, password) + save_session(result["access_token"], ...) + return {"user": {...}} + + def do_clear_session(self) -> None: + """清除插件在 Flask session 中的所有 key。""" + clear_session() + + def get_session_auth(self) -> dict | None: + """检查当前 session 是否已认证。 + 已认证返回 {"authenticated": True, "mode": "session", "user": {...}} + 未认证返回 None。""" + token, user = require_auth() + if token and user: + return {"authenticated": True, "mode": "session", "user": format_user(user)} + return None + + def get_current_user(self) -> dict | None: + """从 session 中取当前用户,或 None。""" + return get_user() + + +# 创建 handler 实例 → 生成标准路由 Blueprint +_handler = MetabaseAuthHandler("metabase") +auth_bp = _handler.create_auth_blueprint("/api/plugins/metabase/auth") + +# 如有插件专属路由,可继续追加到同一个 Blueprint +@auth_bp.route("/sso/callback", methods=["POST"]) +def sso_callback(): + ... 
+``` + +### 基类自动处理的路由 + +| 路由 | 方法 | 基类自动行为 | +|------|------|-------------| +| `/login` | POST | 调用 `do_login()` + remember=true 存 Vault / remember=false 清 Vault | +| `/logout` | POST | 调用 `do_clear_session()` + **强制清除 Vault**(不可遗漏) | +| `/status` | GET | Session → Vault 自动登录 → 未认证(三模式协商) | +| `/me` | GET | 返回 `get_current_user()` 或 401 | + +### 核心规则总结 + +| 规则 | 说明 | 由基类强制 | +|------|------|-----------| +| **先 Session → 再 SSO → 再 Vault → 最后手动** | 严格按优先级链,不跳步 | ✅ status 路由 | +| **Vault 凭证必须实测验证** | 从 Vault 取出后必须**实际登录**外部系统 | ✅ try_vault_login | +| **失效凭证返回 vault_stale** | 外部系统密码已改时返回 `vault_stale: true` | ✅ try_vault_login | +| **remember=true 才存 Vault** | 用户主动勾选"记住凭证"后才写入 | ✅ login 路由 | +| **remember=false 要清理 Vault** | 用户不勾选记住 → 删除旧凭证 | ✅ login 路由 | +| **退出必须同时清 Session + Vault** | 防止"退出后 Vault 自动登录回来"的死循环 | ✅ logout 路由 | +| **凭证只在服务端流转** | 前端只知道 authenticated + mode,不接触明文 | ✅ 架构设计 | +| **Vault 操作 best-effort** | 存/删/取失败时静默跳过,不阻断主流程 | ✅ vault_* 方法 | + +--- + +## 7. 后端:CredentialVault 集成 + +### 概述 + +CredentialVault 是一个可选组件(本地部署时自动零配置启用)。插件**不应假设** Vault 一定存在——Vault 不可用时,基类自动跳过,回退到纯 Session 模式。 + +> **重要**:插件作者**不需要手写** Vault 辅助函数。`PluginAuthHandler` 基类已内置 +> `vault_store()`、`vault_delete()`、`vault_retrieve()`、`try_vault_login()` 方法, +> 并在 login / logout / status 路由中自动调用。 + +### 如果需要在自定义路由中访问 Vault + +在极少数情况下(如自定义的凭证管理路由),可通过 handler 实例直接调用: + +```python +# 前提:_handler 是你的 PluginAuthHandler 子类实例 +_handler.vault_store({"username": "alice", "password": "pw"}) +_handler.vault_delete() +creds = _handler.vault_retrieve() # → dict | None +``` + +这些方法内部已处理 Vault 不可用、identity 解析失败等异常,不会抛出。 + +### Vault source_key 命名 + +`source_key` 统一使用 `manifest()["id"]`(即插件 ID),如 `"superset"`、`"metabase"`。一个插件只有一个 source_key,不需要更细粒度的区分。 + +### 匿名用户与 Vault + +Vault 按 `get_identity_id()` 的返回值隔离凭证。匿名用户的 identity 是 `browser:xxx`(来自浏览器 localStorage UUID),因此: + +- 匿名用户**可以**使用 Vault 保存凭证 +- 但凭证绑定的是浏览器 UUID,**不跨设备、不跨浏览器** +- 清除 localStorage 即失去关联 +- 这是可接受的行为——需要可靠凭证存储的用户应配置 SSO + +### Vault 
不可用时的行为 + +| Vault 状态 | 插件行为 | +|------------|---------| +| 未配置(`CREDENTIAL_VAULT_KEY` 未设置) | `get_credential_vault()` 返回 None,插件跳过 Vault 步骤,直接展示登录表单 | +| 已配置但无凭证 | `vault.retrieve()` 返回 None,跳过 Vault 步骤 | +| 已配置且有凭证 | 取出并验证,成功则自动登录,失败则返回 vault_stale | +| 密钥已更换(旧凭证无法解密) | `vault.retrieve()` 返回 None(解密失败静默返回 None,不崩溃) | + +--- + +## 8. 前端:目录结构与约定 + +``` +src/plugins// +├── index.ts # 必须:default export DataSourcePluginModule +├── Panel.tsx # 必须:主面板组件 +├── Login.tsx # 建议:登录组件 +├── Catalog.tsx # 建议:数据目录组件 +├── api.ts # 建议:封装后端 API 调用 +└── locales/ + ├── en.json # 建议:英文翻译 + └── zh.json # 建议:中文翻译 +``` + +### 发现机制 + +前端使用 Vite 的 `import.meta.glob` 在**构建时**扫描 `src/plugins/*/index.{ts,tsx}`。你只需要在正确的位置创建文件,无需手动注册。 + +--- + +## 9. 前端:模块契约 + +```typescript +// src/plugins/metabase/index.ts + +import type { DataSourcePluginModule } from '../types'; +import { MetabasePanel } from './MetabasePanel'; +import en from './locales/en.json'; +import zh from './locales/zh.json'; + +const MetabaseIcon: React.FC<{ sx?: object }> = (props) => (/* SVG icon */); + +const metabasePlugin: DataSourcePluginModule = { + id: 'metabase', // 必须与后端 manifest.id 一致 + Icon: MetabaseIcon, // 数据源菜单中的图标 + Panel: MetabasePanel, // 主面板组件 + locales: { en, zh }, // 可选:国际化 +}; + +export default metabasePlugin; +``` + +### Panel 组件接口 + +```typescript +interface PluginPanelProps { + config: PluginConfig; // 后端 get_frontend_config() 的内容 + callbacks: PluginHostCallbacks; // 框架提供的回调 +} + +interface PluginHostCallbacks { + onDataLoaded: (info: DataLoadedInfo) => void; // 数据加载完成后调用 + onClose: () => void; // 关闭对话框 +} +``` + +**数据加载完成后必须调用 `callbacks.onDataLoaded()`**,框架会据此刷新 Workspace 表列表。 + +### 登录面板中的"记住凭证" + +如果插件支持密码登录,登录表单中应提供"记住凭证"(Remember credentials)复选框: + +```typescript +const [remember, setRemember] = useState(false); + +// 登录请求中传递 remember 标志 +const loginPayload = { username, password, remember }; +``` + +**复选框必须附带注释说明**,避免用户与浏览器内置的"记住密码"功能混淆: + +```typescript +} + 
label={t('plugin.xxx.rememberCredentials')} +/> + + {t('plugin.xxx.rememberCredentialsHint')} + +``` + +注释文案应说明:**凭证存储在服务器端(非浏览器),便于自动化 Agent 代用户拉取数据**。 + +当 `auth/status` 返回 `vault_stale: true` 时,应显示明确的提示: + +```typescript +if (authStatus.vault_stale) { + showWarning("已保存的凭证已失效,请重新输入"); +} +``` + +--- + +## 10. 前端:国际化 + +每个插件自带翻译文件,通过 `locales` 字段导出。框架在启动时自动合并到全局 i18n。 + +```json +// locales/en.json +{ + "plugin.metabase.name": "Metabase", + "plugin.metabase.login.title": "Connect to Metabase", + "plugin.metabase.login.remember": "Remember credentials" +} +``` + +**命名规范**:`plugin...`,避免与其他插件或核心翻译冲突。 + +--- + +## 11. 环境变量命名规范 + +| 前缀 | 用途 | 示例 | +|------|------|------| +| `PLG__` | 插件专属 | `PLG_SUPERSET_URL`、`PLG_METABASE_URL` | +| `PLG__SSO_*` | SSO 透传相关 | `PLG_SUPERSET_SSO_LOGIN_URL` | + +- `required_env` 中列出的变量全部存在时,插件才启用 +- 管理员通过 `.env` 文件或 Docker environment 配置 +- `get_frontend_config()` 可以暴露 URL 等非敏感值,但**绝不暴露密钥** + +--- + +## 12. 测试规范 + +### 测试文件位置 + +``` +tests/backend/unit/test__*.py # 单元测试 +tests/backend/integration/test__*.py # 集成测试 +tests/backend/fixtures// # API 响应 fixture +tests/frontend/unit/plugins// # 前端测试 +``` + +### 外部 API Mock 策略 + +不要直接调用真实外部系统。使用 fixture JSON(从真实系统录制)+ `unittest.mock.patch`: + +```python +@pytest.fixture +def mock_responses(fixture_dir): + def _load(name): + return json.loads((fixture_dir / "metabase" / name).read_text()) + return _load +``` + +### Vault 测试 + +测试 Vault 集成时使用 `tmp_path` 下的**真实 SQLite 文件 + 真实 Fernet 密钥**,不 mock 加密逻辑: + +```python +@pytest.fixture +def vault(tmp_path): + from cryptography.fernet import Fernet + key = Fernet.generate_key().decode() + return LocalCredentialVault(tmp_path / "test.db", key) +``` + +--- + +## 13. 
核心代码零修改原则 + +添加新插件时,以下文件**不得修改**: + +| 文件 | 职责 | +|------|------| +| `plugins/__init__.py` | 自动发现逻辑 | +| `plugins/base.py` | 基类定义 | +| `plugins/auth_base.py` | 认证基类(Vault 生命周期) | +| `plugins/data_writer.py` | 数据写入工具 | +| `src/plugins/types.ts` | 前端类型 | +| `src/plugins/registry.ts` | 前端注册表 | +| `src/plugins/PluginHost.tsx` | 前端容器 | +| `src/plugins/index.ts` | 前端导出 | +| `app.py` | 应用入口 | +| `credential_vault/*` | 凭证保险箱框架 | +| `credential_routes.py` | 凭证管理 API | + +如果你发现必须修改以上文件才能完成新插件,请先提 issue 讨论框架层面的修复方案。 + +--- + +## 14. Checklist:插件上线前检查 + +### 后端 + +- [ ] `plugin_class` 是 `DataSourcePlugin` 子类 +- [ ] `manifest()` 包含 `id`、`name`、`env_prefix`、`required_env` +- [ ] `create_blueprint()` 的 `url_prefix` 是 `/api/plugins//` +- [ ] `get_frontend_config()` 不包含任何密钥 +- [ ] Session key 使用 `plugin__` 前缀 +- [ ] 数据写入使用 `PluginDataWriter`,不直接调用 workspace +- [ ] **认证路由继承 `PluginAuthHandler` 基类**(代码层面约束 Vault 生命周期) +- [ ] 只实现 `do_login`、`do_clear_session`、`get_session_auth`、`get_current_user` 四个方法 +- [ ] 退出时 Session + Vault 同时清除(由基类自动保证) +- [ ] Vault 不可用时优雅降级(由基类自动保证) +- [ ] HTTP 错误有意义的错误信息和正确的状态码 +- [ ] 缺少必须环境变量时不启用,不报错 + +### 前端 + +- [ ] `index.ts` default export 包含 `id`、`Icon`、`Panel` +- [ ] `id` 与后端 `manifest.id` 一致 +- [ ] 数据加载成功后调用 `callbacks.onDataLoaded()` +- [ ] 登录表单包含"记住凭证"复选框 + **注释说明**(服务器端存储,供自动化 Agent 使用) +- [ ] `vault_stale` 时显示"凭证已失效"提示 +- [ ] 翻译 key 使用 `plugin..*` 前缀 +- [ ] 不引用其他插件的代码 + +### 配置 + +- [ ] `.env.template` 中有本插件的环境变量说明 +- [ ] `required_env` 中的变量缺失时,插件自动跳过 + +### 测试 + +- [ ] 认证路由测试(含 Vault 自动取用、vault_stale、remember) +- [ ] 目录路由测试 +- [ ] 数据加载路由测试 +- [ ] fixture 文件从真实系统录制 +- [ ] 核心文件 git diff 为空(零修改验证) diff --git a/docs-cn/5.1-superset-sso-oauth-config-guide.md b/docs-cn/5.1-superset-sso-oauth-config-guide.md new file mode 100644 index 00000000..71cc8c45 --- /dev/null +++ b/docs-cn/5.1-superset-sso-oauth-config-guide.md @@ -0,0 +1,582 @@ +# Superset SSO + Data Formulator 对接配置指南 + +本文档面向 **Superset 管理员**,说明如何在 Superset 中配置 SSO 登录并与 Data Formulator(以下简称 
DF)正确对接。 + +涵盖三部分内容: +1. Superset 接入 SSO(OAuth2 登录) +2. DF 桥接端点(让 DF 能获取 Superset JWT) +3. JWT → g.user 中间件(让 DF 的 API 调用拥有正确权限) + +--- + +## 1. 前置条件 + +- Superset 已部署并可正常访问 +- 已有 OAuth2/OIDC 身份提供商(IdP),并为 Superset 注册了一个 **Confidential Client** +- 已知以下信息: + - IdP 的 discovery 端点 URL + - Superset 的 `client_id` 和 `client_secret` + +--- + +## 2. 工作原理 + +``` +DF 前端 Superset + │ │ + │ ① 用户点击"SSO 登录" │ + │ window.open 打开弹窗 │ + │ ───────────────────────────────────> │ /df-sso-bridge/?df_origin=... + │ │ + │ │ ② 未登录 → 重定向到 /login/ → SSO 认证 + │ │ 已登录 → 直接跳到 ④ + │ │ + │ │ ③ SSO 认证成功 → Superset 创建 Session + │ │ 重定向回 /df-sso-bridge/ + │ │ + │ │ ④ 为当前用户签发 JWT + │ │ 通过 postMessage 发送给 DF + │ │ 弹窗自动关闭 + │ ⑤ DF 收到 JWT │ + │ <─────────────────────────────────── │ + │ │ + │ ⑥ 后续 DF 用 JWT 调用 Superset API │ + │ ───────────────────────────────────> │ Authorization: Bearer + │ │ JWT 中间件 → g.user → 权限正常 +``` + +--- + +## 3. 文件结构 + +所有配置集中在两个文件中,放在 Superset 的 `PYTHONPATH` 下: + +``` +superset_config.py ← Superset 主配置(导入 oauth_config) +oauth_config.py ← SSO 认证 + DF 桥接(独立文件,便于维护) +``` + +> **参考示例**:完整的配置示例文件见 [`docs-cn/config-examples/superset/`](config-examples/superset/),可作为起点按需修改。 + +--- + +## 4. 
oauth_config.py — SSO 认证 + DF 桥接 + +此文件包含三部分: +- SSO 用户信息解析(`SsoHandler`) +- 自定义 Security Manager(用户创建、角色同步) +- DF SSO 桥接端点(`SSOBridgeView`) + +### 4.1 完整文件 + +```python +import logging +import json +import secrets +import requests +from urllib.parse import quote + +from flask_appbuilder.security.manager import AUTH_OAUTH +from flask_appbuilder import BaseView, expose +from flask import g, request, Response, redirect, session +from flask_login import current_user +from superset.security import SupersetSecurityManager +from superset import db + +logger = logging.getLogger(__name__) + + +# ============================================================================= +# 第一部分:SSO 用户信息解析 +# ============================================================================= + +class SsoHandler: + """ + 从 SSO 的 userinfo 端点解析用户信息。 + + 自定义时只需修改: + - userinfo_url: 你的 SSO userinfo 端点地址 + - role_mapping: SSO 角色代码 → Superset 角色名的映射表 + """ + userinfo_url = 'https://your-sso.example.com/api/v1/oauth2/userinfo' + + # SSO 角色代码 → Superset 角色名 + # 未在此映射中的 SSO 角色会被忽略 + role_mapping = { + 'admin': 'Admin', + 'analyst': 'Alpha', + 'viewer': 'Gamma', + } + + @classmethod + def parse_user_details(cls, access_token): + """用 access_token 调 userinfo 端点,返回标准化的用户信息字典。""" + resp = requests.get( + cls.userinfo_url, + headers={'Authorization': f'Bearer {access_token}'}, + timeout=10, + ) + resp.raise_for_status() + data = resp.json() + logger.info("SSO userinfo: %s", data) + + username = data.get('preferred_username') or data.get('sub') + if not username: + logger.error("SSO userinfo 缺少 username") + return None + + full_name = data.get('name') or username + email = data.get('email') or f"{username}@example.com" + name_parts = full_name.split(' ', 1) + + return { + 'username': username, + 'email': email, + 'first_name': name_parts[0], + 'last_name': name_parts[1] if len(name_parts) > 1 else '', + 'sso_roles': data.get('sso_roles', []), + } + + +# 
============================================================================= +# 第二部分:自定义 Security Manager +# ============================================================================= + +class CustomSsoSecurityManager(SupersetSecurityManager): + """ + 扩展 Superset 默认的 Security Manager: + 1. oauth_user_info: 从 SSO 获取用户信息 + 2. auth_user_oauth: 创建/更新用户 + 同步角色 + """ + + def oauth_user_info(self, provider, response=None): + access_token = response.get('access_token') if response else None + if not access_token: + logger.error("OAuth 响应中缺少 access_token") + return super().oauth_user_info(provider, response) + + try: + user_details = SsoHandler.parse_user_details(access_token) + if user_details: + user_details['active_provider'] = provider + return user_details + except Exception as e: + logger.error("OAuth 用户信息获取失败: %s", e, exc_info=True) + return None + + def auth_user_oauth(self, userinfo): + if not userinfo or not userinfo.get('username'): + return None + + sso_roles = userinfo.get('sso_roles', []) + + # 检查用户是否有任何已映射的角色 + has_mapped_role = any(r in SsoHandler.role_mapping for r in sso_roles) + if not has_mapped_role: + from flask import flash + flash("登录失败:您的账号尚未被分配 Superset 访问权限,请联系管理员。", "danger") + return None + + username = userinfo['username'] + user = self.find_user(username=username) + + if not user: + if self.auth_user_registration: + user = super().auth_user_oauth(userinfo) + else: + return None + else: + user.first_name = userinfo.get('first_name', user.first_name) + user.last_name = userinfo.get('last_name', user.last_name) + user.email = userinfo.get('email', user.email) + + # 同步角色 + if sso_roles: + self._sync_roles(user, sso_roles) + + return user + + def _sync_roles(self, user, sso_roles): + """将 SSO 角色映射为 Superset 角色。本地 Admin 身份始终保留。""" + target_names = set() + + for sr in sso_roles: + mapped = SsoHandler.role_mapping.get(sr) + if mapped: + target_names.add(mapped) + + # 保护:如果用户本地已是 Admin,保留 + if any(r.name == 'Admin' for r in user.roles): + 
target_names.add('Admin') + + new_roles = [] + for name in target_names: + role = self.find_role(name) + if role: + new_roles.append(role) + else: + logger.warning("Superset 中找不到角色: %s", name) + + if not new_roles: + return + + try: + user.roles = new_roles + db.session.merge(user) + db.session.commit() + logger.info("用户 %s 角色已同步: %s", user.username, [r.name for r in new_roles]) + except Exception as e: + db.session.rollback() + logger.error("角色同步失败: %s", e) + + +# ============================================================================= +# 第三部分:Data Formulator SSO 桥接端点 +# ============================================================================= + +class SSOBridgeView(BaseView): + """ + DF 通过弹窗打开此端点,已登录用户会自动获得 Superset JWT, + 通过 postMessage 传回 DF 前端。 + + 工作流程: + 1. DF 前端 window.open(/df-sso-bridge/?df_origin=http://df-host:5567) + 2. 如果用户未登录 → 重定向到 Superset 登录页 → SSO → 登录后回到此端点 + 3. 如果用户已登录 → 直接签发 JWT → postMessage 传给 DF → 关闭弹窗 + """ + route_base = "/df-sso-bridge" + + @staticmethod + def _is_real_logged_in_user(): + if not session.get("_user_id"): + return False + if getattr(current_user, "is_anonymous", True): + return False + if not getattr(current_user, "is_authenticated", False): + return False + if not getattr(current_user, "id", None): + return False + username = getattr(current_user, "username", "") or "" + if username.lower() in ("public", "anonymous", "guest", ""): + return False + return True + + @expose("/", methods=["GET"]) + def df_sso_bridge(self): + if not self._is_real_logged_in_user(): + next_url = request.full_path.rstrip("?") + return redirect(f"/login/?next={quote(next_url)}") + + from flask_jwt_extended import create_access_token, create_refresh_token + + user_id_str = str(current_user.id) + additional_claims = { + "user": { + "id": current_user.id, + "username": current_user.username, + "first_name": getattr(current_user, "first_name", "") or "", + "last_name": getattr(current_user, "last_name", "") or "", + } + } + + access_token = 
create_access_token( + identity=user_id_str, fresh=True, additional_claims=additional_claims, + ) + refresh_token = create_refresh_token( + identity=user_id_str, additional_claims=additional_claims, + ) + + df_origin = request.args.get("df_origin", "*") + user_data = additional_claims["user"] + csp_nonce = getattr(g, "csp_nonce", "") or secrets.token_urlsafe(16) + + html = f""" +SSO Bridge + +

正在同步登录状态...

+ +""" + return Response(html, mimetype="text/html") + + +# ============================================================================= +# 导出配置(由 superset_config.py 导入) +# ============================================================================= + +OAUTH_CONFIG = { + 'AUTH_TYPE': AUTH_OAUTH, + 'AUTH_USER_REGISTRATION': True, + 'AUTH_USER_REGISTRATION_ROLE': "Public", + 'AUTH_OAUTH_ROLES_SYNC': True, + 'AUTH_OAUTH_ROLES_UPDATE': True, + 'CUSTOM_SECURITY_MANAGER': CustomSsoSecurityManager, + 'OAUTH_PROVIDERS': [ + { + 'name': 'your-sso', # ← 改为你的 provider 名称 + 'token_key': 'access_token', + 'icon': 'fa-key', + 'remote_app': { + 'client_id': 'YOUR_CLIENT_ID', # ← 改为你的 client_id + 'client_secret': 'YOUR_SECRET', # ← 改为你的 client_secret + 'server_metadata_url': 'https://your-sso.example.com/.well-known/openid-configuration', # ← 改为你的 discovery URL + 'client_kwargs': { + 'scope': 'openid profile email', + }, + }, + } + ] +} +``` + +### 4.2 需要修改的地方 + +使用前,将以下占位符替换为你的实际值: + +| 占位符 | 说明 | 示例 | +|--------|------|------| +| `SsoHandler.userinfo_url` | SSO 的 userinfo 端点 | `https://sso.example.com/api/v1/oauth2/userinfo` | +| `SsoHandler.role_mapping` | SSO 角色 → Superset 角色的映射表 | `{'admin': 'Admin', 'viewer': 'Gamma'}` | +| `YOUR_CLIENT_ID` | Superset 在 IdP 上的 client_id | `jp3zm0QtPN...` | +| `YOUR_SECRET` | Superset 在 IdP 上的 client_secret | `JOFSyzoQvY...` | +| `server_metadata_url` | IdP 的 discovery 端点 | `https://sso.example.com/.well-known/openid-configuration` | +| `your-sso` | provider 名称(自定义,用于日志) | `keycloak`、`okta`、`y-sso` | + +--- + +## 5. 
superset_config.py — 导入 OAuth 配置 + 注册中间件 + +在你的 `superset_config.py` 文件**末尾**追加以下内容: + +```python +# ============================================================================= +# 导入 OAuth 配置 +# ============================================================================= + +try: + from oauth_config import OAUTH_CONFIG + globals().update(OAUTH_CONFIG) + print("OAuth 配置加载成功") +except ImportError as e: + print(f"OAuth 配置加载失败: {e}") + + +# ============================================================================= +# 注册 DF SSO Bridge + JWT->g.user 中间件 +# ============================================================================= + +TALISMAN_ENABLED = False # Bridge 端点需要内联 script + +try: + def FLASK_APP_MUTATOR(app): + """Superset 启动时调用,注册 DF 桥接视图和 JWT 中间件。""" + + # 1. 注册 SSO Bridge 视图 + from superset import appbuilder + from oauth_config import SSOBridgeView + appbuilder.add_view_no_menu(SSOBridgeView()) + + # 2. JWT -> g.user 同步中间件 + # 当 DF 通过 JWT Bearer Token 调用 Superset REST API 时, + # flask_jwt_extended 能识别用户,但 Flask-AppBuilder 的 + # 安全过滤器依赖 g.user 做权限判断。此中间件确保两者同步。 + import logging + _jwt_logger = logging.getLogger("df_jwt_bridge") + + @app.before_request + def _ensure_user_from_jwt(): + from flask import g, request as req + from flask_login import current_user, login_user + + # 已有会话用户 → 同步到 g.user + if getattr(current_user, "is_authenticated", False) \ + and not getattr(current_user, "is_anonymous", True): + if not hasattr(g, "user") or g.user is None \ + or getattr(g.user, "is_anonymous", True): + g.user = current_user._get_current_object() + return + + # 无会话 → 尝试从 JWT 恢复 + auth_header = req.headers.get("Authorization", "") + if not auth_header.lower().startswith("bearer "): + return + + try: + from flask_jwt_extended import verify_jwt_in_request, get_jwt_identity + verify_jwt_in_request(optional=True) + identity = get_jwt_identity() + if not identity: + return + + sm = app.appbuilder.sm + user = sm.get_user_by_id(int(identity)) + if user and 
getattr(user, "is_active", False):
+                    login_user(user)
+                    g.user = user
+            except Exception as exc:
+                _jwt_logger.debug("JWT->g.user 同步跳过: %s", exc)
+
+    print("DF SSO Bridge + JWT 中间件配置加载成功")
+
+except ImportError as e:
+    print(f"DF SSO Bridge 配置加载失败: {e}")
+```
+
+---
+
+## 6. DF 端配置
+
+在 DF 的 `.env` 中只需一行即可启用 Superset 插件:
+
+```env
+PLG_SUPERSET_URL=http://你的SUPERSET地址:8088/
+```
+
+DF 会自动将 SSO 登录 URL 设为 `{PLG_SUPERSET_URL}/df-sso-bridge/`。
+
+---
+
+## 7. 验证步骤
+
+### 7.1 验证 SSO 登录
+
+访问 Superset 登录页,应看到 SSO 登录按钮。点击后跳转到 IdP 登录,登录成功后返回 Superset 首页。
+
+### 7.2 验证 Bridge 端点
+
+浏览器已登录 Superset 的状态下访问:
+
+```
+http://SUPERSET地址:8088/df-sso-bridge/?df_origin=http://test
+```
+
+预期:页面显示"正在同步登录状态...",然后显示"登录成功,请关闭此窗口并返回 Data Formulator。"(因为不是从弹窗打开,没有 `window.opener`)
+
+### 7.3 验证 JWT
+
+在 7.2 的页面源码中找到 `access_token` 的值,然后:
+
+```bash
+curl -H "Authorization: Bearer <access_token>" http://SUPERSET地址:8088/api/v1/me/
+```
+
+预期:返回当前用户信息 JSON。
+
+### 7.4 端到端测试
+
+1. 启动 DF(确保 `.env` 中配置了 `PLG_SUPERSET_URL`)
+2. 打开 DF → 数据上传 → 选择 Superset 标签
+3. 点击"SSO 登录"
+4. 弹窗打开 → SSO 登录(如果浏览器已有 SSO 会话则自动完成)→ 弹窗关闭
+5. DF 显示 Superset 数据集列表
+
+---
+
+## 8. 
安全说明 + +| 关注点 | 说明 | +|--------|------| +| bridge 端点的访问控制 | 只有已通过 Superset 认证的用户可以获取 JWT,未登录自动重定向到登录页 | +| JWT 传输安全 | 通过 `postMessage` 只发给 `window.opener`(即 DF 页面),`targetOrigin` 参数防止消息发到错误的窗口 | +| client_secret 安全 | 只存在于 Superset 服务端(`oauth_config.py`),不暴露给浏览器 | +| 角色权限 | SSO 角色通过映射表同步到 Superset,本地 Admin 身份始终保留 | +| TALISMAN | Bridge 端点需要内联 ` +""" + return Response(html, mimetype="text/html") + + +# ============================================================================= +# 导出配置(由 superset_config.py 导入) +# ============================================================================= + +OAUTH_CONFIG = { + 'AUTH_TYPE': AUTH_OAUTH, + 'AUTH_USER_REGISTRATION': True, + 'AUTH_USER_REGISTRATION_ROLE': "Public", + 'AUTH_OAUTH_ROLES_SYNC': True, + 'AUTH_OAUTH_ROLES_UPDATE': True, + 'CUSTOM_SECURITY_MANAGER': CustomSsoSecurityManager, + 'OAUTH_PROVIDERS': [ + { + 'name': '', # 例: 'keycloak', 'okta' + 'token_key': 'access_token', + 'icon': 'fa-key', + 'remote_app': { + 'client_id': '', + 'client_secret': '', + 'server_metadata_url': '', # 例: https://sso.example.com/.well-known/openid-configuration + 'client_kwargs': { + 'scope': 'openid profile email', + }, + }, + } + ] +} diff --git a/docs-cn/config-examples/superset/superset_config.py b/docs-cn/config-examples/superset/superset_config.py new file mode 100644 index 00000000..8f083bbd --- /dev/null +++ b/docs-cn/config-examples/superset/superset_config.py @@ -0,0 +1,158 @@ +""" +Superset 配置参考(superset_config.py) + +本文件为 Superset + SSO + DF 对接的配置参考。 +将本文件放在 Superset 的 PYTHONPATH 下,命名为 superset_config.py。 + +重点关注: + - Public 角色配置(允许未登录用户浏览指定数据) + - OAuth 配置导入 + - FLASK_APP_MUTATOR(注册 SSO Bridge + JWT→g.user 中间件) +""" + +import logging +import sys + +# ============================================================================= +# 基础配置 +# ============================================================================= + +SECRET_KEY = '' # 必须修改为随机字符串 + +SQLALCHEMY_DATABASE_URI = 
'postgresql://superset:password@localhost:5432/superset' + +BABEL_DEFAULT_LOCALE = 'zh' +LANGUAGES = { + 'en': {'flag': 'us', 'name': 'English'}, + 'zh': {'flag': 'cn', 'name': 'Chinese'}, +} + +# ============================================================================= +# Public 角色配置(允许未登录用户浏览数据) +# +# 启用后,未登录用户会获得 Public 角色的权限(继承自 Gamma)。 +# 这允许你设置"公开仪表盘"供任何人查看,无需登录。 +# +# 副作用:启用后 current_user.is_authenticated 对匿名用户也可能返回 +# True,导致 DF 的 JWT 认证用户被误判为 Public 用户。 +# 解决方案见下方 FLASK_APP_MUTATOR 中的 _ensure_user_from_jwt 中间件。 +# ============================================================================= + +GUEST_ROLE_NAME = "Public" +PUBLIC_ROLE_LIKE = "Gamma" + +# ============================================================================= +# 功能开关 +# ============================================================================= + +FEATURE_FLAGS = { + "DASHBOARD_RBAC": True, # 仪表盘级别的角色权限控制 + "ENABLE_TEMPLATE_PROCESSING": True, # Jinja 模板支持 +} + +# ============================================================================= +# CORS(DF 跨域调用 Superset API 时需要) +# ============================================================================= + +ENABLE_CORS = True +CORS_OPTIONS = { + 'supports_credentials': True, + 'allow_headers': ['*'], + 'resources': ['*'], + 'origins': ['*'], # 生产环境请限制为 DF 的实际域名 +} + +# ============================================================================= +# 其他 +# ============================================================================= + +FAB_API_SWAGGER_UI = True +SQL_MAX_ROW = 500000 +DISPLAY_MAX_ROW = 500000 + +DEBUG = False +SESSION_COOKIE_HTTPONLY = True +RECAPTCHA_PUBLIC_KEY = '' +RECAPTCHA_PRIVATE_KEY = '' + +LOG_LEVEL = 'INFO' +logging.basicConfig( + level=LOG_LEVEL, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[logging.StreamHandler(sys.stdout)], +) + +# ============================================================================= +# 导入 OAuth 配置(从 oauth_config.py) +# 
============================================================================= + +try: + from oauth_config import OAUTH_CONFIG + globals().update(OAUTH_CONFIG) + print("OAuth 配置加载成功") +except ImportError as e: + print(f"OAuth 配置加载失败: {e}") + +# ============================================================================= +# SSO Bridge + JWT→g.user 中间件 +# +# 解决两个问题: +# 1. 注册 /df-sso-bridge/ 视图,让 DF 能通过弹窗获取 Superset JWT +# 2. JWT→g.user 同步:当 DF 用 JWT Bearer Token 调用 Superset REST API 时, +# flask_jwt_extended 能识别用户(/api/v1/me/ 正常),但 Flask-AppBuilder +# 的安全过滤器(DatasourceFilter、DashboardAccessFilter)依赖 g.user +# 做权限判断。如果 g.user 未从 JWT 同步,过滤器降级为 Public 角色, +# 导致登录用户只能看到 Public 权限的数据。 +# ============================================================================= + +TALISMAN_ENABLED = False # Bridge 端点需要内联 ' + sanitized = sanitize_error_message(raw) + + assert "' + result = sanitize_error_message(msg) + assert " +""" + return Response(html, mimetype="text/html") + + +# -- Register the blueprint with Superset ------------------------------------ +BLUEPRINTS = [df_sso_bp] + +# Allow embedding in popups from DF dev server origins +TALISMAN_ENABLED = False + +# CORS is configured via environment variables in docker-compose.yml +# (SUPERSET_CORS_ENABLED / SUPERSET_CORS_ORIGINS). +# Do NOT set ENABLE_CORS here — the official image lacks flask-cors. + +# Feature flags — ensure native dashboard filters are enabled for filter testing. +FEATURE_FLAGS = { + "DASHBOARD_NATIVE_FILTERS": True, + "DASHBOARD_CROSS_FILTERS": True, + "DASHBOARD_NATIVE_FILTERS_SET": True, +} diff --git a/tests/database-dockers/superset/test_superset_data_connector.py b/tests/database-dockers/superset/test_superset_data_connector.py new file mode 100644 index 00000000..a35da222 --- /dev/null +++ b/tests/database-dockers/superset/test_superset_data_connector.py @@ -0,0 +1,427 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Tests for SupersetLoader via DataConnector routes. 
+ +All Superset API calls are mocked — no real Superset instance needed. + +Covers: +- JWT-based auth (token mode): connect / disconnect / status +- Dashboard → dataset hierarchy browsing +- "All Datasets" synthetic namespace +- Dataset metadata retrieval +- Data fetch via SQL Lab (mocked) +- Token refresh / re-login flow +- Frontend config for Superset sources +""" +from __future__ import annotations + +import base64 +import json +import time +from typing import Any +from unittest.mock import MagicMock, patch + +import flask +import pyarrow as pa +import pytest + +from data_formulator.data_connector import DataConnector +from data_formulator.data_loader.external_data_loader import CatalogNode + +pytestmark = [pytest.mark.backend, pytest.mark.plugin] + + +# ------------------------------------------------------------------ +# JWT helpers +# ------------------------------------------------------------------ + +def _make_jwt(exp: float | None = None, sub: str = "admin") -> str: + """Build a fake JWT with a valid exp claim.""" + if exp is None: + exp = time.time() + 3600 # 1 hour from now + header = base64.urlsafe_b64encode(json.dumps({"alg": "HS256"}).encode()).rstrip(b"=").decode() + payload = base64.urlsafe_b64encode( + json.dumps({"sub": sub, "exp": exp}).encode() + ).rstrip(b"=").decode() + sig = base64.urlsafe_b64encode(b"fake-signature").rstrip(b"=").decode() + return f"{header}.{payload}.{sig}" + + +def _expired_jwt() -> str: + return _make_jwt(exp=time.time() - 100) + + +# ------------------------------------------------------------------ +# Mock Superset API +# ------------------------------------------------------------------ + +class MockSupersetClient: + """Simulates SupersetClient API responses.""" + + def __init__(self, url): + self.url = url + + def list_datasets(self, token, page=0, page_size=100): + datasets = [ + {"id": 1, "table_name": "orders_fact", "schema": "public", + "database": {"id": 1, "database_name": "analytics"}, "row_count": 50000}, + 
{"id": 2, "table_name": "users_dim", "schema": "public", + "database": {"id": 1, "database_name": "analytics"}, "row_count": 10000}, + ] + start = page * page_size + batch = datasets[start:start + page_size] + return {"result": batch, "count": len(datasets)} + + def list_dashboards(self, token, page=0, page_size=500): + return { + "result": [ + {"id": 10, "dashboard_title": "Sales Dashboard"}, + {"id": 20, "dashboard_title": "User Analytics"}, + ], + "count": 2, + } + + def get_dashboard_datasets(self, token, dashboard_id): + if dashboard_id == 10: + return { + "result": [ + {"id": 1, "table_name": "orders_fact", "schema": "public", + "database": {"id": 1, "database_name": "analytics"}, "row_count": 50000}, + ] + } + if dashboard_id == 20: + return { + "result": [ + {"id": 2, "table_name": "users_dim", "schema": "public", + "database": {"id": 1, "database_name": "analytics"}, "row_count": 10000}, + ] + } + return {"result": []} + + def get_dataset_detail(self, token, dataset_id): + datasets = { + 1: { + "id": 1, "table_name": "orders_fact", "schema": "public", + "database": {"id": 1, "database_name": "analytics"}, + "columns": [ + {"column_name": "order_id", "type": "INT"}, + {"column_name": "customer_id", "type": "INT"}, + {"column_name": "amount", "type": "DECIMAL(10,2)"}, + {"column_name": "order_date", "type": "TIMESTAMP"}, + ], + "row_count": 50000, + "description": "Main orders fact table", + "kind": "physical", + }, + 2: { + "id": 2, "table_name": "users_dim", "schema": "public", + "database": {"id": 1, "database_name": "analytics"}, + "columns": [ + {"column_name": "user_id", "type": "INT"}, + {"column_name": "name", "type": "VARCHAR"}, + {"column_name": "email", "type": "VARCHAR"}, + ], + "row_count": 10000, + "kind": "physical", + }, + } + return datasets.get(dataset_id, {}) + + def create_sql_session(self, token): + return {"session_id": "mock-session-123"} + + def execute_sql_with_session(self, session, db_id, sql, schema, limit): + return { + "data": [ 
+ {"order_id": 1, "customer_id": 100, "amount": 99.99, "order_date": "2025-01-01"}, + {"order_id": 2, "customer_id": 101, "amount": 150.00, "order_date": "2025-01-02"}, + {"order_id": 3, "customer_id": 100, "amount": 75.50, "order_date": "2025-01-03"}, + ] + } + + +class MockAuthBridge: + def __init__(self, url): + self.url = url + + def login(self, username, password): + if username == "admin" and password == "admin": + return {"access_token": _make_jwt(), "refresh_token": _make_jwt()} + raise ValueError("Invalid credentials") + + def refresh_token(self, refresh_token): + return {"access_token": _make_jwt()} + + +# ------------------------------------------------------------------ +# Fixtures +# ------------------------------------------------------------------ + +@pytest.fixture(autouse=True) +def _mock_superset_imports(): + """Patch the Superset helpers in the loader module.""" + import data_formulator.data_loader.superset_data_loader as sdl + old_client, old_bridge = sdl.SupersetClient, sdl.SupersetAuthBridge + sdl.SupersetClient = MockSupersetClient + sdl.SupersetAuthBridge = MockAuthBridge + yield + sdl.SupersetClient, sdl.SupersetAuthBridge = old_client, old_bridge + + +@pytest.fixture +def app(): + _app = flask.Flask(__name__) + _app.config["TESTING"] = True + _app.secret_key = "test" + return _app + + +@pytest.fixture +def source(): + from data_formulator.data_loader.superset_data_loader import SupersetLoader + return DataConnector.from_loader( + SupersetLoader, + source_id="superset", + display_name="Test Superset", + ) + + +@pytest.fixture +def client(app, source): + app.register_blueprint(source.create_blueprint()) + return app.test_client() + + +@pytest.fixture +def connected_client(client): + with patch.object(DataConnector, "_get_identity", return_value="test-user"): + resp = client.post("/api/connectors/superset/auth/connect", json={ + "params": {"url": "https://bi.example.com", "username": "admin", "password": "admin"}, + }) + assert 
resp.status_code == 200 + yield client + + +# ================================================================== +# Tests: Auth (JWT token mode) +# ================================================================== + +class TestSupersetAuth: + + def test_connect_success(self, client): + with patch.object(DataConnector, "_get_identity", return_value="test-user"): + resp = client.post("/api/connectors/superset/auth/connect", json={ + "params": {"url": "https://bi.example.com", "username": "admin", "password": "admin"}, + }) + data = resp.get_json() + assert resp.status_code == 200 + assert data["status"] == "connected" + # Hierarchy: dashboard → dataset + keys = [h["key"] for h in data["hierarchy"]] + assert keys == ["dashboard", "dataset"] + + def test_connect_bad_credentials(self, client): + with patch.object(DataConnector, "_get_identity", return_value="test-user"): + resp = client.post("/api/connectors/superset/auth/connect", json={ + "params": {"url": "https://bi.example.com", "username": "admin", "password": "wrong"}, + }) + assert resp.status_code in (400, 500) + data = resp.get_json() + assert data["status"] == "error" + # Must not leak the password + assert "wrong" not in json.dumps(data) + + def test_auth_mode_is_token(self): + from data_formulator.data_loader.superset_data_loader import SupersetLoader + assert SupersetLoader.auth_mode() == "token" + + def test_disconnect_and_status(self, connected_client): + with patch.object(DataConnector, "_get_identity", return_value="test-user"): + resp = connected_client.post("/api/connectors/superset/auth/disconnect") + assert resp.get_json()["status"] == "disconnected" + + resp = connected_client.get("/api/connectors/superset/auth/status") + assert resp.get_json()["connected"] is False + + +# ================================================================== +# Tests: Catalog browsing (dashboard → dataset hierarchy) +# ================================================================== + +class TestSupersetCatalog: + 
+ def test_ls_root_lists_dashboards_and_all_datasets(self, connected_client): + with patch.object(DataConnector, "_get_identity", return_value="test-user"): + resp = connected_client.post("/api/connectors/superset/catalog/ls", json={"path": []}) + data = resp.get_json() + assert resp.status_code == 200 + + names = [n["name"] for n in data["nodes"]] + assert "Sales Dashboard" in names + assert "User Analytics" in names + assert "All Datasets" in names + + # All root nodes should be namespace + for node in data["nodes"]: + assert node["node_type"] == "namespace" + + def test_ls_dashboard_lists_its_datasets(self, connected_client): + """Expand Sales Dashboard → should see orders_fact.""" + with patch.object(DataConnector, "_get_identity", return_value="test-user"): + resp = connected_client.post("/api/connectors/superset/catalog/ls", json={ + "path": ["10"], # Sales Dashboard ID + }) + data = resp.get_json() + assert resp.status_code == 200 + assert len(data["nodes"]) >= 1 + names = [n["name"] for n in data["nodes"]] + assert "orders_fact" in names + for n in data["nodes"]: + assert n["node_type"] == "table" + + def test_ls_all_datasets(self, connected_client): + """Expand 'All Datasets' → should see both datasets.""" + with patch.object(DataConnector, "_get_identity", return_value="test-user"): + resp = connected_client.post("/api/connectors/superset/catalog/ls", json={ + "path": ["__all__"], + }) + data = resp.get_json() + names = [n["name"] for n in data["nodes"]] + assert "orders_fact" in names + assert "users_dim" in names + + def test_ls_with_filter(self, connected_client): + with patch.object(DataConnector, "_get_identity", return_value="test-user"): + resp = connected_client.post("/api/connectors/superset/catalog/ls", json={ + "path": ["__all__"], + "filter": "orders", + }) + nodes = resp.get_json()["nodes"] + assert len(nodes) == 1 + assert nodes[0]["name"] == "orders_fact" + + def test_catalog_metadata(self, connected_client): + """Get metadata for a 
specific dataset.""" + with patch.object(DataConnector, "_get_identity", return_value="test-user"): + resp = connected_client.post("/api/connectors/superset/catalog/metadata", json={ + "path": ["10", "1"], # dashboard_id, dataset_id + }) + data = resp.get_json() + assert resp.status_code == 200 + meta = data["metadata"] + assert meta["dataset_id"] == 1 + assert meta["row_count"] == 50000 + col_names = [c["name"] for c in meta["columns"]] + assert "order_id" in col_names + assert "amount" in col_names + + def test_list_tables_flat(self, connected_client): + with patch.object(DataConnector, "_get_identity", return_value="test-user"): + resp = connected_client.post("/api/connectors/superset/catalog/list_tables", json={}) + data = resp.get_json() + assert len(data["tables"]) == 2 + names = [t["name"] for t in data["tables"]] + assert any("orders_fact" in n for n in names) + assert any("users_dim" in n for n in names) + + +# ================================================================== +# Tests: Data routes +# ================================================================== + +class TestSupersetData: + + def test_preview(self, connected_client): + with patch.object(DataConnector, "_get_identity", return_value="test-user"): + resp = connected_client.post("/api/connectors/superset/data/preview", json={ + "source_table": "1:orders_fact", + "size": 3, + }) + data = resp.get_json() + assert resp.status_code == 200 + assert data["status"] == "success" + assert data["row_count"] > 0 + col_names = {c["name"] for c in data["columns"]} + assert "order_id" in col_names + + def test_import(self, connected_client): + mock_meta = MagicMock() + mock_meta.name = "orders" + mock_meta.row_count = 3 + + with patch.object(DataConnector, "_get_identity", return_value="test-user"), \ + patch("data_formulator.auth.identity.get_identity_id", return_value="test-user"), \ + patch("data_formulator.workspace_factory.get_workspace") as mock_ws: + + from 
data_formulator.data_loader.superset_data_loader import SupersetLoader + with patch.object(SupersetLoader, "ingest_to_workspace", return_value=mock_meta): + resp = connected_client.post("/api/connectors/superset/data/import", json={ + "source_table": "1:orders_fact", + "table_name": "orders", + }) + data = resp.get_json() + assert resp.status_code == 200 + assert data["status"] == "success" + assert data["table_name"] == "orders" + assert data["refreshable"] is True + + +# ================================================================== +# Tests: Token refresh +# ================================================================== + +class TestSupersetTokenRefresh: + + def test_connect_with_expired_token_triggers_refresh(self, client): + """If token expires between connect and catalog call, refresh should work.""" + from data_formulator.data_loader.superset_data_loader import SupersetLoader + + with patch.object(DataConnector, "_get_identity", return_value="test-user"): + # Connect + resp = client.post("/api/connectors/superset/auth/connect", json={ + "params": {"url": "https://bi.example.com", "username": "admin", "password": "admin"}, + }) + assert resp.status_code == 200 + + # Now artificially expire the token + # The mock _ensure_token will handle refresh via MockAuthBridge + resp = client.post("/api/connectors/superset/catalog/ls", json={"path": []}) + assert resp.status_code == 200 + assert len(resp.get_json()["nodes"]) > 0 + + +# ================================================================== +# Tests: Frontend Config +# ================================================================== + +class TestSupersetFrontendConfig: + + def test_config_structure(self, source): + cfg = source.get_frontend_config() + assert cfg["source_id"] == "superset" + assert cfg["name"] == "Test Superset" + # All params should be in form (nothing pinned) + form_names = {f["name"] for f in cfg["params_form"]} + assert "url" in form_names + assert "username" in form_names + assert 
"password" in form_names + + def test_pinned_url(self): + from data_formulator.data_loader.superset_data_loader import SupersetLoader + source = DataConnector.from_loader( + SupersetLoader, + source_id="superset_corp", + display_name="Corp Superset", + default_params={"url": "https://bi.corp.com"}, + ) + cfg = source.get_frontend_config() + assert cfg["pinned_params"]["url"] == "https://bi.corp.com" + form_names = {f["name"] for f in cfg["params_form"]} + assert "url" not in form_names + assert "username" in form_names + + def test_hierarchy_is_dashboard_dataset(self, source): + cfg = source.get_frontend_config() + keys = [h["key"] for h in cfg["hierarchy"]] + assert keys == ["dashboard", "dataset"] diff --git a/tests/frontend/README.md b/tests/frontend/README.md new file mode 100644 index 00000000..50fe6544 --- /dev/null +++ b/tests/frontend/README.md @@ -0,0 +1,34 @@ +# Frontend Tests + +Frontend unit tests powered by **Vitest** + **@testing-library/react** (jsdom). + +## Directory Layout + +```text +tests/frontend/ + setup.ts # Global test setup (jest-dom matchers) + unit/ + data/ + coerceDate.test.ts # Type coercion – Date handling + resolveExcelCellValue.test.ts # Excel cell value resolution + app/ + dfSelectors.test.ts # Redux selectors (getActiveModel) + views/ + safeCellRender.test.tsx # Component rendering object safety +``` + +## Directory Responsibilities + +- `unit/data/` — Pure function tests for `src/data/` modules (type coercion, Excel parsing) +- `unit/app/` — Redux selector and state logic tests for `src/app/` modules +- `unit/views/` — Rendering safety and component behavior tests for `src/views/` modules + +## Running Tests + +```bash +# Run all frontend tests +npm test + +# Watch mode +npm run test:watch +``` diff --git a/tests/frontend/setup.ts b/tests/frontend/setup.ts new file mode 100644 index 00000000..e94e16ad --- /dev/null +++ b/tests/frontend/setup.ts @@ -0,0 +1 @@ +import '@testing-library/jest-dom/vitest'; diff --git 
a/tests/frontend/unit/app/IdentityMigrationDialog.test.tsx b/tests/frontend/unit/app/IdentityMigrationDialog.test.tsx
new file mode 100644
index 00000000..f0df3cbe
--- /dev/null
+++ b/tests/frontend/unit/app/IdentityMigrationDialog.test.tsx
@@ -0,0 +1,234 @@
+/**
+ * Scenario tests for IdentityMigrationDialog.
+ *
+ * Covers the user journey: anonymous user logs in via SSO, sees the
+ * migration prompt, and chooses either "Start Fresh" or "Import Data".
+ *
+ * Key behavioral requirements:
+ * - "Start Fresh" must NOT delete anonymous workspace data
+ * - "Start Fresh" must NOT show "Importing workspaces…"
+ * - "Import Data" calls the migrate endpoint and shows progress
+ */
+import React from "react";
+import "@testing-library/jest-dom/vitest";
+import { describe, it, expect, vi, beforeEach, type Mock } from "vitest";
+import { render, screen, waitFor, act, fireEvent } from "@testing-library/react";
+
+// ---------------------------------------------------------------------------
+// Mocks
+// ---------------------------------------------------------------------------
+
+const mockFetchWithIdentity = vi.fn();
+const mockPurge = vi.fn(async () => {});
+
+vi.mock("../../../../src/app/utils", () => ({
+  fetchWithIdentity: (...args: any[]) => mockFetchWithIdentity(...args),
+  getUrls: () => ({ SESSION_LIST: "/api/sessions/list" }),
+}));
+
+vi.mock("../../../../src/app/store", () => ({
+  persistor: { purge: () => mockPurge() },
+}));
+
+vi.mock("react-i18next", () => ({
+  useTranslation: () => ({
+    t: (key: string, opts?: any) => {
+      const map: Record<string, string> = {
+        "auth.migration.title": "Import Previous Data?",
+        "auth.migration.description": `You have ${opts?.count ?? 0} workspace(s).`,
+        "auth.migration.importButton": "Import Data",
+        "auth.migration.freshButton": "Start Fresh",
+        "auth.migration.importing": "Importing workspaces…",
+        "auth.migration.success": `Imported ${opts?.count ?? 0} workspace(s).`,
+        "auth.migration.failed": `Failed: ${opts?.message ?? 
""}`, + }; + return map[key] ?? key; + }, + }), +})); + +import { IdentityMigrationDialog } from "../../../../src/app/IdentityMigrationDialog"; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function jsonResponse(body: any, status = 200): Response { + return new Response(JSON.stringify(body), { + status, + headers: { "Content-Type": "application/json" }, + }); +} + +function setupAnonymousWorkspaces(count: number) { + mockFetchWithIdentity.mockImplementation(async (url: string) => { + if (url.includes("/api/sessions/list")) { + return jsonResponse({ + status: "ok", + sessions: Array.from({ length: count }, (_, i) => ({ id: `ws-${i}` })), + }); + } + return jsonResponse({ status: "ok" }); + }); +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +beforeEach(() => { + vi.clearAllMocks(); + localStorage.clear(); + // Prevent actual navigation + Object.defineProperty(window, "location", { + writable: true, + value: { href: "/" }, + }); +}); + +describe("Anonymous user logs in and sees migration dialog", () => { + + it("shows the dialog when anonymous workspaces exist", async () => { + setupAnonymousWorkspaces(3); + const onDone = vi.fn(); + + render(); + + await waitFor(() => { + expect(screen.getByText("Import Previous Data?")).toBeInTheDocument(); + }); + expect(screen.getByText(/You have 3 workspace/)).toBeInTheDocument(); + expect(screen.getByText("Import Data")).toBeInTheDocument(); + expect(screen.getByText("Start Fresh")).toBeInTheDocument(); + }); + + it("auto-closes when no anonymous workspaces exist", async () => { + setupAnonymousWorkspaces(0); + const onDone = vi.fn(); + + render(); + + await waitFor(() => { + expect(onDone).toHaveBeenCalled(); + }); + }); +}); + +describe("User clicks 'Start Fresh'", () => 
{ + + it("does NOT call cleanup-anonymous (anonymous data preserved)", async () => { + setupAnonymousWorkspaces(2); + + render(); + + await waitFor(() => { + expect(screen.getByText("Start Fresh")).toBeInTheDocument(); + }); + + await act(async () => { + fireEvent.click(screen.getByText("Start Fresh")); + }); + + const calls = mockFetchWithIdentity.mock.calls.map((c: any[]) => c[0]); + expect(calls).not.toContainEqual( + expect.stringContaining("cleanup-anonymous"), + ); + }); + + it("does NOT call migrate endpoint", async () => { + setupAnonymousWorkspaces(2); + + render(); + + await waitFor(() => { + expect(screen.getByText("Start Fresh")).toBeInTheDocument(); + }); + + await act(async () => { + fireEvent.click(screen.getByText("Start Fresh")); + }); + + const calls = mockFetchWithIdentity.mock.calls.map((c: any[]) => c[0]); + expect(calls).not.toContainEqual( + expect.stringContaining("/api/sessions/migrate"), + ); + }); + + it("never shows 'Importing workspaces…' text", async () => { + setupAnonymousWorkspaces(2); + + render(); + + await waitFor(() => { + expect(screen.getByText("Start Fresh")).toBeInTheDocument(); + }); + + await act(async () => { + fireEvent.click(screen.getByText("Start Fresh")); + }); + + expect(screen.queryByText("Importing workspaces…")).not.toBeInTheDocument(); + }); + + it("navigates to home page", async () => { + setupAnonymousWorkspaces(2); + + render(); + + await waitFor(() => { + expect(screen.getByText("Start Fresh")).toBeInTheDocument(); + }); + + await act(async () => { + fireEvent.click(screen.getByText("Start Fresh")); + }); + + await waitFor(() => { + expect(window.location.href).toBe("/"); + }); + }); +}); + +describe("User clicks 'Import Data'", () => { + + it("calls migrate endpoint and shows importing state", async () => { + setupAnonymousWorkspaces(2); + let resolveMigrate!: (v: Response) => void; + mockFetchWithIdentity.mockImplementation(async (url: string) => { + if (url.includes("/api/sessions/list")) { + return 
jsonResponse({ + status: "ok", + sessions: [{ id: "ws-0" }, { id: "ws-1" }], + }); + } + if (url.includes("/api/sessions/migrate")) { + return new Promise((resolve) => { + resolveMigrate = resolve; + }); + } + return jsonResponse({ status: "ok" }); + }); + + render(); + + await waitFor(() => { + expect(screen.getByText("Import Data")).toBeInTheDocument(); + }); + + await act(async () => { + fireEvent.click(screen.getByText("Import Data")); + }); + + await waitFor(() => { + expect(screen.getByText("Importing workspaces…")).toBeInTheDocument(); + }); + + await act(async () => { + resolveMigrate(jsonResponse({ status: "ok", moved: ["ws-0", "ws-1"] })); + }); + + await waitFor(() => { + expect(screen.getByText(/Imported 2 workspace/)).toBeInTheDocument(); + }); + }); +}); diff --git a/tests/frontend/unit/app/dfSelectors.test.ts b/tests/frontend/unit/app/dfSelectors.test.ts new file mode 100644 index 00000000..4e7d9f6b --- /dev/null +++ b/tests/frontend/unit/app/dfSelectors.test.ts @@ -0,0 +1,117 @@ +import { describe, it, expect } from 'vitest'; +import { dfSelectors, DataFormulatorState, ModelConfig } from '../../../../src/app/dfSlice'; + +const makeModel = (overrides: Partial = {}): ModelConfig => ({ + id: 'model-1', + endpoint: 'https://api.example.com', + model: 'gpt-4', + ...overrides, +}); + +const makeMinimalState = ( + overrides: Partial> = {}, +): DataFormulatorState => { + return { + models: [], + globalModels: [], + selectedModelId: undefined, + ...overrides, + } as unknown as DataFormulatorState; +}; + +describe('dfSelectors.getActiveModel', () => { + it('should return the selected model when it exists', () => { + const model = makeModel({ id: 'a' }); + const state = makeMinimalState({ + models: [makeModel({ id: 'b' }), model], + selectedModelId: 'a', + }); + expect(dfSelectors.getActiveModel(state)).toEqual(model); + }); + + it('should fall back to the first model when selectedModelId does not match', () => { + const first = makeModel({ id: 'first' }); + 
const state = makeMinimalState({ + models: [first, makeModel({ id: 'second' })], + selectedModelId: 'non-existent', + }); + expect(dfSelectors.getActiveModel(state)).toEqual(first); + }); + + it('should return undefined when the models array is empty', () => { + const state = makeMinimalState({ + models: [], + selectedModelId: undefined, + }); + expect(dfSelectors.getActiveModel(state)).toBeUndefined(); + }); + + it('should return undefined when models is empty even with a selectedModelId', () => { + const state = makeMinimalState({ + models: [], + selectedModelId: 'some-id', + }); + expect(dfSelectors.getActiveModel(state)).toBeUndefined(); + }); + + it('should return the first model when selectedModelId is undefined', () => { + const first = makeModel({ id: 'only' }); + const state = makeMinimalState({ + models: [first], + selectedModelId: undefined, + }); + expect(dfSelectors.getActiveModel(state)).toEqual(first); + }); + + it('should find a model in globalModels by selectedModelId', () => { + const globalModel = makeModel({ id: 'global-1' }); + const state = makeMinimalState({ + globalModels: [globalModel], + models: [], + selectedModelId: 'global-1', + }); + expect(dfSelectors.getActiveModel(state)).toEqual(globalModel); + }); + + it('should prefer exact match in globalModels over first user model', () => { + const globalModel = makeModel({ id: 'global-1', model: 'gpt-4' }); + const userModel = makeModel({ id: 'user-1', model: 'local-llm' }); + const state = makeMinimalState({ + globalModels: [globalModel], + models: [userModel], + selectedModelId: 'global-1', + }); + expect(dfSelectors.getActiveModel(state)).toEqual(globalModel); + }); + + it('should fall back to first globalModel when no id matches and models is empty', () => { + const globalModel = makeModel({ id: 'global-1' }); + const state = makeMinimalState({ + globalModels: [globalModel], + models: [], + selectedModelId: 'non-existent', + }); + 
expect(dfSelectors.getActiveModel(state)).toEqual(globalModel); + }); + + it('should fall back to globalModel (first in combined array) over user model', () => { + const globalModel = makeModel({ id: 'global-1' }); + const userModel = makeModel({ id: 'user-1' }); + const state = makeMinimalState({ + globalModels: [globalModel], + models: [userModel], + selectedModelId: 'non-existent', + }); + expect(dfSelectors.getActiveModel(state)).toEqual(globalModel); + }); + + it('should handle undefined globalModels gracefully', () => { + const userModel = makeModel({ id: 'user-1' }); + const state = { + models: [userModel], + globalModels: undefined, + selectedModelId: 'user-1', + } as unknown as DataFormulatorState; + expect(dfSelectors.getActiveModel(state)).toEqual(userModel); + }); +}); diff --git a/tests/frontend/unit/app/fetchWithIdentity.test.ts b/tests/frontend/unit/app/fetchWithIdentity.test.ts new file mode 100644 index 00000000..3db8e0f4 --- /dev/null +++ b/tests/frontend/unit/app/fetchWithIdentity.test.ts @@ -0,0 +1,161 @@ +/** + * Tests for fetchWithIdentity — Bearer token attachment and 401 retry. 
+ * + * Mock strategy: + * - `oidcConfig` module is mocked to control getAccessToken / getUserManager + * - Global `fetch` is mocked to inspect headers and simulate responses + * - Redux store is mocked to provide identity / workspace state + */ +import { describe, it, expect, vi, beforeEach, type Mock } from 'vitest'; + +// ---- Module mocks (must be before imports) -------------------------------- + +vi.mock('../../../../src/app/oidcConfig', () => ({ + getAccessToken: vi.fn(async () => null), + getUserManager: vi.fn(async () => null), +})); + +vi.mock('../../../../src/app/store', () => ({ + store: { + getState: vi.fn(() => ({ + identity: { type: 'browser', id: 'test-browser-id' }, + activeWorkspace: null, + serverConfig: { WORKSPACE_BACKEND: 'local' }, + })), + }, +})); + +vi.mock('../../../../src/app/identity', () => ({ + getBrowserId: vi.fn(() => 'test-browser-id'), +})); + +vi.mock('../../../../src/i18n', () => ({ + default: { language: 'en' }, +})); + +// ---- Imports (after mocks) ----------------------------------------------- + +import { fetchWithIdentity } from '../../../../src/app/utils'; +import { getAccessToken, getUserManager } from '../../../../src/app/oidcConfig'; + +// ---- Helpers ------------------------------------------------------------- + +function mockFetchResponse(status: number, body: any = {}): Response { + return new Response(JSON.stringify(body), { + status, + headers: { 'Content-Type': 'application/json' }, + }); +} + +// ---- Tests --------------------------------------------------------------- + +beforeEach(() => { + vi.restoreAllMocks(); + + // Re-apply default mock implementations after restoreAllMocks + (getAccessToken as Mock).mockResolvedValue(null); + (getUserManager as Mock).mockResolvedValue(null); + + globalThis.fetch = vi.fn(async () => mockFetchResponse(200)); +}); + +describe('fetchWithIdentity', () => { + + describe('Bearer token attachment', () => { + + it('should attach Authorization header when OIDC token is 
available', async () => { + (getAccessToken as Mock).mockResolvedValue('oidc-access-token-123'); + + await fetchWithIdentity('/api/test'); + + const callArgs = (globalThis.fetch as Mock).mock.calls[0]; + const headers = callArgs[1].headers as Headers; + expect(headers.get('Authorization')).toBe('Bearer oidc-access-token-123'); + }); + + it('should not attach Authorization header in anonymous mode', async () => { + (getAccessToken as Mock).mockResolvedValue(null); + + await fetchWithIdentity('/api/test'); + + const callArgs = (globalThis.fetch as Mock).mock.calls[0]; + const headers = callArgs[1].headers as Headers; + expect(headers.has('Authorization')).toBe(false); + }); + + it('should always attach X-Identity-Id header', async () => { + await fetchWithIdentity('/api/test'); + + const callArgs = (globalThis.fetch as Mock).mock.calls[0]; + const headers = callArgs[1].headers as Headers; + expect(headers.get('X-Identity-Id')).toBe('browser:test-browser-id'); + }); + + it('should not modify headers for non-API URLs', async () => { + await fetchWithIdentity('/static/file.js'); + + const callArgs = (globalThis.fetch as Mock).mock.calls[0]; + expect(callArgs[1]?.headers).toBeUndefined(); + }); + }); + + describe('401 retry with silent renew', () => { + + it('should retry once after silent renew on 401', async () => { + const mockSigninSilent = vi.fn(async () => {}); + (getUserManager as Mock).mockResolvedValue({ + signinSilent: mockSigninSilent, + }); + (getAccessToken as Mock).mockResolvedValue('old-token'); + + (globalThis.fetch as Mock) + .mockResolvedValueOnce(mockFetchResponse(401)) + .mockResolvedValueOnce(mockFetchResponse(200, { ok: true })); + + const resp = await fetchWithIdentity('/api/data'); + + expect(resp.status).toBe(200); + expect(mockSigninSilent).toHaveBeenCalledOnce(); + expect(globalThis.fetch).toHaveBeenCalledTimes(2); + }); + + it('should return 401 when silent renew fails', async () => { + (getUserManager as Mock).mockResolvedValue({ + 
signinSilent: vi.fn(async () => { throw new Error('renew failed'); }), + }); + (getAccessToken as Mock).mockResolvedValue('expired-token'); + + (globalThis.fetch as Mock).mockResolvedValue(mockFetchResponse(401)); + + const resp = await fetchWithIdentity('/api/data'); + + expect(resp.status).toBe(401); + }); + + it('should not retry when no UserManager is available', async () => { + (getUserManager as Mock).mockResolvedValue(null); + (getAccessToken as Mock).mockResolvedValue(null); + + (globalThis.fetch as Mock).mockResolvedValue(mockFetchResponse(401)); + + const resp = await fetchWithIdentity('/api/data'); + + expect(resp.status).toBe(401); + expect(globalThis.fetch).toHaveBeenCalledOnce(); + }); + + it('should not retry on non-401 errors', async () => { + (getAccessToken as Mock).mockResolvedValue('token'); + (getUserManager as Mock).mockResolvedValue({ + signinSilent: vi.fn(), + }); + + (globalThis.fetch as Mock).mockResolvedValue(mockFetchResponse(500)); + + const resp = await fetchWithIdentity('/api/data'); + + expect(resp.status).toBe(500); + expect(globalThis.fetch).toHaveBeenCalledOnce(); + }); + }); +}); diff --git a/tests/frontend/unit/app/getAccessToken.test.ts b/tests/frontend/unit/app/getAccessToken.test.ts new file mode 100644 index 00000000..b6660779 --- /dev/null +++ b/tests/frontend/unit/app/getAccessToken.test.ts @@ -0,0 +1,74 @@ +/** + * Tests for getAccessToken — silent refresh on expired tokens. 
+ */ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +const mockGetUser = vi.fn(); +const mockSigninSilent = vi.fn(); + +vi.mock('../../../../src/app/oidcConfig', async (importOriginal) => { + const original = await importOriginal(); + return { + ...original, + getUserManager: vi.fn(async () => ({ + getUser: mockGetUser, + signinSilent: mockSigninSilent, + })), + }; +}); + +import { getAccessToken } from '../../../../src/app/oidcConfig'; + +beforeEach(() => { + vi.clearAllMocks(); +}); + +describe('getAccessToken', () => { + + it('returns token when user exists and not expired', async () => { + mockGetUser.mockResolvedValue({ expired: false, access_token: 'fresh-token' }); + + const token = await getAccessToken(); + + expect(token).toBe('fresh-token'); + expect(mockSigninSilent).not.toHaveBeenCalled(); + }); + + it('returns null when no user stored', async () => { + mockGetUser.mockResolvedValue(null); + + const token = await getAccessToken(); + + expect(token).toBeNull(); + expect(mockSigninSilent).not.toHaveBeenCalled(); + }); + + it('calls signinSilent when token is expired and returns refreshed token', async () => { + mockGetUser.mockResolvedValue({ expired: true, access_token: 'old-token' }); + mockSigninSilent.mockResolvedValue({ expired: false, access_token: 'refreshed-token' }); + + const token = await getAccessToken(); + + expect(token).toBe('refreshed-token'); + expect(mockSigninSilent).toHaveBeenCalledOnce(); + }); + + it('returns null when token is expired and signinSilent fails', async () => { + mockGetUser.mockResolvedValue({ expired: true, access_token: 'old-token' }); + mockSigninSilent.mockRejectedValue(new Error('refresh failed')); + + const token = await getAccessToken(); + + expect(token).toBeNull(); + expect(mockSigninSilent).toHaveBeenCalledOnce(); + }); + + it('returns null when token is expired and signinSilent returns null', async () => { + mockGetUser.mockResolvedValue({ expired: true, access_token: 'old-token' }); + 
mockSigninSilent.mockResolvedValue(null); + + const token = await getAccessToken(); + + expect(token).toBeNull(); + }); +}); diff --git a/tests/frontend/unit/data/coerceDate.test.ts b/tests/frontend/unit/data/coerceDate.test.ts new file mode 100644 index 00000000..3c11a136 --- /dev/null +++ b/tests/frontend/unit/data/coerceDate.test.ts @@ -0,0 +1,42 @@ +import { describe, it, expect } from 'vitest'; +import { CoerceType } from '../../../../src/data/types'; + +const coerceDate = CoerceType.date; + +describe('coerceDate', () => { + it('should return null for null input', () => { + expect(coerceDate(null)).toBeNull(); + }); + + it('should return null for undefined input', () => { + expect(coerceDate(undefined)).toBeNull(); + }); + + it('should return null for empty string', () => { + expect(coerceDate('')).toBeNull(); + }); + + it('should convert Date object to ISO string', () => { + const date = new Date('2024-06-15T12:00:00Z'); + const result = coerceDate(date); + expect(typeof result).toBe('string'); + expect(result).toBe(date.toISOString()); + }); + + it('should convert Date object with timezone to ISO string', () => { + const date = new Date(2024, 0, 1, 8, 30, 0); + const result = coerceDate(date); + expect(typeof result).toBe('string'); + expect(result).toBe(date.toISOString()); + }); + + it('should pass through string date values unchanged', () => { + expect(coerceDate('2024-01-01')).toBe('2024-01-01'); + expect(coerceDate('Jan 1, 2024')).toBe('Jan 1, 2024'); + }); + + it('should pass through numeric timestamps unchanged', () => { + const ts = 1700000000000; + expect(coerceDate(ts)).toBe(ts); + }); +}); diff --git a/tests/frontend/unit/data/resolveExcelCellValue.test.ts b/tests/frontend/unit/data/resolveExcelCellValue.test.ts new file mode 100644 index 00000000..805fadec --- /dev/null +++ b/tests/frontend/unit/data/resolveExcelCellValue.test.ts @@ -0,0 +1,106 @@ +import { describe, it, expect } from 'vitest'; +import { resolveExcelCellValue } from 
'../../../../src/data/utils'; + +describe('resolveExcelCellValue', () => { + // --- Null / undefined --- + it('should return null for null', () => { + expect(resolveExcelCellValue(null)).toBeNull(); + }); + + it('should return null for undefined', () => { + expect(resolveExcelCellValue(undefined)).toBeNull(); + }); + + // --- Primitives pass-through --- + it('should return string as-is', () => { + expect(resolveExcelCellValue('hello')).toBe('hello'); + }); + + it('should return number as-is', () => { + expect(resolveExcelCellValue(42)).toBe(42); + }); + + it('should return boolean as-is', () => { + expect(resolveExcelCellValue(true)).toBe(true); + expect(resolveExcelCellValue(false)).toBe(false); + }); + + it('should return empty string as-is', () => { + expect(resolveExcelCellValue('')).toBe(''); + }); + + // --- Date objects --- + it('should convert Date to ISO string', () => { + const date = new Date('2024-03-15T10:30:00Z'); + expect(resolveExcelCellValue(date)).toBe('2024-03-15T10:30:00.000Z'); + }); + + // --- ExcelJS richText --- + it('should join richText segments', () => { + const richText = { + richText: [ + { text: 'Hello' }, + { text: ' ' }, + { text: 'World' }, + ], + }; + expect(resolveExcelCellValue(richText)).toBe('Hello World'); + }); + + it('should handle richText with missing text fields', () => { + const richText = { + richText: [ + { text: 'A' }, + { font: { bold: true } }, + { text: 'B' }, + ], + }; + expect(resolveExcelCellValue(richText)).toBe('AB'); + }); + + // --- ExcelJS hyperlink --- + it('should extract text from hyperlink object', () => { + const hyperlink = { + text: 'Click here', + hyperlink: 'https://example.com', + }; + expect(resolveExcelCellValue(hyperlink)).toBe('Click here'); + }); + + it('should fall back to hyperlink URL when text is empty', () => { + const hyperlink = { + text: '', + hyperlink: 'https://example.com', + }; + expect(resolveExcelCellValue(hyperlink)).toBe('https://example.com'); + }); + + // --- ExcelJS formula 
--- + it('should resolve formula result (primitive)', () => { + const formula = { formula: '=A1+B1', result: 100 }; + expect(resolveExcelCellValue(formula)).toBe(100); + }); + + it('should resolve formula result (Date)', () => { + const date = new Date('2024-01-01T00:00:00Z'); + const formula = { formula: '=TODAY()', result: date }; + expect(resolveExcelCellValue(formula)).toBe('2024-01-01T00:00:00.000Z'); + }); + + it('should return null for formula with undefined result', () => { + const formula = { formula: '=INVALID()', result: undefined }; + expect(resolveExcelCellValue(formula)).toBeNull(); + }); + + // --- ExcelJS error --- + it('should return null for error cell value', () => { + const errorCell = { error: '#REF!' }; + expect(resolveExcelCellValue(errorCell)).toBeNull(); + }); + + // --- Generic object fallback --- + it('should stringify unknown objects', () => { + const obj = { custom: 'data' }; + expect(resolveExcelCellValue(obj)).toBe(String(obj)); + }); +}); diff --git a/tests/frontend/unit/views/checkIsLikelyTextOnlyModel.test.ts b/tests/frontend/unit/views/checkIsLikelyTextOnlyModel.test.ts new file mode 100644 index 00000000..f9f26dbd --- /dev/null +++ b/tests/frontend/unit/views/checkIsLikelyTextOnlyModel.test.ts @@ -0,0 +1,40 @@ +import { describe, it, expect } from 'vitest'; +import { checkIsLikelyTextOnlyModel } from '../../../../src/views/DataLoadingChat'; + +describe('checkIsLikelyTextOnlyModel', () => { + it('returns true for deepseek-chat', () => { + expect(checkIsLikelyTextOnlyModel('deepseek-chat')).toBe(true); + }); + + it('returns true for DeepSeek-Chat (case-insensitive)', () => { + expect(checkIsLikelyTextOnlyModel('DeepSeek-Chat')).toBe(true); + }); + + it('returns true when deepseek-chat is a substring', () => { + expect(checkIsLikelyTextOnlyModel('provider/deepseek-chat-v2')).toBe(true); + }); + + it('returns false for gpt-4o (multimodal)', () => { + expect(checkIsLikelyTextOnlyModel('gpt-4o')).toBe(false); + }); + + it('returns 
false for claude-sonnet-4-20250514', () => { + expect(checkIsLikelyTextOnlyModel('claude-sonnet-4-20250514')).toBe(false); + }); + + it('returns false for gemini-2.5-pro', () => { + expect(checkIsLikelyTextOnlyModel('gemini-2.5-pro')).toBe(false); + }); + + it('returns false for deepseek-reasoner (vision-capable)', () => { + expect(checkIsLikelyTextOnlyModel('deepseek-reasoner')).toBe(false); + }); + + it('returns false for undefined', () => { + expect(checkIsLikelyTextOnlyModel(undefined)).toBe(false); + }); + + it('returns false for empty string', () => { + expect(checkIsLikelyTextOnlyModel('')).toBe(false); + }); +}); diff --git a/tests/frontend/unit/views/safeCellRender.test.tsx b/tests/frontend/unit/views/safeCellRender.test.tsx new file mode 100644 index 00000000..410949b9 --- /dev/null +++ b/tests/frontend/unit/views/safeCellRender.test.tsx @@ -0,0 +1,95 @@ +/** + * Tests that verify the safe rendering pattern used in: + * - ReactTable.tsx + * - SelectableDataGrid.tsx + * - DataLoadingThread.tsx + * + * The fix ensures that object values (e.g. Date instances from Excel) + * are converted to strings before being rendered by React, preventing + * "Objects are not valid as a React child" errors. + */ +import React from 'react'; +import { describe, it, expect } from 'vitest'; +import { render } from '@testing-library/react'; + +/** + * Replicates the safe rendering logic applied in the component fixes. + * ReactTable & SelectableDataGrid use this pattern inline. + * DataLoadingThread uses it inside the format callback. + */ +const safeCellRender = (value: any): React.ReactNode => { + if (value != null && typeof value === 'object') return String(value); + if (typeof value === 'boolean') return `${value}`; + return value; +}; + +const formatFn = (v: any) => (v != null && typeof v === 'object' ? 
String(v) : v); + +describe('safeCellRender – inline pattern (ReactTable / SelectableDataGrid)', () => { + it('should render string values directly', () => { + const { container } = render(<div>{safeCellRender('hello')}</div>); + expect(container.textContent).toBe('hello'); + }); + + it('should render number values directly', () => { + const { container } = render(<div>{safeCellRender(42)}</div>); + expect(container.textContent).toBe('42'); + }); + + it('should render null without crashing', () => { + const { container } = render(<div>{safeCellRender(null)}</div>); + expect(container.textContent).toBe(''); + }); + + it('should render undefined without crashing', () => { + const { container } = render(<div>{safeCellRender(undefined)}</div>); + expect(container.textContent).toBe(''); + }); + + it('should render boolean as string', () => { + const { container } = render(<div>{safeCellRender(true)}</div>); + expect(container.textContent).toBe('true'); + }); + + it('should safely render a Date object as string', () => { + const date = new Date('2024-06-15T12:00:00Z'); + const { container } = render(<div>{safeCellRender(date)}</div>); + expect(container.textContent).toBe(String(date)); + }); + + it('should safely render a plain object as string', () => { + const obj = { richText: [{ text: 'A' }] }; + const { container } = render(<div>{safeCellRender(obj)}</div>); + expect(container.textContent).toBe(String(obj)); + }); + + it('should safely render an array as string', () => { + const arr = [1, 2, 3]; + const { container } = render(<div>{safeCellRender(arr)}</div>); + expect(container.textContent).toBe(String(arr)); + }); +}); + +describe('formatFn – DataLoadingThread format callback', () => { + it('should pass through string values', () => { + expect(formatFn('text')).toBe('text'); + }); + + it('should pass through number values', () => { + expect(formatFn(99)).toBe(99); + }); + + it('should pass through null', () => { + expect(formatFn(null)).toBeNull(); + }); + + it('should convert Date object to string', () => { + const date = new
Date('2024-01-01T00:00:00Z'); + expect(formatFn(date)).toBe(String(date)); + }); + + it('should convert arbitrary object to string', () => { + const obj = { formula: '=SUM(A1:A10)', result: 55 }; + expect(formatFn(obj)).toBe(String(obj)); + }); +}); diff --git a/tests/test_plan.md b/tests/test_plan.md new file mode 100644 index 00000000..4e0d8f97 --- /dev/null +++ b/tests/test_plan.md @@ -0,0 +1,327 @@ +# Test Plan + +This document captures the testing strategy for Data Formulator. It is meant to +be a living document — update it as coverage grows and priorities shift. + +--- + +## Current State + +### Directory layout + +All tests live under the repo-level `tests/` directory (previously some lived +in `py-src/tests/` — those have been consolidated here). + +``` +tests/ + conftest.py # adds py-src to sys.path + test_plan.md # ← this file + run_test_dbs.sh # unified helper: start/stop/test all DBs + backend/ # included in default pytest (no Docker needed) + unit/ # pure functions, no Flask/network + security/ # security-focused tests + test_sandbox_security.py # sandbox confinement (file write, process exec) + test_code_signing.py # HMAC sign/verify for transformation code + test_sanitize.py # error message redaction + test_auth.py # identity extraction & namespace isolation + test_global_model_security.py # credential isolation, error sanitization + test_url_allowlist.py # SSRF protection for user-provided api_base + integration/ # Flask routes, workspace, sandbox + test_sandbox.py # sandbox functional tests (transforms, errors) + ... 
+ contract/ # API boundary guarantees + benchmarks/ # performance benchmarks (not in CI) + plugin/ # data loader tests (requires Docker, run separately) + mysql/ # MySQL loader (Dockerfile + init.sql) + mongodb/ # MongoDB loader (Dockerfile + init_data.js) + postgres/ # PostgreSQL loader (Dockerfile + init.sql) + bigquery/ # BigQuery emulator (Dockerfile + init_data.yaml) + cosmosdb/ # Cosmos DB emulator (Dockerfile + seed_data.py) + run_test_dbs.sh # unified script to start/stop/test databases + frontend/ + setup.ts # jest-dom matchers + unit/ # vitest tests for src/ +``` + +### Running tests + +```bash +# Default: backend + frontend (no Docker needed, fast) +pytest + +# Plugin tests: data loader integrations (requires Docker) +./tests/run_test_dbs.sh start # start all test databases +./tests/run_test_dbs.sh test # run all loader tests +./tests/run_test_dbs.sh stop # tear down + +# Or one-shot per service +./tests/run_test_dbs.sh test mysql +``` + +### What exists today + +| Layer | Location | Runner | Count | +|-------|----------|--------|-------| +| Backend unit | `tests/backend/unit/` | pytest | 20 files | +| Backend security | `tests/backend/security/` | pytest | 6 files | +| Backend integration | `tests/backend/integration/` | pytest | 8 files (7 route tests + sandbox) | +| Backend contract | `tests/backend/contract/` | pytest | 2 files | +| Backend benchmarks | `tests/backend/benchmarks/` | manual | 2 files | +| Plugin (data loaders) | `tests/plugin/` | pytest (manual) | 7 suites (requires Docker) | +| Frontend unit | `tests/frontend/unit/` | vitest | 4 files | + +`tests/backend/` runs by default with `pytest` — no Docker required. +`tests/plugin/` is excluded from default runs and requires +`./tests/run_test_dbs.sh` to start test databases first.
+ +### Key gaps + +- **Session routes** — save / load / export / import untested +- **Agent pipelines** — no isolated tests for any agent class +- **Data loaders** — S3, Azure Blob, Kusto, Athena, MSSQL not covered +- **Workspace factory** — backend selection logic untested +- **Frontend components** — almost no component or hook tests +- **Frontend state** — only 1 Redux selector test +- **Vega-Lite assembly** — `create_vl_plots` untested +- **Semantic types** — type resolution and classification untested + +--- + +## Proposed Test Categories + +### P0 — Security & correctness (add first) + +These protect against regressions that could leak secrets, execute tampered +code, or corrupt user data. + +#### 1. Code signing (`code_signing.py`) ✅ covered + +Tests in `tests/backend/security/test_code_signing.py`: +sign/verify round-trip, tampered code/signature rejection, empty inputs, +whitespace sensitivity, Unicode, `sign_result()` helper. + +#### 2. Auth & identity (`auth.py`) ✅ covered + +Tests in `tests/backend/security/test_auth.py`: +Azure principal → `user:` prefix, browser → `browser:` prefix, client +cannot spoof `user:` namespace, Azure takes priority, missing headers, +malformed values rejected, validation edge cases. + +#### 3. Error sanitization (`sanitize.py`) ✅ covered + +Tests in `tests/backend/security/test_sanitize.py`: +API key redaction, path stripping (Unix/Windows/tmp), stack trace removal, +HTML escaping, truncation, edge cases (empty, Unicode). + +#### 4. Sandbox confinement (`sandbox/`) ✅ covered + +Tests in `tests/backend/security/test_sandbox_security.py`: +file write blocked (open, csv), process exec blocked (os.system, popen, +execvp, spawnlp, kill, sys.modules bypass, putenv), Docker workspace +read-only mount. + +#### 4b. 
URL allowlist (`url_allowlist.py`) ✅ covered + +Tests in `tests/backend/security/test_url_allowlist.py`: +open mode (env unset, all URLs allowed), enforce mode (matching patterns +pass, unlisted/private IPs rejected), empty api_base always allowed, +case insensitivity, glob edge cases, pattern loading. + +#### 5. Session sensitive-field stripping (`session_routes.py`) + +- `_strip_sensitive()` removes model credentials, identity, API keys +- round-trip: save → load preserves data but strips secrets + +### P1 — Core data pipeline (add next) + +#### 6. Workspace operations (`datalake/workspace.py`) + +- `save_table()` → `load_table()` round-trip preserves data +- `list_tables()` reflects creates and deletes +- `get_table_metadata()` returns correct schema +- concurrent metadata updates use file locking (atomic writes) +- temp file cleanup on workspace close +- workspace isolation between users + +#### 7. File manager (`datalake/file_manager.py`) + +- encoding detection for UTF-8, UTF-16, Shift-JIS, GB2312 +- BOM handling +- large file upload (near MAX_CONTENT_LENGTH boundary) +- file type validation (reject unsupported formats) +- (some coverage already exists — extend, don't duplicate) + +#### 8. Metadata persistence (`datalake/metadata.py`) + +- YAML save / load round-trip +- cross-platform file locking +- concurrent writers don't corrupt metadata +- schema migration: old metadata format still loads + +#### 9. Table routes — end-to-end (`tables_routes.py`) + +- `/create-table` with JSON, CSV, Parquet payloads +- `/parse-file` with Excel, CSV, TSV files +- `/delete-table` cleans up Parquet + metadata +- `/sample-table` returns correct row count and schema +- DuckDB sampling for large files +- `/open-workspace` initializes workspace correctly +- (some coverage exists — check and extend) + +#### 10. 
Model registry (`model_registry.py`) + +- env-var scanning for each provider (OpenAI, Azure, Anthropic, Gemini, Ollama) +- `list_public()` never exposes API keys +- custom model endpoint configuration +- missing / malformed env vars → graceful degradation +- (some coverage exists — check and extend) + +### P2 — Agent layer (mock LLM calls) + +#### 11. Agent utilities + +- `agent_utils.py`: JSON extraction from LLM responses, data summary generation +- `agent_utils_sql.py`: DuckDB view creation, quoted identifiers for Unicode +- `agent_language.py`: English / Chinese prompt instruction building +- `agent_diagnostics.py`: diagnostics payload builder captures correct fields +- (some coverage exists for diagnostics and SQL utils) + +#### 12. Individual agents (mock `Client` / LiteLLM) + +- `DataRecAgent` — given a mock LLM response, produces correct Vega-Lite spec + code +- `DataTransformationAgent` — generates valid Python transformation code +- `DataLoadAgent` — infers semantic types and suggests table names from raw data +- `DataCleanAgentStream` — streams cleaning suggestions +- `CodeExplanationAgent` — produces explanation from code input +- `ChartInsightAgent` — generates insight from chart spec +- `DataAgent` — observe→think→act loop terminates correctly + +Focus on: prompt assembly, response parsing, error recovery, repair loops. + +#### 13. Semantic types (`agents/semantic_types.py`) + +- type classification: temporal, categorical, measure sets are disjoint +- type resolution from sample data (dates, currencies, percentages) +- consistency with frontend `type-registry.ts` constants + +### P3 — Visualization & workflows + +#### 14. Vega-Lite chart assembly (`workflows/create_vl_plots.py`) + +- field type coercion (quantitative, nominal, temporal, ordinal) +- encoding shelf assembly from semantic fields +- bar, line, scatter, area chart generation +- multi-layer charts +- invalid encoding combinations → graceful error + +#### 15. 
Chart semantics (`workflows/chart_semantics.py`) + +- encoding channel validation +- specification completeness checks + +### P4 — Data loader integrations + +#### 16. Data loader framework ✅ partially covered + +Integration tests with Dockerized services exist for MySQL, MongoDB, +PostgreSQL, and BigQuery (in `tests/backend/integration/test_*/`). +A standalone MySQL-to-datalake round-trip test also exists +(`test_mysql_datalake.py`). Still needed: + +- `BaseDataLoader` interface contract test (shared across all loaders) +- Remaining loaders (require mocked connections or emulators): + - MSSQL: SQL query → DataFrame + - S3: file listing, download, Athena integration + - Azure Blob: container listing, file download + - Kusto: KQL query → DataFrame + - Athena: query execution, result fetching +- Connection error handling for each loader +- Credential validation + +### P5 — Frontend unit tests + +#### 17. Data utilities (`src/data/`) + +- type coercion functions (extend existing `coerceDate.test.ts`) +- Excel cell resolution (extend existing) +- data transformation helpers + +#### 18. Redux state (`src/app/`) + +- all exported selectors (extend existing `dfSelectors.test.ts`) +- reducer logic for table CRUD, model selection, session state +- state persistence (redux-persist integration) + +#### 19. View utilities & components + +- `ViewUtils.tsx` helper functions +- `ChartRenderService.tsx` — Vega-Embed rendering +- `ModelSelectionDialog` — model filtering and selection +- `DataView` — table rendering with filtering/sorting +- `EncodingBox` — encoding shelf interactions +- `ChatDialog` — message handling + +#### 20. Internationalization (`src/i18n/`) + +- all i18n keys have translations in both English and Chinese +- no missing keys in either locale + +### P6 — End-to-end workflows (optional / future) + +#### 21. 
Full pipeline tests + +- upload CSV → create table → derive data → generate chart → export +- session save → reload → resume workflow +- multi-table join → visualization + +These would likely use Playwright or Cypress and are out of scope initially. + +--- + +## Test Infrastructure + +### Fixtures to build + +- **`app_client`** — Flask test client with in-memory workspace (exists in some integration tests, should be standardized) +- **`tmp_workspace`** — temporary workspace directory, cleaned up after test +- **`mock_llm_client`** — patched `Client` that returns canned LLM responses +- **`sample_dataframes`** — standard DataFrames for table operations (small, wide, Unicode columns, empty) +- **`sample_files`** — CSV/Excel/Parquet fixture files (some exist in `tests/backend/fixtures/`) + +### Markers + +| Marker | Meaning | +|--------|---------| +| `@pytest.mark.backend` | Backend test (already configured) | +| `@pytest.mark.contract` | API contract test (already configured) | +| `@pytest.mark.slow` | Tests that take > 5s (DB, Docker sandbox) | +| `@pytest.mark.requires_docker` | Needs Docker daemon running | +| `@pytest.mark.requires_llm` | Needs real LLM API key (skip in CI by default) | + +### CI considerations + +- P0–P2 tests should run on every PR (< 2 min target) +- P4 data loader tests need mock services or are skipped in CI +- P6 e2e tests run on merge to main only +- Frontend tests run via `npm test` in CI + +--- + +## How to prioritize + +1. Start with **P0** — these guard against security issues and data corruption +2. Move to **P1** — these catch breakages in daily development workflows +3. **P2** covers the AI agent layer — use mocks, not real LLM calls +4. **P3–P5** fill remaining coverage gaps +5. **P6** is aspirational — add when the team has bandwidth + +--- + +## Open questions + +- [ ] Should sandbox tests use Docker in CI, or only test `LocalSandbox`? +- [ ] Do we want snapshot tests for Vega-Lite spec output? 
+- [ ] Should agent tests pin specific mock LLM responses, or use property-based testing? +- [ ] Is there a need for load/stress testing on the streaming endpoints? +- [ ] Should frontend component tests use shallow rendering or full mount? +- [ ] Do we need contract tests for the frontend ↔ backend API boundary (OpenAPI schema)? diff --git a/tsconfig.json b/tsconfig.json index 343a3c0d..0b6b9d97 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,13 +1,12 @@ { "compilerOptions": { - "target": "es5", + "target": "ES2020", "lib": [ "dom", "dom.iterable", "esnext", "ESNext.Array", ], - "downlevelIteration": true, "allowJs": true, "skipLibCheck": true, "esModuleInterop": true, @@ -16,7 +15,7 @@ "forceConsistentCasingInFileNames": true, "noFallthroughCasesInSwitch": true, "module": "esnext", - "moduleResolution": "node", + "moduleResolution": "bundler", "resolveJsonModule": true, "isolatedModules": true, "jsx": "react", diff --git a/uv.lock b/uv.lock index a5c076af..006cfe92 100644 --- a/uv.lock +++ b/uv.lock @@ -259,6 +259,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fc/d8/b8fcba9464f02b121f39de2db2bf57f0b216fe11d014513d666e8634380d/azure_core-1.38.0-py3-none-any.whl", hash = "sha256:ab0c9b2cd71fecb1842d52c965c95285d3cfb38902f6766e4a471f1cd8905335", size = 217825, upload-time = "2026-01-12T17:03:07.291Z" }, ] +[[package]] +name = "azure-cosmos" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c8/a3/0474e622bf9676e3206d61269461ed16a05958363c254ea3b15af16219b2/azure_cosmos-4.15.0.tar.gz", hash = "sha256:be1cf49837c197d9da880ec47fe020a24d679075b89e0e1e2aca8d376b3a5a24", size = 2100744, upload-time = "2026-02-23T16:01:52.293Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/31/5f/b6e3d3ae16fa121fdc17e62447800d378b7e716cd6103c3650977a6c4618/azure_cosmos-4.15.0-py3-none-any.whl", hash = "sha256:83c1da7386bcd0df9a15c52116cc35012225d8a72d4f1379938b83ea5eb19fff", size = 424870, upload-time = "2026-02-23T16:01:54.514Z" }, +] + [[package]] name = "azure-identity" version = "1.25.1" @@ -574,14 +587,14 @@ wheels = [ [[package]] name = "click" -version = "8.3.1" +version = "8.1.8" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, ] [[package]] @@ -602,90 +615,63 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/60/97/891a0971e1e4a8c5d2b20bbe0e524dc04548d2307fee33cdeba148fd4fc7/comm-0.2.3-py3-none-any.whl", hash = 
"sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417", size = 7294, upload-time = "2025-07-25T14:02:02.896Z" }, ] -[[package]] -name = "connectorx" -version = "0.4.5" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/41/183fd02ed424747f50b118a716334ca57569c5c9850fcda900d65a1c3d84/connectorx-0.4.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:3fa0811081c84befde6d3aa661ecb17b95be9e3851e20009fd27d0e1b925ceb9", size = 37905349, upload-time = "2026-01-18T01:31:24.024Z" }, - { url = "https://files.pythonhosted.org/packages/1f/d2/af67eb73865372b1b06057254e04821e31bf8ea84129d38f862e11d10fc7/connectorx-0.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0ea5feccc2fb3471fa72c1d920bb4ed17ba1b18aedb89dee5ee6009138e35260", size = 35976620, upload-time = "2026-01-18T01:31:40.456Z" }, - { url = "https://files.pythonhosted.org/packages/ac/3b/18a4bbfa2fbba7a45539f2fd9fc198bf483984b13b61781f905682d1992c/connectorx-0.4.5-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e605b5eca75fe63117e5fb93f94e940ede0513340671631da35bdb5a035f8163", size = 43725322, upload-time = "2026-01-18T01:30:49.412Z" }, - { url = "https://files.pythonhosted.org/packages/9a/37/d6dffc001562b7109c8a18604f5a52445187c6bede23e5b737248d172f15/connectorx-0.4.5-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f139bbfa34840b89d0a5ec760026a9268c18c63fb739568ecbc77660d3e4fc1f", size = 43757365, upload-time = "2026-01-18T01:31:07.069Z" }, - { url = "https://files.pythonhosted.org/packages/2f/ca/9ca19ce638639e5b07b96e28815f464c2eba2b876a22b3f8c0297034f172/connectorx-0.4.5-cp311-none-win_amd64.whl", hash = "sha256:234af0b6ab4a12b64e3818ebea1eb98cc8b47650280fb40924b43e2f1611acb4", size = 34650205, upload-time = "2026-01-18T01:59:35Z" }, - { url = "https://files.pythonhosted.org/packages/74/c3/77aebad14179cf1a8cfdc5e84ea1bac4efb82de270ca6fc7ff914f8ec601/connectorx-0.4.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = 
"sha256:25efda2317f40e6536582c3dd4f57a8a31c7e5969d708a674272c05591e6f5a2", size = 37900333, upload-time = "2026-01-18T01:31:27.545Z" }, - { url = "https://files.pythonhosted.org/packages/23/5c/e1e82bcb235fa52280696b01a975535800c0b8c3c12af7c5ddbb42a39010/connectorx-0.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27539e03408705f318572b163c419572a114fdc9baf4d1e6cd746bb87f573cf2", size = 35971550, upload-time = "2026-01-18T01:31:43.775Z" }, - { url = "https://files.pythonhosted.org/packages/14/be/fa9c3a14b6c10c899d0fb93f8ea549285db271c9899900f7eb21c3cdeb9a/connectorx-0.4.5-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c68cc9c6bff737d3c9fb8735b27ecc8474238ef640abb701ee0ab213c6c95f8c", size = 43720741, upload-time = "2026-01-18T01:30:52.897Z" }, - { url = "https://files.pythonhosted.org/packages/3e/4c/54cbfabd1866f3f8657e348f3a496fbde0a138d66a9f2f024b28b4db1c2e/connectorx-0.4.5-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3863bc71677d6314b60cb1e1489a650114d37d8d9f58f2df038cae4a82d2ffc5", size = 43751056, upload-time = "2026-01-18T01:31:10.534Z" }, - { url = "https://files.pythonhosted.org/packages/e0/03/350aafe6bc38a3851744bfab7c4d61bd16500ff6b20dfcb98054b7adb56d/connectorx-0.4.5-cp312-none-win_amd64.whl", hash = "sha256:0737254429e22e5012e1fe6a849112da38abb9b56743b3b8c8a1f902e5270e75", size = 34642277, upload-time = "2026-01-18T01:59:38.121Z" }, - { url = "https://files.pythonhosted.org/packages/10/de/65de3629f3bbb0597cc3393078085a27b5b52a9fa5b701a60c1c11a9868c/connectorx-0.4.5-cp313-cp313-macosx_10_7_x86_64.whl", hash = "sha256:ff2f4236a0fc14cd724b03df1f11c03b714442f4381575465f7d0f4f91135766", size = 37900697, upload-time = "2026-01-18T01:31:30.664Z" }, - { url = "https://files.pythonhosted.org/packages/88/59/6a4542bc57c53e99b366a8f377ebb8c9b9915d08a8915726c4a5f0fd8219/connectorx-0.4.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f5d4754069644a712bd3105345e4f7c680420c5bb1d1264070cda058c7f07fb3", size = 35972573, upload-time = 
"2026-01-18T01:31:46.945Z" }, - { url = "https://files.pythonhosted.org/packages/a6/8e/96abe0aecb5e121a80f64ded08c4d8b4115df85f1b246a2680c9f29d4d3d/connectorx-0.4.5-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:31a65ff4ec8fde7ea7aa2812f2b21e7a512a3216b1b22ca1b02d3975b0bf1e75", size = 43746476, upload-time = "2026-01-18T01:30:56.296Z" }, - { url = "https://files.pythonhosted.org/packages/0a/9d/3bae67718e0bfbefba41959f2f1dc0a5765392aea311b02d98457c8efd34/connectorx-0.4.5-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:ab1d62a26350055c5e901daa4d6dddb75b11addb923797158c809dffc4f0ac9e", size = 43761125, upload-time = "2026-01-18T01:31:14.253Z" }, - { url = "https://files.pythonhosted.org/packages/40/32/54a45bc796b2e5a572bb76c17ebaf971ec57bf9960ba23731d76a7f70962/connectorx-0.4.5-cp313-none-win_amd64.whl", hash = "sha256:50c20558beff2719be34ff325213526c1700c3a20743e9e0ba592774ebc9cc92", size = 34645527, upload-time = "2026-01-18T01:59:41.303Z" }, - { url = "https://files.pythonhosted.org/packages/79/76/8f89e0d1c973af07eed12c584c52c33b51dc119b7ed85e3c0f91615bfec6/connectorx-0.4.5-cp314-cp314-macosx_10_7_x86_64.whl", hash = "sha256:3ddfe372065b974365bff3b383e39c29cad468c0e7556543dd23753446c441ed", size = 37898985, upload-time = "2026-01-18T01:31:34.162Z" }, - { url = "https://files.pythonhosted.org/packages/f4/38/6243f6c83e9515ebab87c0b21be1c2599bd3f6148b8905802b12dba4bf83/connectorx-0.4.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3fd7788294417cbbb3811f8942e4fe3b4c190b80627a3c706ceae6c321824bcf", size = 35968795, upload-time = "2026-01-18T01:59:28.032Z" }, - { url = "https://files.pythonhosted.org/packages/7f/1a/031079b5c597b83df8548012095c23c10d471e6a4c29617d55dd4cf5a9b8/connectorx-0.4.5-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:38ad8a032fddf25c36c6911d857fbe54220fe28439f02a4beb273b29bdef1eb8", size = 43742341, upload-time = "2026-01-18T01:31:00.55Z" }, - { url = 
"https://files.pythonhosted.org/packages/dc/97/525b11d7e3c3a286d83dc138f4e4ef948307338527b442be8fa3b1b379d7/connectorx-0.4.5-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:cc01ca122f649e62707f49f7220ba1ae67961b260e2dcff9e8647ea9915a01cf", size = 43757296, upload-time = "2026-01-18T01:31:17.503Z" }, - { url = "https://files.pythonhosted.org/packages/57/3a/5e1a7cb3b0175c249c232f487918494ec7e26c13f3d543a55c8c7752b993/connectorx-0.4.5-cp314-none-win_amd64.whl", hash = "sha256:2073970532a8e6e2a8a2c0b163497eb8e58216e28fdab6693fcd7e58bfc47bfc", size = 34641445, upload-time = "2026-01-18T01:59:44.351Z" }, -] - [[package]] name = "cryptography" -version = "46.0.4" +version = "46.0.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/19/f748958276519adf6a0c1e79e7b8860b4830dda55ccdf29f2719b5fc499c/cryptography-46.0.4.tar.gz", hash = "sha256:bfd019f60f8abc2ed1b9be4ddc21cfef059c841d86d710bb69909a688cbb8f59", size = 749301, upload-time = "2026-01-28T00:24:37.379Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/99/157aae7949a5f30d51fcb1a9851e8ebd5c74bf99b5285d8bb4b8b9ee641e/cryptography-46.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:281526e865ed4166009e235afadf3a4c4cba6056f99336a99efba65336fd5485", size = 7173686, upload-time = "2026-01-28T00:23:07.515Z" }, - { url = "https://files.pythonhosted.org/packages/87/91/874b8910903159043b5c6a123b7e79c4559ddd1896e38967567942635778/cryptography-46.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f14fba5bf6f4390d7ff8f086c566454bff0411f6d8aa7af79c88b6f9267aecc", size = 4275871, upload-time = "2026-01-28T00:23:09.439Z" }, - { url = "https://files.pythonhosted.org/packages/c0/35/690e809be77896111f5b195ede56e4b4ed0435b428c2f2b6d35046fbb5e8/cryptography-46.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", 
hash = "sha256:47bcd19517e6389132f76e2d5303ded6cf3f78903da2158a671be8de024f4cd0", size = 4423124, upload-time = "2026-01-28T00:23:11.529Z" }, - { url = "https://files.pythonhosted.org/packages/1a/5b/a26407d4f79d61ca4bebaa9213feafdd8806dc69d3d290ce24996d3cfe43/cryptography-46.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:01df4f50f314fbe7009f54046e908d1754f19d0c6d3070df1e6268c5a4af09fa", size = 4277090, upload-time = "2026-01-28T00:23:13.123Z" }, - { url = "https://files.pythonhosted.org/packages/0c/d8/4bb7aec442a9049827aa34cee1aa83803e528fa55da9a9d45d01d1bb933e/cryptography-46.0.4-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5aa3e463596b0087b3da0dbe2b2487e9fc261d25da85754e30e3b40637d61f81", size = 4947652, upload-time = "2026-01-28T00:23:14.554Z" }, - { url = "https://files.pythonhosted.org/packages/2b/08/f83e2e0814248b844265802d081f2fac2f1cbe6cd258e72ba14ff006823a/cryptography-46.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0a9ad24359fee86f131836a9ac3bffc9329e956624a2d379b613f8f8abaf5255", size = 4455157, upload-time = "2026-01-28T00:23:16.443Z" }, - { url = "https://files.pythonhosted.org/packages/0a/05/19d849cf4096448779d2dcc9bb27d097457dac36f7273ffa875a93b5884c/cryptography-46.0.4-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:dc1272e25ef673efe72f2096e92ae39dea1a1a450dd44918b15351f72c5a168e", size = 3981078, upload-time = "2026-01-28T00:23:17.838Z" }, - { url = "https://files.pythonhosted.org/packages/e6/89/f7bac81d66ba7cde867a743ea5b37537b32b5c633c473002b26a226f703f/cryptography-46.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:de0f5f4ec8711ebc555f54735d4c673fc34b65c44283895f1a08c2b49d2fd99c", size = 4276213, upload-time = "2026-01-28T00:23:19.257Z" }, - { url = "https://files.pythonhosted.org/packages/da/9f/7133e41f24edd827020ad21b068736e792bc68eecf66d93c924ad4719fb3/cryptography-46.0.4-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:eeeb2e33d8dbcccc34d64651f00a98cb41b2dc69cef866771a5717e6734dfa32", 
size = 4912190, upload-time = "2026-01-28T00:23:21.244Z" }, - { url = "https://files.pythonhosted.org/packages/a6/f7/6d43cbaddf6f65b24816e4af187d211f0bc536a29961f69faedc48501d8e/cryptography-46.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3d425eacbc9aceafd2cb429e42f4e5d5633c6f873f5e567077043ef1b9bbf616", size = 4454641, upload-time = "2026-01-28T00:23:22.866Z" }, - { url = "https://files.pythonhosted.org/packages/9e/4f/ebd0473ad656a0ac912a16bd07db0f5d85184924e14fc88feecae2492834/cryptography-46.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91627ebf691d1ea3976a031b61fb7bac1ccd745afa03602275dda443e11c8de0", size = 4405159, upload-time = "2026-01-28T00:23:25.278Z" }, - { url = "https://files.pythonhosted.org/packages/d1/f7/7923886f32dc47e27adeff8246e976d77258fd2aa3efdd1754e4e323bf49/cryptography-46.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2d08bc22efd73e8854b0b7caff402d735b354862f1145d7be3b9c0f740fef6a0", size = 4666059, upload-time = "2026-01-28T00:23:26.766Z" }, - { url = "https://files.pythonhosted.org/packages/eb/a7/0fca0fd3591dffc297278a61813d7f661a14243dd60f499a7a5b48acb52a/cryptography-46.0.4-cp311-abi3-win32.whl", hash = "sha256:82a62483daf20b8134f6e92898da70d04d0ef9a75829d732ea1018678185f4f5", size = 3026378, upload-time = "2026-01-28T00:23:28.317Z" }, - { url = "https://files.pythonhosted.org/packages/2d/12/652c84b6f9873f0909374864a57b003686c642ea48c84d6c7e2c515e6da5/cryptography-46.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:6225d3ebe26a55dbc8ead5ad1265c0403552a63336499564675b29eb3184c09b", size = 3478614, upload-time = "2026-01-28T00:23:30.275Z" }, - { url = "https://files.pythonhosted.org/packages/b9/27/542b029f293a5cce59349d799d4d8484b3b1654a7b9a0585c266e974a488/cryptography-46.0.4-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:485e2b65d25ec0d901bca7bcae0f53b00133bf3173916d8e421f6fddde103908", size = 7116417, upload-time = "2026-01-28T00:23:31.958Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/f5/559c25b77f40b6bf828eabaf988efb8b0e17b573545edb503368ca0a2a03/cryptography-46.0.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:078e5f06bd2fa5aea5a324f2a09f914b1484f1d0c2a4d6a8a28c74e72f65f2da", size = 4264508, upload-time = "2026-01-28T00:23:34.264Z" }, - { url = "https://files.pythonhosted.org/packages/49/a1/551fa162d33074b660dc35c9bc3616fefa21a0e8c1edd27b92559902e408/cryptography-46.0.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dce1e4f068f03008da7fa51cc7abc6ddc5e5de3e3d1550334eaf8393982a5829", size = 4409080, upload-time = "2026-01-28T00:23:35.793Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/4d8d129a755f5d6df1bbee69ea2f35ebfa954fa1847690d1db2e8bca46a5/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:2067461c80271f422ee7bdbe79b9b4be54a5162e90345f86a23445a0cf3fd8a2", size = 4270039, upload-time = "2026-01-28T00:23:37.263Z" }, - { url = "https://files.pythonhosted.org/packages/4c/f5/ed3fcddd0a5e39321e595e144615399e47e7c153a1fb8c4862aec3151ff9/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:c92010b58a51196a5f41c3795190203ac52edfd5dc3ff99149b4659eba9d2085", size = 4926748, upload-time = "2026-01-28T00:23:38.884Z" }, - { url = "https://files.pythonhosted.org/packages/43/ae/9f03d5f0c0c00e85ecb34f06d3b79599f20630e4db91b8a6e56e8f83d410/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:829c2b12bbc5428ab02d6b7f7e9bbfd53e33efd6672d21341f2177470171ad8b", size = 4442307, upload-time = "2026-01-28T00:23:40.56Z" }, - { url = "https://files.pythonhosted.org/packages/8b/22/e0f9f2dae8040695103369cf2283ef9ac8abe4d51f68710bec2afd232609/cryptography-46.0.4-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:62217ba44bf81b30abaeda1488686a04a702a261e26f87db51ff61d9d3510abd", size = 3959253, upload-time = "2026-01-28T00:23:42.827Z" }, - { url = 
"https://files.pythonhosted.org/packages/01/5b/6a43fcccc51dae4d101ac7d378a8724d1ba3de628a24e11bf2f4f43cba4d/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:9c2da296c8d3415b93e6053f5a728649a87a48ce084a9aaf51d6e46c87c7f2d2", size = 4269372, upload-time = "2026-01-28T00:23:44.655Z" }, - { url = "https://files.pythonhosted.org/packages/17/b7/0f6b8c1dd0779df2b526e78978ff00462355e31c0a6f6cff8a3e99889c90/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:9b34d8ba84454641a6bf4d6762d15847ecbd85c1316c0a7984e6e4e9f748ec2e", size = 4891908, upload-time = "2026-01-28T00:23:46.48Z" }, - { url = "https://files.pythonhosted.org/packages/83/17/259409b8349aa10535358807a472c6a695cf84f106022268d31cea2b6c97/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:df4a817fa7138dd0c96c8c8c20f04b8aaa1fac3bbf610913dcad8ea82e1bfd3f", size = 4441254, upload-time = "2026-01-28T00:23:48.403Z" }, - { url = "https://files.pythonhosted.org/packages/9c/fe/e4a1b0c989b00cee5ffa0764401767e2d1cf59f45530963b894129fd5dce/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b1de0ebf7587f28f9190b9cb526e901bf448c9e6a99655d2b07fff60e8212a82", size = 4396520, upload-time = "2026-01-28T00:23:50.26Z" }, - { url = "https://files.pythonhosted.org/packages/b3/81/ba8fd9657d27076eb40d6a2f941b23429a3c3d2f56f5a921d6b936a27bc9/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9b4d17bc7bd7cdd98e3af40b441feaea4c68225e2eb2341026c84511ad246c0c", size = 4651479, upload-time = "2026-01-28T00:23:51.674Z" }, - { url = "https://files.pythonhosted.org/packages/00/03/0de4ed43c71c31e4fe954edd50b9d28d658fef56555eba7641696370a8e2/cryptography-46.0.4-cp314-cp314t-win32.whl", hash = "sha256:c411f16275b0dea722d76544a61d6421e2cc829ad76eec79280dbdc9ddf50061", size = 3001986, upload-time = "2026-01-28T00:23:53.485Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/70/81830b59df7682917d7a10f833c4dab2a5574cd664e86d18139f2b421329/cryptography-46.0.4-cp314-cp314t-win_amd64.whl", hash = "sha256:728fedc529efc1439eb6107b677f7f7558adab4553ef8669f0d02d42d7b959a7", size = 3468288, upload-time = "2026-01-28T00:23:55.09Z" }, - { url = "https://files.pythonhosted.org/packages/56/f7/f648fdbb61d0d45902d3f374217451385edc7e7768d1b03ff1d0e5ffc17b/cryptography-46.0.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a9556ba711f7c23f77b151d5798f3ac44a13455cc68db7697a1096e6d0563cab", size = 7169583, upload-time = "2026-01-28T00:23:56.558Z" }, - { url = "https://files.pythonhosted.org/packages/d8/cc/8f3224cbb2a928de7298d6ed4790f5ebc48114e02bdc9559196bfb12435d/cryptography-46.0.4-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8bf75b0259e87fa70bddc0b8b4078b76e7fd512fd9afae6c1193bcf440a4dbef", size = 4275419, upload-time = "2026-01-28T00:23:58.364Z" }, - { url = "https://files.pythonhosted.org/packages/17/43/4a18faa7a872d00e4264855134ba82d23546c850a70ff209e04ee200e76f/cryptography-46.0.4-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c268a3490df22270955966ba236d6bc4a8f9b6e4ffddb78aac535f1a5ea471d", size = 4419058, upload-time = "2026-01-28T00:23:59.867Z" }, - { url = "https://files.pythonhosted.org/packages/ee/64/6651969409821d791ba12346a124f55e1b76f66a819254ae840a965d4b9c/cryptography-46.0.4-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:812815182f6a0c1d49a37893a303b44eaac827d7f0d582cecfc81b6427f22973", size = 4278151, upload-time = "2026-01-28T00:24:01.731Z" }, - { url = "https://files.pythonhosted.org/packages/20/0b/a7fce65ee08c3c02f7a8310cc090a732344066b990ac63a9dfd0a655d321/cryptography-46.0.4-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:a90e43e3ef65e6dcf969dfe3bb40cbf5aef0d523dff95bfa24256be172a845f4", size = 4939441, upload-time = "2026-01-28T00:24:03.175Z" }, - { url = 
"https://files.pythonhosted.org/packages/db/a7/20c5701e2cd3e1dfd7a19d2290c522a5f435dd30957d431dcb531d0f1413/cryptography-46.0.4-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a05177ff6296644ef2876fce50518dffb5bcdf903c85250974fc8bc85d54c0af", size = 4451617, upload-time = "2026-01-28T00:24:05.403Z" }, - { url = "https://files.pythonhosted.org/packages/00/dc/3e16030ea9aa47b63af6524c354933b4fb0e352257c792c4deeb0edae367/cryptography-46.0.4-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:daa392191f626d50f1b136c9b4cf08af69ca8279d110ea24f5c2700054d2e263", size = 3977774, upload-time = "2026-01-28T00:24:06.851Z" }, - { url = "https://files.pythonhosted.org/packages/42/c8/ad93f14118252717b465880368721c963975ac4b941b7ef88f3c56bf2897/cryptography-46.0.4-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e07ea39c5b048e085f15923511d8121e4a9dc45cee4e3b970ca4f0d338f23095", size = 4277008, upload-time = "2026-01-28T00:24:08.926Z" }, - { url = "https://files.pythonhosted.org/packages/00/cf/89c99698151c00a4631fbfcfcf459d308213ac29e321b0ff44ceeeac82f1/cryptography-46.0.4-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:d5a45ddc256f492ce42a4e35879c5e5528c09cd9ad12420828c972951d8e016b", size = 4903339, upload-time = "2026-01-28T00:24:12.009Z" }, - { url = "https://files.pythonhosted.org/packages/03/c3/c90a2cb358de4ac9309b26acf49b2a100957e1ff5cc1e98e6c4996576710/cryptography-46.0.4-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:6bb5157bf6a350e5b28aee23beb2d84ae6f5be390b2f8ee7ea179cda077e1019", size = 4451216, upload-time = "2026-01-28T00:24:13.975Z" }, - { url = "https://files.pythonhosted.org/packages/96/2c/8d7f4171388a10208671e181ca43cdc0e596d8259ebacbbcfbd16de593da/cryptography-46.0.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd5aba870a2c40f87a3af043e0dee7d9eb02d4aff88a797b48f2b43eff8c3ab4", size = 4404299, upload-time = "2026-01-28T00:24:16.169Z" }, - { url = 
"https://files.pythonhosted.org/packages/e9/23/cbb2036e450980f65c6e0a173b73a56ff3bccd8998965dea5cc9ddd424a5/cryptography-46.0.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:93d8291da8d71024379ab2cb0b5c57915300155ad42e07f76bea6ad838d7e59b", size = 4664837, upload-time = "2026-01-28T00:24:17.629Z" }, - { url = "https://files.pythonhosted.org/packages/0a/21/f7433d18fe6d5845329cbdc597e30caf983229c7a245bcf54afecc555938/cryptography-46.0.4-cp38-abi3-win32.whl", hash = "sha256:0563655cb3c6d05fb2afe693340bc050c30f9f34e15763361cf08e94749401fc", size = 3009779, upload-time = "2026-01-28T00:24:20.198Z" }, - { url = "https://files.pythonhosted.org/packages/3a/6a/bd2e7caa2facffedf172a45c1a02e551e6d7d4828658c9a245516a598d94/cryptography-46.0.4-cp38-abi3-win_amd64.whl", hash = "sha256:fa0900b9ef9c49728887d1576fd8d9e7e3ea872fa9b25ef9b64888adc434e976", size = 3466633, upload-time = "2026-01-28T00:24:21.851Z" }, - { url = "https://files.pythonhosted.org/packages/59/e0/f9c6c53e1f2a1c2507f00f2faba00f01d2f334b35b0fbfe5286715da2184/cryptography-46.0.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:766330cce7416c92b5e90c3bb71b1b79521760cdcfc3a6a1a182d4c9fab23d2b", size = 3476316, upload-time = "2026-01-28T00:24:24.144Z" }, - { url = "https://files.pythonhosted.org/packages/27/7a/f8d2d13227a9a1a9fe9c7442b057efecffa41f1e3c51d8622f26b9edbe8f/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c236a44acfb610e70f6b3e1c3ca20ff24459659231ef2f8c48e879e2d32b73da", size = 4216693, upload-time = "2026-01-28T00:24:25.758Z" }, - { url = "https://files.pythonhosted.org/packages/c5/de/3787054e8f7972658370198753835d9d680f6cd4a39df9f877b57f0dd69c/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8a15fb869670efa8f83cbffbc8753c1abf236883225aed74cd179b720ac9ec80", size = 4382765, upload-time = "2026-01-28T00:24:27.577Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/5f/60e0afb019973ba6a0b322e86b3d61edf487a4f5597618a430a2a15f2d22/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:fdc3daab53b212472f1524d070735b2f0c214239df131903bae1d598016fa822", size = 4216066, upload-time = "2026-01-28T00:24:29.056Z" }, - { url = "https://files.pythonhosted.org/packages/81/8e/bf4a0de294f147fee66f879d9bae6f8e8d61515558e3d12785dd90eca0be/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:44cc0675b27cadb71bdbb96099cca1fa051cd11d2ade09e5cd3a2edb929ed947", size = 4382025, upload-time = "2026-01-28T00:24:30.681Z" }, - { url = "https://files.pythonhosted.org/packages/79/f4/9ceb90cfd6a3847069b0b0b353fd3075dc69b49defc70182d8af0c4ca390/cryptography-46.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:be8c01a7d5a55f9a47d1888162b76c8f49d62b234d88f0ff91a9fbebe32ffbc3", size = 3406043, upload-time = "2026-01-28T00:24:32.236Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/a4/ba/04b1bd4218cbc58dc90ce967106d51582371b898690f3ae0402876cc4f34/cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759", size = 750542, upload-time = "2026-03-25T23:34:53.396Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/23/9285e15e3bc57325b0a72e592921983a701efc1ee8f91c06c5f0235d86d9/cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8", size = 7176401, upload-time = "2026-03-25T23:33:22.096Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/e61f8f13950ab6195b31913b42d39f0f9afc7d93f76710f299b5ec286ae6/cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30", size = 4275275, upload-time = "2026-03-25T23:33:23.844Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/69/732a736d12c2631e140be2348b4ad3d226302df63ef64d30dfdb8db7ad1c/cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a", size = 4425320, upload-time = "2026-03-25T23:33:25.703Z" }, + { url = "https://files.pythonhosted.org/packages/d4/12/123be7292674abf76b21ac1fc0e1af50661f0e5b8f0ec8285faac18eb99e/cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175", size = 4278082, upload-time = "2026-03-25T23:33:27.423Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ba/d5e27f8d68c24951b0a484924a84c7cdaed7502bac9f18601cd357f8b1d2/cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463", size = 4926514, upload-time = "2026-03-25T23:33:29.206Z" }, + { url = "https://files.pythonhosted.org/packages/34/71/1ea5a7352ae516d5512d17babe7e1b87d9db5150b21f794b1377eac1edc0/cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97", size = 4457766, upload-time = "2026-03-25T23:33:30.834Z" }, + { url = "https://files.pythonhosted.org/packages/01/59/562be1e653accee4fdad92c7a2e88fced26b3fdfce144047519bbebc299e/cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c", size = 3986535, upload-time = "2026-03-25T23:33:33.02Z" }, + { url = "https://files.pythonhosted.org/packages/d6/8b/b1ebfeb788bf4624d36e45ed2662b8bd43a05ff62157093c1539c1288a18/cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507", size = 4277618, upload-time = "2026-03-25T23:33:34.567Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/52/a005f8eabdb28df57c20f84c44d397a755782d6ff6d455f05baa2785bd91/cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19", size = 4890802, upload-time = "2026-03-25T23:33:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/ec/4d/8e7d7245c79c617d08724e2efa397737715ca0ec830ecb3c91e547302555/cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738", size = 4457425, upload-time = "2026-03-25T23:33:38.904Z" }, + { url = "https://files.pythonhosted.org/packages/1d/5c/f6c3596a1430cec6f949085f0e1a970638d76f81c3ea56d93d564d04c340/cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c", size = 4405530, upload-time = "2026-03-25T23:33:40.842Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c9/9f9cea13ee2dbde070424e0c4f621c091a91ffcc504ffea5e74f0e1daeff/cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f", size = 4667896, upload-time = "2026-03-25T23:33:42.781Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b5/1895bc0821226f129bc74d00eccfc6a5969e2028f8617c09790bf89c185e/cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2", size = 3026348, upload-time = "2026-03-25T23:33:45.021Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f8/c9bcbf0d3e6ad288b9d9aa0b1dee04b063d19e8c4f871855a03ab3a297ab/cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124", size = 3483896, upload-time = "2026-03-25T23:33:46.649Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/41/3a578f7fd5c70611c0aacba52cd13cb364a5dee895a5c1d467208a9380b0/cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275", size = 7117147, upload-time = "2026-03-25T23:33:48.249Z" }, + { url = "https://files.pythonhosted.org/packages/fa/87/887f35a6fca9dde90cad08e0de0c89263a8e59b2d2ff904fd9fcd8025b6f/cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4", size = 4266221, upload-time = "2026-03-25T23:33:49.874Z" }, + { url = "https://files.pythonhosted.org/packages/aa/a8/0a90c4f0b0871e0e3d1ed126aed101328a8a57fd9fd17f00fb67e82a51ca/cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b", size = 4408952, upload-time = "2026-03-25T23:33:52.128Z" }, + { url = "https://files.pythonhosted.org/packages/16/0b/b239701eb946523e4e9f329336e4ff32b1247e109cbab32d1a7b61da8ed7/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707", size = 4270141, upload-time = "2026-03-25T23:33:54.11Z" }, + { url = "https://files.pythonhosted.org/packages/0f/a8/976acdd4f0f30df7b25605f4b9d3d89295351665c2091d18224f7ad5cdbf/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361", size = 4904178, upload-time = "2026-03-25T23:33:55.725Z" }, + { url = "https://files.pythonhosted.org/packages/b1/1b/bf0e01a88efd0e59679b69f42d4afd5bced8700bb5e80617b2d63a3741af/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b", size = 4441812, upload-time = "2026-03-25T23:33:57.364Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/8b/11df86de2ea389c65aa1806f331cae145f2ed18011f30234cc10ca253de8/cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca", size = 3963923, upload-time = "2026-03-25T23:33:59.361Z" }, + { url = "https://files.pythonhosted.org/packages/91/e0/207fb177c3a9ef6a8108f234208c3e9e76a6aa8cf20d51932916bd43bda0/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013", size = 4269695, upload-time = "2026-03-25T23:34:00.909Z" }, + { url = "https://files.pythonhosted.org/packages/21/5e/19f3260ed1e95bced52ace7501fabcd266df67077eeb382b79c81729d2d3/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4", size = 4869785, upload-time = "2026-03-25T23:34:02.796Z" }, + { url = "https://files.pythonhosted.org/packages/10/38/cd7864d79aa1d92ef6f1a584281433419b955ad5a5ba8d1eb6c872165bcb/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a", size = 4441404, upload-time = "2026-03-25T23:34:04.35Z" }, + { url = "https://files.pythonhosted.org/packages/09/0a/4fe7a8d25fed74419f91835cf5829ade6408fd1963c9eae9c4bce390ecbb/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d", size = 4397549, upload-time = "2026-03-25T23:34:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a0/7d738944eac6513cd60a8da98b65951f4a3b279b93479a7e8926d9cd730b/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736", size = 4651874, upload-time = "2026-03-25T23:34:07.916Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/f1/c2326781ca05208845efca38bf714f76939ae446cd492d7613808badedf1/cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed", size = 3001511, upload-time = "2026-03-25T23:34:09.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/57/fe4a23eb549ac9d903bd4698ffda13383808ef0876cc912bcb2838799ece/cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4", size = 3471692, upload-time = "2026-03-25T23:34:11.613Z" }, + { url = "https://files.pythonhosted.org/packages/c4/cc/f330e982852403da79008552de9906804568ae9230da8432f7496ce02b71/cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a", size = 7162776, upload-time = "2026-03-25T23:34:13.308Z" }, + { url = "https://files.pythonhosted.org/packages/49/b3/dc27efd8dcc4bff583b3f01d4a3943cd8b5821777a58b3a6a5f054d61b79/cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8", size = 4270529, upload-time = "2026-03-25T23:34:15.019Z" }, + { url = "https://files.pythonhosted.org/packages/e6/05/e8d0e6eb4f0d83365b3cb0e00eb3c484f7348db0266652ccd84632a3d58d/cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77", size = 4414827, upload-time = "2026-03-25T23:34:16.604Z" }, + { url = "https://files.pythonhosted.org/packages/2f/97/daba0f5d2dc6d855e2dcb70733c812558a7977a55dd4a6722756628c44d1/cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290", size = 4271265, upload-time = "2026-03-25T23:34:18.586Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/06/fe1fce39a37ac452e58d04b43b0855261dac320a2ebf8f5260dd55b201a9/cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410", size = 4916800, upload-time = "2026-03-25T23:34:20.561Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8a/b14f3101fe9c3592603339eb5d94046c3ce5f7fc76d6512a2d40efd9724e/cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d", size = 4448771, upload-time = "2026-03-25T23:34:22.406Z" }, + { url = "https://files.pythonhosted.org/packages/01/b3/0796998056a66d1973fd52ee89dc1bb3b6581960a91ad4ac705f182d398f/cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70", size = 3978333, upload-time = "2026-03-25T23:34:24.281Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3d/db200af5a4ffd08918cd55c08399dc6c9c50b0bc72c00a3246e099d3a849/cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d", size = 4271069, upload-time = "2026-03-25T23:34:25.895Z" }, + { url = "https://files.pythonhosted.org/packages/d7/18/61acfd5b414309d74ee838be321c636fe71815436f53c9f0334bf19064fa/cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa", size = 4878358, upload-time = "2026-03-25T23:34:27.67Z" }, + { url = "https://files.pythonhosted.org/packages/8b/65/5bf43286d566f8171917cae23ac6add941654ccf085d739195a4eacf1674/cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58", size = 4448061, upload-time = "2026-03-25T23:34:29.375Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/25/7e49c0fa7205cf3597e525d156a6bce5b5c9de1fd7e8cb01120e459f205a/cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb", size = 4399103, upload-time = "2026-03-25T23:34:32.036Z" }, + { url = "https://files.pythonhosted.org/packages/44/46/466269e833f1c4718d6cd496ffe20c56c9c8d013486ff66b4f69c302a68d/cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72", size = 4659255, upload-time = "2026-03-25T23:34:33.679Z" }, + { url = "https://files.pythonhosted.org/packages/0a/09/ddc5f630cc32287d2c953fc5d32705e63ec73e37308e5120955316f53827/cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c", size = 3010660, upload-time = "2026-03-25T23:34:35.418Z" }, + { url = "https://files.pythonhosted.org/packages/1b/82/ca4893968aeb2709aacfb57a30dec6fa2ab25b10fa9f064b8882ce33f599/cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f", size = 3471160, upload-time = "2026-03-25T23:34:37.191Z" }, + { url = "https://files.pythonhosted.org/packages/2e/84/7ccff00ced5bac74b775ce0beb7d1be4e8637536b522b5df9b73ada42da2/cryptography-46.0.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2ea0f37e9a9cf0df2952893ad145fd9627d326a59daec9b0802480fa3bcd2ead", size = 3475444, upload-time = "2026-03-25T23:34:38.944Z" }, + { url = "https://files.pythonhosted.org/packages/bc/1f/4c926f50df7749f000f20eede0c896769509895e2648db5da0ed55db711d/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a3e84d5ec9ba01f8fd03802b2147ba77f0c8f2617b2aff254cedd551844209c8", size = 4218227, upload-time = "2026-03-25T23:34:40.871Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/65/707be3ffbd5f786028665c3223e86e11c4cda86023adbc56bd72b1b6bab5/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:12f0fa16cc247b13c43d56d7b35287ff1569b5b1f4c5e87e92cc4fcc00cd10c0", size = 4381399, upload-time = "2026-03-25T23:34:42.609Z" }, + { url = "https://files.pythonhosted.org/packages/f3/6d/73557ed0ef7d73d04d9aba745d2c8e95218213687ee5e76b7d236a5030fc/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:50575a76e2951fe7dbd1f56d181f8c5ceeeb075e9ff88e7ad997d2f42af06e7b", size = 4217595, upload-time = "2026-03-25T23:34:44.205Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c5/e1594c4eec66a567c3ac4400008108a415808be2ce13dcb9a9045c92f1a0/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:90e5f0a7b3be5f40c3a0a0eafb32c681d8d2c181fc2a1bdabe9b3f611d9f6b1a", size = 4380912, upload-time = "2026-03-25T23:34:46.328Z" }, + { url = "https://files.pythonhosted.org/packages/1a/89/843b53614b47f97fe1abc13f9a86efa5ec9e275292c457af1d4a60dc80e0/cryptography-46.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6728c49e3b2c180ef26f8e9f0a883a2c585638db64cf265b49c9ba10652d430e", size = 3409955, upload-time = "2026-03-25T23:34:48.465Z" }, ] [[package]] @@ -714,6 +700,7 @@ name = "data-formulator" version = "0.7.0a1" source = { editable = "." 
} dependencies = [ + { name = "azure-cosmos" }, { name = "azure-identity" }, { name = "azure-keyvault-secrets" }, { name = "azure-kusto-data" }, @@ -721,7 +708,6 @@ dependencies = [ { name = "backoff" }, { name = "beautifulsoup4" }, { name = "boto3" }, - { name = "connectorx" }, { name = "db-dtypes" }, { name = "duckdb" }, { name = "flask" }, @@ -733,25 +719,33 @@ dependencies = [ { name = "litellm" }, { name = "numpy" }, { name = "openai" }, + { name = "openpyxl" }, { name = "pandas" }, + { name = "psycopg2-binary" }, { name = "pyarrow" }, + { name = "pyjwt", extra = ["crypto"] }, { name = "pymongo" }, { name = "pymysql" }, { name = "pyodbc" }, { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "requests" }, { name = "scikit-learn" }, { name = "vega-datasets" }, { name = "vl-convert-python" }, + { name = "xlrd" }, { name = "yfinance" }, ] [package.dev-dependencies] dev = [ { name = "build" }, + { name = "pytest" }, ] [package.metadata] requires-dist = [ + { name = "azure-cosmos" }, { name = "azure-identity" }, { name = "azure-keyvault-secrets" }, { name = "azure-kusto-data" }, @@ -759,7 +753,6 @@ requires-dist = [ { name = "backoff" }, { name = "beautifulsoup4" }, { name = "boto3" }, - { name = "connectorx", specifier = ">=0.4.5" }, { name = "db-dtypes" }, { name = "duckdb" }, { name = "flask" }, @@ -771,20 +764,29 @@ requires-dist = [ { name = "litellm" }, { name = "numpy" }, { name = "openai" }, + { name = "openpyxl", specifier = ">=3.1.0" }, { name = "pandas" }, - { name = "pyarrow", specifier = ">=23.0.0" }, + { name = "psycopg2-binary" }, + { name = "pyarrow", specifier = ">=13.0.0" }, + { name = "pyjwt", extras = ["crypto"], specifier = ">=2.8.0" }, { name = "pymongo" }, { name = "pymysql" }, { name = "pyodbc" }, { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "requests" }, { name = "scikit-learn" }, { name = "vega-datasets" }, { name = "vl-convert-python" }, + { name = "xlrd" }, { name = "yfinance" }, ] 
[package.metadata.requires-dev] -dev = [{ name = "build" }] +dev = [ + { name = "build" }, + { name = "pytest" }, +] [[package]] name = "db-dtypes" @@ -910,6 +912,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dd/2d/13e6024e613679d8a489dd922f199ef4b1d08a456a58eadd96dc2f05171f/duckdb-1.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:53cd6423136ab44383ec9955aefe7599b3fb3dd1fe006161e6396d8167e0e0d4", size = 13458633, upload-time = "2026-01-26T11:50:17.657Z" }, ] +[[package]] +name = "et-xmlfile" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234, upload-time = "2024-10-25T17:25:40.039Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059, upload-time = "2024-10-25T17:25:39.051Z" }, +] + [[package]] name = "executing" version = "2.2.1" @@ -1530,14 +1541,23 @@ wheels = [ [[package]] name = "importlib-metadata" -version = "8.7.1" +version = "8.5.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/12/33e59336dca5be0c398a7482335911a33aa0e20776128f038019f1a95f1b/importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7", size = 55304, 
upload-time = "2024-09-11T14:56:08.937Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/d9/a1e041c5e7caa9a05c925f4bdbdfb7f006d1f74996af53467bc394c97be7/importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", size = 26514, upload-time = "2024-09-11T14:56:07.019Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] [[package]] @@ -1791,7 +1811,7 @@ wheels = [ [[package]] name = "jsonschema" -version = "4.26.0" +version = "4.23.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -1799,9 +1819,9 @@ dependencies = [ { name = "referencing" }, { name = "rpds-py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583, upload-time = "2026-01-07T13:41:07.246Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/38/2e/03362ee4034a4c917f697890ccd4aec0800ccf9ded7f511971c75451deec/jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4", size = 325778, upload-time = "2024-07-08T18:40:05.546Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" }, + { url = "https://files.pythonhosted.org/packages/69/4a/4f9dbeb84e8850557c02365a0eee0649abe5eb1d84af92a25731c6c0f922/jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566", size = 88462, upload-time = "2024-07-08T18:40:00.165Z" }, ] [package.optional-dependencies] @@ -1812,7 +1832,6 @@ format-nongpl = [ { name = "jsonpointer" }, { name = "rfc3339-validator" }, { name = "rfc3986-validator" }, - { name = "rfc3987-syntax" }, { name = "uri-template" }, { name = "webcolors" }, ] @@ -2028,15 +2047,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ab/b5/36c712098e6191d1b4e349304ef73a8d06aed77e56ceaac8c0a306c7bda1/jupyterlab_widgets-3.0.16-py3-none-any.whl", hash = "sha256:45fa36d9c6422cf2559198e4db481aa243c7a32d9926b500781c830c80f7ecf8", size = 914926, upload-time = "2025-11-01T21:11:28.008Z" }, ] -[[package]] -name = "lark" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/da/34/28fff3ab31ccff1fd4f6c7c7b0ceb2b6968d8ea4950663eadcb5720591a0/lark-1.3.1.tar.gz", hash = "sha256:b426a7a6d6d53189d318f2b6236ab5d6429eaf09259f1ca33eb716eed10d2905", size = 382732, upload-time = "2025-10-27T18:25:56.653Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/82/3d/14ce75ef66813643812f3093ab17e46d3a206942ce7376d31ec2d36229e7/lark-1.3.1-py3-none-any.whl", hash = "sha256:c629b661023a014c37da873b4ff58a817398d12635d3bbb2c5a03be7fe5d1e12", size = 113151, upload-time = "2025-10-27T18:25:54.882Z" }, -] - [[package]] name = "limits" version = "5.6.0" @@ -2053,7 +2063,7 @@ wheels = [ [[package]] name = "litellm" -version = "1.81.6" +version = "1.83.8" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, @@ -2069,9 +2079,9 @@ dependencies = [ { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2e/f3/194a2dca6cb3eddb89f4bc2920cf5e27542256af907c23be13c61fe7e021/litellm-1.81.6.tar.gz", hash = "sha256:f02b503dfb7d66d1c939f82e4db21aeec1d6e2ed1fe3f5cd02aaec3f792bc4ae", size = 13878107, upload-time = "2026-02-01T04:02:27.36Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/5a/a7b4b4bf9443b1f1d8fb1e1ed7d1936eca93851ff3e43113c3dad17c6556/litellm-1.83.8.tar.gz", hash = "sha256:38db022b4bf5a51cbe597a8308e6e51eb71254ae684d41aa210b76df0c827063", size = 14751978, upload-time = "2026-04-15T03:37:51.462Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/05/3516cc7386b220d388aa0bd833308c677e94eceb82b2756dd95e06f6a13f/litellm-1.81.6-py3-none-any.whl", hash = "sha256:573206ba194d49a1691370ba33f781671609ac77c35347f8a0411d852cf6341a", size = 12224343, upload-time = "2026-02-01T04:02:23.704Z" }, + { url = "https://files.pythonhosted.org/packages/f0/02/ee86522b2cb359079596d224db9b23dc12c02d7eeaf3d458abd7a0c54444/litellm-1.83.8-py3-none-any.whl", hash = "sha256:3bc8cfeff9d73a6a11409006c0d66bafeed9a23db65f642000f72f1cdb2e9ce8", size = 16333221, upload-time = "2026-04-15T03:37:47.934Z" }, ] [[package]] @@ -2491,7 +2501,7 @@ wheels = [ [[package]] name = "openai" -version = "2.16.0" +version = "2.24.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ 
-2503,9 +2513,21 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/6c/e4c964fcf1d527fdf4739e7cc940c60075a4114d50d03871d5d5b1e13a88/openai-2.16.0.tar.gz", hash = "sha256:42eaa22ca0d8ded4367a77374104d7a2feafee5bd60a107c3c11b5243a11cd12", size = 629649, upload-time = "2026-01-27T23:28:02.579Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/13/17e87641b89b74552ed408a92b231283786523edddc95f3545809fab673c/openai-2.24.0.tar.gz", hash = "sha256:1e5769f540dbd01cb33bc4716a23e67b9d695161a734aff9c5f925e2bf99a673", size = 658717, upload-time = "2026-02-24T20:02:07.958Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/16/83/0315bf2cfd75a2ce8a7e54188e9456c60cec6c0cf66728ed07bd9859ff26/openai-2.16.0-py3-none-any.whl", hash = "sha256:5f46643a8f42899a84e80c38838135d7038e7718333ce61396994f887b09a59b", size = 1068612, upload-time = "2026-01-27T23:28:00.356Z" }, + { url = "https://files.pythonhosted.org/packages/c9/30/844dc675ee6902579b8eef01ed23917cc9319a1c9c0c14ec6e39340c96d0/openai-2.24.0-py3-none-any.whl", hash = "sha256:fed30480d7d6c884303287bde864980a4b137b60553ffbcf9ab4a233b7a73d94", size = 1120122, upload-time = "2026-02-24T20:02:05.669Z" }, +] + +[[package]] +name = "openpyxl" +version = "3.1.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "et-xmlfile" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/f9/88d94a75de065ea32619465d2f77b29a0469500e99012523b91cc4141cd1/openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050", size = 186464, upload-time = "2024-06-28T14:03:44.161Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910, upload-time = 
"2024-06-28T14:03:41.161Z" }, ] [[package]] @@ -2637,6 +2659,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, ] +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + [[package]] name = "prometheus-client" version = "0.24.1" @@ -2812,6 +2843,58 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8c/c7/7bb2e321574b10df20cbde462a94e2b71d05f9bbda251ef27d104668306a/psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee", size = 134617, upload-time = "2026-01-28T18:15:36.514Z" }, ] +[[package]] +name = "psycopg2-binary" +version = "2.9.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" }, + { url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957, upload-time = "2025-10-10T11:11:16.932Z" }, + { url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" }, + { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" }, + { url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/0d/61/4aa89eeb6d751f05178a13da95516c036e27468c5d4d2509bb1e15341c81/psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb", size = 3981881, upload-time = 
"2025-10-30T02:55:07.332Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" }, + { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" }, + { url = "https://files.pythonhosted.org/packages/6d/c2/782a3c64403d8ce35b5c50e1b684412cf94f171dc18111be8c976abd2de1/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f", size = 3043477, upload-time = "2025-10-30T02:55:11.182Z" }, + { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = "2025-10-10T11:11:45.366Z" }, + { url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" }, + { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, + { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, + { url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083, upload-time = "2025-10-30T02:55:15.73Z" }, + { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = 
"2025-10-10T11:12:17.713Z" }, + { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, + { url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641, upload-time = "2025-10-30T02:55:19.929Z" }, + { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, + { url = "https://files.pythonhosted.org/packages/ff/a8/a2709681b3ac11b0b1786def10006b8995125ba268c9a54bea6f5ae8bd3e/psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c", size = 3756572, upload-time = "2025-10-10T11:12:32.873Z" }, + { url = "https://files.pythonhosted.org/packages/62/e1/c2b38d256d0dafd32713e9f31982a5b028f4a3651f446be70785f484f472/psycopg2_binary-2.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:366df99e710a2acd90efed3764bb1e28df6c675d33a7fb40df9b7281694432ee", size = 3864529, upload-time = "2025-10-10T11:12:36.791Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/32/b2ffe8f3853c181e88f0a157c5fb4e383102238d73c52ac6d93a5c8bffe6/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0", size = 4411242, upload-time = "2025-10-10T11:12:42.388Z" }, + { url = "https://files.pythonhosted.org/packages/10/04/6ca7477e6160ae258dc96f67c371157776564679aefd247b66f4661501a2/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766", size = 4468258, upload-time = "2025-10-10T11:12:48.654Z" }, + { url = "https://files.pythonhosted.org/packages/3c/7e/6a1a38f86412df101435809f225d57c1a021307dd0689f7a5e7fe83588b1/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3", size = 4166295, upload-time = "2025-10-10T11:12:52.525Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7d/c07374c501b45f3579a9eb761cbf2604ddef3d96ad48679112c2c5aa9c25/psycopg2_binary-2.9.11-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84011ba3109e06ac412f95399b704d3d6950e386b7994475b231cf61eec2fc1f", size = 3983133, upload-time = "2025-10-30T02:55:24.329Z" }, + { url = "https://files.pythonhosted.org/packages/82/56/993b7104cb8345ad7d4516538ccf8f0d0ac640b1ebd8c754a7b024e76878/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4", size = 3652383, upload-time = "2025-10-10T11:12:56.387Z" }, + { url = "https://files.pythonhosted.org/packages/2d/ac/eaeb6029362fd8d454a27374d84c6866c82c33bfc24587b4face5a8e43ef/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c", size = 3298168, upload-time = 
"2025-10-10T11:13:00.403Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/50c3facc66bded9ada5cbc0de867499a703dc6bca6be03070b4e3b65da6c/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:d526864e0f67f74937a8fce859bd56c979f5e2ec57ca7c627f5f1071ef7fee60", size = 3044712, upload-time = "2025-10-30T02:55:27.975Z" }, + { url = "https://files.pythonhosted.org/packages/9c/8e/b7de019a1f562f72ada81081a12823d3c1590bedc48d7d2559410a2763fe/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1", size = 3347549, upload-time = "2025-10-10T11:13:03.971Z" }, + { url = "https://files.pythonhosted.org/packages/80/2d/1bb683f64737bbb1f86c82b7359db1eb2be4e2c0c13b947f80efefa7d3e5/psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa", size = 2714215, upload-time = "2025-10-10T11:13:07.14Z" }, + { url = "https://files.pythonhosted.org/packages/64/12/93ef0098590cf51d9732b4f139533732565704f45bdc1ffa741b7c95fb54/psycopg2_binary-2.9.11-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:92e3b669236327083a2e33ccfa0d320dd01b9803b3e14dd986a4fc54aa00f4e1", size = 3756567, upload-time = "2025-10-10T11:13:11.885Z" }, + { url = "https://files.pythonhosted.org/packages/7c/a9/9d55c614a891288f15ca4b5209b09f0f01e3124056924e17b81b9fa054cc/psycopg2_binary-2.9.11-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e0deeb03da539fa3577fcb0b3f2554a97f7e5477c246098dbb18091a4a01c16f", size = 3864755, upload-time = "2025-10-10T11:13:17.727Z" }, + { url = "https://files.pythonhosted.org/packages/13/1e/98874ce72fd29cbde93209977b196a2edae03f8490d1bd8158e7f1daf3a0/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b52a3f9bb540a3e4ec0f6ba6d31339727b2950c9772850d6545b7eae0b9d7c5", size = 4411646, upload-time = "2025-10-10T11:13:24.432Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/bd/a335ce6645334fb8d758cc358810defca14a1d19ffbc8a10bd38a2328565/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:db4fd476874ccfdbb630a54426964959e58da4c61c9feba73e6094d51303d7d8", size = 4468701, upload-time = "2025-10-10T11:13:29.266Z" }, + { url = "https://files.pythonhosted.org/packages/44/d6/c8b4f53f34e295e45709b7568bf9b9407a612ea30387d35eb9fa84f269b4/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47f212c1d3be608a12937cc131bd85502954398aaa1320cb4c14421a0ffccf4c", size = 4166293, upload-time = "2025-10-10T11:13:33.336Z" }, + { url = "https://files.pythonhosted.org/packages/4b/e0/f8cc36eadd1b716ab36bb290618a3292e009867e5c97ce4aba908cb99644/psycopg2_binary-2.9.11-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e35b7abae2b0adab776add56111df1735ccc71406e56203515e228a8dc07089f", size = 3983184, upload-time = "2025-10-30T02:55:32.483Z" }, + { url = "https://files.pythonhosted.org/packages/53/3e/2a8fe18a4e61cfb3417da67b6318e12691772c0696d79434184a511906dc/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fcf21be3ce5f5659daefd2b3b3b6e4727b028221ddc94e6c1523425579664747", size = 3652650, upload-time = "2025-10-10T11:13:38.181Z" }, + { url = "https://files.pythonhosted.org/packages/76/36/03801461b31b29fe58d228c24388f999fe814dfc302856e0d17f97d7c54d/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:9bd81e64e8de111237737b29d68039b9c813bdf520156af36d26819c9a979e5f", size = 3298663, upload-time = "2025-10-10T11:13:44.878Z" }, + { url = "https://files.pythonhosted.org/packages/97/77/21b0ea2e1a73aa5fa9222b2a6b8ba325c43c3a8d54272839c991f2345656/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:32770a4d666fbdafab017086655bcddab791d7cb260a16679cc5a7338b64343b", size = 3044737, upload-time = "2025-10-30T02:55:35.69Z" }, + { 
url = "https://files.pythonhosted.org/packages/67/69/f36abe5f118c1dca6d3726ceae164b9356985805480731ac6712a63f24f0/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3cb3a676873d7506825221045bd70e0427c905b9c8ee8d6acd70cfcbd6e576d", size = 3347643, upload-time = "2025-10-10T11:13:53.499Z" }, + { url = "https://files.pythonhosted.org/packages/e1/36/9c0c326fe3a4227953dfb29f5d0c8ae3b8eb8c1cd2967aa569f50cb3c61f/psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316", size = 2803913, upload-time = "2025-10-10T11:13:57.058Z" }, +] + [[package]] name = "ptyprocess" version = "0.7.0" @@ -2882,11 +2965,11 @@ wheels = [ [[package]] name = "pyasn1" -version = "0.6.2" +version = "0.6.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/b6/6e630dff89739fcd427e3f72b3d905ce0acb85a45d4ec3e2678718a3487f/pyasn1-0.6.2.tar.gz", hash = "sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b", size = 146586, upload-time = "2026-01-16T18:04:18.534Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/5f/6583902b6f79b399c9c40674ac384fd9cd77805f9e6205075f828ef11fb2/pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf", size = 148685, upload-time = "2026-03-17T01:06:53.382Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl", hash = "sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf", size = 83371, upload-time = "2026-01-16T18:04:17.174Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a0/7d793dce3fa811fe047d6ae2431c672364b462850c6235ae306c0efd025f/pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde", size = 83997, upload-time = "2026-03-17T01:06:52.036Z" }, 
] [[package]] @@ -3024,20 +3107,20 @@ wheels = [ [[package]] name = "pygments" -version = "2.19.2" +version = "2.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, ] [[package]] name = "pyjwt" -version = "2.11.0" +version = "2.12.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5c/5a/b46fa56bf322901eee5b0454a34343cdbdae202cd421775a8ee4e42fd519/pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623", size = 98019, upload-time = "2026-01-30T19:59:55.694Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/10/e8192be5f38f3e8e7e046716de4cae33d56fd5ae08927a823bb916be36c1/pyjwt-2.12.0.tar.gz", hash = "sha256:2f62390b667cd8257de560b850bb5a883102a388829274147f1d724453f8fb02", size = 
102511, upload-time = "2026-03-12T17:15:30.831Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469", size = 28224, upload-time = "2026-01-30T19:59:54.539Z" }, + { url = "https://files.pythonhosted.org/packages/15/70/70f895f404d363d291dcf62c12c85fdd47619ad9674ac0f53364d035925a/pyjwt-2.12.0-py3-none-any.whl", hash = "sha256:9bb459d1bdd0387967d287f5656bf7ec2b9a26645d1961628cda1764e087fd6e", size = 29700, upload-time = "2026-03-12T17:15:29.257Z" }, ] [package.optional-dependencies] @@ -3177,6 +3260,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bd/24/12818598c362d7f300f18e74db45963dbcb85150324092410c8b49405e42/pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913", size = 10216, upload-time = "2024-09-29T09:24:11.978Z" }, ] +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -3191,11 +3290,11 @@ wheels = [ [[package]] name 
= "python-dotenv" -version = "1.2.1" +version = "1.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115, upload-time = "2024-01-23T06:33:00.505Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload-time = "2024-01-23T06:32:58.246Z" }, ] [[package]] @@ -3497,18 +3596,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9e/51/17023c0f8f1869d8806b979a2bffa3f861f26a3f1a66b094288323fba52f/rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9", size = 4242, upload-time = "2019-10-28T16:00:13.976Z" }, ] -[[package]] -name = "rfc3987-syntax" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "lark" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2c/06/37c1a5557acf449e8e406a830a05bf885ac47d33270aec454ef78675008d/rfc3987_syntax-1.1.0.tar.gz", hash = 
"sha256:717a62cbf33cffdd16dfa3a497d81ce48a660ea691b1ddd7be710c22f00b4a0d", size = 14239, upload-time = "2025-07-18T01:05:05.015Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/71/44ce230e1b7fadd372515a97e32a83011f906ddded8d03e3c6aafbdedbb7/rfc3987_syntax-1.1.0-py3-none-any.whl", hash = "sha256:6c3d97604e4c5ce9f714898e05401a0445a641cfa276432b0a648c80856f6a3f", size = 8046, upload-time = "2025-07-18T01:05:03.843Z" }, -] - [[package]] name = "rpds-py" version = "0.30.0" @@ -3947,21 +4034,19 @@ wheels = [ [[package]] name = "tornado" -version = "6.5.4" +version = "6.5.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/37/1d/0a336abf618272d53f62ebe274f712e213f5a03c0b2339575430b8362ef2/tornado-6.5.4.tar.gz", hash = "sha256:a22fa9047405d03260b483980635f0b041989d8bcc9a313f8fe18b411d84b1d7", size = 513632, upload-time = "2025-12-15T19:21:03.836Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/f1/3173dfa4a18db4a9b03e5d55325559dab51ee653763bb8745a75af491286/tornado-6.5.5.tar.gz", hash = "sha256:192b8f3ea91bd7f1f50c06955416ed76c6b72f96779b962f07f911b91e8d30e9", size = 516006, upload-time = "2026-03-10T21:31:02.067Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/a9/e94a9d5224107d7ce3cc1fab8d5dc97f5ea351ccc6322ee4fb661da94e35/tornado-6.5.4-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d6241c1a16b1c9e4cc28148b1cda97dd1c6cb4fb7068ac1bedc610768dff0ba9", size = 443909, upload-time = "2025-12-15T19:20:48.382Z" }, - { url = "https://files.pythonhosted.org/packages/db/7e/f7b8d8c4453f305a51f80dbb49014257bb7d28ccb4bbb8dd328ea995ecad/tornado-6.5.4-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2d50f63dda1d2cac3ae1fa23d254e16b5e38153758470e9956cbc3d813d40843", size = 442163, upload-time = "2025-12-15T19:20:49.791Z" }, - { url = 
"https://files.pythonhosted.org/packages/ba/b5/206f82d51e1bfa940ba366a8d2f83904b15942c45a78dd978b599870ab44/tornado-6.5.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1cf66105dc6acb5af613c054955b8137e34a03698aa53272dbda4afe252be17", size = 445746, upload-time = "2025-12-15T19:20:51.491Z" }, - { url = "https://files.pythonhosted.org/packages/8e/9d/1a3338e0bd30ada6ad4356c13a0a6c35fbc859063fa7eddb309183364ac1/tornado-6.5.4-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50ff0a58b0dc97939d29da29cd624da010e7f804746621c78d14b80238669335", size = 445083, upload-time = "2025-12-15T19:20:52.778Z" }, - { url = "https://files.pythonhosted.org/packages/50/d4/e51d52047e7eb9a582da59f32125d17c0482d065afd5d3bc435ff2120dc5/tornado-6.5.4-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5fb5e04efa54cf0baabdd10061eb4148e0be137166146fff835745f59ab9f7f", size = 445315, upload-time = "2025-12-15T19:20:53.996Z" }, - { url = "https://files.pythonhosted.org/packages/27/07/2273972f69ca63dbc139694a3fc4684edec3ea3f9efabf77ed32483b875c/tornado-6.5.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9c86b1643b33a4cd415f8d0fe53045f913bf07b4a3ef646b735a6a86047dda84", size = 446003, upload-time = "2025-12-15T19:20:56.101Z" }, - { url = "https://files.pythonhosted.org/packages/d1/83/41c52e47502bf7260044413b6770d1a48dda2f0246f95ee1384a3cd9c44a/tornado-6.5.4-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:6eb82872335a53dd063a4f10917b3efd28270b56a33db69009606a0312660a6f", size = 445412, upload-time = "2025-12-15T19:20:57.398Z" }, - { url = "https://files.pythonhosted.org/packages/10/c7/bc96917f06cbee182d44735d4ecde9c432e25b84f4c2086143013e7b9e52/tornado-6.5.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6076d5dda368c9328ff41ab5d9dd3608e695e8225d1cd0fd1e006f05da3635a8", size = 445392, upload-time = "2025-12-15T19:20:58.692Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/1a/d7592328d037d36f2d2462f4bc1fbb383eec9278bc786c1b111cbbd44cfa/tornado-6.5.4-cp39-abi3-win32.whl", hash = "sha256:1768110f2411d5cd281bac0a090f707223ce77fd110424361092859e089b38d1", size = 446481, upload-time = "2025-12-15T19:21:00.008Z" }, - { url = "https://files.pythonhosted.org/packages/d6/6d/c69be695a0a64fd37a97db12355a035a6d90f79067a3cf936ec2b1dc38cd/tornado-6.5.4-cp39-abi3-win_amd64.whl", hash = "sha256:fa07d31e0cd85c60713f2b995da613588aa03e1303d75705dca6af8babc18ddc", size = 446886, upload-time = "2025-12-15T19:21:01.287Z" }, - { url = "https://files.pythonhosted.org/packages/50/49/8dc3fd90902f70084bd2cd059d576ddb4f8bb44c2c7c0e33a11422acb17e/tornado-6.5.4-cp39-abi3-win_arm64.whl", hash = "sha256:053e6e16701eb6cbe641f308f4c1a9541f91b6261991160391bfc342e8a551a1", size = 445910, upload-time = "2025-12-15T19:21:02.571Z" }, + { url = "https://files.pythonhosted.org/packages/59/8c/77f5097695f4dd8255ecbd08b2a1ed8ba8b953d337804dd7080f199e12bf/tornado-6.5.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:487dc9cc380e29f58c7ab88f9e27cdeef04b2140862e5076a66fb6bb68bb1bfa", size = 445983, upload-time = "2026-03-10T21:30:44.28Z" }, + { url = "https://files.pythonhosted.org/packages/ab/5e/7625b76cd10f98f1516c36ce0346de62061156352353ef2da44e5c21523c/tornado-6.5.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:65a7f1d46d4bb41df1ac99f5fcb685fb25c7e61613742d5108b010975a9a6521", size = 444246, upload-time = "2026-03-10T21:30:46.571Z" }, + { url = "https://files.pythonhosted.org/packages/b2/04/7b5705d5b3c0fab088f434f9c83edac1573830ca49ccf29fb83bf7178eec/tornado-6.5.5-cp39-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e74c92e8e65086b338fd56333fb9a68b9f6f2fe7ad532645a290a464bcf46be5", size = 447229, upload-time = "2026-03-10T21:30:48.273Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/01/74e034a30ef59afb4097ef8659515e96a39d910b712a89af76f5e4e1f93c/tornado-6.5.5-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:435319e9e340276428bbdb4e7fa732c2d399386d1de5686cb331ec8eee754f07", size = 448192, upload-time = "2026-03-10T21:30:51.22Z" }, + { url = "https://files.pythonhosted.org/packages/be/00/fe9e02c5a96429fce1a1d15a517f5d8444f9c412e0bb9eadfbe3b0fc55bf/tornado-6.5.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3f54aa540bdbfee7b9eb268ead60e7d199de5021facd276819c193c0fb28ea4e", size = 448039, upload-time = "2026-03-10T21:30:53.52Z" }, + { url = "https://files.pythonhosted.org/packages/82/9e/656ee4cec0398b1d18d0f1eb6372c41c6b889722641d84948351ae19556d/tornado-6.5.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:36abed1754faeb80fbd6e64db2758091e1320f6bba74a4cf8c09cd18ccce8aca", size = 447445, upload-time = "2026-03-10T21:30:55.541Z" }, + { url = "https://files.pythonhosted.org/packages/5a/76/4921c00511f88af86a33de770d64141170f1cfd9c00311aea689949e274e/tornado-6.5.5-cp39-abi3-win32.whl", hash = "sha256:dd3eafaaeec1c7f2f8fdcd5f964e8907ad788fe8a5a32c4426fbbdda621223b7", size = 448582, upload-time = "2026-03-10T21:30:57.142Z" }, + { url = "https://files.pythonhosted.org/packages/2c/23/f6c6112a04d28eed765e374435fb1a9198f73e1ec4b4024184f21faeb1ad/tornado-6.5.5-cp39-abi3-win_amd64.whl", hash = "sha256:6443a794ba961a9f619b1ae926a2e900ac20c34483eea67be4ed8f1e58d3ef7b", size = 448990, upload-time = "2026-03-10T21:30:58.857Z" }, + { url = "https://files.pythonhosted.org/packages/b7/c8/876602cbc96469911f0939f703453c1157b0c826ecb05bdd32e023397d4e/tornado-6.5.5-cp39-abi3-win_arm64.whl", hash = "sha256:2c9a876e094109333f888539ddb2de4361743e5d21eece20688e3e351e4990a6", size = 448016, upload-time = "2026-03-10T21:31:00.43Z" }, ] [[package]] @@ -4250,6 +4335,15 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/57/e9/70983b75d4abd6f85cffc6df79c623220ec5a579ceaacabac35c904b7b52/wrapt-2.1.0-py3-none-any.whl", hash = "sha256:e035693a0d25ea5bf5826df3e203dff7d091b0d5442aaefec9ca8f2bab38417f", size = 43886, upload-time = "2026-01-31T23:25:07.22Z" }, ] +[[package]] +name = "xlrd" +version = "2.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/07/5a/377161c2d3538d1990d7af382c79f3b2372e880b65de21b01b1a2b78691e/xlrd-2.0.2.tar.gz", hash = "sha256:08b5e25de58f21ce71dc7db3b3b8106c1fa776f3024c54e45b45b374e89234c9", size = 100167, upload-time = "2025-06-14T08:46:39.039Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/62/c8d562e7766786ba6587d09c5a8ba9f718ed3fa8af7f4553e8f91c36f302/xlrd-2.0.2-py2.py3-none-any.whl", hash = "sha256:ea762c3d29f4cca48d82df517b6d89fbce4db3107f9d78713e48cd321d5c9aa9", size = 96555, upload-time = "2025-06-14T08:46:37.766Z" }, +] + [[package]] name = "yarl" version = "1.22.0" diff --git a/vite.config.ts b/vite.config.ts index f4845e8a..1c0553be 100644 --- a/vite.config.ts +++ b/vite.config.ts @@ -34,7 +34,7 @@ export default defineConfig({ 'vendor-d3': ['d3'], 'vendor-utils': ['lodash', 'localforage', 'dompurify', 'validator'], 'vendor-editor': ['prismjs', 'prism-react-renderer', 'react-simple-code-editor', 'prettier'], - 'vendor-markdown': ['markdown-to-jsx', 'mui-markdown', 'katex', 'react-katex'], + 'vendor-markdown': ['markdown-to-jsx', 'katex', 'react-katex'], 'vendor-misc': ['exceljs', 'html2canvas', 'allotment', 'react-dnd', 'react-dnd-html5-backend', 'react-virtuoso'], } } diff --git a/vitest.config.ts b/vitest.config.ts new file mode 100644 index 00000000..0a1695a0 --- /dev/null +++ b/vitest.config.ts @@ -0,0 +1,19 @@ +import { defineConfig } from 'vitest/config'; +import react from '@vitejs/plugin-react-swc'; +import path from 'path'; + +export default defineConfig({ + plugins: [react()], + resolve: { + alias: { + '@': 
path.resolve(__dirname, './src'), + }, + }, + test: { + globals: true, + environment: 'jsdom', + setupFiles: ['./tests/frontend/setup.ts'], + include: ['tests/frontend/**/*.test.{ts,tsx}'], + css: false, + }, +}); diff --git a/yarn.lock b/yarn.lock index c44307c5..1febbc12 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,102 +2,184 @@ # yarn lockfile v1 -"@antfu/install-pkg@^1.1.0": - version "1.1.0" - resolved "https://registry.npmjs.org/@antfu/install-pkg/-/install-pkg-1.1.0.tgz" - integrity sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ== - dependencies: - package-manager-detector "^1.3.0" - tinyexec "^1.0.1" +"@adobe/css-tools@^4.4.0": + version "4.4.4" + resolved "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.4.tgz" + integrity sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg== -"@antfu/utils@^9.2.0": - version "9.3.0" - resolved "https://registry.npmjs.org/@antfu/utils/-/utils-9.3.0.tgz" - integrity sha512-9hFT4RauhcUzqOE4f1+frMKLZrgNog5b06I7VmZQV1BkvwvqrbC8EBZf3L1eEL2AKb6rNKjER0sEvJiSP1FXEA== +"@asamuzakjp/css-color@^5.0.1": + version "5.0.1" + resolved "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-5.0.1.tgz" + integrity sha512-2SZFvqMyvboVV1d15lMf7XiI3m7SDqXUuKaTymJYLN6dSGadqp+fVojqJlVoMlbZnlTmu3S0TLwLTJpvBMO1Aw== + dependencies: + "@csstools/css-calc" "^3.1.1" + "@csstools/css-color-parser" "^4.0.2" + "@csstools/css-parser-algorithms" "^4.0.0" + "@csstools/css-tokenizer" "^4.0.0" + lru-cache "^11.2.6" + +"@asamuzakjp/dom-selector@^7.0.3": + version "7.0.4" + resolved "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-7.0.4.tgz" + integrity sha512-jXR6x4AcT3eIrS2fSNAwJpwirOkGcd+E7F7CP3zjdTqz9B/2huHOL8YJZBgekKwLML+u7qB/6P1LXQuMScsx0w== + dependencies: + "@asamuzakjp/nwsapi" "^2.3.9" + bidi-js "^1.0.3" + css-tree "^3.2.1" + is-potential-custom-element-name "^1.0.1" + lru-cache "^11.2.7" + +"@asamuzakjp/nwsapi@^2.3.9": + 
version "2.3.9" + resolved "https://registry.npmjs.org/@asamuzakjp/nwsapi/-/nwsapi-2.3.9.tgz" + integrity sha512-n8GuYSrI9bF7FFZ/SjhwevlHc8xaVlb/7HmHelnc/PZXBD2ZR49NnN9sMMuDdEGPeeRQ5d0hqlSlEpgCX3Wl0Q== + +"@azure/msal-browser@^5.6.3": + version "5.6.3" + resolved "https://registry.npmjs.org/@azure/msal-browser/-/msal-browser-5.6.3.tgz" + integrity sha512-sTjMtUm+bJpENU/1WlRzHEsgEHppZDZ1EtNyaOODg/sQBtMxxJzGB+MOCM+T2Q5Qe1fKBrdxUmjyRxm0r7Ez9w== + dependencies: + "@azure/msal-common" "16.4.1" + +"@azure/msal-common@16.4.1": + version "16.4.1" + resolved "https://registry.npmjs.org/@azure/msal-common/-/msal-common-16.4.1.tgz" + integrity sha512-Bl8f+w37xkXsYh7QRkAKCFGYtWMYuOVO7Lv+BxILrvGz3HbIEF22Pt0ugyj0QPOl6NLrHcnNUQ9yeew98P/5iw== -"@babel/code-frame@^7.0.0": - version "7.22.5" - resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.5.tgz" - integrity sha512-Xmwn266vad+6DAqEB2A6V/CcZVp62BbwVmcOJc2RPuwih1kw02TjQvWVWlcKGbBPd+8/0V5DEkOcizRGYsspYQ== +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.28.6", "@babel/code-frame@^7.29.0": + version "7.29.0" + resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz" + integrity sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw== dependencies: - "@babel/highlight" "^7.22.5" + "@babel/helper-validator-identifier" "^7.28.5" + js-tokens "^4.0.0" + picocolors "^1.1.1" -"@babel/helper-module-imports@^7.16.7": - version "7.16.7" - resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz" - integrity sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg== +"@babel/generator@^7.29.0": + version "7.29.1" + resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz" + integrity sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw== dependencies: - "@babel/types" "^7.16.7" + 
"@babel/parser" "^7.29.0" + "@babel/types" "^7.29.0" + "@jridgewell/gen-mapping" "^0.3.12" + "@jridgewell/trace-mapping" "^0.3.28" + jsesc "^3.0.2" -"@babel/helper-string-parser@^7.22.5": - version "7.22.5" - resolved "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz" - integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== +"@babel/helper-globals@^7.28.0": + version "7.28.0" + resolved "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz" + integrity sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw== -"@babel/helper-validator-identifier@^7.22.5": - version "7.22.5" - resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz" - integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== +"@babel/helper-module-imports@^7.16.7": + version "7.28.6" + resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz" + integrity sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw== + dependencies: + "@babel/traverse" "^7.28.6" + "@babel/types" "^7.28.6" + +"@babel/helper-string-parser@^7.27.1": + version "7.27.1" + resolved "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz" + integrity sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA== + +"@babel/helper-validator-identifier@^7.28.5": + version "7.28.5" + resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz" + integrity sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q== + +"@babel/parser@^7.28.6", "@babel/parser@^7.29.0": + version "7.29.2" + resolved 
"https://registry.npmjs.org/@babel/parser/-/parser-7.29.2.tgz" + integrity sha512-4GgRzy/+fsBa72/RZVJmGKPmZu9Byn8o4MoLpmNe1m8ZfYnz5emHLQz3U4gLud6Zwl0RZIcgiLD7Uq7ySFuDLA== + dependencies: + "@babel/types" "^7.29.0" + +"@babel/runtime@^7.12.1", "@babel/runtime@^7.12.5", "@babel/runtime@^7.18.3", "@babel/runtime@^7.23.2", "@babel/runtime@^7.28.6", "@babel/runtime@^7.29.2", "@babel/runtime@^7.5.5", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": + version "7.29.2" + resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.29.2.tgz" + integrity sha512-JiDShH45zKHWyGe4ZNVRrCjBz8Nh9TMmZG1kh4QTK8hCBTWBi8Da+i7s1fJw7/lYpM4ccepSNfqzZ/QvABBi5g== + +"@babel/template@^7.28.6": + version "7.28.6" + resolved "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz" + integrity sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ== + dependencies: + "@babel/code-frame" "^7.28.6" + "@babel/parser" "^7.28.6" + "@babel/types" "^7.28.6" + +"@babel/traverse@^7.28.6": + version "7.29.0" + resolved "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz" + integrity sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA== + dependencies: + "@babel/code-frame" "^7.29.0" + "@babel/generator" "^7.29.0" + "@babel/helper-globals" "^7.28.0" + "@babel/parser" "^7.29.0" + "@babel/template" "^7.28.6" + "@babel/types" "^7.29.0" + debug "^4.3.1" -"@babel/highlight@^7.22.5": - version "7.22.5" - resolved "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.5.tgz" - integrity sha512-BSKlD1hgnedS5XRnGOljZawtag7H1yPfQp0tdNJCHoH6AZ+Pcm9VvkrK59/Yy593Ypg0zMxH2BxD1VPYUQ7UIw== +"@babel/types@^7.28.6", "@babel/types@^7.29.0": + version "7.29.0" + resolved "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz" + integrity sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A== dependencies: - "@babel/helper-validator-identifier" "^7.22.5" - chalk 
"^2.0.0" - js-tokens "^4.0.0" + "@babel/helper-string-parser" "^7.27.1" + "@babel/helper-validator-identifier" "^7.28.5" -"@babel/runtime@^7.12.1", "@babel/runtime@^7.12.5", "@babel/runtime@^7.18.3", "@babel/runtime@^7.28.4", "@babel/runtime@^7.5.5", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": - version "7.28.4" - resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz" - integrity sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ== +"@base-ui/utils@^0.2.6": + version "0.2.7" + resolved "https://registry.npmjs.org/@base-ui/utils/-/utils-0.2.7.tgz#a7a57d08af6d02a905c0ca5e87b6dde85597046d" + integrity sha512-nXYKhiL/0JafyJE8PfcflipGftOftlIwKd72rU15iZ1M5yqgg5J9P8NHU71GReDuXco5MJA/eVQqUT5WRqX9sA== + dependencies: + "@babel/runtime" "^7.29.2" + "@floating-ui/utils" "^0.2.11" + reselect "^5.1.1" + use-sync-external-store "^1.6.0" -"@babel/types@^7.16.7": - version "7.22.5" - resolved "https://registry.npmjs.org/@babel/types/-/types-7.22.5.tgz" - integrity sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA== +"@bramus/specificity@^2.4.2": + version "2.4.2" + resolved "https://registry.npmjs.org/@bramus/specificity/-/specificity-2.4.2.tgz" + integrity sha512-ctxtJ/eA+t+6q2++vj5j7FYX3nRu311q1wfYH3xjlLOsczhlhxAg2FWNUXhpGvAw3BWo1xBcvOV6/YLc2r5FJw== dependencies: - "@babel/helper-string-parser" "^7.22.5" - "@babel/helper-validator-identifier" "^7.22.5" - to-fast-properties "^2.0.0" + css-tree "^3.0.0" -"@braintree/sanitize-url@^7.1.1": - version "7.1.1" - resolved "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.1.1.tgz" - integrity sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw== +"@csstools/color-helpers@^6.0.2": + version "6.0.2" + resolved "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-6.0.2.tgz" + integrity 
sha512-LMGQLS9EuADloEFkcTBR3BwV/CGHV7zyDxVRtVDTwdI2Ca4it0CCVTT9wCkxSgokjE5Ho41hEPgb8OEUwoXr6Q== -"@chevrotain/cst-dts-gen@11.0.3": - version "11.0.3" - resolved "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.0.3.tgz" - integrity sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ== - dependencies: - "@chevrotain/gast" "11.0.3" - "@chevrotain/types" "11.0.3" - lodash-es "4.17.21" +"@csstools/css-calc@^3.1.1": + version "3.1.1" + resolved "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-3.1.1.tgz" + integrity sha512-HJ26Z/vmsZQqs/o3a6bgKslXGFAungXGbinULZO3eMsOyNJHeBBZfup5FiZInOghgoM4Hwnmw+OgbJCNg1wwUQ== -"@chevrotain/gast@11.0.3": - version "11.0.3" - resolved "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.0.3.tgz" - integrity sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q== +"@csstools/css-color-parser@^4.0.2": + version "4.0.2" + resolved "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-4.0.2.tgz" + integrity sha512-0GEfbBLmTFf0dJlpsNU7zwxRIH0/BGEMuXLTCvFYxuL1tNhqzTbtnFICyJLTNK4a+RechKP75e7w42ClXSnJQw== dependencies: - "@chevrotain/types" "11.0.3" - lodash-es "4.17.21" + "@csstools/color-helpers" "^6.0.2" + "@csstools/css-calc" "^3.1.1" -"@chevrotain/regexp-to-ast@11.0.3": - version "11.0.3" - resolved "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.0.3.tgz" - integrity sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA== +"@csstools/css-parser-algorithms@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-4.0.0.tgz" + integrity sha512-+B87qS7fIG3L5h3qwJ/IFbjoVoOe/bpOdh9hAjXbvx0o8ImEmUsGXN0inFOnk2ChCFgqkkGFQ+TpM5rbhkKe4w== -"@chevrotain/types@11.0.3": - version "11.0.3" - resolved "https://registry.npmjs.org/@chevrotain/types/-/types-11.0.3.tgz" - integrity 
sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ== +"@csstools/css-syntax-patches-for-csstree@^1.1.1": + version "1.1.1" + resolved "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.1.1.tgz" + integrity sha512-BvqN0AMWNAnLk9G8jnUT77D+mUbY/H2b3uDTvg2isJkHaOufUE2R3AOwxWo7VBQKT1lOdwdvorddo2B/lk64+w== -"@chevrotain/utils@11.0.3": - version "11.0.3" - resolved "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.0.3.tgz" - integrity sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ== +"@csstools/css-tokenizer@^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-4.0.0.tgz" + integrity sha512-QxULHAm7cNu72w97JUNCBFODFaXpbDg+dP8b/oWFAZ2MTRppA3U00Y2L1HqaS4J6yBqxwa/Y3nMBaxVKbB/NsA== "@emotion/babel-plugin@^11.13.5": version "11.13.5" @@ -133,9 +215,9 @@ integrity sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g== "@emotion/is-prop-valid@^1.3.0": - version "1.3.1" - resolved "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.3.1.tgz" - integrity sha512-/ACwoqx7XQi9knQs/G0qKvv5teDMhD7bXYns9N/wM8ah8iNb8jZ2uNO0YOgiq2o2poIvVtJS2YALasQuMSQ7Kw== + version "1.4.0" + resolved "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.4.0.tgz" + integrity sha512-QgD4fyscGcbbKwJmqNvUMSE02OsHUa+lAWKdEUIJKgqe5IwRSKd7+KhibEWdaKwgjLj0DRSHA9biAIqGBk05lw== dependencies: "@emotion/memoize" "^0.9.0" @@ -208,22 +290,22 @@ "@esbuild/aix-ppc64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f" integrity sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ== "@esbuild/android-arm64@0.21.5": version "0.21.5" - 
resolved "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052" integrity sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A== "@esbuild/android-arm@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28" integrity sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg== "@esbuild/android-x64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e" integrity sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA== "@esbuild/darwin-arm64@0.21.5": @@ -233,129 +315,122 @@ "@esbuild/darwin-x64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22" integrity sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw== "@esbuild/freebsd-arm64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e" integrity sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g== "@esbuild/freebsd-x64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz" + resolved 
"https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261" integrity sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ== "@esbuild/linux-arm64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b" integrity sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q== "@esbuild/linux-arm@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9" integrity sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA== "@esbuild/linux-ia32@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2" integrity sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg== "@esbuild/linux-loong64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df" integrity sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg== "@esbuild/linux-mips64el@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe" integrity 
sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg== "@esbuild/linux-ppc64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4" integrity sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w== "@esbuild/linux-riscv64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc" integrity sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA== "@esbuild/linux-s390x@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de" integrity sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A== "@esbuild/linux-x64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0" integrity sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ== "@esbuild/netbsd-x64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047" integrity sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg== "@esbuild/openbsd-x64@0.21.5": version "0.21.5" - resolved 
"https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70" integrity sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow== "@esbuild/sunos-x64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b" integrity sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg== "@esbuild/win32-arm64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d" integrity sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A== "@esbuild/win32-ia32@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b" integrity sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA== "@esbuild/win32-x64@0.21.5": version "0.21.5" - resolved "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz" + resolved "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c" integrity sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw== -"@eslint-community/eslint-utils@^4.7.0", "@eslint-community/eslint-utils@^4.8.0": - version "4.9.0" - resolved "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz" - integrity 
sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g== +"@eslint-community/eslint-utils@^4.8.0", "@eslint-community/eslint-utils@^4.9.1": + version "4.9.1" + resolved "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz" + integrity sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ== dependencies: eslint-visitor-keys "^3.4.3" -"@eslint-community/regexpp@^4.10.0", "@eslint-community/regexpp@^4.12.1": +"@eslint-community/regexpp@^4.12.1", "@eslint-community/regexpp@^4.12.2": version "4.12.2" resolved "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz" integrity sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew== -"@eslint/config-array@^0.21.1": - version "0.21.1" - resolved "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz" - integrity sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA== +"@eslint/config-array@^0.21.2": + version "0.21.2" + resolved "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.2.tgz" + integrity sha512-nJl2KGTlrf9GjLimgIru+V/mzgSK0ABCDQRvxw5BjURL7WfH5uoWmizbH7QB6MmnMBd8cIC9uceWnezL1VZWWw== dependencies: "@eslint/object-schema" "^2.1.7" debug "^4.3.1" - minimatch "^3.1.2" + minimatch "^3.1.5" -"@eslint/config-helpers@^0.4.1": +"@eslint/config-helpers@^0.4.2": version "0.4.2" resolved "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz" integrity sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw== dependencies: "@eslint/core" "^0.17.0" -"@eslint/core@^0.16.0": - version "0.16.0" - resolved "https://registry.npmjs.org/@eslint/core/-/core-0.16.0.tgz" - integrity sha512-nmC8/totwobIiFcGkDza3GIKfAw1+hLiYVrh3I1nIomQ8PEr5cxg34jnkmGawul/ep52wGRAcyeDCNtWKSOj4Q== - dependencies: - "@types/json-schema" "^7.0.15" - 
"@eslint/core@^0.17.0": version "0.17.0" resolved "https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz" @@ -363,32 +438,32 @@ dependencies: "@types/json-schema" "^7.0.15" -"@eslint/eslintrc@^3.3.1": - version "3.3.1" - resolved "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz" - integrity sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ== +"@eslint/eslintrc@^3.3.5": + version "3.3.5" + resolved "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.5.tgz" + integrity sha512-4IlJx0X0qftVsN5E+/vGujTRIFtwuLbNsVUe7TO6zYPDR1O6nFwvwhIKEKSrl6dZchmYBITazxKoUYOjdtjlRg== dependencies: - ajv "^6.12.4" + ajv "^6.14.0" debug "^4.3.2" espree "^10.0.1" globals "^14.0.0" ignore "^5.2.0" import-fresh "^3.2.1" - js-yaml "^4.1.0" - minimatch "^3.1.2" + js-yaml "^4.1.1" + minimatch "^3.1.5" strip-json-comments "^3.1.1" -"@eslint/js@9.38.0", "@eslint/js@^9.15.0": - version "9.38.0" - resolved "https://registry.npmjs.org/@eslint/js/-/js-9.38.0.tgz" - integrity sha512-UZ1VpFvXf9J06YG9xQBdnzU+kthors6KjhMAl6f4gH4usHyh31rUf2DLGInT8RFYIReYXNSydgPY0V2LuWgl7A== +"@eslint/js@9.39.4", "@eslint/js@^9.15.0": + version "9.39.4" + resolved "https://registry.npmjs.org/@eslint/js/-/js-9.39.4.tgz" + integrity sha512-nE7DEIchvtiFTwBw4Lfbu59PG+kCofhjsKaCWzxTpt4lfRjRMqG6uMBzKXuEcyXhOHoUp9riAm7/aWYGhXZ9cw== "@eslint/object-schema@^2.1.7": version "2.1.7" resolved "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz" integrity sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA== -"@eslint/plugin-kit@^0.4.0": +"@eslint/plugin-kit@^0.4.1": version "0.4.1" resolved "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz" integrity sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA== @@ -396,6 +471,11 @@ "@eslint/core" "^0.17.0" levn "^0.4.1" +"@exodus/bytes@^1.11.0", "@exodus/bytes@^1.15.0", "@exodus/bytes@^1.6.0": + 
version "1.15.0" + resolved "https://registry.npmjs.org/@exodus/bytes/-/bytes-1.15.0.tgz" + integrity sha512-UY0nlA+feH81UGSHv92sLEPLCeZFjXOuHhrIo0HQydScuQc8s0A7kL/UdgwgDq8g8ilksmuoF35YVTNphV2aBQ== + "@fast-csv/format@4.3.5": version "4.3.5" resolved "https://registry.npmjs.org/@fast-csv/format/-/format-4.3.5.tgz" @@ -421,6 +501,26 @@ lodash.isundefined "^3.0.1" lodash.uniq "^4.5.0" +"@floating-ui/core@^1.7.5": + version "1.7.5" + resolved "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.5.tgz" + integrity sha512-1Ih4WTWyw0+lKyFMcBHGbb5U5FtuHJuujoyyr5zTaWS5EYMeT6Jb2AuDeftsCsEuchO+mM2ij5+q9crhydzLhQ== + dependencies: + "@floating-ui/utils" "^0.2.11" + +"@floating-ui/dom@^1.0.0": + version "1.7.6" + resolved "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.6.tgz" + integrity sha512-9gZSAI5XM36880PPMm//9dfiEngYoC6Am2izES1FF406YFsjvyBMmeJ2g4SAju3xWwtuynNRFL2s9hgxpLI5SQ== + dependencies: + "@floating-ui/core" "^1.7.5" + "@floating-ui/utils" "^0.2.11" + +"@floating-ui/utils@^0.2.11": + version "0.2.11" + resolved "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.11.tgz" + integrity sha512-RiB/yIh78pcIxl6lLMG0CgBXAZ2Y0eVHqMPYugu+9U0AeT6YBeiJpf7lbdJNIugFP5SIjwNRgo4DhR1Qxi26Gg== + "@fontsource/roboto@^4.5.5": version "4.5.8" resolved "https://registry.npmjs.org/@fontsource/roboto/-/roboto-4.5.8.tgz" @@ -449,247 +549,276 @@ resolved "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz" integrity sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ== -"@iconify/types@^2.0.0": - version "2.0.0" - resolved "https://registry.npmjs.org/@iconify/types/-/types-2.0.0.tgz" - integrity sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg== +"@jridgewell/gen-mapping@^0.3.12": + version "0.3.13" + resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz" + integrity 
sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA== + dependencies: + "@jridgewell/sourcemap-codec" "^1.5.0" + "@jridgewell/trace-mapping" "^0.3.24" -"@iconify/utils@^3.0.1": - version "3.0.2" - resolved "https://registry.npmjs.org/@iconify/utils/-/utils-3.0.2.tgz" - integrity sha512-EfJS0rLfVuRuJRn4psJHtK2A9TqVnkxPpHY6lYHiB9+8eSuudsxbwMiavocG45ujOo6FJ+CIRlRnlOGinzkaGQ== - dependencies: - "@antfu/install-pkg" "^1.1.0" - "@antfu/utils" "^9.2.0" - "@iconify/types" "^2.0.0" - debug "^4.4.1" - globals "^15.15.0" - kolorist "^1.8.0" - local-pkg "^1.1.1" - mlly "^1.7.4" - -"@juggle/resize-observer@^3.3.1": - version "3.4.0" - resolved "https://registry.npmjs.org/@juggle/resize-observer/-/resize-observer-3.4.0.tgz" - integrity sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA== +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + +"@jridgewell/sourcemap-codec@^1.4.14", "@jridgewell/sourcemap-codec@^1.5.0", "@jridgewell/sourcemap-codec@^1.5.5": + version "1.5.5" + resolved "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz" + integrity sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og== + +"@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.28": + version "0.3.31" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz" + integrity sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" "@kurkle/color@^0.3.0": version "0.3.4" resolved "https://registry.npmjs.org/@kurkle/color/-/color-0.3.4.tgz" integrity 
sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w== -"@mermaid-js/parser@^0.6.3": - version "0.6.3" - resolved "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.6.3.tgz" - integrity sha512-lnjOhe7zyHjc+If7yT4zoedx2vo4sHaTmtkl1+or8BRTnCtDmcTpAjpzDSfCZrshM5bCoz0GyidzadJAH1xobA== - dependencies: - langium "3.3.1" - -"@mui/core-downloads-tracker@^7.3.4": - version "7.3.4" - resolved "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-7.3.4.tgz" - integrity sha512-BIktMapG3r4iXwIhYNpvk97ZfYWTreBBQTWjQKbNbzI64+ULHfYavQEX2w99aSWHS58DvXESWIgbD9adKcUOBw== +"@mui/core-downloads-tracker@^7.3.9": + version "7.3.9" + resolved "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-7.3.9.tgz" + integrity sha512-MOkOCTfbMJwLshlBCKJ59V2F/uaLYfmKnN76kksj6jlGUVdI25A9Hzs08m+zjBRdLv+sK7Rqdsefe8X7h/6PCw== "@mui/icons-material@^7.1.1": - version "7.3.4" - resolved "https://registry.npmjs.org/@mui/icons-material/-/icons-material-7.3.4.tgz" - integrity sha512-9n6Xcq7molXWYb680N2Qx+FRW8oT6j/LXF5PZFH3ph9X/Rct0B/BlLAsFI7iL9ySI6LVLuQIVtrLiPT82R7OZw== + version "7.3.9" + resolved "https://registry.npmjs.org/@mui/icons-material/-/icons-material-7.3.9.tgz" + integrity sha512-BT+zPJXss8Hg/oEMRmHl17Q97bPACG4ufFSfGEdhiE96jOyR5Dz1ty7ZWt1fVGR0y1p+sSgEwQT/MNZQmoWDCw== dependencies: - "@babel/runtime" "^7.28.4" + "@babel/runtime" "^7.28.6" "@mui/lab@^7.0.1-beta.18": - version "7.0.1-beta.18" - resolved "https://registry.npmjs.org/@mui/lab/-/lab-7.0.1-beta.18.tgz" - integrity sha512-LC+Eki92aW9/8uUS2RNqo3yytdMlUiIzV3tKvaZVabzecHncyiItAk12SNSqV+U4Pu+TUKqEM9Y5lShAUtuLPQ== - dependencies: - "@babel/runtime" "^7.28.4" - "@mui/system" "^7.3.3" - "@mui/types" "^7.4.7" - "@mui/utils" "^7.3.3" + version "7.0.1-beta.23" + resolved "https://registry.npmjs.org/@mui/lab/-/lab-7.0.1-beta.23.tgz" + integrity sha512-661LhBtL33DWeRk7DXXu4LvbHUmTRkoybiVgKkdLx6gA4Nbr1r6B1U+yZGcTm5GfY25nrtS083aoy3P0wuuJ3A== + 
dependencies: + "@babel/runtime" "^7.28.6" + "@mui/system" "^7.3.9" + "@mui/types" "^7.4.12" + "@mui/utils" "^7.3.9" clsx "^2.1.1" prop-types "^15.8.1" "@mui/material@^7.1.1": - version "7.3.4" - resolved "https://registry.npmjs.org/@mui/material/-/material-7.3.4.tgz" - integrity sha512-gEQL9pbJZZHT7lYJBKQCS723v1MGys2IFc94COXbUIyCTWa+qC77a7hUax4Yjd5ggEm35dk4AyYABpKKWC4MLw== - dependencies: - "@babel/runtime" "^7.28.4" - "@mui/core-downloads-tracker" "^7.3.4" - "@mui/system" "^7.3.3" - "@mui/types" "^7.4.7" - "@mui/utils" "^7.3.3" + version "7.3.9" + resolved "https://registry.npmjs.org/@mui/material/-/material-7.3.9.tgz" + integrity sha512-I8yO3t4T0y7bvDiR1qhIN6iBWZOTBfVOnmLlM7K6h3dx5YX2a7rnkuXzc2UkZaqhxY9NgTnEbdPlokR1RxCNRQ== + dependencies: + "@babel/runtime" "^7.28.6" + "@mui/core-downloads-tracker" "^7.3.9" + "@mui/system" "^7.3.9" + "@mui/types" "^7.4.12" + "@mui/utils" "^7.3.9" "@popperjs/core" "^2.11.8" "@types/react-transition-group" "^4.4.12" clsx "^2.1.1" - csstype "^3.1.3" + csstype "^3.2.3" prop-types "^15.8.1" - react-is "^19.1.1" + react-is "^19.2.3" react-transition-group "^4.4.5" -"@mui/private-theming@^7.3.3": - version "7.3.3" - resolved "https://registry.npmjs.org/@mui/private-theming/-/private-theming-7.3.3.tgz" - integrity sha512-OJM+9nj5JIyPUvsZ5ZjaeC9PfktmK+W5YaVLToLR8L0lB/DGmv1gcKE43ssNLSvpoW71Hct0necfade6+kW3zQ== +"@mui/private-theming@^7.3.9": + version "7.3.9" + resolved "https://registry.npmjs.org/@mui/private-theming/-/private-theming-7.3.9.tgz" + integrity sha512-ErIyRQvsiQEq7Yvcvfw9UDHngaqjMy9P3JDPnRAaKG5qhpl2C4tX/W1S4zJvpu+feihmZJStjIyvnv6KDbIrlw== dependencies: - "@babel/runtime" "^7.28.4" - "@mui/utils" "^7.3.3" + "@babel/runtime" "^7.28.6" + "@mui/utils" "^7.3.9" prop-types "^15.8.1" -"@mui/styled-engine@^7.3.3": - version "7.3.3" - resolved "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-7.3.3.tgz" - integrity sha512-CmFxvRJIBCEaWdilhXMw/5wFJ1+FT9f3xt+m2pPXhHPeVIbBg9MnMvNSJjdALvnQJMPw8jLhrUtXmN7QAZV2fw== 
+"@mui/styled-engine@^7.3.9": + version "7.3.9" + resolved "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-7.3.9.tgz" + integrity sha512-JqujWt5bX4okjUPGpVof/7pvgClqh7HvIbsIBIOOlCh2u3wG/Bwp4+E1bc1dXSwkrkp9WUAoNdI5HEC+5HKvMw== dependencies: - "@babel/runtime" "^7.28.4" + "@babel/runtime" "^7.28.6" "@emotion/cache" "^11.14.0" "@emotion/serialize" "^1.3.3" "@emotion/sheet" "^1.4.0" - csstype "^3.1.3" + csstype "^3.2.3" prop-types "^15.8.1" -"@mui/system@^7.3.3": - version "7.3.3" - resolved "https://registry.npmjs.org/@mui/system/-/system-7.3.3.tgz" - integrity sha512-Lqq3emZr5IzRLKaHPuMaLBDVaGvxoh6z7HMWd1RPKawBM5uMRaQ4ImsmmgXWtwJdfZux5eugfDhXJUo2mliS8Q== +"@mui/system@^7.3.9": + version "7.3.9" + resolved "https://registry.npmjs.org/@mui/system/-/system-7.3.9.tgz" + integrity sha512-aL1q9am8XpRrSabv9qWf5RHhJICJql34wnrc1nz0MuOglPRYF/liN+c8VqZdTvUn9qg+ZjRVbKf4sJVFfIDtmg== dependencies: - "@babel/runtime" "^7.28.4" - "@mui/private-theming" "^7.3.3" - "@mui/styled-engine" "^7.3.3" - "@mui/types" "^7.4.7" - "@mui/utils" "^7.3.3" + "@babel/runtime" "^7.28.6" + "@mui/private-theming" "^7.3.9" + "@mui/styled-engine" "^7.3.9" + "@mui/types" "^7.4.12" + "@mui/utils" "^7.3.9" clsx "^2.1.1" - csstype "^3.1.3" + csstype "^3.2.3" prop-types "^15.8.1" -"@mui/types@^7.4.7": - version "7.4.7" - resolved "https://registry.npmjs.org/@mui/types/-/types-7.4.7.tgz" - integrity sha512-8vVje9rdEr1rY8oIkYgP+Su5Kwl6ik7O3jQ0wl78JGSmiZhRHV+vkjooGdKD8pbtZbutXFVTWQYshu2b3sG9zw== +"@mui/types@^7.4.12": + version "7.4.12" + resolved "https://registry.npmjs.org/@mui/types/-/types-7.4.12.tgz" + integrity sha512-iKNAF2u9PzSIj40CjvKJWxFXJo122jXVdrmdh0hMYd+FR+NuJMkr/L88XwWLCRiJ5P1j+uyac25+Kp6YC4hu6w== + dependencies: + "@babel/runtime" "^7.28.6" + +"@mui/types@^9.0.0": + version "9.0.0" + resolved "https://registry.npmjs.org/@mui/types/-/types-9.0.0.tgz#92d8c64e72cb863ee59108cb20cc476d648a3ab9" + integrity 
sha512-i1cuFCAWN44b3AJWO7mh7tuh1sqbQSeVr/94oG0TX5uXivac8XalgE4/6fQZcmGZigzbQ35IXxj/4jLpRIBYZg== dependencies: - "@babel/runtime" "^7.28.4" + "@babel/runtime" "^7.29.2" -"@mui/utils@^7.3.3": - version "7.3.3" - resolved "https://registry.npmjs.org/@mui/utils/-/utils-7.3.3.tgz" - integrity sha512-kwNAUh7bLZ7mRz9JZ+6qfRnnxbE4Zuc+RzXnhSpRSxjTlSTj7b4JxRLXpG+MVtPVtqks5k/XC8No1Vs3x4Z2gg== +"@mui/utils@9.0.0": + version "9.0.0" + resolved "https://registry.npmjs.org/@mui/utils/-/utils-9.0.0.tgz#25b563ccbf537feba5f89c37a00cb8e6eea45ad0" + integrity sha512-bQcqyg/gjULUqTuyUjSAFr6LQGLvtkNtDbJerAtoUn9kGZ0hg5QJiN1PLHMLbeFpe3te1831uq7GFl2ITokGdg== dependencies: - "@babel/runtime" "^7.28.4" - "@mui/types" "^7.4.7" + "@babel/runtime" "^7.29.2" + "@mui/types" "^9.0.0" "@types/prop-types" "^15.7.15" clsx "^2.1.1" prop-types "^15.8.1" - react-is "^19.1.1" + react-is "^19.2.4" -"@nodelib/fs.scandir@2.1.5": - version "2.1.5" - resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" - integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== +"@mui/utils@^7.3.9": + version "7.3.9" + resolved "https://registry.npmjs.org/@mui/utils/-/utils-7.3.9.tgz" + integrity sha512-U6SdZaGbfb65fqTsH3V5oJdFj9uYwyLE2WVuNvmbggTSDBb8QHrFsqY8BN3taK9t3yJ8/BPHD/kNvLNyjwM7Yw== dependencies: - "@nodelib/fs.stat" "2.0.5" - run-parallel "^1.1.9" - -"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": - version "2.0.5" - resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" - integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + "@babel/runtime" "^7.28.6" + "@mui/types" "^7.4.12" + "@types/prop-types" "^15.7.15" + clsx "^2.1.1" + prop-types "^15.8.1" + react-is "^19.2.3" -"@nodelib/fs.walk@^1.2.3": - version "1.2.8" - resolved "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz" - integrity 
sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== +"@mui/x-internals@^9.0.0": + version "9.0.0" + resolved "https://registry.npmjs.org/@mui/x-internals/-/x-internals-9.0.0.tgz#8851a058e09b719690b4f398319805239e923855" + integrity sha512-E/4rdg69JjhyybpPGypCjAKSKLLnSdCFM+O6P/nkUg47+qt3uftxQEhjQO53rcn6ahHl6du/uNZ9BLgeY6kYxQ== dependencies: - "@nodelib/fs.scandir" "2.1.5" - fastq "^1.6.0" - -"@parcel/watcher-android-arm64@2.5.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.1.tgz" - integrity sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA== - -"@parcel/watcher-darwin-arm64@2.5.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.1.tgz" - integrity sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw== + "@babel/runtime" "^7.28.6" + "@mui/utils" "9.0.0" + reselect "^5.1.1" + use-sync-external-store "^1.6.0" -"@parcel/watcher-darwin-x64@2.5.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.1.tgz" - integrity sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg== - -"@parcel/watcher-freebsd-x64@2.5.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.1.tgz" - integrity sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ== - -"@parcel/watcher-linux-arm-glibc@2.5.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.1.tgz" - integrity sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA== - -"@parcel/watcher-linux-arm-musl@2.5.1": - version "2.5.1" - resolved 
"https://registry.npmjs.org/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.1.tgz" - integrity sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q== - -"@parcel/watcher-linux-arm64-glibc@2.5.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.1.tgz" - integrity sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w== - -"@parcel/watcher-linux-arm64-musl@2.5.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.1.tgz" - integrity sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg== - -"@parcel/watcher-linux-x64-glibc@2.5.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.1.tgz" - integrity sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A== - -"@parcel/watcher-linux-x64-musl@2.5.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.1.tgz" - integrity sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg== - -"@parcel/watcher-win32-arm64@2.5.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.1.tgz" - integrity sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw== - -"@parcel/watcher-win32-ia32@2.5.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.1.tgz" - integrity sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ== +"@mui/x-tree-view@^9.0.1": + version "9.0.1" + resolved 
"https://registry.npmjs.org/@mui/x-tree-view/-/x-tree-view-9.0.1.tgz#0bc1feb262ef5648ec44f00b6e1125ddb34bcbb8" + integrity sha512-vUSxqg5dAbaHJsifc/PjFhf57vQWjMSK9cJKPy1SXcAFHWhXSJ1oCHpUwfQ7hjKvkeqf8emKxtCVMXNVRRhIWw== + dependencies: + "@babel/runtime" "^7.28.6" + "@base-ui/utils" "^0.2.6" + "@mui/utils" "9.0.0" + "@mui/x-internals" "^9.0.0" + "@types/react-transition-group" "^4.4.12" + clsx "^2.1.1" + prop-types "^15.8.1" + react-transition-group "^4.4.5" -"@parcel/watcher-win32-x64@2.5.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.1.tgz" - integrity sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA== +"@napi-rs/wasm-runtime@^1.1.1": + version "1.1.4" + resolved "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.1.4.tgz#a46bbfedc29751b7170c5d23bc1d8ee8c7e3c1e1" + integrity sha512-3NQNNgA1YSlJb/kMH1ildASP9HW7/7kYnRI2szWJaofaS1hWmbGI4H+d3+22aGzXXN9IJ+n+GiFVcGipJP18ow== + dependencies: + "@tybys/wasm-util" "^0.10.1" + +"@oxc-project/types@=0.122.0": + version "0.122.0" + resolved "https://registry.npmjs.org/@oxc-project/types/-/types-0.122.0.tgz" + integrity sha512-oLAl5kBpV4w69UtFZ9xqcmTi+GENWOcPF7FCrczTiBbmC0ibXxCwyvZGbO39rCVEuLGAZM84DH0pUIyyv/YJzA== + +"@parcel/watcher-android-arm64@2.5.6": + version "2.5.6" + resolved "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.6.tgz#5f32e0dba356f4ac9a11068d2a5c134ca3ba6564" + integrity sha512-YQxSS34tPF/6ZG7r/Ih9xy+kP/WwediEUsqmtf0cuCV5TPPKw/PQHRhueUo6JdeFJaqV3pyjm0GdYjZotbRt/A== + +"@parcel/watcher-darwin-arm64@2.5.6": + version "2.5.6" + resolved "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.6.tgz" + integrity sha512-Z2ZdrnwyXvvvdtRHLmM4knydIdU9adO3D4n/0cVipF3rRiwP+3/sfzpAwA/qKFL6i1ModaabkU7IbpeMBgiVEA== + +"@parcel/watcher-darwin-x64@2.5.6": + version "2.5.6" + resolved 
"https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.6.tgz#bf05d76a78bc15974f15ec3671848698b0838063" + integrity sha512-HgvOf3W9dhithcwOWX9uDZyn1lW9R+7tPZ4sug+NGrGIo4Rk1hAXLEbcH1TQSqxts0NYXXlOWqVpvS1SFS4fRg== + +"@parcel/watcher-freebsd-x64@2.5.6": + version "2.5.6" + resolved "https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.6.tgz#8bc26e9848e7303ac82922a5ae1b1ef1bdb48a53" + integrity sha512-vJVi8yd/qzJxEKHkeemh7w3YAn6RJCtYlE4HPMoVnCpIXEzSrxErBW5SJBgKLbXU3WdIpkjBTeUNtyBVn8TRng== + +"@parcel/watcher-linux-arm-glibc@2.5.6": + version "2.5.6" + resolved "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.6.tgz#1328fee1deb0c2d7865079ef53a2ba4cc2f8b40a" + integrity sha512-9JiYfB6h6BgV50CCfasfLf/uvOcJskMSwcdH1PHH9rvS1IrNy8zad6IUVPVUfmXr+u+Km9IxcfMLzgdOudz9EQ== + +"@parcel/watcher-linux-arm-musl@2.5.6": + version "2.5.6" + resolved "https://registry.npmjs.org/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.6.tgz#bad0f45cb3e2157746db8b9d22db6a125711f152" + integrity sha512-Ve3gUCG57nuUUSyjBq/MAM0CzArtuIOxsBdQ+ftz6ho8n7s1i9E1Nmk/xmP323r2YL0SONs1EuwqBp2u1k5fxg== + +"@parcel/watcher-linux-arm64-glibc@2.5.6": + version "2.5.6" + resolved "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.6.tgz#b75913fbd501d9523c5f35d420957bf7d0204809" + integrity sha512-f2g/DT3NhGPdBmMWYoxixqYr3v/UXcmLOYy16Bx0TM20Tchduwr4EaCbmxh1321TABqPGDpS8D/ggOTaljijOA== + +"@parcel/watcher-linux-arm64-musl@2.5.6": + version "2.5.6" + resolved "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.6.tgz#da5621a6a576070c8c0de60dea8b46dc9c3827d4" + integrity sha512-qb6naMDGlbCwdhLj6hgoVKJl2odL34z2sqkC7Z6kzir8b5W65WYDpLB6R06KabvZdgoHI/zxke4b3zR0wAbDTA== + +"@parcel/watcher-linux-x64-glibc@2.5.6": + version "2.5.6" + resolved 
"https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.6.tgz#ce437accdc4b30f93a090b4a221fd95cd9b89639" + integrity sha512-kbT5wvNQlx7NaGjzPFu8nVIW1rWqV780O7ZtkjuWaPUgpv2NMFpjYERVi0UYj1msZNyCzGlaCWEtzc+exjMGbQ== + +"@parcel/watcher-linux-x64-musl@2.5.6": + version "2.5.6" + resolved "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.6.tgz#02400c54b4a67efcc7e2327b249711920ac969e2" + integrity sha512-1JRFeC+h7RdXwldHzTsmdtYR/Ku8SylLgTU/reMuqdVD7CtLwf0VR1FqeprZ0eHQkO0vqsbvFLXUmYm/uNKJBg== + +"@parcel/watcher-win32-arm64@2.5.6": + version "2.5.6" + resolved "https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.6.tgz#caae3d3c7583ca0a7171e6bd142c34d20ea1691e" + integrity sha512-3ukyebjc6eGlw9yRt678DxVF7rjXatWiHvTXqphZLvo7aC5NdEgFufVwjFfY51ijYEWpXbqF5jtrK275z52D4Q== + +"@parcel/watcher-win32-ia32@2.5.6": + version "2.5.6" + resolved "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.6.tgz#9ac922550896dfe47bfc5ae3be4f1bcaf8155d6d" + integrity sha512-k35yLp1ZMwwee3Ez/pxBi5cf4AoBKYXj00CZ80jUz5h8prpiaQsiRPKQMxoLstNuqe2vR4RNPEAEcjEFzhEz/g== + +"@parcel/watcher-win32-x64@2.5.6": + version "2.5.6" + resolved "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.6.tgz#73fdafba2e21c448f0e456bbe13178d8fe11739d" + integrity sha512-hbQlYcCq5dlAX9Qx+kFb0FHue6vbjlf0FrNzSKdYK2APUf7tGfGxQCk2ihEREmbR6ZMc0MVAD5RIX/41gpUzTw== "@parcel/watcher@^2.4.1": - version "2.5.1" - resolved "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.1.tgz" - integrity sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg== + version "2.5.6" + resolved "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.6.tgz" + integrity sha512-tmmZ3lQxAe/k/+rNnXQRawJ4NjxO2hqiOLTHvWchtGZULp4RyFeh6aU4XdOYBFe2KE1oShQTv4AblOs2iOrNnQ== dependencies: - detect-libc "^1.0.3" + detect-libc "^2.0.3" is-glob "^4.0.3" - micromatch 
"^4.0.5" node-addon-api "^7.0.0" + picomatch "^4.0.3" optionalDependencies: - "@parcel/watcher-android-arm64" "2.5.1" - "@parcel/watcher-darwin-arm64" "2.5.1" - "@parcel/watcher-darwin-x64" "2.5.1" - "@parcel/watcher-freebsd-x64" "2.5.1" - "@parcel/watcher-linux-arm-glibc" "2.5.1" - "@parcel/watcher-linux-arm-musl" "2.5.1" - "@parcel/watcher-linux-arm64-glibc" "2.5.1" - "@parcel/watcher-linux-arm64-musl" "2.5.1" - "@parcel/watcher-linux-x64-glibc" "2.5.1" - "@parcel/watcher-linux-x64-musl" "2.5.1" - "@parcel/watcher-win32-arm64" "2.5.1" - "@parcel/watcher-win32-ia32" "2.5.1" - "@parcel/watcher-win32-x64" "2.5.1" + "@parcel/watcher-android-arm64" "2.5.6" + "@parcel/watcher-darwin-arm64" "2.5.6" + "@parcel/watcher-darwin-x64" "2.5.6" + "@parcel/watcher-freebsd-x64" "2.5.6" + "@parcel/watcher-linux-arm-glibc" "2.5.6" + "@parcel/watcher-linux-arm-musl" "2.5.6" + "@parcel/watcher-linux-arm64-glibc" "2.5.6" + "@parcel/watcher-linux-arm64-musl" "2.5.6" + "@parcel/watcher-linux-x64-glibc" "2.5.6" + "@parcel/watcher-linux-x64-musl" "2.5.6" + "@parcel/watcher-win32-arm64" "2.5.6" + "@parcel/watcher-win32-ia32" "2.5.6" + "@parcel/watcher-win32-x64" "2.5.6" "@popperjs/core@^2.11.8": version "2.11.8" @@ -721,214 +850,318 @@ redux-thunk "^2.4.2" reselect "^4.1.8" -"@remix-run/router@1.23.0": - version "1.23.0" - resolved "https://registry.npmjs.org/@remix-run/router/-/router-1.23.0.tgz" - integrity sha512-O3rHJzAQKamUz1fvE0Qaw0xSFqsA/yafi2iqeE0pvdFtCO1viYx8QL6f3Ln/aCCTLxs68SLf0KPM9eSeM8yBnA== +"@remirror/core-constants@3.0.0": + version "3.0.0" + resolved "https://registry.npmjs.org/@remirror/core-constants/-/core-constants-3.0.0.tgz" + integrity sha512-42aWfPrimMfDKDi4YegyS7x+/0tlzaqwPQCULLanv3DMIlu96KTJR0fM5isWX2UViOqlGnX6YFgqWepcX+XMNg== + +"@remix-run/router@1.23.2": + version "1.23.2" + resolved "https://registry.npmjs.org/@remix-run/router/-/router-1.23.2.tgz" + integrity sha512-Ic6m2U/rMjTkhERIa/0ZtXJP17QUi2CbWE7cqx4J58M8aA3QTfW+2UlQ4psvTX9IO1RfNVhK3pcpdjej7L+t2w== + 
+"@rolldown/binding-android-arm64@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-rc.11.tgz#25a584227ed97239fd564451c0db2c359751b42a" + integrity sha512-SJ+/g+xNnOh6NqYxD0V3uVN4W3VfnrGsC9/hoglicgTNfABFG9JjISvkkU0dNY84MNHLWyOgxP9v9Y9pX4S7+A== + +"@rolldown/binding-darwin-arm64@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-rc.11.tgz" + integrity sha512-7WQgR8SfOPwmDZGFkThUvsmd/nwAWv91oCO4I5LS7RKrssPZmOt7jONN0cW17ydGC1n/+puol1IpoieKqQidmg== + +"@rolldown/binding-darwin-x64@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-darwin-x64/-/binding-darwin-x64-1.0.0-rc.11.tgz#6e751ea2067cacee0c94f0e8b087761dde62f9ea" + integrity sha512-39Ks6UvIHq4rEogIfQBoBRusj0Q0nPVWIvqmwBLaT6aqQGIakHdESBVOPRRLacy4WwUPIx4ZKzfZ9PMW+IeyUQ== + +"@rolldown/binding-freebsd-x64@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-freebsd-x64/-/binding-freebsd-x64-1.0.0-rc.11.tgz#b7582b959398c5871034b94ba0a8ecde0425a8e7" + integrity sha512-jfsm0ZHfhiqrvWjJAmzsqiIFPz5e7mAoCOPBNTcNgkiid/LaFKiq92+0ojH+nmJmKYkre4t71BWXUZDNp7vsag== + +"@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.0.0-rc.11.tgz#3b8c5e071d6a0ed1cb1880c1948c6fece553502a" + integrity sha512-zjQaUtSyq1nVe3nxmlSCuR96T1LPlpvmJ0SZy0WJFEsV4kFbXcq2u68L4E6O0XeFj4aex9bEauqjW8UQBeAvfQ== + +"@rolldown/binding-linux-arm64-gnu@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.0.0-rc.11.tgz#2533165620137b077ae4ede92b752a63cd85cfcb" + integrity sha512-WMW1yE6IOnehTcFE9eipFkm3XN63zypWlrJQ2iF7NrQ9b2LDRjumFoOGJE8RJJTJCTBAdmLMnJ8uVitACUUo1Q== + 
+"@rolldown/binding-linux-arm64-musl@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.0.0-rc.11.tgz#b04cf5b806a012027a4e8b139e0f86b2ff7621c0" + integrity sha512-jfndI9tsfm4APzjNt6QdBkYwre5lRPUgHeDHoI7ydKUuJvz3lZeCfMsI56BZj+7BYqiKsJm7cfd/6KYV7ubrBg== + +"@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-1.0.0-rc.11.tgz#bda9c11fe03482033d5dac6a943802b3e7579550" + integrity sha512-ZlFgw46NOAGMgcdvdYwAGu2Q+SLFA9LzbJLW+iyMOJyhj5wk6P3KEE9Gct4xWwSzFoPI7JCdYmYMzVtlgQ+zfw== + +"@rolldown/binding-linux-s390x-gnu@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-1.0.0-rc.11.tgz#55daa2d35f92f62e958fc44e12db1c16e1f271c5" + integrity sha512-hIOYmuT6ofM4K04XAZd3OzMySEO4K0/nc9+jmNcxNAxRi6c5UWpqfw3KMFV4MVFWL+jQsSh+bGw2VqmaPMTLyw== + +"@rolldown/binding-linux-x64-gnu@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-rc.11.tgz#8ca1abf607bbe2f7fdd6f6416192937dc9ea1e54" + integrity sha512-qXBQQO9OvkjjQPLdUVr7Nr2t3QTZI7s4KZtfw7HzBgjbmAPSFwSv4rmET9lLSgq3rH/ndA3ngv3Qb8l2njoPNA== + +"@rolldown/binding-linux-x64-musl@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-linux-x64-musl/-/binding-linux-x64-musl-1.0.0-rc.11.tgz#36a52beee8ac97a79d1ed8f1b94fab677e3e4d11" + integrity sha512-/tpFfoSTzUkH9LPY+cYbqZBDyyX62w5fICq9qzsHLL8uTI6BHip3Q9Uzft0wylk/i8OOwKik8OxW+QAhDmzwmg== + +"@rolldown/binding-openharmony-arm64@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-openharmony-arm64/-/binding-openharmony-arm64-1.0.0-rc.11.tgz#91c74fd23b3f3f3942fe4b3aefc9428ecbaa55fd" + integrity 
sha512-mcp3Rio2w72IvdZG0oQ4bM2c2oumtwHfUfKncUM6zGgz0KgPz4YmDPQfnXEiY5t3+KD/i8HG2rOB/LxdmieK2g== + +"@rolldown/binding-wasm32-wasi@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-wasm32-wasi/-/binding-wasm32-wasi-1.0.0-rc.11.tgz#6520bafe57ff1cd2fb45f8f22b1cb6d57be44e79" + integrity sha512-LXk5Hii1Ph9asuGRjBuz8TUxdc1lWzB7nyfdoRgI0WGPZKmCxvlKk8KfYysqtr4MfGElu/f/pEQRh8fcEgkrWw== + dependencies: + "@napi-rs/wasm-runtime" "^1.1.1" + +"@rolldown/binding-win32-arm64-msvc@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.0.0-rc.11.tgz#73dd1c4737473c8270b61cd2e42b05a34453ffc0" + integrity sha512-dDwf5otnx0XgRY1yqxOC4ITizcdzS/8cQ3goOWv3jFAo4F+xQYni+hnMuO6+LssHHdJW7+OCVL3CoU4ycnh35Q== + +"@rolldown/binding-win32-x64-msvc@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.0.0-rc.11.tgz#4d922aa6dd6bf27c73eba93fec9a0aed62549095" + integrity sha512-LN4/skhSggybX71ews7dAj6r2geaMJfm3kMbK2KhFMg9B10AZXnKoLCVVgzhMHL0S+aKtr4p8QbAW8k+w95bAA== "@rolldown/pluginutils@1.0.0-beta.27": version "1.0.0-beta.27" resolved "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz" integrity sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA== -"@rollup/rollup-android-arm-eabi@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz#a6742c74c7d9d6d604ef8a48f99326b4ecda3d82" - integrity sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg== - -"@rollup/rollup-android-arm64@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz#97247be098de4df0c11971089fd2edf80a5da8cf" - integrity 
sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q== - -"@rollup/rollup-darwin-arm64@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz#674852cf14cf11b8056e0b1a2f4e872b523576cf" - integrity sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg== - -"@rollup/rollup-darwin-x64@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz#36dfd7ed0aaf4d9d89d9ef983af72632455b0246" - integrity sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w== - -"@rollup/rollup-freebsd-arm64@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz#2f87c2074b4220260fdb52a9996246edfc633c22" - integrity sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA== - -"@rollup/rollup-freebsd-x64@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz#9b5a26522a38a95dc06616d1939d4d9a76937803" - integrity sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg== - -"@rollup/rollup-linux-arm-gnueabihf@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz#86aa4859385a8734235b5e40a48e52d770758c3a" - integrity sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw== - -"@rollup/rollup-linux-arm-musleabihf@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz#cbe70e56e6ece8dac83eb773b624fc9e5a460976" - integrity 
sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA== - -"@rollup/rollup-linux-arm64-gnu@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz#d14992a2e653bc3263d284bc6579b7a2890e1c45" - integrity sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA== - -"@rollup/rollup-linux-arm64-musl@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz#2fdd1ddc434ea90aeaa0851d2044789b4d07f6da" - integrity sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA== - -"@rollup/rollup-linux-loong64-gnu@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz#8a181e6f89f969f21666a743cd411416c80099e7" - integrity sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg== - -"@rollup/rollup-linux-loong64-musl@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz#904125af2babc395f8061daa27b5af1f4e3f2f78" - integrity sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q== - -"@rollup/rollup-linux-ppc64-gnu@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz#a57970ac6864c9a3447411a658224bdcf948be22" - integrity sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA== - -"@rollup/rollup-linux-ppc64-musl@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz#bb84de5b26870567a4267666e08891e80bb56a63" - integrity 
sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA== - -"@rollup/rollup-linux-riscv64-gnu@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz#72d00d2c7fb375ce3564e759db33f17a35bffab9" - integrity sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg== - -"@rollup/rollup-linux-riscv64-musl@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz#4c166ef58e718f9245bd31873384ba15a5c1a883" - integrity sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg== - -"@rollup/rollup-linux-s390x-gnu@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz#bb5025cde9a61db478c2ca7215808ad3bce73a09" - integrity sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w== - -"@rollup/rollup-linux-x64-gnu@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz#9b66b1f9cd95c6624c788f021c756269ffed1552" - integrity sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg== +"@rolldown/pluginutils@1.0.0-rc.11": + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.11.tgz" + integrity sha512-xQO9vbwBecJRv9EUcQ/y0dzSTJgA7Q6UVN7xp6B81+tBGSLVAK03yJ9NkJaUA7JFD91kbjxRSC/mDnmvXzbHoQ== + +"@rollup/rollup-android-arm-eabi@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.60.0.tgz#7e158ddfc16f78da99c0d5ccbae6cae403ef3284" + integrity sha512-WOhNW9K8bR3kf4zLxbfg6Pxu2ybOUbB2AjMDHSQx86LIF4rH4Ft7vmMwNt0loO0eonglSNy4cpD3MKXXKQu0/A== + 
+"@rollup/rollup-android-arm64@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.60.0.tgz#49f4ae0e22b6f9ffbcd3818b9a0758fa2d10b1cd" + integrity sha512-u6JHLll5QKRvjciE78bQXDmqRqNs5M/3GVqZeMwvmjaNODJih/WIrJlFVEihvV0MiYFmd+ZyPr9wxOVbPAG2Iw== + +"@rollup/rollup-darwin-arm64@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.60.0.tgz" + integrity sha512-qEF7CsKKzSRc20Ciu2Zw1wRrBz4g56F7r/vRwY430UPp/nt1x21Q/fpJ9N5l47WWvJlkNCPJz3QRVw008fi7yA== + +"@rollup/rollup-darwin-x64@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.60.0.tgz#1bf7a92b27ebdd5e0d1d48503c7811160773be1a" + integrity sha512-WADYozJ4QCnXCH4wPB+3FuGmDPoFseVCUrANmA5LWwGmC6FL14BWC7pcq+FstOZv3baGX65tZ378uT6WG8ynTw== + +"@rollup/rollup-freebsd-arm64@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.60.0.tgz#5ccf537b99c5175008444702193ad0b1c36f7f16" + integrity sha512-6b8wGHJlDrGeSE3aH5mGNHBjA0TTkxdoNHik5EkvPHCt351XnigA4pS7Wsj/Eo9Y8RBU6f35cjN9SYmCFBtzxw== + +"@rollup/rollup-freebsd-x64@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.60.0.tgz#1196ecd7bf4e128624ef83cd1f9d785114474a77" + integrity sha512-h25Ga0t4jaylMB8M/JKAyrvvfxGRjnPQIR8lnCayyzEjEOx2EJIlIiMbhpWxDRKGKF8jbNH01NnN663dH638mA== + +"@rollup/rollup-linux-arm-gnueabihf@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.60.0.tgz#cc147633a4af229fee83a737bf2334fbac3dc28e" + integrity sha512-RzeBwv0B3qtVBWtcuABtSuCzToo2IEAIQrcyB/b2zMvBWVbjo8bZDjACUpnaafaxhTw2W+imQbP2BD1usasK4g== + +"@rollup/rollup-linux-arm-musleabihf@4.60.0": + version "4.60.0" + resolved 
"https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.60.0.tgz#3559f9f060153ea54594a42c3b87a297bedcc26e" + integrity sha512-Sf7zusNI2CIU1HLzuu9Tc5YGAHEZs5Lu7N1ssJG4Tkw6e0MEsN7NdjUDDfGNHy2IU+ENyWT+L2obgWiguWibWQ== + +"@rollup/rollup-linux-arm64-gnu@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.60.0.tgz#e91f887b154123485cfc4b59befe2080fcd8f2df" + integrity sha512-DX2x7CMcrJzsE91q7/O02IJQ5/aLkVtYFryqCjduJhUfGKG6yJV8hxaw8pZa93lLEpPTP/ohdN4wFz7yp/ry9A== + +"@rollup/rollup-linux-arm64-musl@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.60.0.tgz#660752f040df9ba44a24765df698928917c0bf21" + integrity sha512-09EL+yFVbJZlhcQfShpswwRZ0Rg+z/CsSELFCnPt3iK+iqwGsI4zht3secj5vLEs957QvFFXnzAT0FFPIxSrkQ== + +"@rollup/rollup-linux-loong64-gnu@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.60.0.tgz#cb0e939a5fa479ccef264f3f45b31971695f869c" + integrity sha512-i9IcCMPr3EXm8EQg5jnja0Zyc1iFxJjZWlb4wr7U2Wx/GrddOuEafxRdMPRYVaXjgbhvqalp6np07hN1w9kAKw== + +"@rollup/rollup-linux-loong64-musl@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.60.0.tgz#42f86fbc82cd1a81be2d346476dd3231cf5ee442" + integrity sha512-DGzdJK9kyJ+B78MCkWeGnpXJ91tK/iKA6HwHxF4TAlPIY7GXEvMe8hBFRgdrR9Ly4qebR/7gfUs9y2IoaVEyog== + +"@rollup/rollup-linux-ppc64-gnu@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.60.0.tgz#39776a647a789dc95ea049277c5ef8f098df77f9" + integrity sha512-RwpnLsqC8qbS8z1H1AxBA1H6qknR4YpPR9w2XX0vo2Sz10miu57PkNcnHVaZkbqyw/kUWfKMI73jhmfi9BRMUQ== + +"@rollup/rollup-linux-ppc64-musl@4.60.0": + version "4.60.0" + resolved 
"https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.60.0.tgz#466f20029a8e8b3bb2954c7ddebc9586420cac2c" + integrity sha512-Z8pPf54Ly3aqtdWC3G4rFigZgNvd+qJlOE52fmko3KST9SoGfAdSRCwyoyG05q1HrrAblLbk1/PSIV+80/pxLg== + +"@rollup/rollup-linux-riscv64-gnu@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.60.0.tgz#cff9877c78f12e7aa6246f6902ad913e99edb2b7" + integrity sha512-3a3qQustp3COCGvnP4SvrMHnPQ9d1vzCakQVRTliaz8cIp/wULGjiGpbcqrkv0WrHTEp8bQD/B3HBjzujVWLOA== + +"@rollup/rollup-linux-riscv64-musl@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.60.0.tgz#9a762fb99b5a82a921017f56491b7e892b9fb17d" + integrity sha512-pjZDsVH/1VsghMJ2/kAaxt6dL0psT6ZexQVrijczOf+PeP2BUqTHYejk3l6TlPRydggINOeNRhvpLa0AYpCWSQ== + +"@rollup/rollup-linux-s390x-gnu@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.60.0.tgz#9d25ad8ac7dab681935baf78ac5ea92d14629cdf" + integrity sha512-3ObQs0BhvPgiUVZrN7gqCSvmFuMWvWvsjG5ayJ3Lraqv+2KhOsp+pUbigqbeWqueGIsnn+09HBw27rJ+gYK4VQ== + +"@rollup/rollup-linux-x64-gnu@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.60.0.tgz#5e5139e11819fa38a052368da79422cb4afcf466" + integrity sha512-EtylprDtQPdS5rXvAayrNDYoJhIz1/vzN2fEubo3yLE7tfAw+948dO0g4M0vkTVFhKojnF+n6C8bDNe+gDRdTg== "@rollup/rollup-linux-x64-gnu@^4.24.4": - version "4.52.5" - resolved "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.52.5.tgz" - integrity sha512-hXGLYpdhiNElzN770+H2nlx+jRog8TyynpTVzdlc6bndktjKWyZyiCsuDAlpd+j+W+WNqfcyAWz9HxxIGfZm1Q== - -"@rollup/rollup-linux-x64-musl@4.59.0": - version "4.59.0" - resolved 
"https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz#b007ca255dc7166017d57d7d2451963f0bd23fd9" - integrity sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg== - -"@rollup/rollup-openbsd-x64@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz#e8b357b2d1aa2c8d76a98f5f0d889eabe93f4ef9" - integrity sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ== - -"@rollup/rollup-openharmony-arm64@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz#96c2e3f4aacd3d921981329831ff8dde492204dc" - integrity sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA== - -"@rollup/rollup-win32-arm64-msvc@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz#2d865149d706d938df8b4b8f117e69a77646d581" - integrity sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A== - -"@rollup/rollup-win32-ia32-msvc@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz#abe1593be0fa92325e9971c8da429c5e05b92c36" - integrity sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA== - -"@rollup/rollup-win32-x64-gnu@4.59.0": - version "4.59.0" - resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz#c4af3e9518c9a5cd4b1c163dc81d0ad4d82e7eab" - integrity sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA== - -"@rollup/rollup-win32-x64-msvc@4.59.0": - version "4.59.0" - resolved 
"https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz#4584a8a87b29188a4c1fe987a9fcf701e256d86c" - integrity sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA== - -"@swc/core-darwin-arm64@1.14.0": - version "1.14.0" - resolved "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.14.0.tgz" - integrity sha512-uHPC8rlCt04nvYNczWzKVdgnRhxCa3ndKTBBbBpResOZsRmiwRAvByIGh599j+Oo6Z5eyTPrgY+XfJzVmXnN7Q== - -"@swc/core-darwin-x64@1.14.0": - version "1.14.0" - resolved "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.14.0.tgz" - integrity sha512-2SHrlpl68vtePRknv9shvM9YKKg7B9T13tcTg9aFCwR318QTYo+FzsKGmQSv9ox/Ua0Q2/5y2BNjieffJoo4nA== - -"@swc/core-linux-arm-gnueabihf@1.14.0": - version "1.14.0" - resolved "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.14.0.tgz" - integrity sha512-SMH8zn01dxt809svetnxpeg/jWdpi6dqHKO3Eb11u4OzU2PK7I5uKS6gf2hx5LlTbcJMFKULZiVwjlQLe8eqtg== - -"@swc/core-linux-arm64-gnu@1.14.0": - version "1.14.0" - resolved "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.14.0.tgz" - integrity sha512-q2JRu2D8LVqGeHkmpVCljVNltG0tB4o4eYg+dElFwCS8l2Mnt9qurMCxIeo9mgoqz0ax+k7jWtIRHktnVCbjvQ== - -"@swc/core-linux-arm64-musl@1.14.0": - version "1.14.0" - resolved "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.14.0.tgz" - integrity sha512-uofpVoPCEUjYIv454ZEZ3sLgMD17nIwlz2z7bsn7rl301Kt/01umFA7MscUovFfAK2IRGck6XB+uulMu6aFhKQ== - -"@swc/core-linux-x64-gnu@1.14.0": - version "1.14.0" - resolved "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.14.0.tgz" - integrity sha512-quTTx1Olm05fBfv66DEBuOsOgqdypnZ/1Bh3yGXWY7ANLFeeRpCDZpljD9BSjdsNdPOlwJmEUZXMHtGm3v1TZQ== - -"@swc/core-linux-x64-musl@1.14.0": - version "1.14.0" - resolved "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.14.0.tgz" - integrity 
sha512-caaNAu+aIqT8seLtCf08i8C3/UC5ttQujUjejhMcuS1/LoCKtNiUs4VekJd2UGt+pyuuSrQ6dKl8CbCfWvWeXw== - -"@swc/core-win32-arm64-msvc@1.14.0": - version "1.14.0" - resolved "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.14.0.tgz" - integrity sha512-EeW3jFlT3YNckJ6V/JnTfGcX7UHGyh6/AiCPopZ1HNaGiXVCKHPpVQZicmtyr/UpqxCXLrTgjHOvyMke7YN26A== - -"@swc/core-win32-ia32-msvc@1.14.0": - version "1.14.0" - resolved "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.14.0.tgz" - integrity sha512-dPai3KUIcihV5hfoO4QNQF5HAaw8+2bT7dvi8E5zLtecW2SfL3mUZipzampXq5FHll0RSCLzlrXnSx+dBRZIIQ== - -"@swc/core-win32-x64-msvc@1.14.0": - version "1.14.0" - resolved "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.14.0.tgz" - integrity sha512-nm+JajGrTqUA6sEHdghDlHMNfH1WKSiuvljhdmBACW4ta4LC3gKurX2qZuiBARvPkephW9V/i5S8QPY1PzFEqg== + version "4.60.1" + resolved "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.60.1.tgz#56a6a0d9076f2a05a976031493b24a20ddcc0e77" + integrity sha512-77PpsFQUCOiZR9+LQEFg9GClyfkNXj1MP6wRnzYs0EeWbPcHs02AXu4xuUbM1zhwn3wqaizle3AEYg5aeoohhg== + +"@rollup/rollup-linux-x64-musl@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.60.0.tgz#b6211d46e11b1f945f5504cc794fce839331ed08" + integrity sha512-k09oiRCi/bHU9UVFqD17r3eJR9bn03TyKraCrlz5ULFJGdJGi7VOmm9jl44vOJvRJ6P7WuBi/s2A97LxxHGIdw== + +"@rollup/rollup-openbsd-x64@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.60.0.tgz#e6e09eebaa7012bb9c7331b437a9e992bd94ca35" + integrity sha512-1o/0/pIhozoSaDJoDcec+IVLbnRtQmHwPV730+AOD29lHEEo4F5BEUB24H0OBdhbBBDwIOSuf7vgg0Ywxdfiiw== + +"@rollup/rollup-openharmony-arm64@4.60.0": + version "4.60.0" + resolved 
"https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.60.0.tgz#f7d99ae857032498e57a5e7259fb7100fd24a87e" + integrity sha512-pESDkos/PDzYwtyzB5p/UoNU/8fJo68vcXM9ZW2V0kjYayj1KaaUfi1NmTUTUpMn4UhU4gTuK8gIaFO4UGuMbA== + +"@rollup/rollup-win32-arm64-msvc@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.60.0.tgz#41e392f5d9f3bf1253fdaf2f6d6f6b1bfc452856" + integrity sha512-hj1wFStD7B1YBeYmvY+lWXZ7ey73YGPcViMShYikqKT1GtstIKQAtfUI6yrzPjAy/O7pO0VLXGmUVWXQMaYgTQ== + +"@rollup/rollup-win32-ia32-msvc@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.60.0.tgz#f41b0490be0e5d3cf459b4dc076a192b532adea9" + integrity sha512-SyaIPFoxmUPlNDq5EHkTbiKzmSEmq/gOYFI/3HHJ8iS/v1mbugVa7dXUzcJGQfoytp9DJFLhHH4U3/eTy2Bq4w== + +"@rollup/rollup-win32-x64-gnu@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.60.0.tgz#0fcf9f1fcb750f0317b13aac3b3231687e6397a5" + integrity sha512-RdcryEfzZr+lAr5kRm2ucN9aVlCCa2QNq4hXelZxb8GG0NJSazq44Z3PCCc8wISRuCVnGs0lQJVX5Vp6fKA+IA== + +"@rollup/rollup-win32-x64-msvc@4.60.0": + version "4.60.0" + resolved "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.60.0.tgz#3afdb30405f6d4248df5e72e1ca86c5eab55fab8" + integrity sha512-PrsWNQ8BuE00O3Xsx3ALh2Df8fAj9+cvvX9AIA6o4KpATR98c9mud4XtDWVvsEuyia5U4tVSTKygawyJkjm60w== + +"@standard-schema/spec@^1.1.0": + version "1.1.0" + resolved "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz" + integrity sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w== + +"@swc/core-darwin-arm64@1.15.21": + version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.15.21.tgz" + integrity 
sha512-SA8SFg9dp0qKRH8goWsax6bptFE2EdmPf2YRAQW9WoHGf3XKM1bX0nd5UdwxmC5hXsBUZAYf7xSciCler6/oyA== + +"@swc/core-darwin-x64@1.15.21": + version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.15.21.tgz#05ff28c00a7045d9760c847e19604fff02b6e3ea" + integrity sha512-//fOVntgowz9+V90lVsNCtyyrtbHp3jWH6Rch7MXHXbcvbLmbCTmssl5DeedUWLLGiAAW1wksBdqdGYOTjaNLw== + +"@swc/core-linux-arm-gnueabihf@1.15.21": + version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.15.21.tgz#d52a0fac1933fe4e4180a196417053571d6c255f" + integrity sha512-meNI4Sh6h9h8DvIfEc0l5URabYMSuNvyisLmG6vnoYAS43s8ON3NJR8sDHvdP7NJTrLe0q/x2XCn6yL/BeHcZg== + +"@swc/core-linux-arm64-gnu@1.15.21": + version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.15.21.tgz#32cd1b9d0d4be4d53ccfbc122ac61289f37735b9" + integrity sha512-QrXlNQnHeXqU2EzLlnsPoWEh8/GtNJLvfMiPsDhk+ht6Xv8+vhvZ5YZ/BokNWSIZiWPKLAqR0M7T92YF5tmD3g== + +"@swc/core-linux-arm64-musl@1.15.21": + version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.15.21.tgz#0993e8b2ffac4f1141fa7b158e8dd982c2476c1a" + integrity sha512-8/yGCMO333ultDaMQivE5CjO6oXDPeeg1IV4sphojPkb0Pv0i6zvcRIkgp60xDB+UxLr6VgHgt+BBgqS959E9g== + +"@swc/core-linux-ppc64-gnu@1.15.21": + version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core-linux-ppc64-gnu/-/core-linux-ppc64-gnu-1.15.21.tgz#5f6765d9a36235d95fd5c69f6d848973e85d8180" + integrity sha512-ucW0HzPx0s1dgRvcvuLSPSA/2Kk/VYTv9st8qe1Kc22Gu0Q0rH9+6TcBTmMuNIp0Xs4BPr1uBttmbO1wEGI49Q== + +"@swc/core-linux-s390x-gnu@1.15.21": + version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core-linux-s390x-gnu/-/core-linux-s390x-gnu-1.15.21.tgz#f96779dc2ba8d47298bca3ceaa961e0f460aa0bd" + integrity sha512-ulTnOGc5I7YRObE/9NreAhQg94QkiR5qNhhcUZ1iFAYjzg/JGAi1ch+s/Ixe61pMIr8bfVrF0NOaB0f8wjaAfA== + +"@swc/core-linux-x64-gnu@1.15.21": + 
version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.15.21.tgz#0ffe779d5fd060bfb7992176f51d317c81c6aaaf" + integrity sha512-D0RokxtM+cPvSqJIKR6uja4hbD+scI9ezo95mBhfSyLUs9wnPPl26sLp1ZPR/EXRdYm3F3S6RUtVi+8QXhT24Q== + +"@swc/core-linux-x64-musl@1.15.21": + version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.15.21.tgz#2ea9fab26555d27c715aed6a08604a8296e4af50" + integrity sha512-nER8u7VeRfmU6fMDzl1NQAbbB/G7O2avmvCOwIul1uGkZ2/acbPH+DCL9h5+0yd/coNcxMBTL6NGepIew+7C2w== + +"@swc/core-win32-arm64-msvc@1.15.21": + version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.15.21.tgz#b401f34f38d744ca2b800bf2574ef5f7b20ca52f" + integrity sha512-+/AgNBnjYugUA8C0Do4YzymgvnGbztv7j8HKSQLvR/DQgZPoXQ2B3PqB2mTtGh/X5DhlJWiqnunN35JUgWcAeQ== + +"@swc/core-win32-ia32-msvc@1.15.21": + version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.15.21.tgz#c761e981725d137abd7abcecff88d1dc2d76baad" + integrity sha512-IkSZj8PX/N4HcaFhMQtzmkV8YSnuNoJ0E6OvMwFiOfejPhiKXvl7CdDsn1f4/emYEIDO3fpgZW9DTaCRMDxaDA== + +"@swc/core-win32-x64-msvc@1.15.21": + version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.15.21.tgz#4878cd851b4f98033e19fca78953201aef736edd" + integrity sha512-zUyWso7OOENB6e1N1hNuNn8vbvLsTdKQ5WKLgt/JcBNfJhKy/6jmBmqI3GXk/MyvQKd5SLvP7A0F36p7TeDqvw== "@swc/core@^1.12.11": - version "1.14.0" - resolved "https://registry.npmjs.org/@swc/core/-/core-1.14.0.tgz" - integrity sha512-oExhY90bes5pDTVrei0xlMVosTxwd/NMafIpqsC4dMbRYZ5KB981l/CX8tMnGsagTplj/RcG9BeRYmV6/J5m3w== + version "1.15.21" + resolved "https://registry.npmjs.org/@swc/core/-/core-1.15.21.tgz" + integrity sha512-fkk7NJcBscrR3/F8jiqlMptRHP650NxqDnspBMrRe5d8xOoCy9MLL5kOBLFXjFLfMo3KQQHhk+/jUULOMlR1uQ== dependencies: "@swc/counter" "^0.1.3" "@swc/types" "^0.1.25" optionalDependencies: - 
"@swc/core-darwin-arm64" "1.14.0" - "@swc/core-darwin-x64" "1.14.0" - "@swc/core-linux-arm-gnueabihf" "1.14.0" - "@swc/core-linux-arm64-gnu" "1.14.0" - "@swc/core-linux-arm64-musl" "1.14.0" - "@swc/core-linux-x64-gnu" "1.14.0" - "@swc/core-linux-x64-musl" "1.14.0" - "@swc/core-win32-arm64-msvc" "1.14.0" - "@swc/core-win32-ia32-msvc" "1.14.0" - "@swc/core-win32-x64-msvc" "1.14.0" + "@swc/core-darwin-arm64" "1.15.21" + "@swc/core-darwin-x64" "1.15.21" + "@swc/core-linux-arm-gnueabihf" "1.15.21" + "@swc/core-linux-arm64-gnu" "1.15.21" + "@swc/core-linux-arm64-musl" "1.15.21" + "@swc/core-linux-ppc64-gnu" "1.15.21" + "@swc/core-linux-s390x-gnu" "1.15.21" + "@swc/core-linux-x64-gnu" "1.15.21" + "@swc/core-linux-x64-musl" "1.15.21" + "@swc/core-win32-arm64-msvc" "1.15.21" + "@swc/core-win32-ia32-msvc" "1.15.21" + "@swc/core-win32-x64-msvc" "1.15.21" "@swc/counter@^0.1.3": version "0.1.3" @@ -936,16 +1169,269 @@ integrity sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ== "@swc/types@^0.1.25": - version "0.1.25" - resolved "https://registry.npmjs.org/@swc/types/-/types-0.1.25.tgz" - integrity sha512-iAoY/qRhNH8a/hBvm3zKj9qQ4oc2+3w1unPJa2XvTK3XjeLXtzcCingVPw/9e5mn1+0yPqxcBGp9Jf0pkfMb1g== + version "0.1.26" + resolved "https://registry.npmjs.org/@swc/types/-/types-0.1.26.tgz" + integrity sha512-lyMwd7WGgG79RS7EERZV3T8wMdmPq3xwyg+1nmAM64kIhx5yl+juO2PYIHb7vTiPgPCj8LYjsNV2T5wiQHUEaw== dependencies: "@swc/counter" "^0.1.3" +"@testing-library/dom@^10.4.1": + version "10.4.1" + resolved "https://registry.npmjs.org/@testing-library/dom/-/dom-10.4.1.tgz" + integrity sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/runtime" "^7.12.5" + "@types/aria-query" "^5.0.1" + aria-query "5.3.0" + dom-accessibility-api "^0.5.9" + lz-string "^1.5.0" + picocolors "1.1.1" + pretty-format "^27.0.2" + +"@testing-library/jest-dom@^6.9.1": + version 
"6.9.1" + resolved "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.9.1.tgz" + integrity sha512-zIcONa+hVtVSSep9UT3jZ5rizo2BsxgyDYU7WFD5eICBE7no3881HGeb/QkGfsJs6JTkY1aQhT7rIPC7e+0nnA== + dependencies: + "@adobe/css-tools" "^4.4.0" + aria-query "^5.0.0" + css.escape "^1.5.1" + dom-accessibility-api "^0.6.3" + picocolors "^1.1.1" + redent "^3.0.0" + +"@testing-library/react@^16.3.2": + version "16.3.2" + resolved "https://registry.npmjs.org/@testing-library/react/-/react-16.3.2.tgz" + integrity sha512-XU5/SytQM+ykqMnAnvB2umaJNIOsLF3PVv//1Ew4CTcpz0/BRyy/af40qqrt7SjKpDdT1saBMc42CUok5gaw+g== + dependencies: + "@babel/runtime" "^7.12.5" + +"@tiptap/core@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/core/-/core-3.22.2.tgz" + integrity sha512-atq35NkpeEphH6vNYJ0pTLLBA73FAbvTV9Ovd3AaTC5s99/KF5Q86zVJXvml8xPRcMGM6dLp+eSSd06oTscMSA== + +"@tiptap/extension-blockquote@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-blockquote/-/extension-blockquote-3.22.2.tgz" + integrity sha512-iTdlmGFcgxi4LKaOW2Rc9/yD83qTXgRm5BN3vCHWy5+TbEnReYxYqU5qKsbtTbKy30sO8TJTdAXTZ29uomShQQ== + +"@tiptap/extension-bold@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-bold/-/extension-bold-3.22.2.tgz" + integrity sha512-bqsPJyKcT/RWse4e16U2EKhraR8a2+98TUuk1amG3yCyFJZStoO/j+pN0IqZdZZjr3WtxFyvwWp7Kc59UN+jUA== + +"@tiptap/extension-bubble-menu@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-bubble-menu/-/extension-bubble-menu-3.22.2.tgz" + integrity sha512-5hbyDOSkJwA2uh0v9Mm0Dd9bb9inx6tHBEDSH2tCB9Rm23poz3yOreB7SNX8xDMe5L0/PQesfWC14RitcmhKPg== + dependencies: + "@floating-ui/dom" "^1.0.0" + +"@tiptap/extension-bullet-list@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-bullet-list/-/extension-bullet-list-3.22.2.tgz" + integrity 
sha512-llrTJnA72RGcWLLO+ro0QN4sjHynhaCerhpV+GZE/ATd8BqV/ekQFdBLJrvC/09My2XQfCwLsyCh92NPXUdELA== + +"@tiptap/extension-code-block@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-code-block/-/extension-code-block-3.22.2.tgz" + integrity sha512-PEwFlDyvtKF19WCrOFg77qJV9WqhvjCY4ZoXlHP9Hx0KTcOA8W39mtw8d4NWU5pLRK94yHKF1DVVL8UUkEOnww== + +"@tiptap/extension-code@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-code/-/extension-code-3.22.2.tgz" + integrity sha512-iYFY+yzfYA9MKt7nupyW/PzqL9XC2D0mC8l1z2Y10i0/fGL8NbqIYjhNUAyXGqH3QWcI+DirI66842y2OadPOg== + +"@tiptap/extension-document@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-document/-/extension-document-3.22.2.tgz" + integrity sha512-yPw9pQeVC4QDh86TuyKCZxxM4g0NAw7mEtGnAo6EpxaBQr1wyBr9yFpys+QTsQpRTmyTf1VHp4iTTLuWHMljIw== + +"@tiptap/extension-dropcursor@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-dropcursor/-/extension-dropcursor-3.22.2.tgz" + integrity sha512-sDv3fv4LtX0X4nqwh9Gn3C/aZXT+C2JlK7tJovPOpaYP/a6hr03Sn35X5moAfgMCSiWFygEvlTriqwmCsJuxog== + +"@tiptap/extension-floating-menu@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-floating-menu/-/extension-floating-menu-3.22.2.tgz" + integrity sha512-r0ZTeh9rNtj9Api+G0YyaB+tAKPDn7aYWg+qSrmAC5EyUPee6Zjn3zlw0q4renCeQflvNRK20xHM8zokC41jOA== + +"@tiptap/extension-gapcursor@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-gapcursor/-/extension-gapcursor-3.22.2.tgz" + integrity sha512-rR2OLrl/k2kj7xehaZHq0Y7T+1wy2DOTabir9LsTrktTFEcklrh9qY1KC6rEBkwMKaWrmignR1l39kS6RlKFNw== + +"@tiptap/extension-hard-break@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-hard-break/-/extension-hard-break-3.22.2.tgz" + integrity sha512-ChsoqF4XRp6EWatTRlXL4LMFh/ggwRVCyt09brSfjJV5knFaXlECSa5/+rKLMLMULaj6dVlJqoAD15exgu2HHA== + 
+"@tiptap/extension-heading@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-heading/-/extension-heading-3.22.2.tgz" + integrity sha512-QPHLef+ikAyf7RVc4EdGeKxH4OEGb3ueCEwJ41RcYPtZ1BX9ueei7FC936guTdL1U7w3vQ65qfy86HznzkYgvw== + +"@tiptap/extension-horizontal-rule@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-horizontal-rule/-/extension-horizontal-rule-3.22.2.tgz" + integrity sha512-Oz8KN5KJAWV1mFNE9UIWXdMD6xa5zPf/0yLsT8V4sgaRm+VsdFKllN58BY9qCZf/kIZbaOez5KkaoeAcm0MAZg== + +"@tiptap/extension-image@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-image/-/extension-image-3.22.2.tgz" + integrity sha512-xFCgwreF6sn5mQ/hFDQKn41NIbbfks/Ou9j763Djf3pWsastgzdgwifQOpXVI3aSsqlKUO3o8/8R/yQczvZcwg== + +"@tiptap/extension-italic@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-italic/-/extension-italic-3.22.2.tgz" + integrity sha512-fmtQu2HDnV3sOZPdz0+1lOLI7UtrIhusohJj2UwOLQxG8qqhLwbvWx2OQTlfblgY0z+CjLRr6ANbNDxOTIblfg== + +"@tiptap/extension-link@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-link/-/extension-link-3.22.2.tgz" + integrity sha512-TXfSoKmng5pecvQUZqdsx6ICeob5V5hhYOj2vCEtjfcjWsyCndqFIl1w+Nt/yI5ehrFNOVPyj3ZvcELuuAW6pw== + dependencies: + linkifyjs "^4.3.2" + +"@tiptap/extension-list-item@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-list-item/-/extension-list-item-3.22.2.tgz" + integrity sha512-Mk+iiLIFh8Pfuarr6mWfTO7QJbd2ZQd0nGNhNWXlGAO7DJCb4BP9nj4bEIJ17SbcykGRjsi4WMqY50z4MHXqKQ== + +"@tiptap/extension-list-keymap@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-list-keymap/-/extension-list-keymap-3.22.2.tgz" + integrity sha512-TozU9V2vldMUPpTXnfLCO33EO06jLxn7uEJTMBnN4iX/dLV3cBVCbE4kHyDKS0sLd7joUeekS06vYP9uQb1hFw== + +"@tiptap/extension-list@^3.22.2": + version "3.22.2" + resolved 
"https://registry.npmjs.org/@tiptap/extension-list/-/extension-list-3.22.2.tgz" + integrity sha512-Vq9xScgkA2A3Zj9dQ4WUBKK7u7UCzeSFRz9FcKTQVZHRPbZoqFGnlRUVngqsE7JXrCOthXQ1dXxgk40nAsBFRw== + +"@tiptap/extension-ordered-list@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-ordered-list/-/extension-ordered-list-3.22.2.tgz" + integrity sha512-K7qxoBKmsVkAd3kW64ZRCUPFrDcNGpXRDUBx9YgAO/bTfsfxtH2oil+igsUWGXPczpP4yoHPKjTfhpBpLjGl6Q== + +"@tiptap/extension-paragraph@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-paragraph/-/extension-paragraph-3.22.2.tgz" + integrity sha512-EHZZzxVhvzEPDPWtRBF1YKhB+WCUjd1C2NhjHfL3Dl71PBqM3ZWA6qN7NDGPyNyGGWauui/NR/4X+5AfPqlHyA== + +"@tiptap/extension-strike@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-strike/-/extension-strike-3.22.2.tgz" + integrity sha512-YFC3elKU1L8PiGbcB6tqd/7vWPF5IbydJz0POJpHzSjstX+VfT8VsvS7ubxVuSIWQ11kGkH3mzX6LX8JHsHZxg== + +"@tiptap/extension-text@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-text/-/extension-text-3.22.2.tgz" + integrity sha512-J1w7JwijfSD7ah0WfiwZ/DVWCIGT9x369RM4RJc57i44mIBElj7tl1dh+N5KPGOXKUup4gr7sSJAE38lgeaDMg== + +"@tiptap/extension-underline@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extension-underline/-/extension-underline-3.22.2.tgz" + integrity sha512-BaV6WOowxdkGTLWiU7DdZ3Twh633O4RGqwUM5dDas5LvaqL8AMWGTO8Wg9yAaaKXzd9MtKI1ZCqS/+MtzusgkQ== + +"@tiptap/extensions@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/extensions/-/extensions-3.22.2.tgz" + integrity sha512-s7MZmm2Xdq+8feIXgY3v7gVpQ5ClqBZi20KheouS7KSbBlrY4fu2irYR1EGc6r1UUVaHMxEa+cx5knhx+mIPUw== + +"@tiptap/pm@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/pm/-/pm-3.22.2.tgz" + integrity sha512-G2ENwIazoSKkAnN5MN5yN91TIZNFm6TxB74kPf3Empr2k9W51Hkcier70jHGpArhgcEaL4BVreuU1PRDRwCeGw== + 
dependencies: + prosemirror-changeset "^2.3.0" + prosemirror-collab "^1.3.1" + prosemirror-commands "^1.6.2" + prosemirror-dropcursor "^1.8.1" + prosemirror-gapcursor "^1.3.2" + prosemirror-history "^1.4.1" + prosemirror-inputrules "^1.4.0" + prosemirror-keymap "^1.2.2" + prosemirror-markdown "^1.13.1" + prosemirror-menu "^1.2.4" + prosemirror-model "^1.24.1" + prosemirror-schema-basic "^1.2.3" + prosemirror-schema-list "^1.5.0" + prosemirror-state "^1.4.3" + prosemirror-tables "^1.6.4" + prosemirror-trailing-node "^3.0.0" + prosemirror-transform "^1.10.2" + prosemirror-view "^1.38.1" + +"@tiptap/react@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/react/-/react-3.22.2.tgz" + integrity sha512-tyGKG69e/MkpoD/JTpVPz0XydEHxh1MSAYnLb3gRvyvBDv2r/veLea+cApkmjQaCfkKC/CWwTFXBYlOB0caSBA== + dependencies: + "@types/use-sync-external-store" "^0.0.6" + fast-equals "^5.3.3" + use-sync-external-store "^1.4.0" + optionalDependencies: + "@tiptap/extension-bubble-menu" "^3.22.2" + "@tiptap/extension-floating-menu" "^3.22.2" + +"@tiptap/starter-kit@^3.22.2": + version "3.22.2" + resolved "https://registry.npmjs.org/@tiptap/starter-kit/-/starter-kit-3.22.2.tgz" + integrity sha512-+CCKX8tOQ/ZPb2k/z6em4AQCFYAcdd8+0TOzPWiuLxRyCHRPBBVhnPsXOKgKwE4OO3E8BsezquuYRYRwsyzCqg== + dependencies: + "@tiptap/core" "^3.22.2" + "@tiptap/extension-blockquote" "^3.22.2" + "@tiptap/extension-bold" "^3.22.2" + "@tiptap/extension-bullet-list" "^3.22.2" + "@tiptap/extension-code" "^3.22.2" + "@tiptap/extension-code-block" "^3.22.2" + "@tiptap/extension-document" "^3.22.2" + "@tiptap/extension-dropcursor" "^3.22.2" + "@tiptap/extension-gapcursor" "^3.22.2" + "@tiptap/extension-hard-break" "^3.22.2" + "@tiptap/extension-heading" "^3.22.2" + "@tiptap/extension-horizontal-rule" "^3.22.2" + "@tiptap/extension-italic" "^3.22.2" + "@tiptap/extension-link" "^3.22.2" + "@tiptap/extension-list" "^3.22.2" + "@tiptap/extension-list-item" "^3.22.2" + "@tiptap/extension-list-keymap" 
"^3.22.2" + "@tiptap/extension-ordered-list" "^3.22.2" + "@tiptap/extension-paragraph" "^3.22.2" + "@tiptap/extension-strike" "^3.22.2" + "@tiptap/extension-text" "^3.22.2" + "@tiptap/extension-underline" "^3.22.2" + "@tiptap/extensions" "^3.22.2" + "@tiptap/pm" "^3.22.2" + +"@tybys/wasm-util@^0.10.1": + version "0.10.1" + resolved "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz#ecddd3205cf1e2d5274649ff0eedd2991ed7f414" + integrity sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg== + dependencies: + tslib "^2.4.0" + +"@types/aria-query@^5.0.1": + version "5.0.4" + resolved "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.4.tgz" + integrity sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw== + +"@types/chai@^5.2.2": + version "5.2.3" + resolved "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz" + integrity sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA== + dependencies: + "@types/deep-eql" "*" + assertion-error "^2.0.1" + "@types/d3-array@*", "@types/d3-array@^3.2.1": - version "3.2.1" - resolved "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz" - integrity sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg== + version "3.2.2" + resolved "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz" + integrity sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw== "@types/d3-axis@*": version "3.0.6" @@ -985,9 +1471,9 @@ integrity sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw== "@types/d3-dispatch@*": - version "3.0.6" - resolved "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.6.tgz" - integrity sha512-4fvZhzMeeuBJYZXRXrRIQnvUYfyXwYmLsdiN7XXmVNQKKw1cM8a5WdID0g1hVFZDqT9ZqZEY5pD44p24VS7iZQ== + version "3.0.7" + resolved 
"https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.7.tgz" + integrity sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA== "@types/d3-drag@*": version "3.0.7" @@ -1043,9 +1529,9 @@ "@types/d3-color" "*" "@types/d3-path@*": - version "3.1.0" - resolved "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.0.tgz" - integrity sha512-P2dlU/q51fkOc/Gfl3Ul9kicV7l+ra934qBFXCFhrZMOL6du1TM0pm1ThYvENukyOn5h9v+yMJ9Fn5JK4QozrQ== + version "3.1.1" + resolved "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz" + integrity sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg== "@types/d3-polygon@*": version "3.0.2" @@ -1063,26 +1549,26 @@ integrity sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ== "@types/d3-scale-chromatic@*": - version "3.0.3" - resolved "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.0.3.tgz" - integrity sha512-laXM4+1o5ImZv3RpFAsTRn3TEkzqkytiOY0Dz0sq5cnd1dtNlk6sHLon4OvqaiJb28T0S/TdsBI3Sjsy+keJrw== + version "3.1.0" + resolved "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz" + integrity sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ== "@types/d3-scale@*": - version "4.0.8" - resolved "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.8.tgz" - integrity sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ== + version "4.0.9" + resolved "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz" + integrity sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw== dependencies: "@types/d3-time" "*" "@types/d3-selection@*": - version "3.0.10" - resolved "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.10.tgz" - integrity 
sha512-cuHoUgS/V3hLdjJOLTT691+G2QoqAjCVLmr4kJXR4ha56w1Zdu8UUQ5TxLRqudgNjwXeQxKMq4j+lyf9sWuslg== + version "3.0.11" + resolved "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.11.tgz" + integrity sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w== "@types/d3-shape@*": - version "3.1.6" - resolved "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.6.tgz" - integrity sha512-5KKk5aKGu2I+O6SONMYSNflgiP0WfZIQvVUMan50wHsLG1G94JlxEVnCpQARfTtzytuY0p/9PXXZb3I7giofIA== + version "3.1.8" + resolved "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.8.tgz" + integrity sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w== dependencies: "@types/d3-path" "*" @@ -1092,9 +1578,9 @@ integrity sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg== "@types/d3-time@*": - version "3.0.3" - resolved "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.3.tgz" - integrity sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw== + version "3.0.4" + resolved "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz" + integrity sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g== "@types/d3-timer@*": version "3.0.2" @@ -1102,9 +1588,9 @@ integrity sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw== "@types/d3-transition@*": - version "3.0.8" - resolved "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.8.tgz" - integrity sha512-ew63aJfQ/ms7QQ4X7pk5NxQ9fZH/z+i24ZfJ6tJSfqxJMrYLiK01EAs2/Rtw/JreGUsS3pLPNV644qXFGnoZNQ== + version "3.0.9" + resolved "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.9.tgz" + integrity sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg== dependencies: "@types/d3-selection" "*" @@ -1152,6 +1638,18 
@@ "@types/d3-transition" "*" "@types/d3-zoom" "*" +"@types/debug@^4.0.0": + version "4.1.13" + resolved "https://registry.npmjs.org/@types/debug/-/debug-4.1.13.tgz#22d1cc9d542d3593caea764f974306ab36286ee7" + integrity sha512-KSVgmQmzMwPlmtljOomayoR89W4FynCAi3E8PPs7vmDVPe84hT+vGPKkJfThkmXs0x0jAaa9U8uW8bbfyS2fWw== + dependencies: + "@types/ms" "*" + +"@types/deep-eql@*": + version "4.0.2" + resolved "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz" + integrity sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw== + "@types/dompurify@^3.0.5": version "3.2.0" resolved "https://registry.npmjs.org/@types/dompurify/-/dompurify-3.2.0.tgz" @@ -1159,7 +1657,14 @@ dependencies: dompurify "*" -"@types/estree@1.0.8", "@types/estree@^1.0.6", "@types/estree@^1.0.8": +"@types/estree-jsx@^1.0.0": + version "1.0.5" + resolved "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz#858a88ea20f34fe65111f005a689fa1ebf70dc18" + integrity sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg== + dependencies: + "@types/estree" "*" + +"@types/estree@*", "@types/estree@1.0.8", "@types/estree@^1.0.0", "@types/estree@^1.0.6", "@types/estree@^1.0.8": version "1.0.8" resolved "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz" integrity sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w== @@ -1169,12 +1674,18 @@ resolved "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz" integrity sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg== +"@types/hast@^3.0.0": + version "3.0.4" + resolved "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz#1d6b39993b82cea6ad783945b0508c25903e15aa" + integrity sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ== + dependencies: + "@types/unist" "*" + "@types/hoist-non-react-statics@^3.3.1": - 
version "3.3.1" - resolved "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz" - integrity sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA== + version "3.3.7" + resolved "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.7.tgz" + integrity sha512-PQTyIulDkIDro8P+IHbKCsw7U2xxBYflVzW/FgWdCAePD9xGSidgA76/GeJ6lBKoblyhf9pBY763gbrN+1dI8g== dependencies: - "@types/react" "*" hoist-non-react-statics "^3.3.0" "@types/json-schema@^7.0.15": @@ -1182,10 +1693,58 @@ resolved "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz" integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== +"@types/linkify-it@^3": + version "3.0.5" + resolved "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-3.0.5.tgz" + integrity sha512-yg6E+u0/+Zjva+buc3EIb+29XEg4wltq7cSmd4Uc2EE/1nUVmxyzpX6gUXD0V8jIrG0r7YeOGVIbYRkxeooCtw== + +"@types/linkify-it@^5": + version "5.0.0" + resolved "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-5.0.0.tgz" + integrity sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q== + "@types/lodash@^4.17.7": - version "4.17.20" - resolved "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.20.tgz" - integrity sha512-H3MHACvFUEiujabxhaI/ImO6gUrd8oOurg7LQtS7mbwIXA/cUqWrvBsaeJ23aZEPk1TAYkurjfMbSELfoCXlGA== + version "4.17.24" + resolved "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.24.tgz" + integrity sha512-gIW7lQLZbue7lRSWEFql49QJJWThrTFFeIMJdp3eH4tKoxm1OvEPg02rm4wCCSHS0cL3/Fizimb35b7k8atwsQ== + +"@types/markdown-it@^13.0.7": + version "13.0.9" + resolved "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-13.0.9.tgz" + integrity sha512-1XPwR0+MgXLWfTn9gCsZ55AHOKW1WN+P9vr0PaQh5aerR9LLQXUbjfEAFhjmEmyoYFWAyuN2Mqkn40MZ4ukjBw== + dependencies: + "@types/linkify-it" "^3" + "@types/mdurl" "^1" + 
+"@types/markdown-it@^14.0.0": + version "14.1.2" + resolved "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-14.1.2.tgz" + integrity sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog== + dependencies: + "@types/linkify-it" "^5" + "@types/mdurl" "^2" + +"@types/mdast@^4.0.0": + version "4.0.4" + resolved "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz#7ccf72edd2f1aa7dd3437e180c64373585804dd6" + integrity sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA== + dependencies: + "@types/unist" "*" + +"@types/mdurl@^1": + version "1.0.5" + resolved "https://registry.npmjs.org/@types/mdurl/-/mdurl-1.0.5.tgz" + integrity sha512-6L6VymKTzYSrEf4Nev4Xa1LCHKrlTlYCBMTlQKFuddo1CvQcE52I0mwfOJayueUC7MJuXOeHTcIU683lzd0cUA== + +"@types/mdurl@^2": + version "2.0.0" + resolved "https://registry.npmjs.org/@types/mdurl/-/mdurl-2.0.0.tgz" + integrity sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg== + +"@types/ms@*": + version "2.1.0" + resolved "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz#052aa67a48eccc4309d7f0191b7e41434b90bb78" + integrity sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA== "@types/node@^14.0.1": version "14.18.63" @@ -1193,21 +1752,21 @@ integrity sha512-fAtCfv4jJg+ExtXhvCkCqUKZ+4ok/JQk01qDKhL5BDDoS3AxKXhV5/MAVUZyQnSEd2GT92fkgZl0pz0Q0AzcIQ== "@types/node@^20.14.10": - version "20.19.24" - resolved "https://registry.npmjs.org/@types/node/-/node-20.19.24.tgz" - integrity sha512-FE5u0ezmi6y9OZEzlJfg37mqqf6ZDSF2V/NLjUyGrR9uTZ7Sb9F7bLNZ03S4XVUNRWGA7Ck4c1kK+YnuWjl+DA== + version "20.19.37" + resolved "https://registry.npmjs.org/@types/node/-/node-20.19.37.tgz" + integrity sha512-8kzdPJ3FsNsVIurqBs7oodNnCEVbni9yUEkaHbgptDACOPW04jimGagZ51E6+lXUwJjgnBw+hyko/lkFWCldqw== dependencies: undici-types "~6.21.0" "@types/parse-json@^4.0.0": - version "4.0.0" - resolved 
"https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz" - integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + version "4.0.2" + resolved "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.2.tgz" + integrity sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw== "@types/prismjs@^1.26.0": - version "1.26.0" - resolved "https://registry.npmjs.org/@types/prismjs/-/prismjs-1.26.0.tgz" - integrity sha512-ZTaqn/qSqUuAq1YwvOFQfVW1AR/oQJlLSZVustdjwI+GZ8kr0MSHBj0tsXPW1EqHubx50gtBEjbPGsdZwQwCjQ== + version "1.26.6" + resolved "https://registry.npmjs.org/@types/prismjs/-/prismjs-1.26.6.tgz" + integrity sha512-vqlvI7qlMvcCBbVe0AKAb4f97//Hy0EBTaiW8AalRnG/xAN5zOiWWyrNqNXeq8+KAuvRewjCVY1+IPxk4RdNYw== "@types/prop-types@*", "@types/prop-types@^15.7.15": version "15.7.15" @@ -1231,126 +1790,151 @@ resolved "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-4.4.12.tgz" integrity sha512-8TV6R3h2j7a91c+1DXdJi3Syo69zzIZbz7Lg5tORM5LEJG7X/E6a1V3drRyBRZq7/utz7A+c4OgYLiLcYGHG6w== -"@types/react@*", "@types/react@^18.3.3": - version "18.3.3" - resolved "https://registry.npmjs.org/@types/react/-/react-18.3.3.tgz" - integrity sha512-hti/R0pS0q1/xx+TsI73XIqk26eBsISZ2R0wUijXIngRK9R/e7Xw/cXVxQK7R5JjW+SV4zGcn5hXjudkN/pLIw== +"@types/react@*": + version "19.2.14" + resolved "https://registry.npmjs.org/@types/react/-/react-19.2.14.tgz" + integrity sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w== + dependencies: + csstype "^3.2.2" + +"@types/react@^18.3.3": + version "18.3.28" + resolved "https://registry.npmjs.org/@types/react/-/react-18.3.28.tgz" + integrity sha512-z9VXpC7MWrhfWipitjNdgCauoMLRdIILQsAEV+ZesIzBq/oUlxk0m3ApZuMFCXdnS4U7KrI+l3WRUEGQ8K1QKw== dependencies: "@types/prop-types" "*" - csstype "^3.0.2" + csstype "^3.2.2" "@types/trusted-types@^2.0.7": version "2.0.7" resolved 
"https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz" integrity sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw== +"@types/unist@*", "@types/unist@^3.0.0": + version "3.0.3" + resolved "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz#acaab0f919ce69cce629c2d4ed2eb4adc1b6c20c" + integrity sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q== + +"@types/unist@^2.0.0": + version "2.0.11" + resolved "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz#11af57b127e32487774841f7a4e54eab166d03c4" + integrity sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA== + "@types/use-sync-external-store@^0.0.3": version "0.0.3" resolved "https://registry.npmjs.org/@types/use-sync-external-store/-/use-sync-external-store-0.0.3.tgz" integrity sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA== +"@types/use-sync-external-store@^0.0.6": + version "0.0.6" + resolved "https://registry.npmjs.org/@types/use-sync-external-store/-/use-sync-external-store-0.0.6.tgz" + integrity sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg== + "@types/validator@^13.12.2": - version "13.15.4" - resolved "https://registry.npmjs.org/@types/validator/-/validator-13.15.4.tgz" - integrity sha512-LSFfpSnJJY9wbC0LQxgvfb+ynbHftFo0tMsFOl/J4wexLnYMmDSPaj2ZyDv3TkfL1UePxPrxOWJfbiRS8mQv7A== - -"@typescript-eslint/eslint-plugin@8.46.2", "@typescript-eslint/eslint-plugin@^8.16.0": - version "8.46.2" - resolved "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.46.2.tgz" - integrity sha512-ZGBMToy857/NIPaaCucIUQgqueOiq7HeAKkhlvqVV4lm089zUFW6ikRySx2v+cAhKeUCPuWVHeimyk6Dw1iY3w== - dependencies: - "@eslint-community/regexpp" "^4.10.0" - "@typescript-eslint/scope-manager" "8.46.2" - "@typescript-eslint/type-utils" "8.46.2" - 
"@typescript-eslint/utils" "8.46.2" - "@typescript-eslint/visitor-keys" "8.46.2" - graphemer "^1.4.0" - ignore "^7.0.0" + version "13.15.10" + resolved "https://registry.npmjs.org/@types/validator/-/validator-13.15.10.tgz" + integrity sha512-T8L6i7wCuyoK8A/ZeLYt1+q0ty3Zb9+qbSSvrIVitzT3YjZqkTZ40IbRsPanlB4h1QB3JVL1SYCdR6ngtFYcuA== + +"@typescript-eslint/eslint-plugin@8.57.2", "@typescript-eslint/eslint-plugin@^8.16.0": + version "8.57.2" + resolved "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.57.2.tgz" + integrity sha512-NZZgp0Fm2IkD+La5PR81sd+g+8oS6JwJje+aRWsDocxHkjyRw0J5L5ZTlN3LI1LlOcGL7ph3eaIUmTXMIjLk0w== + dependencies: + "@eslint-community/regexpp" "^4.12.2" + "@typescript-eslint/scope-manager" "8.57.2" + "@typescript-eslint/type-utils" "8.57.2" + "@typescript-eslint/utils" "8.57.2" + "@typescript-eslint/visitor-keys" "8.57.2" + ignore "^7.0.5" natural-compare "^1.4.0" - ts-api-utils "^2.1.0" - -"@typescript-eslint/parser@8.46.2", "@typescript-eslint/parser@^8.16.0": - version "8.46.2" - resolved "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.46.2.tgz" - integrity sha512-BnOroVl1SgrPLywqxyqdJ4l3S2MsKVLDVxZvjI1Eoe8ev2r3kGDo+PcMihNmDE+6/KjkTubSJnmqGZZjQSBq/g== - dependencies: - "@typescript-eslint/scope-manager" "8.46.2" - "@typescript-eslint/types" "8.46.2" - "@typescript-eslint/typescript-estree" "8.46.2" - "@typescript-eslint/visitor-keys" "8.46.2" - debug "^4.3.4" - -"@typescript-eslint/project-service@8.46.2": - version "8.46.2" - resolved "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.46.2.tgz" - integrity sha512-PULOLZ9iqwI7hXcmL4fVfIsBi6AN9YxRc0frbvmg8f+4hQAjQ5GYNKK0DIArNo+rOKmR/iBYwkpBmnIwin4wBg== - dependencies: - "@typescript-eslint/tsconfig-utils" "^8.46.2" - "@typescript-eslint/types" "^8.46.2" - debug "^4.3.4" - -"@typescript-eslint/scope-manager@8.46.2": - version "8.46.2" - resolved 
"https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.46.2.tgz" - integrity sha512-LF4b/NmGvdWEHD2H4MsHD8ny6JpiVNDzrSZr3CsckEgCbAGZbYM4Cqxvi9L+WqDMT+51Ozy7lt2M+d0JLEuBqA== - dependencies: - "@typescript-eslint/types" "8.46.2" - "@typescript-eslint/visitor-keys" "8.46.2" - -"@typescript-eslint/tsconfig-utils@8.46.2", "@typescript-eslint/tsconfig-utils@^8.46.2": - version "8.46.2" - resolved "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.46.2.tgz" - integrity sha512-a7QH6fw4S57+F5y2FIxxSDyi5M4UfGF+Jl1bCGd7+L4KsaUY80GsiF/t0UoRFDHAguKlBaACWJRmdrc6Xfkkag== - -"@typescript-eslint/type-utils@8.46.2": - version "8.46.2" - resolved "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.46.2.tgz" - integrity sha512-HbPM4LbaAAt/DjxXaG9yiS9brOOz6fabal4uvUmaUYe6l3K1phQDMQKBRUrr06BQkxkvIZVVHttqiybM9nJsLA== - dependencies: - "@typescript-eslint/types" "8.46.2" - "@typescript-eslint/typescript-estree" "8.46.2" - "@typescript-eslint/utils" "8.46.2" - debug "^4.3.4" - ts-api-utils "^2.1.0" - -"@typescript-eslint/types@8.46.2", "@typescript-eslint/types@^8.46.2": - version "8.46.2" - resolved "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.46.2.tgz" - integrity sha512-lNCWCbq7rpg7qDsQrd3D6NyWYu+gkTENkG5IKYhUIcxSb59SQC/hEQ+MrG4sTgBVghTonNWq42bA/d4yYumldQ== - -"@typescript-eslint/typescript-estree@8.46.2": - version "8.46.2" - resolved "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.46.2.tgz" - integrity sha512-f7rW7LJ2b7Uh2EiQ+7sza6RDZnajbNbemn54Ob6fRwQbgcIn+GWfyuHDHRYgRoZu1P4AayVScrRW+YfbTvPQoQ== - dependencies: - "@typescript-eslint/project-service" "8.46.2" - "@typescript-eslint/tsconfig-utils" "8.46.2" - "@typescript-eslint/types" "8.46.2" - "@typescript-eslint/visitor-keys" "8.46.2" - debug "^4.3.4" - fast-glob "^3.3.2" - is-glob "^4.0.3" - minimatch "^9.0.4" - semver "^7.6.0" - ts-api-utils "^2.1.0" - -"@typescript-eslint/utils@8.46.2": - version 
"8.46.2" - resolved "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.46.2.tgz" - integrity sha512-sExxzucx0Tud5tE0XqR0lT0psBQvEpnpiul9XbGUB1QwpWJJAps1O/Z7hJxLGiZLBKMCutjTzDgmd1muEhBnVg== - dependencies: - "@eslint-community/eslint-utils" "^4.7.0" - "@typescript-eslint/scope-manager" "8.46.2" - "@typescript-eslint/types" "8.46.2" - "@typescript-eslint/typescript-estree" "8.46.2" - -"@typescript-eslint/visitor-keys@8.46.2": - version "8.46.2" - resolved "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.46.2.tgz" - integrity sha512-tUFMXI4gxzzMXt4xpGJEsBsTox0XbNQ1y94EwlD/CuZwFcQP79xfQqMhau9HsRc/J0cAPA/HZt1dZPtGn9V/7w== - dependencies: - "@typescript-eslint/types" "8.46.2" - eslint-visitor-keys "^4.2.1" + ts-api-utils "^2.4.0" + +"@typescript-eslint/parser@8.57.2", "@typescript-eslint/parser@^8.16.0": + version "8.57.2" + resolved "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.57.2.tgz" + integrity sha512-30ScMRHIAD33JJQkgfGW1t8CURZtjc2JpTrq5n2HFhOefbAhb7ucc7xJwdWcrEtqUIYJ73Nybpsggii6GtAHjA== + dependencies: + "@typescript-eslint/scope-manager" "8.57.2" + "@typescript-eslint/types" "8.57.2" + "@typescript-eslint/typescript-estree" "8.57.2" + "@typescript-eslint/visitor-keys" "8.57.2" + debug "^4.4.3" + +"@typescript-eslint/project-service@8.57.2": + version "8.57.2" + resolved "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.57.2.tgz" + integrity sha512-FuH0wipFywXRTHf+bTTjNyuNQQsQC3qh/dYzaM4I4W0jrCqjCVuUh99+xd9KamUfmCGPvbO8NDngo/vsnNVqgw== + dependencies: + "@typescript-eslint/tsconfig-utils" "^8.57.2" + "@typescript-eslint/types" "^8.57.2" + debug "^4.4.3" + +"@typescript-eslint/scope-manager@8.57.2": + version "8.57.2" + resolved "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.57.2.tgz" + integrity sha512-snZKH+W4WbWkrBqj4gUNRIGb/jipDW3qMqVJ4C9rzdFc+wLwruxk+2a5D+uoFcKPAqyqEnSb4l2ULuZf95eSkw== + dependencies: + "@typescript-eslint/types" 
"8.57.2" + "@typescript-eslint/visitor-keys" "8.57.2" + +"@typescript-eslint/tsconfig-utils@8.57.2", "@typescript-eslint/tsconfig-utils@^8.57.2": + version "8.57.2" + resolved "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.57.2.tgz" + integrity sha512-3Lm5DSM+DCowsUOJC+YqHHnKEfFh5CoGkj5Z31NQSNF4l5wdOwqGn99wmwN/LImhfY3KJnmordBq/4+VDe2eKw== + +"@typescript-eslint/type-utils@8.57.2": + version "8.57.2" + resolved "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.57.2.tgz" + integrity sha512-Co6ZCShm6kIbAM/s+oYVpKFfW7LBc6FXoPXjTRQ449PPNBY8U0KZXuevz5IFuuUj2H9ss40atTaf9dlGLzbWZg== + dependencies: + "@typescript-eslint/types" "8.57.2" + "@typescript-eslint/typescript-estree" "8.57.2" + "@typescript-eslint/utils" "8.57.2" + debug "^4.4.3" + ts-api-utils "^2.4.0" + +"@typescript-eslint/types@8.57.2", "@typescript-eslint/types@^8.57.2": + version "8.57.2" + resolved "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.57.2.tgz" + integrity sha512-/iZM6FnM4tnx9csuTxspMW4BOSegshwX5oBDznJ7S4WggL7Vczz5d2W11ecc4vRrQMQHXRSxzrCsyG5EsPPTbA== + +"@typescript-eslint/typescript-estree@8.57.2": + version "8.57.2" + resolved "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.57.2.tgz" + integrity sha512-2MKM+I6g8tJxfSmFKOnHv2t8Sk3T6rF20A1Puk0svLK+uVapDZB/4pfAeB7nE83uAZrU6OxW+HmOd5wHVdXwXA== + dependencies: + "@typescript-eslint/project-service" "8.57.2" + "@typescript-eslint/tsconfig-utils" "8.57.2" + "@typescript-eslint/types" "8.57.2" + "@typescript-eslint/visitor-keys" "8.57.2" + debug "^4.4.3" + minimatch "^10.2.2" + semver "^7.7.3" + tinyglobby "^0.2.15" + ts-api-utils "^2.4.0" + +"@typescript-eslint/utils@8.57.2": + version "8.57.2" + resolved "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.57.2.tgz" + integrity sha512-krRIbvPK1ju1WBKIefiX+bngPs+odIQUtR7kymzPfo1POVw3jlF+nLkmexdSSd4UCbDcQn+wMBATOOmpBbqgKg== + dependencies: + "@eslint-community/eslint-utils" "^4.9.1" 
+ "@typescript-eslint/scope-manager" "8.57.2" + "@typescript-eslint/types" "8.57.2" + "@typescript-eslint/typescript-estree" "8.57.2" + +"@typescript-eslint/visitor-keys@8.57.2": + version "8.57.2" + resolved "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.57.2.tgz" + integrity sha512-zhahknjobV2FiD6Ee9iLbS7OV9zi10rG26odsQdfBO/hjSzUQbkIYgda+iNKK1zNiW2ey+Lf8MU5btN17V3dUw== + dependencies: + "@typescript-eslint/types" "8.57.2" + eslint-visitor-keys "^5.0.0" + +"@ungap/structured-clone@^1.0.0": + version "1.3.0" + resolved "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz#d06bbb384ebcf6c505fde1c3d0ed4ddffe0aaff8" + integrity sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g== "@vitejs/plugin-react-swc@^3.7.0": version "3.11.0" @@ -1360,20 +1944,80 @@ "@rolldown/pluginutils" "1.0.0-beta.27" "@swc/core" "^1.12.11" +"@vitest/expect@4.1.1": + version "4.1.1" + resolved "https://registry.npmjs.org/@vitest/expect/-/expect-4.1.1.tgz" + integrity sha512-xAV0fqBTk44Rn6SjJReEQkHP3RrqbJo6JQ4zZ7/uVOiJZRarBtblzrOfFIZeYUrukp2YD6snZG6IBqhOoHTm+A== + dependencies: + "@standard-schema/spec" "^1.1.0" + "@types/chai" "^5.2.2" + "@vitest/spy" "4.1.1" + "@vitest/utils" "4.1.1" + chai "^6.2.2" + tinyrainbow "^3.0.3" + +"@vitest/mocker@4.1.1": + version "4.1.1" + resolved "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.1.1.tgz" + integrity sha512-h3BOylsfsCLPeceuCPAAJ+BvNwSENgJa4hXoXu4im0bs9Lyp4URc4JYK4pWLZ4pG/UQn7AT92K6IByi6rE6g3A== + dependencies: + "@vitest/spy" "4.1.1" + estree-walker "^3.0.3" + magic-string "^0.30.21" + +"@vitest/pretty-format@4.1.1": + version "4.1.1" + resolved "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.1.1.tgz" + integrity sha512-GM+TEQN5WhOygr1lp7skeVjdLPqqWMHsfzXrcHAqZJi/lIVh63H0kaRCY8MDhNWikx19zBUK8ceaLB7X5AH9NQ== + dependencies: + tinyrainbow "^3.0.3" + +"@vitest/runner@4.1.1": + version "4.1.1" + resolved 
"https://registry.npmjs.org/@vitest/runner/-/runner-4.1.1.tgz" + integrity sha512-f7+FPy75vN91QGWsITueq0gedwUZy1fLtHOCMeQpjs8jTekAHeKP80zfDEnhrleviLHzVSDXIWuCIOFn3D3f8A== + dependencies: + "@vitest/utils" "4.1.1" + pathe "^2.0.3" + +"@vitest/snapshot@4.1.1": + version "4.1.1" + resolved "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.1.1.tgz" + integrity sha512-kMVSgcegWV2FibXEx9p9WIKgje58lcTbXgnJixfcg15iK8nzCXhmalL0ZLtTWLW9PH1+1NEDShiFFedB3tEgWg== + dependencies: + "@vitest/pretty-format" "4.1.1" + "@vitest/utils" "4.1.1" + magic-string "^0.30.21" + pathe "^2.0.3" + +"@vitest/spy@4.1.1": + version "4.1.1" + resolved "https://registry.npmjs.org/@vitest/spy/-/spy-4.1.1.tgz" + integrity sha512-6Ti/KT5OVaiupdIZEuZN7l3CZcR0cxnxt70Z0//3CtwgObwA6jZhmVBA3yrXSVN3gmwjgd7oDNLlsXz526gpRA== + +"@vitest/utils@4.1.1": + version "4.1.1" + resolved "https://registry.npmjs.org/@vitest/utils/-/utils-4.1.1.tgz" + integrity sha512-cNxAlaB3sHoCdL6pj6yyUXv9Gry1NHNg0kFTXdvSIZXLHsqKH7chiWOkwJ5s5+d/oMwcoG9T0bKU38JZWKusrQ== + dependencies: + "@vitest/pretty-format" "4.1.1" + convert-source-map "^2.0.0" + tinyrainbow "^3.0.3" + acorn-jsx@^5.3.2: version "5.3.2" resolved "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== acorn@^8.15.0: - version "8.15.0" - resolved "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz" - integrity sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg== + version "8.16.0" + resolved "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz" + integrity sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw== -ajv@^6.12.4: - version "6.12.6" - resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" - integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== +ajv@^6.14.0: + version "6.14.0" + resolved 
"https://registry.npmjs.org/ajv/-/ajv-6.14.0.tgz" + integrity sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw== dependencies: fast-deep-equal "^3.1.1" fast-json-stable-stringify "^2.0.0" @@ -1381,29 +2025,27 @@ ajv@^6.12.4: uri-js "^4.2.2" allotment@^1.20.4: - version "1.20.4" - resolved "https://registry.npmjs.org/allotment/-/allotment-1.20.4.tgz" - integrity sha512-LMM5Xe5nLePFOLAlW/5k3ARqznYGUyNekV4xJrfDKn1jimW3nlZE6hT/Tu0T8s0VgAkr9s2P7+uM0WvJKn5DAw== + version "1.20.5" + resolved "https://registry.npmjs.org/allotment/-/allotment-1.20.5.tgz" + integrity sha512-7i4NT7ieXEyAd5lBrXmE7WHz/e7hRuo97+j+TwrPE85ha6kyFURoc76nom0dWSZ1pTKVEAMJy/+f3/Isfu/41A== dependencies: classnames "^2.3.0" eventemitter3 "^5.0.0" + fast-deep-equal "^3.1.3" lodash.clamp "^4.0.0" lodash.debounce "^4.0.0" - lodash.isequal "^4.5.0" - use-resize-observer "^9.0.0" + usehooks-ts "^3.1.1" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -ansi-regex@^6.0.1: +ansi-regex@^6.2.2: version "6.2.2" resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz" integrity sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg== -ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - ansi-styles@^4.1.0: version "4.3.0" resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" @@ -1411,6 +2053,11 @@ ansi-styles@^4.1.0: dependencies: color-convert "^2.0.1" +ansi-styles@^5.0.0: + version "5.2.0" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz" + integrity 
sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + ansi-styles@^6.2.1: version "6.2.3" resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz" @@ -1466,7 +2113,14 @@ argparse@^2.0.1: resolved "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz" integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== -aria-query@^5.3.2: +aria-query@5.3.0: + version "5.3.0" + resolved "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz" + integrity sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A== + dependencies: + dequal "^2.0.3" + +aria-query@^5.0.0, aria-query@^5.3.2: version "5.3.2" resolved "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz" integrity sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw== @@ -1549,6 +2203,11 @@ arraybuffer.prototype.slice@^1.0.4: get-intrinsic "^1.2.6" is-array-buffer "^3.0.4" +assertion-error@^2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz" + integrity sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA== + ast-types-flow@^0.0.8: version "0.0.8" resolved "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.8.tgz" @@ -1572,9 +2231,9 @@ available-typed-arrays@^1.0.7: possible-typed-array-names "^1.0.0" axe-core@^4.10.0: - version "4.11.0" - resolved "https://registry.npmjs.org/axe-core/-/axe-core-4.11.0.tgz" - integrity sha512-ilYanEU8vxxBexpJd8cWM4ElSQq4QctCLKih0TSfjIfCQTeyH/6zVrmIJfLPrKTKJRbiG+cfnZbQIjAlJmF1jQ== + version "4.11.1" + resolved "https://registry.npmjs.org/axe-core/-/axe-core-4.11.1.tgz" + integrity sha512-BASOg+YwO2C+346x3LZOeoovTIoTrRqEsqMa6fmfAV0P+U9mFr9NsyOEpiYvFjbc64NMrSswhV50WdXzdb/Z5A== axobject-query@^4.1.0: version "4.1.0" @@ -1590,11 +2249,21 @@ babel-plugin-macros@^3.1.0: cosmiconfig 
"^7.0.0" resolve "^1.19.0" +bail@^2.0.0: + version "2.0.2" + resolved "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz#d26f5cd8fe5d6f832a31517b9f7c356040ba6d5d" + integrity sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw== + balanced-match@^1.0.0: version "1.0.2" resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== +balanced-match@^4.0.2: + version "4.0.4" + resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz" + integrity sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA== + base64-arraybuffer@^1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz" @@ -1605,6 +2274,13 @@ base64-js@^1.3.1: resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== +bidi-js@^1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/bidi-js/-/bidi-js-1.0.3.tgz" + integrity sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw== + dependencies: + require-from-string "^2.0.2" + big-integer@^1.6.17: version "1.6.52" resolved "https://registry.npmjs.org/big-integer/-/big-integer-1.6.52.tgz" @@ -1633,26 +2309,26 @@ bluebird@~3.4.1: integrity sha512-iD3898SR7sWVRHbiQv+sHUtHnMvC1o3nW5rAcqnq3uOn07DSAppZYUkIGslDz6gXC7HfunPe7YVBgoEJASPcHA== brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + version "1.1.12" + resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz" + integrity 
sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" brace-expansion@^2.0.1: - version "2.0.1" - resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz" - integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + version "2.0.2" + resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz" + integrity sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ== dependencies: balanced-match "^1.0.0" -braces@^3.0.3: - version "3.0.3" - resolved "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz" - integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== +brace-expansion@^5.0.2: + version "5.0.5" + resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz" + integrity sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ== dependencies: - fill-range "^7.1.1" + balanced-match "^4.0.2" bubblesets-js@^3.0.0: version "3.0.1" @@ -1714,13 +2390,23 @@ callsites@^3.0.0: integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== canvas@^3.2.1: - version "3.2.1" - resolved "https://registry.npmjs.org/canvas/-/canvas-3.2.1.tgz" - integrity sha512-ej1sPFR5+0YWtaVp6S1N1FVz69TQCqmrkGeRvQxZeAB1nAIcjNTHVwrZtYtWFFBmQsF40/uDLehsW5KuYC99mg== + version "3.2.2" + resolved "https://registry.npmjs.org/canvas/-/canvas-3.2.2.tgz" + integrity sha512-duEt4h1HHu9sJZyVKfLRXR6tsKPY7cEELzxSRJkwddOXYvQT3P/+es98SV384JA0zMOZ5s+9gatnGfM6sL4Drg== dependencies: node-addon-api "^7.0.0" prebuild-install "^7.1.3" +ccount@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz#17a3bf82302e0870d6da43a01311a8bc02a3ecf5" + integrity 
sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg== + +chai@^6.2.2: + version "6.2.2" + resolved "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz" + integrity sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg== + chainsaw@~0.1.0: version "0.1.0" resolved "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz" @@ -1728,15 +2414,6 @@ chainsaw@~0.1.0: dependencies: traverse ">=0.3.0 <0.4" -chalk@^2.0.0: - version "2.4.2" - resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - chalk@^4.0.0: version "4.1.2" resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" @@ -1745,6 +2422,26 @@ chalk@^4.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" +character-entities-html4@^2.0.0: + version "2.1.0" + resolved "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz#1f1adb940c971a4b22ba39ddca6b618dc6e56b2b" + integrity sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA== + +character-entities-legacy@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz#76bc83a90738901d7bc223a9e93759fdd560125b" + integrity sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ== + +character-entities@^2.0.0: + version "2.0.2" + resolved "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz#2d09c2e72cd9523076ccb21157dff66ad43fcc22" + integrity sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ== + +character-reference-invalid@^2.0.0: + version "2.0.1" + resolved 
"https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz#85c66b041e43b47210faf401278abf808ac45cb9" + integrity sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw== + chart.js@^4.5.1: version "4.5.1" resolved "https://registry.npmjs.org/chart.js/-/chart.js-4.5.1.tgz" @@ -1752,25 +2449,6 @@ chart.js@^4.5.1: dependencies: "@kurkle/color" "^0.3.0" -chevrotain-allstar@~0.3.0: - version "0.3.1" - resolved "https://registry.npmjs.org/chevrotain-allstar/-/chevrotain-allstar-0.3.1.tgz" - integrity sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw== - dependencies: - lodash-es "^4.17.21" - -chevrotain@~11.0.3: - version "11.0.3" - resolved "https://registry.npmjs.org/chevrotain/-/chevrotain-11.0.3.tgz" - integrity sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw== - dependencies: - "@chevrotain/cst-dts-gen" "11.0.3" - "@chevrotain/gast" "11.0.3" - "@chevrotain/regexp-to-ast" "11.0.3" - "@chevrotain/types" "11.0.3" - "@chevrotain/utils" "11.0.3" - lodash-es "4.17.21" - chokidar@^4.0.0: version "4.0.3" resolved "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz" @@ -1802,18 +2480,11 @@ cliui@^9.0.1: strip-ansi "^7.1.0" wrap-ansi "^9.0.0" -clsx@^2.0.0, clsx@^2.1.1: +clsx@^2.1.1: version "2.1.1" resolved "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz" integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA== -color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - color-convert@^2.0.1: version "2.0.1" resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz" @@ -1821,16 +2492,16 @@ color-convert@^2.0.1: dependencies: color-name 
"~1.1.4" -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - color-name@~1.1.4: version "1.1.4" resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== +comma-separated-tokens@^2.0.0: + version "2.0.3" + resolved "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz#4e89c9458acb61bc8fef19f4529973b2392839ee" + integrity sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg== + commander@2: version "2.20.3" resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" @@ -1861,44 +2532,25 @@ concat-map@0.0.1: resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== -confbox@^0.1.8: - version "0.1.8" - resolved "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz" - integrity sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w== - -confbox@^0.2.2: - version "0.2.2" - resolved "https://registry.npmjs.org/confbox/-/confbox-0.2.2.tgz" - integrity sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ== - convert-source-map@^1.5.0: version "1.9.0" resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== +convert-source-map@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz" + integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== + core-util-is@~1.0.0: 
version "1.0.3" resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz" integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== -cose-base@^1.0.0: - version "1.0.3" - resolved "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz" - integrity sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg== - dependencies: - layout-base "^1.0.0" - -cose-base@^2.2.0: - version "2.2.0" - resolved "https://registry.npmjs.org/cose-base/-/cose-base-2.2.0.tgz" - integrity sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g== - dependencies: - layout-base "^2.0.0" - cosmiconfig@^7.0.0: - version "7.0.1" - resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.1.tgz" - integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== + version "7.1.0" + resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz" + integrity sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA== dependencies: "@types/parse-json" "^4.0.0" import-fresh "^3.2.1" @@ -1919,6 +2571,11 @@ crc32-stream@^4.0.2: crc-32 "^1.2.0" readable-stream "^3.4.0" +crelt@^1.0.0: + version "1.0.6" + resolved "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz" + integrity sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g== + cross-spawn@^7.0.6: version "7.0.6" resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz" @@ -1935,56 +2592,36 @@ css-line-break@^2.1.0: dependencies: utrie "^1.0.2" -csstype@^3.0.2, csstype@^3.1.0, csstype@^3.1.3: - version "3.1.3" - resolved "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz" - integrity sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw== +css-tree@^3.0.0, css-tree@^3.2.1: + version "3.2.1" + resolved 
"https://registry.npmjs.org/css-tree/-/css-tree-3.2.1.tgz" + integrity sha512-X7sjQzceUhu1u7Y/ylrRZFU2FS6LRiFVp6rKLPg23y3x3c3DOKAwuXGDp+PAGjh6CSnCjYeAul8pcT8bAl+lSA== + dependencies: + mdn-data "2.27.1" + source-map-js "^1.2.1" + +css.escape@^1.5.1: + version "1.5.1" + resolved "https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz" + integrity sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg== + +csstype@^3.0.2, csstype@^3.1.0, csstype@^3.2.2, csstype@^3.2.3: + version "3.2.3" + resolved "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz" + integrity sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ== culori@^4.0.2: version "4.0.2" resolved "https://registry.npmjs.org/culori/-/culori-4.0.2.tgz" integrity sha512-1+BhOB8ahCn4O0cep0Sh2l9KCOfOdY+BXJnKMHFFzDEouSr/el18QwXEMRlOj9UY5nCeA8UN3a/82rUWRBeyBw== -cytoscape-cose-bilkent@^4.1.0: - version "4.1.0" - resolved "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz" - integrity sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ== - dependencies: - cose-base "^1.0.0" - -cytoscape-fcose@^2.2.0: - version "2.2.0" - resolved "https://registry.npmjs.org/cytoscape-fcose/-/cytoscape-fcose-2.2.0.tgz" - integrity sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ== - dependencies: - cose-base "^2.2.0" - -cytoscape@^3.29.3: - version "3.33.1" - resolved "https://registry.npmjs.org/cytoscape/-/cytoscape-3.33.1.tgz" - integrity sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ== - -"d3-array@1 - 2": - version "2.12.1" - resolved "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz" - integrity sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ== - dependencies: - internmap "^1.0.0" - -"d3-array@1 - 3", "d3-array@2 - 3", 
d3-array@3, d3-array@3.2.4, d3-array@^3.2.0, d3-array@^3.2.4: +"d3-array@1 - 3", "d3-array@2 - 3", "d3-array@2.10.0 - 3", "d3-array@2.5.0 - 3", d3-array@3, d3-array@3.2.4, d3-array@^3.2.0, d3-array@^3.2.4: version "3.2.4" resolved "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz" integrity sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg== dependencies: internmap "1 - 2" -"d3-array@2.10.0 - 3", "d3-array@2.5.0 - 3": - version "3.2.0" - resolved "https://registry.npmjs.org/d3-array/-/d3-array-3.2.0.tgz" - integrity sha512-3yXFQo0oG3QCxbF06rMPFyGRMGJNS7NvsV1+2joOjbBE+9xvWQ8+GcMJAjRCzw06zQ3/arXeJgbPYcjUCuC+3g== - dependencies: - internmap "1 - 2" - d3-axis@3: version "3.0.0" resolved "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz" @@ -2071,9 +2708,9 @@ d3-force@3, d3-force@^3.0.0: d3-timer "1 - 3" "d3-format@1 - 3", d3-format@3, d3-format@^3.1.0: - version "3.1.0" - resolved "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz" - integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA== + version "3.1.2" + resolved "https://registry.npmjs.org/d3-format/-/d3-format-3.1.2.tgz" + integrity sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg== d3-geo-projection@^4.0.0: version "4.0.0" @@ -2103,11 +2740,6 @@ d3-hierarchy@3, d3-hierarchy@^3.1.2: dependencies: d3-color "1 - 3" -d3-path@1: - version "1.0.9" - resolved "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz" - integrity sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg== - "d3-path@1 - 3", d3-path@3, d3-path@^3.1.0: version "3.1.0" resolved "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz" @@ -2128,14 +2760,6 @@ d3-random@3: resolved "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz" integrity sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ== 
-d3-sankey@^0.12.3: - version "0.12.3" - resolved "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz" - integrity sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ== - dependencies: - d3-array "1 - 2" - d3-shape "^1.2.0" - d3-scale-chromatic@3, d3-scale-chromatic@^3.1.0: version "3.1.0" resolved "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz" @@ -2167,13 +2791,6 @@ d3-shape@3, d3-shape@^3.2.0: dependencies: d3-path "^3.1.0" -d3-shape@^1.2.0: - version "1.3.7" - resolved "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz" - integrity sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw== - dependencies: - d3-path "1" - "d3-time-format@2 - 4", d3-time-format@4, d3-time-format@^4.1.0: version "4.1.0" resolved "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz" @@ -2215,7 +2832,7 @@ d3-zoom@3: d3-selection "2 - 3" d3-transition "2 - 3" -d3@^7.3.0, d3@^7.9.0: +d3@^7.3.0: version "7.9.0" resolved "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz" integrity sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA== @@ -2251,19 +2868,19 @@ d3@^7.3.0, d3@^7.9.0: d3-transition "3" d3-zoom "3" -dagre-d3-es@7.0.13: - version "7.0.13" - resolved "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.13.tgz" - integrity sha512-efEhnxpSuwpYOKRm/L5KbqoZmNNukHa/Flty4Wp62JRvgH2ojwVgPgdYyr4twpieZnyRDdIH7PY2mopX26+j2Q== - dependencies: - d3 "^7.9.0" - lodash-es "^4.17.21" - damerau-levenshtein@^1.0.8: version "1.0.8" resolved "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz" integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== +data-urls@^7.0.0: + version "7.0.0" + resolved "https://registry.npmjs.org/data-urls/-/data-urls-7.0.0.tgz" + integrity 
sha512-23XHcCF+coGYevirZceTVD7NdJOqVn+49IHyxgszm+JIiHLoB2TkmPtsYkNWT1pvRSGkc35L6NHs0yHkN2SumA== + dependencies: + whatwg-mimetype "^5.0.0" + whatwg-url "^16.0.0" + data-view-buffer@^1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz" @@ -2291,18 +2908,30 @@ data-view-byte-offset@^1.0.1: es-errors "^1.3.0" is-data-view "^1.0.1" -dayjs@^1.11.18, dayjs@^1.8.34: - version "1.11.18" - resolved "https://registry.npmjs.org/dayjs/-/dayjs-1.11.18.tgz" - integrity sha512-zFBQ7WFRvVRhKcWoUh+ZA1g2HVgUbsZm9sbddh8EC5iv93sui8DVVz1Npvz+r6meo9VKfa8NyLWBsQK1VvIKPA== +dayjs@^1.8.34: + version "1.11.20" + resolved "https://registry.npmjs.org/dayjs/-/dayjs-1.11.20.tgz" + integrity sha512-YbwwqR/uYpeoP4pu043q+LTDLFBLApUP6VxRihdfNTqu4ubqMlGDLd6ErXhEgsyvY0K6nCs7nggYumAN+9uEuQ== -debug@^4.3.1, debug@^4.3.2, debug@^4.3.4, debug@^4.4.1: +debug@^4.0.0, debug@^4.3.1, debug@^4.3.2, debug@^4.4.3: version "4.4.3" resolved "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz" integrity sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA== dependencies: ms "^2.1.3" +decimal.js@^10.6.0: + version "10.6.0" + resolved "https://registry.npmjs.org/decimal.js/-/decimal.js-10.6.0.tgz" + integrity sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg== + +decode-named-character-reference@^1.0.0: + version "1.3.0" + resolved "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.3.0.tgz#3e40603760874c2e5867691b599d73a7da25b53f" + integrity sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q== + dependencies: + character-entities "^2.0.0" + decompress-response@^6.0.0: version "6.0.0" resolved "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz" @@ -2339,22 +2968,29 @@ define-properties@^1.1.3, define-properties@^1.2.1: object-keys "^1.1.1" delaunator@5: - version 
"5.0.0" - resolved "https://registry.npmjs.org/delaunator/-/delaunator-5.0.0.tgz" - integrity sha512-AyLvtyJdbv/U1GkiS6gUUzclRoAY4Gs75qkMygJJhU75LW4DNuSF2RMzpxs9jw9Oz1BobHjTdkG3zdP55VxAqw== + version "5.1.0" + resolved "https://registry.npmjs.org/delaunator/-/delaunator-5.1.0.tgz" + integrity sha512-AGrQ4QSgssa1NGmWmLPqN5NY2KajF5MqxetNEO+o0n3ZwZZeTmt7bBnvzHWrmkZFxGgr4HdyFgelzgi06otLuQ== dependencies: - robust-predicates "^3.0.0" + robust-predicates "^3.0.2" -detect-libc@^1.0.3: - version "1.0.3" - resolved "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz" - integrity sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg== +dequal@^2.0.0, dequal@^2.0.3: + version "2.0.3" + resolved "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz" + integrity sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA== -detect-libc@^2.0.0: +detect-libc@^2.0.0, detect-libc@^2.0.3: version "2.1.2" resolved "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz" integrity sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ== +devlop@^1.0.0, devlop@^1.1.0: + version "1.1.0" + resolved "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz#4db7c2ca4dc6e0e834c30be70c94bbc976dc7018" + integrity sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA== + dependencies: + dequal "^2.0.0" + dnd-core@^16.0.1: version "16.0.1" resolved "https://registry.npmjs.org/dnd-core/-/dnd-core-16.0.1.tgz" @@ -2371,6 +3007,16 @@ doctrine@^2.1.0: dependencies: esutils "^2.0.2" +dom-accessibility-api@^0.5.9: + version "0.5.16" + resolved "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz" + integrity sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg== + +dom-accessibility-api@^0.6.3: + version "0.6.3" + resolved 
"https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.6.3.tgz" + integrity sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w== + dom-helpers@^5.0.1: version "5.2.1" resolved "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz" @@ -2379,10 +3025,10 @@ dom-helpers@^5.0.1: "@babel/runtime" "^7.8.7" csstype "^3.0.2" -dompurify@*, dompurify@^3.2.4, dompurify@^3.2.5: - version "3.3.0" - resolved "https://registry.npmjs.org/dompurify/-/dompurify-3.3.0.tgz" - integrity sha512-r+f6MYR1gGN1eJv0TVQbhA7if/U7P87cdPl3HN5rikqaBSBxLiCb/b9O+2eG0cxz0ghyU+mU1QkbsOwERMYlWQ== +dompurify@*, dompurify@^3.2.4: + version "3.3.3" + resolved "https://registry.npmjs.org/dompurify/-/dompurify-3.3.3.tgz" + integrity sha512-Oj6pzI2+RqBfFG+qOaOLbFXLQ90ARpcGG6UePL82bJLtdsa6CYJD7nmiU8MW9nQNOtCHV3lZ/Bzq1X0QYbBZCA== optionalDependencies: "@types/trusted-types" "^2.0.7" @@ -2427,17 +3073,27 @@ end-of-stream@^1.1.0, end-of-stream@^1.4.1: dependencies: once "^1.4.0" +entities@^4.4.0: + version "4.5.0" + resolved "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz" + integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw== + +entities@^6.0.0: + version "6.0.1" + resolved "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz" + integrity sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g== + error-ex@^1.3.1: - version "1.3.2" - resolved "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz" - integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + version "1.3.4" + resolved "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz" + integrity sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ== dependencies: is-arrayish "^0.2.1" -es-abstract@^1.17.5, es-abstract@^1.23.2, es-abstract@^1.23.3, es-abstract@^1.23.5, 
es-abstract@^1.23.6, es-abstract@^1.23.9, es-abstract@^1.24.0: - version "1.24.0" - resolved "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz" - integrity sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg== +es-abstract@^1.17.5, es-abstract@^1.23.2, es-abstract@^1.23.3, es-abstract@^1.23.5, es-abstract@^1.23.6, es-abstract@^1.23.9, es-abstract@^1.24.0, es-abstract@^1.24.1: + version "1.24.1" + resolved "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.1.tgz" + integrity sha512-zHXBLhP+QehSSbsS9Pt23Gg964240DPd6QCf8WpkqEXxQ7fhdZzYsocOr5u7apWonsS5EjZDmTF+/slGMyasvw== dependencies: array-buffer-byte-length "^1.0.2" arraybuffer.prototype.slice "^1.0.4" @@ -2505,27 +3161,33 @@ es-errors@^1.3.0: integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== es-iterator-helpers@^1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.2.1.tgz" - integrity sha512-uDn+FE1yrDzyC0pCo961B2IHbdM8y/ACZsKD4dG6WqrjV53BADjwa7D+1aom2rsNVfLyDgU/eigvlJGJ08OQ4w== + version "1.3.1" + resolved "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.3.1.tgz" + integrity sha512-zWwRvqWiuBPr0muUG/78cW3aHROFCNIQ3zpmYDpwdbnt2m+xlNyRWpHBpa2lJjSBit7BQ+RXA1iwbSmu5yJ/EQ== dependencies: call-bind "^1.0.8" - call-bound "^1.0.3" + call-bound "^1.0.4" define-properties "^1.2.1" - es-abstract "^1.23.6" + es-abstract "^1.24.1" es-errors "^1.3.0" - es-set-tostringtag "^2.0.3" + es-set-tostringtag "^2.1.0" function-bind "^1.1.2" - get-intrinsic "^1.2.6" + get-intrinsic "^1.3.0" globalthis "^1.0.4" gopd "^1.2.0" has-property-descriptors "^1.0.2" has-proto "^1.2.0" has-symbols "^1.1.0" internal-slot "^1.1.0" - iterator.prototype "^1.1.4" + iterator.prototype "^1.1.5" + math-intrinsics "^1.1.0" safe-array-concat "^1.1.3" +es-module-lexer@^2.0.0: + version "2.0.0" + resolved 
"https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-2.0.0.tgz" + integrity sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw== + es-object-atoms@^1.0.0, es-object-atoms@^1.1.1: version "1.1.1" resolved "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz" @@ -2533,7 +3195,7 @@ es-object-atoms@^1.0.0, es-object-atoms@^1.1.1: dependencies: es-errors "^1.3.0" -es-set-tostringtag@^2.0.3, es-set-tostringtag@^2.1.0: +es-set-tostringtag@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz" integrity sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA== @@ -2593,11 +3255,6 @@ escalade@^3.1.1: resolved "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz" integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" - integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== - escape-string-regexp@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" @@ -2666,24 +3323,29 @@ eslint-visitor-keys@^4.2.1: resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz" integrity sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ== +eslint-visitor-keys@^5.0.0: + version "5.0.1" + resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz" + integrity sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA== + eslint@^9.15.0: - version "9.38.0" - resolved "https://registry.npmjs.org/eslint/-/eslint-9.38.0.tgz" - integrity 
sha512-t5aPOpmtJcZcz5UJyY2GbvpDlsK5E8JqRqoKtfiKE3cNh437KIqfJr3A3AKf5k64NPx6d0G3dno6XDY05PqPtw== + version "9.39.4" + resolved "https://registry.npmjs.org/eslint/-/eslint-9.39.4.tgz" + integrity sha512-XoMjdBOwe/esVgEvLmNsD3IRHkm7fbKIUGvrleloJXUZgDHig2IPWNniv+GwjyJXzuNqVjlr5+4yVUZjycJwfQ== dependencies: "@eslint-community/eslint-utils" "^4.8.0" "@eslint-community/regexpp" "^4.12.1" - "@eslint/config-array" "^0.21.1" - "@eslint/config-helpers" "^0.4.1" - "@eslint/core" "^0.16.0" - "@eslint/eslintrc" "^3.3.1" - "@eslint/js" "9.38.0" - "@eslint/plugin-kit" "^0.4.0" + "@eslint/config-array" "^0.21.2" + "@eslint/config-helpers" "^0.4.2" + "@eslint/core" "^0.17.0" + "@eslint/eslintrc" "^3.3.5" + "@eslint/js" "9.39.4" + "@eslint/plugin-kit" "^0.4.1" "@humanfs/node" "^0.16.6" "@humanwhocodes/module-importer" "^1.0.1" "@humanwhocodes/retry" "^0.4.2" "@types/estree" "^1.0.6" - ajv "^6.12.4" + ajv "^6.14.0" chalk "^4.0.0" cross-spawn "^7.0.6" debug "^4.3.2" @@ -2702,7 +3364,7 @@ eslint@^9.15.0: is-glob "^4.0.0" json-stable-stringify-without-jsonify "^1.0.1" lodash.merge "^4.6.2" - minimatch "^3.1.2" + minimatch "^3.1.5" natural-compare "^1.4.0" optionator "^0.9.3" @@ -2716,9 +3378,9 @@ espree@^10.0.1, espree@^10.4.0: eslint-visitor-keys "^4.2.1" esquery@^1.5.0: - version "1.6.0" - resolved "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz" - integrity sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg== + version "1.7.0" + resolved "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz" + integrity sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g== dependencies: estraverse "^5.1.0" @@ -2734,15 +3396,27 @@ estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz" integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== +estree-util-is-identifier-name@^3.0.0: + 
version "3.0.0" + resolved "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz#0b5ef4c4ff13508b34dcd01ecfa945f61fce5dbd" + integrity sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg== + +estree-walker@^3.0.3: + version "3.0.3" + resolved "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz" + integrity sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g== + dependencies: + "@types/estree" "^1.0.0" + esutils@^2.0.2: version "2.0.3" resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== eventemitter3@^5.0.0: - version "5.0.1" - resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz" - integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA== + version "5.0.4" + resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.4.tgz" + integrity sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw== exceljs@^4.4.0: version "4.4.0" @@ -2764,10 +3438,15 @@ expand-template@^2.0.3: resolved "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz" integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg== -exsolve@^1.0.7: - version "1.0.7" - resolved "https://registry.npmjs.org/exsolve/-/exsolve-1.0.7.tgz" - integrity sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw== +expect-type@^1.3.0: + version "1.3.0" + resolved "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz" + integrity sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA== + +extend@^3.0.0: + version "3.0.2" + resolved 
"https://registry.npmjs.org/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== fast-csv@^4.3.1: version "4.3.6" @@ -2782,16 +3461,10 @@ fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== -fast-glob@^3.3.2: - version "3.3.3" - resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz" - integrity sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.2" - merge2 "^1.3.0" - micromatch "^4.0.8" +fast-equals@^5.3.3: + version "5.4.0" + resolved "https://registry.npmjs.org/fast-equals/-/fast-equals-5.4.0.tgz" + integrity sha512-jt2DW/aNFNwke7AUd+Z+e6pz39KO5rzdbbFCg2sGafS4mk13MI7Z8O5z9cADNn5lhGODIgLwug6TZO2ctf7kcw== fast-json-patch@^3.0.0-1, fast-json-patch@^3.1.1: version "3.1.1" @@ -2808,12 +3481,10 @@ fast-levenshtein@^2.0.6: resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== -fastq@^1.6.0: - version "1.19.1" - resolved "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz" - integrity sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ== - dependencies: - reusify "^1.0.4" +fdir@^6.5.0: + version "6.5.0" + resolved "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz" + integrity sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg== file-entry-cache@^8.0.0: version "8.0.0" @@ -2822,13 +3493,6 @@ file-entry-cache@^8.0.0: dependencies: flat-cache "^4.0.0" -fill-range@^7.1.1: - version "7.1.1" - resolved 
"https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz" - integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg== - dependencies: - to-regex-range "^5.0.1" - find-root@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz" @@ -2851,9 +3515,9 @@ flat-cache@^4.0.0: keyv "^4.5.4" flatted@^3.2.9: - version "3.3.3" - resolved "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz" - integrity sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg== + version "3.4.2" + resolved "https://registry.npmjs.org/flatted/-/flatted-3.4.2.tgz" + integrity sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA== for-each@^0.3.3, for-each@^0.3.5: version "0.3.5" @@ -2920,9 +3584,9 @@ get-caller-file@^2.0.5: integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== get-east-asian-width@^1.0.0: - version "1.4.0" - resolved "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz" - integrity sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q== + version "1.5.0" + resolved "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.5.0.tgz" + integrity sha512-CQ+bEO+Tva/qlmw24dCejulK5pMzVnUOFOijVogd3KQs07HnRIgp8TGipvCCRT06xeYEbpbgwaCxglFyiuIcmA== get-intrinsic@^1.2.4, get-intrinsic@^1.2.5, get-intrinsic@^1.2.6, get-intrinsic@^1.2.7, get-intrinsic@^1.3.0: version "1.3.0" @@ -2962,13 +3626,6 @@ github-from-package@0.0.0: resolved "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz" integrity sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw== -glob-parent@^5.1.2: - version "5.1.2" - resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" - integrity 
sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - dependencies: - is-glob "^4.0.1" - glob-parent@^6.0.2: version "6.0.2" resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz" @@ -2993,7 +3650,7 @@ globals@^14.0.0: resolved "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz" integrity sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ== -globals@^15.12.0, globals@^15.15.0: +globals@^15.12.0: version "15.15.0" resolved "https://registry.npmjs.org/globals/-/globals-15.15.0.tgz" integrity sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg== @@ -3031,26 +3688,11 @@ graceful-fs@^4.1.2, graceful-fs@^4.2.0, graceful-fs@^4.2.2: resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== -graphemer@^1.4.0: - version "1.4.0" - resolved "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz" - integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== - -hachure-fill@^0.5.2: - version "0.5.2" - resolved "https://registry.npmjs.org/hachure-fill/-/hachure-fill-0.5.2.tgz" - integrity sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg== - has-bigints@^1.0.2: version "1.1.0" resolved "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz" integrity sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg== -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" - integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== - has-flag@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz" @@ -3089,6 +3731,34 @@ hasown@^2.0.2: dependencies: 
function-bind "^1.1.2" +hast-util-to-jsx-runtime@^2.0.0: + version "2.3.6" + resolved "https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.6.tgz#ff31897aae59f62232e21594eac7ef6b63333e98" + integrity sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg== + dependencies: + "@types/estree" "^1.0.0" + "@types/hast" "^3.0.0" + "@types/unist" "^3.0.0" + comma-separated-tokens "^2.0.0" + devlop "^1.0.0" + estree-util-is-identifier-name "^3.0.0" + hast-util-whitespace "^3.0.0" + mdast-util-mdx-expression "^2.0.0" + mdast-util-mdx-jsx "^3.0.0" + mdast-util-mdxjs-esm "^2.0.0" + property-information "^7.0.0" + space-separated-tokens "^2.0.0" + style-to-js "^1.0.0" + unist-util-position "^5.0.0" + vfile-message "^4.0.0" + +hast-util-whitespace@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz#7778ed9d3c92dd9e8c5c8f648a49c21fc51cb621" + integrity sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw== + dependencies: + "@types/hast" "^3.0.0" + hoist-non-react-statics@^3.3.0, hoist-non-react-statics@^3.3.1, hoist-non-react-statics@^3.3.2: version "3.3.2" resolved "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz" @@ -3096,6 +3766,25 @@ hoist-non-react-statics@^3.3.0, hoist-non-react-statics@^3.3.1, hoist-non-react- dependencies: react-is "^16.7.0" +html-encoding-sniffer@^6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-6.0.0.tgz" + integrity sha512-CV9TW3Y3f8/wT0BRFc1/KAVQ3TUHiXmaAb6VW9vtiMFf7SLoMd1PdAc4W3KFOFETBJUb90KatHqlsZMWV+R9Gg== + dependencies: + "@exodus/bytes" "^1.6.0" + +html-parse-stringify@^3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/html-parse-stringify/-/html-parse-stringify-3.0.1.tgz" + integrity 
sha512-KknJ50kTInJ7qIScF3jeaFRpMpE8/lfiTdzf/twXyPBLAGrLRTmkz3AdTnKeh40X8k9L2fdYwEp/42WGXIRGcg== + dependencies: + void-elements "3.1.0" + +html-url-attributes@^3.0.0: + version "3.0.1" + resolved "https://registry.npmjs.org/html-url-attributes/-/html-url-attributes-3.0.1.tgz#83b052cd5e437071b756cd74ae70f708870c2d87" + integrity sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ== + html2canvas@^1.4.1: version "1.4.1" resolved "https://registry.npmjs.org/html2canvas/-/html2canvas-1.4.1.tgz" @@ -3104,6 +3793,20 @@ html2canvas@^1.4.1: css-line-break "^2.1.0" text-segmentation "^1.0.3" +i18next-browser-languagedetector@^8.2.1: + version "8.2.1" + resolved "https://registry.npmjs.org/i18next-browser-languagedetector/-/i18next-browser-languagedetector-8.2.1.tgz" + integrity sha512-bZg8+4bdmaOiApD7N7BPT9W8MLZG+nPTOFlLiJiT8uzKXFjhxw4v2ierCXOwB5sFDMtuA5G4kgYZ0AznZxQ/cw== + dependencies: + "@babel/runtime" "^7.23.2" + +i18next@^26.0.1: + version "26.0.1" + resolved "https://registry.npmjs.org/i18next/-/i18next-26.0.1.tgz" + integrity sha512-vtz5sXU4+nkCm8yEU+JJ6yYIx0mkg9e68W0G0PXpnOsmzLajNsW5o28DJMqbajxfsfq0gV3XdrBudsDQnwxfsQ== + dependencies: + "@babel/runtime" "^7.29.2" + iconv-lite@0.6: version "0.6.3" resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz" @@ -3121,7 +3824,7 @@ ignore@^5.2.0: resolved "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz" integrity sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g== -ignore@^7.0.0: +ignore@^7.0.5: version "7.0.5" resolved "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz" integrity sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg== @@ -3136,15 +3839,15 @@ immer@^9.0.21: resolved "https://registry.npmjs.org/immer/-/immer-9.0.21.tgz" integrity sha512-bc4NBHqOqSfRW7POMkHd51LvClaeMXpm8dx0e8oE2GORbq5aRK7Bxl4FyzVLdGtLmvLKL7BTDBG5ACQm4HWjTA== -immutable@^5.0.2: - version "5.1.4" 
- resolved "https://registry.npmjs.org/immutable/-/immutable-5.1.4.tgz" - integrity sha512-p6u1bG3YSnINT5RQmx/yRZBpenIl30kVxkTLDyHLIMk0gict704Q9n+thfDI7lTRm9vXdDYutVzXhzcThxTnXA== +immutable@^5.1.5: + version "5.1.5" + resolved "https://registry.npmjs.org/immutable/-/immutable-5.1.5.tgz" + integrity sha512-t7xcm2siw+hlUM68I+UEOK+z84RzmN59as9DZ7P1l0994DKUWV7UXBMQZVxaoMSRQ+PBZbHCOoBt7a2wxOMt+A== import-fresh@^3.2.1: - version "3.3.0" - resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz" - integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + version "3.3.1" + resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz" + integrity sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ== dependencies: parent-module "^1.0.0" resolve-from "^4.0.0" @@ -3154,6 +3857,11 @@ imurmurhash@^0.1.4: resolved "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz" integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== +indent-string@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + inflight@^1.0.4: version "1.0.6" resolved "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" @@ -3172,6 +3880,11 @@ ini@~1.3.0: resolved "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz" integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== +inline-style-parser@0.2.7: + version "0.2.7" + resolved "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.7.tgz#b1fc68bfc0313b8685745e4464e37f9376b9c909" + integrity sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA== + internal-slot@^1.1.0: version "1.1.0" resolved 
"https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz" @@ -3186,10 +3899,18 @@ internal-slot@^1.1.0: resolved "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz" integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg== -internmap@^1.0.0: - version "1.0.1" - resolved "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz" - integrity sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw== +is-alphabetical@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz#01072053ea7c1036df3c7d19a6daaec7f19e789b" + integrity sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ== + +is-alphanumerical@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz#7c03fbe96e3e931113e57f964b0a368cc2dfd875" + integrity sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw== + dependencies: + is-alphabetical "^2.0.0" + is-decimal "^2.0.0" is-array-buffer@^3.0.4, is-array-buffer@^3.0.5: version "3.0.5" @@ -3236,7 +3957,7 @@ is-callable@^1.2.7: resolved "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz" integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== -is-core-module@^2.13.0, is-core-module@^2.8.1: +is-core-module@^2.16.1: version "2.16.1" resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz" integrity sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w== @@ -3260,6 +3981,11 @@ is-date-object@^1.0.5, is-date-object@^1.1.0: call-bound "^1.0.2" has-tostringtag "^1.0.2" +is-decimal@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz#9469d2dc190d0214fd87d78b78caecc0cc14eef7" + integrity 
sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A== + is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz" @@ -3283,13 +4009,18 @@ is-generator-function@^1.0.10: has-tostringtag "^1.0.2" safe-regex-test "^1.1.0" -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3: +is-glob@^4.0.0, is-glob@^4.0.3: version "4.0.3" resolved "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz" integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== dependencies: is-extglob "^2.1.1" +is-hexadecimal@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz#86b5bf668fca307498d319dfc03289d781a90027" + integrity sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg== + is-map@^2.0.3: version "2.0.3" resolved "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz" @@ -3308,10 +4039,15 @@ is-number-object@^1.1.1: call-bound "^1.0.3" has-tostringtag "^1.0.2" -is-number@^7.0.0: - version "7.0.0" - resolved "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz" - integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== +is-plain-obj@^4.0.0: + version "4.1.0" + resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz#d65025edec3657ce032fd7db63c97883eaed71f0" + integrity sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg== + +is-potential-custom-element-name@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz" + integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== is-regex@^1.2.1: version "1.2.1" @@ -3394,7 +4130,7 @@ isexe@^2.0.0: resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" integrity 
sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== -iterator.prototype@^1.1.4: +iterator.prototype@^1.1.5: version "1.1.5" resolved "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.5.tgz" integrity sha512-H0dkQoCa3b2VEeKQBOxFph+JAbcrQdE7KC0UkqwpLmv2EC4P41QXP+rqo9wYodACiG5/WM5s9oDApTU8utwj9g== @@ -3411,13 +4147,45 @@ iterator.prototype@^1.1.4: resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -js-yaml@^4.1.0, js-yaml@^4.1.1: +js-yaml@^4.1.1: version "4.1.1" - resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz#854c292467705b699476e1a2decc0c8a3458806b" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz" integrity sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA== dependencies: argparse "^2.0.1" +jsdom@^29.0.1: + version "29.0.1" + resolved "https://registry.npmjs.org/jsdom/-/jsdom-29.0.1.tgz" + integrity sha512-z6JOK5gRO7aMybVq/y/MlIpKh8JIi68FBKMUtKkK2KH/wMSRlCxQ682d08LB9fYXplyY/UXG8P4XXTScmdjApg== + dependencies: + "@asamuzakjp/css-color" "^5.0.1" + "@asamuzakjp/dom-selector" "^7.0.3" + "@bramus/specificity" "^2.4.2" + "@csstools/css-syntax-patches-for-csstree" "^1.1.1" + "@exodus/bytes" "^1.15.0" + css-tree "^3.2.1" + data-urls "^7.0.0" + decimal.js "^10.6.0" + html-encoding-sniffer "^6.0.0" + is-potential-custom-element-name "^1.0.1" + lru-cache "^11.2.7" + parse5 "^8.0.0" + saxes "^6.0.0" + symbol-tree "^3.2.4" + tough-cookie "^6.0.1" + undici "^7.24.5" + w3c-xmlserializer "^5.0.0" + webidl-conversions "^8.0.1" + whatwg-mimetype "^5.0.0" + whatwg-url "^16.0.1" + xml-name-validator "^5.0.0" + +jsesc@^3.0.2: + version "3.1.0" + resolved "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz" + integrity sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA== + 
json-buffer@3.0.1: version "3.0.1" resolved "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz" @@ -3468,10 +4236,15 @@ jszip@^3.10.1: readable-stream "~2.3.6" setimmediate "^1.0.5" +jwt-decode@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/jwt-decode/-/jwt-decode-4.0.0.tgz" + integrity sha512-+KJGIyHgkGuIq3IEBNftfhW/LfWhXUIY6OmyVWjliu5KH1y0fw7VQ8YndE2O4qZdMSd9SqbnC8GOcZEy0Om7sA== + katex@^0.16.0, katex@^0.16.22: - version "0.16.25" - resolved "https://registry.npmjs.org/katex/-/katex-0.16.25.tgz" - integrity sha512-woHRUZ/iF23GBP1dkDQMh1QBad9dmr8/PAwNA54VrSOVYgI12MAcE14TqnDdQOdzyEonGzMepYnqBMYdsoAr8Q== + version "0.16.42" + resolved "https://registry.npmjs.org/katex/-/katex-0.16.42.tgz" + integrity sha512-sZ4jqyEXfHTLEFK+qsFYToa3UZ0rtFcPGwKpyiRYh2NJn8obPWOQ+/u7ux0F6CAU/y78+Mksh1YkxTPXTh47TQ== dependencies: commander "^8.3.0" @@ -3482,27 +4255,6 @@ keyv@^4.5.4: dependencies: json-buffer "3.0.1" -khroma@^2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz" - integrity sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw== - -kolorist@^1.8.0: - version "1.8.0" - resolved "https://registry.npmjs.org/kolorist/-/kolorist-1.8.0.tgz" - integrity sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ== - -langium@3.3.1: - version "3.3.1" - resolved "https://registry.npmjs.org/langium/-/langium-3.3.1.tgz" - integrity sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w== - dependencies: - chevrotain "~11.0.3" - chevrotain-allstar "~0.3.0" - vscode-languageserver "~9.0.1" - vscode-languageserver-textdocument "~1.0.11" - vscode-uri "~3.0.8" - language-subtag-registry@^0.3.20: version "0.3.23" resolved "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.23.tgz" @@ -3515,16 +4267,6 @@ language-tags@^1.0.9: dependencies: language-subtag-registry "^0.3.20" 
-layout-base@^1.0.0: - version "1.0.2" - resolved "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz" - integrity sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg== - -layout-base@^2.0.0: - version "2.0.1" - resolved "https://registry.npmjs.org/layout-base/-/layout-base-2.0.1.tgz" - integrity sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg== - lazystream@^1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz" @@ -3554,25 +4296,102 @@ lie@~3.3.0: dependencies: immediate "~3.0.5" +lightningcss-android-arm64@1.32.0: + version "1.32.0" + resolved "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.32.0.tgz#f033885116dfefd9c6f54787523e3514b61e1968" + integrity sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg== + +lightningcss-darwin-arm64@1.32.0: + version "1.32.0" + resolved "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.32.0.tgz" + integrity sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ== + +lightningcss-darwin-x64@1.32.0: + version "1.32.0" + resolved "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.32.0.tgz#35f3e97332d130b9ca181e11b568ded6aebc6d5e" + integrity sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w== + +lightningcss-freebsd-x64@1.32.0: + version "1.32.0" + resolved "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.32.0.tgz#9777a76472b64ed6ff94342ad64c7bafd794a575" + integrity sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig== + +lightningcss-linux-arm-gnueabihf@1.32.0: + version "1.32.0" + resolved 
"https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.32.0.tgz#13ae652e1ab73b9135d7b7da172f666c410ad53d" + integrity sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw== + +lightningcss-linux-arm64-gnu@1.32.0: + version "1.32.0" + resolved "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.32.0.tgz#417858795a94592f680123a1b1f9da8a0e1ef335" + integrity sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ== + +lightningcss-linux-arm64-musl@1.32.0: + version "1.32.0" + resolved "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.32.0.tgz#6be36692e810b718040802fd809623cffe732133" + integrity sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg== + +lightningcss-linux-x64-gnu@1.32.0: + version "1.32.0" + resolved "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.32.0.tgz#0b7803af4eb21cfd38dd39fe2abbb53c7dd091f6" + integrity sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA== + +lightningcss-linux-x64-musl@1.32.0: + version "1.32.0" + resolved "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.32.0.tgz#88dc8ba865ddddb1ac5ef04b0f161804418c163b" + integrity sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg== + +lightningcss-win32-arm64-msvc@1.32.0: + version "1.32.0" + resolved "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.32.0.tgz#4f30ba3fa5e925f5b79f945e8cc0d176c3b1ab38" + integrity sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw== + +lightningcss-win32-x64-msvc@1.32.0: + version "1.32.0" + resolved 
"https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.32.0.tgz#141aa5605645064928902bb4af045fa7d9f4220a" + integrity sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q== + +lightningcss@^1.32.0: + version "1.32.0" + resolved "https://registry.npmjs.org/lightningcss/-/lightningcss-1.32.0.tgz" + integrity sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ== + dependencies: + detect-libc "^2.0.3" + optionalDependencies: + lightningcss-android-arm64 "1.32.0" + lightningcss-darwin-arm64 "1.32.0" + lightningcss-darwin-x64 "1.32.0" + lightningcss-freebsd-x64 "1.32.0" + lightningcss-linux-arm-gnueabihf "1.32.0" + lightningcss-linux-arm64-gnu "1.32.0" + lightningcss-linux-arm64-musl "1.32.0" + lightningcss-linux-x64-gnu "1.32.0" + lightningcss-linux-x64-musl "1.32.0" + lightningcss-win32-arm64-msvc "1.32.0" + lightningcss-win32-x64-msvc "1.32.0" + lines-and-columns@^1.1.6: version "1.2.4" resolved "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz" integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== +linkify-it@^5.0.0: + version "5.0.0" + resolved "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz" + integrity sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ== + dependencies: + uc.micro "^2.0.0" + +linkifyjs@^4.3.2: + version "4.3.2" + resolved "https://registry.npmjs.org/linkifyjs/-/linkifyjs-4.3.2.tgz" + integrity sha512-NT1CJtq3hHIreOianA8aSXn6Cw0JzYOuDQbOrSPe7gqFnCpKP++MQe3ODgO3oh2GJFORkAAdqredOa60z63GbA== + listenercount@~1.0.1: version "1.0.1" resolved "https://registry.npmjs.org/listenercount/-/listenercount-1.0.1.tgz" integrity sha512-3mk/Zag0+IJxeDrxSgaDPy4zZ3w05PRZeJNnlWhzFz5OkX49J4krc+A8X2d2M69vGMBEX0uyl8M+W+8gH+kBqQ== -local-pkg@^1.1.1: - version "1.1.2" - resolved 
"https://registry.npmjs.org/local-pkg/-/local-pkg-1.1.2.tgz" - integrity sha512-arhlxbFRmoQHl33a0Zkle/YWlmNwoyt6QNZEIJcqNbdrsix5Lvc4HyyI3EnwxTYlZYc32EbYrQ8SzEZ7dqgg9A== - dependencies: - mlly "^1.7.4" - pkg-types "^2.3.0" - quansync "^0.2.11" - localforage@^1.10.0: version "1.10.0" resolved "https://registry.npmjs.org/localforage/-/localforage-1.10.0.tgz" @@ -3587,17 +4406,12 @@ locate-path@^6.0.0: dependencies: p-locate "^5.0.0" -lodash-es@4.17.21, lodash-es@^4.17.21: - version "4.17.21" - resolved "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz" - integrity sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw== - lodash.clamp@^4.0.0: version "4.0.3" resolved "https://registry.npmjs.org/lodash.clamp/-/lodash.clamp-4.0.3.tgz" integrity sha512-HvzRFWjtcguTW7yd8NJBshuNaCa8aqNFtnswdT7f/cMd/1YKy5Zzoq4W/Oxvnx9l7aeY258uSdDfM793+eLsVg== -lodash.debounce@^4.0.0: +lodash.debounce@^4.0.0, lodash.debounce@^4.0.8: version "4.0.8" resolved "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz" integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== @@ -3677,6 +4491,11 @@ lodash@^4.17.21, lodash@^4.17.23: resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz" integrity sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w== +longest-streak@^3.0.0: + version "3.1.0" + resolved "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz#62fa67cd958742a1574af9f39866364102d90cd4" + integrity sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g== + loose-envify@^1.1.0, loose-envify@^1.4.0: version "1.4.0" resolved "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz" @@ -3684,83 +4503,387 @@ loose-envify@^1.1.0, loose-envify@^1.4.0: dependencies: js-tokens "^3.0.0 || ^4.0.0" +lru-cache@^11.2.6, lru-cache@^11.2.7: + version "11.2.7" + resolved 
"https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.7.tgz" + integrity sha512-aY/R+aEsRelme17KGQa/1ZSIpLpNYYrhcrepKTZgE+W3WM16YMCaPwOHLHsmopZHELU0Ojin1lPVxKR0MihncA== + +lz-string@^1.5.0: + version "1.5.0" + resolved "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz" + integrity sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ== + +magic-string@^0.30.21: + version "0.30.21" + resolved "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz" + integrity sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ== + dependencies: + "@jridgewell/sourcemap-codec" "^1.5.5" + +markdown-it-task-lists@^2.1.1: + version "2.1.1" + resolved "https://registry.npmjs.org/markdown-it-task-lists/-/markdown-it-task-lists-2.1.1.tgz" + integrity sha512-TxFAc76Jnhb2OUu+n3yz9RMu4CwGfaT788br6HhEDlvWfdeJcLUsxk1Hgw2yJio0OXsxv7pyIPmvECY7bMbluA== + +markdown-it@^14.0.0, markdown-it@^14.1.0: + version "14.1.1" + resolved "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.1.tgz" + integrity sha512-BuU2qnTti9YKgK5N+IeMubp14ZUKUUw7yeJbkjtosvHiP0AZ5c8IAgEMk79D0eC8F23r4Ac/q8cAIFdm2FtyoA== + dependencies: + argparse "^2.0.1" + entities "^4.4.0" + linkify-it "^5.0.0" + mdurl "^2.0.0" + punycode.js "^2.3.1" + uc.micro "^2.1.0" + markdown-to-jsx@^7.4.0: version "7.7.17" resolved "https://registry.npmjs.org/markdown-to-jsx/-/markdown-to-jsx-7.7.17.tgz" integrity sha512-7mG/1feQ0TX5I7YyMZVDgCC/y2I3CiEhIRQIhyov9nGBP5eoVrOXXHuL5ZP8GRfxVZKRiXWJgwXkb9It+nQZfQ== -marked@^16.2.1: - version "16.4.1" - resolved "https://registry.npmjs.org/marked/-/marked-16.4.1.tgz" - integrity sha512-ntROs7RaN3EvWfy3EZi14H4YxmT6A5YvywfhO+0pm+cH/dnSQRmdAmoFIc3B9aiwTehyk7pESH4ofyBY+V5hZg== - math-intrinsics@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz" integrity sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g== 
-merge2@^1.3.0: - version "1.4.1" - resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz" - integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== - -mermaid@^11.6.0: - version "11.12.1" - resolved "https://registry.npmjs.org/mermaid/-/mermaid-11.12.1.tgz" - integrity sha512-UlIZrRariB11TY1RtTgUWp65tphtBv4CSq7vyS2ZZ2TgoMjs2nloq+wFqxiwcxlhHUvs7DPGgMjs2aeQxz5h9g== - dependencies: - "@braintree/sanitize-url" "^7.1.1" - "@iconify/utils" "^3.0.1" - "@mermaid-js/parser" "^0.6.3" - "@types/d3" "^7.4.3" - cytoscape "^3.29.3" - cytoscape-cose-bilkent "^4.1.0" - cytoscape-fcose "^2.2.0" - d3 "^7.9.0" - d3-sankey "^0.12.3" - dagre-d3-es "7.0.13" - dayjs "^1.11.18" - dompurify "^3.2.5" - katex "^0.16.22" - khroma "^2.1.0" - lodash-es "^4.17.21" - marked "^16.2.1" - roughjs "^4.6.6" - stylis "^4.3.6" - ts-dedent "^2.2.0" - uuid "^11.1.0" - -micromatch@^4.0.5, micromatch@^4.0.8: - version "4.0.8" - resolved "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz" - integrity sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA== +mdast-util-from-markdown@^2.0.0: + version "2.0.3" + resolved "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.3.tgz#c95822b91aab75f18a4cbe8b2f51b873ed2cf0c7" + integrity sha512-W4mAWTvSlKvf8L6J+VN9yLSqQ9AOAAvHuoDAmPkz4dHf553m5gVj2ejadHJhoJmcmxEnOv6Pa8XJhpxE93kb8Q== + dependencies: + "@types/mdast" "^4.0.0" + "@types/unist" "^3.0.0" + decode-named-character-reference "^1.0.0" + devlop "^1.0.0" + mdast-util-to-string "^4.0.0" + micromark "^4.0.0" + micromark-util-decode-numeric-character-reference "^2.0.0" + micromark-util-decode-string "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + unist-util-stringify-position "^4.0.0" + +mdast-util-mdx-expression@^2.0.0: + version "2.0.1" + resolved 
"https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz#43f0abac9adc756e2086f63822a38c8d3c3a5096" + integrity sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ== + dependencies: + "@types/estree-jsx" "^1.0.0" + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + devlop "^1.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + +mdast-util-mdx-jsx@^3.0.0: + version "3.2.0" + resolved "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.2.0.tgz#fd04c67a2a7499efb905a8a5c578dddc9fdada0d" + integrity sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q== + dependencies: + "@types/estree-jsx" "^1.0.0" + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + "@types/unist" "^3.0.0" + ccount "^2.0.0" + devlop "^1.1.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + parse-entities "^4.0.0" + stringify-entities "^4.0.0" + unist-util-stringify-position "^4.0.0" + vfile-message "^4.0.0" + +mdast-util-mdxjs-esm@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz#019cfbe757ad62dd557db35a695e7314bcc9fa97" + integrity sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg== + dependencies: + "@types/estree-jsx" "^1.0.0" + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + devlop "^1.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + +mdast-util-phrasing@^4.0.0: + version "4.1.0" + resolved "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz#7cc0a8dec30eaf04b7b1a9661a92adb3382aa6e3" + integrity sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w== + dependencies: + "@types/mdast" "^4.0.0" + unist-util-is "^6.0.0" + +mdast-util-to-hast@^13.0.0: + version "13.2.1" + resolved 
"https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz#d7ff84ca499a57e2c060ae67548ad950e689a053" + integrity sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA== + dependencies: + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + "@ungap/structured-clone" "^1.0.0" + devlop "^1.0.0" + micromark-util-sanitize-uri "^2.0.0" + trim-lines "^3.0.0" + unist-util-position "^5.0.0" + unist-util-visit "^5.0.0" + vfile "^6.0.0" + +mdast-util-to-markdown@^2.0.0: + version "2.1.2" + resolved "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz#f910ffe60897f04bb4b7e7ee434486f76288361b" + integrity sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA== + dependencies: + "@types/mdast" "^4.0.0" + "@types/unist" "^3.0.0" + longest-streak "^3.0.0" + mdast-util-phrasing "^4.0.0" + mdast-util-to-string "^4.0.0" + micromark-util-classify-character "^2.0.0" + micromark-util-decode-string "^2.0.0" + unist-util-visit "^5.0.0" + zwitch "^2.0.0" + +mdast-util-to-string@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz#7a5121475556a04e7eddeb67b264aae79d312814" + integrity sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg== + dependencies: + "@types/mdast" "^4.0.0" + +mdn-data@2.27.1: + version "2.27.1" + resolved "https://registry.npmjs.org/mdn-data/-/mdn-data-2.27.1.tgz" + integrity sha512-9Yubnt3e8A0OKwxYSXyhLymGW4sCufcLG6VdiDdUGVkPhpqLxlvP5vl1983gQjJl3tqbrM731mjaZaP68AgosQ== + +mdurl@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz" + integrity sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w== + +micromark-core-commonmark@^2.0.0: + version "2.0.3" + resolved 
"https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz#c691630e485021a68cf28dbc2b2ca27ebf678cd4" + integrity sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg== + dependencies: + decode-named-character-reference "^1.0.0" + devlop "^1.0.0" + micromark-factory-destination "^2.0.0" + micromark-factory-label "^2.0.0" + micromark-factory-space "^2.0.0" + micromark-factory-title "^2.0.0" + micromark-factory-whitespace "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-chunked "^2.0.0" + micromark-util-classify-character "^2.0.0" + micromark-util-html-tag-name "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" + micromark-util-resolve-all "^2.0.0" + micromark-util-subtokenize "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-destination@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz#8fef8e0f7081f0474fbdd92deb50c990a0264639" + integrity sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA== + dependencies: + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-label@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz#5267efa97f1e5254efc7f20b459a38cb21058ba1" + integrity sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg== + dependencies: + devlop "^1.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-space@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz#36d0212e962b2b3121f8525fc7a3c7c029f334fc" + integrity 
sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg== + dependencies: + micromark-util-character "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-title@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz#237e4aa5d58a95863f01032d9ee9b090f1de6e94" + integrity sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw== + dependencies: + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-whitespace@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz#06b26b2983c4d27bfcc657b33e25134d4868b0b1" + integrity sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ== + dependencies: + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-util-character@^2.0.0: + version "2.1.1" + resolved "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz#2f987831a40d4c510ac261e89852c4e9703ccda6" + integrity sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q== + dependencies: + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-util-chunked@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz#47fbcd93471a3fccab86cff03847fc3552db1051" + integrity sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA== + dependencies: + micromark-util-symbol "^2.0.0" + +micromark-util-classify-character@^2.0.0: + version "2.0.1" + resolved 
"https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz#d399faf9c45ca14c8b4be98b1ea481bced87b629" + integrity sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q== + dependencies: + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-util-combine-extensions@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz#2a0f490ab08bff5cc2fd5eec6dd0ca04f89b30a9" + integrity sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg== + dependencies: + micromark-util-chunked "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-util-decode-numeric-character-reference@^2.0.0: + version "2.0.2" + resolved "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz#fcf15b660979388e6f118cdb6bf7d79d73d26fe5" + integrity sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw== + dependencies: + micromark-util-symbol "^2.0.0" + +micromark-util-decode-string@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz#6cb99582e5d271e84efca8e61a807994d7161eb2" + integrity sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ== + dependencies: + decode-named-character-reference "^1.0.0" + micromark-util-character "^2.0.0" + micromark-util-decode-numeric-character-reference "^2.0.0" + micromark-util-symbol "^2.0.0" + +micromark-util-encode@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz#0d51d1c095551cfaac368326963cf55f15f540b8" + integrity 
sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw== + +micromark-util-html-tag-name@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz#e40403096481986b41c106627f98f72d4d10b825" + integrity sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA== + +micromark-util-normalize-identifier@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz#c30d77b2e832acf6526f8bf1aa47bc9c9438c16d" + integrity sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q== + dependencies: + micromark-util-symbol "^2.0.0" + +micromark-util-resolve-all@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz#e1a2d62cdd237230a2ae11839027b19381e31e8b" + integrity sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg== + dependencies: + micromark-util-types "^2.0.0" + +micromark-util-sanitize-uri@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz#ab89789b818a58752b73d6b55238621b7faa8fd7" + integrity sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ== + dependencies: + micromark-util-character "^2.0.0" + micromark-util-encode "^2.0.0" + micromark-util-symbol "^2.0.0" + +micromark-util-subtokenize@^2.0.0: + version "2.1.0" + resolved "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz#d8ade5ba0f3197a1cf6a2999fbbfe6357a1a19ee" + integrity sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA== dependencies: - braces "^3.0.3" - picomatch "^2.3.1" + devlop "^1.0.0" + micromark-util-chunked 
"^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-util-symbol@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz#e5da494e8eb2b071a0d08fb34f6cefec6c0a19b8" + integrity sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q== + +micromark-util-types@^2.0.0: + version "2.0.2" + resolved "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz#f00225f5f5a0ebc3254f96c36b6605c4b393908e" + integrity sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA== + +micromark@^4.0.0: + version "4.0.2" + resolved "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz#91395a3e1884a198e62116e33c9c568e39936fdb" + integrity sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA== + dependencies: + "@types/debug" "^4.0.0" + debug "^4.0.0" + decode-named-character-reference "^1.0.0" + devlop "^1.0.0" + micromark-core-commonmark "^2.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-chunked "^2.0.0" + micromark-util-combine-extensions "^2.0.0" + micromark-util-decode-numeric-character-reference "^2.0.0" + micromark-util-encode "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" + micromark-util-resolve-all "^2.0.0" + micromark-util-sanitize-uri "^2.0.0" + micromark-util-subtokenize "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" mimic-response@^3.1.0: version "3.1.0" resolved "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz" integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== -minimatch@^3.1.1, minimatch@^3.1.2: - version "3.1.2" - resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" - integrity 
sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== +min-indent@^1.0.0: + version "1.0.1" + resolved "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz" + integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== + +minimatch@^10.2.2: + version "10.2.4" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz" + integrity sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg== dependencies: - brace-expansion "^1.1.7" + brace-expansion "^5.0.2" -minimatch@^5.1.0: - version "5.1.6" - resolved "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz" - integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g== +minimatch@^3.1.1, minimatch@^3.1.2, minimatch@^3.1.5: + version "3.1.5" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz" + integrity sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w== dependencies: - brace-expansion "^2.0.1" + brace-expansion "^1.1.7" -minimatch@^9.0.4: - version "9.0.5" - resolved "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz" - integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== +minimatch@^5.1.0: + version "5.1.9" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-5.1.9.tgz" + integrity sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw== dependencies: brace-expansion "^2.0.1" @@ -3781,29 +4904,11 @@ mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3: dependencies: minimist "^1.2.6" -mlly@^1.7.4: - version "1.8.0" - resolved "https://registry.npmjs.org/mlly/-/mlly-1.8.0.tgz" - integrity sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g== - dependencies: - acorn "^8.15.0" - pathe "^2.0.3" - pkg-types "^1.3.1" - ufo 
"^1.6.1" - ms@^2.1.3: version "2.1.3" resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== -mui-markdown@^2.0.3: - version "2.0.3" - resolved "https://registry.npmjs.org/mui-markdown/-/mui-markdown-2.0.3.tgz" - integrity sha512-wrQjH/7EedktFJn1TPaxIPi5/PwvTwWq6fSuY/lL7RsaCuG1JoSSwe1a4Kzwfr+OMvqtKZLVxuNDacDWW4xF6w== - optionalDependencies: - mermaid "^11.6.0" - prism-react-renderer "^2.0.3" - nanoid@^3.3.11: version "3.3.11" resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz" @@ -3820,9 +4925,9 @@ natural-compare@^1.4.0: integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== node-abi@^3.3.0: - version "3.87.0" - resolved "https://registry.npmjs.org/node-abi/-/node-abi-3.87.0.tgz" - integrity sha512-+CGM1L1CgmtheLcBuleyYOn7NWPVu0s0EJH2C4puxgEZb9h8QpR9G2dBfZJOAUhi7VQxuBPMd0hiISWcTyiYyQ== + version "3.89.0" + resolved "https://registry.npmjs.org/node-abi/-/node-abi-3.89.0.tgz" + integrity sha512-6u9UwL0HlAl21+agMN3YAMXcKByMqwGx+pq+P76vii5f7hTPtKDp08/H9py6DY+cfDw7kQNTGEj/rly3IgbNQA== dependencies: semver "^7.3.5" @@ -3831,6 +4936,16 @@ node-addon-api@^7.0.0: resolved "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz" integrity sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ== +node-exports-info@^1.6.0: + version "1.6.0" + resolved "https://registry.npmjs.org/node-exports-info/-/node-exports-info-1.6.0.tgz" + integrity sha512-pyFS63ptit/P5WqUkt+UUfe+4oevH+bFeIiPPdfb0pFeYEu/1ELnJu5l+5EcTKYL5M7zaAa7S8ddywgXypqKCw== + dependencies: + array.prototype.flatmap "^1.3.3" + es-errors "^1.3.0" + object.entries "^1.1.9" + semver "^6.3.1" + normalize-path@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz" @@ -3893,6 +5008,18 @@ object.values@^1.1.6, object.values@^1.2.1: define-properties 
"^1.2.1" es-object-atoms "^1.0.0" +obug@^2.1.1: + version "2.1.1" + resolved "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz" + integrity sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ== + +oidc-client-ts@3.5.0: + version "3.5.0" + resolved "https://registry.npmjs.org/oidc-client-ts/-/oidc-client-ts-3.5.0.tgz" + integrity sha512-l2q8l9CTCTOlbX+AnK4p3M+4CEpKpyQhle6blQkdFhm0IsBqsxm15bYaSa11G7pWdsYr6epdsRZxJpCyCRbT8A== + dependencies: + jwt-decode "^4.0.0" + once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz" @@ -3912,6 +5039,11 @@ optionator@^0.9.3: type-check "^0.4.0" word-wrap "^1.2.5" +orderedmap@^2.0.0: + version "2.1.1" + resolved "https://registry.npmjs.org/orderedmap/-/orderedmap-2.1.1.tgz" + integrity sha512-TvAWxi0nDe1j/rtMcWcIj94+Ffe6n7zhow33h40SKxmsmozs6dz/e+EajymfoFcHd7sxNn8yHM8839uixMOV6g== + own-keys@^1.0.1: version "1.0.1" resolved "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz" @@ -3935,11 +5067,6 @@ p-locate@^5.0.0: dependencies: p-limit "^3.0.2" -package-manager-detector@^1.3.0: - version "1.5.0" - resolved "https://registry.npmjs.org/package-manager-detector/-/package-manager-detector-1.5.0.tgz" - integrity sha512-uBj69dVlYe/+wxj8JOpr97XfsxH/eumMt6HqjNTmJDf/6NO9s+0uxeOneIz3AsPt2m6y9PqzDzd3ATcU17MNfw== - pako@~1.0.2: version "1.0.11" resolved "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz" @@ -3952,6 +5079,19 @@ parent-module@^1.0.0: dependencies: callsites "^3.0.0" +parse-entities@^4.0.0: + version "4.0.2" + resolved "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.2.tgz#61d46f5ed28e4ee62e9ddc43d6b010188443f159" + integrity sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw== + dependencies: + "@types/unist" "^2.0.0" + character-entities-legacy "^3.0.0" + character-reference-invalid "^2.0.0" + decode-named-character-reference "^1.0.0" + is-alphanumerical "^2.0.0" + 
is-decimal "^2.0.0" + is-hexadecimal "^2.0.0" + parse-json@^5.0.0: version "5.2.0" resolved "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz" @@ -3962,10 +5102,12 @@ parse-json@^5.0.0: json-parse-even-better-errors "^2.3.0" lines-and-columns "^1.1.6" -path-data-parser@0.1.0, path-data-parser@^0.1.0: - version "0.1.0" - resolved "https://registry.npmjs.org/path-data-parser/-/path-data-parser-0.1.0.tgz" - integrity sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w== +parse5@^8.0.0: + version "8.0.0" + resolved "https://registry.npmjs.org/parse5/-/parse5-8.0.0.tgz" + integrity sha512-9m4m5GSgXjL4AjumKzq1Fgfp3Z8rsvjRNbnkVwfu2ImRqE5D0LnY2QfDen18FSY9C573YU5XxSapdHZTZ2WolA== + dependencies: + entities "^6.0.0" path-exists@^4.0.0: version "4.0.0" @@ -3992,61 +5134,30 @@ path-type@^4.0.0: resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== -pathe@^2.0.1, pathe@^2.0.3: +pathe@^2.0.3: version "2.0.3" resolved "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz" integrity sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w== -picocolors@^1.1.1: +picocolors@1.1.1, picocolors@^1.1.1: version "1.1.1" resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz" integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA== -picomatch@^2.3.1: - version "2.3.1" - resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz" - integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== - -pkg-types@^1.3.1: - version "1.3.1" - resolved "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz" - integrity sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ== - dependencies: - confbox "^0.1.8" - mlly "^1.7.4" - pathe 
"^2.0.1" - -pkg-types@^2.3.0: - version "2.3.0" - resolved "https://registry.npmjs.org/pkg-types/-/pkg-types-2.3.0.tgz" - integrity sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig== - dependencies: - confbox "^0.2.2" - exsolve "^1.0.7" - pathe "^2.0.3" - -points-on-curve@0.2.0, points-on-curve@^0.2.0: - version "0.2.0" - resolved "https://registry.npmjs.org/points-on-curve/-/points-on-curve-0.2.0.tgz" - integrity sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A== - -points-on-path@^0.2.1: - version "0.2.1" - resolved "https://registry.npmjs.org/points-on-path/-/points-on-path-0.2.1.tgz" - integrity sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g== - dependencies: - path-data-parser "0.1.0" - points-on-curve "0.2.0" +picomatch@^4.0.3: + version "4.0.4" + resolved "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz" + integrity sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A== possible-typed-array-names@^1.0.0: version "1.1.0" resolved "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz" integrity sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg== -postcss@^8.4.43: - version "8.5.6" - resolved "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz" - integrity sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg== +postcss@^8.4.43, postcss@^8.5.8: + version "8.5.8" + resolved "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz" + integrity sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg== dependencies: nanoid "^3.3.11" picocolors "^1.1.1" @@ -4080,19 +5191,20 @@ prettier@^2.8.3: resolved "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz" integrity 
sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q== +pretty-format@^27.0.2: + version "27.5.1" + resolved "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz" + integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== + dependencies: + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^17.0.1" + prism-react-renderer@^1.3.5: version "1.3.5" resolved "https://registry.npmjs.org/prism-react-renderer/-/prism-react-renderer-1.3.5.tgz" integrity sha512-IJ+MSwBWKG+SM3b2SUfdrhC+gu01QkV2KmRQgREThBfSQRoufqRfxfHUxpG1WcaFjP+kojcFyO9Qqtpgt3qLCg== -prism-react-renderer@^2.0.3: - version "2.4.1" - resolved "https://registry.npmjs.org/prism-react-renderer/-/prism-react-renderer-2.4.1.tgz" - integrity sha512-ey8Ls/+Di31eqzUxC46h8MksNuGx/n0AAC8uKpwFau4RPDYLuE3EXTp8N8G2vX2N7UC/+IXeNUnlWBGGcAG+Ig== - dependencies: - "@types/prismjs" "^1.26.0" - clsx "^2.0.0" - prismjs@^1.30.0: version "1.30.0" resolved "https://registry.npmjs.org/prismjs/-/prismjs-1.30.0.tgz" @@ -4112,29 +5224,183 @@ prop-types@^15.6.2, prop-types@^15.8.1: object-assign "^4.1.1" react-is "^16.13.1" +property-information@^7.0.0: + version "7.1.0" + resolved "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz#b622e8646e02b580205415586b40804d3e8bfd5d" + integrity sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ== + +prosemirror-changeset@^2.3.0: + version "2.4.0" + resolved "https://registry.npmjs.org/prosemirror-changeset/-/prosemirror-changeset-2.4.0.tgz" + integrity sha512-LvqH2v7Q2SF6yxatuPP2e8vSUKS/L+xAU7dPDC4RMyHMhZoGDfBC74mYuyYF4gLqOEG758wajtyhNnsTkuhvng== + dependencies: + prosemirror-transform "^1.0.0" + +prosemirror-collab@^1.3.1: + version "1.3.1" + resolved "https://registry.npmjs.org/prosemirror-collab/-/prosemirror-collab-1.3.1.tgz" + integrity 
sha512-4SnynYR9TTYaQVXd/ieUvsVV4PDMBzrq2xPUWutHivDuOshZXqQ5rGbZM84HEaXKbLdItse7weMGOUdDVcLKEQ== + dependencies: + prosemirror-state "^1.0.0" + +prosemirror-commands@^1.0.0, prosemirror-commands@^1.6.2: + version "1.7.1" + resolved "https://registry.npmjs.org/prosemirror-commands/-/prosemirror-commands-1.7.1.tgz" + integrity sha512-rT7qZnQtx5c0/y/KlYaGvtG411S97UaL6gdp6RIZ23DLHanMYLyfGBV5DtSnZdthQql7W+lEVbpSfwtO8T+L2w== + dependencies: + prosemirror-model "^1.0.0" + prosemirror-state "^1.0.0" + prosemirror-transform "^1.10.2" + +prosemirror-dropcursor@^1.8.1: + version "1.8.2" + resolved "https://registry.npmjs.org/prosemirror-dropcursor/-/prosemirror-dropcursor-1.8.2.tgz" + integrity sha512-CCk6Gyx9+Tt2sbYk5NK0nB1ukHi2ryaRgadV/LvyNuO3ena1payM2z6Cg0vO1ebK8cxbzo41ku2DE5Axj1Zuiw== + dependencies: + prosemirror-state "^1.0.0" + prosemirror-transform "^1.1.0" + prosemirror-view "^1.1.0" + +prosemirror-gapcursor@^1.3.2: + version "1.4.1" + resolved "https://registry.npmjs.org/prosemirror-gapcursor/-/prosemirror-gapcursor-1.4.1.tgz" + integrity sha512-pMdYaEnjNMSwl11yjEGtgTmLkR08m/Vl+Jj443167p9eB3HVQKhYCc4gmHVDsLPODfZfjr/MmirsdyZziXbQKw== + dependencies: + prosemirror-keymap "^1.0.0" + prosemirror-model "^1.0.0" + prosemirror-state "^1.0.0" + prosemirror-view "^1.0.0" + +prosemirror-history@^1.0.0, prosemirror-history@^1.4.1: + version "1.5.0" + resolved "https://registry.npmjs.org/prosemirror-history/-/prosemirror-history-1.5.0.tgz" + integrity sha512-zlzTiH01eKA55UAf1MEjtssJeHnGxO0j4K4Dpx+gnmX9n+SHNlDqI2oO1Kv1iPN5B1dm5fsljCfqKF9nFL6HRg== + dependencies: + prosemirror-state "^1.2.2" + prosemirror-transform "^1.0.0" + prosemirror-view "^1.31.0" + rope-sequence "^1.3.0" + +prosemirror-inputrules@^1.4.0: + version "1.5.1" + resolved "https://registry.npmjs.org/prosemirror-inputrules/-/prosemirror-inputrules-1.5.1.tgz" + integrity sha512-7wj4uMjKaXWAQ1CDgxNzNtR9AlsuwzHfdFH1ygEHA2KHF2DOEaXl1CJfNPAKCg9qNEh4rum975QLaCiQPyY6Fw== + dependencies: + prosemirror-state "^1.0.0" + 
prosemirror-transform "^1.0.0" + +prosemirror-keymap@^1.0.0, prosemirror-keymap@^1.2.2, prosemirror-keymap@^1.2.3: + version "1.2.3" + resolved "https://registry.npmjs.org/prosemirror-keymap/-/prosemirror-keymap-1.2.3.tgz" + integrity sha512-4HucRlpiLd1IPQQXNqeo81BGtkY8Ai5smHhKW9jjPKRc2wQIxksg7Hl1tTI2IfT2B/LgX6bfYvXxEpJl7aKYKw== + dependencies: + prosemirror-state "^1.0.0" + w3c-keyname "^2.2.0" + +prosemirror-markdown@^1.11.1, prosemirror-markdown@^1.13.1: + version "1.13.4" + resolved "https://registry.npmjs.org/prosemirror-markdown/-/prosemirror-markdown-1.13.4.tgz" + integrity sha512-D98dm4cQ3Hs6EmjK500TdAOew4Z03EV71ajEFiWra3Upr7diytJsjF4mPV2dW+eK5uNectiRj0xFxYI9NLXDbw== + dependencies: + "@types/markdown-it" "^14.0.0" + markdown-it "^14.0.0" + prosemirror-model "^1.25.0" + +prosemirror-menu@^1.2.4: + version "1.3.0" + resolved "https://registry.npmjs.org/prosemirror-menu/-/prosemirror-menu-1.3.0.tgz" + integrity sha512-TImyPXCHPcDsSka2/lwJ6WjTASr4re/qWq1yoTTuLOqfXucwF6VcRa2LWCkM/EyTD1UO3CUwiH8qURJoWJRxwg== + dependencies: + crelt "^1.0.0" + prosemirror-commands "^1.0.0" + prosemirror-history "^1.0.0" + prosemirror-state "^1.0.0" + +prosemirror-model@^1.0.0, prosemirror-model@^1.20.0, prosemirror-model@^1.21.0, prosemirror-model@^1.24.1, prosemirror-model@^1.25.0, prosemirror-model@^1.25.4: + version "1.25.4" + resolved "https://registry.npmjs.org/prosemirror-model/-/prosemirror-model-1.25.4.tgz" + integrity sha512-PIM7E43PBxKce8OQeezAs9j4TP+5yDpZVbuurd1h5phUxEKIu+G2a+EUZzIC5nS1mJktDJWzbqS23n1tsAf5QA== + dependencies: + orderedmap "^2.0.0" + +prosemirror-schema-basic@^1.2.3: + version "1.2.4" + resolved "https://registry.npmjs.org/prosemirror-schema-basic/-/prosemirror-schema-basic-1.2.4.tgz" + integrity sha512-ELxP4TlX3yr2v5rM7Sb70SqStq5NvI15c0j9j/gjsrO5vaw+fnnpovCLEGIcpeGfifkuqJwl4fon6b+KdrODYQ== + dependencies: + prosemirror-model "^1.25.0" + +prosemirror-schema-list@^1.5.0: + version "1.5.1" + resolved 
"https://registry.npmjs.org/prosemirror-schema-list/-/prosemirror-schema-list-1.5.1.tgz" + integrity sha512-927lFx/uwyQaGwJxLWCZRkjXG0p48KpMj6ueoYiu4JX05GGuGcgzAy62dfiV8eFZftgyBUvLx76RsMe20fJl+Q== + dependencies: + prosemirror-model "^1.0.0" + prosemirror-state "^1.0.0" + prosemirror-transform "^1.7.3" + +prosemirror-state@^1.0.0, prosemirror-state@^1.2.2, prosemirror-state@^1.4.3, prosemirror-state@^1.4.4: + version "1.4.4" + resolved "https://registry.npmjs.org/prosemirror-state/-/prosemirror-state-1.4.4.tgz" + integrity sha512-6jiYHH2CIGbCfnxdHbXZ12gySFY/fz/ulZE333G6bPqIZ4F+TXo9ifiR86nAHpWnfoNjOb3o5ESi7J8Uz1jXHw== + dependencies: + prosemirror-model "^1.0.0" + prosemirror-transform "^1.0.0" + prosemirror-view "^1.27.0" + +prosemirror-tables@^1.6.4: + version "1.8.5" + resolved "https://registry.npmjs.org/prosemirror-tables/-/prosemirror-tables-1.8.5.tgz" + integrity sha512-V/0cDCsHKHe/tfWkeCmthNUcEp1IVO3p6vwN8XtwE9PZQLAZJigbw3QoraAdfJPir4NKJtNvOB8oYGKRl+t0Dw== + dependencies: + prosemirror-keymap "^1.2.3" + prosemirror-model "^1.25.4" + prosemirror-state "^1.4.4" + prosemirror-transform "^1.10.5" + prosemirror-view "^1.41.4" + +prosemirror-trailing-node@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/prosemirror-trailing-node/-/prosemirror-trailing-node-3.0.0.tgz" + integrity sha512-xiun5/3q0w5eRnGYfNlW1uU9W6x5MoFKWwq/0TIRgt09lv7Hcser2QYV8t4muXbEr+Fwo0geYn79Xs4GKywrRQ== + dependencies: + "@remirror/core-constants" "3.0.0" + escape-string-regexp "^4.0.0" + +prosemirror-transform@^1.0.0, prosemirror-transform@^1.1.0, prosemirror-transform@^1.10.2, prosemirror-transform@^1.10.5, prosemirror-transform@^1.7.3: + version "1.12.0" + resolved "https://registry.npmjs.org/prosemirror-transform/-/prosemirror-transform-1.12.0.tgz" + integrity sha512-GxboyN4AMIsoHNtz5uf2r2Ru551i5hWeCMD6E2Ib4Eogqoub0NflniaBPVQ4MrGE5yZ8JV9tUHg9qcZTTrcN4w== + dependencies: + prosemirror-model "^1.21.0" + +prosemirror-view@^1.0.0, prosemirror-view@^1.1.0, 
prosemirror-view@^1.27.0, prosemirror-view@^1.31.0, prosemirror-view@^1.38.1, prosemirror-view@^1.41.4: + version "1.41.8" + resolved "https://registry.npmjs.org/prosemirror-view/-/prosemirror-view-1.41.8.tgz" + integrity sha512-TnKDdohEatgyZNGCDWIdccOHXhYloJwbwU+phw/a23KBvJIR9lWQWW7WHHK3vBdOLDNuF7TaX98GObUZOWkOnA== + dependencies: + prosemirror-model "^1.20.0" + prosemirror-state "^1.0.0" + prosemirror-transform "^1.1.0" + pump@^3.0.0: - version "3.0.3" - resolved "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz" - integrity sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA== + version "3.0.4" + resolved "https://registry.npmjs.org/pump/-/pump-3.0.4.tgz" + integrity sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA== dependencies: end-of-stream "^1.1.0" once "^1.3.1" -punycode@^2.1.0: +punycode.js@^2.3.1: + version "2.3.1" + resolved "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz" + integrity sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA== + +punycode@^2.1.0, punycode@^2.3.1: version "2.3.1" resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz" integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== -quansync@^0.2.11: - version "0.2.11" - resolved "https://registry.npmjs.org/quansync/-/quansync-0.2.11.tgz" - integrity sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA== - -queue-microtask@^1.2.2: - version "1.2.3" - resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz" - integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== - rc@^1.2.7: version "1.2.8" resolved "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz" @@ -4181,20 +5447,39 @@ react-dom@^18.2.0: loose-envify "^1.1.0" scheduler "^0.23.2" +react-i18next@^16.5.4: + version 
"16.6.6" + resolved "https://registry.npmjs.org/react-i18next/-/react-i18next-16.6.6.tgz" + integrity sha512-ZgL2HUoW34UKUkOV7uSQFE1CDnRPD+tCR3ywSuWH7u2iapnz86U8Bi3Vrs620qNDzCf1F47NxglCEkchCTDOHw== + dependencies: + "@babel/runtime" "^7.29.2" + html-parse-stringify "^3.0.1" + use-sync-external-store "^1.6.0" + react-is@^16.13.1, react-is@^16.7.0: version "16.13.1" resolved "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz" integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== +react-is@^17.0.1: + version "17.0.2" + resolved "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz" + integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== + react-is@^18.0.0: version "18.3.1" resolved "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz" integrity sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg== -react-is@^19.1.1: - version "19.2.0" - resolved "https://registry.npmjs.org/react-is/-/react-is-19.2.0.tgz" - integrity sha512-x3Ax3kNSMIIkyVYhWPyO09bu0uttcAIoecO/um/rKGQ4EltYWVYtyiGkS/3xMynrbVQdS69Jhlv8FXUEZehlzA== +react-is@^19.2.3: + version "19.2.4" + resolved "https://registry.npmjs.org/react-is/-/react-is-19.2.4.tgz" + integrity sha512-W+EWGn2v0ApPKgKKCy/7s7WHXkboGcsrXE+2joLyVxkbyVQfO3MUEaUQDHoSmb8TFFrSKYa9mw64WZHNHSDzYA== + +react-is@^19.2.4: + version "19.2.5" + resolved "https://registry.npmjs.org/react-is/-/react-is-19.2.5.tgz#7e7b54143e9313fed787b23fd4295d5a23872ad9" + integrity sha512-Dn0t8IQhCmeIT3wu+Apm1/YVsJXsGWi6k4sPdnBIdqMVtHtv0IGi6dcpNpNkNac0zB2uUAqNX3MHzN8c+z2rwQ== react-katex@^3.1.0: version "3.1.0" @@ -4203,6 +5488,23 @@ react-katex@^3.1.0: dependencies: katex "^0.16.0" +react-markdown@^10.1.0: + version "10.1.0" + resolved "https://registry.npmjs.org/react-markdown/-/react-markdown-10.1.0.tgz#e22bc20faddbc07605c15284255653c0f3bad5ca" + integrity 
sha512-qKxVopLT/TyA6BX3Ue5NwabOsAzm0Q7kAPwq6L+wWDwisYs7R8vZ0nRXqq6rkueboxpkjvLGU9fWifiX/ZZFxQ== + dependencies: + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + devlop "^1.0.0" + hast-util-to-jsx-runtime "^2.0.0" + html-url-attributes "^3.0.0" + mdast-util-to-hast "^13.0.0" + remark-parse "^11.0.0" + remark-rehype "^11.0.0" + unified "^11.0.0" + unist-util-visit "^5.0.0" + vfile "^6.0.0" + react-redux@^8.0.4: version "8.1.3" resolved "https://registry.npmjs.org/react-redux/-/react-redux-8.1.3.tgz" @@ -4216,19 +5518,19 @@ react-redux@^8.0.4: use-sync-external-store "^1.0.0" react-router-dom@^6.22.0: - version "6.30.1" - resolved "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.30.1.tgz" - integrity sha512-llKsgOkZdbPU1Eg3zK8lCn+sjD9wMRZZPuzmdWWX5SUs8OFkN5HnFVC0u5KMeMaC9aoancFI/KoLuKPqN+hxHw== + version "6.30.3" + resolved "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.30.3.tgz" + integrity sha512-pxPcv1AczD4vso7G4Z3TKcvlxK7g7TNt3/FNGMhfqyntocvYKj+GCatfigGDjbLozC4baguJ0ReCigoDJXb0ag== dependencies: - "@remix-run/router" "1.23.0" - react-router "6.30.1" + "@remix-run/router" "1.23.2" + react-router "6.30.3" -react-router@6.30.1: - version "6.30.1" - resolved "https://registry.npmjs.org/react-router/-/react-router-6.30.1.tgz" - integrity sha512-X1m21aEmxGXqENEPG3T6u0Th7g0aS4ZmoNynhbs+Cn+q+QGTLt+d5IQ2bHAXKzKcxGJjxACpVbnYQSCRcfxHlQ== +react-router@6.30.3: + version "6.30.3" + resolved "https://registry.npmjs.org/react-router/-/react-router-6.30.3.tgz" + integrity sha512-XRnlbKMTmktBkjCLE8/XcZFlnHvr2Ltdr1eJX4idL55/9BbORzyZEaIkBFDhFGCEWBBItsVrDxwx3gnisMitdw== dependencies: - "@remix-run/router" "1.23.0" + "@remix-run/router" "1.23.2" react-selectable-fast@^3.4.0: version "3.4.0" @@ -4261,9 +5563,9 @@ react-vega@^7.6.0: vega-embed "6.5.1" react-virtuoso@^4.3.10: - version "4.14.1" - resolved "https://registry.npmjs.org/react-virtuoso/-/react-virtuoso-4.14.1.tgz" - integrity 
sha512-NRUF1ak8lY+Tvc6WN9cce59gU+lilzVtOozP+pm9J7iHshLGGjsiAB4rB2qlBPHjFbcXOQpT+7womNHGDUql8w== + version "4.18.3" + resolved "https://registry.npmjs.org/react-virtuoso/-/react-virtuoso-4.18.3.tgz" + integrity sha512-fLz/peHAx4Eu0DLHurFEEI7Y6n5CqEoxBh04rgJM9yMuOJah2a9zWg/MUOmZLcp7zuWYorXq5+5bf3IRgkNvWg== react@^18.2.0: version "18.3.1" @@ -4306,6 +5608,14 @@ readdirp@^4.0.1: resolved "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz" integrity sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg== +redent@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz" + integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== + dependencies: + indent-string "^4.0.0" + strip-indent "^3.0.0" + redux-persist@^6.0.0: version "6.0.0" resolved "https://registry.npmjs.org/redux-persist/-/redux-persist-6.0.0.tgz" @@ -4349,39 +5659,68 @@ regexp.prototype.flags@^1.5.3, regexp.prototype.flags@^1.5.4: gopd "^1.2.0" set-function-name "^2.0.2" +remark-parse@^11.0.0: + version "11.0.0" + resolved "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz#aa60743fcb37ebf6b069204eb4da304e40db45a1" + integrity sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA== + dependencies: + "@types/mdast" "^4.0.0" + mdast-util-from-markdown "^2.0.0" + micromark-util-types "^2.0.0" + unified "^11.0.0" + +remark-rehype@^11.0.0: + version "11.1.2" + resolved "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.2.tgz#2addaadda80ca9bd9aa0da763e74d16327683b37" + integrity sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw== + dependencies: + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + mdast-util-to-hast "^13.0.0" + unified "^11.0.0" + vfile "^6.0.0" + +require-from-string@^2.0.2: + version "2.0.2" + resolved 
"https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + reselect@^4.1.8: version "4.1.8" resolved "https://registry.npmjs.org/reselect/-/reselect-4.1.8.tgz" integrity sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ== +reselect@^5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/reselect/-/reselect-5.1.1.tgz#c766b1eb5d558291e5e550298adb0becc24bb72e" + integrity sha512-K/BG6eIky/SBpzfHZv/dd+9JBFiS4SWV7FIujVyJRux6e45+73RaUHXLmIR1f7WOMaQ0U1km6qwklRQxpJJY0w== + resolve-from@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz" integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== resolve@^1.19.0: - version "1.22.0" - resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz" - integrity sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw== + version "1.22.11" + resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz" + integrity sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ== dependencies: - is-core-module "^2.8.1" + is-core-module "^2.16.1" path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" resolve@^2.0.0-next.5: - version "2.0.0-next.5" - resolved "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz" - integrity sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA== + version "2.0.0-next.6" + resolved "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.6.tgz" + integrity sha512-3JmVl5hMGtJ3kMmB3zi3DL25KfkCEyy3Tw7Gmw7z5w8M9WlwoPFnIvwChzu1+cF3iaK3sp18hhPz8ANeimdJfA== dependencies: - is-core-module "^2.13.0" + es-errors "^1.3.0" + is-core-module "^2.16.1" + node-exports-info "^1.6.0" + object-keys "^1.1.1" 
path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" -reusify@^1.0.4: - version "1.1.0" - resolved "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz" - integrity sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw== - rimraf@2: version "2.7.1" resolved "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz" @@ -4389,66 +5728,78 @@ rimraf@2: dependencies: glob "^7.1.3" -robust-predicates@^3.0.0: - version "3.0.1" - resolved "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.1.tgz" - integrity sha512-ndEIpszUHiG4HtDsQLeIuMvRsDnn8c8rYStabochtUeCvfuvNptb5TUbVD68LRAILPX7p9nqQGh4xJgn3EHS/g== +robust-predicates@^3.0.2: + version "3.0.3" + resolved "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.3.tgz" + integrity sha512-NS3levdsRIUOmiJ8FZWCP7LG3QpJyrs/TE0Zpf1yvZu8cAJJ6QMW92H1c7kWpdIHo8RvmLxN/o2JXTKHp74lUA== + +rolldown@1.0.0-rc.11: + version "1.0.0-rc.11" + resolved "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-rc.11.tgz" + integrity sha512-NRjoKMusSjfRbSYiH3VSumlkgFe7kYAa3pzVOsVYVFY3zb5d7nS+a3KGQ7hJKXuYWbzJKPVQ9Wxq2UvyK+ENpw== + dependencies: + "@oxc-project/types" "=0.122.0" + "@rolldown/pluginutils" "1.0.0-rc.11" + optionalDependencies: + "@rolldown/binding-android-arm64" "1.0.0-rc.11" + "@rolldown/binding-darwin-arm64" "1.0.0-rc.11" + "@rolldown/binding-darwin-x64" "1.0.0-rc.11" + "@rolldown/binding-freebsd-x64" "1.0.0-rc.11" + "@rolldown/binding-linux-arm-gnueabihf" "1.0.0-rc.11" + "@rolldown/binding-linux-arm64-gnu" "1.0.0-rc.11" + "@rolldown/binding-linux-arm64-musl" "1.0.0-rc.11" + "@rolldown/binding-linux-ppc64-gnu" "1.0.0-rc.11" + "@rolldown/binding-linux-s390x-gnu" "1.0.0-rc.11" + "@rolldown/binding-linux-x64-gnu" "1.0.0-rc.11" + "@rolldown/binding-linux-x64-musl" "1.0.0-rc.11" + "@rolldown/binding-openharmony-arm64" "1.0.0-rc.11" + "@rolldown/binding-wasm32-wasi" "1.0.0-rc.11" + "@rolldown/binding-win32-arm64-msvc" "1.0.0-rc.11" + 
"@rolldown/binding-win32-x64-msvc" "1.0.0-rc.11" rollup@^4.20.0: - version "4.59.0" - resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.59.0.tgz#cf74edac17c1486f562d728a4d923a694abdf06f" - integrity sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg== + version "4.60.0" + resolved "https://registry.npmjs.org/rollup/-/rollup-4.60.0.tgz" + integrity sha512-yqjxruMGBQJ2gG4HtjZtAfXArHomazDHoFwFFmZZl0r7Pdo7qCIXKqKHZc8yeoMgzJJ+pO6pEEHa+V7uzWlrAQ== dependencies: "@types/estree" "1.0.8" optionalDependencies: - "@rollup/rollup-android-arm-eabi" "4.59.0" - "@rollup/rollup-android-arm64" "4.59.0" - "@rollup/rollup-darwin-arm64" "4.59.0" - "@rollup/rollup-darwin-x64" "4.59.0" - "@rollup/rollup-freebsd-arm64" "4.59.0" - "@rollup/rollup-freebsd-x64" "4.59.0" - "@rollup/rollup-linux-arm-gnueabihf" "4.59.0" - "@rollup/rollup-linux-arm-musleabihf" "4.59.0" - "@rollup/rollup-linux-arm64-gnu" "4.59.0" - "@rollup/rollup-linux-arm64-musl" "4.59.0" - "@rollup/rollup-linux-loong64-gnu" "4.59.0" - "@rollup/rollup-linux-loong64-musl" "4.59.0" - "@rollup/rollup-linux-ppc64-gnu" "4.59.0" - "@rollup/rollup-linux-ppc64-musl" "4.59.0" - "@rollup/rollup-linux-riscv64-gnu" "4.59.0" - "@rollup/rollup-linux-riscv64-musl" "4.59.0" - "@rollup/rollup-linux-s390x-gnu" "4.59.0" - "@rollup/rollup-linux-x64-gnu" "4.59.0" - "@rollup/rollup-linux-x64-musl" "4.59.0" - "@rollup/rollup-openbsd-x64" "4.59.0" - "@rollup/rollup-openharmony-arm64" "4.59.0" - "@rollup/rollup-win32-arm64-msvc" "4.59.0" - "@rollup/rollup-win32-ia32-msvc" "4.59.0" - "@rollup/rollup-win32-x64-gnu" "4.59.0" - "@rollup/rollup-win32-x64-msvc" "4.59.0" + "@rollup/rollup-android-arm-eabi" "4.60.0" + "@rollup/rollup-android-arm64" "4.60.0" + "@rollup/rollup-darwin-arm64" "4.60.0" + "@rollup/rollup-darwin-x64" "4.60.0" + "@rollup/rollup-freebsd-arm64" "4.60.0" + "@rollup/rollup-freebsd-x64" "4.60.0" + "@rollup/rollup-linux-arm-gnueabihf" "4.60.0" + "@rollup/rollup-linux-arm-musleabihf" 
"4.60.0" + "@rollup/rollup-linux-arm64-gnu" "4.60.0" + "@rollup/rollup-linux-arm64-musl" "4.60.0" + "@rollup/rollup-linux-loong64-gnu" "4.60.0" + "@rollup/rollup-linux-loong64-musl" "4.60.0" + "@rollup/rollup-linux-ppc64-gnu" "4.60.0" + "@rollup/rollup-linux-ppc64-musl" "4.60.0" + "@rollup/rollup-linux-riscv64-gnu" "4.60.0" + "@rollup/rollup-linux-riscv64-musl" "4.60.0" + "@rollup/rollup-linux-s390x-gnu" "4.60.0" + "@rollup/rollup-linux-x64-gnu" "4.60.0" + "@rollup/rollup-linux-x64-musl" "4.60.0" + "@rollup/rollup-openbsd-x64" "4.60.0" + "@rollup/rollup-openharmony-arm64" "4.60.0" + "@rollup/rollup-win32-arm64-msvc" "4.60.0" + "@rollup/rollup-win32-ia32-msvc" "4.60.0" + "@rollup/rollup-win32-x64-gnu" "4.60.0" + "@rollup/rollup-win32-x64-msvc" "4.60.0" fsevents "~2.3.2" -roughjs@^4.6.6: - version "4.6.6" - resolved "https://registry.npmjs.org/roughjs/-/roughjs-4.6.6.tgz" - integrity sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ== - dependencies: - hachure-fill "^0.5.2" - path-data-parser "^0.1.0" - points-on-curve "^0.2.0" - points-on-path "^0.2.1" - -run-parallel@^1.1.9: - version "1.2.0" - resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz" - integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== - dependencies: - queue-microtask "^1.2.2" +rope-sequence@^1.3.0: + version "1.3.4" + resolved "https://registry.npmjs.org/rope-sequence/-/rope-sequence-1.3.4.tgz" + integrity sha512-UT5EDe2cu2E/6O4igUr5PSFs23nvvukicWHx6GnOPlHAiiYbzNuCRQCuiUdHJQcqKalLKlrYJnjY0ySGsXNQXQ== rw@1: version "1.3.3" resolved "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz" - integrity sha1-P4Yt+pGrdmsUiF700BEkv9oHT7Q= + integrity sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ== rybitten@^0.22.0: version "0.22.0" @@ -4466,16 +5817,16 @@ safe-array-concat@^1.1.3: has-symbols "^1.1.0" isarray "^2.0.5" -safe-buffer@^5.0.1, 
safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.2" - resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -safe-buffer@~5.2.0: +safe-buffer@^5.0.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== +safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + safe-push-apply@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz" @@ -4499,12 +5850,12 @@ safe-regex-test@^1.0.3, safe-regex-test@^1.1.0: integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sass@^1.77.6: - version "1.93.2" - resolved "https://registry.npmjs.org/sass/-/sass-1.93.2.tgz" - integrity sha512-t+YPtOQHpGW1QWsh1CHQ5cPIr9lbbGZLZnbihP/D/qZj/yuV68m8qarcV17nvkOX81BCrvzAlq2klCQFZghyTg== + version "1.98.0" + resolved "https://registry.npmjs.org/sass/-/sass-1.98.0.tgz" + integrity sha512-+4N/u9dZ4PrgzGgPlKnaaRQx64RO0JBKs9sDhQ2pLgN6JQZ25uPQZKQYaBJU48Kd5BxgXoJ4e09Dq7nMcOUW3A== dependencies: chokidar "^4.0.0" - immutable "^5.0.2" + immutable "^5.1.5" source-map-js ">=0.6.2 <2.0.0" optionalDependencies: "@parcel/watcher" "^2.4.1" @@ -4516,6 +5867,13 @@ saxes@^5.0.1: dependencies: xmlchars "^2.2.0" +saxes@^6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz" + integrity sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA== + dependencies: + xmlchars "^2.2.0" + scheduler@^0.23.2: version "0.23.2" resolved 
"https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz" @@ -4528,20 +5886,20 @@ semver@^6.3.1: resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.1.3, semver@^7.3.5, semver@^7.6.0, semver@^7.6.3: - version "7.7.3" - resolved "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz" - integrity sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q== +semver@^7.1.3, semver@^7.3.5, semver@^7.6.3, semver@^7.7.3: + version "7.7.4" + resolved "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz" + integrity sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA== seroval-plugins@~1.5.0: - version "1.5.0" - resolved "https://registry.npmjs.org/seroval-plugins/-/seroval-plugins-1.5.0.tgz" - integrity sha512-EAHqADIQondwRZIdeW2I636zgsODzoBDwb3PT/+7TLDWyw1Dy/Xv7iGUIEXXav7usHDE9HVhOU61irI3EnyyHA== + version "1.5.1" + resolved "https://registry.npmjs.org/seroval-plugins/-/seroval-plugins-1.5.1.tgz" + integrity sha512-4FbuZ/TMl02sqv0RTFexu0SP6V+ywaIe5bAWCCEik0fk17BhALgwvUDVF7e3Uvf9pxmwCEJsRPmlkUE6HdzLAw== seroval@~1.5.0: - version "1.5.0" - resolved "https://registry.npmjs.org/seroval/-/seroval-1.5.0.tgz" - integrity sha512-OE4cvmJ1uSPrKorFIH9/w/Qwuvi/IMcGbv5RKgcJ/zjA/IohDLU6SVaxFN9FwajbP7nsX0dQqMDes1whk3y+yw== + version "1.5.1" + resolved "https://registry.npmjs.org/seroval/-/seroval-1.5.1.tgz" + integrity sha512-OwrZRZAfhHww0WEnKHDY8OM0U/Qs8OTfIDWhUD4BLpNJUfXK4cGmjiagGze086m+mhI+V2nD0gfbHEnJjb9STA== set-function-length@^1.2.2: version "1.2.2" @@ -4631,6 +5989,11 @@ side-channel@^1.1.0: side-channel-map "^1.0.1" side-channel-weakmap "^1.0.2" +siginfo@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz" + integrity sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g== + 
simple-concat@^1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz" @@ -4646,9 +6009,9 @@ simple-get@^4.0.0: simple-concat "^1.0.0" solid-js@^1.9.5: - version "1.9.11" - resolved "https://registry.npmjs.org/solid-js/-/solid-js-1.9.11.tgz" - integrity sha512-WEJtcc5mkh/BnHA6Yrg4whlF8g6QwpmXXRg4P2ztPmcKeHHlH4+djYecBLhSpecZY2RRECXYUwIc/C2r3yzQ4Q== + version "1.9.12" + resolved "https://registry.npmjs.org/solid-js/-/solid-js-1.9.12.tgz" + integrity sha512-QzKaSJq2/iDrWR1As6MHZQ8fQkdOBf8GReYb7L5iKwMGceg7HxDcaOHk0at66tNgn9U2U7dXo8ZZpLIAmGMzgw== dependencies: csstype "^3.1.0" seroval "~1.5.0" @@ -4662,13 +6025,28 @@ solid-js@^1.9.5: source-map@^0.5.7: version "0.5.7" resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz" - integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== + +space-separated-tokens@^2.0.0: + version "2.0.2" + resolved "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz#1ecd9d2350a3844572c3f4a312bceb018348859f" + integrity sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q== spectral.js@^2.0.2: version "2.0.2" resolved "https://registry.npmjs.org/spectral.js/-/spectral.js-2.0.2.tgz" integrity sha512-g7NA/GMc2C50ez/foALJW8DcwvwbMgW5WF0/1fmAib5AN8NkJwMVyWgkPeSGAm4D6XAFXdtz9KM4AreuV+hJsg== +stackback@0.0.2: + version "0.0.2" + resolved "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz" + integrity sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw== + +std-env@^4.0.0-rc.1: + version "4.0.0" + resolved "https://registry.npmjs.org/std-env/-/std-env-4.0.0.tgz" + integrity sha512-zUMPtQ/HBY3/50VbpkupYHbRroTRZJPRLvreamgErJVys0ceuzMkD44J/QjqhHjOzK42GQ3QZIeFG1OYfOtKqQ== + stop-iteration-iterator@^1.1.0: version "1.1.0" resolved 
"https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz" @@ -4768,12 +6146,27 @@ string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" +stringify-entities@^4.0.0: + version "4.0.4" + resolved "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz#b3b79ef5f277cc4ac73caeb0236c5ba939b3a4f3" + integrity sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg== + dependencies: + character-entities-html4 "^2.0.0" + character-entities-legacy "^3.0.0" + strip-ansi@^7.1.0: - version "7.1.2" - resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz" - integrity sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA== + version "7.2.0" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.2.0.tgz" + integrity sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w== + dependencies: + ansi-regex "^6.2.2" + +strip-indent@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz" + integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== dependencies: - ansi-regex "^6.0.1" + min-indent "^1.0.0" strip-json-comments@^3.1.1: version "3.1.1" @@ -4785,23 +6178,25 @@ strip-json-comments@~2.0.1: resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz" integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== +style-to-js@^1.0.0: + version "1.1.21" + resolved "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.21.tgz#2908941187f857e79e28e9cd78008b9a0b3e0e8d" + integrity sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ== + dependencies: + style-to-object "1.0.14" + +style-to-object@1.0.14: + version "1.0.14" + resolved 
"https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.14.tgz#1d22f0e7266bb8c6d8cae5caf4ec4f005e08f611" + integrity sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw== + dependencies: + inline-style-parser "0.2.7" + stylis@4.2.0: version "4.2.0" resolved "https://registry.npmjs.org/stylis/-/stylis-4.2.0.tgz" integrity sha512-Orov6g6BB1sDfYgzWfTHDOxamtX1bE/zo104Dh9e6fqJ3PooipYyfJ0pUmrZO2wAvO8YbEyeFrkV91XTsGMSrw== -stylis@^4.3.6: - version "4.3.6" - resolved "https://registry.npmjs.org/stylis/-/stylis-4.3.6.tgz" - integrity sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ== - -supports-color@^5.3.0: - version "5.5.0" - resolved "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - supports-color@^7.1.0: version "7.2.0" resolved "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz" @@ -4814,6 +6209,11 @@ supports-preserve-symlinks-flag@^1.0.0: resolved "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== +symbol-tree@^3.2.4: + version "3.2.4" + resolved "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + tar-fs@^2.0.0: version "2.1.4" resolved "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz" @@ -4842,28 +6242,56 @@ text-segmentation@^1.0.3: dependencies: utrie "^1.0.2" -tinyexec@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.1.tgz" - integrity sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw== +tinybench@^2.9.0: + version 
"2.9.0" + resolved "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz" + integrity sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg== + +tinyexec@^1.0.2: + version "1.0.4" + resolved "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.4.tgz" + integrity sha512-u9r3uZC0bdpGOXtlxUIdwf9pkmvhqJdrVCH9fapQtgy/OeTTMZ1nqH7agtvEfmGui6e1XxjcdrlxvxJvc3sMqw== + +tinyglobby@^0.2.15: + version "0.2.15" + resolved "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz" + integrity sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ== + dependencies: + fdir "^6.5.0" + picomatch "^4.0.3" + +tinyrainbow@^3.0.3: + version "3.1.0" + resolved "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.1.0.tgz" + integrity sha512-Bf+ILmBgretUrdJxzXM0SgXLZ3XfiaUuOj/IKQHuTXip+05Xn+uyEYdVg0kYDipTBcLrCVyUzAPz7QmArb0mmw== + +tiptap-markdown@^0.9.0: + version "0.9.0" + resolved "https://registry.npmjs.org/tiptap-markdown/-/tiptap-markdown-0.9.0.tgz" + integrity sha512-dKLQ9iiuGNgrlGVjrNauF/UBzWu4LYOx5pkD0jNkmQt/GOwfCJsBuzZTsf1jZ204ANHOm572mZ9PYvGh1S7tpQ== + dependencies: + "@types/markdown-it" "^13.0.7" + markdown-it "^14.1.0" + markdown-it-task-lists "^2.1.1" + prosemirror-markdown "^1.11.1" + +tldts-core@^7.0.27: + version "7.0.27" + resolved "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.27.tgz" + integrity sha512-YQ7uPjgWUibIK6DW5lrKujGwUKhLevU4hcGbP5O6TcIUb+oTjJYJVWPS4nZsIHrEEEG6myk/oqAJUEQmpZrHsg== + +tldts@^7.0.5: + version "7.0.27" + resolved "https://registry.npmjs.org/tldts/-/tldts-7.0.27.tgz" + integrity sha512-I4FZcVFcqCRuT0ph6dCDpPuO4Xgzvh+spkcTr1gK7peIvxWauoloVO0vuy1FQnijT63ss6AsHB6+OIM4aXHbPg== + dependencies: + tldts-core "^7.0.27" tmp@^0.2.0: version "0.2.5" resolved "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz" integrity sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow== -to-fast-properties@^2.0.0: - version "2.0.0" - 
resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz" - integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== - -to-regex-range@^5.0.1: - version "5.0.1" - resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz" - integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== - dependencies: - is-number "^7.0.0" - topojson-client@^3.1.0: version "3.1.0" resolved "https://registry.npmjs.org/topojson-client/-/topojson-client-3.1.0.tgz" @@ -4871,27 +6299,46 @@ topojson-client@^3.1.0: dependencies: commander "2" +tough-cookie@^6.0.1: + version "6.0.1" + resolved "https://registry.npmjs.org/tough-cookie/-/tough-cookie-6.0.1.tgz" + integrity sha512-LktZQb3IeoUWB9lqR5EWTHgW/VTITCXg4D21M+lvybRVdylLrRMnqaIONLVb5mav8vM19m44HIcGq4qASeu2Qw== + dependencies: + tldts "^7.0.5" + +tr46@^6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/tr46/-/tr46-6.0.0.tgz" + integrity sha512-bLVMLPtstlZ4iMQHpFHTR7GAGj2jxi8Dg0s2h2MafAE4uSWF98FC/3MomU51iQAMf8/qDUbKWf5GxuvvVcXEhw== + dependencies: + punycode "^2.3.1" + "traverse@>=0.3.0 <0.4": version "0.3.9" resolved "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz" integrity sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ== -ts-api-utils@^2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz" - integrity sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ== +trim-lines@^3.0.0: + version "3.0.1" + resolved "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz#d802e332a07df861c48802c04321017b1bd87338" + integrity sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg== -ts-dedent@^2.2.0: +trough@^2.0.0: version "2.2.0" - resolved "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz" - 
integrity sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ== + resolved "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz#94a60bd6bd375c152c1df911a4b11d5b0256f50f" + integrity sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw== + +ts-api-utils@^2.4.0: + version "2.5.0" + resolved "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz" + integrity sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA== tslib@2.3.0: version "2.3.0" resolved "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz" integrity sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg== -tslib@^2.8.1, tslib@~2.8.1: +tslib@^2.4.0, tslib@^2.8.1, tslib@~2.8.1: version "2.8.1" resolved "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz" integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== @@ -4956,24 +6403,24 @@ typed-array-length@^1.0.7: reflect.getprototypeof "^1.0.6" typescript-eslint@^8.16.0: - version "8.46.2" - resolved "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.46.2.tgz" - integrity sha512-vbw8bOmiuYNdzzV3lsiWv6sRwjyuKJMQqWulBOU7M0RrxedXledX8G8kBbQeiOYDnTfiXz0Y4081E1QMNB6iQg== + version "8.57.2" + resolved "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.57.2.tgz" + integrity sha512-VEPQ0iPgWO/sBaZOU1xo4nuNdODVOajPnTIbog2GKYr31nIlZ0fWPoCQgGfF3ETyBl1vn63F/p50Um9Z4J8O8A== dependencies: - "@typescript-eslint/eslint-plugin" "8.46.2" - "@typescript-eslint/parser" "8.46.2" - "@typescript-eslint/typescript-estree" "8.46.2" - "@typescript-eslint/utils" "8.46.2" + "@typescript-eslint/eslint-plugin" "8.57.2" + "@typescript-eslint/parser" "8.57.2" + "@typescript-eslint/typescript-estree" "8.57.2" + "@typescript-eslint/utils" "8.57.2" typescript@^4.9.5: version "4.9.5" resolved 
"https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz" integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== -ufo@^1.6.1: - version "1.6.1" - resolved "https://registry.npmjs.org/ufo/-/ufo-1.6.1.tgz" - integrity sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA== +uc.micro@^2.0.0, uc.micro@^2.1.0: + version "2.1.0" + resolved "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz" + integrity sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A== unbox-primitive@^1.1.0: version "1.1.0" @@ -4990,6 +6437,62 @@ undici-types@~6.21.0: resolved "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz" integrity sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ== +undici@^7.24.5: + version "7.24.5" + resolved "https://registry.npmjs.org/undici/-/undici-7.24.5.tgz" + integrity sha512-3IWdCpjgxp15CbJnsi/Y9TCDE7HWVN19j1hmzVhoAkY/+CJx449tVxT5wZc1Gwg8J+P0LWvzlBzxYRnHJ+1i7Q== + +unified@^11.0.0: + version "11.0.5" + resolved "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz#f66677610a5c0a9ee90cab2b8d4d66037026d9e1" + integrity sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA== + dependencies: + "@types/unist" "^3.0.0" + bail "^2.0.0" + devlop "^1.0.0" + extend "^3.0.0" + is-plain-obj "^4.0.0" + trough "^2.0.0" + vfile "^6.0.0" + +unist-util-is@^6.0.0: + version "6.0.1" + resolved "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz#d0a3f86f2dd0db7acd7d8c2478080b5c67f9c6a9" + integrity sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g== + dependencies: + "@types/unist" "^3.0.0" + +unist-util-position@^5.0.0: + version "5.0.0" + resolved "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz#678f20ab5ca1207a97d7ea8a388373c9cf896be4" + integrity 
sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA== + dependencies: + "@types/unist" "^3.0.0" + +unist-util-stringify-position@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz#449c6e21a880e0855bf5aabadeb3a740314abac2" + integrity sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ== + dependencies: + "@types/unist" "^3.0.0" + +unist-util-visit-parents@^6.0.0: + version "6.0.2" + resolved "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz#777df7fb98652ce16b4b7cd999d0a1a40efa3a02" + integrity sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ== + dependencies: + "@types/unist" "^3.0.0" + unist-util-is "^6.0.0" + +unist-util-visit@^5.0.0: + version "5.1.0" + resolved "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.1.0.tgz#9a2a28b0aa76a15e0da70a08a5863a2f060e2468" + integrity sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg== + dependencies: + "@types/unist" "^3.0.0" + unist-util-is "^6.0.0" + unist-util-visit-parents "^6.0.0" + unzipper@^0.10.11: version "0.10.14" resolved "https://registry.npmjs.org/unzipper/-/unzipper-0.10.14.tgz" @@ -5013,18 +6516,18 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" -use-resize-observer@^9.0.0: - version "9.1.0" - resolved "https://registry.npmjs.org/use-resize-observer/-/use-resize-observer-9.1.0.tgz" - integrity sha512-R25VqO9Wb3asSD4eqtcxk8sJalvIOYBqS8MNZlpDSQ4l4xMQxC/J7Id9HoTqPq8FwULIn0PVW+OAqF2dyYbjow== - dependencies: - "@juggle/resize-observer" "^3.3.1" - -use-sync-external-store@^1.0.0: +use-sync-external-store@^1.0.0, use-sync-external-store@^1.4.0, use-sync-external-store@^1.6.0: version "1.6.0" resolved "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz" integrity 
sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w== +usehooks-ts@^3.1.1: + version "3.1.1" + resolved "https://registry.npmjs.org/usehooks-ts/-/usehooks-ts-3.1.1.tgz" + integrity sha512-I4diPp9Cq6ieSUH2wu+fDAVQO43xwtulo+fKEidHUwZPnYImbtkTjzIJYcDcJqxgmX31GVqNFURodvcgHcW0pA== + dependencies: + lodash.debounce "^4.0.8" + util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" @@ -5037,20 +6540,15 @@ utrie@^1.0.2: dependencies: base64-arraybuffer "^1.0.2" -uuid@^11.1.0: - version "11.1.0" - resolved "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz" - integrity sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A== - uuid@^8.3.0: version "8.3.2" resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== validator@^13.15.20: - version "13.15.20" - resolved "https://registry.npmjs.org/validator/-/validator-13.15.20.tgz" - integrity sha512-KxPOq3V2LmfQPP4eqf3Mq/zrT0Dqp2Vmx2Bn285LwVahLc+CsxOM0crBHczm8ijlcjZ0Q5Xd6LW3z3odTPnlrw== + version "13.15.26" + resolved "https://registry.npmjs.org/validator/-/validator-13.15.26.tgz" + integrity sha512-spH26xU080ydGggxRyR1Yhcbgx+j3y5jbNXk/8L+iRvdIEQ4uTRH2Sgf2dokud6Q4oAtsbNvJ1Ft+9xmm6IZcA== vega-canvas@^2.0.0: version "2.0.0" @@ -5449,6 +6947,22 @@ vega@^6.2.0: vega-voronoi "~5.1.0" vega-wordcloud "~5.1.0" +vfile-message@^4.0.0: + version "4.0.3" + resolved "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz#87b44dddd7b70f0641c2e3ed0864ba73e2ea8df4" + integrity sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw== + dependencies: + "@types/unist" "^3.0.0" + unist-util-stringify-position "^4.0.0" + +vfile@^6.0.0: + version "6.0.3" + resolved 
"https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz#3652ab1c496531852bf55a6bac57af981ebc38ab" + integrity sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q== + dependencies: + "@types/unist" "^3.0.0" + vfile-message "^4.0.0" + vite@^5.4.21: version "5.4.21" resolved "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz" @@ -5460,45 +6974,85 @@ vite@^5.4.21: optionalDependencies: fsevents "~2.3.3" +"vite@^6.0.0 || ^7.0.0 || ^8.0.0": + version "8.0.2" + resolved "https://registry.npmjs.org/vite/-/vite-8.0.2.tgz" + integrity sha512-1gFhNi+bHhRE/qKZOJXACm6tX4bA3Isy9KuKF15AgSRuRazNBOJfdDemPBU16/mpMxApDPrWvZ08DcLPEoRnuA== + dependencies: + lightningcss "^1.32.0" + picomatch "^4.0.3" + postcss "^8.5.8" + rolldown "1.0.0-rc.11" + tinyglobby "^0.2.15" + optionalDependencies: + fsevents "~2.3.3" + +vitest@^4.1.0: + version "4.1.1" + resolved "https://registry.npmjs.org/vitest/-/vitest-4.1.1.tgz" + integrity sha512-yF+o4POL41rpAzj5KVILUxm1GCjKnELvaqmU9TLLUbMfDzuN0UpUR9uaDs+mCtjPe+uYPksXDRLQGGPvj1cTmA== + dependencies: + "@vitest/expect" "4.1.1" + "@vitest/mocker" "4.1.1" + "@vitest/pretty-format" "4.1.1" + "@vitest/runner" "4.1.1" + "@vitest/snapshot" "4.1.1" + "@vitest/spy" "4.1.1" + "@vitest/utils" "4.1.1" + es-module-lexer "^2.0.0" + expect-type "^1.3.0" + magic-string "^0.30.21" + obug "^2.1.1" + pathe "^2.0.3" + picomatch "^4.0.3" + std-env "^4.0.0-rc.1" + tinybench "^2.9.0" + tinyexec "^1.0.2" + tinyglobby "^0.2.15" + tinyrainbow "^3.0.3" + vite "^6.0.0 || ^7.0.0 || ^8.0.0" + why-is-node-running "^2.3.0" + vm-browserify@^1.1.2: version "1.1.2" resolved "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.2.tgz" integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== -vscode-jsonrpc@8.2.0: - version "8.2.0" - resolved "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz" - integrity 
sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA== +void-elements@3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/void-elements/-/void-elements-3.1.0.tgz" + integrity sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w== + +w3c-keyname@^2.2.0: + version "2.2.8" + resolved "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz" + integrity sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ== -vscode-languageserver-protocol@3.17.5: - version "3.17.5" - resolved "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz" - integrity sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg== +w3c-xmlserializer@^5.0.0: + version "5.0.0" + resolved "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz" + integrity sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA== dependencies: - vscode-jsonrpc "8.2.0" - vscode-languageserver-types "3.17.5" + xml-name-validator "^5.0.0" -vscode-languageserver-textdocument@~1.0.11: - version "1.0.12" - resolved "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz" - integrity sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA== +webidl-conversions@^8.0.1: + version "8.0.1" + resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-8.0.1.tgz" + integrity sha512-BMhLD/Sw+GbJC21C/UgyaZX41nPt8bUTg+jWyDeg7e7YN4xOM05YPSIXceACnXVtqyEw/LMClUQMtMZ+PGGpqQ== -vscode-languageserver-types@3.17.5: - version "3.17.5" - resolved "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz" - integrity sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg== 
+whatwg-mimetype@^5.0.0: + version "5.0.0" + resolved "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-5.0.0.tgz" + integrity sha512-sXcNcHOC51uPGF0P/D4NVtrkjSU2fNsm9iog4ZvZJsL3rjoDAzXZhkm2MWt1y+PUdggKAYVoMAIYcs78wJ51Cw== -vscode-languageserver@~9.0.1: - version "9.0.1" - resolved "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-9.0.1.tgz" - integrity sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g== +whatwg-url@^16.0.0, whatwg-url@^16.0.1: + version "16.0.1" + resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-16.0.1.tgz" + integrity sha512-1to4zXBxmXHV3IiSSEInrreIlu02vUOvrhxJJH5vcxYTBDAx51cqZiKdyTxlecdKNSjj8EcxGBxNf6Vg+945gw== dependencies: - vscode-languageserver-protocol "3.17.5" - -vscode-uri@~3.0.8: - version "3.0.8" - resolved "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.8.tgz" - integrity sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw== + "@exodus/bytes" "^1.11.0" + tr46 "^6.0.0" + webidl-conversions "^8.0.1" which-boxed-primitive@^1.1.0, which-boxed-primitive@^1.1.1: version "1.1.1" @@ -5541,9 +7095,9 @@ which-collection@^1.0.2: is-weakset "^2.0.3" which-typed-array@^1.1.16, which-typed-array@^1.1.19: - version "1.1.19" - resolved "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz" - integrity sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw== + version "1.1.20" + resolved "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.20.tgz" + integrity sha512-LYfpUkmqwl0h9A2HL09Mms427Q1RZWuOHsukfVcKRq9q95iQxdw0ix1JQrqbcDR9PH1QDwf5Qo8OZb5lksZ8Xg== dependencies: available-typed-arrays "^1.0.7" call-bind "^1.0.8" @@ -5560,6 +7114,14 @@ which@^2.0.1: dependencies: isexe "^2.0.0" +why-is-node-running@^2.3.0: + version "2.3.0" + resolved "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz" + integrity 
sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w== + dependencies: + siginfo "^2.0.0" + stackback "0.0.2" + word-wrap@^1.2.5: version "1.2.5" resolved "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz" @@ -5579,6 +7141,11 @@ wrappy@1: resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== +xml-name-validator@^5.0.0: + version "5.0.0" + resolved "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz" + integrity sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg== + xmlchars@^2.2.0: version "2.2.0" resolved "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz" @@ -5590,9 +7157,9 @@ y18n@^5.0.5: integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== yaml@^1.10.0: - version "1.10.2" - resolved "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz" - integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + version "1.10.3" + resolved "https://registry.npmjs.org/yaml/-/yaml-1.10.3.tgz" + integrity sha512-vIYeF1u3CjlhAFekPPAk2h/Kv4T3mAkMox5OymRiJQB0spDP10LHvt+K7G9Ny6NuuMAb25/6n1qyUjAcGNf/AA== yargs-parser@^22.0.0: version "22.0.0" @@ -5631,3 +7198,8 @@ zrender@6.0.0: integrity sha512-41dFXEEXuJpNecuUQq6JlbybmnHaqqpGlbH1yxnA5V9MMP4SbohSVZsJIwz+zdjQXSSlR1Vc34EgH1zxyTDvhg== dependencies: tslib "2.3.0" + +zwitch@^2.0.0: + version "2.0.4" + resolved "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz#c827d4b0acb76fc3e685a4c6ec2902d51070e9d7" + integrity sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==