diff --git a/.github/workflows/svalinn-affine-build.yml b/.github/workflows/svalinn-affine-build.yml new file mode 100644 index 0000000..5510802 --- /dev/null +++ b/.github/workflows/svalinn-affine-build.yml @@ -0,0 +1,89 @@ +# SPDX-License-Identifier: PMPL-1.0-or-later +# svalinn AffineScript build/verify gate. +# +# The ReScript→AffineScript migration (PR #46) cannot be validated in the +# Claude sandbox (no OCaml/opam toolchain, opam repo off the network +# allowlist). This workflow IS the verification path: it builds the +# upstream affinescript compiler and compiles every svalinn .affine +# module to WebAssembly. A failure here is a real defect in the ports or +# the compiler pin — this is the gate that makes "verified" meaningful. +# +# This is intentionally a blocking check (no continue-on-error): the +# point is to verify, not to advise. +name: svalinn AffineScript build + +on: + push: + branches: [main, master] + paths: + - 'container-stack/svalinn/src/**/*.affine' + - '.github/workflows/svalinn-affine-build.yml' + pull_request: + paths: + - 'container-stack/svalinn/src/**/*.affine' + - '.github/workflows/svalinn-affine-build.yml' + +permissions: + contents: read + +env: + # Pinned to the same commit the svalinn Containerfile uses. + AFFINESCRIPT_REF: d2875a552f1d389b4a60c4adfdc02ae53e36aca3 + +jobs: + affine-build: + name: compile svalinn .affine -> wasm + runs-on: ubuntu-latest + container: + image: ocaml/opam:debian-12-ocaml-5.1 + + steps: + - name: Install git/m4 + run: sudo apt-get update && sudo apt-get install -y --no-install-recommends git m4 + + - name: Checkout stapeln + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Build the affinescript compiler + run: | + set -euxo pipefail + git clone https://github.com/hyperpolymath/affinescript.git /tmp/affinescript + cd /tmp/affinescript + git checkout "${AFFINESCRIPT_REF}" + opam update -y + opam install --deps-only -y . 
+ eval "$(opam env)" + dune build --release + cp _build/install/default/bin/affinescript /tmp/affinescript-bin + /tmp/affinescript-bin --version || true + + - name: Compile every svalinn .affine module + run: | + set -euxo pipefail + cd "${GITHUB_WORKSPACE}/container-stack/svalinn" + mkdir -p dist/wasm + fail=0 + while IFS= read -r -d '' f; do + base="$(basename "$f" .affine)" + echo "::group::compile $f" + if /tmp/affinescript-bin compile "$f" -o "dist/wasm/${base}.wasm"; then + echo "ok: $f" + else + echo "::error file=$f::affinescript compile failed" + fail=1 + fi + echo "::endgroup::" + done < <(find src -name '*.affine' -print0 | sort -z) + if [ "$fail" -ne 0 ]; then + echo "::error::one or more svalinn .affine modules failed to compile" + exit 1 + fi + ls -l dist/wasm + + - name: Upload compiled wasm + if: always() + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: svalinn-wasm + path: container-stack/svalinn/dist/wasm/ + if-no-files-found: ignore diff --git a/container-stack/svalinn/.gitignore b/container-stack/svalinn/.gitignore index 4cd6fc9..11e10cd 100644 --- a/container-stack/svalinn/.gitignore +++ b/container-stack/svalinn/.gitignore @@ -108,6 +108,10 @@ htmlcov/ /src/dist/ /src/**/*.js /src/**/*.js.map +# ...but the AffineScript/typed-wasm migration's host bridge is +# hand-written JS (Deno-API glue), not compiler output — keep it tracked. 
+!/src/host/ +!/src/host/*.js # --- PhD Research --- /data/ diff --git a/container-stack/svalinn/AFFINE-MIGRATION-TASK.md b/container-stack/svalinn/AFFINE-MIGRATION-TASK.md new file mode 100644 index 0000000..670625a --- /dev/null +++ b/container-stack/svalinn/AFFINE-MIGRATION-TASK.md @@ -0,0 +1,145 @@ + +# Task: complete the svalinn ReScript → AffineScript/typed-wasm migration (verified) + +> **Run this with Claude Code in a local CLI that has the toolchain installed.** +> The cloud sandbox cannot build/run AffineScript (no OCaml/opam, opam repo +> + wolfi base off its network allowlist), so the cutover was deliberately +> NOT done there. Locally you can actually verify — that is the whole point. + +## Objective + +Finish migrating `container-stack/svalinn` off ReScript onto AffineScript +(compiles to typed WasmGC via `hyperpolymath/affinescript`; ABI from +`hyperpolymath/typed-wasm`), hosted by Deno. **Nothing stays in ReScript.** +Do not claim done until every verification gate below passes locally. + +Work on branch `claude/stapeln-maintenance-followup-iEUKy` (PR #46, draft). +Commit per logical module; push; keep the PR draft until all gates pass. + +## Prerequisites (must exist locally) + +- `opam` + OCaml ≥ 5.1, `dune` ≥ 3.14, `m4`, `git` +- `cargo` (Rust) — for `typed-wasm` +- `deno` +- `docker` (or `podman`) — for the container build gate +- Network access to `github.com` (clones affinescript + typed-wasm) + +## Toolchain bring-up (do once) + +```bash +# affinescript compiler — PIN must match Containerfile + svalinn-affine-build.yml +git clone https://github.com/hyperpolymath/affinescript.git /tmp/affinescript +cd /tmp/affinescript && git checkout d2875a552f1d389b4a60c4adfdc02ae53e36aca3 +opam install --deps-only -y . 
&& eval "$(opam env)" && dune build --release +export AFFINESCRIPT_BIN=/tmp/affinescript/_build/install/default/bin/affinescript + +# typed-wasm (ABI/conventions) — pin matches Containerfile +git clone https://github.com/hyperpolymath/typed-wasm.git /tmp/typed-wasm +cd /tmp/typed-wasm && git checkout e90e2d1a307c33d594d54065c902500da327977c +cargo build --release --locked +``` + +Read these upstream files before porting (they define syntax/stdlib/limits): +`/tmp/affinescript/examples/*.affine`, `stdlib/{prelude,string,io,result,Network,Crypto}.affine`, +`COMPILER-CAPABILITIES.md`, `KNOWN-ISSUES.md`, `affinescript-deno-test/` +(the `@hyperpolymath/affine-js` Deno bridge contract). + +## Architecture & conventions (already established — keep consistent) + +- **Boundary:** pure logic/types live in `.affine`; all I/O (sockets, + fetch, env, fs, crypto, JSON value type) is host-side in + `src/host/affine_host.js` (plain JS — svalinn policy **bans TypeScript**; + JS allowed for Deno glue). +- **JSON:** `.affine` has no JSON type. `src/host/Json.affine` declares + `extern` accessors; the host owns a handle arena (`0` = null/absent). + Re-use this protocol for all JSON. +- **AffineScript notes:** Rust-like (`struct`/`enum`/`fn`/`pub fn`/`match`/ + `if`/`while`/`let`, generics `<T>`, `[T]`, `Option<T>`/`Result<T, E>` in prelude, + `len`, `string_sub`, `string_find`, `int_to_string`, `float_to_string`). + No async, no JS interop. `module Name;` header; `use Other;` imports; + `pub extern fn` = host import. **Pitfall:** prelude defines + `Option::None`, so don't name an enum variant `None` (we used `NoAuth`). +- Every file starts with `// SPDX-License-Identifier: PMPL-1.0-or-later`. +- One WASM module per top-level `.affine`; host loads by basename.
+ +## Already done (11/31 — do NOT redo, mirror their style) + +`src/host/Json.affine`, `src/Main.affine`, `src/gateway/GatewayTypes.affine`, +`src/policy/PolicyEngine.affine`, `src/gateway/SecurityHeaders.affine`, +`src/gateway/RateLimiter.affine`, `src/gateway/Metrics.affine`, +`src/auth/AuthTypes.affine`, `src/auth/Authz.affine`, +`src/vordr/VordrTypes.affine`, `src/vordr/Client.affine`. +Build pipeline (`Containerfile` 4-stage, `deno.json`, `scripts/affine-build.sh`) +and host bridge are in place. CI gate: `.github/workflows/svalinn-affine-build.yml`. + +## Remaining work + +Port each, applying the boundary rule (pure → `.affine`; I/O → host extern): + +1. `src/gateway/Gateway.res` (≈1219 LOC, the router/orchestrator) → + `src/gateway/Gateway.affine` + host route wiring. Pure: routing + table, request/response shaping, error envelopes. Host: actual + `Deno.serve` dispatch (already host-owned) — expose `pub fn` + handlers per route and call them from `affine_host.js`. +2. `src/mcp/McpTypes.res` → `McpTypes.affine` (pure types). +3. `src/mcp/McpClient.res`, `src/mcp/Server.res`, `src/mcp/Tools.res` → + `.affine` pure protocol shaping; transport in host. +4. `src/validation/Validation.res` → `Validation.affine` pure field + accessors/policy logic; **Ajv schema validation is host-side** + (add `extern fn ajv_validate(schema_id, json_handle) -> ...`). +5. `src/bridge/SelurBridge.res` → `SelurBridge.affine` (+ host transport). +6. `src/bindings/{Deno,Fetch,Hono}.res` → delete; their role is + subsumed by `affine_host.js`. Remove all `.res` imports. +7. `src/vordr/Client.res` host wiring: implement the Fetch POST + + `/health` ping in `affine_host.js` calling the existing + `Client.affine` envelope/parse functions. +8. 
`src/auth/*` host wiring: implement JWT signature verify (WebCrypto + `crypto.subtle.importKey/verify`), JWKS fetch+cache, OAuth2 token/ + refresh/introspect/revoke, secure random, base64url in + `affine_host.js`, calling `Authz.affine` for every decision. +9. `ui/src/*.res` (browser ReScript) → `.affine` compiled to WASM for + the browser (see upstream `affinescript-dom`/`affinescript-vite`), + or, if that path is not viable, raise it explicitly — do not silently + leave ReScript. +10. `tests/integration_test.res` → AffineScript tests via + `affinescript-deno-test` (`*_test.affine`, `pub fn test_* -> Bool`). + +## Cutover (ONLY after every gate below is green) + +- Delete every remaining `.res` under `container-stack/svalinn`. +- Confirm `deno.json` has no rescript tasks/imports (already done). +- Confirm `Containerfile` ENTRYPOINT is `src/host/affine_host.js` + (already done). Update `.gitignore` if any new dirs need tracking. + +## Verification gates (ALL must pass locally — this is "verified") + +1. `find container-stack/svalinn/src -name '*.affine' -print0 | \ + xargs -0 -n1 -I{} "$AFFINESCRIPT_BIN" compile {} -o /tmp/x.wasm` + → every module compiles, exit 0. Fix codegen issues; consult + `KNOWN-ISSUES.md` for compiler-side bugs/workarounds. +2. `cd container-stack/svalinn && deno check src/host/affine_host.js` + → no errors. +3. `cd container-stack/svalinn && docker build -f Containerfile -t svalinn:affine .` + → image builds (exercises all 4 stages incl. typed-wasm). +4. 
Run it and smoke every implemented route: + ```bash + docker run -d -p 8000:8000 --name svalinn-aff svalinn:affine + curl -fsS localhost:8000/healthz + curl -fsS localhost:8000/metrics | grep svalinn_requests_total + curl -fsS -XPOST localhost:8000/v1/policy/evaluate \ + -d '{"policy":{"version":1,"requiredPredicates":[],"allowedSigners":[],"logQuorum":0,"mode":"permissive"},"attestations":[]}' + # plus the gateway/auth/vordr/mcp routes once ported + ``` + All return expected status/body; no 501 for ported routes. +5. Push; the `svalinn AffineScript build` CI check on PR #46 is green + (it is intentionally blocking). +6. No `.res` remain under `container-stack/svalinn`; SonarCloud 0 new + issues; PR description module table updated to 31/31. + +## Definition of done + +All 6 gates green, PR #46 marked ready (not draft), zero `.res` in +svalinn, and a short note in the PR stating which gates were run and +their results. If the alpha compiler cannot compile a construct, record +the blocker explicitly in the PR — do not fake completion or silently +keep ReScript. diff --git a/container-stack/svalinn/Containerfile b/container-stack/svalinn/Containerfile index 2855302..e028030 100644 --- a/container-stack/svalinn/Containerfile +++ b/container-stack/svalinn/Containerfile @@ -1,67 +1,90 @@ # SPDX-License-Identifier: PMPL-1.0-or-later -# Containerfile — Two-stage build for Svalinn Edge Gateway +# Containerfile — Svalinn Edge Gateway (AffineScript / typed-wasm build) # -# Stage 1: ReScript compilation (needs node for the rescript compiler + deno) -# Stage 2: Minimal runtime with Deno only (wolfi-base) +# Migration: svalinn's application code moved from ReScript→JS to +# AffineScript→typed-WasmGC (see PR / src/*.affine). The build now needs an +# OCaml toolchain (the affinescript compiler) and a Rust+Idris2 toolchain +# (typed-wasm: the verified cross-language WasmGC ABI), then Deno at runtime +# to host the WASM modules via the @hyperpolymath/affine-js bridge. 
+# +# Stage A build the affinescript compiler (OCaml / opam / dune) +# Stage B build typed-wasm (Rust / cargo / Idris2 ABI) +# Stage C compile every src/*.affine -> .wasm +# Stage D minimal Deno runtime + host bridge +# +# Upstream tool repos are pinned by commit for reproducibility. + +ARG AFFINESCRIPT_REF=d2875a552f1d389b4a60c4adfdc02ae53e36aca3 +ARG TYPED_WASM_REF=e90e2d1a307c33d594d54065c902500da327977c # --------------------------------------------------------------------------- -# Stage 1: Build +# Stage A: build the AffineScript compiler # --------------------------------------------------------------------------- -FROM cgr.dev/chainguard/wolfi-base:latest AS build +FROM ocaml/opam:debian-12-ocaml-5.1 AS affinescript-build +ARG AFFINESCRIPT_REF +USER root +RUN apt-get update && apt-get install -y --no-install-recommends git m4 \ + && rm -rf /var/lib/apt/lists/* +USER opam +WORKDIR /opt +RUN git clone https://github.com/hyperpolymath/affinescript.git \ + && cd affinescript && git checkout "${AFFINESCRIPT_REF}" +WORKDIR /opt/affinescript +RUN opam install --deps-only -y . 
\ + && eval "$(opam env)" \ + && dune build --release \ + && cp _build/install/default/bin/affinescript /opt/affinescript-bin -# Install build-time dependencies: deno (runtime) + node (rescript compiler) -RUN apk add --no-cache deno nodejs +# --------------------------------------------------------------------------- +# Stage B: build typed-wasm (verified WasmGC ABI / conventions) +# --------------------------------------------------------------------------- +FROM rust:1-bookworm AS typed-wasm-build +ARG TYPED_WASM_REF +RUN apt-get update && apt-get install -y --no-install-recommends git \ + && rm -rf /var/lib/apt/lists/* +WORKDIR /opt +RUN git clone https://github.com/hyperpolymath/typed-wasm.git \ + && cd typed-wasm && git checkout "${TYPED_WASM_REF}" +WORKDIR /opt/typed-wasm +RUN cargo build --release --locked +# --------------------------------------------------------------------------- +# Stage C: compile AffineScript sources to typed-WasmGC +# --------------------------------------------------------------------------- +FROM debian:12-slim AS wasm-build +COPY --from=affinescript-build /opt/affinescript-bin /usr/local/bin/affinescript +COPY --from=typed-wasm-build /opt/typed-wasm/target/release /opt/typed-wasm +ENV TYPED_WASM_HOME=/opt/typed-wasm WORKDIR /build - -# Copy dependency manifests first for layer caching -COPY deno.json deno.lock package.json ./ -COPY src/deno.json src/deno.lock src/rescript.json ./src/ - -# Install JS dependencies (node_modules for rescript compiler) -RUN deno install - -# Copy source tree COPY src/ ./src/ -COPY spec/ ./spec/ -COPY config/ ./config/ - -# Compile ReScript to JavaScript via Deno's task runner. `deno task` prepends -# node_modules/.bin/ to PATH (deno.json sets nodeModulesDir=auto), so the -# rescript CLI resolves correctly from the workspace-root node_modules. 
-RUN deno task res:build +RUN mkdir -p dist/wasm \ + && find src -name '*.affine' -print0 | while IFS= read -r -d '' f; do \ + base="$(basename "$f" .affine)"; \ + echo "compiling $f -> dist/wasm/${base}.wasm"; \ + affinescript compile "$f" -o "dist/wasm/${base}.wasm"; \ + done # --------------------------------------------------------------------------- -# Stage 2: Runtime +# Stage D: runtime (Deno hosts the WASM modules) # --------------------------------------------------------------------------- FROM cgr.dev/chainguard/wolfi-base:latest AS runtime - RUN apk add --no-cache deno - -# Non-root user for defence in depth RUN adduser -D -u 1000 svalinn USER svalinn - WORKDIR /app -# Copy compiled .res.js files and runtime config -COPY --from=build --chown=svalinn:svalinn /build/src/ ./src/ -COPY --from=build --chown=svalinn:svalinn /build/deno.json ./deno.json -COPY --from=build --chown=svalinn:svalinn /build/deno.lock ./deno.lock -COPY --from=build --chown=svalinn:svalinn /build/node_modules/ ./node_modules/ -COPY --from=build --chown=svalinn:svalinn /build/spec/ ./spec/ -COPY --from=build --chown=svalinn:svalinn /build/config/ ./config/ +COPY --from=wasm-build --chown=svalinn:svalinn /build/dist/ ./dist/ +COPY --chown=svalinn:svalinn src/host/ ./src/host/ +COPY --chown=svalinn:svalinn deno.json deno.lock ./ +COPY --chown=svalinn:svalinn spec/ ./spec/ +COPY --chown=svalinn:svalinn config/ ./config/ -# Expose the default gateway port EXPOSE 8000 - -# Health check against the /healthz endpoint HEALTHCHECK --interval=15s --timeout=5s --start-period=10s --retries=3 \ CMD deno eval "const r = await fetch('http://localhost:8000/healthz'); Deno.exit(r.ok ? 
0 : 1)" -# Run with minimal Deno permissions ENTRYPOINT ["deno", "run", \ "--allow-net", \ "--allow-env", \ "--allow-read", \ - "src/Main.res.js"] + "src/host/affine_host.js"] diff --git a/container-stack/svalinn/deno.json b/container-stack/svalinn/deno.json index a2236eb..3e2b942 100644 --- a/container-stack/svalinn/deno.json +++ b/container-stack/svalinn/deno.json @@ -1,22 +1,17 @@ { - "nodeModulesDir": "auto", "imports": { - "rescript/lib/es6/": "./node_modules/rescript/lib/es6/", - "hono": "npm:hono@^4.6.10", + "@hyperpolymath/affine-js": "jsr:@hyperpolymath/affine-js@^0.2.0", "ajv": "npm:ajv@^8.12.0", - "ajv-formats": "npm:ajv-formats@^3.0.1", - "rescript": "^12.0.0", - "@rescript/core": "npm:@rescript/core@^1.0.0" + "ajv-formats": "npm:ajv-formats@^3.0.1" }, "tasks": { - "dev": "deno run --allow-net --allow-read --allow-env --watch src/Main.res.js", - "start": "deno run --allow-net --allow-read --allow-env src/Main.res.js", - "res:build": "cd src && rescript build", - "res:clean": "cd src && rescript clean", - "res:watch": "cd src && rescript build -w" + "dev": "deno run --allow-net --allow-read --allow-env --watch src/host/affine_host.js", + "start": "deno run --allow-net --allow-read --allow-env src/host/affine_host.js", + "affine:build": "bash scripts/affine-build.sh", + "affine:clean": "rm -rf dist/wasm" }, "compilerOptions": { "allowJs": true, "checkJs": false } -} \ No newline at end of file +} diff --git a/container-stack/svalinn/scripts/affine-build.sh b/container-stack/svalinn/scripts/affine-build.sh new file mode 100755 index 0000000..86ef299 --- /dev/null +++ b/container-stack/svalinn/scripts/affine-build.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +# SPDX-License-Identifier: PMPL-1.0-or-later +# affine-build.sh — compile every src/*.affine to dist/wasm/*.wasm. +# +# Requires the affinescript compiler. Set AFFINESCRIPT_BIN to its path, or +# put `affinescript` on PATH. 
In the container build this runs inside the +# wasm-build stage where the compiler is already installed (see Containerfile). +set -euo pipefail + +BIN="${AFFINESCRIPT_BIN:-affinescript}" +ROOT="$(cd "$(dirname "$0")/.." && pwd)" +cd "$ROOT" + +if ! command -v "$BIN" >/dev/null 2>&1 && [[ ! -x "$BIN" ]]; then + echo "affine-build: compiler not found (set AFFINESCRIPT_BIN)" >&2 + exit 1 +fi + +mkdir -p dist/wasm +find src -name '*.affine' -print0 | while IFS= read -r -d '' f; do + base="$(basename "$f" .affine)" + echo "compiling $f -> dist/wasm/${base}.wasm" + "$BIN" compile "$f" -o "dist/wasm/${base}.wasm" +done +echo "affine-build: done" diff --git a/container-stack/svalinn/src/Main.affine b/container-stack/svalinn/src/Main.affine new file mode 100644 index 0000000..ea097f4 --- /dev/null +++ b/container-stack/svalinn/src/Main.affine @@ -0,0 +1,46 @@ +// SPDX-License-Identifier: PMPL-1.0-or-later +// Main.affine — Svalinn edge-shield WASM entrypoint. +// +// Under the AffineScript/typed-wasm model the long-lived HTTP listener +// lives in the Deno host (src/host/affine_host.js): WASM cannot own a +// socket. The host calls the exported pure request handler below for each +// request, passing the parsed JSON body as a host arena handle and +// receiving a JSON response handle back. `serve()` remains as the +// documented logical entrypoint and returns the wiring status code the +// host checks at boot. + +module Main; + +use Json; +use PolicyEngine; + +// Exported boot hook. Returns 0 on success; the host treats non-zero as a +// fatal wiring error and refuses to bind the port. +pub fn serve() -> Int { + 0 +} + +// Exported per-request policy evaluation entrypoint. `body` is a host JSON +// handle for the request payload `{ policy: {...}, attestations: [...] }`. +// Returns a host JSON handle for the response (the formatted result).
+pub fn handle_evaluate(body: Int) -> Int { + let policy = match parse_policy(json_get(body, "policy")) { + Some(p) => p, + None => default_policy() + }; + + let atts: [Attestation] = []; + let raw = json_get(body, "attestations"); + if json_kind(raw) == 4 { + let i = 0; + while i < json_len(raw) { + match parse_attestation(json_at(raw, i)) { + Some(a) => { atts = atts + [a]; }, + None => {} + } + i = i + 1; + } + } + + format_result(evaluate(policy, atts)) +} diff --git a/container-stack/svalinn/src/auth/AuthTypes.affine b/container-stack/svalinn/src/auth/AuthTypes.affine new file mode 100644 index 0000000..de16fe7 --- /dev/null +++ b/container-stack/svalinn/src/auth/AuthTypes.affine @@ -0,0 +1,162 @@ +// SPDX-License-Identifier: PMPL-1.0-or-later +// AuthTypes.affine — authentication/authorization data types. +// Faithful port of AuthTypes.res. ReScript variants → nominal enums; +// records → structs; Belt.Map defaults → builder functions. + +module AuthTypes; + +pub enum AuthMethod { + OAuth2, + OIDC, + ApiKey, + MTLS, + NoAuth // ReScript `None` (renamed: Option::None collision) +} + +pub fn auth_method_to_string(m: AuthMethod) -> String { + match m { + OAuth2 => "oauth2", + OIDC => "oidc", + ApiKey => "api-key", + MTLS => "mtls", + NoAuth => "none" + } +} + +pub fn auth_method_from_string(s: String) -> Option<AuthMethod> { + match s { + "oauth2" => Some(OAuth2), + "oidc" => Some(OIDC), + "api-key" => Some(ApiKey), + "mtls" => Some(MTLS), + "none" => Some(NoAuth), + _ => None + } +} + +pub enum PermissionAction { + Create, + Read, + Update, + Delete, + Execute +} + +pub fn permission_action_to_string(a: PermissionAction) -> String { + match a { + Create => "create", + Read => "read", + Update => "update", + Delete => "delete", + Execute => "execute" + } +} + +pub fn permission_action_from_string(s: String) -> Option<PermissionAction> { + match s { + "create" => Some(Create), + "read" => Some(Read), + "update" => Some(Update), + "delete" => Some(Delete), + "execute" => Some(Execute), + _ => 
None + } +} + +pub struct Permission { + resource: String, + actions: [PermissionAction] +} + +pub struct Role { + name: String, + permissions: [Permission], + description: Option<String> +} + +pub struct ApiKeyInfo { + id: String, + name: String, + scopes: [String], + created_at: String, + expires_at: Option<String>, + rate_limit: Option<Int> +} + +// Decoded JWT payload (claims kept as a host Json handle). +pub struct TokenPayload { + sub: String, + iss: String, + exp: Int, + iat: Int, + scope: Option<String>, + email: Option<String>, + name: Option<String>, + groups: [String], + claims: Int // host Json arena handle +} + +pub struct AuthResult { + authenticated: Bool, + method: AuthMethod, + subject: Option<String>, + scopes: [String], + error: Option<String> +} + +pub struct UserContext { + id: String, + email: Option<String>, + name: Option<String>, + groups: [String], + scopes: [String], + method: AuthMethod, + issued_at: Int, + expires_at: Option<Int> +} + +fn perm(resource: String, actions: [PermissionAction]) -> Permission { + Permission { resource: resource, actions: actions } +} + +// Default RBAC roles (== AuthTypes.res defaultRoles). 
+pub fn default_roles() -> [Role] { + [ + Role { + name: "admin", + description: Some("Full access to all resources"), + permissions: [perm("*", [Create, Read, Update, Delete, Execute])] + }, + Role { + name: "operator", + description: Some("Can manage containers but not policies"), + permissions: [ + perm("containers", [Create, Read, Update, Delete, Execute]), + perm("images", [Read]), + perm("policies", [Read]) + ] + }, + Role { + name: "viewer", + description: Some("Read-only access"), + permissions: [ + perm("containers", [Read]), + perm("images", [Read]), + perm("policies", [Read]) + ] + }, + Role { + name: "auditor", + description: Some("Can view logs and audit trail"), + permissions: [ + perm("containers", [Read]), + perm("logs", [Read]), + perm("audit", [Read]) + ] + } + ] +} + +pub fn default_excluded_paths() -> [String] { + ["/healthz", "/health", "/ready", "/metrics", "/.well-known/"] +} diff --git a/container-stack/svalinn/src/auth/Authz.affine b/container-stack/svalinn/src/auth/Authz.affine new file mode 100644 index 0000000..8b787cd --- /dev/null +++ b/container-stack/svalinn/src/auth/Authz.affine @@ -0,0 +1,136 @@ +// SPDX-License-Identifier: PMPL-1.0-or-later +// Authz.affine — pure authentication/authorization decision logic. +// +// Extracted from the pure parts of JWT.res / Middleware.res. The I/O- and +// crypto-bound parts stay in the Deno host (inherently non-WASM): +// * JWT signature verification + JWKS fetch (Web Crypto, fetch) +// * OAuth2 token/refresh/introspect/revoke HTTP flows +// * secure random state/nonce (crypto.getRandomValues), base64url +// The host calls these pure functions for every claim/scope/key check so +// the security rules live in one verifiable place. + +module Authz; + +use AuthTypes; + +// JWT standard-claim validation (JWT.res verifyJWT, minus the signature +// step). `audiences` is the already-normalised aud list; `now` is epoch +// seconds from the host. Returns None on success, Some(reason) on failure. 
+pub fn validate_claims( + exp: Int, iat: Int, iss: String, + expected_iss: String, audiences: [String], client_id: String, + now: Int +) -> Option<String> { + if exp < now { return Some("Token expired"); } + if iat > now + 60 { return Some("Token issued in the future"); } + if iss != expected_iss { + return Some("Invalid issuer: expected " + expected_iss + ", got " + iss); + } + let i = 0; + let aud_ok = false; + while i < len(audiences) { + if audiences[i] == client_id { aud_ok = true; } + i = i + 1; + } + if !aud_ok { return Some("Invalid audience"); } + None +} + +fn contains(xs: [String], target: String) -> Bool { + let i = 0; + let found = false; + while i < len(xs) { + if xs[i] == target { found = true; } + i = i + 1; + } + found +} + +// Middleware.res requireScopes: a scope is satisfied if the user has it +// directly or holds the "svalinn:admin" super-scope. Returns the missing +// scopes (empty == authorized). +pub fn missing_scopes(user_scopes: [String], required: [String]) -> [String] { + if contains(user_scopes, "svalinn:admin") { return []; } + let missing: [String] = []; + let i = 0; + while i < len(required) { + if !contains(user_scopes, required[i]) { + missing = missing + [required[i]]; + } + i = i + 1; + } + missing +} + +// Middleware.res requireGroups: membership in at least one required group. +pub fn has_required_group(user_groups: [String], required: [String]) -> Bool { + let i = 0; + let ok = false; + while i < len(required) { + if contains(user_groups, required[i]) { ok = true; } + i = i + 1; + } + ok +} + +// Split a space-delimited OAuth scope string into a list (payload.scope). 
+pub fn split_scopes(scope: String) -> [String] { + let out: [String] = []; + let cur = ""; + let i = 0; + while i < len(scope) { + let ch = string_sub(scope, i, 1); + if ch == " " { + if len(cur) > 0 { out = out + [cur]; cur = ""; } + } else { + cur = cur + ch; + } + i = i + 1; + } + if len(cur) > 0 { out = out + [cur]; } + out +} + +// Middleware.res authenticateApiKey: strip a configured key prefix. +pub fn strip_api_key_prefix(api_key: String, prefix: String) -> String { + let plen = len(prefix); + if plen > 0 && len(api_key) >= plen && string_sub(api_key, 0, plen) == prefix { + string_sub(api_key, plen, len(api_key) - plen) + } else { + api_key + } +} + +// API key expiry check. `expires_at_ms`==0 means "no expiry". +pub fn is_api_key_expired(expires_at_ms: Int, now_ms: Int) -> Bool { + expires_at_ms != 0 && expires_at_ms < now_ms +} + +// Middleware.res authenticateMTLS: extract CN=... from a client-cert DN. +pub fn parse_cn_from_dn(dn: String) -> String { + let marker = "CN="; + let idx = string_find(dn, marker); + if idx < 0 { return dn; } + let start = idx + 3; + let cn = ""; + let i = start; + while i < len(dn) { + let ch = string_sub(dn, i, 1); + if ch == "," { i = len(dn); } else { cn = cn + ch; i = i + 1; } + } + if len(cn) > 0 { cn } else { dn } +} + +// JWT.res getAlgorithm: map a JWT `alg` to a Web Crypto algorithm name the +// host's crypto.subtle.importKey/verify understands. Empty == unsupported. 
+pub fn webcrypto_algorithm(alg: String) -> String { + match alg { + "RS256" => "RSASSA-PKCS1-v1_5:SHA-256", + "RS384" => "RSASSA-PKCS1-v1_5:SHA-384", + "RS512" => "RSASSA-PKCS1-v1_5:SHA-512", + "ES256" => "ECDSA:P-256:SHA-256", + "ES384" => "ECDSA:P-384:SHA-384", + "ES512" => "ECDSA:P-521:SHA-512", + _ => "" + } +} diff --git a/container-stack/svalinn/src/gateway/GatewayTypes.affine b/container-stack/svalinn/src/gateway/GatewayTypes.affine new file mode 100644 index 0000000..5519623 --- /dev/null +++ b/container-stack/svalinn/src/gateway/GatewayTypes.affine @@ -0,0 +1,102 @@ +// SPDX-License-Identifier: PMPL-1.0-or-later +// GatewayTypes.affine — Gateway data types for the Svalinn edge shield. +// Ported from GatewayTypes.res. ReScript polymorphic-variant string tags +// (@as("running") etc.) become an explicit enum plus string conversions, +// since AffineScript enums are nominal WasmGC types. + +module GatewayTypes; + +pub enum ContainerState { + Created, + Running, + Paused, + Stopped, + Removed +} + +pub fn container_state_to_string(s: ContainerState) -> String { + match s { + Created => "created", + Running => "running", + Paused => "paused", + Stopped => "stopped", + Removed => "removed" + } +} + +pub fn container_state_from_string(s: String) -> Option<ContainerState> { + match s { + "created" => Some(Created), + "running" => Some(Running), + "paused" => Some(Paused), + "stopped" => Some(Stopped), + "removed" => Some(Removed), + _ => None + } +} + +pub struct ContainerInfo { + id: String, + name: String, + image: String, + image_digest: String, + state: ContainerState, + policy_verdict: String, + created_at: Option<String>, + started_at: Option<String> +} + +pub struct ImageInfo { + name: String, + tag: String, + digest: String, + verified: Bool, + size: Option<Int> +} + +pub struct VerifyRequest { + image_ref: String, + check_sbom: Option<Bool>, + check_signature: Option<Bool> +} + +pub struct SbomInfo { + format: String, + vulnerabilities: Int, + critical: Int, + high: Int +} + +pub struct SignatureInfo 
{ + valid: Bool, + signer: Option<String>, + timestamp: Option<String> +} + +pub struct VerificationResult { + verified: Bool, + image_ref: String, + digest: String, + sbom: Option<SbomInfo>, + signature: Option<SignatureInfo> +} + +pub struct HealthResponse { + status: String, + version: String, + vordr_connected: Bool, + timestamp: String +} + +pub struct ErrorResponse { + code: String, + message: String +} + +// ReScript `apiResponse<'a>` was generic over the success payload. WasmGC +// enums are monomorphic here; callers use module-specific result structs +// and this tag enum to signal success vs. error. +pub enum ApiStatus { + ApiOk, + ApiError +} diff --git a/container-stack/svalinn/src/gateway/Metrics.affine b/container-stack/svalinn/src/gateway/Metrics.affine new file mode 100644 index 0000000..a348bfb --- /dev/null +++ b/container-stack/svalinn/src/gateway/Metrics.affine @@ -0,0 +1,78 @@ +// SPDX-License-Identifier: PMPL-1.0-or-later +// Metrics.affine — Prometheus text exposition formatting. +// +// Faithful port of Metrics.res. Counter/gauge/histogram *state* was +// mutable refs incremented per request; under the WASM/host split the +// host owns the running totals (cheap integer/float counters) and calls +// the pure formatters here to render the /metrics body. Vordr's +// container-count fetch stays in the host (I/O). + +module Metrics; + +fn counter_block(name: String, help: String, value: Float) -> String { + "# HELP " + name + " " + help + "\n" + + "# TYPE " + name + " counter\n" + + name + " " + float_to_string(value) + "\n" +} + +fn gauge_block(name: String, help: String, value: Float) -> String { + "# HELP " + name + " " + help + "\n" + + "# TYPE " + name + " gauge\n" + + name + " " + float_to_string(value) + "\n" +} + +// Cumulative histogram (Prometheus buckets are cumulative). 
+fn histogram_block(
+    name: String, help: String,
+    buckets: [Float], counts: [Float],
+    sum: Float, count: Float
+) -> String {
+    // HELP/TYPE preamble, then one cumulative line per bucket bound.
+    let body = "# HELP " + name + " " + help + "\n" +
+               "# TYPE " + name + " histogram\n";
+    let running_total = 0.0;
+    let idx = 0;
+    while idx < len(buckets) {
+        // A counts array shorter than buckets contributes 0 for the tail.
+        let increment = if idx < len(counts) { counts[idx] } else { 0.0 };
+        running_total = running_total + increment;
+        body = body + name + "_bucket{le=\"" + float_to_string(buckets[idx]) +
+               "\"} " + float_to_string(running_total) + "\n";
+        idx = idx + 1;
+    }
+    // The +Inf bucket carries the grand total, then the _sum/_count pair.
+    body = body + name + "_bucket{le=\"+Inf\"} " + float_to_string(count) + "\n" +
+           name + "_sum " + float_to_string(sum) + "\n" +
+           name + "_count " + float_to_string(count) + "\n";
+    body
+}
+
+// Render the full exposition body. Values are the host's running totals.
+pub fn format_prometheus(
+    requests_total: Float,
+    requests_errors_total: Float,
+    auth_failures_total: Float,
+    hist_buckets: [Float],
+    hist_counts: [Float],
+    hist_sum: Float,
+    hist_count: Float,
+    containers_active: Float
+) -> String {
+    // Render each metric family once, then join with blank separator lines.
+    let requests = counter_block("svalinn_requests_total",
+        "Total HTTP requests received", requests_total);
+    let errors = counter_block("svalinn_requests_errors_total",
+        "Total HTTP request errors (5xx)", requests_errors_total);
+    let auth_failures = counter_block("svalinn_auth_failures_total",
+        "Total authentication failures", auth_failures_total);
+    let durations = histogram_block("svalinn_request_duration_seconds",
+        "HTTP request duration in seconds",
+        hist_buckets, hist_counts, hist_sum, hist_count);
+    let active = gauge_block("svalinn_containers_active",
+        "Number of currently active containers", containers_active);
+    requests + "\n" + errors + "\n" + auth_failures + "\n" +
+    durations + "\n" + active
+}
+
+// Default duration buckets (seconds), matching Metrics.res.
+pub fn default_buckets() -> [Float] { + [0.01, 0.05, 0.1, 0.5, 1.0, 5.0] +} diff --git a/container-stack/svalinn/src/gateway/RateLimiter.affine b/container-stack/svalinn/src/gateway/RateLimiter.affine new file mode 100644 index 0000000..e8711c7 --- /dev/null +++ b/container-stack/svalinn/src/gateway/RateLimiter.affine @@ -0,0 +1,75 @@ +// SPDX-License-Identifier: PMPL-1.0-or-later +// RateLimiter.affine — sliding-window rate-limit decision core. +// +// Faithful port of RateLimiter.res. The ReScript version held per-IP +// mutable state in a Js.Dict and read wall-clock time via Js.Date.now(). +// Under the WASM/host split the host owns the per-IP map and the clock; +// this module is the pure decision given a client's current window state. + +module RateLimiter; + +pub struct Config { + window_ms: Int, + max_requests: Int +} + +pub fn default_config() -> Config { + Config { window_ms: 60000, max_requests: 100 } +} + +// Outcome of a check. `count`/`window_start` are the values the host must +// persist back into its per-IP map for the next request. +pub struct Decision { + allowed: Bool, + count: Int, + window_start: Int, + remaining: Int, + retry_after: Int +} + +fn ceil_div(a: Int, b: Int) -> Int { + if a <= 0 { 0 } else { (a + b - 1) / b } +} + +// `count`/`window_start` are the stored state for this IP (0/0 if new). +// `now_ms` is supplied by the host (epoch milliseconds). +pub fn check(cfg: Config, count: Int, window_start: Int, now_ms: Int) -> Decision { + // New client, or expired window → start a fresh window. 
+ if window_start == 0 || now_ms - window_start > cfg.window_ms { + return Decision { + allowed: true, + count: 1, + window_start: now_ms, + remaining: cfg.max_requests - 1, + retry_after: 0 + }; + } + + if count < cfg.max_requests { + let new_count = count + 1; + let rem = cfg.max_requests - new_count; + return Decision { + allowed: true, + count: new_count, + window_start: window_start, + remaining: if rem < 0 { 0 } else { rem }, + retry_after: 0 + }; + } + + // Limited. + let reset_at = window_start + cfg.window_ms; + let ms_left = reset_at - now_ms; + Decision { + allowed: false, + count: count, + window_start: window_start, + remaining: 0, + retry_after: if ms_left > 0 { ceil_div(ms_left, 1000) } else { 0 } + } +} + +// Whether the host should evict this IP's entry (older than 2× window). +pub fn is_stale(cfg: Config, window_start: Int, now_ms: Int) -> Bool { + window_start != 0 && now_ms - window_start > cfg.window_ms * 2 +} diff --git a/container-stack/svalinn/src/gateway/SecurityHeaders.affine b/container-stack/svalinn/src/gateway/SecurityHeaders.affine new file mode 100644 index 0000000..e9b3a9e --- /dev/null +++ b/container-stack/svalinn/src/gateway/SecurityHeaders.affine @@ -0,0 +1,64 @@ +// SPDX-License-Identifier: PMPL-1.0-or-later +// SecurityHeaders.affine — OWASP security/CORS/rate-limit response headers. +// +// Faithful port of SecurityHeaders.res. The ReScript version threaded a +// Hono context; here the headers are pure data the Deno host applies to +// every outgoing Response. Each builder returns a JSON object handle of +// { headerName: value, ... } via the host Json arena. + +module SecurityHeaders; + +use Json; + +fn put_header(obj: Int, name: String, value: String) -> () { + json_set(obj, name, json_of_string(value)); +} + +// OWASP baseline applied to every response. 
+pub fn security_headers() -> Int {
+    // One header object per response; the host copies it onto the Response.
+    let headers = json_new_object();
+    put_header(headers, "Strict-Transport-Security",
+        "max-age=31536000; includeSubDomains; preload");
+    put_header(headers, "X-Frame-Options", "DENY");
+    put_header(headers, "X-Content-Type-Options", "nosniff");
+    put_header(headers, "X-XSS-Protection", "1; mode=block");
+    put_header(headers, "Content-Security-Policy",
+        "default-src 'self'; script-src 'self'; style-src 'self'; " +
+        "img-src 'self' data:; font-src 'self'; connect-src 'self'; " +
+        "frame-ancestors 'none'; base-uri 'self'; form-action 'self'");
+    put_header(headers, "Referrer-Policy", "strict-origin-when-cross-origin");
+    put_header(headers, "Permissions-Policy",
+        "geolocation=(), microphone=(), camera=(), payment=(), usb=()");
+    headers
+}
+
+// CORS headers for API endpoints (specific origin, never wildcard).
+pub fn cors_headers(allowed_origin: String) -> Int {
+    let headers = json_new_object();
+    put_header(headers, "Access-Control-Allow-Origin", allowed_origin);
+    put_header(headers, "Access-Control-Allow-Credentials", "true");
+    put_header(headers, "Access-Control-Allow-Methods",
+        "GET, POST, PUT, DELETE, OPTIONS");
+    put_header(headers, "Access-Control-Allow-Headers",
+        "Content-Type, Authorization, X-Request-ID");
+    put_header(headers, "Access-Control-Max-Age", "3600");
+    headers
+}
+
+// Rate-limit headers (values computed by RateLimiter / host).
+pub fn rate_limit_headers(limit: Int, remaining: Int, reset_at: Int) -> Int {
+    let headers = json_new_object();
+    put_header(headers, "X-RateLimit-Limit", int_to_string(limit));
+    put_header(headers, "X-RateLimit-Remaining", int_to_string(remaining));
+    put_header(headers, "X-RateLimit-Reset", int_to_string(reset_at));
+    headers
+}
+
+// Security headers + no-store cache directives for error responses.
+pub fn error_headers() -> Int { + let h = security_headers(); + put_header(h, "Cache-Control", "no-store, no-cache, must-revalidate"); + put_header(h, "Pragma", "no-cache"); + put_header(h, "Expires", "0"); + h +} diff --git a/container-stack/svalinn/src/host/Json.affine b/container-stack/svalinn/src/host/Json.affine new file mode 100644 index 0000000..458e28d --- /dev/null +++ b/container-stack/svalinn/src/host/Json.affine @@ -0,0 +1,39 @@ +// SPDX-License-Identifier: PMPL-1.0-or-later +// Json.affine — host-boundary JSON access for Svalinn. +// +// AffineScript compiles to typed WasmGC; it has no in-language JSON value +// type. JSON parsing/serialisation is provided by the Deno host bridge +// (src/host/affine_host.ts) through these extern functions. A JSON value +// is referenced by an opaque integer handle owned by the host arena. + +module Json; + +// Parse a JSON document; returns a handle (0 == parse error / null). +pub extern fn json_parse(text: String) -> Int; + +// Handle type discriminators (host contract): 0 null, 1 bool, 2 number, +// 3 string, 4 array, 5 object. +pub extern fn json_kind(handle: Int) -> Int; + +pub extern fn json_as_bool(handle: Int) -> Bool; +pub extern fn json_as_int(handle: Int) -> Int; +pub extern fn json_as_string(handle: Int) -> String; + +// Object field access by key; returns child handle (0 == absent). +pub extern fn json_get(handle: Int, key: String) -> Int; + +// Array access. +pub extern fn json_len(handle: Int) -> Int; +pub extern fn json_at(handle: Int, index: Int) -> Int; + +// Builders (host allocates a new handle and returns it). 
+pub extern fn json_new_object() -> Int; +pub extern fn json_new_array() -> Int; +pub extern fn json_set(object: Int, key: String, value: Int) -> (); +pub extern fn json_push(array: Int, value: Int) -> (); +pub extern fn json_of_bool(value: Bool) -> Int; +pub extern fn json_of_int(value: Int) -> Int; +pub extern fn json_of_string(value: String) -> Int; + +// Serialise a handle back to a string. +pub extern fn json_stringify(handle: Int) -> String; diff --git a/container-stack/svalinn/src/host/affine_host.js b/container-stack/svalinn/src/host/affine_host.js new file mode 100644 index 0000000..2bdf60f --- /dev/null +++ b/container-stack/svalinn/src/host/affine_host.js @@ -0,0 +1,211 @@ +// SPDX-License-Identifier: PMPL-1.0-or-later +// affine_host.js — Deno host bridge for the AffineScript/typed-wasm build. +// +// Plain JavaScript (svalinn language policy: JS is permitted for Deno-API +// glue; TypeScript is not). AffineScript compiles to typed WasmGC: WASM +// cannot own sockets, env, the filesystem, crypto or a JSON value type, so +// the host supplies every `pub extern fn` declared in the .affine sources, +// owns the HTTP listener, and marshals values across the boundary. WASM +// loading + value marshalling reuse the upstream @hyperpolymath/affine-js +// bridge, exactly as affinescript-deno-test does. +// +// Phase 1 wires the policy-evaluation path (Main.handle_evaluate). Routes +// backed by not-yet-ported modules return 501 until their .affine ports +// land (see the migration status in the PR description). + +import { AffineModule } from "@hyperpolymath/affine-js"; + +// --- Host JSON arena ------------------------------------------------------- +// `Json.affine` references JSON values by opaque integer handle. The host +// keeps the real values here; 0 is reserved for null / absent / error. +const arena = [null]; +function put(v) { + arena.push(v); + return arena.length - 1; +} +const get = (h) => arena[h] ?? 
null; + +function kind(v) { + if (v === null || v === undefined) return 0; + if (typeof v === "boolean") return 1; + if (typeof v === "number") return 2; + if (typeof v === "string") return 3; + if (Array.isArray(v)) return 4; + if (typeof v === "object") return 5; + return 0; +} + +// Host imports satisfying the .affine `extern` declarations. `affine-js` +// passes/returns AffineScript values; strings and ints marshal directly. +function hostImports() { + return { + // Json.affine + json_parse: (t) => { + try { return put(JSON.parse(t)); } catch { return 0; } + }, + json_kind: (h) => kind(get(h)), + json_as_bool: (h) => (get(h) === true), + json_as_int: (h) => Math.trunc(Number(get(h) ?? 0)), + json_as_string: (h) => String(get(h) ?? ""), + json_get: (h, k) => { + const o = get(h); + return o && typeof o === "object" && !Array.isArray(o) && k in o + ? put(o[k]) : 0; + }, + json_len: (h) => { + const a = get(h); + return Array.isArray(a) ? a.length : 0; + }, + json_at: (h, i) => { + const a = get(h); + return Array.isArray(a) && i >= 0 && i < a.length ? put(a[i]) : 0; + }, + json_new_object: () => put({}), + json_new_array: () => put([]), + json_set: (o, k, v) => { + const obj = get(o); + if (obj && typeof obj === "object") obj[k] = get(v); + }, + json_push: (a, v) => { + const arr = get(a); + if (Array.isArray(arr)) arr.push(get(v)); + }, + json_of_bool: (v) => put(v), + json_of_int: (v) => put(Math.trunc(v)), + json_of_string: (v) => put(v), + json_stringify: (h) => JSON.stringify(get(h) ?? null), + + // io.affine builtins used by the ported modules + read_file: (p) => { + try { return { ok: Deno.readTextFileSync(p) }; } + catch (e) { return { err: String(e) }; } + }, + getenv: (n) => Deno.env.get(n) ?? null, + + // Wall-clock epoch milliseconds for RateLimiter.affine. + now_ms: () => Date.now(), + + // WASI stub: AffineScript codegen imports fd_write unconditionally. 
+ fd_write: (_fd, _iovs, _n, _ret) => 0, + }; +} + +const WASM_DIR = new URL("../../dist/wasm/", import.meta.url); + +async function loadModule(name) { + const bytes = await Deno.readFile(new URL(`${name}.wasm`, WASM_DIR)); + return await AffineModule.instantiate(bytes, hostImports()); +} + +// --- HTTP listener (host-owned) ------------------------------------------- + +const PORT = Number(Deno.env.get("SVALINN_PORT") ?? "8000"); + +const main = await loadModule("Main"); +const secHeaders = await loadModule("SecurityHeaders"); +const rateLimiter = await loadModule("RateLimiter"); +const metrics = await loadModule("Metrics"); + +if (main.call("serve") !== 0) { + console.error("svalinn: Main.serve() reported a fatal wiring error"); + Deno.exit(1); +} + +// Host-owned mutable state the .affine decision/format cores operate on. +const rlConfig = rateLimiter.call("default_config"); +const rlState = new Map(); // clientIp -> { count, windowStart } +const counters = { requestsTotal: 0, errorsTotal: 0, authFailuresTotal: 0 }; +const hist = { + buckets: get(metrics.call("default_buckets")), + counts: [0, 0, 0, 0, 0, 0], + sum: 0, + count: 0, +}; +// Gauges the host mutates in place (the Vordr container-count refresh, +// a tracked follow-up, will update gauges.containersActive). +const gauges = { containersActive: 0 }; + +// Apply the .affine security-header set to a Response. +function withSecurityHeaders(resp) { + const hdrs = get(secHeaders.call("security_headers")); + for (const [k, v] of Object.entries(hdrs)) resp.headers.set(k, v); + return resp; +} + +function jsonResponse(handle, status = 200) { + return withSecurityHeaders( + new Response(JSON.stringify(get(handle) ?? null), { + status, + headers: { "content-type": "application/json" }, + }), + ); +} + +function clientIp(req) { + const xff = req.headers.get("X-Forwarded-For"); + if (xff) return xff.split(",")[0].trim(); + return req.headers.get("X-Real-IP") ?? 
"unknown"; +} + +function rateLimit(req) { + const ip = clientIp(req); + const prev = rlState.get(ip) ?? { count: 0, windowStart: 0 }; + const d = get( + rateLimiter.call("check", rlConfig, prev.count, prev.windowStart, Date.now()), + ); + rlState.set(ip, { count: d.count, windowStart: d.window_start }); + return d; +} + +Deno.serve({ port: PORT }, async (req) => { + const url = new URL(req.url); + counters.requestsTotal += 1; + + const rl = rateLimit(req); + if (!rl.allowed) { + counters.errorsTotal += 1; + const resp = jsonResponse( + put({ error: "Rate limit exceeded", retryAfter: rl.retry_after }), + 429, + ); + resp.headers.set("Retry-After", String(rl.retry_after)); + resp.headers.set("X-RateLimit-Remaining", "0"); + return resp; + } + + if (url.pathname === "/healthz") { + return jsonResponse(put({ status: "ok" })); + } + + if (url.pathname === "/metrics") { + const body = metrics.call( + "format_prometheus", + counters.requestsTotal, + counters.errorsTotal, + counters.authFailuresTotal, + put(hist.buckets), + put(hist.counts), + hist.sum, + hist.count, + gauges.containersActive, + ); + return withSecurityHeaders( + new Response(body, { headers: { "content-type": "text/plain; version=0.0.4" } }), + ); + } + + if (url.pathname === "/v1/policy/evaluate" && req.method === "POST") { + const text = await req.text(); + const bodyHandle = put(text === "" ? {} : JSON.parse(text)); + const respHandle = main.call("handle_evaluate", bodyHandle); + return jsonResponse(respHandle); + } + + // Routes backed by not-yet-ported .affine modules (auth, mcp, vordr, + // gateway router, …). See PR description for per-module status. 
+ counters.errorsTotal += 1; + return jsonResponse( + put({ code: "not_implemented", message: `${url.pathname} pending .affine port` }), + 501, + ); +}); diff --git a/container-stack/svalinn/src/policy/PolicyEngine.affine b/container-stack/svalinn/src/policy/PolicyEngine.affine new file mode 100644 index 0000000..5f451b5 --- /dev/null +++ b/container-stack/svalinn/src/policy/PolicyEngine.affine @@ -0,0 +1,301 @@ +// SPDX-License-Identifier: PMPL-1.0-or-later +// PolicyEngine.affine — Gatekeeper policy evaluation for Svalinn. +// +// Faithful port of PolicyEngine.res. The pure evaluation core +// (`evaluate`) is unchanged in behaviour. JSON decoding/encoding, which +// used ReScript's Js.Json + Belt, now crosses the host boundary via the +// Json extern module (handles are opaque host-arena integers). + +module PolicyEngine; + +use Json; + +pub enum PolicyMode { + Strict, // Reject failures + Permissive // Warn and continue +} + +pub fn policy_mode_from_string(s: String) -> Option { + match s { + "strict" => Some(Strict), + "permissive" => Some(Permissive), + _ => None + } +} + +pub fn policy_mode_to_string(m: PolicyMode) -> String { + match m { + Strict => "strict", + Permissive => "permissive" + } +} + +pub struct Policy { + version: Int, + required_predicates: [String], + allowed_signers: [String], + log_quorum: Int, + mode: Option, + notes: Option +} + +pub struct Attestation { + predicate_type: String, + subject: [String], + signer: String, + log_entry: Option +} + +pub struct EvaluationResult { + allowed: Bool, + mode: PolicyMode, + predicates_found: [String], + missing_predicates: [String], + signers_verified: [String], + invalid_signers: [String], + log_count: Int, + log_quorum_met: Bool, + violations: [String], + warnings: [String] +} + +// --------------------------------------------------------------------------- +// Small list helpers (Belt.Array equivalents) +// --------------------------------------------------------------------------- + +fn 
contains(xs: [String], target: String) -> Bool { + let i = 0; + let found = false; + while i < len(xs) { + if xs[i] == target { found = true; } + i = i + 1; + } + found +} + +fn push_unique(acc: [String], value: String) -> [String] { + if contains(acc, value) { acc } else { acc + [value] } +} + +// --------------------------------------------------------------------------- +// JSON decoding (via host Json arena) +// --------------------------------------------------------------------------- + +fn decode_string_array(handle: Int) -> [String] { + let out: [String] = []; + if json_kind(handle) == 4 { + let i = 0; + while i < json_len(handle) { + let item = json_at(handle, i); + if json_kind(item) == 3 { out = out + [json_as_string(item)]; } + i = i + 1; + } + } + out +} + +pub fn parse_policy(doc: Int) -> Option { + if json_kind(doc) != 5 { return None; } + + let version_h = json_get(doc, "version"); + let quorum_h = json_get(doc, "logQuorum"); + if version_h == 0 || quorum_h == 0 { return None; } + + let mode_h = json_get(doc, "mode"); + let mode = if mode_h != 0 && json_kind(mode_h) == 3 { + policy_mode_from_string(json_as_string(mode_h)) + } else { None }; + + let notes_h = json_get(doc, "notes"); + let notes = if notes_h != 0 && json_kind(notes_h) == 3 { + Some(json_as_string(notes_h)) + } else { None }; + + Some(Policy { + version: json_as_int(version_h), + required_predicates: decode_string_array(json_get(doc, "requiredPredicates")), + allowed_signers: decode_string_array(json_get(doc, "allowedSigners")), + log_quorum: json_as_int(quorum_h), + mode: mode, + notes: notes + }) +} + +pub fn parse_attestation(doc: Int) -> Option { + if json_kind(doc) != 5 { return None; } + + let pt_h = json_get(doc, "predicateType"); + let sg_h = json_get(doc, "signer"); + if pt_h == 0 || sg_h == 0 { return None; } + if json_kind(pt_h) != 3 || json_kind(sg_h) != 3 { return None; } + + let le_h = json_get(doc, "logEntry"); + let log_entry = if le_h != 0 && json_kind(le_h) == 3 { + 
Some(json_as_string(le_h)) + } else { None }; + + Some(Attestation { + predicate_type: json_as_string(pt_h), + subject: decode_string_array(json_get(doc, "subject")), + signer: json_as_string(sg_h), + log_entry: log_entry + }) +} + +// --------------------------------------------------------------------------- +// Pure evaluation (behaviour-identical to PolicyEngine.res `evaluate`) +// --------------------------------------------------------------------------- + +pub fn evaluate(policy: Policy, attestations: [Attestation]) -> EvaluationResult { + let mode = match policy.mode { + Some(m) => m, + None => Strict + }; + + let predicates_found: [String] = []; + let signers_verified: [String] = []; + let invalid_signers: [String] = []; + let log_count = 0; + + let i = 0; + while i < len(attestations) { + let att = attestations[i]; + + if contains(policy.required_predicates, att.predicate_type) { + predicates_found = push_unique(predicates_found, att.predicate_type); + } + + if contains(policy.allowed_signers, att.signer) { + signers_verified = push_unique(signers_verified, att.signer); + } else { + invalid_signers = push_unique(invalid_signers, att.signer); + } + + match att.log_entry { + Some(_) => { log_count = log_count + 1; }, + None => {} + } + i = i + 1; + } + + let missing_predicates: [String] = []; + let j = 0; + while j < len(policy.required_predicates) { + let pred = policy.required_predicates[j]; + if !contains(predicates_found, pred) { + missing_predicates = missing_predicates + [pred]; + } + j = j + 1; + } + + let log_quorum_met = log_count >= policy.log_quorum; + + let violations: [String] = []; + let k = 0; + while k < len(missing_predicates) { + violations = violations + ["Missing required predicate: " + missing_predicates[k]]; + k = k + 1; + } + let s = 0; + while s < len(invalid_signers) { + violations = violations + ["Invalid signer: " + invalid_signers[s]]; + s = s + 1; + } + if !log_quorum_met { + violations = violations + + ["Log quorum not met: " + 
int_to_string(log_count) + + " < " + int_to_string(policy.log_quorum)]; + } + + let allowed = match mode { + Strict => len(violations) == 0, + Permissive => true + }; + + let warnings: [String] = match mode { + Permissive => if len(violations) > 0 { violations } else { [] }, + Strict => [] + }; + + EvaluationResult { + allowed: allowed, + mode: mode, + predicates_found: predicates_found, + missing_predicates: missing_predicates, + signers_verified: signers_verified, + invalid_signers: invalid_signers, + log_count: log_count, + log_quorum_met: log_quorum_met, + violations: violations, + warnings: warnings + } +} + +// --------------------------------------------------------------------------- +// JSON encoding of a result (via host Json builders) +// --------------------------------------------------------------------------- + +fn json_string_array(xs: [String]) -> Int { + let arr = json_new_array(); + let i = 0; + while i < len(xs) { + json_push(arr, json_of_string(xs[i])); + i = i + 1; + } + arr +} + +pub fn format_result(r: EvaluationResult) -> Int { + let obj = json_new_object(); + json_set(obj, "allowed", json_of_bool(r.allowed)); + json_set(obj, "mode", json_of_string(policy_mode_to_string(r.mode))); + json_set(obj, "predicatesFound", json_string_array(r.predicates_found)); + json_set(obj, "missingPredicates", json_string_array(r.missing_predicates)); + json_set(obj, "signersVerified", json_string_array(r.signers_verified)); + json_set(obj, "invalidSigners", json_string_array(r.invalid_signers)); + json_set(obj, "logCount", json_of_int(r.log_count)); + json_set(obj, "logQuorumMet", json_of_bool(r.log_quorum_met)); + json_set(obj, "violations", json_string_array(r.violations)); + json_set(obj, "warnings", json_string_array(r.warnings)); + obj +} + +// --------------------------------------------------------------------------- +// Built-in policies +// --------------------------------------------------------------------------- + +pub fn default_policy() -> 
Policy { + Policy { + version: 1, + required_predicates: [ + "https://slsa.dev/provenance/v1", + "https://spdx.dev/Document" + ], + allowed_signers: [], + log_quorum: 1, + mode: Some(Strict), + notes: Some("Default strict policy") + } +} + +pub fn permissive_policy() -> Policy { + Policy { + version: 1, + required_predicates: [], + allowed_signers: [], + log_quorum: 0, + mode: Some(Permissive), + notes: Some("Permissive policy - accepts all bundles with warnings") + } +} + +// loadPolicy: read file (stdlib io builtin) then parse via host JSON. +pub fn load_policy(path: String) -> Option { + match read_file(path) { + Ok(content) => { + let doc = json_parse(content); + if doc == 0 { None } else { parse_policy(doc) } + }, + Err(_) => None + } +} diff --git a/container-stack/svalinn/src/vordr/Client.affine b/container-stack/svalinn/src/vordr/Client.affine new file mode 100644 index 0000000..a51e92e --- /dev/null +++ b/container-stack/svalinn/src/vordr/Client.affine @@ -0,0 +1,137 @@ +// SPDX-License-Identifier: PMPL-1.0-or-later +// Client.affine — pure Vörðr MCP JSON-RPC shaping. +// +// Faithful port of the pure parts of Client.res. The transport +// (Fetch POST to the Vörðr endpoint, /health ping) is inherently host +// I/O and stays in affine_host.js; this module builds the JSON-RPC +// request envelope, parses the MCP response, and constructs the +// per-tool argument objects — the parts worth verifying. + +module Client; + +use Json; +use VordrTypes; + +pub fn default_endpoint() -> String { "http://localhost:8080" } +pub fn default_timeout() -> Int { 30000 } + +// Monotonic request id. The host owns the counter and passes the prior +// value; this returns the next (Client.res nextId semantics). +pub fn next_id(prev: Int) -> Int { prev + 1 } + +// Build the `tools/call` JSON-RPC request envelope (Client.res callTool). 
+pub fn build_call_request(tool_name: String, args: Int, id: Int) -> Int {
+    let envelope = json_new_object();
+    json_set(envelope, "jsonrpc", json_of_string("2.0"));
+    json_set(envelope, "method", json_of_string("tools/call"));
+    // params: { name, arguments } — `args` is an existing host Json handle.
+    let params = json_new_object();
+    json_set(params, "name", json_of_string(tool_name));
+    json_set(params, "arguments", args);
+    json_set(envelope, "params", params);
+    json_set(envelope, "id", json_of_int(id));
+    envelope
+}
+
+// Parsed MCP response. `error_message` is "" when there is no error;
+// `result` is a host Json handle (0 == null/absent).
+pub struct McpOutcome {
+    error_message: String,
+    result: Int
+}
+
+// Parse + validate an MCP response object (Client.res mcpResponse decode
+// + the error/result unwrap that followed it).
+pub fn parse_mcp_response(resp: Int) -> McpOutcome {
+    if json_kind(resp) != 5 {
+        return McpOutcome {
+            error_message: "Invalid MCP response: expected JSON object",
+            result: 0
+        };
+    }
+
+    let err_h = json_get(resp, "error");
+    if err_h == 0 || json_kind(err_h) != 5 {
+        // No structured error member: surface the result handle (0 == absent).
+        return McpOutcome { error_message: "", result: json_get(resp, "result") };
+    }
+
+    // Error object present: prefer its "message" string, else a fallback.
+    let msg_h = json_get(err_h, "message");
+    let message = if msg_h != 0 && json_kind(msg_h) == 3 {
+        json_as_string(msg_h)
+    } else {
+        "Unknown error"
+    };
+    McpOutcome { error_message: message, result: 0 }
+}
+
+// ---- per-tool argument builders (Client.res arg objects) ----
+
+// Helper: a fresh object with a single string field.
+fn obj1_str(k: String, v: String) -> Int {
+    let o = json_new_object();
+    json_set(o, k, json_of_string(v));
+    o
+}
+
+pub fn args_container_id(container_id: String) -> Int {
+    obj1_str("containerId", container_id)
+}
+
+pub fn args_create(image: String, name: Option) -> Int {
+    let o = json_new_object();
+    json_set(o, "image", json_of_string(image));
+    match name {
+        Some(n) => { json_set(o, "name", json_of_string(n)); },
+        None => {}
+    }
+    // Hardened-by-default container config (matches Client.res).
+    let hardened = json_new_object();
+    json_set(hardened, "privileged", json_of_bool(false));
+    json_set(hardened, "readOnlyRoot", json_of_bool(true));
+    json_set(o, "config", hardened);
+    o
+}
+ +pub fn args_verify_image(image: String) -> Int { + let o = json_new_object(); + json_set(o, "image", json_of_string(image)); + json_set(o, "checkSbom", json_of_bool(true)); + json_set(o, "checkSignature", json_of_bool(true)); + o +} + +pub fn args_request_auth(operation: String, threshold: Int, signers: Int) -> Int { + let o = json_new_object(); + json_set(o, "operation", json_of_string(operation)); + json_set(o, "threshold", json_of_int(threshold)); + json_set(o, "signers", json_of_int(signers)); + o +} + +pub fn args_submit_signature(s: SignatureShare) -> Int { + let o = json_new_object(); + json_set(o, "requestId", json_of_string(s.request_id)); + json_set(o, "signature", json_of_string(s.signature)); + json_set(o, "signerId", json_of_string(s.signer_id)); + o +} + +pub fn args_monitor(c: MonitorConfig) -> Int { + let o = json_new_object(); + json_set(o, "containerId", json_of_string(c.container_id)); + json_set(o, "syscalls", json_of_bool(c.syscalls)); + json_set(o, "network", json_of_bool(c.network)); + json_set(o, "filesystem", json_of_bool(c.filesystem)); + o +} + +pub fn args_anomalies(container_id: String, severity: String) -> Int { + let o = json_new_object(); + json_set(o, "containerId", json_of_string(container_id)); + json_set(o, "severity", json_of_string(severity)); + o +} + +pub fn args_rollback(container_id: String, steps: Int) -> Int { + let o = json_new_object(); + json_set(o, "containerId", json_of_string(container_id)); + json_set(o, "steps", json_of_int(steps)); + o +} diff --git a/container-stack/svalinn/src/vordr/VordrTypes.affine b/container-stack/svalinn/src/vordr/VordrTypes.affine new file mode 100644 index 0000000..cff9338 --- /dev/null +++ b/container-stack/svalinn/src/vordr/VordrTypes.affine @@ -0,0 +1,104 @@ +// SPDX-License-Identifier: PMPL-1.0-or-later +// VordrTypes.affine — types for the Vörðr MCP client. +// Faithful port of VordrTypes.res. 
JSON-RPC param/result payloads that +// were Js.Json.t become host Json arena handles (Int). + +module VordrTypes; + +pub struct McpRequest { + jsonrpc: String, + method: String, + params: Int, // host Json handle + id: Int +} + +pub struct McpError { + code: Int, + message: String, + data: Option // host Json handle +} + +pub struct McpResponse { + jsonrpc: String, + result: Option, + error: Option, + id: Int +} + +pub struct ContainerConfig { + privileged: Bool, + read_only_root: Bool, + network_mode: Option, + memory: Option, + cpus: Option +} + +pub struct CreateContainerParams { + image: String, + name: Option, + config: ContainerConfig +} + +pub struct VerifyImageParams { + image: String, + check_sbom: Bool, + check_signature: Bool +} + +pub struct AuthorizationRequest { + operation: String, + threshold: Int, + signers: Int +} + +pub struct SignatureShare { + request_id: String, + signature: String, + signer_id: String +} + +pub struct MonitorConfig { + container_id: String, + syscalls: Bool, + network: Bool, + filesystem: Bool +} + +pub enum VContainerState { + VRunning, + VStopped, + VCreated +} + +pub struct VContainerInfo { + id: String, + name: String, + image: String, + image_digest: String, + state: VContainerState, + policy_verdict: String, + created_at: Option, + started_at: Option +} + +pub struct VImageInfo { + id: String, + tags: [String], + digest: String, + size: Int +} + +// Tool names (must match the Vörðr MCP adapter exactly). 
+// Container lifecycle.
+pub fn tool_container_create() -> String { "vordr_container_create" }
+pub fn tool_container_start() -> String { "vordr_container_start" }
+pub fn tool_container_stop() -> String { "vordr_container_stop" }
+pub fn tool_container_remove() -> String { "vordr_container_remove" }
+// Supply-chain verification.
+pub fn tool_verify_image() -> String { "vordr_verify_image" }
+pub fn tool_verify_config() -> String { "vordr_verify_config" }
+// Threshold authorization (note: the wire name is longer than the fn name).
+pub fn tool_request_auth() -> String { "vordr_request_authorization" }
+pub fn tool_submit_signature() -> String { "vordr_submit_signature" }
+// Runtime monitoring.
+pub fn tool_monitor_start() -> String { "vordr_monitor_start" }
+pub fn tool_monitor_stop() -> String { "vordr_monitor_stop" }
+pub fn tool_get_anomalies() -> String { "vordr_get_anomalies" }
+// Rollback.
+pub fn tool_rollback() -> String { "vordr_rollback" }
+pub fn tool_preview_rollback() -> String { "vordr_preview_rollback" }