diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 381d4e12..40d23a31 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout sources - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: submodules: true @@ -32,10 +32,13 @@ jobs: - name: Install protoc run: sudo provisioning/protoc.sh - - name: Setup just - uses: extractions/setup-just@v3 - with: - just-version: 1.40.0 + - name: Install just + run: cargo install just --version 1.40.0 --locked + env: + CARGO_TARGET_DIR: /tmp/cargo-install-just + + - name: Add cargo bin to PATH + run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH - name: Check compilation run: cargo check diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index ec98a1ff..a23cbd7e 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: name: Build Docusaurus runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 0 diff --git a/.github/workflows/release-gate.yml b/.github/workflows/release-gate.yml new file mode 100644 index 00000000..a6edff9a --- /dev/null +++ b/.github/workflows/release-gate.yml @@ -0,0 +1,74 @@ +name: Release Gate +on: + pull_request: + types: [closed] + branches: [main] + paths: ['.releases/**'] + +concurrency: + group: release-gate + cancel-in-progress: false + +jobs: + create-release-tag: + name: Create tag and dispatch release + runs-on: ubuntu-latest + timeout-minutes: 5 + if: github.event.pull_request.merged == true + permissions: + contents: write + actions: write + steps: + - name: Fail if App credentials are not configured + run: | + if [ -z "${{ secrets.APP_ID }}" ] || [ -z "${{ secrets.APP_PRIVATE_KEY }}" ]; then + echo "❌ APP_ID and APP_PRIVATE_KEY must be configured." + echo "For fork testing, install a personal GitHub App on the fork," + echo "create a private key, and add both as repository secrets." 
+ exit 1 + fi + + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + + - uses: actions/create-github-app-token@v3 + id: app-token + with: + app-id: ${{ secrets.APP_ID }} + private-key: ${{ secrets.APP_PRIVATE_KEY }} + + - uses: actions/setup-python@v6 + with: + python-version: '3.x' + + - name: Install Python deps + run: pip install pyyaml + + - name: Create tag from release request + id: gate + env: + GH_TOKEN: ${{ steps.app-token.outputs.token }} + REPO: ${{ github.repository }} + BASE_SHA: ${{ github.event.pull_request.base.sha }} + MERGE_SHA: ${{ github.event.pull_request.merge_commit_sha }} + run: | + python .github/workflows/release/release.py gate | tee /tmp/gate-output.txt + TAG=$(grep '^tag=' /tmp/gate-output.txt | tail -1 | cut -d= -f2-) + COMMIT=$(grep '^commit=' /tmp/gate-output.txt | tail -1 | cut -d= -f2-) + echo "tag=$TAG" >> $GITHUB_OUTPUT + echo "commit=$COMMIT" >> $GITHUB_OUTPUT + + if [ -z "$TAG" ] || [ -z "$COMMIT" ]; then + echo "❌ gate did not emit tag= / commit= outputs" + exit 1 + fi + echo "Gate outputs: $TAG @ $COMMIT" + + - name: Dispatch release workflow + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + gh workflow run release.yml \ + --ref main \ + -f tag=${{ steps.gate.outputs.tag }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index be779b85..4a1d7d0d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,18 +1,83 @@ name: Draft Release on: - push: - tags: - - "v*" + workflow_dispatch: + inputs: + tag: + description: 'Release tag (e.g. v1.2.3)' + required: true + type: string permissions: - contents: write - packages: write + contents: read + packages: read + id-token: none + actions: read + attestations: read + checks: read + deployments: read + issues: read + discussions: read + pull-requests: read + repository-projects: read + security-events: read + statuses: read + models: read + +# Job-level permissions grant write scopes only where required. 
+ +concurrency: + group: release-${{ inputs.tag }} + cancel-in-progress: false jobs: + resolve-tag: + runs-on: ubuntu-latest + timeout-minutes: 5 + outputs: + commit: ${{ steps.resolve.outputs.commit }} + steps: + - name: Checkout main + uses: actions/checkout@v6 + with: + ref: main + fetch-depth: 0 + + - name: Resolve tag to commit + id: resolve + run: | + git cat-file -e "refs/tags/${{ inputs.tag }}" 2>/dev/null || { + echo "❌ Tag ${{ inputs.tag }} does not exist" + exit 1 + } + COMMIT=$(git rev-parse "refs/tags/${{ inputs.tag }}^{}") + echo "commit=$COMMIT" >> $GITHUB_OUTPUT + echo "Resolved tag ${{ inputs.tag }} to commit $COMMIT" + + # Determines whether this tag should update :latest on GHCR. + # Runs once; the Docker job consumes its output via matrix. + determine-latest: + runs-on: ubuntu-latest + timeout-minutes: 2 + outputs: + value: ${{ steps.is_latest.outputs.value }} + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + + - name: Check is-latest + id: is_latest + run: | + VALUE=$(python .github/workflows/release/release.py is-latest "${{ inputs.tag }}") + echo "value=$VALUE" >> $GITHUB_OUTPUT + # Builds the x64 and arm64 binaries for Linux, for all 3 crates, via the Docker builder build-binaries-linux: + needs: [resolve-tag] + timeout-minutes: 60 strategy: + fail-fast: false matrix: target: - amd64 @@ -35,8 +100,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: + ref: ${{ needs.resolve-tag.outputs.commit }} fetch-depth: 0 submodules: true @@ -44,6 +110,9 @@ jobs: run: | echo "Releasing commit: $(git rev-parse HEAD)" + - name: Set lowercase owner + run: echo "OWNER=$(echo '${{ github.repository_owner }}' | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV + - name: Set up QEMU uses: docker/setup-qemu-action@v3 @@ -62,30 +131,31 @@ jobs: with: context: . 
push: false - platforms: linux/amd64,linux/arm64 - cache-from: type=registry,ref=ghcr.io/commit-boost/buildcache:${{ matrix.target-crate}} - cache-to: type=registry,ref=ghcr.io/commit-boost/buildcache:${{ matrix.target-crate }},mode=max + platforms: linux/${{ matrix.target }} file: provisioning/build.Dockerfile - outputs: type=local,dest=build + outputs: type=local,dest=build/linux_${{ matrix.target }} build-args: | TARGET_CRATE=${{ matrix.name }} - name: Package binary (Linux) run: | cd build/linux_${{ matrix.target }} - tar -czvf ${{ matrix.name }}-${{ github.ref_name }}-linux_${{ matrix.package-suffix }}.tar.gz ${{ matrix.name }} - mv ${{ matrix.name }}-${{ github.ref_name }}-linux_${{ matrix.package-suffix }}.tar.gz ../../ + tar -czvf ${{ matrix.name }}-${{ inputs.tag }}-linux_${{ matrix.package-suffix }}.tar.gz ${{ matrix.name }} + mv ${{ matrix.name }}-${{ inputs.tag }}-linux_${{ matrix.package-suffix }}.tar.gz ../../ - name: Upload artifact uses: actions/upload-artifact@v4 with: - name: ${{ matrix.name }}-${{ github.ref_name }}-linux_${{ matrix.package-suffix }} + name: ${{ matrix.name }}-${{ inputs.tag }}-linux_${{ matrix.package-suffix }} path: | - ${{ matrix.name }}-${{ github.ref_name }}-linux_${{ matrix.package-suffix }}.tar.gz + ${{ matrix.name }}-${{ inputs.tag }}-linux_${{ matrix.package-suffix }}.tar.gz # Builds the arm64 binaries for Darwin, for all 3 crates, natively build-binaries-darwin: + needs: [resolve-tag] + timeout-minutes: 60 strategy: + fail-fast: false matrix: target: # x64 requires macos-latest-large which is not available in the free tier @@ -105,8 +175,9 @@ jobs: runs-on: ${{ matrix.os }} steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: + ref: ${{ needs.resolve-tag.outputs.commit }} fetch-depth: 0 submodules: true @@ -121,19 +192,19 @@ jobs: brew install protobuf - name: Cache Cargo registry - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/.cargo/registry key: ${{ runner.os 
}}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} - name: Cache Cargo index - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/.cargo/git key: ${{ runner.os }}-cargo-git-${{ hashFiles('**/Cargo.lock') }} - name: Cache Cargo build - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: target key: ${{ runner.os }}-cargo-build-${{ matrix.target }}-${{ matrix.name }}-${{ hashFiles('**/Cargo.lock') }} @@ -148,24 +219,32 @@ jobs: - name: Package binary (Darwin) run: | cd target/${{ matrix.target }}/release - tar -czvf ${{ matrix.name }}-${{ github.ref_name }}-darwin_${{ matrix.package-suffix }}.tar.gz ${{ matrix.name }} - mv ${{ matrix.name }}-${{ github.ref_name }}-darwin_${{ matrix.package-suffix }}.tar.gz ../../../ + tar -czvf ${{ matrix.name }}-${{ inputs.tag }}-darwin_${{ matrix.package-suffix }}.tar.gz ${{ matrix.name }} + mv ${{ matrix.name }}-${{ inputs.tag }}-darwin_${{ matrix.package-suffix }}.tar.gz ../../../ - name: Upload artifact uses: actions/upload-artifact@v4 with: - name: ${{ matrix.name }}-${{ github.ref_name }}-darwin_${{ matrix.package-suffix }} + name: ${{ matrix.name }}-${{ inputs.tag }}-darwin_${{ matrix.package-suffix }} path: | - ${{ matrix.name }}-${{ github.ref_name }}-darwin_${{ matrix.package-suffix }}.tar.gz - - # Builds the PBS Docker image - build-and-push-pbs-docker: - needs: [build-binaries-linux] + ${{ matrix.name }}-${{ inputs.tag }}-darwin_${{ matrix.package-suffix }}.tar.gz + + # Builds and pushes Docker images for both PBS and Signer + build-and-push-docker: + needs: [resolve-tag, build-binaries-linux, determine-latest] + permissions: + contents: read + packages: write + strategy: + matrix: + crate: [pbs, signer] runs-on: ubuntu-latest + timeout-minutes: 45 steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: + ref: ${{ needs.resolve-tag.outputs.commit }} fetch-depth: 0 submodules: true @@ -179,10 +258,13 @@ jobs: run: | mkdir -p ./artifacts/bin/linux_amd64 mkdir -p 
./artifacts/bin/linux_arm64 - tar -xzf ./artifacts/commit-boost-pbs-${{ github.ref_name }}-linux_x86-64/commit-boost-pbs-${{ github.ref_name }}-linux_x86-64.tar.gz -C ./artifacts/bin - mv ./artifacts/bin/commit-boost-pbs ./artifacts/bin/linux_amd64/commit-boost-pbs - tar -xzf ./artifacts/commit-boost-pbs-${{ github.ref_name }}-linux_arm64/commit-boost-pbs-${{ github.ref_name }}-linux_arm64.tar.gz -C ./artifacts/bin - mv ./artifacts/bin/commit-boost-pbs ./artifacts/bin/linux_arm64/commit-boost-pbs + tar -xzf ./artifacts/commit-boost-${{ matrix.crate }}-${{ inputs.tag }}-linux_x86-64/commit-boost-${{ matrix.crate }}-${{ inputs.tag }}-linux_x86-64.tar.gz -C ./artifacts/bin + mv ./artifacts/bin/commit-boost-${{ matrix.crate }} ./artifacts/bin/linux_amd64/commit-boost-${{ matrix.crate }} + tar -xzf ./artifacts/commit-boost-${{ matrix.crate }}-${{ inputs.tag }}-linux_arm64/commit-boost-${{ matrix.crate }}-${{ inputs.tag }}-linux_arm64.tar.gz -C ./artifacts/bin + mv ./artifacts/bin/commit-boost-${{ matrix.crate }} ./artifacts/bin/linux_arm64/commit-boost-${{ matrix.crate }} + + - name: Set lowercase owner + run: echo "OWNER=$(echo '${{ github.repository_owner }}' | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV - name: Set up QEMU uses: docker/setup-qemu-action@v3 @@ -197,7 +279,7 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Build and push PBS Docker image + - name: Build and push Docker image uses: docker/build-push-action@v6 with: context: . 
@@ -206,84 +288,68 @@ jobs: build-args: | BINARIES_PATH=./artifacts/bin tags: | - ghcr.io/commit-boost/pbs:${{ github.ref_name }} - ${{ !contains(github.ref_name, 'rc') && 'ghcr.io/commit-boost/pbs:latest' || '' }} - file: provisioning/pbs.Dockerfile - - # Builds the Signer Docker image - build-and-push-signer-docker: - needs: [build-binaries-linux] + ghcr.io/${{ env.OWNER }}/${{ matrix.crate }}:${{ inputs.tag }} + ${{ needs.determine-latest.outputs.value == 'true' && format('ghcr.io/{0}/{1}:latest', env.OWNER, matrix.crate) || '' }} + file: provisioning/${{ matrix.crate }}.Dockerfile + + # Signs all binaries with Sigstore for provenance + sign-binaries: + needs: [build-binaries-linux, build-binaries-darwin] + permissions: + contents: read + id-token: write runs-on: ubuntu-latest + timeout-minutes: 10 steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - fetch-depth: 0 - submodules: true - - - name: Download binary archives + - name: Download binary artifacts uses: actions/download-artifact@v4 with: path: ./artifacts pattern: "commit-boost-*" - - name: Extract binaries - run: | - mkdir -p ./artifacts/bin/linux_amd64 - mkdir -p ./artifacts/bin/linux_arm64 - tar -xzf ./artifacts/commit-boost-signer-${{ github.ref_name }}-linux_x86-64/commit-boost-signer-${{ github.ref_name }}-linux_x86-64.tar.gz -C ./artifacts/bin - mv ./artifacts/bin/commit-boost-signer ./artifacts/bin/linux_amd64/commit-boost-signer - tar -xzf ./artifacts/commit-boost-signer-${{ github.ref_name }}-linux_arm64/commit-boost-signer-${{ github.ref_name }}-linux_arm64.tar.gz -C ./artifacts/bin - mv ./artifacts/bin/commit-boost-signer ./artifacts/bin/linux_arm64/commit-boost-signer - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Login to GitHub Container Registry - uses: docker/login-action@v3 + - name: Sign all binaries with Sigstore + uses: sigstore/gh-action-sigstore-python@v3.0.0 with: - 
registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} + inputs: ./artifacts/**/*.tar.gz - - name: Build and push Signer Docker image - uses: docker/build-push-action@v6 + - name: Upload signed artifacts + uses: actions/upload-artifact@v4 with: - context: . - push: true - platforms: linux/amd64,linux/arm64 - build-args: | - BINARIES_PATH=./artifacts/bin - tags: | - ghcr.io/commit-boost/signer:${{ github.ref_name }} - ${{ !contains(github.ref_name, 'rc') && 'ghcr.io/commit-boost/signer:latest' || '' }} - file: provisioning/signer.Dockerfile + name: signed-${{ inputs.tag }} + path: ./artifacts/**/*.sigstore* # Creates a draft release on GitHub with the binaries finalize-release: needs: - build-binaries-linux - build-binaries-darwin - - build-and-push-pbs-docker - - build-and-push-signer-docker + - build-and-push-docker + - sign-binaries + permissions: + contents: write + packages: read runs-on: ubuntu-latest + timeout-minutes: 60 steps: - - name: Download artifacts + - name: Download binary artifacts uses: actions/download-artifact@v4 with: path: ./artifacts pattern: "commit-boost-*" + - name: Download signed artifacts + uses: actions/download-artifact@v4 + with: + path: ./artifacts + pattern: "signed-*" + - name: Finalize Release uses: softprops/action-gh-release@v2 with: files: ./artifacts/**/* draft: true prerelease: false - tag_name: ${{ github.ref_name }} - name: ${{ github.ref_name }} + tag_name: ${{ inputs.tag }} + name: ${{ inputs.tag }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release/README.md b/.github/workflows/release/README.md new file mode 100644 index 00000000..b081569e --- /dev/null +++ b/.github/workflows/release/README.md @@ -0,0 +1,87 @@ +# Release automation + +Python CLI and workflow glue for the Commit-Boost release process. + +For the maintainer-facing release procedure, see [`.releases/README.md`](../../../.releases/README.md). + +## Flow + +```text +1. 
PR on main adds .releases/vX.Y.Z.yml + - file name is the release tag + - referenced commit may be on main or on a hotfix branch + +2. validate-release-request.yml + - pre-merge feedback for authors and reviewers + - validates request shape before merge + +3. release-gate.yml + - authoritative post-merge check + - re-validates the request + - creates the tag via the GitHub App + - dispatches release.yml from main with only the tag + +4. release.yml + - receives only the tag + - resolves the commit from the tag itself + - builds binaries from that exact commit + - pushes Docker images + - signs artifacts with Sigstore + - drafts the GitHub Release +``` + +`validate-release-request.yml` is kept for fast pre-merge feedback. `release-gate.yml` is still authoritative because it is the last step before tag creation. + +The release workflow always runs from the latest workflow definition on main. The tag is the trust anchor: `release-gate.yml` creates it via the GitHub App, and `release.yml` resolves the commit from that tag before building. This prevents dispatch from injecting an arbitrary tag and commit pair. + +The release commit may be off-main. The workflow definition always runs from main. + +## Local usage + +Requires `GH_TOKEN` and `REPO` in env. `uv` is recommended. 
+ +```bash +export REPO=commit-boost/commit-boost-client +export GH_TOKEN=$(gh auth token) + +uv run --with pyyaml python .github/workflows/release/release.py lint .releases/v1.2.3.yml +uv run --with pyyaml --with pytest pytest .github/workflows/release/test_release.py -v +``` + +## Commands + +| Command | Purpose | +| --- | --- | +| `validate-filename ` | Validate release tag format | +| `validate-yaml ` | Validate release-request YAML shape | +| `find-added --base --head ` | List added release-request files | +| `check-modifications --base --head ` | Reject edits and deletes to existing release requests | +| `check-commit-exists ` | Verify the requested commit exists | +| `check-tag-free ` | Verify the tag does not already exist | +| `create-tag ` | Create the signed tag via GitHub API | +| `is-latest ` | Decide whether Docker `:latest` should move | +| `validate-pr` | Full pre-merge validation | +| `gate` | Authoritative post-merge validation and tag creation | +| `lint ` | Local pre-flight check | + +## Layout + +```text +.github/workflows/release/ +├── release.py +├── test_release.py +└── README.md +``` + +## Notes + +- The release request PR is the approval point. +- Release request files are immutable after merge. +- Botched attempts may leave version gaps. We accept that rather than adding retry machinery to `.releases/`. +- Workflow permissions grant write access only where required. + +## Troubleshooting + +- `pip install pyyaml` fails with `externally-managed-environment`: use `uv` or install inside a venv. +- `gh: not found`: install GitHub CLI and run `gh auth login`. +- Release dispatch succeeds but the build fails immediately: confirm the tag exists and points at the intended commit. 
diff --git a/.github/workflows/release/release.py b/.github/workflows/release/release.py new file mode 100644 index 00000000..be6f1069 --- /dev/null +++ b/.github/workflows/release/release.py @@ -0,0 +1,424 @@ +#!/usr/bin/env python3 +"""Release management CLI for Commit-Boost. + +Single-file argparse CLI. PyYAML + stdlib only. Shells out to ``git`` and +``gh`` via ``subprocess.run``. +""" + +import argparse +import json +import os +import re +import subprocess +import sys +from pathlib import Path + + +# ── helpers ────────────────────────────────────────────────────────────────── + + +def _env(name: str) -> str: + """Read *name* from the environment; exit 1 with a clear message if missing.""" + val = os.environ.get(name) + if not val: + print(f"❌ Required environment variable ${name} is not set.") + sys.exit(1) + return val + + +class GhApiError(Exception): + """Raised when a ``gh api`` call fails non-zero.""" + + +def gh_api(method: str, path: str, *, paginate: bool = False, **fields) -> dict | list: + """Thin wrapper over ``gh api``. 
Returns parsed JSON.""" + token = _env("GH_TOKEN") + repo = _env("REPO") + full_path = f"/repos/{repo}{path}" + argv = ["gh", "api", "--method", method, full_path] + for k, v in fields.items(): + argv.extend(["-f", f"{k}={v}"]) + if method.upper() == "GET" and paginate: + argv.append("--paginate") + env = os.environ.copy() + env["GH_TOKEN"] = token + result = subprocess.run(argv, capture_output=True, text=True, env=env) + if result.returncode != 0: + print(result.stderr, file=sys.stderr, end="") + raise GhApiError( + f"gh api {method} {full_path} failed (exit {result.returncode})" + ) + if not result.stdout.strip(): + return {} + return json.loads(result.stdout) + + +def _run(*args: str) -> str: + """Wrapper over ``subprocess.run`` with check, capture_output, text.""" + result = subprocess.run(list(args), capture_output=True, text=True, check=True) + return result.stdout + + +# Public alias so tests can patch `release.run_git` at the boundary. +# Also lets callers shell out to git explicitly when intent needs to be clear. 
+def run_git(*args: str) -> str: + return _run("git", *args) + + +def _git_diff(base: str, head: str, diff_filter: str) -> list[str]: + """Return list of .releases/*.yml files from git diff with *diff_filter*.""" + try: + out = run_git( + "diff", "--name-only", f"--diff-filter={diff_filter}", + f"{base}..{head}", "--", ".releases/*.yml", + ) + except subprocess.CalledProcessError as e: + if e.returncode == 1: + return [] + raise + return [l for l in out.strip().split("\n") if l] + + +def find_added(base: str, head: str) -> list[str]: + """Return .releases/*.yml files added between two refs.""" + return _git_diff(base, head, "A") + + +def find_modified_deleted(base: str, head: str) -> list[str]: + """Return .releases/*.yml files modified or deleted between two refs.""" + return _git_diff(base, head, "MD") + + +# ── core validation helpers ───────────────────────────────────────────────── + +SEMVER_RE = re.compile( + r"^v(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(-rc[1-9][0-9]*)?$" +) + + +def _semver_key(tag: str) -> tuple: + """Return a comparable key for a strict-semver release tag. + + Strict means: matches SEMVER_RE exactly. Non-strict input raises ValueError + so callers can fail loudly rather than silently mis-sort. + + Sort order: (major, minor, patch, rc_or_inf). Final releases sort above + their RC siblings via float('inf') — i.e. v1.2.3 > v1.2.3-rc99. + """ + if not SEMVER_RE.match(tag): + raise ValueError(f"Not a strict-semver release tag: {tag!r}") + m = re.match(r"^v(\d+)\.(\d+)\.(\d+)(?:-rc(\d+))?$", tag) + return ( + int(m.group(1)), int(m.group(2)), int(m.group(3)), + int(m.group(4)) if m.group(4) else float("inf"), + ) + + +def validate_yaml_file(path: str) -> tuple[str, str]: + """Parse and validate a release-request YAML. 
Returns (commit_sha, tag).""" + try: + text = Path(path).read_text() + except FileNotFoundError: + print(f"❌ File not found: {path}") + sys.exit(1) + + import yaml + try: + data = yaml.safe_load(text) + except yaml.YAMLError as e: + print(f"❌ YAML parse error: {e}") + sys.exit(1) + + if not isinstance(data, dict): + print("❌ YAML must be a mapping (dict)") + sys.exit(1) + + missing = {"commit", "reason"} - data.keys() + if missing: + print(f"❌ Missing required fields: {missing}") + sys.exit(1) + + commit = data["commit"] + if ( + not isinstance(commit, str) or len(commit) != 40 + or not all(c in "0123456789abcdef" for c in commit) + ): + print("❌ commit must be a 40-character lowercase hex SHA") + sys.exit(1) + + reason = data["reason"] + if not isinstance(reason, str) or not reason.strip(): + print("❌ reason must be a non-empty string") + sys.exit(1) + + tag = Path(path).stem + return commit, tag + + +# ── subcommands ────────────────────────────────────────────────────────────── + +def cmd_validate_filename(args: argparse.Namespace) -> None: + if SEMVER_RE.match(args.basename): + print(f"✅ Valid release filename: {args.basename}") + sys.exit(0) + print( + f"❌ Filename '{args.basename}' is not a valid release tag.\n" + "Expected: v.. 
or v..-rc, " + "no leading zeros" + ) + sys.exit(1) + + +def cmd_validate_yaml(args: argparse.Namespace) -> None: + commit, tag = validate_yaml_file(args.path) + print(f"tag={tag}") + print(f"commit={commit}") + print(f"✅ YAML validation passed for {Path(args.path).name}") + sys.exit(0) + + +def cmd_find_added(args: argparse.Namespace) -> None: + files = find_added(args.base, args.head) + for f in files: + print(f) + print(f"count={len(files)}", file=sys.stderr) + sys.exit(0) + + +def cmd_check_modifications(args: argparse.Namespace) -> None: + files = find_modified_deleted(args.base, args.head) + if files: + print("❌ Existing release YAMLs cannot be modified or deleted:") + for f in files: + print(f) + sys.exit(1) + print("✅ No modifications or deletions detected") + sys.exit(0) + + +def cmd_check_commit_exists(args: argparse.Namespace) -> None: + try: + gh_api("GET", f"/commits/{args.sha}") + print(f"✅ Commit {args.sha} exists") + sys.exit(0) + except GhApiError: + print(f"❌ Commit {args.sha} does not exist in this repository") + sys.exit(1) + + +def cmd_check_tag_free(args: argparse.Namespace) -> None: + try: + gh_api("GET", f"/git/refs/tags/{args.tag}") + print(f"❌ Tag {args.tag} already exists. Pick a different version.") + sys.exit(1) + except GhApiError: + print(f"✅ Tag {args.tag} is free") + sys.exit(0) + + +def cmd_create_tag(args: argparse.Namespace) -> None: + tag_obj = gh_api( + "POST", "/git/tags", + tag=args.tag, message=args.tag, + object=args.commit, type="commit", + ) + tag_sha = tag_obj.get("sha") if isinstance(tag_obj, dict) else None + if not tag_sha: + print("❌ Failed to create tag object") + sys.exit(1) + + gh_api( + "POST", "/git/refs", + ref=f"refs/tags/{args.tag}", sha=tag_sha, + ) + print(f"✅ Tag {args.tag} created at {args.commit} (signed by GitHub via App identity)") + sys.exit(0) + + +def cmd_is_latest(args: argparse.Namespace) -> None: + tag = args.tag + # Fail-closed: the tag we're releasing must itself be strict semver. 
+ # validate-filename already enforces this for new releases, but defend + # in depth in case is-latest is invoked standalone. + if not SEMVER_RE.match(tag): + print(f"❌ Tag {tag!r} is not strict semver. Cannot determine is-latest.") + sys.exit(1) + # RC tags never get :latest + if "-rc" in tag: + print("false") + sys.exit(0) + try: + all_tags = run_git("tag", "--list", "v*").strip().split("\n") + except subprocess.CalledProcessError: + print("true") + sys.exit(0) + # Only strict-semver, non-RC tags participate in the comparison. + # Legacy malformed tags (v0.7.0-rc.1, v0.9.2-rc-dev, v2.0.0-rc2-1, etc.) + # are invisible to the highest-wins check. + candidates = [t for t in all_tags if t and SEMVER_RE.match(t) and "-rc" not in t] + if not candidates: + print("true") + sys.exit(0) + highest = max(candidates, key=_semver_key) + print("true" if highest == tag else "false") + sys.exit(0) + + +def _step(fn, args: argparse.Namespace) -> None: + """Run a cmd_* function as a step inside an orchestrator. + + The cmd_* functions all call ``sys.exit(0)`` on success, which would + short-circuit any orchestrator that chains them. This wrapper catches + SystemExit(0) so the next step can run, while letting non-zero exits + propagate (orchestrator should abort on failure). 
+ """ + try: + fn(args) + except SystemExit as e: + if e.code not in (0, None): + raise + + +def cmd_validate_pr(args: argparse.Namespace) -> None: + base = _env("BASE_SHA") + head = _env("HEAD_SHA") + + added = find_added(base, head) + mods = find_modified_deleted(base, head) + + if mods: + print("❌ Existing release YAMLs cannot be modified or deleted:") + for m in mods: + print(m) + sys.exit(1) + + if len(added) == 0: + print("added_count=0") + print("No release changes in this PR; validation trivially passes.") + sys.exit(0) + + if len(added) > 1: + print("❌ Only one release YAML may be added per PR.") + for a in added: + print(a) + sys.exit(1) + + filepath = added[0] + basename = Path(filepath).stem + + _step(cmd_validate_filename, argparse.Namespace(basename=basename)) + commit, _ = validate_yaml_file(filepath) + _step(cmd_check_commit_exists, argparse.Namespace(sha=commit)) + _step(cmd_check_tag_free, argparse.Namespace(tag=basename)) + print(f"added_count=1") + print(f"tag={basename}") + print(f"commit={commit}") + print(f"✅ Release request for {basename} validated.") + + +def cmd_gate(args: argparse.Namespace) -> None: + base = _env("BASE_SHA") + merge_sha = _env("MERGE_SHA") + + added = find_added(base, merge_sha) + if len(added) == 0: + print("Expected exactly 1 added release YAML, got 0. Skipping.") + sys.exit(0) + if len(added) > 1: + print(f"❌ Expected exactly 1 added release YAML, got {len(added)}.") + for path in added: + print(path) + sys.exit(1) + + filepath = added[0] + basename = Path(filepath).stem + + # Re-validate everything — do not trust that validate-pr ran or passed. 
+ _step(cmd_validate_filename, argparse.Namespace(basename=basename)) + commit, tag = validate_yaml_file(filepath) + _step(cmd_check_commit_exists, argparse.Namespace(sha=commit)) + _step(cmd_check_tag_free, argparse.Namespace(tag=tag)) + + # Emit structured output before creating the tag so release-gate.yml can parse it + print(f"tag={tag}") + print(f"commit={commit}") + + cmd_create_tag(argparse.Namespace(tag=tag, commit=commit)) + + +def cmd_lint(args: argparse.Namespace) -> None: + """Pre-commit sanity check: run every CI validation against a single YAML. + + Reads $REPO and $GH_TOKEN like validate-pr does, but skips the git-diff + step — we already know which file you're checking. Use this before + opening a release-request PR to confirm CI will accept it. + """ + path = args.path + basename = Path(path).stem + + print(f"── Linting {path} ──") + _step(cmd_validate_filename, argparse.Namespace(basename=basename)) + commit, _ = validate_yaml_file(path) + _step(cmd_check_commit_exists, argparse.Namespace(sha=commit)) + _step(cmd_check_tag_free, argparse.Namespace(tag=basename)) + print(f"✅ {path} would pass CI.") + sys.exit(0) + + +# ── main ───────────────────────────────────────────────────────────────────── + +def main() -> None: + parser = argparse.ArgumentParser(description="Commit-Boost release management") + sub = parser.add_subparsers(dest="command", required=True) + + p = sub.add_parser("validate-filename", help="Validate a release filename against strict semver") + p.add_argument("basename") + p.set_defaults(func=cmd_validate_filename) + + p = sub.add_parser("validate-yaml", help="Parse and validate a release-request YAML file") + p.add_argument("path") + p.set_defaults(func=cmd_validate_yaml) + + p = sub.add_parser("find-added", help="List release YAMLs added between two refs") + p.add_argument("--base", required=True) + p.add_argument("--head", required=True) + p.set_defaults(func=cmd_find_added) + + p = sub.add_parser("check-modifications", 
help="Reject modifications/deletions of release YAMLs") + p.add_argument("--base", required=True) + p.add_argument("--head", required=True) + p.set_defaults(func=cmd_check_modifications) + + p = sub.add_parser("check-commit-exists", help="Verify a commit SHA exists in the repo") + p.add_argument("sha") + p.set_defaults(func=cmd_check_commit_exists) + + p = sub.add_parser("check-tag-free", help="Verify a tag does not already exist") + p.add_argument("tag") + p.set_defaults(func=cmd_check_tag_free) + + p = sub.add_parser("create-tag", help="Create an annotated tag via GitHub API") + p.add_argument("tag") + p.add_argument("commit") + p.set_defaults(func=cmd_create_tag) + + p = sub.add_parser("is-latest", help="Check if a tag is the highest non-RC semver") + p.add_argument("tag") + p.set_defaults(func=cmd_is_latest) + + p = sub.add_parser("validate-pr", help="End-to-end PR validator (reads env)") + p.set_defaults(func=cmd_validate_pr) + + p = sub.add_parser("gate", help="End-to-end gate after merge (reads env)") + p.set_defaults(func=cmd_gate) + + p = sub.add_parser("lint", help="Pre-commit sanity check on a single YAML (reads $REPO + $GH_TOKEN)") + p.add_argument("path", help="Path to the release-request YAML to lint") + p.set_defaults(func=cmd_lint) + + parsed = parser.parse_args() + parsed.func(parsed) + + +if __name__ == "__main__": + main() diff --git a/.github/workflows/release/test_release.py b/.github/workflows/release/test_release.py new file mode 100644 index 00000000..9943b247 --- /dev/null +++ b/.github/workflows/release/test_release.py @@ -0,0 +1,508 @@ +"""Tests for release.py — pure-logic and mocked-network coverage.""" + +import os +import subprocess +import sys +from pathlib import Path +from unittest.mock import patch + +import pytest + +import release + +from release import ( + SEMVER_RE, + _semver_key, + cmd_check_commit_exists, + cmd_check_modifications, + cmd_check_tag_free, + cmd_create_tag, + cmd_find_added, + cmd_gate, + cmd_is_latest, + 
cmd_lint, + cmd_validate_filename, + cmd_validate_pr, + cmd_validate_yaml, + GhApiError, +) + +HERE = Path(__file__).parent + + +@pytest.fixture(autouse=True) +def _restore_cwd(): + """Restore working directory after each test.""" + orig = os.getcwd() + yield + os.chdir(orig) + + +def _write_yaml(tmp_path: Path, name: str, content: str) -> str: + """Write a YAML fixture into tmp_path and return the absolute path.""" + p = tmp_path / name + p.write_text(content) + return str(p) + + +# Inline YAML fixtures — kept next to the tests that use them for readability. +GOOD_YAML = """\ +commit: abcdef1234567890abcdef1234567890abcdef12 +reason: "Emergency pagination fix" +""" + +BAD_SCHEMA_YAML = """\ +commit: abcdef1234567890abcdef1234567890abcdef12 +""" # missing reason + +BAD_SHA_LENGTH_YAML = """\ +commit: abcdef1234567890abcdef1234567890abcdef123 +reason: "Too long SHA" +""" + +BAD_SHA_CHARS_YAML = """\ +commit: xbcdef1234567890abcdef1234567890abcdef12 +reason: "Invalid hex char x" +""" + +EMPTY_REASON_YAML = """\ +commit: abcdef1234567890abcdef1234567890abcdef12 +reason: "" +""" + +NOT_A_MAPPING_YAML = """\ +- item1 +- item2 +""" + + +# ── validate-filename ──────────────────────────────────────────────────────── + +class TestValidateFilename: + def test_passes_full_release(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_filename(_ns(basename="v1.2.3")) + assert exc.value.code == 0 + out = capsys.readouterr().out + assert "✅" in out + + def test_passes_rc_release(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_filename(_ns(basename="v1.2.3-rc1")) + assert exc.value.code == 0 + out = capsys.readouterr().out + assert "✅" in out + + def test_passes_v0_0_1(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_filename(_ns(basename="v0.0.1")) + assert exc.value.code == 0 + + def test_passes_v10_20_30(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_filename(_ns(basename="v10.20.30")) + 
assert exc.value.code == 0 + + def test_fails_no_v_prefix(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_filename(_ns(basename="1.2.3")) + assert exc.value.code == 1 + + def test_fails_leading_zero_major(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_filename(_ns(basename="v01.2.3")) + assert exc.value.code == 1 + + def test_fails_leading_zero_minor(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_filename(_ns(basename="v1.02.3")) + assert exc.value.code == 1 + + def test_fails_leading_zero_patch(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_filename(_ns(basename="v1.2.03")) + assert exc.value.code == 1 + + def test_fails_rc0(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_filename(_ns(basename="v1.2.3-rc0")) + assert exc.value.code == 1 + + def test_fails_missing_patch(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_filename(_ns(basename="v1.2")) + assert exc.value.code == 1 + + def test_fails_yaml_extension(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_filename(_ns(basename="v1.2.3.yaml")) + assert exc.value.code == 1 + + def test_fails_empty(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_filename(_ns(basename="")) + assert exc.value.code == 1 + + # -- regex-level tests (belt and suspenders) -- + + @pytest.mark.parametrize("good", [ + "v0.0.0", + "v1.0.0", + "v10.20.30", + "v0.0.0-rc1", + "v1.2.3-rc99", + "v999.999.999", + ]) + def test_regex_good(self, good): + assert SEMVER_RE.match(good), f"expected {good} to match" + + @pytest.mark.parametrize("bad", [ + "", + "1.2.3", + "v01.2.3", + "v1.02.3", + "v1.2.03", + "v1.2", + "v1.2.3.4", + "v1.2.3.yaml", + "v1.2.3-rc0", + "v1.2.3-rc", + "v1.2.3-alpha", + "v1.2.3-RC1", + ]) + def test_regex_bad(self, bad): + assert SEMVER_RE.match(bad) is None, f"expected {bad} to NOT match" + + +# ── validate-yaml 
──────────────────────────────────────────────────────────── + +class TestValidateYaml: + def test_good_yaml(self, tmp_path, capsys): + path = _write_yaml(tmp_path, "v1.2.3.yml", GOOD_YAML) + with pytest.raises(SystemExit) as exc: + cmd_validate_yaml(_ns(path=path)) + assert exc.value.code == 0 + out = capsys.readouterr().out + assert "commit=" in out + assert "tag=" in out + assert "✅" in out + + def test_missing_fields(self, tmp_path, capsys): + path = _write_yaml(tmp_path, "v1.2.3.yml", BAD_SCHEMA_YAML) + with pytest.raises(SystemExit) as exc: + cmd_validate_yaml(_ns(path=path)) + assert exc.value.code == 1 + out = capsys.readouterr().out + assert "❌" in out + assert "reason" in out + + def test_bad_sha_length(self, tmp_path, capsys): + path = _write_yaml(tmp_path, "v1.2.3.yml", BAD_SHA_LENGTH_YAML) + with pytest.raises(SystemExit) as exc: + cmd_validate_yaml(_ns(path=path)) + assert exc.value.code == 1 + + def test_bad_sha_chars(self, tmp_path, capsys): + path = _write_yaml(tmp_path, "v1.2.3.yml", BAD_SHA_CHARS_YAML) + with pytest.raises(SystemExit) as exc: + cmd_validate_yaml(_ns(path=path)) + assert exc.value.code == 1 + + def test_empty_reason(self, tmp_path, capsys): + path = _write_yaml(tmp_path, "v1.2.3.yml", EMPTY_REASON_YAML) + with pytest.raises(SystemExit) as exc: + cmd_validate_yaml(_ns(path=path)) + assert exc.value.code == 1 + + def test_non_mapping_root(self, tmp_path, capsys): + path = _write_yaml(tmp_path, "v1.2.3.yml", NOT_A_MAPPING_YAML) + with pytest.raises(SystemExit) as exc: + cmd_validate_yaml(_ns(path=path)) + assert exc.value.code == 1 + + def test_file_not_found(self, capsys): + with pytest.raises(SystemExit) as exc: + cmd_validate_yaml(_ns(path="/nonexistent.yml")) + assert exc.value.code == 1 + + +# ── is-latest ──────────────────────────────────────────────────────────────── + +class TestIsLatest: + def test_highest_tag_returns_true(self, capsys): + with patch("release._run") as mock_run: + mock_run.return_value = 
"v1.0.0\nv1.1.0\nv2.0.0\n" + with pytest.raises(SystemExit) as exc: + cmd_is_latest(_ns(tag="v2.0.0")) + assert exc.value.code == 0 + out = capsys.readouterr().out.strip() + assert out == "true" + + def test_lower_tag_returns_false(self, capsys): + with patch("release._run") as mock_run: + mock_run.return_value = "v1.0.0\nv1.1.0\nv2.0.0\n" + with pytest.raises(SystemExit) as exc: + cmd_is_latest(_ns(tag="v1.1.0")) + assert exc.value.code == 0 + out = capsys.readouterr().out.strip() + assert out == "false" + + def test_rc_tags_excluded(self, capsys): + with patch("release._run") as mock_run: + mock_run.return_value = "v1.0.0\nv1.1.0-rc1\nv1.1.0-rc2\nv2.0.0-rc1\n" + with pytest.raises(SystemExit) as exc: + cmd_is_latest(_ns(tag="v1.0.0")) + assert exc.value.code == 0 + out = capsys.readouterr().out.strip() + assert out == "true" # v1.0.0 is highest non-RC + + def test_empty_tag_list_returns_true(self, capsys): + with patch("release._run") as mock_run: + mock_run.return_value = "" + with pytest.raises(SystemExit) as exc: + cmd_is_latest(_ns(tag="v1.0.0")) + assert exc.value.code == 0 + out = capsys.readouterr().out.strip() + assert out == "true" + + def test_only_rc_tags_returns_true(self, capsys): + with patch("release._run") as mock_run: + mock_run.return_value = "v1.0.0-rc1\nv2.0.0-rc1\n" + with pytest.raises(SystemExit) as exc: + cmd_is_latest(_ns(tag="v3.0.0")) + assert exc.value.code == 0 + out = capsys.readouterr().out.strip() + assert out == "true" + + def test_rc_tags_excluded_highest_non_rc_wins(self, capsys): + """v1.0.0 is the only non-RC, so it's the highest, not v2.0.0-rc1.""" + with patch("release._run") as mock_run: + mock_run.return_value = "v1.0.0\nv2.0.0-rc1\n" + with pytest.raises(SystemExit) as exc: + cmd_is_latest(_ns(tag="v2.0.0")) + assert exc.value.code == 0 + out = capsys.readouterr().out.strip() + assert out == "false" # v2.0.0 doesn't exist yet + # Now test the highest non-RC is v1.0.0 + with patch("release._run") as mock_run: + 
mock_run.return_value = "v1.0.0\nv2.0.0-rc1\n" + with pytest.raises(SystemExit) as exc: + cmd_is_latest(_ns(tag="v1.0.0")) + assert exc.value.code == 0 + out = capsys.readouterr().out.strip() + assert out == "true" # v1.0.0 IS the highest non-RC + + def test_single_tag_returns_true(self, capsys): + with patch("release._run") as mock_run: + mock_run.return_value = "v1.0.0\n" + with pytest.raises(SystemExit) as exc: + cmd_is_latest(_ns(tag="v1.0.0")) + assert exc.value.code == 0 + out = capsys.readouterr().out.strip() + assert out == "true" + + +# ── find-added-releases (tmp git repo) ─────────────────────────────────────── + +class TestFindAddedReleases: + def test_finds_added_file(self, tmp_path): + os.chdir(str(tmp_path)) + _init_git_repo(tmp_path) + _git_commit(tmp_path, "initial", files={"README.md": "hello"}) + base = _git_rev(tmp_path, "HEAD") + _git_commit(tmp_path, "add release", files={ + ".releases/v1.2.3.yml": "commit: a\nreason: test\n" + }) + head = _git_rev(tmp_path, "HEAD") + with pytest.raises(SystemExit) as exc: + cmd_find_added(_ns(base=base, head=head)) + assert exc.value.code == 0 + + def test_no_added_files(self, tmp_path, capsys): + os.chdir(str(tmp_path)) + _init_git_repo(tmp_path) + _git_commit(tmp_path, "initial", files={"README.md": "hello"}) + base = _git_rev(tmp_path, "HEAD") + _git_commit(tmp_path, "add another file", files={"other.txt": "stuff"}) + head = _git_rev(tmp_path, "HEAD") + with pytest.raises(SystemExit) as exc: + cmd_find_added(_ns(base=base, head=head)) + assert exc.value.code == 0 + + +# ── check-modifications (tmp git repo) ─────────────────────────────────────── + +class TestCheckModifications: + def test_no_modifications_passes(self, tmp_path): + os.chdir(str(tmp_path)) + _init_git_repo(tmp_path) + _git_commit(tmp_path, "initial", files={"README.md": "hello"}) + base = _git_rev(tmp_path, "HEAD") + _git_commit(tmp_path, "add unrelated", files={"other.txt": "stuff"}) + head = _git_rev(tmp_path, "HEAD") + with 
pytest.raises(SystemExit) as exc: + cmd_check_modifications(_ns(base=base, head=head)) + assert exc.value.code == 0 + + def test_modification_fails(self, tmp_path): + os.chdir(str(tmp_path)) + _init_git_repo(tmp_path) + _git_commit(tmp_path, "initial", files={ + ".releases/v1.0.0.yml": "commit: a\nreason: test\n" + }) + base = _git_rev(tmp_path, "HEAD") + _git_commit(tmp_path, "modify release", files={ + ".releases/v1.0.0.yml": "commit: b\nreason: modified\n" + }) + head = _git_rev(tmp_path, "HEAD") + with pytest.raises(SystemExit) as exc: + cmd_check_modifications(_ns(base=base, head=head)) + assert exc.value.code == 1 + + def test_deletion_fails(self, tmp_path): + os.chdir(str(tmp_path)) + _init_git_repo(tmp_path) + _git_commit(tmp_path, "initial", files={ + ".releases/v1.0.0.yml": "commit: a\nreason: test\n" + }) + base = _git_rev(tmp_path, "HEAD") + (tmp_path / ".releases" / "v1.0.0.yml").unlink() + subprocess.run(["git", "rm", ".releases/v1.0.0.yml"], cwd=str(tmp_path), capture_output=True) + _git_commit(tmp_path, "delete release", files={}) + head = _git_rev(tmp_path, "HEAD") + with pytest.raises(SystemExit) as exc: + cmd_check_modifications(_ns(base=base, head=head)) + assert exc.value.code == 1 + + +# ── check-commit-exists, check-tag-free ────────────────────────────────────── + +class TestCheckCommitExists: + def test_commit_exists(self, capsys): + with patch("release.gh_api") as mock_gh: + mock_gh.return_value = {"sha": "abc123"} + with pytest.raises(SystemExit) as exc: + cmd_check_commit_exists(_ns(sha="abc123")) + assert exc.value.code == 0 + + def test_commit_missing(self, capsys): + with patch("release.gh_api") as mock_gh: + mock_gh.side_effect = GhApiError("not found") + with pytest.raises(SystemExit) as exc: + cmd_check_commit_exists(_ns(sha="abc123")) + assert exc.value.code == 1 + + +class TestCheckTagFree: + def test_tag_free(self, capsys): + with patch("release.gh_api") as mock_gh: + mock_gh.side_effect = GhApiError("not found") + with 
pytest.raises(SystemExit) as exc: + cmd_check_tag_free(_ns(tag="v1.2.3")) + assert exc.value.code == 0 + + def test_tag_exists(self, capsys): + with patch("release.gh_api") as mock_gh: + mock_gh.return_value = {"ref": "refs/tags/v1.2.3"} + with pytest.raises(SystemExit) as exc: + cmd_check_tag_free(_ns(tag="v1.2.3")) + assert exc.value.code == 1 + + +# ── create-tag ─────────────────────────────────────────────────────────────── + +class TestCreateTag: + def test_creates_tag_successfully(self, capsys): + with patch("release.gh_api") as mock_gh: + mock_gh.side_effect = [ + {"sha": "tag_obj_sha_123"}, # POST /git/tags + {}, # POST /git/refs + ] + with pytest.raises(SystemExit) as exc: + cmd_create_tag(_ns(tag="v1.2.3", commit="abc123")) + assert exc.value.code == 0 + assert "✅" in capsys.readouterr().out + + def test_fails_when_tag_object_has_no_sha(self, capsys): + with patch("release.gh_api") as mock_gh: + mock_gh.return_value = {} # no sha field + with pytest.raises(SystemExit) as exc: + cmd_create_tag(_ns(tag="v1.2.3", commit="abc123")) + assert exc.value.code == 1 + + +# ── _semver_key ────────────────────────────────────────────────────────────── + +class TestSemverKey: + def test_normal(self): + assert _semver_key("v1.2.3") == (1, 2, 3, float("inf")) + assert _semver_key("v10.20.30") == (10, 20, 30, float("inf")) + + def test_rc(self): + key = _semver_key("v1.2.3-rc4") + assert key == (1, 2, 3, 4) + + def test_rc_higher_than_normal(self): + """RC versions sort before the full release of the same semver.""" + rc = _semver_key("v1.2.3-rc4") + full = _semver_key("v1.2.3") + assert rc < full # rc4's 4 < inf + + def test_sort_order(self): + tags = ["v2.0.0", "v1.10.0", "v1.2.3-rc4", "v1.2.3", "v1.2.3-rc1"] + sorted_tags = sorted(tags, key=_semver_key) + assert sorted_tags == [ + "v1.2.3-rc1", + "v1.2.3-rc4", + "v1.2.3", + "v1.10.0", + "v2.0.0", + ] + + def test_rejects_non_strict(self): + """_semver_key fails loudly on tags that don't match SEMVER_RE.""" + for bad 
in ["v0.7.0-rc.1", "v0.9.2-rc-dev", "v2.0.0-rc2-1", + "v01.02.03", "1.2.3", "garbage"]: + with pytest.raises(ValueError): + _semver_key(bad) + + +# ── helpers ────────────────────────────────────────────────────────────────── + +def _ns(**kwargs): + """Build a simple argparse.Namespace stand-in.""" + from types import SimpleNamespace + return SimpleNamespace(**kwargs) + + +# git helpers for tmp-dir based tests + +def _init_git_repo(path: Path) -> None: + subprocess.run(["git", "init"], cwd=str(path), capture_output=True) + subprocess.run( + ["git", "config", "user.email", "test@test.com"], + cwd=str(path), capture_output=True, + ) + subprocess.run( + ["git", "config", "user.name", "Test"], + cwd=str(path), capture_output=True, + ) + + +def _git_commit(path: Path, msg: str, files: dict[str, str]) -> None: + for relpath, content in files.items(): + full = path / relpath + full.parent.mkdir(parents=True, exist_ok=True) + full.write_text(content) + subprocess.run(["git", "add", relpath], cwd=str(path), capture_output=True) + subprocess.run(["git", "commit", "-m", msg], cwd=str(path), capture_output=True) + + +def _git_rev(path: Path, ref: str) -> str: + r = subprocess.run( + ["git", "rev-parse", ref], + cwd=str(path), capture_output=True, text=True, + ) + return r.stdout.strip() + + diff --git a/.github/workflows/security-audit.yml b/.github/workflows/security-audit.yml index 2a40d1ac..60bc3ea9 100644 --- a/.github/workflows/security-audit.yml +++ b/.github/workflows/security-audit.yml @@ -18,7 +18,7 @@ jobs: security_audit: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: actions-rs/audit-check@v1.2.0 with: token: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/test-docs.yml b/.github/workflows/test-docs.yml index c4d18bdf..6cb3e4d9 100644 --- a/.github/workflows/test-docs.yml +++ b/.github/workflows/test-docs.yml @@ -12,7 +12,7 @@ jobs: name: Test deployment runs-on: ubuntu-latest 
steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 0 diff --git a/.github/workflows/validate-release-request.yml b/.github/workflows/validate-release-request.yml new file mode 100644 index 00000000..f0e8c0fb --- /dev/null +++ b/.github/workflows/validate-release-request.yml @@ -0,0 +1,33 @@ +name: Validate Release Request +on: + pull_request: + paths: ['.releases/**'] + +permissions: + contents: read + pull-requests: read + +jobs: + validate: + name: validate-release-request + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + + - uses: actions/setup-python@v6 + with: + python-version: '3.x' + + - name: Install Python deps + run: pip install pyyaml + + - name: Validate release request + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPO: ${{ github.repository }} + BASE_SHA: ${{ github.event.pull_request.base.sha }} + HEAD_SHA: ${{ github.event.pull_request.head.sha }} + run: python .github/workflows/release/release.py validate-pr diff --git a/.gitignore b/.gitignore index e48792b4..000e6573 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,8 @@ targets.json .idea/ logs .vscode/ + +# Python (release scripts under .github/workflows/release/) +__pycache__/ +*.pyc +.pytest_cache/ diff --git a/.releases/.gitkeep b/.releases/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/.releases/README.md b/.releases/README.md new file mode 100644 index 00000000..b075bce9 --- /dev/null +++ b/.releases/README.md @@ -0,0 +1,94 @@ +# Release Requests + +This directory contains immutable release-request files. Merging one on main requests a release. + +## Filing a release request + +1. Pick the commit SHA to release. +2. Create `.releases/<tag>.yml` where the file name is the exact tag to create. +3. Add: + +```yaml +commit: <40-character SHA> +reason: "<short reason for the release>" +``` + +4. In the same PR: + - update `CHANGELOG.md` + - bump the root `Cargo.toml` workspace version to `<next version>-dev` +5. 
Get approvals and merge the PR to main. + +## Rules + +- Full release: `v1.2.3.yml` +- Pre-release: `v1.2.3-rc1.yml`, `v1.2.3-rc2.yml`, etc. +- Must start with `v` +- Must be strict semver, optionally with `-rcN` where N >= 1 +- Must use `.yml` + +## Constraints enforced by CI + +- Exactly one release-request file may be added per PR +- Existing release-request files cannot be modified or deleted +- The referenced commit must exist in the repository +- The tag must not already exist +- The referenced commit may be on main or on an off-main hotfix branch + +## Hotfix releases + +A release commit does not need to be on main. + +Typical flow: +1. Branch from the last release tag: `git checkout -b fix/<name> vX.Y.Z` +2. Land fixes on that branch +3. Open a PR on main adding `.releases/vA.B.C.yml` that points at the hotfix branch tip commit +4. Merge the release-request PR on main +5. After the release ships, reconcile the hotfix branch back into main separately + +## After merge + +1. `release-gate.yml` re-validates the request and creates the signed tag via the GitHub App +2. `release.yml` resolves the commit from the tag, builds binaries, pushes Docker images, signs artifacts, and drafts the GitHub Release +3. GHCR `:latest` moves only if the new tag is the highest non-RC version + +## Operational note + +Release-request files are immutable after merge. If a release attempt is botched, use the next version number and explain the gap in the changelog if needed. + +## Downloading and verifying release assets + +Release assets are published per binary, not as a single generic `commit-boost-...` tarball. 
+ +Examples: +- `commit-boost-cli-vX.Y.Z-linux_x86-64.tar.gz` +- `commit-boost-pbs-vX.Y.Z-linux_x86-64.tar.gz` +- `commit-boost-signer-vX.Y.Z-linux_x86-64.tar.gz` + +Each tarball has a matching Sigstore bundle: +- `...tar.gz.sigstore.json` + +Example verification flow: + +```bash +export REPO=Commit-Boost/commit-boost-client +export VERSION=vX.Y.Z +export ARCH=linux_x86-64 +export BIN=commit-boost-pbs + +curl -L \ + -o "$BIN-$VERSION-$ARCH.tar.gz" \ + "https://github.com/$REPO/releases/download/$VERSION/$BIN-$VERSION-$ARCH.tar.gz" + +curl -L \ + -o "$BIN-$VERSION-$ARCH.tar.gz.sigstore.json" \ + "https://github.com/$REPO/releases/download/$VERSION/$BIN-$VERSION-$ARCH.tar.gz.sigstore.json" + +cosign verify-blob \ + "$BIN-$VERSION-$ARCH.tar.gz" \ + --bundle "$BIN-$VERSION-$ARCH.tar.gz.sigstore.json" \ + --certificate-oidc-issuer="https://token.actions.githubusercontent.com" \ + --certificate-identity="https://github.com/Commit-Boost/commit-boost-client/.github/workflows/release.yml@refs/heads/main" +``` + +To verify assets from a fork, replace `REPO` with the fork path, for example: +- `<fork-owner>/commit-boost-client` diff --git a/README.md b/README.md index c28c28b5..7d43a20d 100644 --- a/README.md +++ b/README.md @@ -28,6 +28,45 @@ Commit-Boost is a modular sidecar that allows Ethereum validators to opt-in to d ## Audit Commit-Boost received an audit from [Sigma Prime](https://sigmaprime.io/). Find the report [here](/audit/Sigma_Prime_Commit_Boost_Client_Security_Assessment_Report_v2_0.pdf). +## Verifying release artifacts + +All release binaries are signed using [Sigstore cosign](https://docs.sigstore.dev/cosign/overview/). You can verify that a binary was built by the official Commit-Boost CI pipeline from the tagged commit of any release. 
+ +### Prerequisites + +Install cosign: [cosign installation guide](https://docs.sigstore.dev/cosign/system_config/installation/) + +### Verify a binary + +```bash +# Set the release version and your target architecture +# Architecture options: darwin_arm64, linux_arm64, linux_x86-64 +export REPO=Commit-Boost/commit-boost-client +export VERSION=vX.Y.Z +export ARCH=linux_x86-64 +export BIN=commit-boost-pbs + +# Download the binary tarball and its signature bundle +curl -L \ + -o "$BIN-$VERSION-$ARCH.tar.gz" \ + "https://github.com/$REPO/releases/download/$VERSION/$BIN-$VERSION-$ARCH.tar.gz" + +curl -L \ + -o "$BIN-$VERSION-$ARCH.tar.gz.sigstore.json" \ + "https://github.com/$REPO/releases/download/$VERSION/$BIN-$VERSION-$ARCH.tar.gz.sigstore.json" + +# Verify the binary was signed by the official CI pipeline +cosign verify-blob \ + "$BIN-$VERSION-$ARCH.tar.gz" \ + --bundle "$BIN-$VERSION-$ARCH.tar.gz.sigstore.json" \ + --certificate-oidc-issuer="https://token.actions.githubusercontent.com" \ + --certificate-identity="https://github.com/Commit-Boost/commit-boost-client/.github/workflows/release.yml@refs/heads/main" +``` + +A successful verification prints `Verified OK`. If the binary was modified after being built by CI, verification will fail. + +The `.sigstore.json` bundle for each binary is attached to the release alongside the tarball itself. + ## Acknowledgements - [MEV boost](https://github.com/flashbots/mev-boost) - [Reth](https://github.com/paradigmxyz/reth)