diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 8ed36371b..8c62d66f3 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -139,15 +139,21 @@ jobs: # On Windows, Git Bash's /usr/bin/link.exe shadows MSVC's link.exe. # Create .cargo/config.toml with the explicit MSVC linker path so cargo # uses the correct linker inside cibuildwheel (which runs on the host). + # Must pick the NEWEST installed MSVC toolset to match VsDevCmd.bat's + # LIB/INCLUDE env (see scripts/ci/setup-msvc.ps1 for the same rationale); + # picking the lexically-first link.exe selects the OLDEST toolset and + # produces LNK1181 against the newer SDK's LIB paths. - name: Configure MSVC linker for Cargo (Windows) if: runner.os == 'Windows' shell: pwsh run: | $vsWhere = "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" $vsPath = & $vsWhere -latest -property installationPath - $linkPath = Get-ChildItem -Path "$vsPath\VC\Tools\MSVC" -Recurse -Filter "link.exe" | - Where-Object { $_.FullName -like "*\bin\Hostx64\x64\*" } | - Select-Object -First 1 -ExpandProperty FullName + $latestMsvcDir = Get-ChildItem -Path "$vsPath\VC\Tools\MSVC" -Directory | + Sort-Object { try { [version]$_.Name } catch { [version]"0.0" } } -Descending | + Select-Object -First 1 -ExpandProperty FullName + $linkPath = if ($latestMsvcDir) { Join-Path $latestMsvcDir "bin\Hostx64\x64\link.exe" } else { $null } + if ($linkPath -and -not (Test-Path $linkPath)) { $linkPath = $null } if ($linkPath) { $escapedPath = $linkPath.Replace('\', '/') @@ -195,7 +201,7 @@ jobs: LLVM_SYS_140_PREFIX=$HOME/.pecos/deps/llvm-14 CMAKE=$HOME/.pecos/deps/cmake-${{ env.PECOS_CMAKE_VERSION }}/bin/cmake CUDA_PATH=/usr/local/cuda-12.6 - MATURIN_PEP517_ARGS=--features=mwpf + MATURIN_PEP517_ARGS=--features=extension-module,mwpf CIBW_BEFORE_ALL_LINUX: | curl -sSf https://sh.rustup.rs | sh -s -- -y source $HOME/.cargo/env @@ -228,7 +234,7 @@ jobs: 
CMAKE=$HOME/.pecos/deps/cmake-${{ env.PECOS_CMAKE_VERSION }}/CMake.app/Contents/bin/cmake MACOSX_DEPLOYMENT_TARGET=13.2 SDKROOT=$(xcrun --show-sdk-path) - MATURIN_PEP517_ARGS=--features=mwpf + MATURIN_PEP517_ARGS=--features=extension-module,mwpf CIBW_BEFORE_ALL_MACOS: | curl -sSf https://sh.rustup.rs | sh -s -- -y source $HOME/.cargo/env @@ -248,7 +254,7 @@ jobs: PATH="C:\\Users\\runneradmin\\.pecos\\deps\\llvm-14\\bin;C:\\Users\\runneradmin\\.pecos\\deps\\cmake-${{ env.PECOS_CMAKE_VERSION }}\\bin;$PATH" LLVM_SYS_140_PREFIX="C:\\Users\\runneradmin\\.pecos\\deps\\llvm-14" CMAKE="C:\\Users\\runneradmin\\.pecos\\deps\\cmake-${{ env.PECOS_CMAKE_VERSION }}\\bin\\cmake.exe" - MATURIN_PEP517_ARGS=--features=mwpf + MATURIN_PEP517_ARGS=--features=extension-module,mwpf CIBW_BEFORE_ALL_WINDOWS: > echo "=== Installing LLVM using pecos ===" && rustup update && diff --git a/.github/workflows/python-test.yml b/.github/workflows/python-test.yml index 8829d1b6a..1554a9a0b 100644 --- a/.github/workflows/python-test.yml +++ b/.github/workflows/python-test.yml @@ -78,7 +78,11 @@ jobs: - name: Set up Visual Studio environment on Windows if: runner.os == 'Windows' shell: pwsh - run: ./scripts/ci/setup-msvc.ps1 -Arch x64 -HostArch x64 + # -NoPinLinker: don't pin the linker (so rustc auto-detects MSVC and sets + # up LIB/INCLUDE itself) and instead export PECOS_MSVC_HOST_BIN, which the + # just recipes prepend to PATH ahead of git's /usr/bin so rustc's + # PATH-based linker lookup finds MSVC's link.exe, not GNU coreutils' link. + run: ./scripts/ci/setup-msvc.ps1 -Arch x64 -HostArch x64 -NoPinLinker - name: Install the latest version of uv uses: astral-sh/setup-uv@v7 @@ -127,56 +131,51 @@ jobs: path: ~/.pecos/deps/llvm-14 key: llvm-${{ env.LLVM_VERSION }}-${{ runner.os }}-${{ runner.arch }}-v2 - # Configure MSVC linker BEFORE any cargo build (Git's link.exe conflicts with MSVC's) - - name: Configure MSVC linker (Windows) + # No linker pin, no LIB/INCLUDE in .cargo/config.toml. 
setup-msvc.ps1 + # (-NoPinLinker) exported PECOS_MSVC_HOST_BIN; the just recipes prepend it + # to PATH so rustc's own vswhere MSVC detection finds the real link.exe + # (not git's /usr/bin/link) AND configures LIB/INCLUDE itself -- exactly + # like a correctly-configured local Windows build. Only the .cargo dir + # needs to exist for the LLVM_SYS_140_PREFIX written next. + - name: Prepare .cargo dir (Windows) if: runner.os == 'Windows' shell: pwsh - run: | - $vsWhere = "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" - $vsPath = & $vsWhere -latest -property installationPath - $linkPath = Get-ChildItem -Path "$vsPath\VC\Tools\MSVC" -Recurse -Filter "link.exe" | - Where-Object { $_.FullName -like "*\bin\Hostx64\x64\*" } | - Select-Object -First 1 -ExpandProperty FullName - if ($linkPath) { - New-Item -ItemType Directory -Force -Path .cargo | Out-Null - $escapedPath = $linkPath.Replace('\', '/') - "[target.x86_64-pc-windows-msvc]`nlinker = `"$escapedPath`"" | Out-File -FilePath ".cargo\config.toml" -Encoding UTF8 - } else { - Write-Error "Could not find MSVC link.exe" - exit 1 - } + run: New-Item -ItemType Directory -Force -Path .cargo | Out-Null - name: Ensure LLVM ${{ env.LLVM_VERSION }} run: just ci-env + # config.toml [env] (no `linker=` -- the PATH-prepend in the just recipes + # makes rustc find the correct MSVC link.exe, so the linker pin is gone). + # LIB/INCLUDE are still required: rustc finds the *linker* but does NOT + # populate its LIB env, and git-bash mangles the ambient LIB at the + # bash->native-cargo spawn boundary (proven: LNK1181 on kernel32.lib in + # build-selene even with the correct linker). cargo reads [env] from this + # file after it starts, bypassing the mangled shell env entirely -- this is + # the one irreducible remedy for that MSYS spawn-mangling given `just` + # mandates git-bash. LLVM_SYS_140_PREFIX is a single mangling-safe path + # inkwell needs. 
TOML literal (single-quoted) strings for the Windows + # paths; guarded so an empty value can't clobber via force = true. - name: Configure LLVM environment (Windows) if: runner.os == 'Windows' shell: pwsh run: | - # Rewrite .cargo/config.toml with both linker and LLVM config - # (`pecos env --github-actions` sets LLVM_SYS_140_PREFIX for following steps.) - $env:PECOS_LLVM = $env:LLVM_SYS_140_PREFIX - - # Rewrite .cargo/config.toml with both linker and LLVM config - # (pecos install llvm may have already written this, so overwrite cleanly) - $vsWhere = "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" - $vsPath = & $vsWhere -latest -property installationPath - $linkPath = Get-ChildItem -Path "$vsPath\VC\Tools\MSVC" -Recurse -Filter "link.exe" | - Where-Object { $_.FullName -like "*\bin\Hostx64\x64\*" } | - Select-Object -First 1 -ExpandProperty FullName - $escapedLinker = $linkPath.Replace('\', '/') $escapedLLVM = $env:LLVM_SYS_140_PREFIX.Replace('\', '/') + $libLine = if ($env:LIB) { "LIB = { value = '$($env:LIB)', force = true }`n" } else { "" } + $incLine = if ($env:INCLUDE) { "INCLUDE = { value = '$($env:INCLUDE)', force = true }`n" } else { "" } @" - [target.x86_64-pc-windows-msvc] - linker = "$escapedLinker" - [env] LLVM_SYS_140_PREFIX = { value = "$escapedLLVM", force = true } + $libLine$incLine "@ | Out-File -FilePath ".cargo\config.toml" -Encoding UTF8 - # Now install CLI (LLVM env is set, inkwell can find it) + # cargo install runs cargo directly (not via just), so prepend the MSVC + # bin here too so its build-script links find the right link.exe. + # No-op on non-Windows where PECOS_MSVC_HOST_BIN is unset. 
- name: Install PECOS CLI - run: cargo install --path crates/pecos-cli --force + run: | + if [ -n "${PECOS_MSVC_HOST_BIN:-}" ]; then export PATH="$(cygpath -u "$PECOS_MSVC_HOST_BIN"):$PATH"; fi + cargo install --path crates/pecos-cli --force # macOS: prevent Homebrew library path issues - name: Configure macOS environment @@ -199,6 +198,26 @@ jobs: export LIBRARY_PATH=/usr/lib just build-debug + # Two-level diagnostic to localize where the MSVC LIB is lost (the + # step-level bash sees a correct LIB, but cold build-script links inside + # `just` were failing on kernel32.lib). (1) step-level bash, (2) through the + # exact `just -> #!/usr/bin/env bash` chain build-selene uses. Comparing the + # two pinpoints whether `just`/the shebang layer drops or mangles LIB. + # Non-fatal on the through-just side so we still capture the build failure + # context; the step-level side fails fast on a clear regression. + - name: Diagnose MSVC LIB across the just chain (Windows) + if: runner.os == 'Windows' + run: | + echo "=== step-level bash ===" + echo "MSYS2_ENV_CONV_EXCL=${MSYS2_ENV_CONV_EXCL:-}" + echo "LIB=${LIB:-}" + case "${LIB:-}" in + *"Windows Kits"*"um"*) echo "OK: Windows SDK um lib dir present in step-level LIB" ;; + *) echo "ERROR: step-level LIB lacks a Windows SDK 'um' lib directory" >&2; exit 1 ;; + esac + echo "=== through just -> shebang-bash (build-selene's exact chain) ===" + just _win-msvc-env-debug || true + - name: Build PECOS if: runner.os != 'macOS' run: just build-debug diff --git a/Cargo.toml b/Cargo.toml index a93ee89e2..83c364b80 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -48,7 +48,12 @@ clap_complete = "4.5" cargo_metadata = "0.23" # --- Python bindings --- -pyo3 = { version = "0.28", features = ["extension-module"] } +# Note: `extension-module`/`abi3-py310`/`generate-import-lib` are intentionally +# NOT enabled at the workspace level. 
They tell pyo3 to skip linking libpython +# at build time, which is what we want when maturin builds the cdylib but is +# fatal for plain `cargo test`. Each pecos-rslib* crate gates them behind its +# own `extension-module` feature; maturin opts in via pyproject.toml. +pyo3 = "0.28" # --- C/C++ FFI & build --- bindgen = "0.72" @@ -238,9 +243,13 @@ inherits = "release" strip = false debug = 1 # line tables only — minimal size impact -# Native profile: release + CPU-specific optimizations -# Use with: cargo build --profile native -# Build scripts detect this via PROFILE=native env var and add --march=native for C++ code +# Native profile: release + CPU-specific optimizations. +# Use with: cargo build --profile native (or just native). +# Build scripts detect this via PROFILE=native env var and add --march=native for C++ code. +# Rust code gets -C target-cpu=native via RUSTFLAGS supplied by the caller (Justfile +# recipes, pecos python build --profile native, etc.). Setting it here as +# `profile.native.rustflags` would be cleaner but that field is still unstable in +# stable cargo as of 1.93. [profile.native] inherits = "release" diff --git a/Justfile b/Justfile index 8baa733da..4f75e97aa 100644 --- a/Justfile +++ b/Justfile @@ -55,6 +55,11 @@ setup-ci: # Ensure CI has a runtime-valid LLVM and export PECOS build env files [group('setup')] ci-env: + #!/usr/bin/env bash + set -euo pipefail + # See `build` for why; no-op when PECOS_MSVC_HOST_BIN is unset. Shebang + # body so the export persists across both pecos invocations. + if [ -n "${PECOS_MSVC_HOST_BIN:-}" ]; then export PATH="$(cygpath -u "$PECOS_MSVC_HOST_BIN"):$PATH"; fi {{pecos}} llvm ensure --managed --no-configure {{pecos}} env --github-actions @@ -68,15 +73,11 @@ doctor: fail() { echo " [!!] 
$1: $2"; PROBLEMS=$((PROBLEMS + 1)); } echo "LLVM 14:" - LLVM_DIR="" - for d in "$HOME/.pecos/deps/llvm-14" "$HOME/.pecos/deps/llvm"; do - [ -d "$d/bin" ] && LLVM_DIR="$d" && break - done - if [ -n "$LLVM_DIR" ]; then - VERSION=$("$LLVM_DIR/bin/llvm-config" --version 2>/dev/null || echo "unknown") + if LLVM_DIR=$({{pecos}} llvm find 2>/dev/null); then + VERSION=$("$LLVM_DIR/bin/llvm-config" --version 2>/dev/null || {{pecos}} llvm version 2>/dev/null | head -1 || echo "unknown") ok "installed" "$VERSION at $LLVM_DIR" else - fail "installed" "not found (run: pecos setup)" + fail "installed" "not found (run: just setup)" fi if [ -f .cargo/config.toml ] && grep -q "LLVM_SYS_140_PREFIX" .cargo/config.toml 2>/dev/null; then ok ".cargo/config.toml" "LLVM_SYS_140_PREFIX configured" @@ -155,24 +156,43 @@ list-deps: # Building # ============================================================================= -# Build PECOS (profile: debug, release, native) +# Build PECOS (profile: dev/debug, release, native) [group('build')] -build profile="debug": setup-quiet sync-deps build-selene +build profile="debug": (validate-profile "build" profile) setup-quiet sync-deps (build-selene profile) #!/usr/bin/env bash set -euo pipefail - {{pecos}} python build --profile {{profile}} - command -v julia >/dev/null 2>&1 && just julia-build {{profile}} || true - command -v go >/dev/null 2>&1 && just go-build {{profile}} || true + # Put MSVC's link.exe ahead of git's /usr/bin/link so rustc (unpinned) + # finds the right linker AND configures LIB/INCLUDE itself. No-op when + # PECOS_MSVC_HOST_BIN is unset (non-Windows / local / pinned workflows). 
+ if [ -n "${PECOS_MSVC_HOST_BIN:-}" ]; then export PATH="$(cygpath -u "$PECOS_MSVC_HOST_BIN"):$PATH"; fi + PROFILE="{{profile}}" + {{pecos}} python build --profile "$PROFILE" + if command -v julia >/dev/null 2>&1; then + just julia-build "$PROFILE" + fi + if command -v go >/dev/null 2>&1; then + just go-build "$PROFILE" + fi -# Build PECOS without dependency setup or sync (profile: debug, release, native) +# Build PECOS without dependency setup or sync (profile: dev/debug, release, native) [group('build')] -build-lite profile="debug": build-selene - {{pecos}} python build --profile {{profile}} +build-lite profile="debug": (validate-profile "build-lite" profile) (build-selene profile) + #!/usr/bin/env bash + set -euo pipefail + # See `build` for why; no-op when PECOS_MSVC_HOST_BIN is unset. + if [ -n "${PECOS_MSVC_HOST_BIN:-}" ]; then export PATH="$(cygpath -u "$PECOS_MSVC_HOST_BIN"):$PATH"; fi + PROFILE="{{profile}}" + {{pecos}} python build --profile "$PROFILE" -# Build PECOS with CUDA Python extras (profile: debug, release, native) +# Build PECOS with CUDA Python extras (profile: dev/debug, release, native) [group('build')] -build-cuda profile="debug": setup-quiet - {{pecos}} python build --profile {{profile}} --cuda +build-cuda profile="debug": (validate-profile "build-cuda" profile) setup-quiet + #!/usr/bin/env bash + set -euo pipefail + # See `build` for why; no-op when PECOS_MSVC_HOST_BIN is unset. 
+ if [ -n "${PECOS_MSVC_HOST_BIN:-}" ]; then export PATH="$(cygpath -u "$PECOS_MSVC_HOST_BIN"):$PATH"; fi + PROFILE="{{profile}}" + {{pecos}} python build --profile "$PROFILE" --cuda # ============================================================================= # Testing @@ -192,31 +212,31 @@ pytest *args: uv run pytest python/selene-plugins fi -# Run Rust tests (CUDA-aware; mode: debug or release) +# Run Rust tests (CUDA-aware; mode: dev/debug, release, native) [group('test')] -rstest mode="release": +rstest mode="release": (validate-test-mode "rstest" mode) #!/usr/bin/env bash set -euo pipefail - if [ "{{mode}}" = "release" ]; then - {{pecos}} rust test --release - else - {{pecos}} rust test - fi + # See `build` for why; no-op when PECOS_MSVC_HOST_BIN is unset. + if [ -n "${PECOS_MSVC_HOST_BIN:-}" ]; then export PATH="$(cygpath -u "$PECOS_MSVC_HOST_BIN"):$PATH"; fi + MODE="{{mode}}" + {{pecos}} rust test --profile "$MODE" -# Run all tests (Rust + Python + Julia + Go if available) +# Run all tests (Rust + Python + Julia + Go if available; mode: dev/debug, release, native) [group('test')] -test mode="release": (rstest mode) pytest +test mode="release": (validate-test-mode "test" mode) (rstest mode) pytest #!/usr/bin/env bash set -euo pipefail + MODE="{{mode}}" if command -v julia >/dev/null 2>&1; then echo "Julia detected, running Julia tests..." - just julia-test + just julia-test "$MODE" else echo "Julia not detected, skipping Julia tests" fi if command -v go >/dev/null 2>&1; then echo "Go detected, running Go tests..." - just go-test + just go-test "$MODE" else echo "Go not detected, skipping Go tests" fi @@ -227,9 +247,12 @@ test mode="release": (rstest mode) pytest # Fix formatting and linting issues (or: just lint check) [group('lint')] -lint mode="fix": python-workspace-check +lint mode="fix": (validate-lint-mode mode) python-workspace-check #!/usr/bin/env bash set -euo pipefail + # See `build` for why; no-op when PECOS_MSVC_HOST_BIN is unset. 
+ if [ -n "${PECOS_MSVC_HOST_BIN:-}" ]; then export PATH="$(cygpath -u "$PECOS_MSVC_HOST_BIN"):$PATH"; fi + MODE="{{mode}}" # Detect CUDA: only use --all-features when CUDA toolkit is available if command -v nvcc >/dev/null 2>&1 || [ -n "${CUDA_PATH:-}" ] || [ -d /usr/local/cuda ]; then CLIPPY_FEATURES="--all-features" @@ -239,7 +262,7 @@ lint mode="fix": python-workspace-check echo "(No CUDA -- linting with default features only)" fi - if [ "{{mode}}" = "check" ]; then + if [ "$MODE" = "check" ]; then echo "==> Checking Rust formatting..." cargo fmt --all -- --check echo "==> Running clippy..." @@ -302,19 +325,36 @@ fmt: # Run benchmarks (profile: release/native; features: optional; pattern: filter) [group('test')] -bench profile="release" features="" pattern="": +bench profile="release" features="" pattern="": (validate-bench-profile "bench" profile) #!/usr/bin/env bash set -euo pipefail - ARGS="bench -p benchmarks --bench benchmarks" - if [ "{{profile}}" = "native" ]; then - ARGS="$ARGS --profile=native" + PROFILE="{{profile}}" + FEATURES="{{features}}" + PATTERN="{{pattern}}" + case "$FEATURES" in + features=*) + VALUE="${FEATURES#features=}" + echo "Invalid features argument: $FEATURES" + echo "Just recipe parameters are positional. Use: just bench $PROFILE $VALUE" + exit 2 + ;; + esac + case "$PATTERN" in + pattern=*) + VALUE="${PATTERN#pattern=}" + echo "Invalid pattern argument: $PATTERN" + echo "Just recipe parameters are positional. Use: just bench $PROFILE '$FEATURES' '$VALUE'" + exit 2 + ;; + esac + ARGS=(bench -p benchmarks --bench benchmarks) + if [ "$PROFILE" = "native" ]; then + ARGS+=(--profile=native) export RUSTFLAGS="${RUSTFLAGS:-} -C target-cpu=native" - elif [ "{{profile}}" != "release" ]; then - echo "Unknown profile: {{profile}}. 
Use release or native."; exit 1 fi - if [ -n "{{features}}" ]; then ARGS="$ARGS --features={{features}}"; fi - if [ -n "{{pattern}}" ]; then ARGS="$ARGS -- {{pattern}}"; fi - cargo $ARGS + if [ -n "$FEATURES" ]; then ARGS+=(--features "$FEATURES"); fi + if [ -n "$PATTERN" ]; then ARGS+=(-- "$PATTERN"); fi + cargo "${ARGS[@]}" # ============================================================================= # Dev Workflows @@ -322,10 +362,11 @@ bench profile="release" features="" pattern="": # Dev cycle: build + test (lang: all, rust, python, julia, go) [group('dev')] -dev lang="all": +dev lang="all": (validate-dev-lang lang) #!/usr/bin/env bash set -euo pipefail - case "{{lang}}" in + DEV_LANG="{{lang}}" + case "$DEV_LANG" in all) just build just test debug @@ -346,7 +387,7 @@ dev lang="all": just go-test ;; *) - echo "Unknown language: {{lang}}. Use: all, rust, python, julia, go" + echo "Unknown language: $DEV_LANG. Use: all, rust, python, julia, go" exit 1 ;; esac @@ -355,10 +396,34 @@ dev lang="all": [group('dev')] check-all: clean (build "release") (test "release") (lint "check") -# Clean build artifacts (or: just clean cache/deps/all/dry-run) +# Clean build artifacts (or: just clean cache/deps/selene/all/dry-run; multiple OK, e.g. just clean selene deps) [group('clean')] clean *target: - uv run python scripts/clean.py {{ if target == "cache" { "--cache" } else if target == "deps" { "--deps" } else if target == "all" { "--all" } else if target == "dry-run" { "--dry-run" } else { "" } }} + #!/usr/bin/env bash + set -euo pipefail + TARGETS="{{target}}" + ARGS=() + if [ -n "$TARGETS" ]; then + for TARGET in $TARGETS; do + case "$TARGET" in + cache|deps|selene|all|dry-run) ARGS+=("--$TARGET") ;; + target=*) + VALUE="${TARGET#target=}" + echo "Invalid clean target argument: $TARGET" + echo "Just variadic arguments are positional. 
Use: just clean $VALUE" + exit 2 + ;; + *) + echo "Unknown clean target: $TARGET" + echo "Supported targets: cache, deps, selene, all, dry-run" + exit 2 + ;; + esac + done + fi + # macOS bash 3.2: ${arr[@]+"${arr[@]}"} expands to nothing when arr is empty/unset + # under `set -u` (which otherwise trips on empty @-expansion). + uv run python scripts/clean.py ${ARGS[@]+"${ARGS[@]}"} # ============================================================================= # Documentation @@ -366,7 +431,7 @@ clean *target: # Serve documentation locally (port: default 8000) [group('docs')] -docs port="8000": +docs port="8000": (validate-port port) uv run mkdocs serve -a "127.0.0.1:{{port}}" # Build documentation @@ -413,24 +478,48 @@ check-cuda: # Julia Bindings # ============================================================================= -# Build Julia FFI library (profile: debug, release, native; rustflags: optional) +# Build Julia FFI library (profile: dev/debug, release, native; rustflags: optional) [group('julia')] -julia-build profile="release" rustflags="": +julia-build profile="release" rustflags="": (validate-profile "julia-build" profile) #!/usr/bin/env bash set -euo pipefail - if [ -n "{{rustflags}}" ]; then - export RUSTFLAGS="${RUSTFLAGS:-} {{rustflags}}" + PROFILE="{{profile}}" + RUSTFLAGS_ARG="{{rustflags}}" + case "$RUSTFLAGS_ARG" in + rustflags=*) + VALUE="${RUSTFLAGS_ARG#rustflags=}" + echo "Invalid rustflags argument: $RUSTFLAGS_ARG" + echo "Just recipe parameters are positional. Use: just julia-build $PROFILE '$VALUE'" + exit 2 + ;; + esac + if [ -n "$RUSTFLAGS_ARG" ]; then + export RUSTFLAGS="${RUSTFLAGS:-} $RUSTFLAGS_ARG" + fi + # The native profile inherits release; -C target-cpu=native is injected here + # rather than via profile.native.rustflags (which is still unstable in cargo). 
+ if [ "$PROFILE" = "native" ]; then + export RUSTFLAGS="${RUSTFLAGS:-} -C target-cpu=native" fi - case "{{profile}}" in + case "$PROFILE" in native) cargo build --profile native -p pecos-julia-ffi ;; release) cargo build --release -p pecos-julia-ffi ;; dev|debug) cargo build -p pecos-julia-ffi ;; - *) echo "Unknown profile: {{profile}}"; exit 1 ;; + *) echo "Unknown profile: $PROFILE"; exit 1 ;; esac -# Run Julia tests +# Run Julia tests (profile: dev/debug, release, native) [group('julia')] -julia-test: (julia-build "release") +julia-test profile="release": (validate-profile "julia-test" profile) (julia-build profile) + #!/usr/bin/env bash + set -euo pipefail + PROFILE="{{profile}}" + case "$PROFILE" in + native) LIB_DIR="$(pwd)/target/native" ;; + release) LIB_DIR="$(pwd)/target/release" ;; + dev|debug) LIB_DIR="$(pwd)/target/debug" ;; + esac + export PECOS_JULIA_LIB_DIR="$LIB_DIR" cd julia/PECOS.jl && julia --project=. -e 'using Pkg; Pkg.instantiate(); include("test/runtests.jl")' # Format Julia code @@ -458,27 +547,48 @@ julia-lint: (julia-build "release") # Go Bindings # ============================================================================= -# Build Go FFI library (profile: debug, release, native; rustflags: optional) +# Build Go FFI library (profile: dev/debug, release, native; rustflags: optional) [group('go')] -go-build profile="release" rustflags="": +go-build profile="release" rustflags="": (validate-profile "go-build" profile) #!/usr/bin/env bash set -euo pipefail - if [ -n "{{rustflags}}" ]; then - export RUSTFLAGS="${RUSTFLAGS:-} {{rustflags}}" + PROFILE="{{profile}}" + RUSTFLAGS_ARG="{{rustflags}}" + case "$RUSTFLAGS_ARG" in + rustflags=*) + VALUE="${RUSTFLAGS_ARG#rustflags=}" + echo "Invalid rustflags argument: $RUSTFLAGS_ARG" + echo "Just recipe parameters are positional. 
Use: just go-build $PROFILE '$VALUE'" + exit 2 + ;; + esac + if [ -n "$RUSTFLAGS_ARG" ]; then + export RUSTFLAGS="${RUSTFLAGS:-} $RUSTFLAGS_ARG" + fi + # See julia-build for why -C target-cpu=native is injected here. + if [ "$PROFILE" = "native" ]; then + export RUSTFLAGS="${RUSTFLAGS:-} -C target-cpu=native" fi - case "{{profile}}" in + case "$PROFILE" in native) cargo build --profile native -p pecos-go-ffi ;; release) cargo build --release -p pecos-go-ffi ;; dev|debug) cargo build -p pecos-go-ffi ;; - *) echo "Unknown profile: {{profile}}"; exit 1 ;; + *) echo "Unknown profile: $PROFILE"; exit 1 ;; esac -# Run Go tests +# Run Go tests (profile: dev/debug, release, native) [group('go')] -go-test: (go-build "release") +go-test profile="release": (validate-profile "go-test" profile) (go-build profile) #!/usr/bin/env bash set -euo pipefail - LIB_DIR="$(pwd)/target/release" + PROFILE="{{profile}}" + case "$PROFILE" in + native) LIB_DIR="$(pwd)/target/native" ;; + release) LIB_DIR="$(pwd)/target/release" ;; + dev|debug) LIB_DIR="$(pwd)/target/debug" ;; + esac + export CGO_LDFLAGS="-L$LIB_DIR ${CGO_LDFLAGS:-}" + export LIBRARY_PATH="$LIB_DIR:${LIBRARY_PATH:-}" export LD_LIBRARY_PATH="$LIB_DIR:${LD_LIBRARY_PATH:-}" export DYLD_LIBRARY_PATH="$LIB_DIR:${DYLD_LIBRARY_PATH:-}" cd go/pecos && go test -v @@ -493,12 +603,19 @@ go-fmt: go-fmt-check: @test -z "$(gofmt -l go/pecos)" || (gofmt -l go/pecos && exit 1) -# Run Go linting with go vet +# Run Go linting with go vet (profile: dev/debug, release, native) [group('go')] -go-lint: (go-build "release") +go-lint profile="release": (validate-profile "go-lint" profile) (go-build profile) #!/usr/bin/env bash set -euo pipefail - LIB_DIR="$(pwd)/target/release" + PROFILE="{{profile}}" + case "$PROFILE" in + native) LIB_DIR="$(pwd)/target/native" ;; + release) LIB_DIR="$(pwd)/target/release" ;; + dev|debug) LIB_DIR="$(pwd)/target/debug" ;; + esac + export CGO_LDFLAGS="-L$LIB_DIR ${CGO_LDFLAGS:-}" + export 
LIBRARY_PATH="$LIB_DIR:${LIBRARY_PATH:-}" export LD_LIBRARY_PATH="$LIB_DIR:${LD_LIBRARY_PATH:-}" export DYLD_LIBRARY_PATH="$LIB_DIR:${DYLD_LIBRARY_PATH:-}" cd go/pecos && go vet ./... @@ -530,8 +647,135 @@ pytest-slow: # Private / Internal Recipes # ============================================================================= +[private] +validate-profile recipe profile: + #!/usr/bin/env bash + set -euo pipefail + RECIPE="{{recipe}}" + PROFILE="{{profile}}" + case "$PROFILE" in + dev|debug|release|native) ;; + profile=*) + VALUE="${PROFILE#profile=}" + echo "Invalid profile argument: $PROFILE" + echo "Just recipe parameters are positional. Use: just $RECIPE $VALUE" + exit 2 + ;; + *) + echo "Unknown profile: $PROFILE" + echo "Supported profiles: dev, debug, release, native" + exit 2 + ;; + esac + +[private] +validate-test-mode recipe mode: + #!/usr/bin/env bash + set -euo pipefail + RECIPE="{{recipe}}" + MODE="{{mode}}" + case "$MODE" in + dev|debug|release|native) ;; + mode=*) + VALUE="${MODE#mode=}" + echo "Invalid mode argument: $MODE" + echo "Just recipe parameters are positional. Use: just $RECIPE $VALUE" + exit 2 + ;; + *) + echo "Unknown test mode: $MODE" + echo "Supported modes: dev, debug, release, native" + exit 2 + ;; + esac + +[private] +validate-lint-mode mode: + #!/usr/bin/env bash + set -euo pipefail + MODE="{{mode}}" + case "$MODE" in + fix|check) ;; + mode=*) + VALUE="${MODE#mode=}" + echo "Invalid mode argument: $MODE" + echo "Just recipe parameters are positional. Use: just lint $VALUE" + exit 2 + ;; + *) + echo "Unknown lint mode: $MODE" + echo "Supported modes: fix, check" + exit 2 + ;; + esac + +[private] +validate-bench-profile recipe profile: + #!/usr/bin/env bash + set -euo pipefail + RECIPE="{{recipe}}" + PROFILE="{{profile}}" + case "$PROFILE" in + release|native) ;; + profile=*) + VALUE="${PROFILE#profile=}" + echo "Invalid benchmark profile argument: $PROFILE" + echo "Just recipe parameters are positional. 
Use: just $RECIPE $VALUE" + exit 2 + ;; + *) + echo "Unknown benchmark profile: $PROFILE" + echo "Supported benchmark profiles: release, native" + exit 2 + ;; + esac + +[private] +validate-dev-lang lang: + #!/usr/bin/env bash + set -euo pipefail + DEV_LANG="{{lang}}" + case "$DEV_LANG" in + all|rust|python|julia|go) ;; + lang=*) + VALUE="${DEV_LANG#lang=}" + echo "Invalid language argument: $DEV_LANG" + echo "Just recipe parameters are positional. Use: just dev $VALUE" + exit 2 + ;; + *) + echo "Unknown language: $DEV_LANG" + echo "Supported languages: all, rust, python, julia, go" + exit 2 + ;; + esac + +[private] +validate-port port: + #!/usr/bin/env bash + set -euo pipefail + PORT="{{port}}" + case "$PORT" in + port=*) + VALUE="${PORT#port=}" + echo "Invalid port argument: $PORT" + echo "Just recipe parameters are positional. Use: just docs $VALUE" + exit 2 + ;; + *) ;; + esac + if ! [[ "$PORT" =~ ^[0-9]+$ ]] || [ "$PORT" -lt 1 ] || [ "$PORT" -gt 65535 ]; then + echo "Invalid docs port: $PORT" + echo "Port must be an integer from 1 to 65535" + exit 2 + fi + [private] setup-quiet: + #!/usr/bin/env bash + set -euo pipefail + # See `build` for why; no-op when PECOS_MSVC_HOST_BIN is unset. + if [ -n "${PECOS_MSVC_HOST_BIN:-}" ]; then export PATH="$(cygpath -u "$PECOS_MSVC_HOST_BIN"):$PATH"; fi {{pecos}} setup --quiet # Sync Python deps (fast if already installed, skips maturin rebuilds) @@ -539,18 +783,76 @@ setup-quiet: sync-deps: #!/usr/bin/env bash set -euo pipefail + # See `build` for why; no-op when PECOS_MSVC_HOST_BIN is unset. uv sync + # triggers maturin -> cargo to (re)build the rslib crates. + if [ -n "${PECOS_MSVC_HOST_BIN:-}" ]; then export PATH="$(cygpath -u "$PECOS_MSVC_HOST_BIN"):$PATH"; fi # Quick check: ensure the packages used by the default dev/test lane are importable. # This catches newly added workspace members that an older .venv may be missing. 
if uv run --frozen python -c "import importlib.util, sys; required = ('pecos', 'pecos_rslib', 'pecos_selene_stab_vec', 'pecos_selene_stabilizer', 'pecos_selene_statevec', 'pecos_selene_stab_mps', 'pecos_selene_mast'); missing = [name for name in required if importlib.util.find_spec(name) is None]; sys.exit(1 if missing else 0)" 2>/dev/null; then exit 0 fi echo "Python deps incomplete, running uv sync..." - uv sync --project . --all-packages + SYNC_ARGS=(--project . --all-packages) + # Include CUDA Python packages (cupy, cuquantum, pytket-cutensornet) when + # the toolkit is installed AND an NVIDIA GPU is present. Pure Rust users + # and machines without a GPU skip this -- mirrors `pecos python build`. + if {{pecos}} cuda check -q 2>/dev/null && nvidia-smi -L 2>/dev/null | grep -q "^GPU "; then + echo "CUDA toolkit + NVIDIA GPU detected -- including CUDA Python packages" + SYNC_ARGS+=(--group cuda) + fi + uv sync "${SYNC_ARGS[@]}" + +# Diagnostic: dump the MSVC linker environment as seen *through the exact +# just -> #!/usr/bin/env bash shebang chain that build-selene uses*. The CI +# step-level bash sees a correct LIB, but cold build-script links inside this +# chain were failing to find kernel32.lib -- this pinpoints whether LIB / the +# resolved link.exe / cargo's env survive the chain. Windows-only; harmless +# elsewhere. 
+[private] +[windows] +_win-msvc-env-debug: + #!/usr/bin/env bash + set -euo pipefail + echo "=== _win-msvc-env-debug (inside just shebang-bash) ===" + echo "uname: $(uname -s 2>/dev/null || echo n/a)" + echo "BASH: ${BASH:-} BASH_VERSION=${BASH_VERSION:-}" + echo "MSYS2_ENV_CONV_EXCL=${MSYS2_ENV_CONV_EXCL:-}" + echo "CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER=${CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER:-}" + echo "which link: $(command -v link 2>/dev/null || echo '')" + echo "LIB=${LIB:-}" + echo "INCLUDE=${INCLUDE:-}" + case "${LIB:-}" in + *';'*) echo "LIB-form: Windows (semicolon-separated) -- good for native link.exe" ;; + *':'*'/'*) echo "LIB-form: POSIX-mangled (colon/forward-slash) -- native link.exe will choke" ;; + "") echo "LIB-form: EMPTY/UNSET -- env lost in the chain" ;; + *) echo "LIB-form: single-entry or unknown" ;; + esac + echo "--- .cargo/config.toml ---" + cat .cargo/config.toml 2>/dev/null || echo "" + echo "=== end _win-msvc-env-debug ===" [private] -build-selene: +build-selene profile="release": #!/usr/bin/env bash set -euo pipefail + # See `build` for why; no-op when PECOS_MSVC_HOST_BIN is unset. + if [ -n "${PECOS_MSVC_HOST_BIN:-}" ]; then export PATH="$(cygpath -u "$PECOS_MSVC_HOST_BIN"):$PATH"; fi + PROFILE="{{profile}}" + case "$PROFILE" in + native) CARGO_PROFILE_FLAGS=(--profile native); TARGET_DIR="target/native" ;; + release) CARGO_PROFILE_FLAGS=(--release); TARGET_DIR="target/release" ;; + dev|debug) CARGO_PROFILE_FLAGS=(); TARGET_DIR="target/debug" ;; + *) echo "build-selene: unknown profile $PROFILE" >&2; exit 2 ;; + esac + # See julia-build for why -C target-cpu=native is injected here. 
+ if [ "$PROFILE" = "native" ]; then + export RUSTFLAGS="${RUSTFLAGS:-} -C target-cpu=native" + fi + case "$(uname -s)" in + Darwin) LIB_PREFIX="lib"; LIB_EXT="dylib" ;; + MINGW*|MSYS*|CYGWIN*) LIB_PREFIX=""; LIB_EXT="dll" ;; + *) LIB_PREFIX="lib"; LIB_EXT="so" ;; + esac PLUGIN_DIRS=() for DIR in python/selene-plugins/pecos-selene-*/; do [ -d "$DIR" ] || continue @@ -558,38 +860,41 @@ build-selene: [ -f "$DIR/pyproject.toml" ] || continue PLUGIN_DIRS+=("$DIR") done - # Check if any selene source changed since last install + # Skip cargo if the cargo output for this profile already exists and no Rust + # source is newer. We compare against target// (cargo's output) rather + # than _dist/lib/ (the installed copy) so switching profile correctly triggers + # a rebuild even when sources are unchanged. + # macOS bash 3.2: ${arr[@]+"${arr[@]}"} expands to nothing when arr is + # empty/unset under `set -u` (which otherwise trips on empty @-expansion). NEEDS_BUILD=false - for DIR in "${PLUGIN_DIRS[@]}"; do + for DIR in ${PLUGIN_DIRS[@]+"${PLUGIN_DIRS[@]}"}; do PKG=$(basename "$DIR") - DEST="$DIR/python/${PKG//-/_}/_dist/lib/" - SO=$(find "$DEST" -name "*.so" 2>/dev/null | head -1 || true) - if [ -z "$SO" ]; then + LIB="$TARGET_DIR/${LIB_PREFIX}${PKG//-/_}.${LIB_EXT}" + if [ ! -f "$LIB" ]; then NEEDS_BUILD=true break fi - # Check if any Rust source is newer than the installed .so - NEWER=$(find "crates/" "$DIR" -name "*.rs" -newer "$SO" 2>/dev/null | head -1 || true) + NEWER=$(find "crates/" "$DIR" -name "*.rs" -newer "$LIB" 2>/dev/null | head -1 || true) if [ -n "$NEWER" ]; then NEEDS_BUILD=true break fi done - if [ "$NEEDS_BUILD" = false ]; then - echo "Selene plugins: up to date" - exit 0 - fi - echo "Building Selene plugins..." 
- CARGO_ARGS="" - for DIR in "${PLUGIN_DIRS[@]}"; do - CARGO_ARGS="$CARGO_ARGS -p $(basename "$DIR")" - done - if [ -n "$CARGO_ARGS" ]; then - cargo build --release $CARGO_ARGS + if [ "$NEEDS_BUILD" = true ]; then + echo "Building Selene plugins ($PROFILE)..." + CARGO_PKG_ARGS=() + for DIR in ${PLUGIN_DIRS[@]+"${PLUGIN_DIRS[@]}"}; do + CARGO_PKG_ARGS+=(-p "$(basename "$DIR")") + done + if [ ${#CARGO_PKG_ARGS[@]} -gt 0 ]; then + cargo build ${CARGO_PROFILE_FLAGS[@]+"${CARGO_PROFILE_FLAGS[@]}"} "${CARGO_PKG_ARGS[@]}" + fi + else + echo "Selene plugins: cargo output up to date ($PROFILE)" fi - echo "Copying libraries to Python packages..." - {{pecos}} selene install --profile release - echo "Selene plugins built and installed successfully" + echo "Installing Selene plugin libraries ($PROFILE)..." + {{pecos}} selene install --profile "$PROFILE" + echo "Selene plugins ready ($PROFILE)" # Convenience aliases @@ -597,6 +902,8 @@ build-selene: build-debug: (build "debug") [private] build-release: (build "release") +[private] +build-native: (build "native") # Regenerate all lockfiles from scratch [group('setup')] @@ -621,6 +928,7 @@ julia-examples: (julia-build "debug") #!/usr/bin/env bash set -euo pipefail if command -v julia >/dev/null 2>&1; then + export PECOS_JULIA_LIB_DIR="$(pwd)/target/debug" cd julia/PECOS.jl && julia --project=. examples/demo.jl cd julia/PECOS.jl && julia --project=. 
examples/basic_usage.jl else diff --git a/crates/pecos-cli/src/cli.rs b/crates/pecos-cli/src/cli.rs index b7154f28a..1dab58b1e 100644 --- a/crates/pecos-cli/src/cli.rs +++ b/crates/pecos-cli/src/cli.rs @@ -26,20 +26,26 @@ pub mod setup_cmd; pub mod uninstall_cmd; pub mod upgrade_cmd; -use clap::Subcommand; +use clap::{Subcommand, ValueEnum}; #[derive(Subcommand, Clone)] pub enum RustCommands { /// Run cargo check with CUDA-aware feature handling Check { - /// Also check FFI crates (pecos-rslib, pecos-julia-ffi, pecos-go-ffi) + /// Also check FFI crates (pecos-rslib, pecos-rslib-cuda, pecos-julia-ffi, + /// pecos-go-ffi). pecos-rslib-cuda transitively pulls in pecos-cuquantum, + /// whose Linux build script may download cuTensor over the network if it + /// isn't already cached in ~/.pecos/deps/; pecos-julia-ffi and pecos-go-ffi + /// also need Julia/Go installed. #[arg(long)] include_ffi: bool, }, /// Run cargo clippy with CUDA-aware feature handling Clippy { - /// Also check FFI crates (pecos-rslib, pecos-julia-ffi, pecos-go-ffi) + /// Also clippy FFI crates (pecos-rslib, pecos-rslib-cuda, pecos-julia-ffi, + /// pecos-go-ffi). Same external-toolchain caveats as `rust check + /// --include-ffi`. #[arg(long)] include_ffi: bool, @@ -50,31 +56,56 @@ pub enum RustCommands { /// Run cargo test with CUDA-aware feature handling Test { - /// Use release mode for tests - #[arg(long)] - release: bool, + /// Build profile for tests (dev/debug, release, native) + #[arg(long, value_enum, default_value = "dev")] + profile: BuildProfile, - /// Also test FFI crates + /// Also test FFI crates (pecos-rslib, pecos-rslib-cuda, pecos-julia-ffi, + /// pecos-go-ffi). Same external-toolchain caveats as `rust check + /// --include-ffi`. 
#[arg(long)] include_ffi: bool, }, } +#[derive(Clone, Copy, Debug, Eq, PartialEq, ValueEnum)] +pub enum BuildProfile { + Dev, + Debug, + Release, + Native, +} + +impl BuildProfile { + pub const fn as_str(self) -> &'static str { + match self { + Self::Dev => "dev", + Self::Debug => "debug", + Self::Release => "release", + Self::Native => "native", + } + } +} + #[derive(Subcommand, Clone)] pub enum PythonCommands { /// Build pecos-rslib and quantum-pecos via maturin Build { /// Build profile (dev/debug, release, native) - #[arg(long, default_value = "dev")] - profile: String, + #[arg(long, value_enum, default_value = "dev")] + profile: BuildProfile, /// Additional RUSTFLAGS #[arg(long)] rustflags: Option, - /// Build with CUDA support - #[arg(long)] + /// Force CUDA support on (overrides auto-detection) + #[arg(long, conflicts_with = "no_cuda")] cuda: bool, + + /// Force CUDA support off (overrides auto-detection) + #[arg(long = "no-cuda")] + no_cuda: bool, }, } diff --git a/crates/pecos-cli/src/cli/cuda_cmd.rs b/crates/pecos-cli/src/cli/cuda_cmd.rs index 0124be08f..64e7807df 100644 --- a/crates/pecos-cli/src/cli/cuda_cmd.rs +++ b/crates/pecos-cli/src/cli/cuda_cmd.rs @@ -1,11 +1,59 @@ //! Implementation of the `cuda` subcommand +use std::process::Command; +use std::sync::OnceLock; + use pecos_build::Result; use pecos_build::cuda::{ find_cuda, get_cuda_version, get_pecos_cuda_dir, is_valid_cuda_installation, }; use pecos_build::errors::Error; +/// Check whether an NVIDIA GPU is present and accessible via the driver. +/// +/// Used to decide whether to install Python CUDA packages (cupy, cuquantum, +/// pytket-cutensornet) alongside the toolkit. We only auto-include them when +/// both the toolkit and a usable GPU are present, so build/CI machines with +/// just the toolkit don't pull large GPU-only wheels they can't use. +/// +/// Note: distinct from `probe_gpu_availability()` in `rust_cmd.rs`, which +/// uses wgpu and matches any adapter (NVIDIA, AMD, Intel, Apple). 
cupy needs +/// NVIDIA specifically. +pub(super) fn has_nvidia_gpu() -> bool { + let Ok(output) = Command::new("nvidia-smi").arg("-L").output() else { + return false; + }; + output.status.success() + && String::from_utf8_lossy(&output.stdout) + .lines() + .any(|line| line.starts_with("GPU ")) +} + +/// Whether Python CUDA packages should be auto-included for this machine. +/// +/// True iff the CUDA toolkit is installed and an NVIDIA GPU is detected. +pub(super) fn should_install_cuda_python() -> bool { + find_cuda().is_some() && has_nvidia_gpu() +} + +/// Cheap proxy for "are the CUDA Python packages synced into the active +/// environment?". Spawns `uv run --frozen python -c "import cupy"`, so the +/// result is cached for the lifetime of this process — `pecos setup` calls it +/// from `has_missing_deps`, `print_status_summary`, and the install step itself, +/// and the cache keeps that to one subprocess instead of three. +/// +/// We probe `cupy` specifically because it's the package most likely to fail +/// at runtime when missing (others in the group degrade more silently). +pub(super) fn cuda_python_packages_installed() -> bool { + static CACHED: OnceLock = OnceLock::new(); + *CACHED.get_or_init(|| { + Command::new("uv") + .args(["run", "--frozen", "python", "-c", "import cupy"]) + .output() + .is_ok_and(|o| o.status.success()) + }) +} + /// Run the cuda subcommand pub fn run(command: super::CudaCommands) -> Result<()> { match command { @@ -169,11 +217,9 @@ fn run_validate(path: Option) -> Result<()> { } } -/// Install CUDA Python packages +/// CLI entry point for `pecos cuda setup-python`. Validates the toolkit is +/// present, then runs `uv sync --group cuda` and prints next-step hints. 
fn run_setup_python() -> Result<()> { - use std::process::Command; - - // First check if CUDA toolkit is available if find_cuda().is_none() { eprintln!("Error: CUDA toolkit not found."); eprintln!(); @@ -186,10 +232,22 @@ fn run_setup_python() -> Result<()> { )); } + install_cuda_python_packages()?; + println!(); + println!("Verify with:"); + println!(" python -c \"import cupy; print('cupy:', cupy.cuda.is_available())\""); + Ok(()) +} + +/// Run `uv sync --group cuda` to install Python CUDA packages. +/// +/// Reusable from other CLI commands (e.g. `pecos setup`) once they've already +/// confirmed the user wants this. Does NOT validate toolkit presence -- caller +/// is responsible for that check. +pub(super) fn install_cuda_python_packages() -> Result<()> { println!("Installing CUDA Python packages (cupy, cuquantum, pytket-cutensornet)..."); println!(); - // Run uv sync --group cuda to install CUDA packages via dependency group let status = Command::new("uv") .args(["sync", "--group", "cuda"]) .status(); @@ -198,9 +256,6 @@ fn run_setup_python() -> Result<()> { Ok(s) if s.success() => { println!(); println!("CUDA Python packages installed successfully."); - println!(); - println!("Verify with:"); - println!(" python -c \"import cupy; print('cupy:', cupy.cuda.is_available())\""); Ok(()) } Ok(_) => { diff --git a/crates/pecos-cli/src/cli/env_cmd.rs b/crates/pecos-cli/src/cli/env_cmd.rs index fb82be050..4ad0fc4a0 100644 --- a/crates/pecos-cli/src/cli/env_cmd.rs +++ b/crates/pecos-cli/src/cli/env_cmd.rs @@ -100,6 +100,25 @@ pub fn collect_env() -> BTreeMap { env.insert("CMAKE".into(), cmake_bin.display().to_string()); } + // PYO3_PYTHON — point pyo3's build script at a Python that ships libpython + // so `cargo test` on pecos-rslib* (which depend on pyo3) can link. macOS's + // Apple-shipped python3 (the one CommandLineTools provides) has no + // libpython, so the default PATH lookup fails to link. 
The repo's .venv + // (created by uv) does ship libpython, so prefer that. Respect an existing + // PYO3_PYTHON if the caller already set one. + if std::env::var_os("PYO3_PYTHON").is_none() + && let Some(repo_root) = pecos_build::llvm::find_cargo_project_root() + { + let venv_python = if cfg!(windows) { + repo_root.join(".venv").join("Scripts").join("python.exe") + } else { + repo_root.join(".venv").join("bin").join("python") + }; + if venv_python.exists() { + env.insert("PYO3_PYTHON".into(), venv_python.display().to_string()); + } + } + env } diff --git a/crates/pecos-cli/src/cli/python_cmd.rs b/crates/pecos-cli/src/cli/python_cmd.rs index 1aed7f118..23e521d77 100644 --- a/crates/pecos-cli/src/cli/python_cmd.rs +++ b/crates/pecos-cli/src/cli/python_cmd.rs @@ -13,10 +13,38 @@ pub fn run(command: &super::PythonCommands) -> Result<()> { profile, rustflags, cuda, - } => run_build(profile, rustflags.as_deref(), *cuda), + no_cuda, + } => { + let cuda_resolved = resolve_cuda_choice(*cuda, *no_cuda); + run_build(profile.as_str(), rustflags.as_deref(), cuda_resolved) + } } } +/// Decide whether to install CUDA Python packages for this build. +/// +/// Resolution order: +/// - `--cuda` -> always on (caller knows what they want) +/// - `--no-cuda` -> always off (caller opts out) +/// - neither -> auto-detect: include CUDA Python packages when both the +/// toolkit and an NVIDIA GPU are present, otherwise skip +fn resolve_cuda_choice(cuda: bool, no_cuda: bool) -> bool { + if cuda { + return true; + } + if no_cuda { + return false; + } + let detected = super::cuda_cmd::should_install_cuda_python(); + if detected { + println!( + "CUDA toolkit + NVIDIA GPU detected -- including CUDA Python packages \ + (cupy, cuquantum, pytket-cutensornet). Pass --no-cuda to skip." 
+ ); + } + detected +} + /// Get the repository root fn get_repo_root() -> Result { let mut current = std::env::current_dir()?; @@ -68,10 +96,28 @@ fn run_build(profile: &str, rustflags: Option<&str>, cuda: bool) -> Result<()> { let repo_root = get_repo_root()?; - // Determine maturin release flag - let maturin_release = matches!(profile, "release" | "native"); + // Map our profile name to maturin's cargo-profile flag. `dev`/`debug` use + // cargo's default dev profile (no flag), `release` uses --release, `native` + // uses --profile native so artifacts land in target/native/. Routing native + // through --profile native (rather than --release with target-cpu RUSTFLAGS) + // also lets the C++ build.rs files in pecos-pymatching/-chromobius/-tesseract + // detect "native" via OUT_DIR and add -march=native to their C++ compilation. + let cargo_profile_flag: &[&str] = match profile { + "release" => &["--release"], + "native" => &["--profile", "native"], + "dev" | "debug" => &[], + other => { + return Err(Error::Config(format!( + "Unknown profile: {other} (expected dev, debug, release, or native)" + ))); + } + }; - // Set RUSTFLAGS if provided or for native profile + // Build up RUSTFLAGS. For native we inject -C target-cpu=native because + // profile.native.rustflags in Cargo.toml is still gated on nightly; other + // callers (Justfile go-build/julia-build/build-selene/bench) inject the + // same flag so the resulting artifacts are consistent regardless of entry + // point. 
let mut flags = std::env::var("RUSTFLAGS").unwrap_or_default(); if profile == "native" { if !flags.is_empty() { @@ -145,11 +191,15 @@ fn run_build(profile: &str, rustflags: Option<&str>, cuda: bool) -> Result<()> { let maturin = venv_bin.join("maturin"); let mut cmd = Command::new(&maturin); cmd.args(["develop", "--uv"]); - if maturin_release { - cmd.arg("--release"); - } + cmd.args(cargo_profile_flag); + // Maturin's CLI --features REPLACES (not merges with) the features list + // in pyproject.toml's [tool.maturin], so any time we pass extra features + // we must also pass `extension-module` -- otherwise the cdylib loses + // pyo3's extension-module + abi3 settings and the resulting wheel either + // links libpython directly (wrong) or fails entirely on machines without + // a linkable libpython. The same applies to CI's MATURIN_PEP517_ARGS. if mwpf_enabled && crate_name == "pecos-rslib" { - cmd.args(["--features", "mwpf"]); + cmd.args(["--features", "extension-module,mwpf"]); } cmd.current_dir(&crate_dir); // On macOS, add rpath for system libc++ and clean Homebrew paths @@ -231,10 +281,10 @@ fn run_build(profile: &str, rustflags: Option<&str>, cuda: bool) -> Result<()> { } fn cargo_profile_dir(profile: &str) -> &'static str { - if matches!(profile, "release" | "native") { - "release" - } else { - "debug" + match profile { + "release" => "release", + "native" => "native", + _ => "debug", } } @@ -317,10 +367,11 @@ mod tests { use super::*; #[test] - fn cargo_profile_dir_maps_native_to_release() { + fn cargo_profile_dir_matches_cargos_target_subdir() { + assert_eq!(cargo_profile_dir("dev"), "debug"); assert_eq!(cargo_profile_dir("debug"), "debug"); assert_eq!(cargo_profile_dir("release"), "release"); - assert_eq!(cargo_profile_dir("native"), "release"); + assert_eq!(cargo_profile_dir("native"), "native"); } #[test] diff --git a/crates/pecos-cli/src/cli/rust_cmd.rs b/crates/pecos-cli/src/cli/rust_cmd.rs index 6a5f2254f..d71512995 100644 --- 
a/crates/pecos-cli/src/cli/rust_cmd.rs +++ b/crates/pecos-cli/src/cli/rust_cmd.rs @@ -5,8 +5,37 @@ use pecos_build::errors::Error; use serde_json::Value; use std::process::Command; -/// FFI crates that should be excluded from workspace-wide cargo commands -const FFI_CRATES: &[&str] = &["pecos-rslib", "pecos-julia-ffi", "pecos-go-ffi"]; +/// FFI crates that need a non-Rust toolchain or external SDK to check / +/// clippy / test. +/// +/// - pecos-rslib needs cmake (for mwpf via highs-sys) under `--all-features`. +/// - pecos-rslib-cuda transitively depends on pecos-cuquantum, whose build.rs +/// calls `ensure_cutensor()` on Linux -- that will silently download cuTensor +/// over the network if it's not already cached in `~/.pecos/deps/`, which we +/// don't want a routine `cargo check` to do. Dedicated CUDA workflows can +/// opt in via `--include-ffi` or by setting up the cache first. +/// - pecos-julia-ffi needs Julia. +/// - pecos-go-ffi needs Go. +/// +/// All four are excluded from the default workspace check / clippy / test +/// invocations and only touched when the caller opts in with `--include-ffi`. +const FFI_CRATES: &[&str] = &[ + "pecos-rslib", + "pecos-rslib-cuda", + "pecos-julia-ffi", + "pecos-go-ffi", +]; + +/// Extra pyo3 cdylib crates excluded only from `cargo test --workspace`. +/// +/// pecos-rslib-exp and pecos-rslib-llvm are pyo3 cdylibs whose +/// `extension-module` feature is opt-in (see python/pecos-rslib*/Cargo.toml), +/// so `cargo test --workspace` would try to link the test binary against +/// libpython and fail on systems where the active Python is a stub (e.g. +/// macOS `/usr/bin/python3`). They have no Rust unit tests of their own, so +/// this exclusion is no-coverage-loss. Default `pecos rust check` and +/// `pecos rust clippy` still cover them because check/clippy don't link. 
+const PYO3_CDYLIB_TEST_EXCLUDES: &[&str] = &["pecos-rslib-exp", "pecos-rslib-llvm"]; /// Warn if shared C++ dependencies differ across per-crate pecos.toml files. /// This is informational -- different crates may legitimately pin different versions. @@ -40,9 +69,9 @@ pub fn run(command: &super::RustCommands) -> Result<()> { super::RustCommands::Check { include_ffi } => run_check(*include_ffi), super::RustCommands::Clippy { include_ffi, fix } => run_clippy(*include_ffi, *fix), super::RustCommands::Test { - release, + profile, include_ffi, - } => run_test(*release, *include_ffi), + } => run_test(*profile, *include_ffi), } } @@ -177,11 +206,20 @@ fn is_tool_available(tool: &str) -> bool { /// `SDKROOT`, etc.) so build scripts like highs-sys's cmake-rs invocation /// find the PECOS-managed cmake without further plumbing. fn run_cargo_command(args: &[&str]) -> bool { + run_cargo_command_with_rustflags(args, None) +} + +/// Like `run_cargo_command` but lets the caller override `RUSTFLAGS`. Used by +/// `run_test` to inject `-C target-cpu=native` for the native profile. 
+fn run_cargo_command_with_rustflags(args: &[&str], rustflags: Option<&str>) -> bool { let mut cmd = Command::new("cargo"); cmd.args(args); for (key, value) in super::env_cmd::collect_env() { cmd.env(key, value); } + if let Some(rf) = rustflags { + cmd.env("RUSTFLAGS", rf); + } matches!(cmd.status(), Ok(s) if s.success()) } @@ -368,16 +406,42 @@ fn run_clippy(include_ffi: bool, fix: bool) -> Result<()> { } /// Run cargo test with GPU-aware feature handling -fn run_test(release: bool, include_ffi: bool) -> Result<()> { +fn run_test(profile: super::BuildProfile, include_ffi: bool) -> Result<()> { // Warn about any C++ dependency version differences across crates check_dep_consistency(); let gpu_probe = probe_gpu_availability(); let include_gpu_sims = should_include_gpu_sims(&gpu_probe); - let release_flag = if release { "--release" } else { "" }; maybe_print_gpu_probe_status(&gpu_probe, include_gpu_sims); + // Map our profile to the cargo flags that select the corresponding profile. + // Native goes through `--profile native` (not `--release`) so artifacts land + // in target/native/ and the C++ build.rs files (pecos-pymatching et al.) + // can detect "native" via OUT_DIR and add -march=native to their builds. + let profile_args: &[&str] = match profile { + super::BuildProfile::Dev | super::BuildProfile::Debug => &[], + super::BuildProfile::Release => &["--release"], + super::BuildProfile::Native => &["--profile", "native"], + }; + + // For native, append -C target-cpu=native to RUSTFLAGS. profile.native.rustflags + // in Cargo.toml is still gated on nightly so we inject per-process here, matching + // what `pecos python build --profile native` and the Justfile recipes do. 
+ let inherited_rustflags = std::env::var("RUSTFLAGS").unwrap_or_default(); + let computed_rustflags: Option = if matches!(profile, super::BuildProfile::Native) { + let mut rf = inherited_rustflags; + if !rf.is_empty() { + rf.push(' '); + } + rf.push_str("-C target-cpu=native"); + Some(rf) + } else { + None + }; + let rustflags = computed_rustflags.as_deref(); + let run = |args: &[&str]| -> bool { run_cargo_command_with_rustflags(args, rustflags) }; + println!("Testing workspace packages..."); // runtime = sim + qasm + phir (format parsers) // hugr = qis (includes llvm) + hugr compilation @@ -385,9 +449,9 @@ fn run_test(release: bool, include_ffi: bool) -> Result<()> { // to ensure the pecos binary has PHIR/QIS support for integration tests. let mut args: Vec<&str> = vec!["test", "--workspace", "--features=runtime,hugr"]; - for crate_name in FFI_CRATES { + for crate_name in FFI_CRATES.iter().chain(PYO3_CDYLIB_TEST_EXCLUDES) { args.push("--exclude"); - args.push(crate_name); + args.push(*crate_name); } args.extend(&[ @@ -401,11 +465,9 @@ fn run_test(release: bool, include_ffi: bool) -> Result<()> { "pecos-gpu-sims", // Always exclude from workspace test, test separately if GPU available ]); - if !release_flag.is_empty() { - args.push(release_flag); - } + args.extend(profile_args); - if !run_cargo_command(&args) { + if !run(&args) { return Err(Error::Config("cargo test (workspace) failed".to_string())); } @@ -416,10 +478,8 @@ fn run_test(release: bool, include_ffi: bool) -> Result<()> { // binary. Testing separately ensures the binary is built correctly. 
println!("Testing pecos-cli with runtime features..."); let mut cli_args: Vec<&str> = vec!["test", "-p", "pecos-cli", "--features=runtime"]; - if !release_flag.is_empty() { - cli_args.push(release_flag); - } - if !run_cargo_command(&cli_args) { + cli_args.extend(profile_args); + if !run(&cli_args) { return Err(Error::Config( "cargo test (pecos-cli with runtime) failed".to_string(), )); @@ -429,10 +489,8 @@ fn run_test(release: bool, include_ffi: bool) -> Result<()> { if probe_cuquantum_availability() { println!("cuQuantum runtime available - testing pecos-cuquantum"); let mut args = vec!["test", "-p", "pecos-cuquantum"]; - if !release_flag.is_empty() { - args.push(release_flag); - } - if !run_cargo_command(&args) { + args.extend(profile_args); + if !run(&args) { return Err(Error::Config( "cargo test (pecos-cuquantum) failed".to_string(), )); @@ -444,10 +502,8 @@ fn run_test(release: bool, include_ffi: bool) -> Result<()> { if include_gpu_sims { println!("Including pecos-gpu-sims in Rust tests"); let mut args = vec!["test", "-p", "pecos-gpu-sims"]; - if !release_flag.is_empty() { - args.push(release_flag); - } - if !run_cargo_command(&args) { + args.extend(profile_args); + if !run(&args) { return Err(Error::Config( "cargo test (pecos-gpu-sims) failed".to_string(), )); @@ -456,22 +512,24 @@ fn run_test(release: bool, include_ffi: bool) -> Result<()> { println!("Testing pecos-decoders..."); let mut args = vec!["test", "-p", "pecos-decoders", "--all-features"]; - if !release_flag.is_empty() { - args.push(release_flag); - } - if !run_cargo_command(&args) { + args.extend(profile_args); + if !run(&args) { return Err(Error::Config( "cargo test (pecos-decoders) failed".to_string(), )); } if include_ffi { + // Don't use --all-features here: pecos-rslib's `extension-module` feature + // tells pyo3 to skip linking libpython, which is correct when maturin + // builds the cdylib but produces unresolved Python C API symbols in a + // `cargo test` binary. 
We instead enable the non-pyo3-linking features + // we actually want to exercise (wasm is in default; mwpf pulls in the + // optional decoder). println!("Testing pecos-rslib..."); - let mut args = vec!["test", "-p", "pecos-rslib", "--all-features"]; - if !release_flag.is_empty() { - args.push(release_flag); - } - if !run_cargo_command(&args) { + let mut args = vec!["test", "-p", "pecos-rslib", "--features=mwpf"]; + args.extend(profile_args); + if !run(&args) { return Err(Error::Config("cargo test (pecos-rslib) failed".to_string())); } } diff --git a/crates/pecos-cli/src/cli/setup_cmd.rs b/crates/pecos-cli/src/cli/setup_cmd.rs index 31ffef02c..4e2cf26cd 100644 --- a/crates/pecos-cli/src/cli/setup_cmd.rs +++ b/crates/pecos-cli/src/cli/setup_cmd.rs @@ -47,6 +47,14 @@ pub fn run( setup_cuquantum(mode)?; } + // Python CUDA packages: only relevant when toolkit + NVIDIA GPU are present. + // The Justfile/`pecos python build` flow auto-detects this too; offering it + // here means an interactive `pecos setup` puts the user in a fully-ready + // state without a follow-up command. + if !skip_cuda && super::cuda_cmd::should_install_cuda_python() { + setup_cuda_python(mode)?; + } + if !skip_cmake { setup_cmake(mode)?; } @@ -71,6 +79,12 @@ fn has_missing_deps(skip_llvm: bool, skip_cuda: bool, skip_cmake: bool) -> bool { return true; } + if !skip_cuda + && super::cuda_cmd::should_install_cuda_python() + && !super::cuda_cmd::cuda_python_packages_installed() + { + return true; + } if !skip_cmake && pecos_build::cmake::find_cmake().is_none() { return true; } @@ -110,6 +124,17 @@ fn print_status_summary(skip_llvm: bool, skip_cuda: bool, skip_cmake: bool) { } } + // CUDA Python packages (only show when toolkit + NVIDIA GPU are present; + // mirrors the gate used by setup_cuda_python so the summary matches what + // the orchestrator will actually do). 
+ if !skip_cuda && super::cuda_cmd::should_install_cuda_python() { + if super::cuda_cmd::cuda_python_packages_installed() { + println!(" cupy: installed (CUDA Python packages synced)"); + } else { + println!(" cupy: not installed (~500 MB via `uv sync --group cuda`)"); + } + } + // cmake (optional, used for the MWPF decoder) if skip_cmake { println!(" cmake: skipped (--skip-cmake)"); @@ -320,6 +345,26 @@ fn setup_cuquantum(mode: PromptMode) -> Result<()> { Ok(()) } +// ── Python CUDA packages ──────────────────────────────────────────────────── + +fn setup_cuda_python(mode: PromptMode) -> Result<()> { + if super::cuda_cmd::cuda_python_packages_installed() { + return Ok(()); + } + + if confirm( + "Install CUDA Python packages? (cupy, cuquantum, pytket-cutensornet via `uv sync --group cuda`)", + true, // default yes when CUDA toolkit + NVIDIA GPU are present + mode, + ) { + super::cuda_cmd::install_cuda_python_packages()?; + } else { + println!(" Skipping CUDA Python packages. Install later with `pecos cuda setup-python`."); + } + + Ok(()) +} + // ── cmake (optional, MWPF decoder) ────────────────────────────────────────── // cmake is optional, so install failures degrade gracefully (mwpf disabled) diff --git a/crates/pecos-qis/build_selene.rs b/crates/pecos-qis/build_selene.rs index 6c4511ba4..11a44ec00 100644 --- a/crates/pecos-qis/build_selene.rs +++ b/crates/pecos-qis/build_selene.rs @@ -179,6 +179,7 @@ EXPORTS selene_random_f64 selene_custom_runtime_call pecos_call_qmain_with_setjmp + pecos_call_void_main_with_setjmp "; std::fs::write(&def_file, def_content).expect("Failed to write .def file"); diff --git a/crates/pecos-qis/src/c/selene_shim.c b/crates/pecos-qis/src/c/selene_shim.c index ce74a1be4..62f73ed27 100644 --- a/crates/pecos-qis/src/c/selene_shim.c +++ b/crates/pecos-qis/src/c/selene_shim.c @@ -472,3 +472,42 @@ EXPORT_API uint64_t pecos_call_qmain_with_setjmp(qmain_fn_t qmain) { } } } + +/** + * Wrapper function to safely call a `void main()` entry 
point. + * + * QIR programs can use either `i64 @qmain(i64)` (the Helios profile, with an + * explicit error-code return value) or `void @main()` (the simpler "base + * profile" form, with no return value). The two have incompatible C calling + * conventions: calling `void @main()` through the qmain wrapper (which expects + * `uint64_t (*)(uint64_t)`) is undefined behaviour and reads whatever happens + * to be in the return register, producing seemingly-random "error" codes. + * + * This wrapper exists so the Rust executor can dispatch on the entry-point + * symbol it finds and call each kind through the matching ABI. + * + * Returns: 0 on success, error code on failure (when longjmp is used). + */ +typedef void (*void_main_fn_t)(void); + +EXPORT_API uint64_t pecos_call_void_main_with_setjmp(void_main_fn_t main_func) { + static __thread SeleneInstance dummy_instance; + selene_void_result_t start_result = selene_on_shot_start(&dummy_instance, 0); + if (start_result.error_code != 0) { + return start_result.error_code; + } + + int error_code = setjmp(user_program_jmpbuf); + if (error_code == 0) { + main_func(); + selene_on_shot_end(&dummy_instance); + return 0; + } else { + selene_on_shot_end(&dummy_instance); + if (error_code < 1000) { + return 0; + } else { + return (uint64_t)error_code; + } + } +} diff --git a/crates/pecos-qis/src/executor.rs b/crates/pecos-qis/src/executor.rs index a950a981f..d1ba56774 100644 --- a/crates/pecos-qis/src/executor.rs +++ b/crates/pecos-qis/src/executor.rs @@ -282,6 +282,22 @@ impl SharedLibrary { type ResetInterfaceFn = unsafe extern "C" fn(); type GetOperationsFn = unsafe extern "C" fn() -> *mut OperationCollector; type CallQmainFn = unsafe extern "C" fn(extern "C" fn(u64) -> u64) -> u64; +type CallVoidMainFn = unsafe extern "C" fn(extern "C" fn()) -> u64; + +/// The entry-point shape found in a compiled QIR program, bundled with the +/// matching setjmp wrapper from the C shim. 
Each variant pairs the function +/// pointer ABI with the shim that calls it -- mixing them (e.g. calling a +/// `void main()` through the qmain wrapper) is undefined behaviour. +enum ExecutionEntryPoint<'a> { + Qmain { + func: Symbol<'a, extern "C" fn(u64) -> u64>, + call: Symbol<'a, CallQmainFn>, + }, + VoidMain { + func: Symbol<'a, extern "C" fn()>, + call: Symbol<'a, CallVoidMainFn>, + }, +} type WaitForNeedResultFn = unsafe extern "C" fn(u64) -> u64; type SetMeasurementResultFn = unsafe extern "C" fn(u64, bool); type SignalResultReadyFn = unsafe extern "C" fn(); @@ -1051,39 +1067,77 @@ impl QisHeliosInterface { load_result } - /// Get the qmain and setjmp wrapper function symbols from the libraries + /// Get the entry point and matching setjmp wrapper from the libraries. + /// + /// QIR programs can use one of two entry-point signatures: + /// - `i64 @qmain(i64)` -- the Helios / adaptive profile. pecos-hugr-qis + /// emits this (its `LLVM_MAIN` constant in compiler.rs is `"qmain"`), + /// and the pecos-phir RON pipeline fixtures (`ron_support.rs`, + /// `qis_pipeline_tests`) use it. The return value is an error code. + /// - `void @main()` -- the "base profile" form. pecos-phir's MLIR/QIR text + /// path matches `@main` directly (see `mlir_toolchain.rs`), and PECOS's QIR + /// text tests use this. It's also the most common form for + /// externally-authored programs. + /// + /// Calling a `void @main()` function through the qmain ABI (`u64 fn(u64)`) + /// is undefined behaviour: the return register is never set, so what looks + /// like a "random error code" is actually whatever was in the register on + /// return. We dispatch on the symbol that's present so each kind is called + /// with the correct ABI. + /// + /// **Known limitation:** dispatch is name-only. A program with the + /// off-spec signature `void @qmain()` or `i64 @main(i64)` would be + /// misclassified. 
The robust fix would be to inspect the LLVM module's + /// function type before linking and reject (or dispatch on) any signature + /// other than the two canonical shapes; that requires plumbing the IR + /// through to this lookup, so it's deferred until we encounter such a + /// program in practice. Until then, callers should stick to the two + /// canonical signatures above. fn get_execution_symbols<'a>( program_lib: &'a Library, shim_lib: &'a Library, - ) -> Result< - ( - Symbol<'a, extern "C" fn(u64) -> u64>, - Symbol<'a, CallQmainFn>, - ), - InterfaceError, - > { - // Get the qmain or main function symbol - let qmain_fn: Symbol u64> = unsafe { - program_lib - .get(b"qmain\0") - .or_else(|_| program_lib.get(b"main\0")) - .map_err(|e| { - InterfaceError::ExecutionError(format!( - "Failed to find qmain or main entry point: {e}" - )) - })? - }; + ) -> Result, InterfaceError> { + // Prefer `qmain` (Helios profile); fall back to `main` (void-return form). + // We look up qmain first because it's the only one we want to call + // through the i64-returning wrapper. + let qmain_fn: Result u64>, _> = + unsafe { program_lib.get(b"qmain\0") }; + + if let Ok(func) = qmain_fn { + let call: Symbol<'a, CallQmainFn> = unsafe { + shim_lib + .get(b"pecos_call_qmain_with_setjmp\0") + .map_err(|e| { + InterfaceError::ExecutionError(format!( + "Failed to find pecos_call_qmain_with_setjmp wrapper: {e}" + )) + })? + }; + return Ok(ExecutionEntryPoint::Qmain { func, call }); + } - // Get the setjmp wrapper function - let call_with_setjmp: Symbol = unsafe { + // No qmain -- try `main` and dispatch through the void-main wrapper so + // we don't read a garbage value out of the return register. + let main_fn: Symbol<'a, extern "C" fn()> = unsafe { + program_lib.get(b"main\0").map_err(|e| { + InterfaceError::ExecutionError(format!( + "Failed to find qmain or main entry point: {e}" + )) + })? 
+ }; + let call: Symbol<'a, CallVoidMainFn> = unsafe { shim_lib - .get(b"pecos_call_qmain_with_setjmp\0") + .get(b"pecos_call_void_main_with_setjmp\0") .map_err(|e| { - InterfaceError::ExecutionError(format!("Failed to find setjmp wrapper: {e}")) + InterfaceError::ExecutionError(format!( + "Failed to find pecos_call_void_main_with_setjmp wrapper: {e}" + )) })? }; - - Ok((qmain_fn, call_with_setjmp)) + Ok(ExecutionEntryPoint::VoidMain { + func: main_fn, + call, + }) } /// Add platform-specific linker flags to the clang command @@ -1812,15 +1866,15 @@ entry: let program_lib = Self::get_or_cache_program_lib(so_path)?; debug!("Using cached program library"); - // Step 5: Get the execution symbols (qmain and setjmp wrapper) - let (qmain_fn, call_with_setjmp) = - Self::get_execution_symbols(program_lib.inner(), shim_lib.inner())?; + // Step 5: Get the execution entry point (qmain or main) and matching + // setjmp wrapper from the shim. + let entry_point = Self::get_execution_symbols(program_lib.inner(), shim_lib.inner())?; - // Step 6: Call qmain via our setjmp wrapper + // Step 6: Call the entry point via the matching setjmp wrapper. 
// The call chain will be: // pecos_call_qmain_with_setjmp(qmain) [from our shim] // → setjmp(user_program_jmpbuf) [saves stack state for longjmp] - // → qmain(0) [user code in program.so] + // → qmain(0) -or- main() [user code in program.so] // → ___qalloc() [from libhelios.a linked into program.so] // → selene_qalloc() [from libpecos_selene.so C shim] // → __quantum__rt__qubit_allocate() [from libpecos_qis_ffi.so] @@ -1828,13 +1882,16 @@ entry: // If an error occurs: // → longjmp(user_program_jmpbuf, error_code) [jumps back to setjmp] // → wrapper catches error and returns error code - let result = unsafe { call_with_setjmp(*qmain_fn) }; + let (entry_label, result) = match &entry_point { + ExecutionEntryPoint::Qmain { func, call } => ("qmain", unsafe { call(**func) }), + ExecutionEntryPoint::VoidMain { func, call } => ("main", unsafe { call(**func) }), + }; if result != 0 { return Err(InterfaceError::ExecutionError(format!( - "qmain returned error code: {result}" + "{entry_label} returned error code: {result}" ))); } - info!("qmain executed successfully!"); + info!("{entry_label} executed successfully!"); // Step 7: Collect the operations from thread-local storage via the cdylib // IMPORTANT: We call the cdylib's version to get the operations from the same diff --git a/docs/development/dev-tools.md b/docs/development/dev-tools.md index 68de3910b..26811ac47 100644 --- a/docs/development/dev-tools.md +++ b/docs/development/dev-tools.md @@ -14,10 +14,14 @@ pecos --help # Rust commands (CUDA-aware) pecos rust check # Run cargo check (auto-excludes CUDA if unavailable) pecos rust clippy # Run cargo clippy (CUDA-aware) -pecos rust test # Run cargo test (CUDA-aware) +pecos rust test # Run cargo test (CUDA-aware; default profile=dev) +pecos rust test --profile release # Same but with release optimisations +pecos rust test --profile native # Release + -C target-cpu=native + --march=native for C++ # Python build (maturin + quantum-pecos) -pecos python build # Build 
pecos-rslib with maturin +pecos python build # Build pecos-rslib with maturin (default profile=dev) +pecos python build --profile release # Release build +pecos python build --profile native # Release + native-CPU codegen (Rust and C++) # Dependency installation pecos install llvm # Install LLVM 14 to ~/.pecos/deps/llvm-14/ diff --git a/docs/user-guide/cmake-setup.md b/docs/user-guide/cmake-setup.md index 4c7fb564a..bbd1c5061 100644 --- a/docs/user-guide/cmake-setup.md +++ b/docs/user-guide/cmake-setup.md @@ -94,9 +94,10 @@ Optional decoders: `pecos python build` will detect cmake automatically and pass `--features mwpf` to maturin. To check the decoder from Python: ```python -import pecos_rslib +from pecos_rslib.qec import ObservableSubgraphDecoder # MWPF-capable decoder -pecos_rslib.qec.create_observable_decoder(dem_str, "mwpf") # should not raise +# Construct with a real DEM + stabilizer coords: +# decoder = ObservableSubgraphDecoder(dem_str, stab_coords, inner_decoder="mwpf") ``` Set `PECOS_BUILD_MWPF=0` to force MWPF off even when cmake is present (useful for reproducing the lean build locally). `PECOS_BUILD_MWPF=1` forces it on, which is what CI sets. diff --git a/go/README.md b/go/README.md index fcd6af8e8..cc0578b07 100644 --- a/go/README.md +++ b/go/README.md @@ -12,13 +12,16 @@ Go bindings for the PECOS quantum error correction simulator. ### 1. Build the Rust library ```bash -cd go/pecos-go-ffi -cargo build --release +just go-build release ``` This creates `libpecos_go.so` (Linux), `libpecos_go.dylib` (macOS), or `pecos_go.dll` (Windows) in `target/release/`. -### 2. Set library path +### 2. 
Set runtime library paths + +The `#cgo LDFLAGS` directive in `pecos/pecos.go` already points at +`target/release/` for the link step, so for the standard release build only +the runtime loader paths need to be set: **Linux:** ```bash @@ -30,11 +33,17 @@ export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/target/release **macOS:** ```bash export DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH:$(pwd)/target/release ``` +To use a non-release profile (e.g. debug or native), prepend an extra +search path via `CGO_LDFLAGS` — this is what `just go-test <profile>` does: + +```bash +export CGO_LDFLAGS="-L$(pwd)/target/native" +``` + ### 3. Run Go tests ```bash -cd go/pecos -go test -v +just go-test release ``` ## Usage diff --git a/go/pecos/pecos.go b/go/pecos/pecos.go index ee9e06721..6d3a541d5 100644 --- a/go/pecos/pecos.go +++ b/go/pecos/pecos.go @@ -18,12 +18,19 @@ // // Before using this package, you need to build the Rust library: // -// cd go/pecos-go-ffi -// cargo build --release +// just go-build release // -// Then set the library path: +// The #cgo directive below already points at the workspace's target/release/ +// for the link step, so for the standard release build only the runtime +// loader paths need to be set: // -// export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/path/to/PECOS/target/release +// export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/path/to/PECOS/target/release" +// export DYLD_LIBRARY_PATH="$DYLD_LIBRARY_PATH:/path/to/PECOS/target/release" # macOS +// +// To use a non-release profile (e.g.
debug or native) add an extra search +// path via CGO_LDFLAGS (this is what `just go-test <profile>` does): +// +// export CGO_LDFLAGS="-L/path/to/PECOS/target/native" // // # Example // @@ -43,7 +50,13 @@ package pecos /* -#cgo LDFLAGS: -L${SRCDIR}/../pecos-go-ffi/target/release -lpecos_go +// The -L${SRCDIR}/../../target/release search path lets a plain `go test` +// link against the workspace's release-built libpecos_go without the caller +// having to set CGO_LDFLAGS (used by .github/workflows/go-test.yml and +// direct-from-clone smoke tests). Callers targeting a different cargo profile +// can prepend their own -L via CGO_LDFLAGS -- the go toolchain places +// CGO_LDFLAGS before this directive on the linker command line, so non-release +// search paths take precedence. #cgo LDFLAGS: -L${SRCDIR}/../../target/release -lpecos_go #include diff --git a/julia/PECOS.jl/src/PECOS.jl b/julia/PECOS.jl/src/PECOS.jl index 9b869ffa1..92e89e6e7 100644 --- a/julia/PECOS.jl/src/PECOS.jl +++ b/julia/PECOS.jl/src/PECOS.jl @@ -26,9 +26,6 @@ include("Simulator.jl") # Determine library path based on environment const libpecos_julia = begin - # Check if we're in development mode (library built locally) - dev_lib_path = joinpath(@__DIR__, "..", "..", "..", "target", "release") - lib_name = if Sys.iswindows() "pecos_julia.dll" elseif Sys.isapple() @@ -37,28 +34,36 @@ const libpecos_julia = begin "libpecos_julia.so" end - dev_lib = joinpath(dev_lib_path, lib_name) - - if isfile(dev_lib) - # Development mode: use locally built library - dev_lib + explicit_lib_dir = get(ENV, "PECOS_JULIA_LIB_DIR", "") + candidate_dirs = if isempty(explicit_lib_dir) + [ + joinpath(@__DIR__, "..", "..", "..", "target", "release"), + joinpath(@__DIR__, "..", "..", "..", "target", "native"), + joinpath(@__DIR__, "..", "..", "..", "target", "debug"), + ] else - # Try debug build as fallback - debug_lib = joinpath(@__DIR__, "..", "..", "..", "target", "debug", lib_name) - if isfile(debug_lib) - debug_lib - else - 
error(""" - PECOS Julia library not found! - - Please build the library first: - cd julia/pecos-julia-ffi && cargo build --release - - Or for debug mode: - cd julia/pecos-julia-ffi && cargo build - """) - end + [explicit_lib_dir] end + + candidates = [joinpath(candidate_dir, lib_name) for candidate_dir in candidate_dirs] + found_index = findfirst(isfile, candidates) + + if found_index === nothing + searched = join(candidate_dirs, "\n ") + error(""" + PECOS Julia library not found! + + Searched: + $searched + + Build it with: + just julia-build release + + Or select a specific build directory with PECOS_JULIA_LIB_DIR. + """) + end + + candidates[found_index] end struct QubitId diff --git a/pyproject.toml b/pyproject.toml index 3bde0ac88..9d1575279 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -84,6 +84,12 @@ reinstall-package = [ [tool.black] line-length = 120 +# Target the minimum supported Python (matches `requires-python = ">=3.10"` +# above and `target-version = "py310"` in ruff.toml) so black produces code +# that parses cleanly on every interpreter in the support range -- otherwise +# black auto-targets the newest version satisfying requires-python and can +# emit 3.14-only syntax that 3.10/3.11/3.12 can't parse. +target-version = ["py310"] [tool.pytest.ini_options] markers = [ diff --git a/python/pecos-rslib-cuda/Cargo.toml b/python/pecos-rslib-cuda/Cargo.toml index 896cadcbc..eb17d4c55 100644 --- a/python/pecos-rslib-cuda/Cargo.toml +++ b/python/pecos-rslib-cuda/Cargo.toml @@ -17,12 +17,20 @@ crate-type = ["cdylib", "rlib"] doctest = false test = false +[features] +# See pecos-rslib's Cargo.toml for why this bundle is opt-in. 
+extension-module = [ + "pyo3/extension-module", + "pyo3/abi3-py310", + "pyo3/generate-import-lib", +] + [dependencies] pecos-cuquantum = { workspace = true } pecos-core = { workspace = true } pecos-simulators = { workspace = true } -pyo3 = { workspace = true, features = ["extension-module", "abi3-py310", "generate-import-lib"] } +pyo3.workspace = true log.workspace = true [build-dependencies] diff --git a/python/pecos-rslib-cuda/pyproject.toml b/python/pecos-rslib-cuda/pyproject.toml index 7d48eccb5..fc66e5b6f 100644 --- a/python/pecos-rslib-cuda/pyproject.toml +++ b/python/pecos-rslib-cuda/pyproject.toml @@ -36,6 +36,7 @@ build-backend = "maturin" [tool.maturin] module-name = "pecos_rslib_cuda" +features = ["extension-module"] [dependency-groups] dev = [] diff --git a/python/pecos-rslib-exp/Cargo.toml b/python/pecos-rslib-exp/Cargo.toml index 82b8162cd..bddcee29a 100644 --- a/python/pecos-rslib-exp/Cargo.toml +++ b/python/pecos-rslib-exp/Cargo.toml @@ -16,6 +16,14 @@ name = "pecos_rslib_exp" crate-type = ["cdylib", "rlib"] doctest = false +[features] +# See pecos-rslib's Cargo.toml for why this bundle is opt-in. 
+extension-module = [ + "pyo3/extension-module", + "pyo3/abi3-py310", + "pyo3/generate-import-lib", +] + [dependencies] pecos-core.workspace = true pecos-eeg.workspace = true @@ -28,7 +36,7 @@ serde = { workspace = true, features = ["derive"] } serde_json.workspace = true rayon.workspace = true pecos-stab-tn.workspace = true -pyo3 = { workspace = true, features = ["extension-module", "abi3-py310", "generate-import-lib", "num-complex"] } +pyo3 = { workspace = true, features = ["num-complex"] } num-complex.workspace = true smallvec.workspace = true diff --git a/python/pecos-rslib-exp/pyproject.toml b/python/pecos-rslib-exp/pyproject.toml index 2dc51abe2..6dc1ad382 100644 --- a/python/pecos-rslib-exp/pyproject.toml +++ b/python/pecos-rslib-exp/pyproject.toml @@ -18,3 +18,4 @@ build-backend = "maturin" [tool.maturin] module-name = "pecos_rslib_exp" +features = ["extension-module"] diff --git a/python/pecos-rslib-llvm/Cargo.toml b/python/pecos-rslib-llvm/Cargo.toml index 973d8829e..8c230b0a4 100644 --- a/python/pecos-rslib-llvm/Cargo.toml +++ b/python/pecos-rslib-llvm/Cargo.toml @@ -16,10 +16,18 @@ name = "pecos_rslib_llvm" crate-type = ["cdylib", "rlib"] doctest = false +[features] +# See pecos-rslib's Cargo.toml for why this bundle is opt-in. 
+extension-module = [ + "pyo3/extension-module", + "pyo3/abi3-py310", + "pyo3/generate-import-lib", +] + [dependencies] pecos-llvm.workspace = true inkwell = { workspace = true, features = ["llvm14-0"] } -pyo3 = { workspace = true, features = ["extension-module", "abi3-py310", "generate-import-lib"] } +pyo3.workspace = true regex.workspace = true tempfile.workspace = true log.workspace = true diff --git a/python/pecos-rslib-llvm/pyproject.toml b/python/pecos-rslib-llvm/pyproject.toml index 553757192..268bc33bd 100644 --- a/python/pecos-rslib-llvm/pyproject.toml +++ b/python/pecos-rslib-llvm/pyproject.toml @@ -35,6 +35,7 @@ build-backend = "maturin" [tool.maturin] module-name = "pecos_rslib_llvm" +features = ["extension-module"] [dependency-groups] dev = [] diff --git a/python/pecos-rslib/Cargo.toml b/python/pecos-rslib/Cargo.toml index dd1d0fe9e..d0db547a2 100644 --- a/python/pecos-rslib/Cargo.toml +++ b/python/pecos-rslib/Cargo.toml @@ -24,6 +24,23 @@ wasm = ["dep:pecos-wasm"] # Off by default for local dev so cmake is not required. Wheel builds in CI enable # this so end users get MWPF support out of the box. mwpf = ["pecos-decoders/mwpf"] +# Bundle of pyo3 features that maturin needs for the Python cdylib but that +# break a plain `cargo test`: +# - extension-module: tell the linker NOT to link libpython +# - abi3-py310: use the limited API; pyo3 then skips emitting +# `rustc-link-lib=python` directives +# - generate-import-lib: Windows-only, generates abi3 import library +# With these enabled, `cargo test` produces a test binary with unresolved +# Python C API symbols (__Py_IncRef, __Py_NoneStruct, ...). Gating them behind +# an opt-in feature lets maturin enable it (via `features = +# ["extension-module"]` in pyproject.toml) while leaving `cargo test` working. +# All four pecos-rslib* crates must agree on this to avoid feature unification +# silently re-enabling it for the whole workspace. 
+extension-module = [ + "pyo3/extension-module", + "pyo3/abi3-py310", + "pyo3/generate-import-lib", +] [dependencies] # Core PECOS crates (direct deps instead of metacrate) @@ -74,7 +91,7 @@ pecos-build.workspace = true pecos-experimental.workspace = true # Third-party -pyo3 = { workspace = true, features = ["extension-module", "abi3-py310", "generate-import-lib", "num-complex"] } +pyo3 = { workspace = true, features = ["num-complex"] } rayon.workspace = true rand.workspace = true ndarray.workspace = true diff --git a/python/pecos-rslib/pyproject.toml b/python/pecos-rslib/pyproject.toml index 2f45395d6..262f56eae 100644 --- a/python/pecos-rslib/pyproject.toml +++ b/python/pecos-rslib/pyproject.toml @@ -35,6 +35,9 @@ build-backend = "maturin" [tool.maturin] module-name = "pecos_rslib" +# extension-module is feature-gated in Cargo.toml so plain `cargo test` works; +# maturin enables it here so the cdylib doesn't link libpython at build time. +features = ["extension-module"] [dependency-groups] dev = [ diff --git a/python/quantum-pecos/pyproject.toml b/python/quantum-pecos/pyproject.toml index 1d22357c1..b8d03695f 100644 --- a/python/quantum-pecos/pyproject.toml +++ b/python/quantum-pecos/pyproject.toml @@ -99,6 +99,8 @@ packages = ["src/pecos"] [tool.black] line-length = 120 +# See top-level pyproject.toml's [tool.black] for why this is pinned to py310. 
+target-version = ["py310"] [tool.pytest.ini_options] markers = [ diff --git a/python/quantum-pecos/src/pecos/slr/gen_codes/guppy/ir_builder.py b/python/quantum-pecos/src/pecos/slr/gen_codes/guppy/ir_builder.py index 22580b5b9..5163a275e 100644 --- a/python/quantum-pecos/src/pecos/slr/gen_codes/guppy/ir_builder.py +++ b/python/quantum-pecos/src/pecos/slr/gen_codes/guppy/ir_builder.py @@ -36,7 +36,7 @@ def verify(ancilla: array[qubit, 3] @owned) -> tuple[array[qubit, 2], ...]: - Use separate ancilla qubits instead of array elements for verification - Or restructure the verification pattern to avoid the loop issue -See tests/slr-tests/guppy/test_partial_array_returns.py for correct usage patterns. +See tests/slr_tests/guppy/test_partial_array_returns.py for correct usage patterns. """ from __future__ import annotations @@ -152,6 +152,15 @@ def __init__( # Track version numbers for generating unique variable names self.variable_version_counter: dict[str, int] = {} + # Unified variable-state tracking: replaces ad-hoc dicts like + # `unpacked_vars`, `refreshed_arrays`, etc. (See variable_state.py + # for rationale.) Migration is incremental -- legacy dicts still + # populated, this object is consulted at sites that need a coherent + # view of "what Guppy form is this SLR variable in right now?". + from pecos.slr.gen_codes.guppy.variable_state import VariableState + + self.var_state = VariableState() + def _get_unique_var_name(self, base_name: str, index: int | None = None) -> str: """Generate a unique variable name that doesn't conflict with existing names. @@ -1967,6 +1976,22 @@ def render(self, _context): if not was_consumed and hasattr(self, "consumed_resources"): was_consumed = fresh_name in self.consumed_resources + # If the fresh array was unpacked into element vars, the + # array itself was moved by the unpack -- discard_array + # would error. Element-level cleanup is handled separately + # (or the elements were consumed by gates/measurements). 
+ # The unpacked-state tracker keys by the *original* SLR + # symbol, so we look up via the original; the fresh name + # itself doesn't appear in unpacked_vars. + original_name = info.get("original") + if ( + original_name + and self.var_state.is_unpacked(original_name) + and hasattr(self, "refreshed_arrays") + and self.refreshed_arrays.get(original_name) == fresh_name + ): + was_consumed = True + if not was_consumed and info.get("is_quantum_array"): # Add discard statement discard_stmt = FunctionCall( @@ -3019,6 +3044,36 @@ def render(self, context): # Regular pre-allocated array - use measure_array qreg_ref = self._convert_qubit_ref(qreg) + # If the array was previously unpacked (e.g., to access an + # individual element after a function call returned it), + # Guppy considers the original variable name consumed by + # the unpack. Repack from the element vars so measure_array + # can take the whole array as input. We emit the repack + # statement *prepended* to whatever statement(s) the rest + # of this branch produces (see `_prepend_to_result`). + # + # var_state and the legacy `unpacked_vars` dict are both + # updated so other code paths agree the array is whole again. + repack_stmt = None + if hasattr(qreg, "sym") and self.var_state.is_unpacked(qreg.sym): + binding = self.var_state.get(qreg.sym) + repack_stmt = Assignment( + target=VariableRef(qreg.sym), + value=self._create_array_reconstruction(list(binding.element_names)), + ) + self.var_state.bind_whole(qreg.sym, qreg.sym) + if hasattr(self, "unpacked_vars") and qreg.sym in self.unpacked_vars: + del self.unpacked_vars[qreg.sym] + if hasattr(self, "context"): + var = self.context.lookup_variable(qreg.sym) + if var: + var.is_unpacked = False + var.unpacked_names = [] + # qreg_ref was computed *before* the repack -- recompute + # so it points at the now-whole array, not stale unpacked + # element variables. 
+ qreg_ref = self._convert_qubit_ref(qreg) + # Mark fresh variable as used if this is measuring a fresh variable if hasattr(self, "fresh_variables_to_track") and hasattr( self, @@ -3120,7 +3175,10 @@ def render(self, context): func_name="quantum.measure_array", args=[qreg_ref], ) - return Assignment(target=creg_ref, value=call) + result = Assignment(target=creg_ref, value=call) + if repack_stmt is not None: + return Block(statements=[repack_stmt, result]) + return result # No target - just measure call = FunctionCall( @@ -3139,7 +3197,10 @@ def analyze(self, context): def render(self, context): return self.expr.render(context) - return ExpressionStatement(call) + result = ExpressionStatement(call) + if repack_stmt is not None: + return Block(statements=[repack_stmt, result]) + return result # Handle single qubit measurement if len(meas.qargs) == 1: @@ -6656,12 +6717,19 @@ def render(self, context): ) element_names = [f"{name}_{i}{unpack_suffix}" for i in range(return_array_size)] - # Add unpacking statement using ArrayUnpack IR class + # Add unpacking statement using ArrayUnpack IR class. + # When the array was refreshed by a function call (e.g., + # q → q_fresh), unpack from the refreshed name -- the + # original is moved/consumed at this point. Without + # this, generated Guppy looks like `q_0_ret, = q` and + # Guppy rejects with WrongNumberOfUnpacksError or + # AlreadyUsedError. 
from pecos.slr.gen_codes.guppy.ir import ArrayUnpack + unpack_source = self.refreshed_arrays.get(name, name) unpack_stmt = ArrayUnpack( targets=element_names, - source=name, + source=unpack_source, ) statements.append(unpack_stmt) @@ -6673,14 +6741,17 @@ def render(self, context): # CRITICAL: Track index mapping for partial consumption # If live_qubits tells us which original indices are in the returned array, # create a mapping from original index → unpacked variable index + index_map: dict[int, int] | None = None if name in live_qubits: original_indices = sorted(live_qubits[name]) if not hasattr(self, "index_mapping"): self.index_mapping = {} # Map original index to position in returned/unpacked array - self.index_mapping[name] = { - orig_idx: new_idx for new_idx, orig_idx in enumerate(original_indices) - } + index_map = {orig_idx: new_idx for new_idx, orig_idx in enumerate(original_indices)} + self.index_mapping[name] = index_map + + # Mirror to unified variable state (see variable_state.py) + self.var_state.bind_unpacked(name, list(element_names), index_map) # Update context if hasattr(self, "context"): diff --git a/python/quantum-pecos/src/pecos/slr/gen_codes/guppy/variable_state.py b/python/quantum-pecos/src/pecos/slr/gen_codes/guppy/variable_state.py new file mode 100644 index 000000000..f5805f6ce --- /dev/null +++ b/python/quantum-pecos/src/pecos/slr/gen_codes/guppy/variable_state.py @@ -0,0 +1,154 @@ +"""Unified variable-state tracking for the Guppy IR generator. + +The Guppy generator translates SLR programs (high-level quantum DSL) to +Guppy source. Guppy uses linear types: every qubit must be used exactly +once, and arrays-of-qubits get "moved" into and out of operations rather +than mutated in place. + +Translating SLR to Guppy means tracking, for each SLR variable, *what +Guppy variable currently holds it*. 
The form changes over the lifetime +of the SLR variable -- it might be a whole array, get unpacked into +element variables for individual access, get refreshed by a function +return, get partially consumed, etc. + +Historically the IRGuppyGenerator did this with ~6+ separate dicts +(`unpacked_vars`, `refreshed_arrays`, `array_remapping`, `index_mapping`, +`variable_remapping`, `function_var_remapping`, `replaced_qubits`, +`fresh_variables_to_track`, ...). Different code generation sites +consult different subsets of these dicts; sites that miss a state +transition emit Guppy that violates linearity ("AlreadyUsedError", +"WrongNumberOfUnpacksError", etc.). + +This module replaces that with one model: each SLR variable has a +*current binding* describing its Guppy form right now. Operations on the +variable consult the binding; transitions update it. Code-generation +sites that need the variable in a particular form call helpers like +`ensure_whole()` which emit reconstruction statements transparently. + +The migration is incremental. While the legacy dicts still exist, this +module shadows them: writes go to both, reads prefer this module. Once +all read sites are migrated, the legacy dicts can be removed. +""" + +from __future__ import annotations + +from dataclasses import dataclass, field + + +@dataclass(frozen=True) +class WholeArray: + """SLR variable is currently bound to a single Guppy array variable. + + `guppy_name` is the live identifier; subsequent ops can reference + `guppy_name` directly or index into it via `guppy_name[i]`. + """ + + guppy_name: str + + +@dataclass(frozen=True) +class UnpackedArray: + """SLR variable was unpacked into per-element Guppy variables. + + `element_names[i]` is the Guppy variable for original SLR index + `i` -- unless `index_mapping` is set, in which case mapping + `original_index -> position_in_element_names` is used (this happens + when a function call returned a partially-consumed array). 
+ """ + + element_names: tuple[str, ...] + index_mapping: tuple[tuple[int, int], ...] = () # (orig_idx, position) + + def position_for(self, original_index: int) -> int | None: + """Return the position in `element_names` for an SLR index. + + With no `index_mapping`, returns `original_index` directly when in + bounds. With a mapping, looks up the position; returns None for + SLR indices that aren't present in the partial array. + """ + if not self.index_mapping: + return original_index if original_index < len(self.element_names) else None + for orig, pos in self.index_mapping: + if orig == original_index: + return pos + return None + + +@dataclass(frozen=True) +class Consumed: + """SLR variable is fully consumed; subsequent references are bugs. + + `reason` is a short human-readable note for diagnostics ("measured", + "passed to function as @owned", etc.). + """ + + reason: str = "" + + +Binding = WholeArray | UnpackedArray | Consumed + + +@dataclass +class VariableState: + """Current Guppy bindings for SLR variables in one generation context. + + A "context" is typically one Guppy function being generated -- the + main function or one of the extracted sub-block functions. Bindings + are local to a context; the same SLR variable name in different + contexts can have different bindings. 
+ """ + + bindings: dict[str, Binding] = field(default_factory=dict) + + def bind_whole(self, slr_name: str, guppy_name: str) -> None: + """Record that `slr_name` is currently held by Guppy var `guppy_name`.""" + self.bindings[slr_name] = WholeArray(guppy_name) + + def bind_unpacked( + self, + slr_name: str, + element_names: list[str], + index_mapping: dict[int, int] | None = None, + ) -> None: + """Record that `slr_name` was unpacked into per-element Guppy vars.""" + mapping_tuple = tuple(sorted(index_mapping.items())) if index_mapping else () + self.bindings[slr_name] = UnpackedArray(tuple(element_names), mapping_tuple) + + def bind_consumed(self, slr_name: str, reason: str = "") -> None: + """Record that `slr_name` is no longer accessible.""" + self.bindings[slr_name] = Consumed(reason) + + def get(self, slr_name: str) -> Binding | None: + """Return current binding, or None if `slr_name` is unknown here.""" + return self.bindings.get(slr_name) + + def is_unpacked(self, slr_name: str) -> bool: + """True iff `slr_name` is currently in unpacked form.""" + return isinstance(self.bindings.get(slr_name), UnpackedArray) + + def is_consumed(self, slr_name: str) -> bool: + """True iff `slr_name` has been consumed.""" + return isinstance(self.bindings.get(slr_name), Consumed) + + def ensure_whole(self, slr_name: str) -> tuple[list[str], str | None]: + """Ensure `slr_name` is bound as a whole array; emit prep code if not. + + Returns (preparation_lines, guppy_name). The caller emits the + preparation_lines (Guppy source as `array(elem_0, elem_1, ...)` + repacking) before whatever it does with `guppy_name`. Returns + ([], guppy_name) when already whole. Returns ([], None) when + `slr_name` is consumed or unknown -- caller should treat as a + programming error. + + After repack, the binding is updated to WholeArray so subsequent + callers don't repack again. 
+ """ + binding = self.bindings.get(slr_name) + if isinstance(binding, WholeArray): + return [], binding.guppy_name + if isinstance(binding, UnpackedArray): + elements = ", ".join(binding.element_names) + line = f"{slr_name} = array({elements})" + self.bindings[slr_name] = WholeArray(slr_name) + return [line], slr_name + return [], None diff --git a/python/quantum-pecos/tests/docs/conftest.py b/python/quantum-pecos/tests/docs/conftest.py index 7a5ed3ca1..9e59c51aa 100644 --- a/python/quantum-pecos/tests/docs/conftest.py +++ b/python/quantum-pecos/tests/docs/conftest.py @@ -21,7 +21,7 @@ def _check_cuda_available() -> bool: # Check for CUDA toolkit using pecos CLI (same as Justfile pattern) try: result = subprocess.run( - ["cargo", "run", "-p", "pecos", "--features", "cli", "--", "cuda", "check", "-q"], + ["cargo", "run", "-p", "pecos-cli", "--quiet", "--", "cuda", "check", "-q"], capture_output=True, timeout=30, check=False, diff --git a/python/quantum-pecos/tests/docs/rust_crate/tests/user_guide_circuit_representation.rs b/python/quantum-pecos/tests/docs/rust_crate/tests/user_guide_circuit_representation.rs index 4862d6ab2..142bf145d 100644 --- a/python/quantum-pecos/tests/docs/rust_crate/tests/user_guide_circuit_representation.rs +++ b/python/quantum-pecos/tests/docs/rust_crate/tests/user_guide_circuit_representation.rs @@ -8,7 +8,7 @@ fn test_user_guide_circuit_representation_rust_1() { use pecos::core::{Gate, QubitId}; use pecos::dag::DAG; use pecos::digraph::DiGraph; - use pecos::quantum::{Attribute, DagCircuit, TickCircuit, TickGateError}; + use pecos::quantum::{Attribute, DagCircuit, TickCircuit}; // Fluent builder API let mut circuit = DagCircuit::new(); diff --git a/python/quantum-pecos/tests/guppy/test_hugr_compilation.py b/python/quantum-pecos/tests/guppy/test_hugr_compilation.py index e0033fb75..89afb372a 100644 --- a/python/quantum-pecos/tests/guppy/test_hugr_compilation.py +++ b/python/quantum-pecos/tests/guppy/test_hugr_compilation.py @@ -1,7 +1,13 
@@ -"""Test HUGR compilation and LLVM IR generation.""" +"""Test HUGR compilation and LLVM IR generation. + +Rust-side coverage (compilation, unit tests) lives in `cargo test +-p pecos-hugr-qis` and is run by `just rstest` / `pecos rust test +--workspace --features=runtime,hugr`. Don't re-invoke cargo from pytest -- +duplicates work, hides Rust build errors as Python test failures, and +runs under a different env than the canonical Rust test path. +""" import os -import shutil import subprocess import tempfile from pathlib import Path @@ -38,91 +44,6 @@ def _find_llvm_as() -> str | None: class TestHUGRCompilation: """Test suite for HUGR compilation and related functionality.""" - def test_rust_hugr_crate_compilation(self) -> None: - """Test that the Rust HUGR support compiles.""" - # Check if cargo is available - cargo_path = shutil.which("cargo") - if not cargo_path: - pytest.skip("Cargo not available") - - try: - result = subprocess.run( - [cargo_path, "--version"], - capture_output=True, - text=True, - check=False, - ) - if result.returncode != 0: - pytest.skip("Cargo not available") - except FileNotFoundError: - pytest.skip("Cargo not found in PATH") - - # Check if pecos-hugr-qis crate exists - project_root = Path(__file__).resolve().parent.parent.parent.parent.parent - hugr_crate = project_root / "crates" / "pecos-hugr-qis" - - if not hugr_crate.exists(): - pytest.skip("pecos-hugr-qis crate not found") - - # Test compilation of pecos-hugr-qis crate - result = subprocess.run( - [cargo_path, "check", "-p", "pecos-hugr-qis", "--features", "llvm"], - capture_output=True, - text=True, - cwd=project_root, - check=False, - ) - - # returncode == 0 means SUCCESS, not failure! 
- assert result.returncode == 0, f"HUGR crate compilation failed: {result.stderr[:500]}" - - def test_rust_hugr_unit_tests(self) -> None: - """Test that HUGR unit tests pass.""" - # Check cargo availability - cargo_path = shutil.which("cargo") - if not cargo_path: - pytest.skip("Cargo not available") - - try: - subprocess.run( - [cargo_path, "--version"], - capture_output=True, - check=False, - ) - except FileNotFoundError: - pytest.skip("Cargo not available") - - project_root = Path(__file__).resolve().parent.parent.parent.parent.parent - hugr_crate = project_root / "crates" / "pecos-hugr-qis" - - if not hugr_crate.exists(): - pytest.skip("pecos-hugr-qis crate not found") - - # Run HUGR-specific unit tests - result = subprocess.run( - [ - cargo_path, - "test", - "-p", - "pecos-hugr-qis", - "--features", - "llvm", - "--", - "--nocapture", - ], - capture_output=True, - text=True, - cwd=project_root, - check=False, - ) - - assert result.returncode == 0, f"HUGR unit tests failed: {result.stderr[:500]}" - - # Count successful tests if output is available - if "test result: ok" in result.stdout: - test_count = result.stdout.count("test result: ok") - assert test_count > 0, "Should have at least one passing test" - def test_llvm_ir_format_validation(self) -> None: """Test that generated LLVM IR follows HUGR conventions.""" # Create a test LLVM IR file following HUGR conventions diff --git a/python/quantum-pecos/tests/slr_tests/__init__.py b/python/quantum-pecos/tests/slr_tests/__init__.py new file mode 100644 index 000000000..579b19fbf --- /dev/null +++ b/python/quantum-pecos/tests/slr_tests/__init__.py @@ -0,0 +1,7 @@ +"""SLR test package. + +The `__init__.py` here is load-bearing: without it, pytest's importlib +mode resolves `slr_tests/guppy/test_hugr_compilation.py` and `guppy/ +test_hugr_compilation.py` to the same module name (`guppy.test_hugr_ +compilation`), and the second-loaded file silently aliases to the first. 
+""" diff --git a/python/quantum-pecos/tests/slr-tests/guppy/__init__.py b/python/quantum-pecos/tests/slr_tests/guppy/__init__.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/__init__.py rename to python/quantum-pecos/tests/slr_tests/guppy/__init__.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/demo_improvements.py b/python/quantum-pecos/tests/slr_tests/guppy/demo_improvements.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/demo_improvements.py rename to python/quantum-pecos/tests/slr_tests/guppy/demo_improvements.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/demo_unpacking_rules.py b/python/quantum-pecos/tests/slr_tests/guppy/demo_unpacking_rules.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/demo_unpacking_rules.py rename to python/quantum-pecos/tests/slr_tests/guppy/demo_unpacking_rules.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_allocation_optimization.py b/python/quantum-pecos/tests/slr_tests/guppy/test_allocation_optimization.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_allocation_optimization.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_allocation_optimization.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_array_patterns.py b/python/quantum-pecos/tests/slr_tests/guppy/test_array_patterns.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_array_patterns.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_array_patterns.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_complex_permutations.py b/python/quantum-pecos/tests/slr_tests/guppy/test_complex_permutations.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_complex_permutations.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_complex_permutations.py diff --git 
a/python/quantum-pecos/tests/slr-tests/guppy/test_conditional_refinement.py b/python/quantum-pecos/tests/slr_tests/guppy/test_conditional_refinement.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_conditional_refinement.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_conditional_refinement.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_conditional_resources.py b/python/quantum-pecos/tests/slr_tests/guppy/test_conditional_resources.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_conditional_resources.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_conditional_resources.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_data_flow.py b/python/quantum-pecos/tests/slr_tests/guppy/test_data_flow.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_data_flow.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_data_flow.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_hugr_compilation.py b/python/quantum-pecos/tests/slr_tests/guppy/test_hugr_compilation.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_hugr_compilation.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_hugr_compilation.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_hugr_error_messages.py b/python/quantum-pecos/tests/slr_tests/guppy/test_hugr_error_messages.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_hugr_error_messages.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_hugr_error_messages.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_ir_basic.py b/python/quantum-pecos/tests/slr_tests/guppy/test_ir_basic.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_ir_basic.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_ir_basic.py diff --git 
a/python/quantum-pecos/tests/slr-tests/guppy/test_ir_for_loops.py b/python/quantum-pecos/tests/slr_tests/guppy/test_ir_for_loops.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_ir_for_loops.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_ir_for_loops.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_ir_generator.py b/python/quantum-pecos/tests/slr_tests/guppy/test_ir_generator.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_ir_generator.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_ir_generator.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_ir_hugr_compatibility.py b/python/quantum-pecos/tests/slr_tests/guppy/test_ir_hugr_compatibility.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_ir_hugr_compatibility.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_ir_hugr_compatibility.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_ir_permute.py b/python/quantum-pecos/tests/slr_tests/guppy/test_ir_permute.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_ir_permute.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_ir_permute.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_ir_scope_management.py b/python/quantum-pecos/tests/slr_tests/guppy/test_ir_scope_management.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_ir_scope_management.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_ir_scope_management.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_ir_while_loops.py b/python/quantum-pecos/tests/slr_tests/guppy/test_ir_while_loops.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_ir_while_loops.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_ir_while_loops.py diff --git 
a/python/quantum-pecos/tests/slr-tests/guppy/test_linearity_patterns.py b/python/quantum-pecos/tests/slr_tests/guppy/test_linearity_patterns.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_linearity_patterns.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_linearity_patterns.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_loop_generation.py b/python/quantum-pecos/tests/slr_tests/guppy/test_loop_generation.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_loop_generation.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_loop_generation.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_measurement_optimization.py b/python/quantum-pecos/tests/slr_tests/guppy/test_measurement_optimization.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_measurement_optimization.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_measurement_optimization.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_multi_qubit_measurements.py b/python/quantum-pecos/tests/slr_tests/guppy/test_multi_qubit_measurements.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_multi_qubit_measurements.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_multi_qubit_measurements.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_partial_array_returns.py b/python/quantum-pecos/tests/slr_tests/guppy/test_partial_array_returns.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_partial_array_returns.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_partial_array_returns.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_partial_consumption.py b/python/quantum-pecos/tests/slr_tests/guppy/test_partial_consumption.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_partial_consumption.py 
rename to python/quantum-pecos/tests/slr_tests/guppy/test_partial_consumption.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_register_wide_ops.py b/python/quantum-pecos/tests/slr_tests/guppy/test_register_wide_ops.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_register_wide_ops.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_register_wide_ops.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_simple_slr_to_guppy.py b/python/quantum-pecos/tests/slr_tests/guppy/test_simple_slr_to_guppy.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_simple_slr_to_guppy.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_simple_slr_to_guppy.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_steane_integration.py b/python/quantum-pecos/tests/slr_tests/guppy/test_steane_integration.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_steane_integration.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_steane_integration.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_unified_resource_planner.py b/python/quantum-pecos/tests/slr_tests/guppy/test_unified_resource_planner.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_unified_resource_planner.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_unified_resource_planner.py diff --git a/python/quantum-pecos/tests/slr-tests/guppy/test_unpacking_rules.py b/python/quantum-pecos/tests/slr_tests/guppy/test_unpacking_rules.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/guppy/test_unpacking_rules.py rename to python/quantum-pecos/tests/slr_tests/guppy/test_unpacking_rules.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/regression/random_cases/test_slr_phys.py b/python/quantum-pecos/tests/slr_tests/pecos/regression/random_cases/test_slr_phys.py similarity index 100% rename 
from python/quantum-pecos/tests/slr-tests/pecos/regression/random_cases/test_slr_phys.py rename to python/quantum-pecos/tests/slr_tests/pecos/regression/random_cases/test_slr_phys.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/conftest.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/conftest.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/conftest.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/conftest.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_basic_permutation.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_basic_permutation.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_basic_permutation.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_basic_permutation.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_complex_permutation.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_complex_permutation.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_complex_permutation.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_complex_permutation.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_conversion_with_qasm.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_conversion_with_qasm.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_conversion_with_qasm.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_conversion_with_qasm.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_creg_permutation.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_creg_permutation.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_creg_permutation.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_creg_permutation.py 
diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_guppy_generation.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_guppy_generation.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_guppy_generation.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_guppy_generation.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_guppy_generation_comprehensive.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_guppy_generation_comprehensive.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_guppy_generation_comprehensive.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_guppy_generation_comprehensive.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_measurement_permutation.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_measurement_permutation.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_measurement_permutation.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_measurement_permutation.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_measurement_unrolling.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_measurement_unrolling.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_measurement_unrolling.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_measurement_unrolling.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_pythonic_syntax_example.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_pythonic_syntax_example.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_pythonic_syntax_example.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_pythonic_syntax_example.py diff --git 
a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_quantum_circuit_conversion.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_quantum_circuit_conversion.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_quantum_circuit_conversion.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_quantum_circuit_conversion.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_quantum_permutation.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_quantum_permutation.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_quantum_permutation.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_quantum_permutation.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_register_permutation.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_register_permutation.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_register_permutation.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_register_permutation.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_repeat_to_guppy_pipeline.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_repeat_to_guppy_pipeline.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_repeat_to_guppy_pipeline.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_repeat_to_guppy_pipeline.py diff --git a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_return_validation.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_return_validation.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_return_validation.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_return_validation.py diff --git 
a/python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_stim_conversion.py b/python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_stim_conversion.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pecos/unit/slr/test_stim_conversion.py rename to python/quantum-pecos/tests/slr_tests/pecos/unit/slr/test_stim_conversion.py diff --git a/python/quantum-pecos/tests/slr-tests/pytest.ini b/python/quantum-pecos/tests/slr_tests/pytest.ini similarity index 100% rename from python/quantum-pecos/tests/slr-tests/pytest.ini rename to python/quantum-pecos/tests/slr_tests/pytest.ini diff --git a/python/quantum-pecos/tests/slr-tests/test_partial.py b/python/quantum-pecos/tests/slr_tests/test_partial.py similarity index 100% rename from python/quantum-pecos/tests/slr-tests/test_partial.py rename to python/quantum-pecos/tests/slr_tests/test_partial.py diff --git a/scripts/ci/setup-msvc.ps1 b/scripts/ci/setup-msvc.ps1 index 3a841a8cc..3a06ec7cb 100644 --- a/scripts/ci/setup-msvc.ps1 +++ b/scripts/ci/setup-msvc.ps1 @@ -11,7 +11,16 @@ param( [string]$Arch = "x64", - [string]$HostArch = "x64" + [string]$HostArch = "x64", + # When set: do NOT pin CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER, and + # export PECOS_MSVC_HOST_BIN (the dir containing the newest MSVC link.exe) + # so just recipes can prepend it to PATH. With no pin, rustc uses its own + # vswhere MSVC detection -- which both finds the linker via PATH AND sets + # up LIB/INCLUDE itself -- so prepending the MSVC bin ahead of git's + # /usr/bin (which shadows link.exe) is sufficient and needs no + # .cargo/config.toml linker/LIB/INCLUDE surgery. Only python-test.yml + # passes this; other workflows keep the pin. 
+ [switch]$NoPinLinker ) $ErrorActionPreference = "Stop" @@ -96,15 +105,57 @@ if ($after.ContainsKey("Path")) { } } -$linkPath = Get-ChildItem -Path (Join-Path $vsPath "VC\Tools\MSVC") -Recurse -Filter "link.exe" | - Where-Object { $_.FullName -like "*\bin\Hostx64\x64\*" } | +# Pick link.exe from the NEWEST installed MSVC toolset. VsDevCmd.bat (above) +# already configured LIB / INCLUDE / etc. against the newest toolset, and runners +# can have multiple MSVC versions side-by-side (14.29 from VS 2019 + 14.40+ from +# VS 2022). Naive `Select-Object -First 1` returns the lexically-first directory, +# which is the OLDEST -- mismatching it with the newest-MSVC LIB paths makes the +# linker fail with `LNK1181: cannot open input file 'kernel32.lib'` when +# anything outside cache (e.g. cold debug-profile build scripts) needs to link. +$latestMsvcDir = Get-ChildItem -Path (Join-Path $vsPath "VC\Tools\MSVC") -Directory | + Sort-Object { try { [version]$_.Name } catch { [version]"0.0" } } -Descending | Select-Object -First 1 -ExpandProperty FullName +if (-not $latestMsvcDir) { + throw "Could not find any MSVC toolset under $vsPath\VC\Tools\MSVC" +} +$msvcHostBin = Join-Path $latestMsvcDir "bin\Hostx64\x64" +$linkPath = Join-Path $msvcHostBin "link.exe" +if (-not (Test-Path $linkPath)) { + throw "MSVC link.exe not found at $linkPath" +} -if (-not $linkPath) { - throw "Could not find MSVC link.exe for x64" +if ($NoPinLinker) { + # Don't pin the linker -> rustc auto-detects MSVC (vswhere) and configures + # LIB/INCLUDE itself. Export the MSVC host-bin dir so just recipes can + # prepend it ahead of git's /usr/bin (which shadows link.exe) -- that is + # the only thing rustc's PATH-based linker lookup gets wrong here. 
+ Add-GitHubEnv -Name "PECOS_MSVC_HOST_BIN" -Value $msvcHostBin +} else { + Add-GitHubEnv -Name "CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER" -Value $linkPath } -Add-GitHubEnv -Name "CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER" -Value $linkPath +# The Justfile pins `set shell := ["bash", "-cu"]`, so every `just` recipe (and +# the `cargo` / `link.exe` it spawns) runs under git-bash, whose MSYS2 runtime +# rewrites "path-like" environment variables when crossing the bash<->native +# boundary. LIB / INCLUDE / LIBPATH from VsDevCmd.bat are semicolon-separated +# lists of Windows paths that contain spaces and parentheses (e.g. +# `C:\Program Files (x86)\Windows Kits\10\Lib\...\um\x64`). MSYS2's heuristic +# conversion corrupts these on the round trip, so the native linker receives a +# broken LIB and fails with `LNK1181: cannot open input file 'kernel32.lib'` +# (kernel32.lib lives in the Windows SDK's um\x64, which is exactly the entry +# that gets mangled). MSYS2_ENV_CONV_EXCL is the documented mechanism to opt +# specific variables out of that conversion so they pass through verbatim. +# Setting it here (via GITHUB_ENV) means every subsequent bash step -- and the +# nested `bash -cu` that `just` spawns for each recipe -- honors it, because the +# MSYS2 runtime reads MSYS2_ENV_CONV_EXCL from the process environment at +# startup. This is required for cold cargo builds (build scripts that haven't +# been pre-warmed in the rust-cache) to link on Windows. 
+Add-GitHubEnv -Name "MSYS2_ENV_CONV_EXCL" -Value "LIB;INCLUDE;LIBPATH" Write-Host "Configured Visual Studio environment from $vsPath for $Arch" -Write-Host "Configured Cargo MSVC linker: $linkPath" +if ($NoPinLinker) { + Write-Host "Linker NOT pinned; exported PECOS_MSVC_HOST_BIN=$msvcHostBin (recipes prepend it to PATH; rustc auto-detects MSVC + LIB)" +} else { + Write-Host "Configured Cargo MSVC linker: $linkPath" +} +Write-Host "Excluded LIB;INCLUDE;LIBPATH from MSYS2 path conversion" diff --git a/scripts/clean.py b/scripts/clean.py index 300958b88..a9fc0125f 100755 --- a/scripts/clean.py +++ b/scripts/clean.py @@ -216,10 +216,11 @@ def clean_selene(root: Path, *, dry_run: bool = False) -> None: # File-less leftover scaffolding — safe to remove. stale_count += 1 + action = "Would remove" if dry_run else "Removed" if dist_count > 0: - print(f" Removed {dist_count} _dist directories") + print(f" {action} {dist_count} _dist directories") if stale_count > 0: - print(f" Removed {stale_count} stale plugin scaffold directories") + print(f" {action} {stale_count} stale plugin scaffold directories") def clean_pecos_home(what: str, *, dry_run: bool = False) -> None: @@ -282,9 +283,9 @@ def main() -> int: clean_selene(root, dry_run=args.dry_run) clean_pecos_home("cache", dry_run=args.dry_run) clean_pecos_home("deps", dry_run=args.dry_run) - elif args.selene: - clean_selene(root, dry_run=args.dry_run) - elif args.cache or args.deps: + elif args.selene or args.cache or args.deps: + if args.selene: + clean_selene(root, dry_run=args.dry_run) if args.cache: clean_pecos_home("cache", dry_run=args.dry_run) if args.deps: diff --git a/scripts/docs/generate_doc_tests.py b/scripts/docs/generate_doc_tests.py index cc27e943d..d69774775 100755 --- a/scripts/docs/generate_doc_tests.py +++ b/scripts/docs/generate_doc_tests.py @@ -864,7 +864,7 @@ def generate_test_file(file_path: Path, blocks: list[CodeBlock]) -> str: "", " try:", " result = subprocess.run(", - ' ["cargo", "run", "-p", 
"pecos", "--features", "cli",', + ' ["cargo", "run", "-p", "pecos-cli", "--quiet",', ' "--", "cuda", "check", "-q"],', " capture_output=True, timeout=30, check=False,", " )", @@ -957,7 +957,7 @@ def _check_cuda_available() -> bool: # Check for CUDA toolkit using pecos CLI (same as Justfile pattern) try: result = subprocess.run( - ["cargo", "run", "-p", "pecos", "--features", "cli", "--", "cuda", "check", "-q"], + ["cargo", "run", "-p", "pecos-cli", "--quiet", "--", "cuda", "check", "-q"], capture_output=True, timeout=30, check=False, @@ -1324,6 +1324,7 @@ def main() -> None: total_rust_blocks = 0 total_skipped = 0 files_generated = 0 + generated_paths: set[Path] = set() for md_file in markdown_files: # Extract Python and Rust blocks @@ -1363,10 +1364,16 @@ def main() -> None: # Generate test file test_content = generate_test_file(md_file, pytest_blocks) - # Create output path preserving directory structure + # Create output path preserving directory structure. Sanitize each + # directory component so the output tree is a valid Python package + # (no dashes, etc.); otherwise pytest's importlib mode resolves + # `tests/docs/generated/foo-bar/test_x.py` to a module name with a + # dash, and any duplicate basename elsewhere in the tree silently + # aliases via sys.modules. See also: tests/slr_tests/__init__.py. 
relative_path = md_file.relative_to(args.docs_dir) test_file_name = f"test_{_sanitize_name(relative_path.stem)}.py" - output_subdir = args.output_dir / relative_path.parent + sanitized_parent = Path(*[_sanitize_name(p) for p in relative_path.parent.parts]) + output_subdir = args.output_dir / sanitized_parent output_path = output_subdir / test_file_name if args.dry_run: @@ -1381,10 +1388,22 @@ def main() -> None: init_file.write_text('"""Auto-generated doc test package."""\n') output_path.write_text(test_content) files_generated += 1 + generated_paths.add(output_path.resolve()) print( f"Generated: {output_path} ({len(python_blocks)} Python, {len(rust_blocks)} Rust blocks)", ) + # Prune stale auto-generated test files whose source markdown was deleted + # or now skips every block. Only touch files matching `test_*.py` under the + # output dir so __init__.py, conftest.py, and __pycache__ are left alone. + stale_removed = 0 + if not args.dry_run and args.output_dir.exists(): + for stale in args.output_dir.rglob("test_*.py"): + if stale.resolve() not in generated_paths: + stale.unlink() + stale_removed += 1 + print(f"Removed stale: {stale}") + # Generate unified Rust test crate if not args.dry_run: rust_crate_dir = args.output_dir.parent / "rust_crate" @@ -1397,6 +1416,8 @@ def main() -> None: print(f" Total code blocks: {total_python_blocks + total_rust_blocks}") print(f" Blocks with skip markers: {total_skipped}") print(f" Test files generated: {files_generated}") + if stale_removed: + print(f" Stale test files removed: {stale_removed}") print(f"\nRun tests with: pytest {args.output_dir} -v") print(f"Run Rust doc tests: cargo test --manifest-path {rust_crate_dir}/Cargo.toml")