diff --git a/CHANGELOG.md b/CHANGELOG.md index 4804c31..0f4b6b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,15 @@ All notable changes to the Toolpath workspace are documented here. ### Changed +- `toolpath-cli` 0.5.0: CLI restructure — external-boundary verbs collapsed into two symmetric verbs with an on-disk document cache at `~/.toolpath/documents/.json`: + - `path import ` replaces `path derive `. Writes each derived document into the cache and prints the resulting path to stdout. `--no-cache` sends JSON to stdout instead (preserving old pipe ergonomics: `path import git --no-cache | path render md`). `--force` overwrites an existing cache entry; default is error-on-exists, uniform across every source. + - `path export ` replaces `path incept` and `path project claude`. `export claude --input [--project | --output ]` covers both old commands; `` resolves as a bare cache id first (e.g. `claude-abc123`) or a filesystem path. + - `path cache ls | rm` list / remove cached documents. + - **New Pathbase round-trip:** `path import pathbase ` downloads a previously uploaded trace into the cache; `path export pathbase --input ` uploads. Reuses the existing `path auth login` session at `~/.toolpath/credentials.json`. Targets `POST/GET /api/v1/traces[/:id]`. `--url` on `export pathbase` warns when its host differs from the session's. + - **Cache id** is `-`. Git folds a short hash of the canonical repo path so two repos on the same branch don't collide (`git-a1b2c3d4-path-main` vs `git-e5f6a7b8-path-main`). `make_id` strips any trailing `.json` to avoid round-tripping into a `.json.json` file. + - **Atomic cache writes:** `write_cached` uses `O_CREAT | O_EXCL` when not forcing, so concurrent imports can't silently stomp each other. 
+ - **Deprecation aliases** (one-release overlap, hidden, stderr warning): `path derive` → `path import` (stdout preserved via implicit `--no-cache`), `path incept` → `path export claude --project `, `path project claude` → `path export claude`. + - Shared HTTP/session plumbing extracted into `cmd_pathbase` (from `cmd_auth`); config-dir resolution lives in a new `config` module so `cmd_cache` builds on wasm/emscripten targets where `cmd_pathbase` is gated out. - `toolpath-convo` 0.7.0: **breaking** — `file_write_diff` gains a `before_state: Option<&str>` parameter. For the `Write { content }` shape, callers can now supply the prior file contents (e.g. resolved from `git show HEAD:`) so the resulting diff shows `-` lines for replaced content instead of an addition-only hunk. `None` preserves the old behaviour (diff against `""`). `Edit` / `MultiEdit` shapes are unaffected — they carry their own `old_string`. `toolpath-claude`'s Claude-JSONL deriver wires a best-effort git-HEAD lookup for `Write` tool invocations; falls back silently to additions-only when the project isn't a git repo, the file isn't tracked, or `git` isn't on `PATH`. (#35) - `toolpath-convo` 0.6.0: adds `derive_path(view, config) -> Path` and `DeriveConfig` (moved in from the unreleased `toolpath-derive` crate). `toolpath-convo` now depends on `toolpath`. - `toolpath-convo` 0.6.0: adds `ConversationProjector` trait, `AnyProjector` type-erasing wrapper, `extract_conversation()` for Path → ConversationView, and conversation sub-protocol (`conversation.init`, `conversation.append`, `tool.invoke`, `agent://` URN scheme). diff --git a/CLAUDE.md b/CLAUDE.md index db08c9d..fc3f524 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -65,13 +65,27 @@ Requires Rust 1.85+ (edition 2024). Pinned to 1.94.0 via `rust-toolchain.toml`. The binary is called `path` (package: `toolpath-cli`): ```bash -cargo run -p toolpath-cli -- derive git --repo . 
--branch main --pretty -cargo run -p toolpath-cli -- derive github --repo owner/repo --pr 42 --pretty -cargo run -p toolpath-cli -- derive claude --project /path/to/project -cargo run -p toolpath-cli -- derive gemini --project /path/to/project -cargo run -p toolpath-cli -- derive codex --session -cargo run -p toolpath-cli -- derive opencode --session ses_ -cargo run -p toolpath-cli -- derive pi --project /path/to/project +# Import from external formats into the local toolpath cache (~/.toolpath/documents/) +cargo run -p toolpath-cli -- import git --repo . --branch main +cargo run -p toolpath-cli -- import github https://github.com/owner/repo/pull/42 +cargo run -p toolpath-cli -- import claude --project /path/to/project +cargo run -p toolpath-cli -- import gemini --project /path/to/project +cargo run -p toolpath-cli -- import codex --session +cargo run -p toolpath-cli -- import opencode --session ses_ +cargo run -p toolpath-cli -- import pi --project /path/to/project +cargo run -p toolpath-cli -- import pathbase +cargo run -p toolpath-cli -- import claude --project . --no-cache | path render md --input - + +# Export toolpath documents into external formats. is a cache id or a file path. +cargo run -p toolpath-cli -- export claude --input --project /tmp/sandbox +cargo run -p toolpath-cli -- export claude --input --output conv.jsonl +cargo run -p toolpath-cli -- export pathbase --input + +# Manage the cache +cargo run -p toolpath-cli -- cache ls +cargo run -p toolpath-cli -- cache rm + +# Inspect / analyze cargo run -p toolpath-cli -- render dot --input doc.json cargo run -p toolpath-cli -- render md --input doc.json --detail full cargo run -p toolpath-cli -- query dead-ends --input doc.json @@ -91,6 +105,10 @@ cargo run -p toolpath-cli -- auth whoami cargo run -p toolpath-cli -- auth logout ``` +`path derive`, `path incept`, and `path project` are deprecated aliases for `path import` / `path export claude` and print a deprecation warning to stderr. 
They will be removed in the release after next. + +The **cache** at `~/.toolpath/documents/<id>.json` is the single landing zone for every `import` (and for `import pathbase` downloads). Cache id is `<source>-<inner>` — e.g. `claude-abc123`, `git-main`, `pathbase-trc_01H…`. Files are `0600`, parent directory `0700`. `$TOOLPATH_CONFIG_DIR` overrides the root. Default behavior: error on cache hit; pass `--force` to overwrite. `--no-cache` sends the JSON to stdout for shell composition. + `path auth login` prints `<url>/auth/cli`; the user opens it, logs in, and pastes the 8-character code back into the CLI. The CLI calls `POST /api/v1/auth/cli/redeem` to trade the code for a bearer token, which it
# Build everything cargo build --workspace -# Derive a Toolpath document from this repo's git history -path derive git --repo . --branch main --pretty +# Import a Toolpath document from this repo's git history (cached under ~/.toolpath/documents/) +path import git --repo . --branch main # Visualize it -path derive git --repo . --branch main | path render dot | dot -Tpng -o graph.png +path import git --repo . --branch main --no-cache | path render dot | dot -Tpng -o graph.png # Render as Markdown for an LLM -path derive git --repo . --branch main | path render md +path import git --repo . --branch main --no-cache | path render md -# Derive from a GitHub pull request -path derive github --repo owner/repo --pr 42 --pretty +# Import from a GitHub pull request +path import github https://github.com/owner/repo/pull/42 -# Derive from Claude conversation logs -path derive claude --project /path/to/project --pretty +# Import from Claude conversation logs +path import claude --project /path/to/project -# Derive from Gemini CLI conversation logs -path derive gemini --project /path/to/project --pretty +# Import from Gemini CLI conversation logs +path import gemini --project /path/to/project -# Derive from Codex CLI rollout files (most recent session by default) -path derive codex --pretty +# Import from Codex CLI rollout files (most recent session by default) +path import codex -# Derive from opencode session database (most recent session by default) -path derive opencode --pretty +# Import from opencode session database (most recent session by default) +path import opencode + +# List what's in the cache +path cache ls + +# Export a cached document back into a Claude Code session +path export claude --input claude- --project /path/to/resume + +# Push a cached document to Pathbase +path auth login +path export pathbase --input claude- + +# Pull a trace from Pathbase back into the local cache +path import pathbase # Query for dead ends (abandoned approaches) path query dead-ends 
--input doc.json diff --git a/crates/toolpath-cli/Cargo.toml b/crates/toolpath-cli/Cargo.toml index 26d3c20..f29b701 100644 --- a/crates/toolpath-cli/Cargo.toml +++ b/crates/toolpath-cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "toolpath-cli" -version = "0.4.0" +version = "0.5.0" edition.workspace = true license.workspace = true repository = "https://github.com/empathic/toolpath" diff --git a/crates/toolpath-cli/src/cmd_auth.rs b/crates/toolpath-cli/src/cmd_auth.rs index 8ee74d1..8444af1 100644 --- a/crates/toolpath-cli/src/cmd_auth.rs +++ b/crates/toolpath-cli/src/cmd_auth.rs @@ -1,13 +1,11 @@ -use anyhow::{Context, Result, anyhow, bail}; +use anyhow::{Result, anyhow}; use clap::Subcommand; -use serde::{Deserialize, Serialize}; -use std::path::{Path, PathBuf}; +use std::path::Path; -const CONFIG_DIR_NAME: &str = ".toolpath"; -const CREDENTIALS_FILE: &str = "credentials.json"; -const CONFIG_DIR_ENV: &str = "TOOLPATH_CONFIG_DIR"; -const DEFAULT_URL: &str = "https://pathbase.dev"; -const PATHBASE_URL_ENV: &str = "PATHBASE_URL"; +use crate::cmd_pathbase::{ + StoredSession, api_logout, api_me, api_redeem, clear_session, credentials_path, load_session, + prompt_line, resolve_url, store_session, +}; #[derive(Subcommand, Debug)] pub enum AuthOp { @@ -29,26 +27,6 @@ pub enum AuthOp { Whoami, } -/// JSON blob persisted in the OS keychain. -#[derive(Debug, Clone, Serialize, Deserialize)] -struct StoredSession { - url: String, - token: String, - user: User, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -struct User { - id: String, - username: String, - #[serde(default)] - email: Option, - #[serde(default)] - display_name: Option, - #[serde(default)] - avatar_url: Option, -} - pub fn run(op: AuthOp) -> Result<()> { let path = credentials_path()?; match op { @@ -108,7 +86,6 @@ fn logout(path: &Path) -> Result<()> { } }; - // Best effort: tell the server to invalidate the session. 
if let Err(e) = api_logout(&stored.url, &stored.token) { eprintln!("warning: server logout failed: {e}"); } @@ -147,226 +124,3 @@ fn whoami(path: &Path) -> Result<()> { println!("server: {}", stored.url); Ok(()) } - -// ── URL + prompt helpers ──────────────────────────────────────────────── - -fn resolve_url(cli_url: Option) -> String { - let raw = cli_url - .or_else(|| std::env::var(PATHBASE_URL_ENV).ok()) - .unwrap_or_else(|| DEFAULT_URL.to_string()); - raw.trim_end_matches('/').to_string() -} - -fn prompt_line(prompt: &str) -> Result { - use std::io::{BufRead, Write}; - let mut stdout = std::io::stdout(); - stdout.write_all(prompt.as_bytes())?; - stdout.flush()?; - let stdin = std::io::stdin(); - let mut line = String::new(); - stdin.lock().read_line(&mut line)?; - Ok(line.trim().to_string()) -} - -// ── HTTP layer ────────────────────────────────────────────────────────── - -fn http_client() -> Result { - reqwest::blocking::Client::builder() - .user_agent(concat!("toolpath-cli/", env!("CARGO_PKG_VERSION"))) - .timeout(std::time::Duration::from_secs(30)) - .build() - .context("failed to build HTTP client") -} - -#[derive(Deserialize)] -struct RedeemResponse { - token: String, - user: User, -} - -fn api_redeem(base_url: &str, code: &str) -> Result<(String, User)> { - let client = http_client()?; - let resp = client - .post(format!("{base_url}/api/v1/auth/cli/redeem")) - .json(&serde_json::json!({ "code": code })) - .send() - .with_context(|| format!("connect to {base_url}"))?; - - let status = resp.status(); - let body = resp.text().unwrap_or_default(); - - if !status.is_success() { - if status == reqwest::StatusCode::UNAUTHORIZED { - bail!("code is invalid, already used, or expired — generate a new one"); - } - if status == reqwest::StatusCode::BAD_REQUEST { - // server returns a JSON {"error": "..."} we can surface - let msg = serde_json::from_str::(&body) - .ok() - .and_then(|v| v.get("error").and_then(|e| e.as_str()).map(String::from)) - .unwrap_or_else(|| 
body.clone()); - bail!("{msg}"); - } - bail!("redeem failed ({status}): {body}"); - } - - let parsed: RedeemResponse = - serde_json::from_str(&body).with_context(|| format!("parsing redeem response: {body}"))?; - Ok((parsed.token, parsed.user)) -} - -fn api_logout(base_url: &str, token: &str) -> Result<()> { - let client = http_client()?; - let resp = client - .post(format!("{base_url}/api/v1/auth/logout")) - .bearer_auth(token) - .send() - .with_context(|| format!("connect to {base_url}"))?; - if !resp.status().is_success() && resp.status() != reqwest::StatusCode::NO_CONTENT { - bail!("server returned {}", resp.status()); - } - Ok(()) -} - -fn api_me(base_url: &str, token: &str) -> Result { - let client = http_client()?; - let resp = client - .get(format!("{base_url}/api/v1/auth/me")) - .bearer_auth(token) - .send() - .with_context(|| format!("connect to {base_url}"))?; - - if resp.status() == reqwest::StatusCode::UNAUTHORIZED { - bail!("stored session is no longer valid — run `path auth login` again"); - } - if !resp.status().is_success() { - bail!("server returned {}", resp.status()); - } - let user: User = resp.json().context("parsing /auth/me response")?; - Ok(user) -} - -// ── File storage ──────────────────────────────────────────────────────── - -fn credentials_path() -> Result { - if let Some(override_) = std::env::var_os(CONFIG_DIR_ENV) { - return Ok(PathBuf::from(override_).join(CREDENTIALS_FILE)); - } - let home = std::env::var_os("HOME") - .ok_or_else(|| anyhow!("$HOME is not set — cannot locate credentials"))?; - Ok(PathBuf::from(home) - .join(CONFIG_DIR_NAME) - .join(CREDENTIALS_FILE)) -} - -fn store_session(path: &Path, s: &StoredSession) -> Result<()> { - let parent = path - .parent() - .ok_or_else(|| anyhow!("credentials path has no parent: {}", path.display()))?; - std::fs::create_dir_all(parent) - .with_context(|| format!("create {}", parent.display()))?; - #[cfg(unix)] - { - use std::os::unix::fs::PermissionsExt; - let _ = 
std::fs::set_permissions(parent, std::fs::Permissions::from_mode(0o700)); - } - - let payload = serde_json::to_string_pretty(s)?; - std::fs::write(path, payload).with_context(|| format!("write {}", path.display()))?; - #[cfg(unix)] - { - use std::os::unix::fs::PermissionsExt; - std::fs::set_permissions(path, std::fs::Permissions::from_mode(0o600)) - .with_context(|| format!("chmod 0600 {}", path.display()))?; - } - Ok(()) -} - -fn load_session(path: &Path) -> Result> { - match std::fs::read_to_string(path) { - Ok(s) if s.trim().is_empty() => Ok(None), - Ok(s) => Ok(Some(serde_json::from_str(&s).with_context(|| { - format!("decode credentials at {}", path.display()) - })?)), - Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(None), - Err(e) => Err(anyhow!("read {}: {e}", path.display())), - } -} - -fn clear_session(path: &Path) -> Result<()> { - match std::fs::remove_file(path) { - Ok(()) => Ok(()), - Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()), - Err(e) => Err(anyhow!("remove {}: {e}", path.display())), - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn sample() -> StoredSession { - StoredSession { - url: "https://pathbase.dev".into(), - token: "tok".into(), - user: User { - id: "u1".into(), - username: "alice".into(), - email: Some("alice@example.com".into()), - display_name: None, - avatar_url: None, - }, - } - } - - #[test] - fn resolve_url_prefers_cli_flag() { - let got = resolve_url(Some("https://example.com/".into())); - assert_eq!(got, "https://example.com"); - } - - #[test] - fn store_then_load_roundtrips_on_disk() { - let dir = tempfile::tempdir().unwrap(); - let path = dir.path().join("credentials.json"); - assert!(load_session(&path).unwrap().is_none()); - store_session(&path, &sample()).unwrap(); - let back = load_session(&path).unwrap().unwrap(); - assert_eq!(back.user.username, "alice"); - assert_eq!(back.token, "tok"); - } - - #[test] - fn store_creates_parent_directory() { - let dir = tempfile::tempdir().unwrap(); - 
let path = dir.path().join("nested").join("dir").join("credentials.json"); - store_session(&path, &sample()).unwrap(); - assert!(path.exists()); - } - - #[cfg(unix)] - #[test] - fn store_sets_restrictive_permissions() { - use std::os::unix::fs::PermissionsExt; - let dir = tempfile::tempdir().unwrap(); - let path = dir.path().join("credentials.json"); - store_session(&path, &sample()).unwrap(); - let mode = std::fs::metadata(&path).unwrap().permissions().mode() & 0o777; - assert_eq!(mode, 0o600, "expected 0600 on credentials file, got {mode:o}"); - } - - #[test] - fn clear_on_missing_file_is_ok() { - let dir = tempfile::tempdir().unwrap(); - let path = dir.path().join("nope.json"); - assert!(clear_session(&path).is_ok()); - } - - #[test] - fn load_empty_file_returns_none() { - let dir = tempfile::tempdir().unwrap(); - let path = dir.path().join("credentials.json"); - std::fs::write(&path, "").unwrap(); - assert!(load_session(&path).unwrap().is_none()); - } -} diff --git a/crates/toolpath-cli/src/cmd_cache.rs b/crates/toolpath-cli/src/cmd_cache.rs new file mode 100644 index 0000000..1d3037f --- /dev/null +++ b/crates/toolpath-cli/src/cmd_cache.rs @@ -0,0 +1,351 @@ +//! On-disk cache for toolpath documents at `$CONFIG_DIR/documents/`. +//! +//! `path import` and `path export` both use this as the pivot between +//! external formats and toolpath JSON. Users refer to cached documents +//! by a short id (filename without `.json`) instead of full paths. +//! The `path cache ls | rm` subcommands make the directory legible. 
+ +use anyhow::{Context, Result, anyhow, bail}; +use clap::Subcommand; +use std::path::PathBuf; +use toolpath::v1::Document; + +use crate::config::config_dir; + +const DOCUMENTS_DIR: &str = "documents"; + +#[derive(Subcommand, Debug)] +pub enum CacheOp { + /// List cached documents (newest first) + Ls, + /// Remove a cached document by id + Rm { + /// Cache id (filename without `.json`) + id: String, + }, +} + +pub fn run(op: CacheOp) -> Result<()> { + match op { + CacheOp::Ls => run_ls(), + CacheOp::Rm { id } => run_rm(&id), + } +} + +fn run_ls() -> Result<()> { + let entries = list_cached()?; + if entries.is_empty() { + eprintln!("No cached documents. Run `path import ` to create one."); + return Ok(()); + } + for e in entries { + println!("{}\t{}\t{}", e.id, e.bytes, e.path.display()); + } + Ok(()) +} + +fn run_rm(id: &str) -> Result<()> { + remove_cached(id)?; + eprintln!("Removed {id}"); + Ok(()) +} + +/// An entry surfaced by `list_cached`. +#[derive(Debug, Clone)] +pub(crate) struct CacheEntry { + pub id: String, + pub path: PathBuf, + pub bytes: u64, + pub modified: std::time::SystemTime, +} + +/// The cache directory: `$CONFIG_DIR/documents/`. +pub(crate) fn cache_dir() -> Result { + Ok(config_dir()?.join(DOCUMENTS_DIR)) +} + +/// Path for a given cache id (does not check existence). +pub(crate) fn cache_path(id: &str) -> Result { + if id.is_empty() || id.contains('/') || id.contains('\\') || id.ends_with(".json") { + bail!("invalid cache id: {id:?}"); + } + Ok(cache_dir()?.join(format!("{id}.json"))) +} + +/// Write a toolpath document to the cache under `id`. Errors if the +/// file already exists unless `force` is true. +/// +/// Uses `O_CREAT | O_EXCL` (`create_new`) when `force == false` so the +/// exists-check and the write are atomic — two concurrent `path import` +/// invocations racing the same id can't silently stomp each other. 
+pub(crate) fn write_cached(id: &str, doc: &Document, force: bool) -> Result { + use std::io::Write; + + let dir = cache_dir()?; + std::fs::create_dir_all(&dir) + .with_context(|| format!("create {}", dir.display()))?; + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let _ = std::fs::set_permissions(&dir, std::fs::Permissions::from_mode(0o700)); + } + + let path = cache_path(id)?; + let json = doc.to_json_pretty()?; + + let mut opts = std::fs::OpenOptions::new(); + opts.write(true).truncate(true); + if force { + opts.create(true); + } else { + opts.create_new(true); + } + + let mut file = match opts.open(&path) { + Ok(f) => f, + Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => { + bail!( + "cache entry {id} already exists at {}; pass --force to overwrite", + path.display() + ); + } + Err(e) => { + return Err(anyhow!("open {}: {e}", path.display())); + } + }; + file.write_all(json.as_bytes()) + .with_context(|| format!("write {}", path.display()))?; + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + std::fs::set_permissions(&path, std::fs::Permissions::from_mode(0o600)) + .with_context(|| format!("chmod 0600 {}", path.display()))?; + } + Ok(path) +} + +/// Resolve a `` string to a filesystem path. A ref is either a +/// bare cache id (looks up `$CACHE_DIR/.json`) or a file path +/// (contains `/` or `\\`, or ends with `.json`). 
+pub(crate) fn cache_ref(s: &str) -> Result { + if s.contains('/') || s.contains('\\') || s.ends_with(".json") { + let p = PathBuf::from(s); + if !p.exists() { + bail!( + "file not found: {}; if you meant a cache id, drop the path/extension and run `path cache ls`", + p.display() + ); + } + return Ok(p); + } + let p = cache_path(s)?; + if !p.exists() { + bail!( + "cache entry {s} not found at {}; run `path cache ls` to see what's cached", + p.display() + ); + } + Ok(p) +} + +pub(crate) fn list_cached() -> Result> { + let dir = cache_dir()?; + if !dir.exists() { + return Ok(Vec::new()); + } + let mut out = Vec::new(); + for entry in std::fs::read_dir(&dir) + .with_context(|| format!("read {}", dir.display()))? + { + let entry = entry?; + let path = entry.path(); + if path.extension().and_then(|s| s.to_str()) != Some("json") { + continue; + } + let id = match path.file_stem().and_then(|s| s.to_str()) { + Some(s) => s.to_string(), + None => continue, + }; + let meta = entry.metadata()?; + out.push(CacheEntry { + id, + path, + bytes: meta.len(), + modified: meta.modified().unwrap_or(std::time::SystemTime::UNIX_EPOCH), + }); + } + out.sort_by(|a, b| b.modified.cmp(&a.modified)); + Ok(out) +} + +pub(crate) fn remove_cached(id: &str) -> Result<()> { + let path = cache_path(id)?; + if !path.exists() { + return Err(anyhow!("cache entry {id} not found")); + } + std::fs::remove_file(&path).with_context(|| format!("remove {}", path.display()))?; + Ok(()) +} + +/// Build a cache id for a given source + inner id. +/// +/// Sanitizes `/` and other filesystem-unfriendly characters in the +/// inner id to `_` so (e.g.) git branch names land cleanly. Also strips +/// a trailing `.json` so the result never collides with the cache's +/// file extension (see [`cache_path`]). 
+pub(crate) fn make_id(source: &str, inner: &str) -> String { + let trimmed = inner.trim_end_matches(".json"); + let safe: String = trimmed + .chars() + .map(|c| match c { + '/' | '\\' | ':' | ' ' | '\t' => '_', + c => c, + }) + .collect(); + format!("{source}-{safe}") +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::config::{CONFIG_DIR_ENV, TEST_ENV_LOCK}; + + fn with_cfg R, R>(f: F) -> R { + let temp = tempfile::tempdir().unwrap(); + let _g = TEST_ENV_LOCK.lock().unwrap_or_else(|e| e.into_inner()); + unsafe { + std::env::set_var(CONFIG_DIR_ENV, temp.path()); + } + let result = f(temp.path()); + unsafe { + std::env::remove_var(CONFIG_DIR_ENV); + } + result + } + + fn sample_doc() -> Document { + Document::Step(toolpath::v1::Step::new( + "s1", + "human:alex", + "2026-01-01T00:00:00Z", + )) + } + + #[test] + fn write_and_read_cache_entry() { + with_cfg(|_| { + let doc = sample_doc(); + let p = write_cached("claude-abc", &doc, false).unwrap(); + assert!(p.exists()); + assert_eq!(p.file_name().unwrap(), "claude-abc.json"); + }); + } + + #[test] + fn write_errors_if_exists_without_force() { + with_cfg(|_| { + let doc = sample_doc(); + write_cached("claude-abc", &doc, false).unwrap(); + let err = write_cached("claude-abc", &doc, false).unwrap_err(); + assert!(err.to_string().contains("already exists")); + }); + } + + #[test] + fn write_force_overwrites() { + with_cfg(|_| { + let doc = sample_doc(); + write_cached("claude-abc", &doc, false).unwrap(); + write_cached("claude-abc", &doc, true).unwrap(); + }); + } + + #[test] + fn cache_ref_finds_existing_cache_entry() { + with_cfg(|_| { + let doc = sample_doc(); + let p = write_cached("claude-abc", &doc, false).unwrap(); + let resolved = cache_ref("claude-abc").unwrap(); + assert_eq!(resolved, p); + }); + } + + #[test] + fn cache_ref_returns_file_path_unchanged() { + let tmp = tempfile::NamedTempFile::new().unwrap(); + std::fs::write(tmp.path(), "{}").unwrap(); + let resolved = 
cache_ref(tmp.path().to_str().unwrap()).unwrap(); + assert_eq!(resolved, tmp.path()); + } + + #[test] + fn cache_ref_errors_on_missing_id() { + with_cfg(|_| { + let err = cache_ref("does-not-exist").unwrap_err(); + assert!(err.to_string().contains("not found")); + }); + } + + #[test] + fn cache_path_rejects_slashes_and_json_suffix() { + assert!(cache_path("foo/bar").is_err()); + assert!(cache_path("foo.json").is_err()); + assert!(cache_path("").is_err()); + } + + #[test] + fn list_empty_when_dir_missing() { + with_cfg(|_| { + assert!(list_cached().unwrap().is_empty()); + }); + } + + #[test] + fn list_and_remove_roundtrip() { + with_cfg(|_| { + let doc = sample_doc(); + write_cached("a", &doc, false).unwrap(); + write_cached("b", &doc, false).unwrap(); + let entries = list_cached().unwrap(); + assert_eq!(entries.len(), 2); + + remove_cached("a").unwrap(); + let entries = list_cached().unwrap(); + assert_eq!(entries.len(), 1); + assert_eq!(entries[0].id, "b"); + + assert!(remove_cached("a").is_err()); + }); + } + + #[cfg(unix)] + #[test] + fn writes_file_with_0600() { + use std::os::unix::fs::PermissionsExt; + with_cfg(|_| { + let p = write_cached("claude-abc", &sample_doc(), false).unwrap(); + let mode = std::fs::metadata(&p).unwrap().permissions().mode() & 0o777; + assert_eq!(mode, 0o600); + }); + } + + #[test] + fn make_id_sanitizes_slashes() { + assert_eq!(make_id("git", "main"), "git-main"); + assert_eq!(make_id("git", "feature/x"), "git-feature_x"); + assert_eq!(make_id("pathbase", "trc_01H"), "pathbase-trc_01H"); + } + + #[test] + fn make_id_strips_trailing_json() { + assert_eq!(make_id("pathbase", "trc_01H.json"), "pathbase-trc_01H"); + assert_eq!(make_id("git", "path-main.json"), "git-path-main"); + } + + #[test] + fn make_id_result_survives_cache_path() { + // Regression: make_id output must be accepted by cache_path. 
+ let id = make_id("pathbase", "trc_01H.json"); + assert!(cache_path(&id).is_ok()); + } +} diff --git a/crates/toolpath-cli/src/cmd_derive.rs b/crates/toolpath-cli/src/cmd_derive.rs index 65b1868..4f5b8dc 100644 --- a/crates/toolpath-cli/src/cmd_derive.rs +++ b/crates/toolpath-cli/src/cmd_derive.rs @@ -1,902 +1,20 @@ -#[cfg(not(target_os = "emscripten"))] -use anyhow::Context; -use anyhow::Result; -use clap::Subcommand; -use std::path::PathBuf; - -#[derive(Subcommand, Debug)] -pub enum DeriveSource { - /// Derive from git repository history - Git { - /// Path to the git repository - #[arg(short, long, default_value = ".")] - repo: PathBuf, - - /// Branch name(s). Format: `name` or `name:start` - #[arg(short, long, required = true)] - branch: Vec, - - /// Global base commit (overrides per-branch starts) - #[arg(long)] - base: Option, - - /// Remote name for URI generation - #[arg(long, default_value = "origin")] - remote: String, - - /// Graph title (for multi-branch output) - #[arg(long)] - title: Option, - }, - /// Derive from a GitHub pull request - Github { - /// PR URL (e.g. 
) - #[arg(index = 1)] - url: Option, - - /// Repository in owner/repo format (alternative to URL) - #[arg(short, long)] - repo: Option, - - /// Pull request number (required with --repo) - #[arg(long)] - pr: Option, - - /// Exclude CI check runs - #[arg(long)] - no_ci: bool, - - /// Exclude reviews and comments - #[arg(long)] - no_comments: bool, - }, - /// Derive from Claude conversation logs - Claude { - /// Project path (e.g., /Users/alex/myproject) - #[arg(short, long)] - project: String, - - /// Specific session ID - #[arg(short, long)] - session: Option, - - /// Process all sessions in the project - #[arg(long)] - all: bool, - }, - /// Derive from Gemini CLI conversation logs - Gemini { - /// Project path (e.g., /Users/alex/myproject) - #[arg(short, long)] - project: String, - - /// Specific session UUID (the directory name under chats/) - #[arg(short, long)] - session: Option, - - /// Process all sessions in the project - #[arg(long)] - all: bool, +//! Deprecation shim for `path derive`. +//! +//! `path derive` was renamed to `path import`. This shim preserves the +//! old surface (including stdout JSON output) for one release, printing a +//! deprecation warning on stderr and delegating into `cmd_import`. 
- /// Include thinking blocks in conversation.append text - #[arg(long)] - include_thinking: bool, - }, - /// Derive from Codex CLI rollout files - Codex { - /// Session id, UUID, or filename stem (default: most recent) - #[arg(short, long)] - session: Option, - - /// Process all sessions (emits one Path per session) - #[arg(long)] - all: bool, - }, - /// Derive from opencode session databases - Opencode { - /// Session id (default: most recent) - #[arg(short, long)] - session: Option, - - /// Process all sessions (emits one Path per session) - #[arg(long)] - all: bool, - - /// Filter by project id (SHA of repo's first root commit) - #[arg(long)] - project: Option, - - /// Skip snapshot-based file diff extraction - #[arg(long)] - no_snapshot_diffs: bool, - }, - /// Derive from Pi (pi.dev) coding-agent session logs - Pi { - /// Project path (cwd the session ran in) - #[arg(short, long)] - project: String, - - /// Specific session ID (default: most recent) - #[arg(short, long)] - session: Option, - - /// Process all sessions in the project (emits a Graph) - #[arg(long)] - all: bool, +use anyhow::Result; - /// Override the Pi sessions base directory (default: ~/.pi/agent/sessions) - #[arg(long)] - base: Option, - }, -} +pub use crate::cmd_import::ImportSource as DeriveSource; pub fn run(source: DeriveSource, pretty: bool) -> Result<()> { - match source { - DeriveSource::Git { - repo, - branch, - base, - remote, - title, - } => run_git(repo, branch, base, remote, title, pretty), - DeriveSource::Github { - url, - repo, - pr, - no_ci, - no_comments, - } => run_github(url, repo, pr, no_ci, no_comments, pretty), - DeriveSource::Claude { - project, - session, - all, - } => run_claude(project, session, all, pretty), - DeriveSource::Gemini { - project, - session, - all, - include_thinking, - } => run_gemini(project, session, all, include_thinking, pretty), - DeriveSource::Codex { session, all } => run_codex(session, all, pretty), - DeriveSource::Opencode { - session, - all, - 
project, - no_snapshot_diffs, - } => run_opencode(session, all, project, no_snapshot_diffs, pretty), - DeriveSource::Pi { - project, - session, - all, - base, - } => run_pi(project, session, all, base, pretty), - } -} - -fn run_git( - repo_path: PathBuf, - branches: Vec, - base: Option, - remote: String, - title: Option, - pretty: bool, -) -> Result<()> { - #[cfg(target_os = "emscripten")] - { - let _ = (repo_path, branches, base, remote, title, pretty); - anyhow::bail!( - "'path derive git' requires a native environment with access to a git repository" - ); - } - - #[cfg(not(target_os = "emscripten"))] - { - let repo_path = if repo_path.is_absolute() { - repo_path - } else { - std::env::current_dir()?.join(&repo_path) - }; - - let repo = git2::Repository::open(&repo_path) - .with_context(|| format!("Failed to open repository at {:?}", repo_path))?; - - let config = toolpath_git::DeriveConfig { - remote, - title, - base, - }; - - let doc = toolpath_git::derive(&repo, &branches, &config)?; - - let json = if pretty { - doc.to_json_pretty()? - } else { - doc.to_json()? - }; - - println!("{}", json); - Ok(()) - } -} - -fn run_github( - url: Option, - repo: Option, - pr: Option, - no_ci: bool, - no_comments: bool, - pretty: bool, -) -> Result<()> { - #[cfg(target_os = "emscripten")] - { - let _ = (url, repo, pr, no_ci, no_comments, pretty); - anyhow::bail!("'path derive github' requires a native environment with network access"); - } - - #[cfg(not(target_os = "emscripten"))] - { - // Resolve owner/repo/pr from either a URL or --repo/--pr flags - let (owner, repo_name, pr_number) = if let Some(url_str) = &url { - let parsed = toolpath_github::parse_pr_url(url_str).ok_or_else(|| { - anyhow::anyhow!("Invalid PR URL. 
Expected: https://github.com/owner/repo/pull/N") - })?; - (parsed.owner, parsed.repo, parsed.number) - } else if let (Some(repo_str), Some(pr_num)) = (&repo, pr) { - let (o, r) = repo_str - .split_once('/') - .ok_or_else(|| anyhow::anyhow!("Repository must be in owner/repo format"))?; - (o.to_string(), r.to_string(), pr_num) - } else { - anyhow::bail!( - "Provide a PR URL or both --repo and --pr.\n\ - Usage: path derive github https://github.com/owner/repo/pull/42\n\ - Usage: path derive github --repo owner/repo --pr 42" - ); - }; - - let token = toolpath_github::resolve_token()?; - let config = toolpath_github::DeriveConfig { - token, - include_ci: !no_ci, - include_comments: !no_comments, - ..Default::default() - }; - - let path = toolpath_github::derive_pull_request(&owner, &repo_name, pr_number, &config)?; - let doc = toolpath::v1::Document::Path(path); - - let json = if pretty { - doc.to_json_pretty()? - } else { - doc.to_json()? - }; - - println!("{}", json); - Ok(()) - } -} - -fn run_claude(project: String, session: Option, all: bool, pretty: bool) -> Result<()> { - let manager = toolpath_claude::ClaudeConvo::new(); - run_claude_with_manager(&manager, project, session, all, pretty) -} - -fn run_gemini( - project: String, - session: Option, - all: bool, - include_thinking: bool, - pretty: bool, -) -> Result<()> { - let manager = toolpath_gemini::GeminiConvo::new(); - run_gemini_with_manager(&manager, project, session, all, include_thinking, pretty) -} - -fn run_gemini_with_manager( - manager: &toolpath_gemini::GeminiConvo, - project: String, - session: Option, - all: bool, - include_thinking: bool, - pretty: bool, -) -> Result<()> { - let config = toolpath_gemini::derive::DeriveConfig { - project_path: Some(project.clone()), - include_thinking, - }; - - let docs: Vec = if let Some(session_uuid) = session { - let convo = manager - .read_conversation(&project, &session_uuid) - .map_err(|e| anyhow::anyhow!("{}", e))?; - 
vec![toolpath_gemini::derive::derive_path(&convo, &config)] - } else if all { - let convos = manager - .read_all_conversations(&project) - .map_err(|e| anyhow::anyhow!("{}", e))?; - toolpath_gemini::derive::derive_project(&convos, &config) - } else { - let convo = manager - .most_recent_conversation(&project) - .map_err(|e| anyhow::anyhow!("{}", e))? - .ok_or_else(|| anyhow::anyhow!("No conversations found for project: {}", project))?; - vec![toolpath_gemini::derive::derive_path(&convo, &config)] - }; - - for path in &docs { - let doc = toolpath::v1::Document::Path(path.clone()); - let json = if pretty { - doc.to_json_pretty()? - } else { - doc.to_json()? - }; - println!("{}", json); - } - Ok(()) -} - -fn run_claude_with_manager( - manager: &toolpath_claude::ClaudeConvo, - project: String, - session: Option, - all: bool, - pretty: bool, -) -> Result<()> { - let config = toolpath_claude::derive::DeriveConfig { - project_path: Some(project.clone()), - include_thinking: false, - }; - - let docs: Vec = if let Some(session_id) = session { - let convo = manager - .read_conversation(&project, &session_id) - .map_err(|e| anyhow::anyhow!("{}", e))?; - vec![toolpath_claude::derive::derive_path(&convo, &config)] - } else if all { - let convos = manager - .read_all_conversations(&project) - .map_err(|e| anyhow::anyhow!("{}", e))?; - toolpath_claude::derive::derive_project(&convos, &config) - } else { - // Default: most recent conversation - let convo = manager - .most_recent_conversation(&project) - .map_err(|e| anyhow::anyhow!("{}", e))? - .ok_or_else(|| anyhow::anyhow!("No conversations found for project: {}", project))?; - vec![toolpath_claude::derive::derive_path(&convo, &config)] - }; - - for path in &docs { - let doc = toolpath::v1::Document::Path(path.clone()); - let json = if pretty { - doc.to_json_pretty()? - } else { - doc.to_json()? 
- }; - println!("{}", json); - } - - Ok(()) -} - -fn run_opencode( - session: Option, - all: bool, - project: Option, - no_snapshot_diffs: bool, - pretty: bool, -) -> Result<()> { - #[cfg(target_os = "emscripten")] - { - let _ = (session, all, project, no_snapshot_diffs, pretty); - anyhow::bail!( - "'path derive opencode' requires a native environment (SQLite + git2 not available under wasm)" - ); - } - - #[cfg(not(target_os = "emscripten"))] - { - let manager = toolpath_opencode::OpencodeConvo::new(); - let config = toolpath_opencode::derive::DeriveConfig { - no_snapshot_diffs, - ..Default::default() - }; - - let docs: Vec = if all { - let metas = manager - .io() - .list_session_metadata(project.as_deref()) - .map_err(|e| anyhow::anyhow!("{}", e))?; - if metas.is_empty() { - anyhow::bail!("No opencode sessions found"); - } - let mut out = Vec::with_capacity(metas.len()); - for m in metas { - let s = manager - .read_session(&m.id) - .map_err(|e| anyhow::anyhow!("{}: {}", m.id, e))?; - out.push(toolpath_opencode::derive::derive_path_with_resolver( - &s, - &config, - manager.resolver(), - )); - } - out - } else if let Some(sid) = session { - let s = manager - .read_session(&sid) - .map_err(|e| anyhow::anyhow!("{}", e))?; - vec![toolpath_opencode::derive::derive_path_with_resolver( - &s, - &config, - manager.resolver(), - )] - } else { - let s = manager - .most_recent_session() - .map_err(|e| anyhow::anyhow!("{}", e))? - .ok_or_else(|| anyhow::anyhow!("No opencode sessions found"))?; - vec![toolpath_opencode::derive::derive_path_with_resolver( - &s, - &config, - manager.resolver(), - )] - }; - - for path in &docs { - let doc = toolpath::v1::Document::Path(path.clone()); - let json = if pretty { - doc.to_json_pretty()? - } else { - doc.to_json()? 
- }; - println!("{}", json); - } - Ok(()) - } -} - -fn run_pi( - project: String, - session: Option, - all: bool, - base: Option, - pretty: bool, -) -> Result<()> { - let manager = if let Some(path) = base { - let resolver = toolpath_pi::PathResolver::new().with_sessions_dir(&path); - toolpath_pi::PiConvo::with_resolver(resolver) - } else { - toolpath_pi::PiConvo::new() - }; - run_pi_with_manager(&manager, project, session, all, pretty) -} - -fn run_pi_with_manager( - manager: &toolpath_pi::PiConvo, - project: String, - session: Option, - all: bool, - pretty: bool, -) -> Result<()> { - let config = toolpath_pi::DeriveConfig::default(); - - let doc: toolpath::v1::Document = if all { - let sessions = manager - .read_all_sessions(&project) - .map_err(|e| anyhow::anyhow!("{}", e))?; - if sessions.is_empty() { - anyhow::bail!("No Pi sessions found for project: {}", project); - } - let graph = toolpath_pi::derive::derive_graph(&sessions, None, &config); - toolpath::v1::Document::Graph(graph) - } else if let Some(sid) = session { - let session = manager - .read_session(&project, &sid) - .map_err(|e| anyhow::anyhow!("{}", e))?; - let path = toolpath_pi::derive::derive_path(&session, &config); - toolpath::v1::Document::Path(path) - } else { - let session = manager - .most_recent_session(&project) - .map_err(|e| anyhow::anyhow!("{}", e))? - .ok_or_else(|| anyhow::anyhow!("No Pi sessions found for project: {}", project))?; - let path = toolpath_pi::derive::derive_path(&session, &config); - toolpath::v1::Document::Path(path) - }; - - let json = if pretty { - doc.to_json_pretty()? - } else { - doc.to_json()? 
- }; - println!("{}", json); - Ok(()) -} - -fn run_codex(session: Option, all: bool, pretty: bool) -> Result<()> { - let manager = toolpath_codex::CodexConvo::new(); - let config = toolpath_codex::derive::DeriveConfig { project_path: None }; - - let docs: Vec = if all { - let sessions = manager - .read_all_sessions() - .map_err(|e| anyhow::anyhow!("{}", e))?; - if sessions.is_empty() { - anyhow::bail!("No Codex sessions found in ~/.codex/sessions"); - } - toolpath_codex::derive::derive_project(&sessions, &config) - } else if let Some(sid) = session { - let s = manager - .read_session(&sid) - .map_err(|e| anyhow::anyhow!("{}", e))?; - vec![toolpath_codex::derive::derive_path(&s, &config)] - } else { - let s = manager - .most_recent_session() - .map_err(|e| anyhow::anyhow!("{}", e))? - .ok_or_else(|| anyhow::anyhow!("No Codex sessions found in ~/.codex/sessions"))?; - vec![toolpath_codex::derive::derive_path(&s, &config)] - }; - - for path in &docs { - let doc = toolpath::v1::Document::Path(path.clone()); - let json = if pretty { - doc.to_json_pretty()? - } else { - doc.to_json()? 
- }; - println!("{}", json); - } - Ok(()) -} - -#[cfg(all(test, not(target_os = "emscripten")))] -mod tests { - use super::*; - - fn init_temp_repo() -> (tempfile::TempDir, git2::Repository) { - let dir = tempfile::tempdir().unwrap(); - let repo = git2::Repository::init(dir.path()).unwrap(); - let mut config = repo.config().unwrap(); - config.set_str("user.name", "Test User").unwrap(); - config.set_str("user.email", "test@example.com").unwrap(); - (dir, repo) - } - - fn create_commit( - repo: &git2::Repository, - message: &str, - file_name: &str, - content: &str, - parent: Option<&git2::Commit>, - ) -> git2::Oid { - let mut index = repo.index().unwrap(); - let file_path = repo.workdir().unwrap().join(file_name); - std::fs::write(&file_path, content).unwrap(); - index.add_path(std::path::Path::new(file_name)).unwrap(); - index.write().unwrap(); - let tree_id = index.write_tree().unwrap(); - let tree = repo.find_tree(tree_id).unwrap(); - let sig = repo.signature().unwrap(); - let parents: Vec<&git2::Commit> = parent.into_iter().collect(); - repo.commit(Some("HEAD"), &sig, &sig, message, &tree, &parents) - .unwrap() - } - - #[test] - fn test_run_git_single_branch() { - let (dir, repo) = init_temp_repo(); - let oid = create_commit(&repo, "initial commit", "file.txt", "hello", None); - let c1 = repo.find_commit(oid).unwrap(); - create_commit(&repo, "second", "file.txt", "world", Some(&c1)); - - let default = toolpath_git::list_branches(&repo) - .unwrap() - .first() - .unwrap() - .name - .clone(); - - let result = run_git( - dir.path().to_path_buf(), - vec![default], - None, - "origin".to_string(), - None, - false, - ); - assert!(result.is_ok()); - } - - #[test] - fn test_run_git_pretty() { - let (dir, repo) = init_temp_repo(); - create_commit(&repo, "initial", "file.txt", "hello", None); - - let default = toolpath_git::list_branches(&repo) - .unwrap() - .first() - .unwrap() - .name - .clone(); - - let result = run_git( - dir.path().to_path_buf(), - vec![default], - 
None, - "origin".to_string(), - None, - true, - ); - assert!(result.is_ok()); - } - - #[test] - fn test_run_git_invalid_repo() { - let dir = tempfile::tempdir().unwrap(); - let result = run_git( - dir.path().to_path_buf(), - vec!["main".to_string()], - None, - "origin".to_string(), - None, - false, - ); - assert!(result.is_err()); - } - - fn setup_claude_manager() -> (tempfile::TempDir, toolpath_claude::ClaudeConvo) { - let temp = tempfile::tempdir().unwrap(); - let claude_dir = temp.path().join(".claude"); - let project_dir = claude_dir.join("projects/-test-project"); - std::fs::create_dir_all(&project_dir).unwrap(); - - let entry1 = r#"{"type":"user","uuid":"uuid-1","timestamp":"2024-01-01T00:00:00Z","cwd":"/test/project","message":{"role":"user","content":"Hello"}}"#; - let entry2 = r#"{"type":"assistant","uuid":"uuid-2","timestamp":"2024-01-01T00:00:01Z","message":{"role":"assistant","content":"Hi there"}}"#; - std::fs::write( - project_dir.join("session-abc.jsonl"), - format!("{}\n{}\n", entry1, entry2), - ) - .unwrap(); - - let resolver = toolpath_claude::PathResolver::new().with_claude_dir(&claude_dir); - let manager = toolpath_claude::ClaudeConvo::with_resolver(resolver); - (temp, manager) - } - - #[test] - fn test_run_claude_session() { - let (_temp, manager) = setup_claude_manager(); - let result = run_claude_with_manager( - &manager, - "/test/project".to_string(), - Some("session-abc".to_string()), - false, - false, - ); - assert!(result.is_ok()); - } - - #[test] - fn test_run_claude_session_pretty() { - let (_temp, manager) = setup_claude_manager(); - let result = run_claude_with_manager( - &manager, - "/test/project".to_string(), - Some("session-abc".to_string()), - false, - true, - ); - assert!(result.is_ok()); - } - - #[test] - fn test_run_claude_most_recent() { - let (_temp, manager) = setup_claude_manager(); - let result = - run_claude_with_manager(&manager, "/test/project".to_string(), None, false, false); - assert!(result.is_ok()); - } - - 
#[test] - fn test_run_claude_all() { - let (_temp, manager) = setup_claude_manager(); - let result = - run_claude_with_manager(&manager, "/test/project".to_string(), None, true, false); - assert!(result.is_ok()); - } - - fn setup_pi_manager() -> (tempfile::TempDir, toolpath_pi::PiConvo) { - let temp = tempfile::tempdir().unwrap(); - let sessions_dir = temp.path().join(".pi/agent/sessions"); - let project_dir = sessions_dir.join("--test-project--"); - std::fs::create_dir_all(&project_dir).unwrap(); - - let header = r#"{"type":"session","version":3,"id":"sess-1","timestamp":"2026-04-16T10:00:00Z","cwd":"/test/project"}"#; - let msg1 = r#"{"type":"message","id":"m1","timestamp":"2026-04-16T10:00:01Z","message":{"role":"user","content":"Hello","timestamp":1744797601000}}"#; - let msg2 = r#"{"type":"message","id":"m2","parentId":"m1","timestamp":"2026-04-16T10:00:02Z","message":{"role":"assistant","content":[{"type":"text","text":"Hi"}],"api":"a","provider":"anthropic","model":"claude-x","usage":{"input":1,"output":1,"cacheRead":0,"cacheWrite":0,"totalTokens":2,"cost":{"input":0.0,"output":0.0,"cacheRead":0.0,"cacheWrite":0.0,"total":0.0}},"stopReason":"stop","timestamp":1744797602000}}"#; - std::fs::write( - project_dir.join("2026-04-16_sess-1.jsonl"), - [header, msg1, msg2].join("\n"), - ) - .unwrap(); - - let resolver = toolpath_pi::PathResolver::new().with_sessions_dir(&sessions_dir); - let manager = toolpath_pi::PiConvo::with_resolver(resolver); - (temp, manager) - } - - #[test] - fn test_run_pi_session() { - let (_temp, manager) = setup_pi_manager(); - let result = run_pi_with_manager( - &manager, - "/test/project".to_string(), - Some("sess-1".to_string()), - false, - false, - ); - assert!(result.is_ok()); - } - - #[test] - fn test_run_pi_session_pretty() { - let (_temp, manager) = setup_pi_manager(); - let result = run_pi_with_manager( - &manager, - "/test/project".to_string(), - Some("sess-1".to_string()), - false, - true, - ); - assert!(result.is_ok()); - } - - 
#[test] - fn test_run_pi_most_recent() { - let (_temp, manager) = setup_pi_manager(); - let result = run_pi_with_manager(&manager, "/test/project".to_string(), None, false, false); - assert!(result.is_ok()); - } - - #[test] - fn test_run_pi_all() { - let (_temp, manager) = setup_pi_manager(); - let result = run_pi_with_manager(&manager, "/test/project".to_string(), None, true, false); - assert!(result.is_ok()); - } - - #[test] - fn test_run_pi_no_sessions() { - let temp = tempfile::tempdir().unwrap(); - let sessions_dir = temp.path().join(".pi/agent/sessions"); - let project_dir = sessions_dir.join("--empty-project--"); - std::fs::create_dir_all(&project_dir).unwrap(); - - let resolver = toolpath_pi::PathResolver::new().with_sessions_dir(&sessions_dir); - let manager = toolpath_pi::PiConvo::with_resolver(resolver); - - let result = - run_pi_with_manager(&manager, "/empty/project".to_string(), None, false, false); - assert!(result.is_err()); - assert!( - result - .unwrap_err() - .to_string() - .contains("No Pi sessions found") - ); - } - - #[test] - fn test_run_claude_no_conversations() { - let temp = tempfile::tempdir().unwrap(); - let claude_dir = temp.path().join(".claude"); - let project_dir = claude_dir.join("projects/-empty-project"); - std::fs::create_dir_all(&project_dir).unwrap(); - - let resolver = toolpath_claude::PathResolver::new().with_claude_dir(&claude_dir); - let manager = toolpath_claude::ClaudeConvo::with_resolver(resolver); - - let result = - run_claude_with_manager(&manager, "/empty/project".to_string(), None, false, false); - assert!(result.is_err()); - assert!( - result - .unwrap_err() - .to_string() - .contains("No conversations found") - ); - } - - fn setup_gemini_manager() -> (tempfile::TempDir, toolpath_gemini::GeminiConvo) { - let temp = tempfile::tempdir().unwrap(); - let gemini = temp.path().join(".gemini"); - let session_dir = gemini.join("tmp/myrepo/chats/session-uuid"); - std::fs::create_dir_all(&session_dir).unwrap(); - 
std::fs::write( - gemini.join("projects.json"), - r#"{"projects":{"/abs/myrepo":"myrepo"}}"#, - ) - .unwrap(); - std::fs::write( - session_dir.join("main.json"), - r#"{"sessionId":"s","projectHash":"","startTime":"2026-04-17T10:00:00Z","lastUpdated":"2026-04-17T10:10:00Z","directories":["/abs/myrepo"],"messages":[ - {"id":"u1","timestamp":"2026-04-17T10:00:00Z","type":"user","content":[{"text":"Hello"}]}, - {"id":"a1","timestamp":"2026-04-17T10:00:01Z","type":"gemini","content":"Hi","model":"gemini-3-flash-preview"} -]}"#, - ) - .unwrap(); - let resolver = toolpath_gemini::PathResolver::new().with_gemini_dir(&gemini); - (temp, toolpath_gemini::GeminiConvo::with_resolver(resolver)) - } - - #[test] - fn test_run_gemini_session() { - let (_t, mgr) = setup_gemini_manager(); - let result = run_gemini_with_manager( - &mgr, - "/abs/myrepo".to_string(), - Some("session-uuid".to_string()), - false, - false, - false, - ); - assert!(result.is_ok()); - } - - #[test] - fn test_run_gemini_most_recent() { - let (_t, mgr) = setup_gemini_manager(); - let result = - run_gemini_with_manager(&mgr, "/abs/myrepo".to_string(), None, false, false, true); - assert!(result.is_ok()); - } - - #[test] - fn test_run_gemini_all() { - let (_t, mgr) = setup_gemini_manager(); - let result = - run_gemini_with_manager(&mgr, "/abs/myrepo".to_string(), None, true, false, false); - assert!(result.is_ok()); - } - - #[test] - fn test_run_gemini_no_conversations() { - let temp = tempfile::tempdir().unwrap(); - let gemini = temp.path().join(".gemini"); - std::fs::create_dir_all(gemini.join("tmp/empty")).unwrap(); - let resolver = toolpath_gemini::PathResolver::new().with_gemini_dir(&gemini); - let mgr = toolpath_gemini::GeminiConvo::with_resolver(resolver); - - let result = run_gemini_with_manager( - &mgr, - "/no/such/project".to_string(), - None, - false, - false, - false, - ); - assert!(result.is_err()); - assert!( - result - .unwrap_err() - .to_string() - .contains("No conversations found") - ); - } - - 
#[test] - fn test_run_gemini_include_thinking() { - let (_t, mgr) = setup_gemini_manager(); - let result = run_gemini_with_manager( - &mgr, - "/abs/myrepo".to_string(), - Some("session-uuid".to_string()), - false, - true, - false, - ); - assert!(result.is_ok()); - } + eprintln!("warning: `path derive` is deprecated; use `path import` instead"); + // Preserve the old stdout-JSON behavior via --no-cache. + let args = crate::cmd_import::ImportArgs { + source, + force: false, + no_cache: true, + }; + crate::cmd_import::run(args, pretty) } diff --git a/crates/toolpath-cli/src/cmd_export.rs b/crates/toolpath-cli/src/cmd_export.rs new file mode 100644 index 0000000..9de6b46 --- /dev/null +++ b/crates/toolpath-cli/src/cmd_export.rs @@ -0,0 +1,381 @@ +//! `path export ` — emit toolpath documents into external formats. +//! +//! `export claude` projects a Path document into Claude Code's JSONL format +//! (either into `~/.claude/projects//.jsonl` for a +//! resumable session, or to a file / stdout). `export pathbase` uploads the +//! document to a Pathbase server. + +use anyhow::{Context, Result}; +use clap::Subcommand; +use std::path::PathBuf; + +use crate::cmd_cache::cache_ref; + +#[derive(Subcommand, Debug)] +pub enum ExportTarget { + /// Project a toolpath document into a Claude Code session + Claude { + /// Input: cache id (e.g. `claude-abc`) or path to a toolpath JSON file + #[arg(short, long)] + input: String, + + /// Target project directory. With this flag, writes the JSONL into + /// `~/.claude/projects//.jsonl` so `claude -r ` + /// can resume it. Defaults to cwd when no `--output` is given. + #[arg(short, long)] + project: Option, + + /// Output JSONL to this file. Mutually exclusive with --project. + #[arg(short, long, conflicts_with = "project")] + output: Option, + }, + /// Upload a toolpath document to Pathbase + Pathbase { + /// Input: cache id (e.g. 
`claude-abc`) or path to a toolpath JSON file + #[arg(short, long)] + input: String, + + /// Pathbase server URL (defaults to the stored session's server) + #[arg(long)] + url: Option, + }, +} + +pub fn run(target: ExportTarget) -> Result<()> { + match target { + ExportTarget::Claude { + input, + project, + output, + } => run_claude(input, project, output), + ExportTarget::Pathbase { input, url } => run_pathbase(input, url), + } +} + +fn run_claude(input: String, project: Option, output: Option) -> Result<()> { + #[cfg(target_os = "emscripten")] + { + let _ = (input, project, output); + anyhow::bail!("'path export claude' requires a native environment"); + } + + #[cfg(not(target_os = "emscripten"))] + { + let path = load_path_doc(&input)?; + let conversation = build_claude_conversation(&path)?; + let jsonl = serialize_jsonl(&conversation)?; + + match (project, output) { + (Some(project_dir), None) => { + let out_path = write_into_claude_project(&conversation, &jsonl, &project_dir)?; + let session_id = &conversation.session_id; + eprintln!( + "Exported session {} ({} entries) → {}", + session_id, + conversation.preamble.len() + conversation.entries.len(), + out_path.display() + ); + eprintln!(); + eprintln!("Resume with:"); + eprintln!( + " cd {} && claude -r {}", + project_dir.display(), + session_id + ); + } + (None, Some(out_path)) => { + std::fs::write(&out_path, &jsonl) + .with_context(|| format!("write {}", out_path.display()))?; + eprintln!("Wrote {} bytes to {}", jsonl.len(), out_path.display()); + } + (None, None) => { + println!("{}", jsonl); + } + (Some(_), Some(_)) => unreachable!("clap enforces conflicts_with"), + } + + Ok(()) + } +} + +#[cfg(not(target_os = "emscripten"))] +fn load_path_doc(input: &str) -> Result { + let file = cache_ref(input)?; + let json = std::fs::read_to_string(&file) + .with_context(|| format!("Failed to read {}", file.display()))?; + let doc: toolpath::v1::Document = serde_json::from_str(&json) + .map_err(|e| 
anyhow::anyhow!("Failed to parse toolpath document: {}", e))?; + match doc { + toolpath::v1::Document::Path(p) => Ok(p), + toolpath::v1::Document::Step(_) => { + anyhow::bail!("Expected a Path document, got a Step") + } + toolpath::v1::Document::Graph(_) => { + anyhow::bail!("Expected a Path document, got a Graph") + } + } +} + +#[cfg(not(target_os = "emscripten"))] +fn build_claude_conversation( + path: &toolpath::v1::Path, +) -> Result { + use toolpath_convo::ConversationProjector; + let view = toolpath_convo::extract_conversation(path); + let projector = toolpath_claude::ClaudeProjector; + projector + .project(&view) + .map_err(|e| anyhow::anyhow!("Projection failed: {}", e)) +} + +#[cfg(not(target_os = "emscripten"))] +fn serialize_jsonl(conv: &toolpath_claude::Conversation) -> Result { + let mut lines = Vec::with_capacity(conv.preamble.len() + conv.entries.len()); + for raw in &conv.preamble { + lines.push(serde_json::to_string(raw)?); + } + for entry in &conv.entries { + lines.push(serde_json::to_string(entry)?); + } + Ok(lines.join("\n")) +} + +#[cfg(not(target_os = "emscripten"))] +fn write_into_claude_project( + conv: &toolpath_claude::Conversation, + jsonl: &str, + project_dir: &std::path::Path, +) -> Result { + let project_dir = std::fs::canonicalize(project_dir) + .with_context(|| format!("resolve project path {}", project_dir.display()))?; + let project_path = project_dir.to_string_lossy(); + + let resolver = toolpath_claude::PathResolver::new(); + let claude_project_dir = resolver + .project_dir(&project_path) + .map_err(|e| anyhow::anyhow!("Cannot resolve Claude project dir: {}", e))?; + + std::fs::create_dir_all(&claude_project_dir) + .with_context(|| format!("create {}", claude_project_dir.display()))?; + + let session_id = &conv.session_id; + let out_path = claude_project_dir.join(format!("{}.jsonl", session_id)); + std::fs::write(&out_path, jsonl) + .with_context(|| format!("write {}", out_path.display()))?; + Ok(out_path) +} + +fn 
run_pathbase(input: String, url_flag: Option) -> Result<()> { + #[cfg(target_os = "emscripten")] + { + let _ = (input, url_flag); + anyhow::bail!("'path export pathbase' requires a native environment with network access"); + } + + #[cfg(not(target_os = "emscripten"))] + { + use crate::cmd_pathbase::{require_session, resolve_url, traces_post}; + + let file = cache_ref(&input)?; + let body = std::fs::read_to_string(&file) + .with_context(|| format!("Failed to read {}", file.display()))?; + // Validate locally so we give a clean error rather than relying on + // the server to reject malformed payloads. + toolpath::v1::Document::from_json(&body) + .map_err(|e| anyhow::anyhow!("Invalid toolpath document: {}", e))?; + + let session = require_session()?; + let base_url = match url_flag { + Some(u) => resolve_url(Some(u)), + None => session.url.clone(), + }; + + if host_of(&base_url) != host_of(&session.url) { + eprintln!( + "warning: uploading to {} with a token issued by {}; expect 401 unless this is the same deployment", + base_url, session.url + ); + } + + let trace = traces_post(&base_url, &session.token, &body)?; + println!("{}", trace.url); + eprintln!("Uploaded {} → {} ({} bytes)", file.display(), trace.id, body.len()); + Ok(()) + } +} + +/// Extract `scheme://host[:port]` from a URL, dropping any path/query. +/// Returns the input unchanged if it doesn't look like a URL. +#[cfg(not(target_os = "emscripten"))] +fn host_of(url: &str) -> &str { + let after_scheme = match url.find("://") { + Some(i) => i + 3, + None => return url, + }; + // Find the next `/` after the scheme://; everything before it is host[:port]. 
+ match url[after_scheme..].find('/') { + Some(off) => &url[..after_scheme + off], + None => url, + } +} + +#[cfg(all(test, not(target_os = "emscripten")))] +mod tests { + use super::*; + use std::collections::HashMap; + use toolpath::v1::{ArtifactChange, PathIdentity, Step, StepIdentity, StructuralChange}; + + fn make_path_doc() -> toolpath::v1::Document { + let artifact_key = "agent://claude/test-session"; + + let init_step = Step { + step: StepIdentity { + id: "step-001".to_string(), + parents: vec![], + actor: "tool:claude-code".to_string(), + timestamp: "2024-01-01T00:00:00Z".to_string(), + }, + change: { + let mut m = HashMap::new(); + m.insert( + artifact_key.to_string(), + ArtifactChange { + raw: None, + structural: Some(StructuralChange { + change_type: "conversation.init".to_string(), + extra: HashMap::new(), + }), + }, + ); + m + }, + meta: None, + }; + + let append_step = Step { + step: StepIdentity { + id: "step-002".to_string(), + parents: vec!["step-001".to_string()], + actor: "human:user".to_string(), + timestamp: "2024-01-01T00:00:01Z".to_string(), + }, + change: { + let mut m = HashMap::new(); + let mut extra = HashMap::new(); + extra.insert("role".to_string(), serde_json::json!("user")); + extra.insert("text".to_string(), serde_json::json!("Hello")); + m.insert( + artifact_key.to_string(), + ArtifactChange { + raw: None, + structural: Some(StructuralChange { + change_type: "conversation.append".to_string(), + extra, + }), + }, + ); + m + }, + meta: None, + }; + + let path = toolpath::v1::Path { + path: PathIdentity { + id: "test-path".to_string(), + base: None, + head: "step-002".to_string(), + }, + steps: vec![init_step, append_step], + meta: None, + }; + + toolpath::v1::Document::Path(path) + } + + #[test] + fn claude_output_to_file() { + let temp = tempfile::tempdir().unwrap(); + let input_path = temp.path().join("input.json"); + let output_path = temp.path().join("out.jsonl"); + + let doc = make_path_doc(); + std::fs::write(&input_path, 
serde_json::to_string(&doc).unwrap()).unwrap(); + + run_claude( + input_path.to_string_lossy().to_string(), + None, + Some(output_path.clone()), + ) + .unwrap(); + + let out = std::fs::read_to_string(&output_path).unwrap(); + assert!(!out.is_empty()); + for line in out.lines() { + serde_json::from_str::(line).unwrap(); + } + } + + #[test] + fn claude_rejects_non_path_doc() { + let temp = tempfile::tempdir().unwrap(); + let input_path = temp.path().join("input.json"); + let step = Step { + step: StepIdentity { + id: "s1".into(), + parents: vec![], + actor: "human:x".into(), + timestamp: "2024-01-01T00:00:00Z".into(), + }, + change: HashMap::new(), + meta: None, + }; + let doc = toolpath::v1::Document::Step(step); + std::fs::write(&input_path, serde_json::to_string(&doc).unwrap()).unwrap(); + + let err = run_claude(input_path.to_string_lossy().to_string(), None, None).unwrap_err(); + assert!(err.to_string().contains("Step")); + } + + #[test] + fn claude_invalid_json_errors() { + let temp = tempfile::tempdir().unwrap(); + let input_path = temp.path().join("input.json"); + std::fs::write(&input_path, "not json").unwrap(); + let err = run_claude(input_path.to_string_lossy().to_string(), None, None).unwrap_err(); + assert!(err.to_string().contains("parse") || err.to_string().contains("Failed")); + } + + #[test] + fn host_of_strips_path() { + assert_eq!(host_of("https://pathbase.dev"), "https://pathbase.dev"); + assert_eq!(host_of("https://pathbase.dev/"), "https://pathbase.dev"); + assert_eq!( + host_of("https://pathbase.dev/api/v1/traces"), + "https://pathbase.dev" + ); + assert_eq!( + host_of("http://127.0.0.1:9000/foo"), + "http://127.0.0.1:9000" + ); + assert_eq!(host_of("not-a-url"), "not-a-url"); + } + + #[test] + fn pathbase_requires_login() { + let temp = tempfile::tempdir().unwrap(); + let input_path = temp.path().join("input.json"); + std::fs::write(&input_path, serde_json::to_string(&make_path_doc()).unwrap()).unwrap(); + + let _g = 
crate::config::TEST_ENV_LOCK + .lock() + .unwrap_or_else(|e| e.into_inner()); + unsafe { + std::env::set_var(crate::config::CONFIG_DIR_ENV, temp.path()); + } + let err = run_pathbase(input_path.to_string_lossy().to_string(), None).unwrap_err(); + unsafe { + std::env::remove_var(crate::config::CONFIG_DIR_ENV); + } + assert!(err.to_string().contains("Not logged in")); + } +} diff --git a/crates/toolpath-cli/src/cmd_import.rs b/crates/toolpath-cli/src/cmd_import.rs new file mode 100644 index 0000000..96ef837 --- /dev/null +++ b/crates/toolpath-cli/src/cmd_import.rs @@ -0,0 +1,799 @@ +//! `path import ` — ingest external formats into toolpath documents. +//! +//! Default behavior writes each derived document into the on-disk cache at +//! `$CONFIG_DIR/documents/` under `-.json` and prints the +//! path to stdout. `--no-cache` sends the JSON to stdout instead, for shell +//! composition with `render | query | validate`. + +#[cfg(not(target_os = "emscripten"))] +use anyhow::Context; +use anyhow::Result; +use clap::Subcommand; +use std::path::PathBuf; +use toolpath::v1::Document; + +use crate::cmd_cache::{make_id, write_cached}; + +#[derive(Subcommand, Debug)] +pub enum ImportSource { + /// Import from git repository history + Git { + /// Path to the git repository + #[arg(short, long, default_value = ".")] + repo: PathBuf, + + /// Branch name(s). Format: `name` or `name:start` + #[arg(short, long, required = true)] + branch: Vec, + + /// Global base commit (overrides per-branch starts) + #[arg(long)] + base: Option, + + /// Remote name for URI generation + #[arg(long, default_value = "origin")] + remote: String, + + /// Graph title (for multi-branch output) + #[arg(long)] + title: Option, + }, + /// Import from a GitHub pull request + Github { + /// PR URL (e.g. 
) + #[arg(index = 1)] + url: Option, + + /// Repository in owner/repo format (alternative to URL) + #[arg(short, long)] + repo: Option, + + /// Pull request number (required with --repo) + #[arg(long)] + pr: Option, + + /// Exclude CI check runs + #[arg(long)] + no_ci: bool, + + /// Exclude reviews and comments + #[arg(long)] + no_comments: bool, + }, + /// Import from Claude conversation logs + Claude { + /// Project path (e.g., /Users/alex/myproject) + #[arg(short, long)] + project: String, + + /// Specific session ID + #[arg(short, long)] + session: Option, + + /// Process all sessions in the project + #[arg(long)] + all: bool, + }, + /// Import from Gemini CLI conversation logs + Gemini { + /// Project path (e.g., /Users/alex/myproject) + #[arg(short, long)] + project: String, + + /// Specific session UUID (the directory name under chats/) + #[arg(short, long)] + session: Option, + + /// Process all sessions in the project + #[arg(long)] + all: bool, + + /// Include thinking blocks in conversation.append text + #[arg(long)] + include_thinking: bool, + }, + /// Import from Codex CLI rollout files + Codex { + /// Session id, UUID, or filename stem (default: most recent) + #[arg(short, long)] + session: Option, + + /// Process all sessions (emits one Path per session) + #[arg(long)] + all: bool, + }, + /// Import from opencode session databases + Opencode { + /// Session id (default: most recent) + #[arg(short, long)] + session: Option, + + /// Process all sessions (emits one Path per session) + #[arg(long)] + all: bool, + + /// Filter by project id (SHA of repo's first root commit) + #[arg(long)] + project: Option, + + /// Skip snapshot-based file diff extraction + #[arg(long)] + no_snapshot_diffs: bool, + }, + /// Import from Pi (pi.dev) coding-agent session logs + Pi { + /// Project path (cwd the session ran in) + #[arg(short, long)] + project: String, + + /// Specific session ID (default: most recent) + #[arg(short, long)] + session: Option, + + /// Process 
all sessions in the project (emits a Graph) + #[arg(long)] + all: bool, + + /// Override the Pi sessions base directory (default: ~/.pi/agent/sessions) + #[arg(long)] + base: Option, + }, + /// Import from Pathbase (download a previously uploaded trace) + Pathbase { + /// Trace id or full pathbase URL + #[arg(index = 1)] + target: String, + + /// Pathbase server URL (overrides $PATHBASE_URL; ignored if target is a URL) + #[arg(long)] + url: Option, + }, +} + +#[derive(clap::Args, Debug)] +pub struct ImportArgs { + #[command(subcommand)] + pub source: ImportSource, + + /// Overwrite the cache entry if it already exists + #[arg(long, global = true)] + pub force: bool, + + /// Print the toolpath JSON to stdout instead of writing the cache + #[arg(long, global = true)] + pub no_cache: bool, +} + +pub fn run(args: ImportArgs, pretty: bool) -> Result<()> { + let docs = derive(args.source)?; + emit(&docs, args.force, args.no_cache, pretty) +} + +struct DerivedDoc { + cache_id: String, + doc: Document, +} + +fn emit(docs: &[DerivedDoc], force: bool, no_cache: bool, pretty: bool) -> Result<()> { + if docs.is_empty() { + anyhow::bail!("no documents produced"); + } + for d in docs { + if no_cache { + let json = if pretty { + d.doc.to_json_pretty()? + } else { + d.doc.to_json()? 
+ }; + println!("{}", json); + } else { + let path = write_cached(&d.cache_id, &d.doc, force)?; + println!("{}", path.display()); + let summary = doc_summary(&d.doc); + eprintln!("Imported {} → {}", summary, d.cache_id); + } + } + Ok(()) +} + +fn doc_summary(doc: &Document) -> String { + match doc { + Document::Graph(g) => format!("graph {} ({} paths)", g.graph.id, g.paths.len()), + Document::Path(p) => format!("path {} ({} steps)", p.path.id, p.steps.len()), + Document::Step(s) => format!("step {}", s.step.id), + } +} + +fn derive(source: ImportSource) -> Result> { + match source { + ImportSource::Git { + repo, + branch, + base, + remote, + title, + } => derive_git(repo, branch, base, remote, title), + ImportSource::Github { + url, + repo, + pr, + no_ci, + no_comments, + } => derive_github(url, repo, pr, no_ci, no_comments), + ImportSource::Claude { + project, + session, + all, + } => derive_claude(project, session, all), + ImportSource::Gemini { + project, + session, + all, + include_thinking, + } => derive_gemini(project, session, all, include_thinking), + ImportSource::Codex { session, all } => derive_codex(session, all), + ImportSource::Opencode { + session, + all, + project, + no_snapshot_diffs, + } => derive_opencode(session, all, project, no_snapshot_diffs), + ImportSource::Pi { + project, + session, + all, + base, + } => derive_pi(project, session, all, base), + ImportSource::Pathbase { target, url } => derive_pathbase(target, url), + } +} + +// ── per-source derivations ───────────────────────────────────────────── + +fn derive_git( + repo_path: PathBuf, + branches: Vec, + base: Option, + remote: String, + title: Option, +) -> Result> { + #[cfg(target_os = "emscripten")] + { + let _ = (repo_path, branches, base, remote, title); + anyhow::bail!( + "'path import git' requires a native environment with access to a git repository" + ); + } + + #[cfg(not(target_os = "emscripten"))] + { + let repo_path = if repo_path.is_absolute() { + repo_path + } else { + 
std::env::current_dir()?.join(&repo_path) + }; + + let repo = git2::Repository::open(&repo_path) + .with_context(|| format!("Failed to open repository at {:?}", repo_path))?; + + let config = toolpath_git::DeriveConfig { + remote, + title, + base, + }; + + let doc = toolpath_git::derive(&repo, &branches, &config)?; + // Fold a short hash of the canonical repo path into the cache id so + // two repos on the same branch (both `main`) don't collide. + let canonical = std::fs::canonicalize(&repo_path).unwrap_or(repo_path.clone()); + let repo_tag = short_path_hash(&canonical.to_string_lossy()); + let inner = doc_inner_id(&doc); + let cache_id = make_id("git", &format!("{repo_tag}-{inner}")); + Ok(vec![DerivedDoc { cache_id, doc }]) + } +} + +/// 8-hex-char stable hash of a path string — used as a repo tag in +/// cache ids so imports from different repos don't collide. +fn short_path_hash(s: &str) -> String { + use std::hash::{Hash, Hasher}; + let mut h = std::collections::hash_map::DefaultHasher::new(); + s.hash(&mut h); + format!("{:08x}", h.finish() as u32) +} + +/// Extract the inner identifier from a document (Path.path.id, Graph.graph.id, etc.) +/// without source prefix. +fn doc_inner_id(doc: &Document) -> String { + match doc { + Document::Graph(g) => g.graph.id.clone(), + Document::Path(p) => p.path.id.clone(), + Document::Step(s) => s.step.id.clone(), + } +} + +fn derive_github( + url: Option, + repo: Option, + pr: Option, + no_ci: bool, + no_comments: bool, +) -> Result> { + #[cfg(target_os = "emscripten")] + { + let _ = (url, repo, pr, no_ci, no_comments); + anyhow::bail!("'path import github' requires a native environment with network access"); + } + + #[cfg(not(target_os = "emscripten"))] + { + let (owner, repo_name, pr_number) = if let Some(url_str) = &url { + let parsed = toolpath_github::parse_pr_url(url_str).ok_or_else(|| { + anyhow::anyhow!("Invalid PR URL. 
Expected: https://github.com/owner/repo/pull/N") + })?; + (parsed.owner, parsed.repo, parsed.number) + } else if let (Some(repo_str), Some(pr_num)) = (&repo, pr) { + let (o, r) = repo_str + .split_once('/') + .ok_or_else(|| anyhow::anyhow!("Repository must be in owner/repo format"))?; + (o.to_string(), r.to_string(), pr_num) + } else { + anyhow::bail!( + "Provide a PR URL or both --repo and --pr.\n\ + Usage: path import github https://github.com/owner/repo/pull/42\n\ + Usage: path import github --repo owner/repo --pr 42" + ); + }; + + let token = toolpath_github::resolve_token()?; + let config = toolpath_github::DeriveConfig { + token, + include_ci: !no_ci, + include_comments: !no_comments, + ..Default::default() + }; + + let path = toolpath_github::derive_pull_request(&owner, &repo_name, pr_number, &config)?; + let doc = Document::Path(path); + let cache_id = make_id("github", &format!("{owner}_{repo_name}-{pr_number}")); + Ok(vec![DerivedDoc { cache_id, doc }]) + } +} + +fn derive_claude(project: String, session: Option, all: bool) -> Result> { + let manager = toolpath_claude::ClaudeConvo::new(); + derive_claude_with_manager(&manager, project, session, all) +} + +fn derive_claude_with_manager( + manager: &toolpath_claude::ClaudeConvo, + project: String, + session: Option, + all: bool, +) -> Result> { + let config = toolpath_claude::derive::DeriveConfig { + project_path: Some(project.clone()), + include_thinking: false, + }; + + let paths: Vec = if let Some(session_id) = session { + let convo = manager + .read_conversation(&project, &session_id) + .map_err(|e| anyhow::anyhow!("{}", e))?; + vec![toolpath_claude::derive::derive_path(&convo, &config)] + } else if all { + let convos = manager + .read_all_conversations(&project) + .map_err(|e| anyhow::anyhow!("{}", e))?; + toolpath_claude::derive::derive_project(&convos, &config) + } else { + let convo = manager + .most_recent_conversation(&project) + .map_err(|e| anyhow::anyhow!("{}", e))? 
+ .ok_or_else(|| anyhow::anyhow!("No conversations found for project: {}", project))?; + vec![toolpath_claude::derive::derive_path(&convo, &config)] + }; + + Ok(paths + .into_iter() + .map(|p| { + let cache_id = make_id("claude", &p.path.id); + DerivedDoc { + cache_id, + doc: Document::Path(p), + } + }) + .collect()) +} + +fn derive_gemini( + project: String, + session: Option, + all: bool, + include_thinking: bool, +) -> Result> { + let manager = toolpath_gemini::GeminiConvo::new(); + derive_gemini_with_manager(&manager, project, session, all, include_thinking) +} + +fn derive_gemini_with_manager( + manager: &toolpath_gemini::GeminiConvo, + project: String, + session: Option, + all: bool, + include_thinking: bool, +) -> Result> { + let config = toolpath_gemini::derive::DeriveConfig { + project_path: Some(project.clone()), + include_thinking, + }; + + let paths: Vec = if let Some(session_uuid) = session { + let convo = manager + .read_conversation(&project, &session_uuid) + .map_err(|e| anyhow::anyhow!("{}", e))?; + vec![toolpath_gemini::derive::derive_path(&convo, &config)] + } else if all { + let convos = manager + .read_all_conversations(&project) + .map_err(|e| anyhow::anyhow!("{}", e))?; + toolpath_gemini::derive::derive_project(&convos, &config) + } else { + let convo = manager + .most_recent_conversation(&project) + .map_err(|e| anyhow::anyhow!("{}", e))? 
+ .ok_or_else(|| anyhow::anyhow!("No conversations found for project: {}", project))?; + vec![toolpath_gemini::derive::derive_path(&convo, &config)] + }; + + Ok(paths + .into_iter() + .map(|p| { + let cache_id = make_id("gemini", &p.path.id); + DerivedDoc { + cache_id, + doc: Document::Path(p), + } + }) + .collect()) +} + +fn derive_codex(session: Option, all: bool) -> Result> { + let manager = toolpath_codex::CodexConvo::new(); + let config = toolpath_codex::derive::DeriveConfig { project_path: None }; + + let paths: Vec = if all { + let sessions = manager + .read_all_sessions() + .map_err(|e| anyhow::anyhow!("{}", e))?; + if sessions.is_empty() { + anyhow::bail!("No Codex sessions found in ~/.codex/sessions"); + } + toolpath_codex::derive::derive_project(&sessions, &config) + } else if let Some(sid) = session { + let s = manager + .read_session(&sid) + .map_err(|e| anyhow::anyhow!("{}", e))?; + vec![toolpath_codex::derive::derive_path(&s, &config)] + } else { + let s = manager + .most_recent_session() + .map_err(|e| anyhow::anyhow!("{}", e))? 
+ .ok_or_else(|| anyhow::anyhow!("No Codex sessions found in ~/.codex/sessions"))?; + vec![toolpath_codex::derive::derive_path(&s, &config)] + }; + + Ok(paths + .into_iter() + .map(|p| { + let cache_id = make_id("codex", &p.path.id); + DerivedDoc { + cache_id, + doc: Document::Path(p), + } + }) + .collect()) +} + +fn derive_opencode( + session: Option, + all: bool, + project: Option, + no_snapshot_diffs: bool, +) -> Result> { + #[cfg(target_os = "emscripten")] + { + let _ = (session, all, project, no_snapshot_diffs); + anyhow::bail!( + "'path import opencode' requires a native environment (SQLite + git2 not available under wasm)" + ); + } + + #[cfg(not(target_os = "emscripten"))] + { + let manager = toolpath_opencode::OpencodeConvo::new(); + let config = toolpath_opencode::derive::DeriveConfig { + no_snapshot_diffs, + ..Default::default() + }; + + let paths: Vec = if all { + let metas = manager + .io() + .list_session_metadata(project.as_deref()) + .map_err(|e| anyhow::anyhow!("{}", e))?; + if metas.is_empty() { + anyhow::bail!("No opencode sessions found"); + } + let mut out = Vec::with_capacity(metas.len()); + for m in metas { + let s = manager + .read_session(&m.id) + .map_err(|e| anyhow::anyhow!("{}: {}", m.id, e))?; + out.push(toolpath_opencode::derive::derive_path_with_resolver( + &s, + &config, + manager.resolver(), + )); + } + out + } else if let Some(sid) = session { + let s = manager + .read_session(&sid) + .map_err(|e| anyhow::anyhow!("{}", e))?; + vec![toolpath_opencode::derive::derive_path_with_resolver( + &s, + &config, + manager.resolver(), + )] + } else { + let s = manager + .most_recent_session() + .map_err(|e| anyhow::anyhow!("{}", e))? 
+ .ok_or_else(|| anyhow::anyhow!("No opencode sessions found"))?; + vec![toolpath_opencode::derive::derive_path_with_resolver( + &s, + &config, + manager.resolver(), + )] + }; + + Ok(paths + .into_iter() + .map(|p| { + let cache_id = make_id("opencode", &p.path.id); + DerivedDoc { + cache_id, + doc: Document::Path(p), + } + }) + .collect()) + } +} + +fn derive_pi( + project: String, + session: Option, + all: bool, + base: Option, +) -> Result> { + let manager = if let Some(path) = base { + let resolver = toolpath_pi::PathResolver::new().with_sessions_dir(&path); + toolpath_pi::PiConvo::with_resolver(resolver) + } else { + toolpath_pi::PiConvo::new() + }; + derive_pi_with_manager(&manager, project, session, all) +} + +fn derive_pi_with_manager( + manager: &toolpath_pi::PiConvo, + project: String, + session: Option, + all: bool, +) -> Result> { + let config = toolpath_pi::DeriveConfig::default(); + + let doc: Document = if all { + let sessions = manager + .read_all_sessions(&project) + .map_err(|e| anyhow::anyhow!("{}", e))?; + if sessions.is_empty() { + anyhow::bail!("No Pi sessions found for project: {}", project); + } + let graph = toolpath_pi::derive::derive_graph(&sessions, None, &config); + Document::Graph(graph) + } else if let Some(sid) = session { + let session = manager + .read_session(&project, &sid) + .map_err(|e| anyhow::anyhow!("{}", e))?; + Document::Path(toolpath_pi::derive::derive_path(&session, &config)) + } else { + let session = manager + .most_recent_session(&project) + .map_err(|e| anyhow::anyhow!("{}", e))? 
+ .ok_or_else(|| anyhow::anyhow!("No Pi sessions found for project: {}", project))?; + Document::Path(toolpath_pi::derive::derive_path(&session, &config)) + }; + + let cache_id = make_id("pi", &doc_inner_id(&doc)); + Ok(vec![DerivedDoc { cache_id, doc }]) +} + +fn derive_pathbase(target: String, url_flag: Option) -> Result> { + #[cfg(target_os = "emscripten")] + { + let _ = (target, url_flag); + anyhow::bail!("'path import pathbase' requires a native environment with network access"); + } + + #[cfg(not(target_os = "emscripten"))] + { + use crate::cmd_pathbase::{require_session, traces_get}; + + let (base, id) = parse_pathbase_ref(&target, url_flag.as_deref())?; + let session = require_session()?; + // If the ref gave us an explicit base URL (via a full URL or --url), + // use that. Otherwise fall back to the stored session's server. + let base_url = base.unwrap_or_else(|| session.url.clone()); + let body = traces_get(&base_url, &session.token, &id)?; + let doc = Document::from_json(&body).map_err(|e| { + anyhow::anyhow!("server returned a non-toolpath document: {e}") + })?; + let cache_id = make_id("pathbase", &id); + Ok(vec![DerivedDoc { cache_id, doc }]) + } +} + +/// Parse a positional ref for `path import pathbase`. Returns `(override_base, id)`. +/// +/// If the ref is a full URL like `https://pathbase.dev/traces/trc_01H...`, the +/// host prefix replaces the server URL and the trailing segment is the id. +/// Otherwise the ref is a bare id; `--url` (via `url_flag`) or `$PATHBASE_URL` +/// / default apply via the caller's session. 
+#[cfg(not(target_os = "emscripten"))] +fn parse_pathbase_ref(target: &str, url_flag: Option<&str>) -> Result<(Option, String)> { + use crate::cmd_pathbase::resolve_url; + + let scheme = if target.starts_with("https://") { + Some("https://") + } else if target.starts_with("http://") { + Some("http://") + } else { + None + }; + + if let Some(scheme) = scheme { + let rest = &target[scheme.len()..]; + let (host, path) = match rest.split_once('/') { + Some((h, p)) => (h, p), + None => anyhow::bail!("URL has no trace id segment: {target}"), + }; + if host.is_empty() { + anyhow::bail!("URL is missing a host: {target}"); + } + let path = path + .split(['?', '#']) + .next() + .unwrap_or("") + .trim_end_matches('/'); + let id = path + .rsplit('/') + .find(|s| !s.is_empty()) + .ok_or_else(|| anyhow::anyhow!("URL has no trace id segment: {target}"))? + .to_string(); + let base = format!("{scheme}{host}"); + Ok((Some(base), id)) + } else { + let base = url_flag.map(|u| resolve_url(Some(u.to_string()))); + Ok((base, target.to_string())) + } +} + + +#[cfg(all(test, not(target_os = "emscripten")))] +mod tests { + use super::*; + + #[test] + fn parse_pathbase_ref_full_url() { + let (base, id) = + parse_pathbase_ref("https://pathbase.dev/traces/trc_01H", None).unwrap(); + assert_eq!(base.as_deref(), Some("https://pathbase.dev")); + assert_eq!(id, "trc_01H"); + } + + #[test] + fn parse_pathbase_ref_bare_id_with_url_flag() { + let (base, id) = parse_pathbase_ref("trc_01H", Some("https://other.example/")).unwrap(); + assert_eq!(base.as_deref(), Some("https://other.example")); + assert_eq!(id, "trc_01H"); + } + + #[test] + fn parse_pathbase_ref_bare_id_no_flag() { + let (base, id) = parse_pathbase_ref("trc_01H", None).unwrap(); + assert_eq!(base, None); + assert_eq!(id, "trc_01H"); + } + + #[test] + fn parse_pathbase_ref_url_with_trailing_slash() { + let (base, id) = + parse_pathbase_ref("https://pathbase.dev/traces/trc_01H/", None).unwrap(); + assert_eq!(base.as_deref(), 
Some("https://pathbase.dev")); + assert_eq!(id, "trc_01H"); + } + + fn setup_claude_manager() -> (tempfile::TempDir, toolpath_claude::ClaudeConvo) { + let temp = tempfile::tempdir().unwrap(); + let claude_dir = temp.path().join(".claude"); + let project_dir = claude_dir.join("projects/-test-project"); + std::fs::create_dir_all(&project_dir).unwrap(); + + let entry1 = r#"{"type":"user","uuid":"uuid-1","timestamp":"2024-01-01T00:00:00Z","cwd":"/test/project","message":{"role":"user","content":"Hello"}}"#; + let entry2 = r#"{"type":"assistant","uuid":"uuid-2","timestamp":"2024-01-01T00:00:01Z","message":{"role":"assistant","content":"Hi there"}}"#; + std::fs::write( + project_dir.join("session-abc.jsonl"), + format!("{}\n{}\n", entry1, entry2), + ) + .unwrap(); + + let resolver = toolpath_claude::PathResolver::new().with_claude_dir(&claude_dir); + let manager = toolpath_claude::ClaudeConvo::with_resolver(resolver); + (temp, manager) + } + + #[test] + fn derive_claude_session_returns_one_doc() { + let (_t, mgr) = setup_claude_manager(); + let out = derive_claude_with_manager( + &mgr, + "/test/project".to_string(), + Some("session-abc".to_string()), + false, + ) + .unwrap(); + assert_eq!(out.len(), 1); + assert!(out[0].cache_id.starts_with("claude-")); + } + + fn setup_claude_manager_with_two_sessions() + -> (tempfile::TempDir, toolpath_claude::ClaudeConvo) { + let temp = tempfile::tempdir().unwrap(); + let claude_dir = temp.path().join(".claude"); + let project_dir = claude_dir.join("projects/-test-project"); + std::fs::create_dir_all(&project_dir).unwrap(); + + // Use sufficiently distinct slugs so toolpath-claude's 8-char id + // prefix doesn't alias them into the same path.id. 
+ for (slug, ts) in [ + ("alpha-session-one", "2024-01-01"), + ("bravo-session-two", "2024-01-02"), + ] { + let u = format!( + r#"{{"type":"user","uuid":"u-{slug}","timestamp":"{ts}T00:00:00Z","cwd":"/test/project","message":{{"role":"user","content":"hi"}}}}"# + ); + let a = format!( + r#"{{"type":"assistant","uuid":"a-{slug}","timestamp":"{ts}T00:00:01Z","message":{{"role":"assistant","content":"hello"}}}}"# + ); + std::fs::write(project_dir.join(format!("{slug}.jsonl")), format!("{u}\n{a}\n")) + .unwrap(); + } + + let resolver = toolpath_claude::PathResolver::new().with_claude_dir(&claude_dir); + (temp, toolpath_claude::ClaudeConvo::with_resolver(resolver)) + } + + #[test] + fn derive_claude_all_emits_one_cache_entry_per_session() { + let (_t, mgr) = setup_claude_manager_with_two_sessions(); + let out = derive_claude_with_manager(&mgr, "/test/project".to_string(), None, true).unwrap(); + assert_eq!(out.len(), 2); + // Distinct cache ids so both can land in the cache without collision. + assert_ne!(out[0].cache_id, out[1].cache_id); + for d in &out { + assert!(d.cache_id.starts_with("claude-")); + } + } +} diff --git a/crates/toolpath-cli/src/cmd_incept.rs b/crates/toolpath-cli/src/cmd_incept.rs index d4f2a1b..2fed86a 100644 --- a/crates/toolpath-cli/src/cmd_incept.rs +++ b/crates/toolpath-cli/src/cmd_incept.rs @@ -1,214 +1,49 @@ -//! `path incept` — project a toolpath document into a Claude session -//! that Claude Code can load and resume. +//! Deprecation shim for `path incept`. //! -//! Format rules this command obeys are documented at -//! `docs/agents/formats/claude-code/writing-compatible-jsonl.md`. When a new -//! empirical constraint is discovered here, capture it there in the same -//! change. +//! `path incept --input X --project Y` became `path export claude --input X +//! --project Y`. This shim preserves the old flag shape for one release, +//! delegating to the new handler. 
use anyhow::Result; -use std::io::Read; -use std::path::PathBuf; - use clap::Args; +use std::path::PathBuf; #[derive(Args, Debug)] pub struct InceptArgs { - /// Input toolpath document (JSON). Reads from stdin if omitted. + /// Input toolpath document (JSON path, or cache id) #[arg(short, long)] - input: Option, + input: Option, - /// Target project directory. Claude Code will see the session - /// when run from this directory. Defaults to the current directory. + /// Target project directory. Defaults to the current directory. #[arg(short, long)] project: Option, } pub fn run(args: InceptArgs) -> Result<()> { - #[cfg(target_os = "emscripten")] - { - let _ = args; - anyhow::bail!("'path incept' requires a native environment"); - } - - #[cfg(not(target_os = "emscripten"))] - { - use toolpath_convo::ConversationProjector; - - // 1. Read the toolpath document (file or stdin) - let json = match &args.input { - Some(path) => std::fs::read_to_string(path) - .map_err(|e| anyhow::anyhow!("Failed to read {:?}: {}", path, e))?, - None => { - let mut buf = String::new(); - std::io::stdin() - .read_to_string(&mut buf) - .map_err(|e| anyhow::anyhow!("Failed to read stdin: {}", e))?; - buf - } - }; - - // 2. Parse as a toolpath Path document - let doc: toolpath::v1::Document = serde_json::from_str(&json) - .map_err(|e| anyhow::anyhow!("Failed to parse toolpath document: {}", e))?; - - let path = match doc { - toolpath::v1::Document::Path(p) => p, - toolpath::v1::Document::Step(_) => { - anyhow::bail!("Expected a Path document, got a Step") - } - toolpath::v1::Document::Graph(_) => { - anyhow::bail!("Expected a Path document, got a Graph") - } - }; - - // 3. Extract → Project - let view = toolpath_convo::extract_conversation(&path); - let projector = toolpath_claude::ClaudeProjector; - let conversation = projector - .project(&view) - .map_err(|e| anyhow::anyhow!("Projection failed: {}", e))?; - - // 4. 
Resolve target project directory - let project_dir = match &args.project { - Some(p) => std::fs::canonicalize(p) - .map_err(|e| anyhow::anyhow!("Cannot resolve project path {:?}: {}", p, e))?, - None => std::env::current_dir()?, - }; - let project_path = project_dir.to_string_lossy(); - - // 5. Write to ~/.claude/projects//.jsonl - let resolver = toolpath_claude::PathResolver::new(); - let claude_project_dir = resolver - .project_dir(&project_path) - .map_err(|e| anyhow::anyhow!("Cannot resolve Claude project dir: {}", e))?; - - std::fs::create_dir_all(&claude_project_dir)?; - - let session_id = &conversation.session_id; - let output_path = claude_project_dir.join(format!("{}.jsonl", session_id)); - - // Serialize preamble + entries as JSONL - let mut lines: Vec = - Vec::with_capacity(conversation.preamble.len() + conversation.entries.len()); - for raw in &conversation.preamble { - lines.push(serde_json::to_string(raw)?); - } - for entry in &conversation.entries { - lines.push(serde_json::to_string(entry)?); + eprintln!("warning: `path incept` is deprecated; use `path export claude --project ` instead"); + + let input = match args.input { + Some(s) => s, + None => { + use std::io::Read; + let mut buf = String::new(); + std::io::stdin() + .read_to_string(&mut buf) + .map_err(|e| anyhow::anyhow!("Failed to read stdin: {}", e))?; + let mut f = tempfile::NamedTempFile::new()?; + std::io::Write::write_all(&mut f, buf.as_bytes())?; + let (_file, path) = f.keep()?; + path.to_string_lossy().to_string() } + }; - std::fs::write(&output_path, lines.join("\n"))?; - - eprintln!( - "Incepted session {} ({} entries) into {}", - session_id, - conversation.preamble.len() + conversation.entries.len(), - output_path.display() - ); - eprintln!(); - eprintln!("Resume with:"); - eprintln!(" cd {} && claude -r {}", project_path, session_id); - - Ok(()) - } -} - -#[cfg(all(test, not(target_os = "emscripten")))] -mod tests { - use super::*; - use std::collections::HashMap; - use 
toolpath::v1::{ArtifactChange, PathIdentity, Step, StepIdentity, StructuralChange}; + let project = args + .project + .unwrap_or_else(|| std::env::current_dir().expect("cwd")); - fn make_test_doc() -> String { - let artifact = "agent://claude/test-incept-session"; - let path = toolpath::v1::Path { - path: PathIdentity { - id: "test-path".into(), - base: None, - head: "step-002".into(), - }, - steps: vec![ - Step { - step: StepIdentity { - id: "step-001".into(), - parents: vec![], - actor: "tool:claude-code".into(), - timestamp: "2024-01-01T00:00:00Z".into(), - }, - change: { - let mut m = HashMap::new(); - m.insert( - artifact.into(), - ArtifactChange { - raw: None, - structural: Some(StructuralChange { - change_type: "conversation.init".into(), - extra: HashMap::new(), - }), - }, - ); - m - }, - meta: None, - }, - Step { - step: StepIdentity { - id: "step-002".into(), - parents: vec!["step-001".into()], - actor: "human:user".into(), - timestamp: "2024-01-01T00:00:01Z".into(), - }, - change: { - let mut m = HashMap::new(); - let mut extra = HashMap::new(); - extra.insert("role".into(), serde_json::json!("user")); - extra.insert("text".into(), serde_json::json!("Hello from incept")); - m.insert( - artifact.into(), - ArtifactChange { - raw: None, - structural: Some(StructuralChange { - change_type: "conversation.append".into(), - extra, - }), - }, - ); - m - }, - meta: None, - }, - ], - meta: None, - }; - serde_json::to_string(&toolpath::v1::Document::Path(path)).unwrap() - } - - #[test] - fn test_incept_creates_session_file() { - let temp = tempfile::tempdir().unwrap(); - let project_dir = temp.path().join("my-project"); - std::fs::create_dir_all(&project_dir).unwrap(); - - let input_path = temp.path().join("input.json"); - std::fs::write(&input_path, make_test_doc()).unwrap(); - - let args = InceptArgs { - input: Some(input_path), - project: Some(project_dir.clone()), - }; - - // This will try to write to ~/.claude/projects/ which exists in the real env. 
- // For a proper isolated test, we'd need to mock the PathResolver. - // Instead, verify the function doesn't error and check output via CLI test. - let result = run(args); - // May fail if ~/.claude doesn't exist in CI, but in dev it should work - if result.is_err() { - eprintln!( - "Skipping incept test (no ~/.claude): {}", - result.unwrap_err() - ); - return; - } - } + crate::cmd_export::run(crate::cmd_export::ExportTarget::Claude { + input, + project: Some(project), + output: None, + }) } diff --git a/crates/toolpath-cli/src/cmd_pathbase.rs b/crates/toolpath-cli/src/cmd_pathbase.rs new file mode 100644 index 0000000..a17e067 --- /dev/null +++ b/crates/toolpath-cli/src/cmd_pathbase.rs @@ -0,0 +1,451 @@ +//! Shared Pathbase client helpers. +//! +//! Hosts the HTTP client and session-storage logic used by `cmd_auth`, +//! `cmd_import`, and `cmd_export`. Config-dir resolution lives in the +//! sibling `config` module so `cmd_cache` (which doesn't depend on +//! reqwest and must build on emscripten) can reuse it. + +use anyhow::{Context, Result, anyhow, bail}; +use serde::{Deserialize, Serialize}; +use std::path::{Path, PathBuf}; + +use crate::config::config_dir; + +pub(crate) const CREDENTIALS_FILE: &str = "credentials.json"; +pub(crate) const DEFAULT_URL: &str = "https://pathbase.dev"; +pub(crate) const PATHBASE_URL_ENV: &str = "PATHBASE_URL"; + +/// JSON blob persisted at `credentials.json`. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub(crate) struct StoredSession { + pub url: String, + pub token: String, + pub user: User, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub(crate) struct User { + pub id: String, + pub username: String, + #[serde(default)] + pub email: Option, + #[serde(default)] + pub display_name: Option, + #[serde(default)] + pub avatar_url: Option, +} + +/// Reference returned after uploading a trace. 
+#[derive(Debug, Clone, Deserialize)] +pub(crate) struct TraceRef { + pub id: String, + pub url: String, +} + +// ── URL + prompt helpers ──────────────────────────────────────────────── + +pub(crate) fn resolve_url(cli_url: Option) -> String { + let raw = cli_url + .or_else(|| std::env::var(PATHBASE_URL_ENV).ok()) + .unwrap_or_else(|| DEFAULT_URL.to_string()); + raw.trim_end_matches('/').to_string() +} + +pub(crate) fn prompt_line(prompt: &str) -> Result { + use std::io::{BufRead, Write}; + let mut stdout = std::io::stdout(); + stdout.write_all(prompt.as_bytes())?; + stdout.flush()?; + let stdin = std::io::stdin(); + let mut line = String::new(); + stdin.lock().read_line(&mut line)?; + Ok(line.trim().to_string()) +} + +// ── HTTP layer ────────────────────────────────────────────────────────── + +pub(crate) fn http_client() -> Result { + reqwest::blocking::Client::builder() + .user_agent(concat!("toolpath-cli/", env!("CARGO_PKG_VERSION"))) + .timeout(std::time::Duration::from_secs(30)) + .build() + .context("failed to build HTTP client") +} + +#[derive(Deserialize)] +pub(crate) struct RedeemResponse { + pub token: String, + pub user: User, +} + +pub(crate) fn api_redeem(base_url: &str, code: &str) -> Result<(String, User)> { + let client = http_client()?; + let resp = client + .post(format!("{base_url}/api/v1/auth/cli/redeem")) + .json(&serde_json::json!({ "code": code })) + .send() + .with_context(|| format!("connect to {base_url}"))?; + + let status = resp.status(); + let body = resp.text().unwrap_or_default(); + + if !status.is_success() { + if status == reqwest::StatusCode::UNAUTHORIZED { + bail!("code is invalid, already used, or expired — generate a new one"); + } + if status == reqwest::StatusCode::BAD_REQUEST { + let msg = serde_json::from_str::(&body) + .ok() + .and_then(|v| v.get("error").and_then(|e| e.as_str()).map(String::from)) + .unwrap_or_else(|| body.clone()); + bail!("{msg}"); + } + bail!("redeem failed ({status}): {body}"); + } + + let parsed: 
RedeemResponse = + serde_json::from_str(&body).with_context(|| format!("parsing redeem response: {body}"))?; + Ok((parsed.token, parsed.user)) +} + +pub(crate) fn api_logout(base_url: &str, token: &str) -> Result<()> { + let client = http_client()?; + let resp = client + .post(format!("{base_url}/api/v1/auth/logout")) + .bearer_auth(token) + .send() + .with_context(|| format!("connect to {base_url}"))?; + if !resp.status().is_success() && resp.status() != reqwest::StatusCode::NO_CONTENT { + bail!("server returned {}", resp.status()); + } + Ok(()) +} + +pub(crate) fn api_me(base_url: &str, token: &str) -> Result { + let client = http_client()?; + let resp = client + .get(format!("{base_url}/api/v1/auth/me")) + .bearer_auth(token) + .send() + .with_context(|| format!("connect to {base_url}"))?; + + if resp.status() == reqwest::StatusCode::UNAUTHORIZED { + bail!("stored session is no longer valid — run `path auth login` again"); + } + if !resp.status().is_success() { + bail!("server returned {}", resp.status()); + } + let user: User = resp.json().context("parsing /auth/me response")?; + Ok(user) +} + +/// `POST /api/v1/traces` — upload a toolpath document. 
+pub(crate) fn traces_post(base_url: &str, token: &str, body: &str) -> Result<TraceRef> { + let client = http_client()?; + let resp = client + .post(format!("{base_url}/api/v1/traces")) + .bearer_auth(token) + .header(reqwest::header::CONTENT_TYPE, "application/json") + .body(body.to_string()) + .send() + .with_context(|| format!("connect to {base_url}"))?; + + let status = resp.status(); + let text = resp.text().unwrap_or_default(); + + if status == reqwest::StatusCode::UNAUTHORIZED { + bail!("stored session is no longer valid — run `path auth login` again"); + } + if !status.is_success() { + let msg = serde_json::from_str::<serde_json::Value>(&text) + .ok() + .and_then(|v| v.get("error").and_then(|e| e.as_str()).map(String::from)) + .unwrap_or_else(|| text.clone()); + bail!("upload failed ({status}): {msg}"); + } + + serde_json::from_str(&text).with_context(|| format!("parsing upload response: {text}")) +} + +/// `GET /api/v1/traces/{id}` — download a toolpath document body (JSON). +pub(crate) fn traces_get(base_url: &str, token: &str, id: &str) -> Result<String> { + let client = http_client()?; + let resp = client + .get(format!("{base_url}/api/v1/traces/{id}")) + .bearer_auth(token) + .send() + .with_context(|| format!("connect to {base_url}"))?; + + let status = resp.status(); + let text = resp.text().unwrap_or_default(); + + if status == reqwest::StatusCode::UNAUTHORIZED { + bail!("stored session is no longer valid — run `path auth login` again"); + } + if status == reqwest::StatusCode::NOT_FOUND { + bail!("trace {id} not found on {base_url}"); + } + if !status.is_success() { + let msg = serde_json::from_str::<serde_json::Value>(&text) + .ok() + .and_then(|v| v.get("error").and_then(|e| e.as_str()).map(String::from)) + .unwrap_or_else(|| text.clone()); + bail!("download failed ({status}): {msg}"); + } + + Ok(text) +} + +// ── File storage ──────────────────────────────────────────────────────── + +pub(crate) fn credentials_path() -> Result<PathBuf> { + Ok(config_dir()?.join(CREDENTIALS_FILE)) +} + +pub(crate) fn
store_session(path: &Path, s: &StoredSession) -> Result<()> { + let parent = path + .parent() + .ok_or_else(|| anyhow!("credentials path has no parent: {}", path.display()))?; + std::fs::create_dir_all(parent) + .with_context(|| format!("create {}", parent.display()))?; + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let _ = std::fs::set_permissions(parent, std::fs::Permissions::from_mode(0o700)); + } + + let payload = serde_json::to_string_pretty(s)?; + std::fs::write(path, payload).with_context(|| format!("write {}", path.display()))?; + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + std::fs::set_permissions(path, std::fs::Permissions::from_mode(0o600)) + .with_context(|| format!("chmod 0600 {}", path.display()))?; + } + Ok(()) +} + +pub(crate) fn load_session(path: &Path) -> Result<Option<StoredSession>> { + match std::fs::read_to_string(path) { + Ok(s) if s.trim().is_empty() => Ok(None), + Ok(s) => Ok(Some(serde_json::from_str(&s).with_context(|| { + format!("decode credentials at {}", path.display()) + })?)), + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(None), + Err(e) => Err(anyhow!("read {}: {e}", path.display())), + } +} + +pub(crate) fn clear_session(path: &Path) -> Result<()> { + match std::fs::remove_file(path) { + Ok(()) => Ok(()), + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()), + Err(e) => Err(anyhow!("remove {}: {e}", path.display())), + } +} + +/// Load the stored session or bail with a helpful message. +pub(crate) fn require_session() -> Result<StoredSession> { + let path = credentials_path()?; + load_session(&path)?.ok_or_else(|| anyhow!("Not logged in.
Run `path auth login`.")) +} + +#[cfg(test)] +mod tests { + use super::*; + + fn sample() -> StoredSession { + StoredSession { + url: "https://pathbase.dev".into(), + token: "tok".into(), + user: User { + id: "u1".into(), + username: "alice".into(), + email: Some("alice@example.com".into()), + display_name: None, + avatar_url: None, + }, + } + } + + #[test] + fn resolve_url_prefers_cli_flag() { + let got = resolve_url(Some("https://example.com/".into())); + assert_eq!(got, "https://example.com"); + } + + #[test] + fn store_then_load_roundtrips_on_disk() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("credentials.json"); + assert!(load_session(&path).unwrap().is_none()); + store_session(&path, &sample()).unwrap(); + let back = load_session(&path).unwrap().unwrap(); + assert_eq!(back.user.username, "alice"); + assert_eq!(back.token, "tok"); + } + + #[test] + fn store_creates_parent_directory() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("nested").join("dir").join("credentials.json"); + store_session(&path, &sample()).unwrap(); + assert!(path.exists()); + } + + #[cfg(unix)] + #[test] + fn store_sets_restrictive_permissions() { + use std::os::unix::fs::PermissionsExt; + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("credentials.json"); + store_session(&path, &sample()).unwrap(); + let mode = std::fs::metadata(&path).unwrap().permissions().mode() & 0o777; + assert_eq!(mode, 0o600, "expected 0600 on credentials file, got {mode:o}"); + } + + #[test] + fn clear_on_missing_file_is_ok() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("nope.json"); + assert!(clear_session(&path).is_ok()); + } + + #[test] + fn load_empty_file_returns_none() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("credentials.json"); + std::fs::write(&path, "").unwrap(); + assert!(load_session(&path).unwrap().is_none()); + } + + // ── Mock HTTP server 
───────────────────────────── + + /// A one-shot HTTP/1.1 responder. Binds to 127.0.0.1 on a free port, + /// reads one request (headers + body), writes a canned response, closes. + struct MockServer { + port: u16, + thread: Option<std::thread::JoinHandle<Vec<u8>>>, + } + + impl MockServer { + fn start(status_line: &'static str, body: &'static str) -> Self { + use std::io::{BufRead, BufReader, Write}; + use std::net::TcpListener; + + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let port = listener.local_addr().unwrap().port(); + let thread = std::thread::spawn(move || { + let (mut stream, _addr) = listener.accept().unwrap(); + let mut reader = BufReader::new(stream.try_clone().unwrap()); + let mut req = Vec::new(); + loop { + let mut line = String::new(); + if reader.read_line(&mut line).unwrap() == 0 { + break; + } + req.extend_from_slice(line.as_bytes()); + if line == "\r\n" { + break; + } + } + let content_length = req + .split(|b| *b == b'\n') + .find_map(|line| { + let line = std::str::from_utf8(line).ok()?; + let (name, value) = line.trim_end_matches('\r').split_once(':')?; + if name.eq_ignore_ascii_case("content-length") { + value.trim().parse::<usize>().ok() + } else { + None + } + }) + .unwrap_or(0); + if content_length > 0 { + use std::io::Read; + let mut body_buf = vec![0u8; content_length]; + reader.read_exact(&mut body_buf).ok(); + req.extend_from_slice(&body_buf); + } + + let response = format!( + "{status_line}\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{body}", + body.len() + ); + let _ = stream.write_all(response.as_bytes()); + let _ = stream.flush(); + req + }); + MockServer { + port, + thread: Some(thread), + } + } + + fn base(&self) -> String { + format!("http://127.0.0.1:{}", self.port) + } + + fn request(mut self) -> Vec<u8> { + self.thread.take().unwrap().join().unwrap() + } + } + + #[test] + fn traces_post_returns_id_and_url_on_success() { + let server = MockServer::start( + "HTTP/1.1 200 OK",
r#"{"id":"trc_01H","url":"https://pathbase.dev/traces/trc_01H"}"#, + ); + let trace = traces_post(&server.base(), "tok", r#"{"Step":{}}"#).unwrap(); + assert_eq!(trace.id, "trc_01H"); + assert_eq!(trace.url, "https://pathbase.dev/traces/trc_01H"); + + let req = String::from_utf8(server.request()).unwrap(); + assert!(req.starts_with("POST /api/v1/traces "), "got: {req}"); + assert!( + req.to_lowercase().contains("authorization: bearer tok"), + "got: {req}" + ); + assert!(req.contains(r#"{"Step":{}}"#)); + } + + #[test] + fn traces_post_401_surfaces_relogin_message() { + let server = MockServer::start("HTTP/1.1 401 Unauthorized", r#"{"error":"bad"}"#); + let err = traces_post(&server.base(), "tok", "{}").unwrap_err(); + assert!(err.to_string().contains("run `path auth login`")); + } + + #[test] + fn traces_post_5xx_includes_server_message() { + let server = MockServer::start( + "HTTP/1.1 500 Internal Server Error", + r#"{"error":"database is on fire"}"#, + ); + let err = traces_post(&server.base(), "tok", "{}").unwrap_err(); + assert!(err.to_string().contains("database is on fire"), "{err}"); + } + + #[test] + fn traces_get_returns_body_on_success() { + let body = r#"{"Step":{"step":{"id":"s1","actor":"human:x","timestamp":"2024-01-01T00:00:00Z"},"change":{}}}"#; + let server = MockServer::start("HTTP/1.1 200 OK", body); + let got = traces_get(&server.base(), "tok", "trc_01H").unwrap(); + assert_eq!(got, body); + + let req = String::from_utf8(server.request()).unwrap(); + assert!(req.starts_with("GET /api/v1/traces/trc_01H "), "got: {req}"); + assert!( + req.to_lowercase().contains("authorization: bearer tok"), + "got: {req}" + ); + } + + #[test] + fn traces_get_404_says_not_found() { + let server = MockServer::start("HTTP/1.1 404 Not Found", ""); + let err = traces_get(&server.base(), "tok", "trc_nope").unwrap_err(); + assert!(err.to_string().contains("not found")); + } +} diff --git a/crates/toolpath-cli/src/cmd_project.rs b/crates/toolpath-cli/src/cmd_project.rs 
index 5316457..d43ea92 100644 --- a/crates/toolpath-cli/src/cmd_project.rs +++ b/crates/toolpath-cli/src/cmd_project.rs @@ -1,3 +1,9 @@ +//! Deprecation shim for `path project`. +//! +//! `path project claude --input X [--output Y]` became `path export claude +//! --input X [--output Y]`. This shim preserves the old surface for one +//! release. + use anyhow::Result; use clap::Subcommand; use std::path::PathBuf; @@ -6,9 +12,9 @@ use std::path::PathBuf; pub enum ProjectTarget { /// Project a toolpath document into Claude JSONL format Claude { - /// Input toolpath document (JSON) + /// Input toolpath document (JSON path, or cache id) #[arg(short, long)] - input: PathBuf, + input: String, /// Output file (JSONL). Prints to stdout if omitted. #[arg(short, long)] @@ -17,217 +23,14 @@ pub enum ProjectTarget { } pub fn run(target: ProjectTarget) -> Result<()> { + eprintln!("warning: `path project` is deprecated; use `path export` instead"); match target { - ProjectTarget::Claude { input, output } => run_claude(input, output), - } -} - -fn run_claude(input: PathBuf, output: Option<PathBuf>) -> Result<()> { - #[cfg(target_os = "emscripten")] - { - let _ = (input, output); - anyhow::bail!("'path project claude' requires a native environment"); - } - - #[cfg(not(target_os = "emscripten"))] - { - use toolpath_convo::ConversationProjector; - - // Read and parse the input document. - let json = std::fs::read_to_string(&input) - .map_err(|e| anyhow::anyhow!("Failed to read {:?}: {}", input, e))?; - - let doc: toolpath::v1::Document = serde_json::from_str(&json) - .map_err(|e| anyhow::anyhow!("Failed to parse toolpath document: {}", e))?; - - let path = match doc { - toolpath::v1::Document::Path(p) => p, - toolpath::v1::Document::Step(_) => { - anyhow::bail!("Expected a Path document, got a Step") - } - toolpath::v1::Document::Graph(_) => { - anyhow::bail!("Expected a Path document, got a Graph") - } - }; - - // Extract conversation view from the path.
- let view = toolpath_convo::extract_conversation(&path); - - // Project to Claude Conversation. - let projector = toolpath_claude::ClaudeProjector; - let conversation = projector - .project(&view) - .map_err(|e| anyhow::anyhow!("Projection failed: {}", e))?; - - // Serialize preamble + entries as JSONL lines. - let mut lines: Vec<String> = - Vec::with_capacity(conversation.preamble.len() + conversation.entries.len()); - for raw in &conversation.preamble { - let line = serde_json::to_string(raw) - .map_err(|e| anyhow::anyhow!("Failed to serialize preamble: {}", e))?; - lines.push(line); - } - for entry in &conversation.entries { - let line = serde_json::to_string(entry) - .map_err(|e| anyhow::anyhow!("Failed to serialize entry: {}", e))?; - lines.push(line); + ProjectTarget::Claude { input, output } => { + crate::cmd_export::run(crate::cmd_export::ExportTarget::Claude { + input, + project: None, + output, + }) } - let jsonl = lines.join("\n"); - - // Write to output file or stdout. - match output { - Some(path) => { - std::fs::write(&path, &jsonl) - .map_err(|e| anyhow::anyhow!("Failed to write {:?}: {}", path, e))?; - } - None => { - println!("{}", jsonl); - } - } - - Ok(()) - } -} - -#[cfg(all(test, not(target_os = "emscripten")))] -mod tests { - use super::*; - use std::collections::HashMap; - use toolpath::v1::{ArtifactChange, PathIdentity, Step, StepIdentity, StructuralChange}; - - fn make_path_doc() -> toolpath::v1::Document { - let artifact_key = "agent://claude/test-session"; - - let init_step = Step { - step: StepIdentity { - id: "step-001".to_string(), - parents: vec![], - actor: "tool:claude-code".to_string(), - timestamp: "2024-01-01T00:00:00Z".to_string(), - }, - change: { - let mut m = HashMap::new(); - m.insert( - artifact_key.to_string(), - ArtifactChange { - raw: None, - structural: Some(StructuralChange { - change_type: "conversation.init".to_string(), - extra: HashMap::new(), - }), - }, - ); - m - }, - meta: None, - }; - - let append_step = Step {
step: StepIdentity { - id: "step-002".to_string(), - parents: vec!["step-001".to_string()], - actor: "human:user".to_string(), - timestamp: "2024-01-01T00:00:01Z".to_string(), - }, - change: { - let mut m = HashMap::new(); - let mut extra = HashMap::new(); - extra.insert("role".to_string(), serde_json::json!("user")); - extra.insert("text".to_string(), serde_json::json!("Hello")); - m.insert( - artifact_key.to_string(), - ArtifactChange { - raw: None, - structural: Some(StructuralChange { - change_type: "conversation.append".to_string(), - extra, - }), - }, - ); - m - }, - meta: None, - }; - - let path = toolpath::v1::Path { - path: PathIdentity { - id: "test-path".to_string(), - base: None, - head: "step-002".to_string(), - }, - steps: vec![init_step, append_step], - meta: None, - }; - - toolpath::v1::Document::Path(path) - } - - #[test] - fn test_run_claude_to_stdout() { - let temp = tempfile::tempdir().unwrap(); - let input_path = temp.path().join("input.json"); - - let doc = make_path_doc(); - let json = serde_json::to_string(&doc).unwrap(); - std::fs::write(&input_path, &json).unwrap(); - - let result = run_claude(input_path, None); - assert!(result.is_ok()); - } - - #[test] - fn test_run_claude_to_file() { - let temp = tempfile::tempdir().unwrap(); - let input_path = temp.path().join("input.json"); - let output_path = temp.path().join("output.jsonl"); - - let doc = make_path_doc(); - let json = serde_json::to_string(&doc).unwrap(); - std::fs::write(&input_path, &json).unwrap(); - - let result = run_claude(input_path, Some(output_path.clone())); - assert!(result.is_ok()); - - let output = std::fs::read_to_string(&output_path).unwrap(); - // Should have at least one JSONL entry - assert!(!output.is_empty()); - // Each non-empty line should be valid JSON - for line in output.lines() { - let parsed: serde_json::Value = serde_json::from_str(line).unwrap(); - assert!(parsed.is_object()); - } - } - - #[test] - fn test_run_claude_rejects_step_doc() { - let temp = 
tempfile::tempdir().unwrap(); - let input_path = temp.path().join("input.json"); - - let step = toolpath::v1::Step { - step: StepIdentity { - id: "s1".to_string(), - parents: vec![], - actor: "human:alex".to_string(), - timestamp: "2024-01-01T00:00:00Z".to_string(), - }, - change: HashMap::new(), - meta: None, - }; - let doc = toolpath::v1::Document::Step(step); - std::fs::write(&input_path, serde_json::to_string(&doc).unwrap()).unwrap(); - - let result = run_claude(input_path, None); - assert!(result.is_err()); - assert!(result.unwrap_err().to_string().contains("Step")); - } - - #[test] - fn test_run_claude_invalid_json() { - let temp = tempfile::tempdir().unwrap(); - let input_path = temp.path().join("input.json"); - std::fs::write(&input_path, "not valid json").unwrap(); - - let result = run_claude(input_path, None); - assert!(result.is_err()); } } diff --git a/crates/toolpath-cli/src/config.rs b/crates/toolpath-cli/src/config.rs new file mode 100644 index 0000000..f9fca69 --- /dev/null +++ b/crates/toolpath-cli/src/config.rs @@ -0,0 +1,48 @@ +//! Shared config-directory resolution. +//! +//! Kept in its own module so it can be used by `cmd_cache` (needed on every +//! target, including wasm/emscripten) and `cmd_pathbase` (native-only). +//! `cmd_pathbase` is cfg-gated; without this split, anything `cmd_cache` +//! imports from it would break wasm builds. + +use anyhow::{Result, anyhow}; +use std::path::PathBuf; + +pub(crate) const CONFIG_DIR_NAME: &str = ".toolpath"; +pub(crate) const CONFIG_DIR_ENV: &str = "TOOLPATH_CONFIG_DIR"; + +/// The configured toolpath config directory (default `~/.toolpath`, +/// overridable via `$TOOLPATH_CONFIG_DIR`). 
+pub(crate) fn config_dir() -> Result<PathBuf> { + if let Some(override_) = std::env::var_os(CONFIG_DIR_ENV) { + return Ok(PathBuf::from(override_)); + } + let home = std::env::var_os("HOME") + .ok_or_else(|| anyhow!("$HOME is not set — cannot locate config directory"))?; + Ok(PathBuf::from(home).join(CONFIG_DIR_NAME)) +} + +/// Shared lock for tests that manipulate `$TOOLPATH_CONFIG_DIR`. Every +/// test module that calls `set_var` / `remove_var` on this env var should +/// grab this lock first, otherwise parallel tests race and clobber each +/// other's directories. +#[cfg(test)] +pub(crate) static TEST_ENV_LOCK: std::sync::Mutex<()> = std::sync::Mutex::new(()); + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn config_dir_honors_override() { + let _g = TEST_ENV_LOCK.lock().unwrap_or_else(|e| e.into_inner()); + unsafe { + std::env::set_var(CONFIG_DIR_ENV, "/tmp/test-toolpath"); + } + let dir = config_dir().unwrap(); + unsafe { + std::env::remove_var(CONFIG_DIR_ENV); + } + assert_eq!(dir, PathBuf::from("/tmp/test-toolpath")); + } +} diff --git a/crates/toolpath-cli/src/main.rs b/crates/toolpath-cli/src/main.rs index b4c9b0e..5408bae 100644 --- a/crates/toolpath-cli/src/main.rs +++ b/crates/toolpath-cli/src/main.rs @@ -1,15 +1,21 @@ #[cfg(not(target_os = "emscripten"))] mod cmd_auth; +mod cmd_cache; mod cmd_derive; +mod cmd_export; mod cmd_haiku; +mod cmd_import; mod cmd_incept; mod cmd_list; mod cmd_merge; +#[cfg(not(target_os = "emscripten"))] +mod cmd_pathbase; mod cmd_project; mod cmd_query; mod cmd_render; mod cmd_track; mod cmd_validate; +mod config; use anyhow::Result; use clap::{Parser, Subcommand}; @@ -38,10 +44,20 @@ enum Commands { #[arg(long, global = true)] json: bool, }, - /// Derive Toolpath documents from source systems - Derive { + /// Import from external formats into the toolpath cache + Import { + #[command(flatten)] + args: cmd_import::ImportArgs, + }, + /// Export toolpath documents into external formats + Export { #[command(subcommand)] -
source: cmd_derive::DeriveSource, + target: cmd_export::ExportTarget, + }, + /// Manage the on-disk document cache (~/.toolpath/documents/) + Cache { + #[command(subcommand)] + op: cmd_cache::CacheOp, }, /// Query Toolpath documents Query { @@ -68,16 +84,6 @@ enum Commands { #[command(subcommand)] op: cmd_track::TrackOp, }, - /// Project a toolpath document into a provider's conversation format - Project { - #[command(subcommand)] - target: cmd_project::ProjectTarget, - }, - /// Project a toolpath document into a Claude session that Claude Code can resume - Incept { - #[command(flatten)] - args: cmd_incept::InceptArgs, - }, /// Validate a Toolpath document Validate { /// Input file @@ -92,6 +98,23 @@ enum Commands { #[command(subcommand)] op: cmd_auth::AuthOp, }, + + // ── Deprecated aliases ──────────────────────────────────────────── + #[command(hide = true, about = "[deprecated] Use `path import`")] + Derive { + #[command(subcommand)] + source: cmd_derive::DeriveSource, + }, + #[command(hide = true, about = "[deprecated] Use `path export claude`")] + Incept { + #[command(flatten)] + args: cmd_incept::InceptArgs, + }, + #[command(hide = true, about = "[deprecated] Use `path export`")] + Project { + #[command(subcommand)] + target: cmd_project::ProjectTarget, + }, } fn main() -> Result<()> { @@ -99,13 +122,13 @@ fn main() -> Result<()> { match cli.command { Commands::List { source, json } => cmd_list::run(source, json), - Commands::Derive { source } => cmd_derive::run(source, cli.pretty), + Commands::Import { args } => cmd_import::run(args, cli.pretty), + Commands::Export { target } => cmd_export::run(target), + Commands::Cache { op } => cmd_cache::run(op), Commands::Query { op } => cmd_query::run(op, cli.pretty), Commands::Render { format } => cmd_render::run(format), Commands::Merge { inputs, title } => cmd_merge::run(inputs, title, cli.pretty), Commands::Track { op } => cmd_track::run(op, cli.pretty), - Commands::Project { target } => cmd_project::run(target), 
- Commands::Incept { args } => cmd_incept::run(args), Commands::Validate { input } => cmd_validate::run(input), Commands::Haiku => { cmd_haiku::run(); @@ -113,5 +136,9 @@ fn main() -> Result<()> { } #[cfg(not(target_os = "emscripten"))] Commands::Auth { op } => cmd_auth::run(op), + + Commands::Derive { source } => cmd_derive::run(source, cli.pretty), + Commands::Incept { args } => cmd_incept::run(args), + Commands::Project { target } => cmd_project::run(target), } } diff --git a/crates/toolpath-cli/tests/integration.rs b/crates/toolpath-cli/tests/integration.rs index 1bf2964..86d1249 100644 --- a/crates/toolpath-cli/tests/integration.rs +++ b/crates/toolpath-cli/tests/integration.rs @@ -343,3 +343,237 @@ fn auth_login_against_unreachable_url_errors() { .failure() .stderr(predicate::str::contains("127.0.0.1")); } + +// ── Import / export / cache ───────────────────────────────────────── + +#[test] +fn import_help_lists_sources_including_pathbase() { + cmd() + .arg("import") + .arg("--help") + .assert() + .success() + .stdout(predicate::str::contains("git")) + .stdout(predicate::str::contains("github")) + .stdout(predicate::str::contains("claude")) + .stdout(predicate::str::contains("pathbase")); +} + +#[test] +fn export_help_lists_claude_and_pathbase() { + cmd() + .arg("export") + .arg("--help") + .assert() + .success() + .stdout(predicate::str::contains("claude")) + .stdout(predicate::str::contains("pathbase")); +} + +#[test] +fn import_git_no_cache_emits_stdout_json() { + let (dir, branch) = git_fixture(); + + cmd() + .arg("import") + .arg("git") + .arg("--no-cache") + .arg("--repo") + .arg(dir.path()) + .arg("--branch") + .arg(&branch) + .assert() + .success() + .stdout(predicate::str::contains("\"Path\"")) + .stdout(predicate::str::contains("\"steps\"")); +} + +#[test] +fn import_git_writes_cache_and_prints_path() { + let (dir, branch) = git_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .arg("import") 
+ .arg("git") + .arg("--repo") + .arg(dir.path()) + .arg("--branch") + .arg(&branch) + .assert() + .success() + .stdout(predicate::str::contains(".json")) + .stderr(predicate::str::contains("Imported")); +} + +#[test] +fn import_git_errors_on_existing_cache_without_force() { + let (dir, branch) = git_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["import", "git", "--branch"]) + .arg(&branch) + .arg("--repo") + .arg(dir.path()) + .assert() + .success(); + + cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["import", "git", "--branch"]) + .arg(&branch) + .arg("--repo") + .arg(dir.path()) + .assert() + .failure() + .stderr(predicate::str::contains("already exists")); + + cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["import", "git", "--force", "--branch"]) + .arg(&branch) + .arg("--repo") + .arg(dir.path()) + .assert() + .success(); +} + +#[test] +fn cache_ls_on_empty_directory_prints_hint() { + let cfg = tempfile::tempdir().unwrap(); + cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["cache", "ls"]) + .assert() + .success() + .stderr(predicate::str::contains("No cached")); +} + +#[test] +fn cache_ls_after_import_lists_entry() { + let (dir, branch) = git_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["import", "git", "--branch"]) + .arg(&branch) + .arg("--repo") + .arg(dir.path()) + .assert() + .success(); + + cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["cache", "ls"]) + .assert() + .success() + .stdout(predicate::str::contains("git-")); +} + +#[test] +fn export_pathbase_without_login_errors_clearly() { + let cfg = tempfile::tempdir().unwrap(); + cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["export", "pathbase", "--input"]) + .arg(examples_dir().join("path-01-pr.json")) + .assert() + .failure() + .stderr(predicate::str::contains("Not logged in")); +} + +#[test] +fn 
import_pathbase_without_login_errors_clearly() { + let cfg = tempfile::tempdir().unwrap(); + cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["import", "pathbase", "trc_nonexistent"]) + .assert() + .failure() + .stderr(predicate::str::contains("Not logged in")); +} + +#[test] +fn import_git_no_cache_honors_global_pretty() { + let (dir, branch) = git_fixture(); + + let output = cmd() + .arg("--pretty") + .arg("import") + .arg("git") + .arg("--no-cache") + .arg("--repo") + .arg(dir.path()) + .arg("--branch") + .arg(&branch) + .output() + .unwrap(); + assert!(output.status.success()); + let stdout = String::from_utf8(output.stdout).unwrap(); + // Pretty JSON always has multi-line indentation; compact JSON never does. + assert!( + stdout.contains("\n "), + "expected pretty-printed JSON, got: {stdout}" + ); +} + +#[test] +fn import_git_two_repos_on_same_branch_have_distinct_cache_ids() { + let (dir_a, branch) = git_fixture(); + let (dir_b, _) = git_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["import", "git", "--branch"]) + .arg(&branch) + .arg("--repo") + .arg(dir_a.path()) + .assert() + .success(); + + // Second import from a different repo on the same branch must NOT + // trigger the "cache entry already exists" collision. 
+ cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["import", "git", "--branch"]) + .arg(&branch) + .arg("--repo") + .arg(dir_b.path()) + .assert() + .success(); + + let ls = cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["cache", "ls"]) + .output() + .unwrap(); + let stdout = String::from_utf8(ls.stdout).unwrap(); + let git_entries = stdout.lines().filter(|l| l.starts_with("git-")).count(); + assert_eq!( + git_entries, 2, + "expected two distinct git- cache entries, got:\n{stdout}" + ); +} + +// ── Deprecation aliases ───────────────────────────────────────────── + +#[test] +fn derive_alias_still_works_with_warning() { + let (dir, branch) = git_fixture(); + cmd() + .arg("derive") + .arg("git") + .arg("--repo") + .arg(dir.path()) + .arg("--branch") + .arg(&branch) + .assert() + .success() + .stdout(predicate::str::contains("\"Path\"")) + .stderr(predicate::str::contains("deprecated")); +} diff --git a/crates/toolpath-cli/tests/render_md_snapshots.rs b/crates/toolpath-cli/tests/render_md_snapshots.rs index 22fa6aa..601b00b 100644 --- a/crates/toolpath-cli/tests/render_md_snapshots.rs +++ b/crates/toolpath-cli/tests/render_md_snapshots.rs @@ -6,6 +6,7 @@ fn examples_dir() -> PathBuf { } fn render_md(example: &str) -> String { + #[allow(deprecated)] let output = Command::cargo_bin("path") .unwrap() .args(["render", "md", "--input"]) diff --git a/crates/toolpath-convo/src/lib.rs b/crates/toolpath-convo/src/lib.rs index 47242c6..dc88de9 100644 --- a/crates/toolpath-convo/src/lib.rs +++ b/crates/toolpath-convo/src/lib.rs @@ -1023,7 +1023,7 @@ mod tests { fn test_conversation_view_old_format_no_events() { // Old-format JSON without events field should deserialize with empty vec let json = r#"{"id":"s1","started_at":null,"last_activity":null,"turns":[]}"#; - let view: ConversationView = serde_json::from_str(&json).unwrap(); + let view: ConversationView = serde_json::from_str(json).unwrap(); assert!(view.events.is_empty()); } } diff --git 
a/site/_data/crates.json b/site/_data/crates.json index d0c939b..81b5559 100644 --- a/site/_data/crates.json +++ b/site/_data/crates.json @@ -89,10 +89,10 @@ }, { "name": "toolpath-cli", - "version": "0.4.0", + "version": "0.5.0", "description": "Unified CLI (binary: path)", "docs": "https://docs.rs/toolpath-cli", "crate": "https://crates.io/crates/toolpath-cli", - "role": "One binary called `path` that ties everything together: derive, query, render, merge, track, validate." + "role": "One binary called `path` that ties everything together: import, export, cache, query, render, merge, track, validate. Pathbase round-trip via `import pathbase` / `export pathbase`." } ]