diff --git a/crates/toolpath-cli/src/cmd_derive.rs b/crates/toolpath-cli/src/cmd_derive.rs index 2227859..16c7690 100644 --- a/crates/toolpath-cli/src/cmd_derive.rs +++ b/crates/toolpath-cli/src/cmd_derive.rs @@ -90,6 +90,19 @@ pub fn run(source: DeriveSource, pretty: bool) -> Result<()> { } } +#[cfg(target_os = "emscripten")] +fn run_git( + _repo_path: PathBuf, + _branches: Vec, + _base: Option, + _remote: String, + _title: Option, + _pretty: bool, +) -> Result<()> { + crate::source::require_native("derive git") +} + +#[cfg(not(target_os = "emscripten"))] fn run_git( repo_path: PathBuf, branches: Vec, @@ -98,44 +111,38 @@ fn run_git( title: Option, pretty: bool, ) -> Result<()> { - #[cfg(target_os = "emscripten")] - { - let _ = (repo_path, branches, base, remote, title, pretty); - anyhow::bail!( - "'path derive git' requires a native environment with access to a git repository" - ); - } - - #[cfg(not(target_os = "emscripten"))] - { - let repo_path = if repo_path.is_absolute() { - repo_path - } else { - std::env::current_dir()?.join(&repo_path) - }; - - let repo = git2::Repository::open(&repo_path) - .with_context(|| format!("Failed to open repository at {:?}", repo_path))?; + let repo_path = if repo_path.is_absolute() { + repo_path + } else { + std::env::current_dir()?.join(&repo_path) + }; - let config = toolpath_git::DeriveConfig { - remote, - title, - base, - }; + let repo = git2::Repository::open(&repo_path) + .with_context(|| format!("Failed to open repository at {:?}", repo_path))?; - let doc = toolpath_git::derive(&repo, &branches, &config)?; + let config = toolpath_git::DeriveConfig { + remote, + title, + base, + }; - let json = if pretty { - doc.to_json_pretty()? - } else { - doc.to_json()? 
- }; + let doc = toolpath_git::derive(&repo, &branches, &config)?; + crate::io::write_document(&doc, &crate::io::OutputSpec::Stdout, pretty) +} - println!("{}", json); - Ok(()) - } +#[cfg(target_os = "emscripten")] +fn run_github( + _url: Option, + _repo: Option, + _pr: Option, + _no_ci: bool, + _no_comments: bool, + _pretty: bool, +) -> Result<()> { + crate::source::require_native("derive github") } +#[cfg(not(target_os = "emscripten"))] fn run_github( url: Option, repo: Option, @@ -144,53 +151,36 @@ fn run_github( no_comments: bool, pretty: bool, ) -> Result<()> { - #[cfg(target_os = "emscripten")] - { - let _ = (url, repo, pr, no_ci, no_comments, pretty); - anyhow::bail!("'path derive github' requires a native environment with network access"); - } + // Resolve owner/repo/pr from either a URL or --repo/--pr flags + let (owner, repo_name, pr_number) = if let Some(url_str) = &url { + let parsed = toolpath_github::parse_pr_url(url_str).ok_or_else(|| { + anyhow::anyhow!("Invalid PR URL. Expected: https://github.com/owner/repo/pull/N") + })?; + (parsed.owner, parsed.repo, parsed.number) + } else if let (Some(repo_str), Some(pr_num)) = (&repo, pr) { + let (o, r) = repo_str + .split_once('/') + .ok_or_else(|| anyhow::anyhow!("Repository must be in owner/repo format"))?; + (o.to_string(), r.to_string(), pr_num) + } else { + anyhow::bail!( + "Provide a PR URL or both --repo and --pr.\n\ + Usage: path derive github https://github.com/owner/repo/pull/42\n\ + Usage: path derive github --repo owner/repo --pr 42" + ); + }; - #[cfg(not(target_os = "emscripten"))] - { - // Resolve owner/repo/pr from either a URL or --repo/--pr flags - let (owner, repo_name, pr_number) = if let Some(url_str) = &url { - let parsed = toolpath_github::parse_pr_url(url_str).ok_or_else(|| { - anyhow::anyhow!("Invalid PR URL. 
Expected: https://github.com/owner/repo/pull/N") - })?; - (parsed.owner, parsed.repo, parsed.number) - } else if let (Some(repo_str), Some(pr_num)) = (&repo, pr) { - let (o, r) = repo_str - .split_once('/') - .ok_or_else(|| anyhow::anyhow!("Repository must be in owner/repo format"))?; - (o.to_string(), r.to_string(), pr_num) - } else { - anyhow::bail!( - "Provide a PR URL or both --repo and --pr.\n\ - Usage: path derive github https://github.com/owner/repo/pull/42\n\ - Usage: path derive github --repo owner/repo --pr 42" - ); - }; - - let token = toolpath_github::resolve_token()?; - let config = toolpath_github::DeriveConfig { - token, - include_ci: !no_ci, - include_comments: !no_comments, - ..Default::default() - }; - - let path = toolpath_github::derive_pull_request(&owner, &repo_name, pr_number, &config)?; - let doc = toolpath::v1::Document::Path(path); - - let json = if pretty { - doc.to_json_pretty()? - } else { - doc.to_json()? - }; - - println!("{}", json); - Ok(()) - } + let token = toolpath_github::resolve_token()?; + let config = toolpath_github::DeriveConfig { + token, + include_ci: !no_ci, + include_comments: !no_comments, + ..Default::default() + }; + + let path = toolpath_github::derive_pull_request(&owner, &repo_name, pr_number, &config)?; + let doc = toolpath::v1::Document::Path(path); + crate::io::write_document(&doc, &crate::io::OutputSpec::Stdout, pretty) } fn run_claude(project: String, session: Option, all: bool, pretty: bool) -> Result<()> { @@ -231,12 +221,7 @@ fn run_claude_with_manager( for path in &docs { let doc = toolpath::v1::Document::Path(path.clone()); - let json = if pretty { - doc.to_json_pretty()? - } else { - doc.to_json()? 
- }; - println!("{}", json); + crate::io::write_document(&doc, &crate::io::OutputSpec::Stdout, pretty)?; } Ok(()) diff --git a/crates/toolpath-cli/src/cmd_list.rs b/crates/toolpath-cli/src/cmd_list.rs index f691bda..aea6ba8 100644 --- a/crates/toolpath-cli/src/cmd_list.rs +++ b/crates/toolpath-cli/src/cmd_list.rs @@ -38,125 +38,117 @@ pub fn run(source: ListSource, json: bool) -> Result<()> { } } +#[cfg(target_os = "emscripten")] +fn run_git(_repo_path: PathBuf, _remote: String, _json: bool) -> Result<()> { + crate::source::require_native("list git") +} + +#[cfg(not(target_os = "emscripten"))] fn run_git(repo_path: PathBuf, remote: String, json: bool) -> Result<()> { - #[cfg(target_os = "emscripten")] - { - let _ = (repo_path, remote, json); - anyhow::bail!( - "'path list git' requires a native environment with access to a git repository" - ); - } + let repo_path = if repo_path.is_absolute() { + repo_path + } else { + std::env::current_dir()?.join(&repo_path) + }; - #[cfg(not(target_os = "emscripten"))] - { - let repo_path = if repo_path.is_absolute() { - repo_path - } else { - std::env::current_dir()?.join(&repo_path) - }; - - let repo = git2::Repository::open(&repo_path) - .with_context(|| format!("Failed to open repository at {:?}", repo_path))?; - - let uri = toolpath_git::get_repo_uri(&repo, &remote)?; - let branches = toolpath_git::list_branches(&repo)?; - - if json { - let items: Vec = branches - .iter() - .map(|b| { - serde_json::json!({ - "name": b.name, - "head": b.head, - "subject": b.subject, - "author": b.author, - "timestamp": b.timestamp, - }) + let repo = git2::Repository::open(&repo_path) + .with_context(|| format!("Failed to open repository at {:?}", repo_path))?; + + let uri = toolpath_git::get_repo_uri(&repo, &remote)?; + let branches = toolpath_git::list_branches(&repo)?; + + if json { + let items: Vec = branches + .iter() + .map(|b| { + serde_json::json!({ + "name": b.name, + "head": b.head, + "subject": b.subject, + "author": b.author, + 
"timestamp": b.timestamp, }) - .collect(); - let output = serde_json::json!({ - "source": "git", - "uri": uri, - "branches": items, - }); - println!("{}", serde_json::to_string_pretty(&output)?); + }) + .collect(); + let output = serde_json::json!({ + "source": "git", + "uri": uri, + "branches": items, + }); + println!("{}", serde_json::to_string_pretty(&output)?); + } else { + println!("Repository: {}", uri); + println!(); + if branches.is_empty() { + println!(" (no local branches)"); } else { - println!("Repository: {}", uri); - println!(); - if branches.is_empty() { - println!(" (no local branches)"); - } else { - for b in &branches { - println!(" {} {} {}", b.head_short, b.name, truncate(&b.subject, 60)); - } + for b in &branches { + println!(" {} {} {}", b.head_short, b.name, truncate(&b.subject, 60)); } } - Ok(()) } + Ok(()) } +#[cfg(target_os = "emscripten")] +fn run_github(_repo: String, _json: bool) -> Result<()> { + crate::source::require_native("list github") +} + +#[cfg(not(target_os = "emscripten"))] fn run_github(repo: String, json: bool) -> Result<()> { - #[cfg(target_os = "emscripten")] - { - let _ = (repo, json); - anyhow::bail!("'path list github' requires a native environment with network access"); - } + let (owner, repo_name) = repo + .split_once('/') + .ok_or_else(|| anyhow::anyhow!("Repository must be in owner/repo format"))?; + + let token = toolpath_github::resolve_token()?; + let config = toolpath_github::DeriveConfig { + token, + ..Default::default() + }; - #[cfg(not(target_os = "emscripten"))] - { - let (owner, repo_name) = repo - .split_once('/') - .ok_or_else(|| anyhow::anyhow!("Repository must be in owner/repo format"))?; - - let token = toolpath_github::resolve_token()?; - let config = toolpath_github::DeriveConfig { - token, - ..Default::default() - }; - - let prs = toolpath_github::list_pull_requests(owner, repo_name, &config)?; - - if json { - let items: Vec = prs - .iter() - .map(|pr| { - serde_json::json!({ - "number": pr.number, 
- "title": pr.title, - "state": pr.state, - "author": pr.author, - "head_branch": pr.head_branch, - "base_branch": pr.base_branch, - "created_at": pr.created_at, - "updated_at": pr.updated_at, - }) + let prs = toolpath_github::list_pull_requests(owner, repo_name, &config)?; + + if json { + let items: Vec = prs + .iter() + .map(|pr| { + serde_json::json!({ + "number": pr.number, + "title": pr.title, + "state": pr.state, + "author": pr.author, + "head_branch": pr.head_branch, + "base_branch": pr.base_branch, + "created_at": pr.created_at, + "updated_at": pr.updated_at, }) - .collect(); - let output = serde_json::json!({ - "source": "github", - "repo": format!("{}/{}", owner, repo_name), - "pull_requests": items, - }); - println!("{}", serde_json::to_string_pretty(&output)?); + }) + .collect(); + let output = serde_json::json!({ + "source": "github", + "repo": format!("{}/{}", owner, repo_name), + "pull_requests": items, + }); + println!("{}", serde_json::to_string_pretty(&output)?); + } else { + println!("Pull requests for {}/{}:", owner, repo_name); + println!(); + if prs.is_empty() { + println!(" (none)"); } else { - println!("Pull requests for {}/{}:", owner, repo_name); - println!(); - if prs.is_empty() { - println!(" (none)"); - } else { - for pr in &prs { - println!( - " #{:<5} {:>8} {} {}", - pr.number, - pr.state, - pr.author, - truncate(&pr.title, 50), - ); - } + for pr in &prs { + println!( + " #{:<5} {:>8} {} {}", + pr.number, + pr.state, + pr.author, + truncate(&pr.title, 50), + ); } } - Ok(()) } + Ok(()) } fn run_claude(project: Option, json: bool) -> Result<()> { diff --git a/crates/toolpath-cli/src/cmd_merge.rs b/crates/toolpath-cli/src/cmd_merge.rs index c5c283f..05eede4 100644 --- a/crates/toolpath-cli/src/cmd_merge.rs +++ b/crates/toolpath-cli/src/cmd_merge.rs @@ -1,4 +1,5 @@ -use anyhow::{Context, Result}; +use crate::io::{self as cli_io, InputSpec, OutputSpec}; +use anyhow::Result; use toolpath::v1::{Document, Graph, GraphIdentity, GraphMeta, 
PathOrRef}; /// Merge multiple Toolpath documents into a single Graph. @@ -9,33 +10,13 @@ pub fn run(inputs: Vec, title: Option, pretty: bool) -> Result<( let mut all_paths = Vec::new(); for input in &inputs { - let content = if input == "-" { - use std::io::Read; - let mut buf = String::new(); - std::io::stdin() - .read_to_string(&mut buf) - .context("Failed to read from stdin")?; - buf - } else { - std::fs::read_to_string(input).with_context(|| format!("Failed to read {:?}", input))? - }; - - let doc = Document::from_json(&content) - .with_context(|| format!("Failed to parse {:?}", input))?; - + let spec = InputSpec::from_str(input); + let doc = cli_io::read_document(&spec)?; extract_paths(doc, &mut all_paths); } let doc = merge_into_graph(all_paths, title); - - let json = if pretty { - doc.to_json_pretty()? - } else { - doc.to_json()? - }; - println!("{}", json); - - Ok(()) + cli_io::write_document(&doc, &OutputSpec::Stdout, pretty) } /// Extract paths from a document and append them to the collector. 
diff --git a/crates/toolpath-cli/src/cmd_query.rs b/crates/toolpath-cli/src/cmd_query.rs index 679c5e6..c85d54b 100644 --- a/crates/toolpath-cli/src/cmd_query.rs +++ b/crates/toolpath-cli/src/cmd_query.rs @@ -1,84 +1,155 @@ -use anyhow::{Context, Result}; -use clap::Subcommand; +use crate::io::{self as cli_io, InputSpec}; +use anyhow::Result; +use clap::Args; +use std::collections::HashSet; use std::path::PathBuf; -use toolpath::v1::{Document, query}; - -#[derive(Subcommand, Debug)] -pub enum QueryOp { - /// Walk the parent chain from a step - Ancestors { - /// Input file - #[arg(short, long)] - input: PathBuf, - - /// Step ID to trace from - #[arg(long)] - step_id: String, - }, - /// Find steps not on the path to head - DeadEnds { - /// Input file - #[arg(short, long)] - input: PathBuf, - }, - /// Filter steps by criteria - Filter { - /// Input file - #[arg(short, long)] - input: PathBuf, - - /// Actor prefix (e.g., "human:", "agent:claude") - #[arg(long)] - actor: Option, - - /// Artifact path - #[arg(long)] - artifact: Option, - - /// Start time (ISO 8601) - #[arg(long)] - after: Option, - - /// End time (ISO 8601) - #[arg(long)] - before: Option, - }, +use toolpath::v1::{Document, Step, query}; + +#[derive(Args, Debug)] +pub struct QueryArgs { + /// Input file (use `-` or omit to read from stdin) + #[arg(short, long)] + pub input: Option, + + /// Walk the parent chain from this step id + #[arg(long, value_name = "STEP_ID", conflicts_with = "dead_ends")] + pub ancestors_of: Option, + + /// Show steps not on the path to head + #[arg(long)] + pub dead_ends: bool, + + /// Filter by actor prefix (e.g., "human:", "agent:claude") + #[arg(long)] + pub actor: Option, + + /// Filter by artifact path + #[arg(long)] + pub artifact: Option, + + /// Filter: only steps at or after this ISO-8601 timestamp + #[arg(long)] + pub after: Option, + + /// Filter: only steps at or before this ISO-8601 timestamp + #[arg(long)] + pub before: Option, } -pub fn run(op: QueryOp, pretty: 
bool) -> Result<()> { - match op { - QueryOp::Ancestors { input, step_id } => run_ancestors(input, step_id, pretty), - QueryOp::DeadEnds { input } => run_dead_ends(input, pretty), - QueryOp::Filter { - input, - actor, - artifact, - after, - before, - } => run_filter(input, actor, artifact, after, before, pretty), - } +/// A view into one inline Path: its steps and the head step id. +struct PathView<'a> { + steps: &'a [Step], + head: &'a str, } -fn read_doc(path: &PathBuf) -> Result { - let content = - std::fs::read_to_string(path).with_context(|| format!("Failed to read {:?}", path))?; - Document::from_json(&content).with_context(|| format!("Failed to parse {:?}", path)) +pub fn run(args: QueryArgs, pretty: bool) -> Result<()> { + let doc = cli_io::read_document(&InputSpec::from_opt(args.input))?; + let views = collect_paths(&doc); + + let selected: Vec<&Step> = if let Some(step_id) = args.ancestors_of { + collect_ancestors(&views, &step_id) + } else if args.dead_ends { + collect_dead_ends(&views)? + } else { + collect_filtered(&views, &args.actor, &args.artifact, &args.after, &args.before) + }; + + print_steps(&selected, pretty) } -fn extract_steps(doc: &Document) -> (&[toolpath::v1::Step], Option<&str>) { +/// Collect every inline Path in the document. Graphs contribute every inline +/// path (not just the first); Path docs contribute themselves; Step docs +/// contribute nothing. 
+fn collect_paths(doc: &Document) -> Vec> { match doc { - Document::Path(p) => (p.steps.as_slice(), Some(p.path.head.as_str())), - Document::Graph(g) => { - // For graphs, use the first inline path - for p in &g.paths { - if let toolpath::v1::PathOrRef::Path(path) = p { - return (path.steps.as_slice(), Some(path.path.head.as_str())); - } + Document::Path(p) => vec![PathView { + steps: p.steps.as_slice(), + head: p.path.head.as_str(), + }], + Document::Graph(g) => g + .paths + .iter() + .filter_map(|p| match p { + toolpath::v1::PathOrRef::Path(path) => Some(PathView { + steps: path.steps.as_slice(), + head: path.path.head.as_str(), + }), + toolpath::v1::PathOrRef::Ref(_) => None, + }) + .collect(), + Document::Step(_) => Vec::new(), + } +} + +fn collect_ancestors<'a>(views: &[PathView<'a>], step_id: &str) -> Vec<&'a Step> { + let mut ancestor_ids: HashSet = HashSet::new(); + for v in views { + for id in query::ancestors(v.steps, step_id) { + ancestor_ids.insert(id); + } + } + dedup_refs(views, |s| ancestor_ids.contains(&s.step.id)) +} + +fn collect_dead_ends<'a>(views: &[PathView<'a>]) -> Result> { + if views.is_empty() { + anyhow::bail!("Document has no head step"); + } + let mut dead_ids: HashSet = HashSet::new(); + for v in views { + for s in query::dead_ends(v.steps, v.head) { + dead_ids.insert(s.step.id.clone()); + } + } + Ok(dedup_refs(views, |s| dead_ids.contains(&s.step.id))) +} + +fn collect_filtered<'a>( + views: &[PathView<'a>], + actor: &Option, + artifact: &Option, + after: &Option, + before: &Option, +) -> Vec<&'a Step> { + dedup_refs(views, |s| { + if let Some(prefix) = actor + && !s.step.actor.starts_with(prefix) + { + return false; + } + if let Some(art) = artifact + && !s.change.contains_key(art) + { + return false; + } + if after.is_some() || before.is_some() { + let ts = s.step.timestamp.as_str(); + let start = after.as_deref().unwrap_or(""); + let end = before.as_deref().unwrap_or("9999-12-31T23:59:59Z"); + if ts < start || ts > end { + return 
false; + } + } + true + }) +} + +/// Iterate steps across all views and collect references matching `pred`, +/// deduplicating by step id. +fn dedup_refs<'a, F>(views: &[PathView<'a>], pred: F) -> Vec<&'a Step> +where + F: Fn(&Step) -> bool, +{ + let mut seen: HashSet<&str> = HashSet::new(); + let mut out: Vec<&Step> = Vec::new(); + for v in views { + for s in v.steps { + if pred(s) && seen.insert(s.step.id.as_str()) { + out.push(s); } - (&[], None) } - Document::Step(_) => (&[], None), } + out } fn print_steps(steps: &[&toolpath::v1::Step], pretty: bool) -> Result<()> { @@ -91,67 +162,6 @@ fn print_steps(steps: &[&toolpath::v1::Step], pretty: bool) -> Result<()> { Ok(()) } -fn run_ancestors(input: PathBuf, step_id: String, pretty: bool) -> Result<()> { - let doc = read_doc(&input)?; - let (steps, _) = extract_steps(&doc); - let ancestor_ids = query::ancestors(steps, &step_id); - - let ancestor_steps: Vec<&toolpath::v1::Step> = steps - .iter() - .filter(|s| ancestor_ids.contains(&s.step.id)) - .collect(); - - print_steps(&ancestor_steps, pretty) -} - -fn run_dead_ends(input: PathBuf, pretty: bool) -> Result<()> { - let doc = read_doc(&input)?; - let (steps, head) = extract_steps(&doc); - let head = head.ok_or_else(|| anyhow::anyhow!("Document has no head step"))?; - - let dead = query::dead_ends(steps, head); - print_steps(&dead, pretty) -} - -fn run_filter( - input: PathBuf, - actor: Option, - artifact: Option, - after: Option, - before: Option, - pretty: bool, -) -> Result<()> { - let doc = read_doc(&input)?; - let (steps, _) = extract_steps(&doc); - - let mut result: Vec<&toolpath::v1::Step> = steps.iter().collect(); - - if let Some(ref actor_prefix) = actor { - let filtered = query::filter_by_actor(steps, actor_prefix); - let ids: std::collections::HashSet<&str> = - filtered.iter().map(|s| s.step.id.as_str()).collect(); - result.retain(|s| ids.contains(s.step.id.as_str())); - } - - if let Some(ref art) = artifact { - let filtered = 
query::filter_by_artifact(steps, art); - let ids: std::collections::HashSet<&str> = - filtered.iter().map(|s| s.step.id.as_str()).collect(); - result.retain(|s| ids.contains(s.step.id.as_str())); - } - - if after.is_some() || before.is_some() { - let start = after.as_deref().unwrap_or(""); - let end = before.as_deref().unwrap_or("9999-12-31T23:59:59Z"); - let filtered = query::filter_by_time_range(steps, start, end); - let ids: std::collections::HashSet<&str> = - filtered.iter().map(|s| s.step.id.as_str()).collect(); - result.retain(|s| ids.contains(s.step.id.as_str())); - } - - print_steps(&result, pretty) -} - #[cfg(test)] mod tests { use super::*; @@ -188,158 +198,279 @@ mod tests { f } + fn args_with_input(path: PathBuf) -> QueryArgs { + QueryArgs { + input: Some(path), + ancestors_of: None, + dead_ends: false, + actor: None, + artifact: None, + after: None, + before: None, + } + } + + fn make_graph_two_paths() -> Document { + // path-A: s1 → s2 (head=s2), with abandoned s1a off s1 + let s1 = Step::new("s1", "human:alex", "2026-01-01T10:00:00Z") + .with_raw_change("src/main.rs", "@@"); + let s1a = Step::new("s1a", "agent:claude", "2026-01-01T10:15:00Z") + .with_parent("s1") + .with_raw_change("src/main.rs", "@@"); + let s2 = Step::new("s2", "human:alex", "2026-01-01T11:00:00Z") + .with_parent("s1") + .with_raw_change("src/main.rs", "@@"); + let path_a = Path { + path: PathIdentity { + id: "pA".into(), + base: None, + head: "s2".into(), + }, + steps: vec![s1, s1a, s2], + meta: None, + }; + + // path-B: t1 → t2 (head=t2), with abandoned t1a off t1 + let t1 = Step::new("t1", "tool:rustfmt", "2026-01-02T10:00:00Z") + .with_raw_change("src/lib.rs", "@@"); + let t1a = Step::new("t1a", "agent:claude", "2026-01-02T10:15:00Z") + .with_parent("t1") + .with_raw_change("src/lib.rs", "@@"); + let t2 = Step::new("t2", "tool:rustfmt", "2026-01-02T11:00:00Z") + .with_parent("t1") + .with_raw_change("src/lib.rs", "@@"); + let path_b = Path { + path: PathIdentity { + id: 
"pB".into(), + base: None, + head: "t2".into(), + }, + steps: vec![t1, t1a, t2], + meta: None, + }; + + Document::Graph(toolpath::v1::Graph { + graph: toolpath::v1::GraphIdentity { id: "g1".into() }, + paths: vec![ + toolpath::v1::PathOrRef::Path(Box::new(path_a)), + toolpath::v1::PathOrRef::Path(Box::new(path_b)), + ], + meta: None, + }) + } + #[test] - fn test_extract_steps_from_path() { + fn test_collect_paths_from_path_doc() { let doc = make_path_doc(); - let (steps, head) = extract_steps(&doc); - assert_eq!(steps.len(), 4); - assert_eq!(head, Some("s3")); + let views = collect_paths(&doc); + assert_eq!(views.len(), 1); + assert_eq!(views[0].steps.len(), 4); + assert_eq!(views[0].head, "s3"); } #[test] - fn test_extract_steps_from_step() { + fn test_collect_paths_from_step_doc() { let doc = Document::Step(Step::new("s1", "human:alex", "2026-01-01T00:00:00Z")); - let (steps, head) = extract_steps(&doc); - assert!(steps.is_empty()); - assert!(head.is_none()); + let views = collect_paths(&doc); + assert!(views.is_empty()); } #[test] - fn test_extract_steps_from_graph() { - let s1 = - Step::new("s1", "human:alex", "2026-01-01T00:00:00Z").with_raw_change("f.rs", "@@"); + fn test_collect_paths_from_graph_visits_every_inline_path() { + // Regression: previously only the first inline path was inspected. 
+ let doc = make_graph_two_paths(); + let views = collect_paths(&doc); + assert_eq!(views.len(), 2); + assert_eq!(views[0].head, "s2"); + assert_eq!(views[1].head, "t2"); + } + + #[test] + fn test_collect_paths_skips_refs() { + let s = Step::new("s1", "human:alex", "2026-01-01T00:00:00Z") + .with_raw_change("f.rs", "@@"); let path = Path { path: PathIdentity { id: "p1".into(), base: None, head: "s1".into(), }, - steps: vec![s1], + steps: vec![s], meta: None, }; let graph = toolpath::v1::Graph { graph: toolpath::v1::GraphIdentity { id: "g1".into() }, - paths: vec![toolpath::v1::PathOrRef::Path(Box::new(path))], + paths: vec![ + toolpath::v1::PathOrRef::Path(Box::new(path)), + toolpath::v1::PathOrRef::Ref(toolpath::v1::PathRef { + ref_url: "https://example.com/p.json".into(), + }), + ], + meta: None, + }; + let doc = Document::Graph(graph); + let views = collect_paths(&doc); + assert_eq!(views.len(), 1); + } + + #[test] + fn test_collect_dead_ends_graph_unions_across_paths() { + // Each inline path has one abandoned branch; expect both in output. + let doc = make_graph_two_paths(); + let views = collect_paths(&doc); + let dead = collect_dead_ends(&views).unwrap(); + let ids: HashSet<&str> = dead.iter().map(|s| s.step.id.as_str()).collect(); + assert!(ids.contains("s1a"), "expected dead-end from path A"); + assert!(ids.contains("t1a"), "expected dead-end from path B"); + } + + #[test] + fn test_collect_filtered_graph_unions_across_paths() { + // Filter by "agent:" — one match per inline path. + let doc = make_graph_two_paths(); + let views = collect_paths(&doc); + let filtered = collect_filtered(&views, &Some("agent:".into()), &None, &None, &None); + let ids: HashSet<&str> = filtered.iter().map(|s| s.step.id.as_str()).collect(); + assert_eq!(ids.len(), 2); + assert!(ids.contains("s1a")); + assert!(ids.contains("t1a")); + } + + #[test] + fn test_collect_ancestors_graph_finds_step_in_any_path() { + // `t2` lives only in path B — ancestors should walk that path. 
+ let doc = make_graph_two_paths(); + let views = collect_paths(&doc); + let anc = collect_ancestors(&views, "t2"); + let ids: HashSet<&str> = anc.iter().map(|s| s.step.id.as_str()).collect(); + assert!(ids.contains("t1")); + assert!(ids.contains("t2")); + assert!(!ids.contains("t1a"), "abandoned branch shouldn't be an ancestor"); + } + + #[test] + fn test_collect_filtered_dedups_duplicate_step_ids_across_paths() { + // Same step id appearing in two inline paths should only show once. + let s = Step::new("shared", "human:alex", "2026-01-01T10:00:00Z") + .with_raw_change("f.rs", "@@"); + let make = |id: &str| Path { + path: PathIdentity { + id: id.into(), + base: None, + head: "shared".into(), + }, + steps: vec![s.clone()], + meta: None, + }; + let graph = toolpath::v1::Graph { + graph: toolpath::v1::GraphIdentity { id: "g".into() }, + paths: vec![ + toolpath::v1::PathOrRef::Path(Box::new(make("p1"))), + toolpath::v1::PathOrRef::Path(Box::new(make("p2"))), + ], meta: None, }; let doc = Document::Graph(graph); - let (steps, head) = extract_steps(&doc); - assert_eq!(steps.len(), 1); - assert_eq!(head, Some("s1")); + let views = collect_paths(&doc); + let filtered = collect_filtered(&views, &Some("human:".into()), &None, &None, &None); + assert_eq!(filtered.len(), 1, "duplicate step ids should collapse"); } #[test] fn test_run_ancestors() { let doc = make_path_doc(); let f = write_temp_doc(&doc); - let result = run_ancestors(f.path().to_path_buf(), "s3".to_string(), false); - assert!(result.is_ok()); + let mut args = args_with_input(f.path().to_path_buf()); + args.ancestors_of = Some("s3".to_string()); + assert!(run(args, false).is_ok()); } #[test] fn test_run_dead_ends() { let doc = make_path_doc(); let f = write_temp_doc(&doc); - let result = run_dead_ends(f.path().to_path_buf(), false); - assert!(result.is_ok()); + let mut args = args_with_input(f.path().to_path_buf()); + args.dead_ends = true; + assert!(run(args, false).is_ok()); } #[test] fn 
test_run_filter_by_actor() { let doc = make_path_doc(); let f = write_temp_doc(&doc); - let result = run_filter( - f.path().to_path_buf(), - Some("human:".to_string()), - None, - None, - None, - false, - ); - assert!(result.is_ok()); + let mut args = args_with_input(f.path().to_path_buf()); + args.actor = Some("human:".to_string()); + assert!(run(args, false).is_ok()); } #[test] fn test_run_filter_by_artifact() { let doc = make_path_doc(); let f = write_temp_doc(&doc); - let result = run_filter( - f.path().to_path_buf(), - None, - Some("src/main.rs".to_string()), - None, - None, - false, - ); - assert!(result.is_ok()); + let mut args = args_with_input(f.path().to_path_buf()); + args.artifact = Some("src/main.rs".to_string()); + assert!(run(args, false).is_ok()); } #[test] fn test_run_filter_by_time_range() { let doc = make_path_doc(); let f = write_temp_doc(&doc); - let result = run_filter( - f.path().to_path_buf(), - None, - None, - Some("2026-01-01T10:30:00Z".to_string()), - Some("2026-01-01T11:30:00Z".to_string()), - false, - ); - assert!(result.is_ok()); + let mut args = args_with_input(f.path().to_path_buf()); + args.after = Some("2026-01-01T10:30:00Z".to_string()); + args.before = Some("2026-01-01T11:30:00Z".to_string()); + assert!(run(args, false).is_ok()); } #[test] fn test_run_filter_pretty() { let doc = make_path_doc(); let f = write_temp_doc(&doc); - let result = run_filter(f.path().to_path_buf(), None, None, None, None, true); - assert!(result.is_ok()); + let args = args_with_input(f.path().to_path_buf()); + assert!(run(args, true).is_ok()); } #[test] fn test_run_filter_after_only() { let doc = make_path_doc(); let f = write_temp_doc(&doc); - let result = run_filter( - f.path().to_path_buf(), - None, - None, - Some("2026-01-01T11:00:00Z".to_string()), - None, - false, - ); - assert!(result.is_ok()); + let mut args = args_with_input(f.path().to_path_buf()); + args.after = Some("2026-01-01T11:00:00Z".to_string()); + assert!(run(args, false).is_ok()); } 
#[test] fn test_run_dead_ends_on_step_doc() { let doc = Document::Step(Step::new("s1", "human:alex", "2026-01-01T00:00:00Z")); let f = write_temp_doc(&doc); - let result = run_dead_ends(f.path().to_path_buf(), false); + let mut args = args_with_input(f.path().to_path_buf()); + args.dead_ends = true; // Should fail because Step has no head - assert!(result.is_err()); + assert!(run(args, false).is_err()); } #[test] fn test_run_ancestors_pretty() { let doc = make_path_doc(); let f = write_temp_doc(&doc); - let result = run_ancestors(f.path().to_path_buf(), "s3".to_string(), true); - assert!(result.is_ok()); + let mut args = args_with_input(f.path().to_path_buf()); + args.ancestors_of = Some("s3".to_string()); + assert!(run(args, true).is_ok()); } #[test] fn test_run_dead_ends_pretty() { let doc = make_path_doc(); let f = write_temp_doc(&doc); - let result = run_dead_ends(f.path().to_path_buf(), true); - assert!(result.is_ok()); + let mut args = args_with_input(f.path().to_path_buf()); + args.dead_ends = true; + assert!(run(args, true).is_ok()); } #[test] - fn test_read_doc_invalid_path() { - let result = read_doc(&PathBuf::from("/nonexistent/file.json")); - assert!(result.is_err()); + fn test_run_nonexistent_input() { + let mut args = args_with_input(PathBuf::from("/nonexistent/file.json")); + args.dead_ends = true; + assert!(run(args, false).is_err()); } } diff --git a/crates/toolpath-cli/src/cmd_render.rs b/crates/toolpath-cli/src/cmd_render.rs index d56c07b..2bb60a6 100644 --- a/crates/toolpath-cli/src/cmd_render.rs +++ b/crates/toolpath-cli/src/cmd_render.rs @@ -1,17 +1,17 @@ -use anyhow::{Context, Result}; +use crate::io::{self as cli_io, InputSpec, OutputSpec}; +use anyhow::Result; use clap::Subcommand; use std::path::PathBuf; -use toolpath::v1::Document; #[derive(Subcommand, Debug)] pub enum RenderFormat { /// Render as Graphviz DOT Dot { - /// Input file (reads from stdin if not provided) + /// Input file (use `-` or omit to read from stdin) #[arg(short, 
long)] input: Option, - /// Output file (writes to stdout if not provided) + /// Output file (use `-` or omit to write to stdout) #[arg(short, long)] output: Option, @@ -29,11 +29,11 @@ pub enum RenderFormat { }, /// Render as Markdown (for LLM consumption) Md { - /// Input file (reads from stdin if not provided) + /// Input file (use `-` or omit to read from stdin) #[arg(short, long)] input: Option, - /// Output file (writes to stdout if not provided) + /// Output file (use `-` or omit to write to stdout) #[arg(short, long)] output: Option, @@ -78,18 +78,7 @@ fn run_dot( show_timestamps: bool, highlight_dead_ends: bool, ) -> Result<()> { - let content = if let Some(path) = &input { - std::fs::read_to_string(path).with_context(|| format!("Failed to read {:?}", path))? - } else { - use std::io::Read; - let mut buf = String::new(); - std::io::stdin() - .read_to_string(&mut buf) - .context("Failed to read from stdin")?; - buf - }; - - let doc = Document::from_json(&content).context("Failed to parse Toolpath document")?; + let doc = cli_io::read_document(&InputSpec::from_opt(input))?; let options = toolpath_dot::RenderOptions { show_files, @@ -98,14 +87,7 @@ fn run_dot( }; let dot = toolpath_dot::render(&doc, &options); - - if let Some(path) = &output { - std::fs::write(path, &dot).with_context(|| format!("Failed to write {:?}", path))?; - } else { - print!("{}", dot); - } - - Ok(()) + OutputSpec::from_opt(output).write_str(&dot) } fn run_md( @@ -114,18 +96,7 @@ fn run_md( detail: &str, front_matter: bool, ) -> Result<()> { - let content = if let Some(path) = &input { - std::fs::read_to_string(path).with_context(|| format!("Failed to read {:?}", path))? 
- } else { - use std::io::Read; - let mut buf = String::new(); - std::io::stdin() - .read_to_string(&mut buf) - .context("Failed to read from stdin")?; - buf - }; - - let doc = Document::from_json(&content).context("Failed to parse Toolpath document")?; + let doc = cli_io::read_document(&InputSpec::from_opt(input))?; let detail = match detail { "full" => toolpath_md::Detail::Full, @@ -138,21 +109,14 @@ fn run_md( }; let md = toolpath_md::render(&doc, &options); - - if let Some(path) = &output { - std::fs::write(path, &md).with_context(|| format!("Failed to write {:?}", path))?; - } else { - print!("{}", md); - } - - Ok(()) + OutputSpec::from_opt(output).write_str(&md) } #[cfg(test)] mod tests { use super::*; use std::io::Write; - use toolpath::v1::{Path, PathIdentity, Step}; + use toolpath::v1::{Document, Path, PathIdentity, Step}; fn make_doc() -> Document { let s1 = diff --git a/crates/toolpath-cli/src/cmd_validate.rs b/crates/toolpath-cli/src/cmd_validate.rs index 5e500a2..37a14bb 100644 --- a/crates/toolpath-cli/src/cmd_validate.rs +++ b/crates/toolpath-cli/src/cmd_validate.rs @@ -1,10 +1,10 @@ -use anyhow::{Context, Result}; -use std::path::PathBuf; +use crate::io::InputSpec; +use anyhow::Result; use toolpath::v1::Document; -pub fn run(input: PathBuf) -> Result<()> { - let content = - std::fs::read_to_string(&input).with_context(|| format!("Failed to read {:?}", input))?; +pub fn run(input: Option) -> Result<()> { + let spec = InputSpec::from_opt(input); + let content = spec.read_string()?; validate_content(&content) } @@ -27,6 +27,7 @@ fn validate_content(content: &str) -> Result<()> { mod tests { use super::*; use std::io::Write; + use std::path::PathBuf; #[test] fn test_validate_valid_step() { @@ -61,11 +62,11 @@ mod tests { let mut f = tempfile::NamedTempFile::new().unwrap(); write!(f, r#"{{"Step":{{"step":{{"id":"s1","actor":"human:alex","timestamp":"2026-01-01T00:00:00Z"}},"change":{{}}}}}}"#).unwrap(); f.flush().unwrap(); - 
assert!(run(f.path().to_path_buf()).is_ok());
+        assert!(run(Some(f.path().to_path_buf())).is_ok());
     }
 
     #[test]
     fn test_run_nonexistent_file() {
-        assert!(run(PathBuf::from("/nonexistent/file.json")).is_err());
+        assert!(run(Some(PathBuf::from("/nonexistent/file.json"))).is_err());
     }
 }
diff --git a/crates/toolpath-cli/src/io.rs b/crates/toolpath-cli/src/io.rs
new file mode 100644
index 0000000..3b7ce60
--- /dev/null
+++ b/crates/toolpath-cli/src/io.rs
@@ -0,0 +1,190 @@
+use anyhow::{Context, Result};
+use std::io::{Read, Write};
+use std::path::PathBuf;
+use toolpath::v1::Document;
+
+pub enum InputSpec {
+    Stdin,
+    File(PathBuf),
+}
+
+pub enum OutputSpec {
+    Stdout,
+    File(PathBuf),
+}
+
+impl InputSpec {
+    pub fn from_opt(p: Option<PathBuf>) -> Self {
+        match p {
+            Some(p) if p.as_os_str() == "-" => Self::Stdin,
+            Some(p) => Self::File(p),
+            None => Self::Stdin,
+        }
+    }
+
+    pub fn from_str(s: &str) -> Self {
+        if s == "-" {
+            Self::Stdin
+        } else {
+            Self::File(PathBuf::from(s))
+        }
+    }
+
+    pub fn read_string(&self) -> Result<String> {
+        match self {
+            Self::Stdin => {
+                let mut buf = String::new();
+                std::io::stdin()
+                    .read_to_string(&mut buf)
+                    .context("Failed to read from stdin")?;
+                Ok(buf)
+            }
+            Self::File(path) => std::fs::read_to_string(path)
+                .with_context(|| format!("Failed to read {:?}", path)),
+        }
+    }
+
+    pub fn label(&self) -> &str {
+        match self {
+            Self::Stdin => "<stdin>",
+            Self::File(p) => p.to_str().unwrap_or("<non-utf8 path>"),
+        }
+    }
+}
+
+impl OutputSpec {
+    pub fn from_opt(p: Option<PathBuf>) -> Self {
+        match p {
+            Some(p) if p.as_os_str() == "-" => Self::Stdout,
+            Some(p) => Self::File(p),
+            None => Self::Stdout,
+        }
+    }
+
+    pub fn write_str(&self, s: &str) -> Result<()> {
+        match self {
+            Self::Stdout => {
+                let mut out = std::io::stdout().lock();
+                out.write_all(s.as_bytes())
+                    .context("Failed to write to stdout")?;
+                Ok(())
+            }
+            Self::File(path) => std::fs::write(path, s)
+                .with_context(|| format!("Failed to write {:?}", path)),
+        }
+    }
+}
+
+pub fn read_document(input: &InputSpec) -> Result<Document> {
+    let content = input.read_string()?;
+    Document::from_json(&content)
+        .with_context(|| format!("Failed to parse Toolpath document from {}", input.label()))
+}
+
+pub fn write_document(doc: &Document, out: &OutputSpec, pretty: bool) -> Result<()> {
+    let json = if pretty {
+        doc.to_json_pretty()
+    } else {
+        doc.to_json()
+    }
+    .context("failed to serialize document")?;
+    let line = if matches!(out, OutputSpec::Stdout) {
+        format!("{}\n", json)
+    } else {
+        json
+    };
+    out.write_str(&line)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn input_spec_from_opt_none_is_stdin() {
+        assert!(matches!(InputSpec::from_opt(None), InputSpec::Stdin));
+    }
+
+    #[test]
+    fn input_spec_from_opt_dash_is_stdin() {
+        assert!(matches!(
+            InputSpec::from_opt(Some(PathBuf::from("-"))),
+            InputSpec::Stdin
+        ));
+    }
+
+    #[test]
+    fn input_spec_from_opt_file() {
+        let s = InputSpec::from_opt(Some(PathBuf::from("foo.json")));
+        match s {
+            InputSpec::File(p) => assert_eq!(p, PathBuf::from("foo.json")),
+            _ => panic!("expected File"),
+        }
+    }
+
+    #[test]
+    fn input_spec_from_str_dash_is_stdin() {
+        assert!(matches!(InputSpec::from_str("-"), InputSpec::Stdin));
+    }
+
+    #[test]
+    fn input_spec_from_str_file() {
+        match InputSpec::from_str("doc.json") {
+            InputSpec::File(p) => assert_eq!(p, PathBuf::from("doc.json")),
+            _ => panic!("expected File"),
+        }
+    }
+
+    #[test]
+    fn output_spec_from_opt_none_is_stdout() {
+        assert!(matches!(OutputSpec::from_opt(None), OutputSpec::Stdout));
+    }
+
+    #[test]
+    fn output_spec_from_opt_dash_is_stdout() {
+        assert!(matches!(
+            OutputSpec::from_opt(Some(PathBuf::from("-"))),
+            OutputSpec::Stdout
+        ));
+    }
+
+    #[test]
+    fn read_document_file_roundtrip() {
+        use std::io::Write as _;
+        use toolpath::v1::Step;
+        let step = Step::new("s1", "human:alex", "2026-01-01T00:00:00Z");
+        let doc = Document::Step(step);
+        let mut f = tempfile::NamedTempFile::new().unwrap();
+        write!(f, "{}", doc.to_json().unwrap()).unwrap();
+        f.flush().unwrap();
+        let parsed = read_document(&InputSpec::File(f.path().to_path_buf())).unwrap();
+        assert!(matches!(parsed, Document::Step(_)));
+    }
+
+    #[test]
+    fn write_document_file() {
+        use toolpath::v1::Step;
+        let step = Step::new("s1", "human:alex", "2026-01-01T00:00:00Z");
+        let doc = Document::Step(step);
+        let f = tempfile::NamedTempFile::new().unwrap();
+        write_document(&doc, &OutputSpec::File(f.path().to_path_buf()), true).unwrap();
+        let back = std::fs::read_to_string(f.path()).unwrap();
+        assert!(back.contains("\"Step\""));
+    }
+
+    #[test]
+    fn read_document_file_missing() {
+        let result = read_document(&InputSpec::File(PathBuf::from("/nonexistent/x.json")));
+        assert!(result.is_err());
+    }
+
+    #[test]
+    fn read_document_invalid_json() {
+        use std::io::Write as _;
+        let mut f = tempfile::NamedTempFile::new().unwrap();
+        write!(f, "not json").unwrap();
+        f.flush().unwrap();
+        let result = read_document(&InputSpec::File(f.path().to_path_buf()));
+        assert!(result.is_err());
+    }
+}
diff --git a/crates/toolpath-cli/src/main.rs b/crates/toolpath-cli/src/main.rs
index fb6ef38..056c9d1 100644
--- a/crates/toolpath-cli/src/main.rs
+++ b/crates/toolpath-cli/src/main.rs
@@ -6,6 +6,8 @@ mod cmd_query;
 mod cmd_render;
 mod cmd_track;
 mod cmd_validate;
+mod io;
+mod source;
 
 use anyhow::Result;
 use clap::{Parser, Subcommand};
@@ -40,10 +42,7 @@ enum Commands {
         source: cmd_derive::DeriveSource,
     },
     /// Query Toolpath documents
-    Query {
-        #[command(subcommand)]
-        op: cmd_query::QueryOp,
-    },
+    Query(cmd_query::QueryArgs),
     /// Render Toolpath documents to other formats
     Render {
         #[command(subcommand)]
@@ -66,9 +65,9 @@
     },
     /// Validate a Toolpath document
     Validate {
-        /// Input file
+        /// Input file (use `-` or omit to read from stdin)
         #[arg(short, long)]
-        input: PathBuf,
+        input: Option<PathBuf>,
     },
     /// Print a random Toolpath haiku
     Haiku,
@@ -80,7 +79,7 @@ fn main() -> Result<()> {
     match cli.command {
         Commands::List { source, json } =>
cmd_list::run(source, json), Commands::Derive { source } => cmd_derive::run(source, cli.pretty), - Commands::Query { op } => cmd_query::run(op, cli.pretty), + Commands::Query(args) => cmd_query::run(args, cli.pretty), Commands::Render { format } => cmd_render::run(format), Commands::Merge { inputs, title } => cmd_merge::run(inputs, title, cli.pretty), Commands::Track { op } => cmd_track::run(op, cli.pretty), diff --git a/crates/toolpath-cli/src/source.rs b/crates/toolpath-cli/src/source.rs new file mode 100644 index 0000000..0474872 --- /dev/null +++ b/crates/toolpath-cli/src/source.rs @@ -0,0 +1,7 @@ +#[cfg(target_os = "emscripten")] +pub fn require_native(cmd: &str) -> anyhow::Result<()> { + anyhow::bail!( + "'path {}' requires a native environment (not available in this WebAssembly build)", + cmd + ) +} diff --git a/crates/toolpath-cli/tests/integration.rs b/crates/toolpath-cli/tests/integration.rs index d2cfae2..7fd4733 100644 --- a/crates/toolpath-cli/tests/integration.rs +++ b/crates/toolpath-cli/tests/integration.rs @@ -278,9 +278,9 @@ fn render_dot_from_stdin() { fn query_dead_ends() { cmd() .arg("query") - .arg("dead-ends") .arg("--input") .arg(examples_dir().join("path-01-pr.json")) + .arg("--dead-ends") .assert() .success() .stdout(predicate::str::contains("step-002a")); @@ -290,10 +290,9 @@ fn query_dead_ends() { fn query_ancestors() { cmd() .arg("query") - .arg("ancestors") .arg("--input") .arg(examples_dir().join("path-01-pr.json")) - .arg("--step-id") + .arg("--ancestors-of") .arg("step-004") .assert() .success()