diff --git a/rust/Cargo.lock b/rust/Cargo.lock index 5507dca..a182255 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -111,6 +111,7 @@ dependencies = [ name = "commands" version = "0.1.0" dependencies = [ + "plugins", "runtime", ] @@ -825,6 +826,14 @@ dependencies = [ "time", ] +[[package]] +name = "plugins" +version = "0.1.0" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "potential_utf" version = "0.1.4" @@ -1092,6 +1101,7 @@ name = "runtime" version = "0.1.0" dependencies = [ "glob", + "plugins", "regex", "serde", "serde_json", @@ -1181,6 +1191,7 @@ dependencies = [ "commands", "compat-harness", "crossterm", + "plugins", "pulldown-cmark", "runtime", "rustyline", @@ -1546,6 +1557,7 @@ name = "tools" version = "0.1.0" dependencies = [ "api", + "plugins", "reqwest", "runtime", "serde", diff --git a/rust/crates/api/src/sse.rs b/rust/crates/api/src/sse.rs index d7334cd..5f54e50 100644 --- a/rust/crates/api/src/sse.rs +++ b/rust/crates/api/src/sse.rs @@ -216,4 +216,64 @@ mod tests { )) ); } + + #[test] + fn parses_thinking_content_block_start() { + let frame = concat!( + "event: content_block_start\n", + "data: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"thinking\",\"thinking\":\"\",\"signature\":null}}\n\n" + ); + + let event = parse_frame(frame).expect("frame should parse"); + assert_eq!( + event, + Some(StreamEvent::ContentBlockStart( + crate::types::ContentBlockStartEvent { + index: 0, + content_block: OutputContentBlock::Thinking { + thinking: String::new(), + signature: None, + }, + }, + )) + ); + } + + #[test] + fn parses_thinking_related_deltas() { + let thinking = concat!( + "event: content_block_delta\n", + "data: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"thinking_delta\",\"thinking\":\"step 1\"}}\n\n" + ); + let signature = concat!( + "event: content_block_delta\n", + "data: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"signature_delta\",\"signature\":\"sig_123\"}}\n\n" + ); + + let thinking_event = parse_frame(thinking).expect("thinking delta should parse"); + let signature_event = parse_frame(signature).expect("signature delta should parse"); + + assert_eq!( + thinking_event, + Some(StreamEvent::ContentBlockDelta( + crate::types::ContentBlockDeltaEvent { + index: 0, + delta: ContentBlockDelta::ThinkingDelta { + thinking: "step 1".to_string(), + }, + } + )) + ); + assert_eq!( + signature_event, + Some(StreamEvent::ContentBlockDelta( + crate::types::ContentBlockDeltaEvent { + index: 0, + delta: ContentBlockDelta::SignatureDelta { + signature: "sig_123".to_string(), + }, + } + )) + ); + } } diff --git a/rust/crates/api/src/types.rs b/rust/crates/api/src/types.rs index 45d5c08..c060be6 100644 --- a/rust/crates/api/src/types.rs +++ b/rust/crates/api/src/types.rs @@ -135,6 +135,15 @@ pub enum OutputContentBlock { name: String, input: Value, }, + Thinking { + #[serde(default)] + thinking: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + signature: Option, + }, + RedactedThinking { + data: Value, + }, } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] @@ -190,6 +199,8 @@ pub struct ContentBlockDeltaEvent { pub enum ContentBlockDelta { TextDelta { text: String }, InputJsonDelta { partial_json: String }, + ThinkingDelta { thinking: String }, + SignatureDelta { signature: String }, } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] diff --git a/rust/crates/api/tests/client_integration.rs b/rust/crates/api/tests/client_integration.rs index c37fa99..be4abca 100644 --- a/rust/crates/api/tests/client_integration.rs +++ b/rust/crates/api/tests/client_integration.rs @@ -75,6 +75,48 @@ async fn send_message_posts_json_and_parses_response() { assert_eq!(body["tool_choice"]["type"], json!("auto")); } +#[tokio::test] +async fn send_message_parses_response_with_thinking_blocks() { + 
let state = Arc::new(Mutex::new(Vec::::new())); + let body = concat!( + "{", + "\"id\":\"msg_thinking\",", + "\"type\":\"message\",", + "\"role\":\"assistant\",", + "\"content\":[", + "{\"type\":\"thinking\",\"thinking\":\"step 1\",\"signature\":\"sig_123\"},", + "{\"type\":\"text\",\"text\":\"Final answer\"}", + "],", + "\"model\":\"claude-3-7-sonnet-latest\",", + "\"stop_reason\":\"end_turn\",", + "\"stop_sequence\":null,", + "\"usage\":{\"input_tokens\":12,\"output_tokens\":4}", + "}" + ); + let server = spawn_server( + state, + vec![http_response("200 OK", "application/json", body)], + ) + .await; + + let client = AnthropicClient::new("test-key").with_base_url(server.base_url()); + let response = client + .send_message(&sample_request(false)) + .await + .expect("request should succeed"); + + assert_eq!(response.content.len(), 2); + assert!(matches!( + &response.content[0], + OutputContentBlock::Thinking { thinking, signature } + if thinking == "step 1" && signature.as_deref() == Some("sig_123") + )); + assert!(matches!( + &response.content[1], + OutputContentBlock::Text { text } if text == "Final answer" + )); +} + #[tokio::test] async fn stream_message_parses_sse_events_with_tool_use() { let state = Arc::new(Mutex::new(Vec::::new())); @@ -162,6 +204,85 @@ async fn stream_message_parses_sse_events_with_tool_use() { assert!(request.body.contains("\"stream\":true")); } +#[tokio::test] +async fn stream_message_parses_sse_events_with_thinking_blocks() { + let state = Arc::new(Mutex::new(Vec::::new())); + let sse = concat!( + "event: message_start\n", + "data: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_stream_thinking\",\"type\":\"message\",\"role\":\"assistant\",\"content\":[],\"model\":\"claude-3-7-sonnet-latest\",\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":8,\"output_tokens\":0}}}\n\n", + "event: content_block_start\n", + "data: 
{\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"thinking\",\"thinking\":\"\"}}\n\n", + "event: content_block_delta\n", + "data: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"thinking_delta\",\"thinking\":\"step 1\"}}\n\n", + "event: content_block_delta\n", + "data: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"signature_delta\",\"signature\":\"sig_123\"}}\n\n", + "event: content_block_stop\n", + "data: {\"type\":\"content_block_stop\",\"index\":0}\n\n", + "event: content_block_start\n", + "data: {\"type\":\"content_block_start\",\"index\":1,\"content_block\":{\"type\":\"text\",\"text\":\"Final answer\"}}\n\n", + "event: content_block_stop\n", + "data: {\"type\":\"content_block_stop\",\"index\":1}\n\n", + "event: message_delta\n", + "data: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":8,\"output_tokens\":1}}\n\n", + "event: message_stop\n", + "data: {\"type\":\"message_stop\"}\n\n", + "data: [DONE]\n\n" + ); + let server = spawn_server( + state, + vec![http_response("200 OK", "text/event-stream", sse)], + ) + .await; + + let client = AnthropicClient::new("test-key").with_base_url(server.base_url()); + let mut stream = client + .stream_message(&sample_request(false)) + .await + .expect("stream should start"); + + let mut events = Vec::new(); + while let Some(event) = stream + .next_event() + .await + .expect("stream event should parse") + { + events.push(event); + } + + assert_eq!(events.len(), 9); + assert!(matches!( + &events[1], + StreamEvent::ContentBlockStart(ContentBlockStartEvent { + content_block: OutputContentBlock::Thinking { thinking, signature }, + .. + }) if thinking.is_empty() && signature.is_none() + )); + assert!(matches!( + &events[2], + StreamEvent::ContentBlockDelta(ContentBlockDeltaEvent { + delta: ContentBlockDelta::ThinkingDelta { thinking }, + .. 
+ }) if thinking == "step 1" + )); + assert!(matches!( + &events[3], + StreamEvent::ContentBlockDelta(ContentBlockDeltaEvent { + delta: ContentBlockDelta::SignatureDelta { signature }, + .. + }) if signature == "sig_123" + )); + assert!(matches!( + &events[5], + StreamEvent::ContentBlockStart(ContentBlockStartEvent { + content_block: OutputContentBlock::Text { text }, + .. + }) if text == "Final answer" + )); + assert!(matches!(events[6], StreamEvent::ContentBlockStop(_))); + assert!(matches!(events[7], StreamEvent::MessageDelta(_))); + assert!(matches!(events[8], StreamEvent::MessageStop(_))); +} + #[tokio::test] async fn retries_retryable_failures_before_succeeding() { let state = Arc::new(Mutex::new(Vec::::new())); diff --git a/rust/crates/commands/Cargo.toml b/rust/crates/commands/Cargo.toml index d465bff..b3a68b6 100644 --- a/rust/crates/commands/Cargo.toml +++ b/rust/crates/commands/Cargo.toml @@ -9,4 +9,5 @@ publish.workspace = true workspace = true [dependencies] +plugins = { path = "../plugins" } runtime = { path = "../runtime" } diff --git a/rust/crates/commands/src/lib.rs b/rust/crates/commands/src/lib.rs index e7f8d13..eb04307 100644 --- a/rust/crates/commands/src/lib.rs +++ b/rust/crates/commands/src/lib.rs @@ -1,3 +1,9 @@ +use std::collections::BTreeMap; +use std::env; +use std::fs; +use std::path::{Path, PathBuf}; + +use plugins::{PluginError, PluginManager, PluginSummary}; use runtime::{compact_session, CompactionConfig, Session}; #[derive(Debug, Clone, PartialEq, Eq)] @@ -33,6 +39,7 @@ impl CommandRegistry { #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct SlashCommandSpec { pub name: &'static str, + pub aliases: &'static [&'static str], pub summary: &'static str, pub argument_hint: Option<&'static str>, pub resume_supported: bool, @@ -41,136 +48,181 @@ pub struct SlashCommandSpec { const SLASH_COMMAND_SPECS: &[SlashCommandSpec] = &[ SlashCommandSpec { name: "help", + aliases: &[], summary: "Show available slash commands", argument_hint: 
None, resume_supported: true, }, SlashCommandSpec { name: "status", + aliases: &[], summary: "Show current session status", argument_hint: None, resume_supported: true, }, SlashCommandSpec { name: "compact", + aliases: &[], summary: "Compact local session history", argument_hint: None, resume_supported: true, }, SlashCommandSpec { name: "model", + aliases: &[], summary: "Show or switch the active model", argument_hint: Some("[model]"), resume_supported: false, }, SlashCommandSpec { name: "permissions", + aliases: &[], summary: "Show or switch the active permission mode", argument_hint: Some("[read-only|workspace-write|danger-full-access]"), resume_supported: false, }, SlashCommandSpec { name: "clear", + aliases: &[], summary: "Start a fresh local session", argument_hint: Some("[--confirm]"), resume_supported: true, }, SlashCommandSpec { name: "cost", + aliases: &[], summary: "Show cumulative token usage for this session", argument_hint: None, resume_supported: true, }, SlashCommandSpec { name: "resume", + aliases: &[], summary: "Load a saved session into the REPL", argument_hint: Some(""), resume_supported: false, }, SlashCommandSpec { name: "config", + aliases: &[], summary: "Inspect Claude config files or merged sections", - argument_hint: Some("[env|hooks|model]"), + argument_hint: Some("[env|hooks|model|plugins]"), resume_supported: true, }, SlashCommandSpec { name: "memory", + aliases: &[], summary: "Inspect loaded Claude instruction memory files", argument_hint: None, resume_supported: true, }, SlashCommandSpec { name: "init", + aliases: &[], summary: "Create a starter CLAUDE.md for this repo", argument_hint: None, resume_supported: true, }, SlashCommandSpec { name: "diff", + aliases: &[], summary: "Show git diff for current workspace changes", argument_hint: None, resume_supported: true, }, SlashCommandSpec { name: "version", + aliases: &[], summary: "Show CLI version and build information", argument_hint: None, resume_supported: true, }, SlashCommandSpec { 
name: "bughunter", + aliases: &[], summary: "Inspect the codebase for likely bugs", argument_hint: Some("[scope]"), resume_supported: false, }, SlashCommandSpec { name: "commit", + aliases: &[], summary: "Generate a commit message and create a git commit", argument_hint: None, resume_supported: false, }, SlashCommandSpec { name: "pr", + aliases: &[], summary: "Draft or create a pull request from the conversation", argument_hint: Some("[context]"), resume_supported: false, }, SlashCommandSpec { name: "issue", + aliases: &[], summary: "Draft or create a GitHub issue from the conversation", argument_hint: Some("[context]"), resume_supported: false, }, SlashCommandSpec { name: "ultraplan", + aliases: &[], summary: "Run a deep planning prompt with multi-step reasoning", argument_hint: Some("[task]"), resume_supported: false, }, SlashCommandSpec { name: "teleport", + aliases: &[], summary: "Jump to a file or symbol by searching the workspace", argument_hint: Some(""), resume_supported: false, }, SlashCommandSpec { name: "debug-tool-call", + aliases: &[], summary: "Replay the last tool call with debug details", argument_hint: None, resume_supported: false, }, SlashCommandSpec { name: "export", + aliases: &[], summary: "Export the current conversation to a file", argument_hint: Some("[file]"), resume_supported: true, }, SlashCommandSpec { name: "session", + aliases: &[], summary: "List or switch managed local sessions", argument_hint: Some("[list|switch ]"), resume_supported: false, }, + SlashCommandSpec { + name: "plugin", + aliases: &["plugins", "marketplace"], + summary: "Manage Claude Code plugins", + argument_hint: Some( + "[list|install |enable |disable |uninstall |update ]", + ), + resume_supported: false, + }, + SlashCommandSpec { + name: "agents", + aliases: &[], + summary: "Manage agent configurations", + argument_hint: None, + resume_supported: false, + }, + SlashCommandSpec { + name: "skills", + aliases: &[], + summary: "List available skills", + argument_hint: 
None, + resume_supported: false, + }, ]; #[derive(Debug, Clone, PartialEq, Eq)] @@ -222,6 +274,16 @@ pub enum SlashCommand { action: Option, target: Option, }, + Plugins { + action: Option, + target: Option, + }, + Agents { + args: Option, + }, + Skills { + args: Option, + }, Unknown(String), } @@ -283,6 +345,19 @@ impl SlashCommand { action: parts.next().map(ToOwned::to_owned), target: parts.next().map(ToOwned::to_owned), }, + "plugin" | "plugins" | "marketplace" => Self::Plugins { + action: parts.next().map(ToOwned::to_owned), + target: { + let remainder = parts.collect::>().join(" "); + (!remainder.is_empty()).then_some(remainder) + }, + }, + "agents" => Self::Agents { + args: remainder_after_command(trimmed, command), + }, + "skills" => Self::Skills { + args: remainder_after_command(trimmed, command), + }, other => Self::Unknown(other.to_string()), }) } @@ -321,12 +396,27 @@ pub fn render_slash_command_help() -> String { Some(argument_hint) => format!("/{} {}", spec.name, argument_hint), None => format!("/{}", spec.name), }; + let alias_suffix = if spec.aliases.is_empty() { + String::new() + } else { + format!( + " (aliases: {})", + spec.aliases + .iter() + .map(|alias| format!("/{alias}")) + .collect::>() + .join(", ") + ) + }; let resume = if spec.resume_supported { " [resume]" } else { "" }; - lines.push(format!(" {name:<20} {}{}", spec.summary, resume)); + lines.push(format!( + " {name:<20} {}{alias_suffix}{resume}", + spec.summary + )); } lines.join("\n") } @@ -337,6 +427,532 @@ pub struct SlashCommandResult { pub session: Session, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PluginsCommandResult { + pub message: String, + pub reload_runtime: bool, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +enum DefinitionSource { + ProjectCodex, + ProjectClaude, + UserCodexHome, + UserCodex, + UserClaude, +} + +impl DefinitionSource { + fn label(self) -> &'static str { + match self { + Self::ProjectCodex => "Project (.codex)", + 
Self::ProjectClaude => "Project (.claude)", + Self::UserCodexHome => "User ($CODEX_HOME)", + Self::UserCodex => "User (~/.codex)", + Self::UserClaude => "User (~/.claude)", + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct AgentSummary { + name: String, + description: Option, + model: Option, + reasoning_effort: Option, + source: DefinitionSource, + shadowed_by: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct SkillSummary { + name: String, + description: Option, + source: DefinitionSource, + shadowed_by: Option, +} + +#[allow(clippy::too_many_lines)] +pub fn handle_plugins_slash_command( + action: Option<&str>, + target: Option<&str>, + manager: &mut PluginManager, +) -> Result { + match action { + None | Some("list") => Ok(PluginsCommandResult { + message: render_plugins_report(&manager.list_installed_plugins()?), + reload_runtime: false, + }), + Some("install") => { + let Some(target) = target else { + return Ok(PluginsCommandResult { + message: "Usage: /plugins install ".to_string(), + reload_runtime: false, + }); + }; + let install = manager.install(target)?; + let plugin = manager + .list_installed_plugins()? 
+ .into_iter() + .find(|plugin| plugin.metadata.id == install.plugin_id); + Ok(PluginsCommandResult { + message: render_plugin_install_report(&install.plugin_id, plugin.as_ref()), + reload_runtime: true, + }) + } + Some("enable") => { + let Some(target) = target else { + return Ok(PluginsCommandResult { + message: "Usage: /plugins enable ".to_string(), + reload_runtime: false, + }); + }; + let plugin = resolve_plugin_target(manager, target)?; + manager.enable(&plugin.metadata.id)?; + Ok(PluginsCommandResult { + message: format!( + "Plugins\n Result enabled {}\n Name {}\n Version {}\n Status enabled", + plugin.metadata.id, plugin.metadata.name, plugin.metadata.version + ), + reload_runtime: true, + }) + } + Some("disable") => { + let Some(target) = target else { + return Ok(PluginsCommandResult { + message: "Usage: /plugins disable ".to_string(), + reload_runtime: false, + }); + }; + let plugin = resolve_plugin_target(manager, target)?; + manager.disable(&plugin.metadata.id)?; + Ok(PluginsCommandResult { + message: format!( + "Plugins\n Result disabled {}\n Name {}\n Version {}\n Status disabled", + plugin.metadata.id, plugin.metadata.name, plugin.metadata.version + ), + reload_runtime: true, + }) + } + Some("uninstall") => { + let Some(target) = target else { + return Ok(PluginsCommandResult { + message: "Usage: /plugins uninstall ".to_string(), + reload_runtime: false, + }); + }; + manager.uninstall(target)?; + Ok(PluginsCommandResult { + message: format!("Plugins\n Result uninstalled {target}"), + reload_runtime: true, + }) + } + Some("update") => { + let Some(target) = target else { + return Ok(PluginsCommandResult { + message: "Usage: /plugins update ".to_string(), + reload_runtime: false, + }); + }; + let update = manager.update(target)?; + let plugin = manager + .list_installed_plugins()? 
+ .into_iter() + .find(|plugin| plugin.metadata.id == update.plugin_id); + Ok(PluginsCommandResult { + message: format!( + "Plugins\n Result updated {}\n Name {}\n Old version {}\n New version {}\n Status {}", + update.plugin_id, + plugin + .as_ref() + .map_or_else(|| update.plugin_id.clone(), |plugin| plugin.metadata.name.clone()), + update.old_version, + update.new_version, + plugin + .as_ref() + .map_or("unknown", |plugin| if plugin.enabled { "enabled" } else { "disabled" }), + ), + reload_runtime: true, + }) + } + Some(other) => Ok(PluginsCommandResult { + message: format!( + "Unknown /plugins action '{other}'. Use list, install, enable, disable, uninstall, or update." + ), + reload_runtime: false, + }), + } +} + +pub fn handle_agents_slash_command(args: Option<&str>, cwd: &Path) -> std::io::Result { + if let Some(args) = args.filter(|value| !value.trim().is_empty()) { + return Ok(format!("Usage: /agents\nUnexpected arguments: {args}")); + } + + let roots = discover_definition_roots(cwd, "agents"); + let agents = load_agents_from_roots(&roots)?; + Ok(render_agents_report(&agents)) +} + +pub fn handle_skills_slash_command(args: Option<&str>, cwd: &Path) -> std::io::Result { + if let Some(args) = args.filter(|value| !value.trim().is_empty()) { + return Ok(format!("Usage: /skills\nUnexpected arguments: {args}")); + } + + let roots = discover_definition_roots(cwd, "skills"); + let skills = load_skills_from_roots(&roots)?; + Ok(render_skills_report(&skills)) +} + +#[must_use] +pub fn render_plugins_report(plugins: &[PluginSummary]) -> String { + let mut lines = vec!["Plugins".to_string()]; + if plugins.is_empty() { + lines.push(" No plugins installed.".to_string()); + return lines.join("\n"); + } + for plugin in plugins { + let enabled = if plugin.enabled { + "enabled" + } else { + "disabled" + }; + lines.push(format!( + " {name:<20} v{version:<10} {enabled}", + name = plugin.metadata.name, + version = plugin.metadata.version, + )); + } + lines.join("\n") +} + +fn 
render_plugin_install_report(plugin_id: &str, plugin: Option<&PluginSummary>) -> String { + let name = plugin.map_or(plugin_id, |plugin| plugin.metadata.name.as_str()); + let version = plugin.map_or("unknown", |plugin| plugin.metadata.version.as_str()); + let enabled = plugin.is_some_and(|plugin| plugin.enabled); + format!( + "Plugins\n Result installed {plugin_id}\n Name {name}\n Version {version}\n Status {}", + if enabled { "enabled" } else { "disabled" } + ) +} + +fn resolve_plugin_target( + manager: &PluginManager, + target: &str, +) -> Result { + let mut matches = manager + .list_installed_plugins()? + .into_iter() + .filter(|plugin| plugin.metadata.id == target || plugin.metadata.name == target) + .collect::>(); + match matches.len() { + 1 => Ok(matches.remove(0)), + 0 => Err(PluginError::NotFound(format!( + "plugin `{target}` is not installed or discoverable" + ))), + _ => Err(PluginError::InvalidManifest(format!( + "plugin name `{target}` is ambiguous; use the full plugin id" + ))), + } +} + +fn discover_definition_roots(cwd: &Path, leaf: &str) -> Vec<(DefinitionSource, PathBuf)> { + let mut roots = Vec::new(); + + for ancestor in cwd.ancestors() { + push_unique_root( + &mut roots, + DefinitionSource::ProjectCodex, + ancestor.join(".codex").join(leaf), + ); + push_unique_root( + &mut roots, + DefinitionSource::ProjectClaude, + ancestor.join(".claude").join(leaf), + ); + } + + if let Ok(codex_home) = env::var("CODEX_HOME") { + push_unique_root( + &mut roots, + DefinitionSource::UserCodexHome, + PathBuf::from(codex_home).join(leaf), + ); + } + + if let Some(home) = env::var_os("HOME") { + let home = PathBuf::from(home); + push_unique_root( + &mut roots, + DefinitionSource::UserCodex, + home.join(".codex").join(leaf), + ); + push_unique_root( + &mut roots, + DefinitionSource::UserClaude, + home.join(".claude").join(leaf), + ); + } + + roots +} + +fn push_unique_root( + roots: &mut Vec<(DefinitionSource, PathBuf)>, + source: DefinitionSource, + path: PathBuf, 
+) { + if path.is_dir() && !roots.iter().any(|(_, existing)| existing == &path) { + roots.push((source, path)); + } +} + +fn load_agents_from_roots( + roots: &[(DefinitionSource, PathBuf)], +) -> std::io::Result> { + let mut agents = Vec::new(); + let mut active_sources = BTreeMap::::new(); + + for (source, root) in roots { + let mut root_agents = Vec::new(); + for entry in fs::read_dir(root)? { + let entry = entry?; + if entry.path().extension().is_none_or(|ext| ext != "toml") { + continue; + } + let contents = fs::read_to_string(entry.path())?; + let fallback_name = entry + .path() + .file_stem() + .map(|stem| stem.to_string_lossy().to_string()) + .unwrap_or_else(|| entry.file_name().to_string_lossy().to_string()); + root_agents.push(AgentSummary { + name: parse_toml_string(&contents, "name").unwrap_or(fallback_name), + description: parse_toml_string(&contents, "description"), + model: parse_toml_string(&contents, "model"), + reasoning_effort: parse_toml_string(&contents, "model_reasoning_effort"), + source: *source, + shadowed_by: None, + }); + } + root_agents.sort_by(|left, right| left.name.cmp(&right.name)); + + for mut agent in root_agents { + let key = agent.name.to_ascii_lowercase(); + if let Some(existing) = active_sources.get(&key) { + agent.shadowed_by = Some(*existing); + } else { + active_sources.insert(key, agent.source); + } + agents.push(agent); + } + } + + Ok(agents) +} + +fn load_skills_from_roots( + roots: &[(DefinitionSource, PathBuf)], +) -> std::io::Result> { + let mut skills = Vec::new(); + let mut active_sources = BTreeMap::::new(); + + for (source, root) in roots { + let mut root_skills = Vec::new(); + for entry in fs::read_dir(root)? 
{ + let entry = entry?; + if !entry.path().is_dir() { + continue; + } + let skill_path = entry.path().join("SKILL.md"); + if !skill_path.is_file() { + continue; + } + let contents = fs::read_to_string(skill_path)?; + let (name, description) = parse_skill_frontmatter(&contents); + root_skills.push(SkillSummary { + name: name.unwrap_or_else(|| entry.file_name().to_string_lossy().to_string()), + description, + source: *source, + shadowed_by: None, + }); + } + root_skills.sort_by(|left, right| left.name.cmp(&right.name)); + + for mut skill in root_skills { + let key = skill.name.to_ascii_lowercase(); + if let Some(existing) = active_sources.get(&key) { + skill.shadowed_by = Some(*existing); + } else { + active_sources.insert(key, skill.source); + } + skills.push(skill); + } + } + + Ok(skills) +} + +fn parse_toml_string(contents: &str, key: &str) -> Option { + let prefix = format!("{key} ="); + for line in contents.lines() { + let trimmed = line.trim(); + if trimmed.starts_with('#') { + continue; + } + let Some(value) = trimmed.strip_prefix(&prefix) else { + continue; + }; + let value = value.trim(); + let Some(value) = value + .strip_prefix('"') + .and_then(|value| value.strip_suffix('"')) + else { + continue; + }; + if !value.is_empty() { + return Some(value.to_string()); + } + } + None +} + +fn parse_skill_frontmatter(contents: &str) -> (Option, Option) { + let mut lines = contents.lines(); + if lines.next().map(str::trim) != Some("---") { + return (None, None); + } + + let mut name = None; + let mut description = None; + for line in lines { + let trimmed = line.trim(); + if trimmed == "---" { + break; + } + if let Some(value) = trimmed.strip_prefix("name:") { + let value = value.trim(); + if !value.is_empty() { + name = Some(value.to_string()); + } + continue; + } + if let Some(value) = trimmed.strip_prefix("description:") { + let value = value.trim(); + if !value.is_empty() { + description = Some(value.to_string()); + } + } + } + + (name, description) +} + +fn 
render_agents_report(agents: &[AgentSummary]) -> String { + if agents.is_empty() { + return "No agents found.".to_string(); + } + + let total_active = agents + .iter() + .filter(|agent| agent.shadowed_by.is_none()) + .count(); + let mut lines = vec![ + "Agents".to_string(), + format!(" {total_active} active agents"), + String::new(), + ]; + + for source in [ + DefinitionSource::ProjectCodex, + DefinitionSource::ProjectClaude, + DefinitionSource::UserCodexHome, + DefinitionSource::UserCodex, + DefinitionSource::UserClaude, + ] { + let group = agents + .iter() + .filter(|agent| agent.source == source) + .collect::>(); + if group.is_empty() { + continue; + } + + lines.push(format!("{}:", source.label())); + for agent in group { + let detail = agent_detail(agent); + match agent.shadowed_by { + Some(winner) => lines.push(format!(" (shadowed by {}) {detail}", winner.label())), + None => lines.push(format!(" {detail}")), + } + } + lines.push(String::new()); + } + + lines.join("\n").trim_end().to_string() +} + +fn agent_detail(agent: &AgentSummary) -> String { + let mut parts = vec![agent.name.clone()]; + if let Some(description) = &agent.description { + parts.push(description.clone()); + } + if let Some(model) = &agent.model { + parts.push(model.clone()); + } + if let Some(reasoning) = &agent.reasoning_effort { + parts.push(reasoning.clone()); + } + parts.join(" · ") +} + +fn render_skills_report(skills: &[SkillSummary]) -> String { + if skills.is_empty() { + return "No skills found.".to_string(); + } + + let total_active = skills + .iter() + .filter(|skill| skill.shadowed_by.is_none()) + .count(); + let mut lines = vec![ + "Skills".to_string(), + format!(" {total_active} available skills"), + String::new(), + ]; + + for source in [ + DefinitionSource::ProjectCodex, + DefinitionSource::ProjectClaude, + DefinitionSource::UserCodexHome, + DefinitionSource::UserCodex, + DefinitionSource::UserClaude, + ] { + let group = skills + .iter() + .filter(|skill| skill.source == 
source) + .collect::>(); + if group.is_empty() { + continue; + } + + lines.push(format!("{}:", source.label())); + for skill in group { + let detail = match &skill.description { + Some(description) => format!("{} · {}", skill.name, description), + None => skill.name.clone(), + }; + match skill.shadowed_by { + Some(winner) => lines.push(format!(" (shadowed by {}) {detail}", winner.label())), + None => lines.push(format!(" {detail}")), + } + } + lines.push(String::new()); + } + + lines.join("\n").trim_end().to_string() +} + #[must_use] pub fn handle_slash_command( input: &str, @@ -383,6 +999,9 @@ pub fn handle_slash_command( | SlashCommand::Version | SlashCommand::Export { .. } | SlashCommand::Session { .. } + | SlashCommand::Plugins { .. } + | SlashCommand::Agents { .. } + | SlashCommand::Skills { .. } | SlashCommand::Unknown(_) => None, } } @@ -390,11 +1009,70 @@ pub fn handle_slash_command( #[cfg(test)] mod tests { use super::{ - handle_slash_command, render_slash_command_help, resume_supported_slash_commands, - slash_command_specs, SlashCommand, + handle_plugins_slash_command, handle_slash_command, load_agents_from_roots, + load_skills_from_roots, render_agents_report, render_plugins_report, render_skills_report, + render_slash_command_help, resume_supported_slash_commands, slash_command_specs, + DefinitionSource, SlashCommand, }; + use plugins::{PluginKind, PluginManager, PluginManagerConfig, PluginMetadata, PluginSummary}; use runtime::{CompactionConfig, ContentBlock, ConversationMessage, MessageRole, Session}; + use std::fs; + use std::path::{Path, PathBuf}; + use std::time::{SystemTime, UNIX_EPOCH}; + fn temp_dir(label: &str) -> PathBuf { + let nanos = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("time should be after epoch") + .as_nanos(); + std::env::temp_dir().join(format!("commands-plugin-{label}-{nanos}")) + } + + fn write_external_plugin(root: &Path, name: &str, version: &str) { + 
fs::create_dir_all(root.join(".claude-plugin")).expect("manifest dir"); + fs::write( + root.join(".claude-plugin").join("plugin.json"), + format!( + "{{\n \"name\": \"{name}\",\n \"version\": \"{version}\",\n \"description\": \"commands plugin\"\n}}" + ), + ) + .expect("write manifest"); + } + + fn write_bundled_plugin(root: &Path, name: &str, version: &str, default_enabled: bool) { + fs::create_dir_all(root.join(".claude-plugin")).expect("manifest dir"); + fs::write( + root.join(".claude-plugin").join("plugin.json"), + format!( + "{{\n \"name\": \"{name}\",\n \"version\": \"{version}\",\n \"description\": \"bundled commands plugin\",\n \"defaultEnabled\": {}\n}}", + if default_enabled { "true" } else { "false" } + ), + ) + .expect("write bundled manifest"); + } + + fn write_agent(root: &Path, name: &str, description: &str, model: &str, reasoning: &str) { + fs::create_dir_all(root).expect("agent root"); + fs::write( + root.join(format!("{name}.toml")), + format!( + "name = \"{name}\"\ndescription = \"{description}\"\nmodel = \"{model}\"\nmodel_reasoning_effort = \"{reasoning}\"\n" + ), + ) + .expect("write agent"); + } + + fn write_skill(root: &Path, name: &str, description: &str) { + let skill_root = root.join(name); + fs::create_dir_all(&skill_root).expect("skill root"); + fs::write( + skill_root.join("SKILL.md"), + format!("---\nname: {name}\ndescription: {description}\n---\n\n# {name}\n"), + ) + .expect("write skill"); + } + + #[allow(clippy::too_many_lines)] #[test] fn parses_supported_slash_commands() { assert_eq!(SlashCommand::parse("/help"), Some(SlashCommand::Help)); @@ -492,6 +1170,34 @@ mod tests { target: Some("abc123".to_string()) }) ); + assert_eq!( + SlashCommand::parse("/plugins install demo"), + Some(SlashCommand::Plugins { + action: Some("install".to_string()), + target: Some("demo".to_string()) + }) + ); + assert_eq!( + SlashCommand::parse("/plugins list"), + Some(SlashCommand::Plugins { + action: Some("list".to_string()), + target: None + }) + 
); + assert_eq!( + SlashCommand::parse("/plugins enable demo"), + Some(SlashCommand::Plugins { + action: Some("enable".to_string()), + target: Some("demo".to_string()) + }) + ); + assert_eq!( + SlashCommand::parse("/plugins disable demo"), + Some(SlashCommand::Plugins { + action: Some("disable".to_string()), + target: Some("demo".to_string()) + }) + ); } #[test] @@ -513,14 +1219,17 @@ mod tests { assert!(help.contains("/clear [--confirm]")); assert!(help.contains("/cost")); assert!(help.contains("/resume ")); - assert!(help.contains("/config [env|hooks|model]")); + assert!(help.contains("/config [env|hooks|model|plugins]")); assert!(help.contains("/memory")); assert!(help.contains("/init")); assert!(help.contains("/diff")); assert!(help.contains("/version")); assert!(help.contains("/export [file]")); assert!(help.contains("/session [list|switch ]")); - assert_eq!(slash_command_specs().len(), 22); + assert!(help.contains( + "/plugins [list|install |enable |disable |uninstall |update ]" + )); + assert_eq!(slash_command_specs().len(), 23); assert_eq!(resume_supported_slash_commands().len(), 11); } @@ -618,5 +1327,219 @@ mod tests { assert!( handle_slash_command("/session list", &session, CompactionConfig::default()).is_none() ); + assert!( + handle_slash_command("/plugins list", &session, CompactionConfig::default()).is_none() + ); + } + + #[test] + fn renders_plugins_report_with_name_version_and_status() { + let rendered = render_plugins_report(&[ + PluginSummary { + metadata: PluginMetadata { + id: "demo@external".to_string(), + name: "demo".to_string(), + version: "1.2.3".to_string(), + description: "demo plugin".to_string(), + kind: PluginKind::External, + source: "demo".to_string(), + default_enabled: false, + root: None, + }, + enabled: true, + }, + PluginSummary { + metadata: PluginMetadata { + id: "sample@external".to_string(), + name: "sample".to_string(), + version: "0.9.0".to_string(), + description: "sample plugin".to_string(), + kind: 
PluginKind::External, + source: "sample".to_string(), + default_enabled: false, + root: None, + }, + enabled: false, + }, + ]); + + assert!(rendered.contains("demo")); + assert!(rendered.contains("v1.2.3")); + assert!(rendered.contains("enabled")); + assert!(rendered.contains("sample")); + assert!(rendered.contains("v0.9.0")); + assert!(rendered.contains("disabled")); + } + + #[test] + fn lists_agents_from_project_and_user_roots() { + let workspace = temp_dir("agents-workspace"); + let project_agents = workspace.join(".codex").join("agents"); + let user_home = temp_dir("agents-home"); + let user_agents = user_home.join(".codex").join("agents"); + + write_agent( + &project_agents, + "planner", + "Project planner", + "gpt-5.4", + "medium", + ); + write_agent( + &user_agents, + "planner", + "User planner", + "gpt-5.4-mini", + "high", + ); + write_agent( + &user_agents, + "verifier", + "Verification agent", + "gpt-5.4-mini", + "high", + ); + + let roots = vec![ + (DefinitionSource::ProjectCodex, project_agents), + (DefinitionSource::UserCodex, user_agents), + ]; + let report = + render_agents_report(&load_agents_from_roots(&roots).expect("agent roots should load")); + + assert!(report.contains("Agents")); + assert!(report.contains("2 active agents")); + assert!(report.contains("Project (.codex):")); + assert!(report.contains("planner · Project planner · gpt-5.4 · medium")); + assert!(report.contains("User (~/.codex):")); + assert!(report.contains("(shadowed by Project (.codex)) planner · User planner")); + assert!(report.contains("verifier · Verification agent · gpt-5.4-mini · high")); + + let _ = fs::remove_dir_all(workspace); + let _ = fs::remove_dir_all(user_home); + } + + #[test] + fn lists_skills_from_project_and_user_roots() { + let workspace = temp_dir("skills-workspace"); + let project_skills = workspace.join(".codex").join("skills"); + let user_home = temp_dir("skills-home"); + let user_skills = user_home.join(".codex").join("skills"); + + 
write_skill(&project_skills, "plan", "Project planning guidance"); + write_skill(&user_skills, "plan", "User planning guidance"); + write_skill(&user_skills, "help", "Help guidance"); + + let roots = vec![ + (DefinitionSource::ProjectCodex, project_skills), + (DefinitionSource::UserCodex, user_skills), + ]; + let report = + render_skills_report(&load_skills_from_roots(&roots).expect("skill roots should load")); + + assert!(report.contains("Skills")); + assert!(report.contains("2 available skills")); + assert!(report.contains("Project (.codex):")); + assert!(report.contains("plan · Project planning guidance")); + assert!(report.contains("User (~/.codex):")); + assert!(report.contains("(shadowed by Project (.codex)) plan · User planning guidance")); + assert!(report.contains("help · Help guidance")); + + let _ = fs::remove_dir_all(workspace); + let _ = fs::remove_dir_all(user_home); + } + + #[test] + fn installs_plugin_from_path_and_lists_it() { + let config_home = temp_dir("home"); + let source_root = temp_dir("source"); + write_external_plugin(&source_root, "demo", "1.0.0"); + + let mut manager = PluginManager::new(PluginManagerConfig::new(&config_home)); + let install = handle_plugins_slash_command( + Some("install"), + Some(source_root.to_str().expect("utf8 path")), + &mut manager, + ) + .expect("install command should succeed"); + assert!(install.reload_runtime); + assert!(install.message.contains("installed demo@external")); + assert!(install.message.contains("Name demo")); + assert!(install.message.contains("Version 1.0.0")); + assert!(install.message.contains("Status enabled")); + + let list = handle_plugins_slash_command(Some("list"), None, &mut manager) + .expect("list command should succeed"); + assert!(!list.reload_runtime); + assert!(list.message.contains("demo")); + assert!(list.message.contains("v1.0.0")); + assert!(list.message.contains("enabled")); + + let _ = fs::remove_dir_all(config_home); + let _ = fs::remove_dir_all(source_root); + } + + #[test] 
+ fn enables_and_disables_plugin_by_name() { + let config_home = temp_dir("toggle-home"); + let source_root = temp_dir("toggle-source"); + write_external_plugin(&source_root, "demo", "1.0.0"); + + let mut manager = PluginManager::new(PluginManagerConfig::new(&config_home)); + handle_plugins_slash_command( + Some("install"), + Some(source_root.to_str().expect("utf8 path")), + &mut manager, + ) + .expect("install command should succeed"); + + let disable = handle_plugins_slash_command(Some("disable"), Some("demo"), &mut manager) + .expect("disable command should succeed"); + assert!(disable.reload_runtime); + assert!(disable.message.contains("disabled demo@external")); + assert!(disable.message.contains("Name demo")); + assert!(disable.message.contains("Status disabled")); + + let list = handle_plugins_slash_command(Some("list"), None, &mut manager) + .expect("list command should succeed"); + assert!(list.message.contains("demo")); + assert!(list.message.contains("disabled")); + + let enable = handle_plugins_slash_command(Some("enable"), Some("demo"), &mut manager) + .expect("enable command should succeed"); + assert!(enable.reload_runtime); + assert!(enable.message.contains("enabled demo@external")); + assert!(enable.message.contains("Name demo")); + assert!(enable.message.contains("Status enabled")); + + let list = handle_plugins_slash_command(Some("list"), None, &mut manager) + .expect("list command should succeed"); + assert!(list.message.contains("demo")); + assert!(list.message.contains("enabled")); + + let _ = fs::remove_dir_all(config_home); + let _ = fs::remove_dir_all(source_root); + } + + #[test] + fn lists_auto_installed_bundled_plugins_with_status() { + let config_home = temp_dir("bundled-home"); + let bundled_root = temp_dir("bundled-root"); + let bundled_plugin = bundled_root.join("starter"); + write_bundled_plugin(&bundled_plugin, "starter", "0.1.0", false); + + let mut config = PluginManagerConfig::new(&config_home); + config.bundled_root = 
Some(bundled_root.clone()); + let mut manager = PluginManager::new(config); + + let list = handle_plugins_slash_command(Some("list"), None, &mut manager) + .expect("list command should succeed"); + assert!(!list.reload_runtime); + assert!(list.message.contains("starter")); + assert!(list.message.contains("v0.1.0")); + assert!(list.message.contains("disabled")); + + let _ = fs::remove_dir_all(config_home); + let _ = fs::remove_dir_all(bundled_root); } } diff --git a/rust/crates/compat-harness/src/lib.rs b/rust/crates/compat-harness/src/lib.rs index 7176c27..1acfec9 100644 --- a/rust/crates/compat-harness/src/lib.rs +++ b/rust/crates/compat-harness/src/lib.rs @@ -74,11 +74,7 @@ fn upstream_repo_candidates(primary_repo_root: &Path) -> Vec { candidates.push(ancestor.join("clawd-code")); } - candidates.push( - primary_repo_root - .join("reference-source") - .join("claw-code"), - ); + candidates.push(primary_repo_root.join("reference-source").join("claw-code")); candidates.push(primary_repo_root.join("vendor").join("claw-code")); let mut deduped = Vec::new(); diff --git a/rust/crates/plugins/Cargo.toml b/rust/crates/plugins/Cargo.toml new file mode 100644 index 0000000..1771acc --- /dev/null +++ b/rust/crates/plugins/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "plugins" +version.workspace = true +edition.workspace = true +license.workspace = true +publish.workspace = true + +[dependencies] +serde = { version = "1", features = ["derive"] } +serde_json = "1" + +[lints] +workspace = true diff --git a/rust/crates/plugins/bundled/example-bundled/.claude-plugin/plugin.json b/rust/crates/plugins/bundled/example-bundled/.claude-plugin/plugin.json new file mode 100644 index 0000000..81a4220 --- /dev/null +++ b/rust/crates/plugins/bundled/example-bundled/.claude-plugin/plugin.json @@ -0,0 +1,10 @@ +{ + "name": "example-bundled", + "version": "0.1.0", + "description": "Example bundled plugin scaffold for the Rust plugin system", + "defaultEnabled": false, + "hooks": { + 
"PreToolUse": ["./hooks/pre.sh"], + "PostToolUse": ["./hooks/post.sh"] + } +} diff --git a/rust/crates/plugins/bundled/example-bundled/hooks/post.sh b/rust/crates/plugins/bundled/example-bundled/hooks/post.sh new file mode 100755 index 0000000..c9eb66f --- /dev/null +++ b/rust/crates/plugins/bundled/example-bundled/hooks/post.sh @@ -0,0 +1,2 @@ +#!/bin/sh +printf '%s\n' 'example bundled post hook' diff --git a/rust/crates/plugins/bundled/example-bundled/hooks/pre.sh b/rust/crates/plugins/bundled/example-bundled/hooks/pre.sh new file mode 100755 index 0000000..af6b46b --- /dev/null +++ b/rust/crates/plugins/bundled/example-bundled/hooks/pre.sh @@ -0,0 +1,2 @@ +#!/bin/sh +printf '%s\n' 'example bundled pre hook' diff --git a/rust/crates/plugins/bundled/sample-hooks/.claude-plugin/plugin.json b/rust/crates/plugins/bundled/sample-hooks/.claude-plugin/plugin.json new file mode 100644 index 0000000..555f5df --- /dev/null +++ b/rust/crates/plugins/bundled/sample-hooks/.claude-plugin/plugin.json @@ -0,0 +1,10 @@ +{ + "name": "sample-hooks", + "version": "0.1.0", + "description": "Bundled sample plugin scaffold for hook integration tests.", + "defaultEnabled": false, + "hooks": { + "PreToolUse": ["./hooks/pre.sh"], + "PostToolUse": ["./hooks/post.sh"] + } +} diff --git a/rust/crates/plugins/bundled/sample-hooks/hooks/post.sh b/rust/crates/plugins/bundled/sample-hooks/hooks/post.sh new file mode 100755 index 0000000..c968e6d --- /dev/null +++ b/rust/crates/plugins/bundled/sample-hooks/hooks/post.sh @@ -0,0 +1,2 @@ +#!/bin/sh +printf 'sample bundled post hook' diff --git a/rust/crates/plugins/bundled/sample-hooks/hooks/pre.sh b/rust/crates/plugins/bundled/sample-hooks/hooks/pre.sh new file mode 100755 index 0000000..9560881 --- /dev/null +++ b/rust/crates/plugins/bundled/sample-hooks/hooks/pre.sh @@ -0,0 +1,2 @@ +#!/bin/sh +printf 'sample bundled pre hook' diff --git a/rust/crates/plugins/src/hooks.rs b/rust/crates/plugins/src/hooks.rs new file mode 100644 index 
0000000..d473da8 --- /dev/null +++ b/rust/crates/plugins/src/hooks.rs @@ -0,0 +1,395 @@ +use std::ffi::OsStr; +use std::path::Path; +use std::process::Command; + +use serde_json::json; + +use crate::{PluginError, PluginHooks, PluginRegistry}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum HookEvent { + PreToolUse, + PostToolUse, +} + +impl HookEvent { + fn as_str(self) -> &'static str { + match self { + Self::PreToolUse => "PreToolUse", + Self::PostToolUse => "PostToolUse", + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct HookRunResult { + denied: bool, + messages: Vec, +} + +impl HookRunResult { + #[must_use] + pub fn allow(messages: Vec) -> Self { + Self { + denied: false, + messages, + } + } + + #[must_use] + pub fn is_denied(&self) -> bool { + self.denied + } + + #[must_use] + pub fn messages(&self) -> &[String] { + &self.messages + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Default)] +pub struct HookRunner { + hooks: PluginHooks, +} + +impl HookRunner { + #[must_use] + pub fn new(hooks: PluginHooks) -> Self { + Self { hooks } + } + + pub fn from_registry(plugin_registry: &PluginRegistry) -> Result { + Ok(Self::new(plugin_registry.aggregated_hooks()?)) + } + + #[must_use] + pub fn run_pre_tool_use(&self, tool_name: &str, tool_input: &str) -> HookRunResult { + self.run_commands( + HookEvent::PreToolUse, + &self.hooks.pre_tool_use, + tool_name, + tool_input, + None, + false, + ) + } + + #[must_use] + pub fn run_post_tool_use( + &self, + tool_name: &str, + tool_input: &str, + tool_output: &str, + is_error: bool, + ) -> HookRunResult { + self.run_commands( + HookEvent::PostToolUse, + &self.hooks.post_tool_use, + tool_name, + tool_input, + Some(tool_output), + is_error, + ) + } + + fn run_commands( + &self, + event: HookEvent, + commands: &[String], + tool_name: &str, + tool_input: &str, + tool_output: Option<&str>, + is_error: bool, + ) -> HookRunResult { + if commands.is_empty() { + return HookRunResult::allow(Vec::new()); + } + + let 
payload = json!({ + "hook_event_name": event.as_str(), + "tool_name": tool_name, + "tool_input": parse_tool_input(tool_input), + "tool_input_json": tool_input, + "tool_output": tool_output, + "tool_result_is_error": is_error, + }) + .to_string(); + + let mut messages = Vec::new(); + + for command in commands { + match self.run_command( + command, + event, + tool_name, + tool_input, + tool_output, + is_error, + &payload, + ) { + HookCommandOutcome::Allow { message } => { + if let Some(message) = message { + messages.push(message); + } + } + HookCommandOutcome::Deny { message } => { + messages.push(message.unwrap_or_else(|| { + format!("{} hook denied tool `{tool_name}`", event.as_str()) + })); + return HookRunResult { + denied: true, + messages, + }; + } + HookCommandOutcome::Warn { message } => messages.push(message), + } + } + + HookRunResult::allow(messages) + } + + #[allow(clippy::too_many_arguments, clippy::unused_self)] + fn run_command( + &self, + command: &str, + event: HookEvent, + tool_name: &str, + tool_input: &str, + tool_output: Option<&str>, + is_error: bool, + payload: &str, + ) -> HookCommandOutcome { + let mut child = shell_command(command); + child.stdin(std::process::Stdio::piped()); + child.stdout(std::process::Stdio::piped()); + child.stderr(std::process::Stdio::piped()); + child.env("HOOK_EVENT", event.as_str()); + child.env("HOOK_TOOL_NAME", tool_name); + child.env("HOOK_TOOL_INPUT", tool_input); + child.env("HOOK_TOOL_IS_ERROR", if is_error { "1" } else { "0" }); + if let Some(tool_output) = tool_output { + child.env("HOOK_TOOL_OUTPUT", tool_output); + } + + match child.output_with_stdin(payload.as_bytes()) { + Ok(output) => { + let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); + let message = (!stdout.is_empty()).then_some(stdout); + match output.status.code() { + Some(0) => HookCommandOutcome::Allow { message }, + Some(2) => 
HookCommandOutcome::Deny { message }, + Some(code) => HookCommandOutcome::Warn { + message: format_hook_warning( + command, + code, + message.as_deref(), + stderr.as_str(), + ), + }, + None => HookCommandOutcome::Warn { + message: format!( + "{} hook `{command}` terminated by signal while handling `{tool_name}`", + event.as_str() + ), + }, + } + } + Err(error) => HookCommandOutcome::Warn { + message: format!( + "{} hook `{command}` failed to start for `{tool_name}`: {error}", + event.as_str() + ), + }, + } + } +} + +enum HookCommandOutcome { + Allow { message: Option }, + Deny { message: Option }, + Warn { message: String }, +} + +fn parse_tool_input(tool_input: &str) -> serde_json::Value { + serde_json::from_str(tool_input).unwrap_or_else(|_| json!({ "raw": tool_input })) +} + +fn format_hook_warning(command: &str, code: i32, stdout: Option<&str>, stderr: &str) -> String { + let mut message = + format!("Hook `{command}` exited with status {code}; allowing tool execution to continue"); + if let Some(stdout) = stdout.filter(|stdout| !stdout.is_empty()) { + message.push_str(": "); + message.push_str(stdout); + } else if !stderr.is_empty() { + message.push_str(": "); + message.push_str(stderr); + } + message +} + +fn shell_command(command: &str) -> CommandWithStdin { + #[cfg(windows)] + let command_builder = { + let mut command_builder = Command::new("cmd"); + command_builder.arg("/C").arg(command); + CommandWithStdin::new(command_builder) + }; + + #[cfg(not(windows))] + let command_builder = if Path::new(command).exists() { + let mut command_builder = Command::new("sh"); + command_builder.arg(command); + CommandWithStdin::new(command_builder) + } else { + let mut command_builder = Command::new("sh"); + command_builder.arg("-lc").arg(command); + CommandWithStdin::new(command_builder) + }; + + command_builder +} + +struct CommandWithStdin { + command: Command, +} + +impl CommandWithStdin { + fn new(command: Command) -> Self { + Self { command } + } + + fn stdin(&mut 
self, cfg: std::process::Stdio) -> &mut Self { + self.command.stdin(cfg); + self + } + + fn stdout(&mut self, cfg: std::process::Stdio) -> &mut Self { + self.command.stdout(cfg); + self + } + + fn stderr(&mut self, cfg: std::process::Stdio) -> &mut Self { + self.command.stderr(cfg); + self + } + + fn env(&mut self, key: K, value: V) -> &mut Self + where + K: AsRef, + V: AsRef, + { + self.command.env(key, value); + self + } + + fn output_with_stdin(&mut self, stdin: &[u8]) -> std::io::Result { + let mut child = self.command.spawn()?; + if let Some(mut child_stdin) = child.stdin.take() { + use std::io::Write as _; + child_stdin.write_all(stdin)?; + } + child.wait_with_output() + } +} + +#[cfg(test)] +mod tests { + use super::{HookRunResult, HookRunner}; + use crate::{PluginManager, PluginManagerConfig}; + use std::fs; + use std::path::{Path, PathBuf}; + use std::time::{SystemTime, UNIX_EPOCH}; + + fn temp_dir(label: &str) -> PathBuf { + let nanos = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("time should be after epoch") + .as_nanos(); + std::env::temp_dir().join(format!("plugins-hook-runner-{label}-{nanos}")) + } + + fn write_hook_plugin(root: &Path, name: &str, pre_message: &str, post_message: &str) { + fs::create_dir_all(root.join(".claude-plugin")).expect("manifest dir"); + fs::create_dir_all(root.join("hooks")).expect("hooks dir"); + fs::write( + root.join("hooks").join("pre.sh"), + format!("#!/bin/sh\nprintf '%s\\n' '{pre_message}'\n"), + ) + .expect("write pre hook"); + fs::write( + root.join("hooks").join("post.sh"), + format!("#!/bin/sh\nprintf '%s\\n' '{post_message}'\n"), + ) + .expect("write post hook"); + fs::write( + root.join(".claude-plugin").join("plugin.json"), + format!( + "{{\n \"name\": \"{name}\",\n \"version\": \"1.0.0\",\n \"description\": \"hook plugin\",\n \"hooks\": {{\n \"PreToolUse\": [\"./hooks/pre.sh\"],\n \"PostToolUse\": [\"./hooks/post.sh\"]\n }}\n}}" + ), + ) + .expect("write plugin manifest"); + } + + #[test] + fn 
collects_and_runs_hooks_from_enabled_plugins() { + let config_home = temp_dir("config"); + let first_source_root = temp_dir("source-a"); + let second_source_root = temp_dir("source-b"); + write_hook_plugin( + &first_source_root, + "first", + "plugin pre one", + "plugin post one", + ); + write_hook_plugin( + &second_source_root, + "second", + "plugin pre two", + "plugin post two", + ); + + let mut manager = PluginManager::new(PluginManagerConfig::new(&config_home)); + manager + .install(first_source_root.to_str().expect("utf8 path")) + .expect("first plugin install should succeed"); + manager + .install(second_source_root.to_str().expect("utf8 path")) + .expect("second plugin install should succeed"); + let registry = manager.plugin_registry().expect("registry should build"); + + let runner = HookRunner::from_registry(®istry).expect("plugin hooks should load"); + + assert_eq!( + runner.run_pre_tool_use("Read", r#"{"path":"README.md"}"#), + HookRunResult::allow(vec![ + "plugin pre one".to_string(), + "plugin pre two".to_string(), + ]) + ); + assert_eq!( + runner.run_post_tool_use("Read", r#"{"path":"README.md"}"#, "ok", false), + HookRunResult::allow(vec![ + "plugin post one".to_string(), + "plugin post two".to_string(), + ]) + ); + + let _ = fs::remove_dir_all(config_home); + let _ = fs::remove_dir_all(first_source_root); + let _ = fs::remove_dir_all(second_source_root); + } + + #[test] + fn pre_tool_use_denies_when_plugin_hook_exits_two() { + let runner = HookRunner::new(crate::PluginHooks { + pre_tool_use: vec!["printf 'blocked by plugin'; exit 2".to_string()], + post_tool_use: Vec::new(), + }); + + let result = runner.run_pre_tool_use("Bash", r#"{"command":"pwd"}"#); + + assert!(result.is_denied()); + assert_eq!(result.messages(), &["blocked by plugin".to_string()]); + } +} diff --git a/rust/crates/plugins/src/lib.rs b/rust/crates/plugins/src/lib.rs new file mode 100644 index 0000000..e790d5f --- /dev/null +++ b/rust/crates/plugins/src/lib.rs @@ -0,0 +1,2935 @@ 
+mod hooks; + +use std::collections::{BTreeMap, BTreeSet}; +use std::fmt::{Display, Formatter}; +use std::fs; +use std::path::{Path, PathBuf}; +use std::process::{Command, Stdio}; +use std::time::{SystemTime, UNIX_EPOCH}; + +use serde::{Deserialize, Serialize}; +use serde_json::{Map, Value}; + +pub use hooks::{HookEvent, HookRunResult, HookRunner}; + +const EXTERNAL_MARKETPLACE: &str = "external"; +const BUILTIN_MARKETPLACE: &str = "builtin"; +const BUNDLED_MARKETPLACE: &str = "bundled"; +const SETTINGS_FILE_NAME: &str = "settings.json"; +const REGISTRY_FILE_NAME: &str = "installed.json"; +const MANIFEST_FILE_NAME: &str = "plugin.json"; +const MANIFEST_RELATIVE_PATH: &str = ".claude-plugin/plugin.json"; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum PluginKind { + Builtin, + Bundled, + External, +} + +impl Display for PluginKind { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Self::Builtin => write!(f, "builtin"), + Self::Bundled => write!(f, "bundled"), + Self::External => write!(f, "external"), + } + } +} + +impl PluginKind { + #[must_use] + fn marketplace(self) -> &'static str { + match self { + Self::Builtin => BUILTIN_MARKETPLACE, + Self::Bundled => BUNDLED_MARKETPLACE, + Self::External => EXTERNAL_MARKETPLACE, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PluginMetadata { + pub id: String, + pub name: String, + pub version: String, + pub description: String, + pub kind: PluginKind, + pub source: String, + pub default_enabled: bool, + pub root: Option, +} + +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] +pub struct PluginHooks { + #[serde(rename = "PreToolUse", default)] + pub pre_tool_use: Vec, + #[serde(rename = "PostToolUse", default)] + pub post_tool_use: Vec, +} + +impl PluginHooks { + #[must_use] + pub fn is_empty(&self) -> bool { + self.pre_tool_use.is_empty() && self.post_tool_use.is_empty() + } + + 
#[must_use] + pub fn merged_with(&self, other: &Self) -> Self { + let mut merged = self.clone(); + merged + .pre_tool_use + .extend(other.pre_tool_use.iter().cloned()); + merged + .post_tool_use + .extend(other.post_tool_use.iter().cloned()); + merged + } +} + +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] +pub struct PluginLifecycle { + #[serde(rename = "Init", default)] + pub init: Vec, + #[serde(rename = "Shutdown", default)] + pub shutdown: Vec, +} + +impl PluginLifecycle { + #[must_use] + pub fn is_empty(&self) -> bool { + self.init.is_empty() && self.shutdown.is_empty() + } +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PluginManifest { + pub name: String, + pub version: String, + pub description: String, + pub permissions: Vec, + #[serde(rename = "defaultEnabled", default)] + pub default_enabled: bool, + #[serde(default)] + pub hooks: PluginHooks, + #[serde(default)] + pub lifecycle: PluginLifecycle, + #[serde(default)] + pub tools: Vec, + #[serde(default)] + pub commands: Vec, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum PluginPermission { + Read, + Write, + Execute, +} + +impl PluginPermission { + #[must_use] + pub fn as_str(self) -> &'static str { + match self { + Self::Read => "read", + Self::Write => "write", + Self::Execute => "execute", + } + } + + fn parse(value: &str) -> Option { + match value { + "read" => Some(Self::Read), + "write" => Some(Self::Write), + "execute" => Some(Self::Execute), + _ => None, + } + } +} + +impl AsRef for PluginPermission { + fn as_ref(&self) -> &str { + self.as_str() + } +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PluginToolManifest { + pub name: String, + pub description: String, + #[serde(rename = "inputSchema")] + pub input_schema: Value, + pub command: String, + #[serde(default)] + pub args: Vec, + pub required_permission: 
PluginToolPermission, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum PluginToolPermission { + ReadOnly, + WorkspaceWrite, + DangerFullAccess, +} + +impl PluginToolPermission { + #[must_use] + pub fn as_str(self) -> &'static str { + match self { + Self::ReadOnly => "read-only", + Self::WorkspaceWrite => "workspace-write", + Self::DangerFullAccess => "danger-full-access", + } + } + + fn parse(value: &str) -> Option { + match value { + "read-only" => Some(Self::ReadOnly), + "workspace-write" => Some(Self::WorkspaceWrite), + "danger-full-access" => Some(Self::DangerFullAccess), + _ => None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PluginToolDefinition { + pub name: String, + #[serde(default)] + pub description: Option, + #[serde(rename = "inputSchema")] + pub input_schema: Value, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct PluginCommandManifest { + pub name: String, + pub description: String, + pub command: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +struct RawPluginManifest { + pub name: String, + pub version: String, + pub description: String, + #[serde(default)] + pub permissions: Vec, + #[serde(rename = "defaultEnabled", default)] + pub default_enabled: bool, + #[serde(default)] + pub hooks: PluginHooks, + #[serde(default)] + pub lifecycle: PluginLifecycle, + #[serde(default)] + pub tools: Vec, + #[serde(default)] + pub commands: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +struct RawPluginToolManifest { + pub name: String, + pub description: String, + #[serde(rename = "inputSchema")] + pub input_schema: Value, + pub command: String, + #[serde(default)] + pub args: Vec, + #[serde( + rename = "requiredPermission", + default = "default_tool_permission_label" + )] + pub required_permission: String, +} + +#[derive(Debug, Clone, 
PartialEq)] +pub struct PluginTool { + plugin_id: String, + plugin_name: String, + definition: PluginToolDefinition, + command: String, + args: Vec, + required_permission: PluginToolPermission, + root: Option, +} + +impl PluginTool { + #[must_use] + pub fn new( + plugin_id: impl Into, + plugin_name: impl Into, + definition: PluginToolDefinition, + command: impl Into, + args: Vec, + required_permission: PluginToolPermission, + root: Option, + ) -> Self { + Self { + plugin_id: plugin_id.into(), + plugin_name: plugin_name.into(), + definition, + command: command.into(), + args, + required_permission, + root, + } + } + + #[must_use] + pub fn plugin_id(&self) -> &str { + &self.plugin_id + } + + #[must_use] + pub fn definition(&self) -> &PluginToolDefinition { + &self.definition + } + + #[must_use] + pub fn required_permission(&self) -> &str { + self.required_permission.as_str() + } + + pub fn execute(&self, input: &Value) -> Result { + let input_json = input.to_string(); + let mut process = Command::new(&self.command); + process + .args(&self.args) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .env("CLAWD_PLUGIN_ID", &self.plugin_id) + .env("CLAWD_PLUGIN_NAME", &self.plugin_name) + .env("CLAWD_TOOL_NAME", &self.definition.name) + .env("CLAWD_TOOL_INPUT", &input_json); + if let Some(root) = &self.root { + process + .current_dir(root) + .env("CLAWD_PLUGIN_ROOT", root.display().to_string()); + } + + let mut child = process.spawn()?; + if let Some(stdin) = child.stdin.as_mut() { + use std::io::Write as _; + stdin.write_all(input_json.as_bytes())?; + } + + let output = child.wait_with_output()?; + if output.status.success() { + Ok(String::from_utf8_lossy(&output.stdout).trim().to_string()) + } else { + let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); + Err(PluginError::CommandFailed(format!( + "plugin tool `{}` from `{}` failed for `{}`: {}", + self.definition.name, + self.plugin_id, + self.command, + if 
stderr.is_empty() { + format!("exit status {}", output.status) + } else { + stderr + } + ))) + } + } +} + +fn default_tool_permission_label() -> String { + "danger-full-access".to_string() +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum PluginInstallSource { + LocalPath { path: PathBuf }, + GitUrl { url: String }, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct InstalledPluginRecord { + #[serde(default = "default_plugin_kind")] + pub kind: PluginKind, + pub id: String, + pub name: String, + pub version: String, + pub description: String, + pub install_path: PathBuf, + pub source: PluginInstallSource, + pub installed_at_unix_ms: u128, + pub updated_at_unix_ms: u128, +} + +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] +pub struct InstalledPluginRegistry { + #[serde(default)] + pub plugins: BTreeMap, +} + +fn default_plugin_kind() -> PluginKind { + PluginKind::External +} + +#[derive(Debug, Clone, PartialEq)] +pub struct BuiltinPlugin { + metadata: PluginMetadata, + hooks: PluginHooks, + lifecycle: PluginLifecycle, + tools: Vec, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct BundledPlugin { + metadata: PluginMetadata, + hooks: PluginHooks, + lifecycle: PluginLifecycle, + tools: Vec, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct ExternalPlugin { + metadata: PluginMetadata, + hooks: PluginHooks, + lifecycle: PluginLifecycle, + tools: Vec, +} + +pub trait Plugin { + fn metadata(&self) -> &PluginMetadata; + fn hooks(&self) -> &PluginHooks; + fn lifecycle(&self) -> &PluginLifecycle; + fn tools(&self) -> &[PluginTool]; + fn validate(&self) -> Result<(), PluginError>; + fn initialize(&self) -> Result<(), PluginError>; + fn shutdown(&self) -> Result<(), PluginError>; +} + +#[derive(Debug, Clone, PartialEq)] +pub enum PluginDefinition { + Builtin(BuiltinPlugin), + Bundled(BundledPlugin), + External(ExternalPlugin), +} + +impl 
Plugin for BuiltinPlugin { + fn metadata(&self) -> &PluginMetadata { + &self.metadata + } + + fn hooks(&self) -> &PluginHooks { + &self.hooks + } + + fn lifecycle(&self) -> &PluginLifecycle { + &self.lifecycle + } + + fn tools(&self) -> &[PluginTool] { + &self.tools + } + + fn validate(&self) -> Result<(), PluginError> { + Ok(()) + } + + fn initialize(&self) -> Result<(), PluginError> { + Ok(()) + } + + fn shutdown(&self) -> Result<(), PluginError> { + Ok(()) + } +} + +impl Plugin for BundledPlugin { + fn metadata(&self) -> &PluginMetadata { + &self.metadata + } + + fn hooks(&self) -> &PluginHooks { + &self.hooks + } + + fn lifecycle(&self) -> &PluginLifecycle { + &self.lifecycle + } + + fn tools(&self) -> &[PluginTool] { + &self.tools + } + + fn validate(&self) -> Result<(), PluginError> { + validate_hook_paths(self.metadata.root.as_deref(), &self.hooks)?; + validate_lifecycle_paths(self.metadata.root.as_deref(), &self.lifecycle)?; + validate_tool_paths(self.metadata.root.as_deref(), &self.tools) + } + + fn initialize(&self) -> Result<(), PluginError> { + run_lifecycle_commands( + self.metadata(), + self.lifecycle(), + "init", + &self.lifecycle.init, + ) + } + + fn shutdown(&self) -> Result<(), PluginError> { + run_lifecycle_commands( + self.metadata(), + self.lifecycle(), + "shutdown", + &self.lifecycle.shutdown, + ) + } +} + +impl Plugin for ExternalPlugin { + fn metadata(&self) -> &PluginMetadata { + &self.metadata + } + + fn hooks(&self) -> &PluginHooks { + &self.hooks + } + + fn lifecycle(&self) -> &PluginLifecycle { + &self.lifecycle + } + + fn tools(&self) -> &[PluginTool] { + &self.tools + } + + fn validate(&self) -> Result<(), PluginError> { + validate_hook_paths(self.metadata.root.as_deref(), &self.hooks)?; + validate_lifecycle_paths(self.metadata.root.as_deref(), &self.lifecycle)?; + validate_tool_paths(self.metadata.root.as_deref(), &self.tools) + } + + fn initialize(&self) -> Result<(), PluginError> { + run_lifecycle_commands( + self.metadata(), + 
self.lifecycle(), + "init", + &self.lifecycle.init, + ) + } + + fn shutdown(&self) -> Result<(), PluginError> { + run_lifecycle_commands( + self.metadata(), + self.lifecycle(), + "shutdown", + &self.lifecycle.shutdown, + ) + } +} + +impl Plugin for PluginDefinition { + fn metadata(&self) -> &PluginMetadata { + match self { + Self::Builtin(plugin) => plugin.metadata(), + Self::Bundled(plugin) => plugin.metadata(), + Self::External(plugin) => plugin.metadata(), + } + } + + fn hooks(&self) -> &PluginHooks { + match self { + Self::Builtin(plugin) => plugin.hooks(), + Self::Bundled(plugin) => plugin.hooks(), + Self::External(plugin) => plugin.hooks(), + } + } + + fn lifecycle(&self) -> &PluginLifecycle { + match self { + Self::Builtin(plugin) => plugin.lifecycle(), + Self::Bundled(plugin) => plugin.lifecycle(), + Self::External(plugin) => plugin.lifecycle(), + } + } + + fn tools(&self) -> &[PluginTool] { + match self { + Self::Builtin(plugin) => plugin.tools(), + Self::Bundled(plugin) => plugin.tools(), + Self::External(plugin) => plugin.tools(), + } + } + + fn validate(&self) -> Result<(), PluginError> { + match self { + Self::Builtin(plugin) => plugin.validate(), + Self::Bundled(plugin) => plugin.validate(), + Self::External(plugin) => plugin.validate(), + } + } + + fn initialize(&self) -> Result<(), PluginError> { + match self { + Self::Builtin(plugin) => plugin.initialize(), + Self::Bundled(plugin) => plugin.initialize(), + Self::External(plugin) => plugin.initialize(), + } + } + + fn shutdown(&self) -> Result<(), PluginError> { + match self { + Self::Builtin(plugin) => plugin.shutdown(), + Self::Bundled(plugin) => plugin.shutdown(), + Self::External(plugin) => plugin.shutdown(), + } + } +} + +#[derive(Debug, Clone, PartialEq)] +pub struct RegisteredPlugin { + definition: PluginDefinition, + enabled: bool, +} + +impl RegisteredPlugin { + #[must_use] + pub fn new(definition: PluginDefinition, enabled: bool) -> Self { + Self { + definition, + enabled, + } + } + + 
#[must_use] + pub fn metadata(&self) -> &PluginMetadata { + self.definition.metadata() + } + + #[must_use] + pub fn hooks(&self) -> &PluginHooks { + self.definition.hooks() + } + + #[must_use] + pub fn tools(&self) -> &[PluginTool] { + self.definition.tools() + } + + #[must_use] + pub fn is_enabled(&self) -> bool { + self.enabled + } + + pub fn validate(&self) -> Result<(), PluginError> { + self.definition.validate() + } + + pub fn initialize(&self) -> Result<(), PluginError> { + self.definition.initialize() + } + + pub fn shutdown(&self) -> Result<(), PluginError> { + self.definition.shutdown() + } + + #[must_use] + pub fn summary(&self) -> PluginSummary { + PluginSummary { + metadata: self.metadata().clone(), + enabled: self.enabled, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PluginSummary { + pub metadata: PluginMetadata, + pub enabled: bool, +} + +#[derive(Debug, Clone, Default, PartialEq)] +pub struct PluginRegistry { + plugins: Vec, +} + +impl PluginRegistry { + #[must_use] + pub fn new(mut plugins: Vec) -> Self { + plugins.sort_by(|left, right| left.metadata().id.cmp(&right.metadata().id)); + Self { plugins } + } + + #[must_use] + pub fn plugins(&self) -> &[RegisteredPlugin] { + &self.plugins + } + + #[must_use] + pub fn get(&self, plugin_id: &str) -> Option<&RegisteredPlugin> { + self.plugins + .iter() + .find(|plugin| plugin.metadata().id == plugin_id) + } + + #[must_use] + pub fn contains(&self, plugin_id: &str) -> bool { + self.get(plugin_id).is_some() + } + + #[must_use] + pub fn summaries(&self) -> Vec { + self.plugins.iter().map(RegisteredPlugin::summary).collect() + } + + pub fn aggregated_hooks(&self) -> Result { + self.plugins + .iter() + .filter(|plugin| plugin.is_enabled()) + .try_fold(PluginHooks::default(), |acc, plugin| { + plugin.validate()?; + Ok(acc.merged_with(plugin.hooks())) + }) + } + + pub fn aggregated_tools(&self) -> Result, PluginError> { + let mut tools = Vec::new(); + let mut seen_names = BTreeMap::new(); + 
for plugin in self.plugins.iter().filter(|plugin| plugin.is_enabled()) { + plugin.validate()?; + for tool in plugin.tools() { + if let Some(existing_plugin) = + seen_names.insert(tool.definition().name.clone(), tool.plugin_id().to_string()) + { + return Err(PluginError::InvalidManifest(format!( + "plugin tool `{}` is defined by both `{existing_plugin}` and `{}`", + tool.definition().name, + tool.plugin_id() + ))); + } + tools.push(tool.clone()); + } + } + Ok(tools) + } + + pub fn initialize(&self) -> Result<(), PluginError> { + for plugin in self.plugins.iter().filter(|plugin| plugin.is_enabled()) { + plugin.validate()?; + plugin.initialize()?; + } + Ok(()) + } + + pub fn shutdown(&self) -> Result<(), PluginError> { + for plugin in self + .plugins + .iter() + .rev() + .filter(|plugin| plugin.is_enabled()) + { + plugin.shutdown()?; + } + Ok(()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PluginManagerConfig { + pub config_home: PathBuf, + pub enabled_plugins: BTreeMap, + pub external_dirs: Vec, + pub install_root: Option, + pub registry_path: Option, + pub bundled_root: Option, +} + +impl PluginManagerConfig { + #[must_use] + pub fn new(config_home: impl Into) -> Self { + Self { + config_home: config_home.into(), + enabled_plugins: BTreeMap::new(), + external_dirs: Vec::new(), + install_root: None, + registry_path: None, + bundled_root: None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PluginManager { + config: PluginManagerConfig, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct InstallOutcome { + pub plugin_id: String, + pub version: String, + pub install_path: PathBuf, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct UpdateOutcome { + pub plugin_id: String, + pub old_version: String, + pub new_version: String, + pub install_path: PathBuf, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PluginManifestValidationError { + EmptyField { + field: &'static str, + }, + EmptyEntryField { + kind: &'static str, + 
field: &'static str, + name: Option, + }, + InvalidPermission { + permission: String, + }, + DuplicatePermission { + permission: String, + }, + DuplicateEntry { + kind: &'static str, + name: String, + }, + MissingPath { + kind: &'static str, + path: PathBuf, + }, + InvalidToolInputSchema { + tool_name: String, + }, + InvalidToolRequiredPermission { + tool_name: String, + permission: String, + }, +} + +impl Display for PluginManifestValidationError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Self::EmptyField { field } => { + write!(f, "plugin manifest {field} cannot be empty") + } + Self::EmptyEntryField { kind, field, name } => match name { + Some(name) if !name.is_empty() => { + write!(f, "plugin {kind} `{name}` {field} cannot be empty") + } + _ => write!(f, "plugin {kind} {field} cannot be empty"), + }, + Self::InvalidPermission { permission } => { + write!( + f, + "plugin manifest permission `{permission}` must be one of read, write, or execute" + ) + } + Self::DuplicatePermission { permission } => { + write!(f, "plugin manifest permission `{permission}` is duplicated") + } + Self::DuplicateEntry { kind, name } => { + write!(f, "plugin {kind} `{name}` is duplicated") + } + Self::MissingPath { kind, path } => { + write!(f, "{kind} path `{}` does not exist", path.display()) + } + Self::InvalidToolInputSchema { tool_name } => { + write!( + f, + "plugin tool `{tool_name}` inputSchema must be a JSON object" + ) + } + Self::InvalidToolRequiredPermission { + tool_name, + permission, + } => write!( + f, + "plugin tool `{tool_name}` requiredPermission `{permission}` must be read-only, workspace-write, or danger-full-access" + ), + } + } +} + +#[derive(Debug)] +pub enum PluginError { + Io(std::io::Error), + Json(serde_json::Error), + ManifestValidation(Vec), + InvalidManifest(String), + NotFound(String), + CommandFailed(String), +} + +impl Display for PluginError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match 
self { + Self::Io(error) => write!(f, "{error}"), + Self::Json(error) => write!(f, "{error}"), + Self::ManifestValidation(errors) => { + for (index, error) in errors.iter().enumerate() { + if index > 0 { + write!(f, "; ")?; + } + write!(f, "{error}")?; + } + Ok(()) + } + Self::InvalidManifest(message) + | Self::NotFound(message) + | Self::CommandFailed(message) => write!(f, "{message}"), + } + } +} + +impl std::error::Error for PluginError {} + +impl From for PluginError { + fn from(value: std::io::Error) -> Self { + Self::Io(value) + } +} + +impl From for PluginError { + fn from(value: serde_json::Error) -> Self { + Self::Json(value) + } +} + +impl PluginManager { + #[must_use] + pub fn new(config: PluginManagerConfig) -> Self { + Self { config } + } + + #[must_use] + pub fn bundled_root() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("bundled") + } + + #[must_use] + pub fn install_root(&self) -> PathBuf { + self.config + .install_root + .clone() + .unwrap_or_else(|| self.config.config_home.join("plugins").join("installed")) + } + + #[must_use] + pub fn registry_path(&self) -> PathBuf { + self.config.registry_path.clone().unwrap_or_else(|| { + self.config + .config_home + .join("plugins") + .join(REGISTRY_FILE_NAME) + }) + } + + #[must_use] + pub fn settings_path(&self) -> PathBuf { + self.config.config_home.join(SETTINGS_FILE_NAME) + } + + pub fn plugin_registry(&self) -> Result { + Ok(PluginRegistry::new( + self.discover_plugins()? 
+ .into_iter() + .map(|plugin| { + let enabled = self.is_enabled(plugin.metadata()); + RegisteredPlugin::new(plugin, enabled) + }) + .collect(), + )) + } + + pub fn list_plugins(&self) -> Result, PluginError> { + Ok(self.plugin_registry()?.summaries()) + } + + pub fn list_installed_plugins(&self) -> Result, PluginError> { + Ok(self.installed_plugin_registry()?.summaries()) + } + + pub fn discover_plugins(&self) -> Result, PluginError> { + self.sync_bundled_plugins()?; + let mut plugins = builtin_plugins(); + plugins.extend(self.discover_installed_plugins()?); + plugins.extend(self.discover_external_directory_plugins(&plugins)?); + Ok(plugins) + } + + pub fn aggregated_hooks(&self) -> Result { + self.plugin_registry()?.aggregated_hooks() + } + + pub fn aggregated_tools(&self) -> Result, PluginError> { + self.plugin_registry()?.aggregated_tools() + } + + pub fn validate_plugin_source(&self, source: &str) -> Result { + let path = resolve_local_source(source)?; + load_plugin_from_directory(&path) + } + + pub fn install(&mut self, source: &str) -> Result { + let install_source = parse_install_source(source)?; + let temp_root = self.install_root().join(".tmp"); + let staged_source = materialize_source(&install_source, &temp_root)?; + let cleanup_source = matches!(install_source, PluginInstallSource::GitUrl { .. 
}); + let manifest = load_plugin_from_directory(&staged_source)?; + + let plugin_id = plugin_id(&manifest.name, EXTERNAL_MARKETPLACE); + let install_path = self.install_root().join(sanitize_plugin_id(&plugin_id)); + if install_path.exists() { + fs::remove_dir_all(&install_path)?; + } + copy_dir_all(&staged_source, &install_path)?; + if cleanup_source { + let _ = fs::remove_dir_all(&staged_source); + } + + let now = unix_time_ms(); + let record = InstalledPluginRecord { + kind: PluginKind::External, + id: plugin_id.clone(), + name: manifest.name, + version: manifest.version.clone(), + description: manifest.description, + install_path: install_path.clone(), + source: install_source, + installed_at_unix_ms: now, + updated_at_unix_ms: now, + }; + + let mut registry = self.load_registry()?; + registry.plugins.insert(plugin_id.clone(), record); + self.store_registry(®istry)?; + self.write_enabled_state(&plugin_id, Some(true))?; + self.config.enabled_plugins.insert(plugin_id.clone(), true); + + Ok(InstallOutcome { + plugin_id, + version: manifest.version, + install_path, + }) + } + + pub fn enable(&mut self, plugin_id: &str) -> Result<(), PluginError> { + self.ensure_known_plugin(plugin_id)?; + self.write_enabled_state(plugin_id, Some(true))?; + self.config + .enabled_plugins + .insert(plugin_id.to_string(), true); + Ok(()) + } + + pub fn disable(&mut self, plugin_id: &str) -> Result<(), PluginError> { + self.ensure_known_plugin(plugin_id)?; + self.write_enabled_state(plugin_id, Some(false))?; + self.config + .enabled_plugins + .insert(plugin_id.to_string(), false); + Ok(()) + } + + pub fn uninstall(&mut self, plugin_id: &str) -> Result<(), PluginError> { + let mut registry = self.load_registry()?; + let record = registry.plugins.remove(plugin_id).ok_or_else(|| { + PluginError::NotFound(format!("plugin `{plugin_id}` is not installed")) + })?; + if record.kind == PluginKind::Bundled { + registry.plugins.insert(plugin_id.to_string(), record); + return 
Err(PluginError::CommandFailed(format!( + "plugin `{plugin_id}` is bundled and managed automatically; disable it instead" + ))); + } + if record.install_path.exists() { + fs::remove_dir_all(&record.install_path)?; + } + self.store_registry(®istry)?; + self.write_enabled_state(plugin_id, None)?; + self.config.enabled_plugins.remove(plugin_id); + Ok(()) + } + + pub fn update(&mut self, plugin_id: &str) -> Result { + let mut registry = self.load_registry()?; + let record = registry.plugins.get(plugin_id).cloned().ok_or_else(|| { + PluginError::NotFound(format!("plugin `{plugin_id}` is not installed")) + })?; + + let temp_root = self.install_root().join(".tmp"); + let staged_source = materialize_source(&record.source, &temp_root)?; + let cleanup_source = matches!(record.source, PluginInstallSource::GitUrl { .. }); + let manifest = load_plugin_from_directory(&staged_source)?; + + if record.install_path.exists() { + fs::remove_dir_all(&record.install_path)?; + } + copy_dir_all(&staged_source, &record.install_path)?; + if cleanup_source { + let _ = fs::remove_dir_all(&staged_source); + } + + let updated_record = InstalledPluginRecord { + version: manifest.version.clone(), + description: manifest.description, + updated_at_unix_ms: unix_time_ms(), + ..record.clone() + }; + registry + .plugins + .insert(plugin_id.to_string(), updated_record); + self.store_registry(®istry)?; + + Ok(UpdateOutcome { + plugin_id: plugin_id.to_string(), + old_version: record.version, + new_version: manifest.version, + install_path: record.install_path, + }) + } + + fn discover_installed_plugins(&self) -> Result, PluginError> { + let mut registry = self.load_registry()?; + let mut plugins = Vec::new(); + let mut seen_ids = BTreeSet::::new(); + let mut seen_paths = BTreeSet::::new(); + let mut stale_registry_ids = Vec::new(); + + for install_path in discover_plugin_dirs(&self.install_root())? 
{ + let matched_record = registry + .plugins + .values() + .find(|record| record.install_path == install_path); + let kind = matched_record.map_or(PluginKind::External, |record| record.kind); + let source = matched_record.map_or_else( + || install_path.display().to_string(), + |record| describe_install_source(&record.source), + ); + let plugin = load_plugin_definition(&install_path, kind, source, kind.marketplace())?; + if seen_ids.insert(plugin.metadata().id.clone()) { + seen_paths.insert(install_path); + plugins.push(plugin); + } + } + + for record in registry.plugins.values() { + if seen_paths.contains(&record.install_path) { + continue; + } + if !record.install_path.exists() || plugin_manifest_path(&record.install_path).is_err() + { + stale_registry_ids.push(record.id.clone()); + continue; + } + let plugin = load_plugin_definition( + &record.install_path, + record.kind, + describe_install_source(&record.source), + record.kind.marketplace(), + )?; + if seen_ids.insert(plugin.metadata().id.clone()) { + seen_paths.insert(record.install_path.clone()); + plugins.push(plugin); + } + } + + if !stale_registry_ids.is_empty() { + for plugin_id in stale_registry_ids { + registry.plugins.remove(&plugin_id); + } + self.store_registry(®istry)?; + } + + Ok(plugins) + } + + fn discover_external_directory_plugins( + &self, + existing_plugins: &[PluginDefinition], + ) -> Result, PluginError> { + let mut plugins = Vec::new(); + + for directory in &self.config.external_dirs { + for root in discover_plugin_dirs(directory)? { + let plugin = load_plugin_definition( + &root, + PluginKind::External, + root.display().to_string(), + EXTERNAL_MARKETPLACE, + )?; + if existing_plugins + .iter() + .chain(plugins.iter()) + .all(|existing| existing.metadata().id != plugin.metadata().id) + { + plugins.push(plugin); + } + } + } + + Ok(plugins) + } + + fn installed_plugin_registry(&self) -> Result { + self.sync_bundled_plugins()?; + Ok(PluginRegistry::new( + self.discover_installed_plugins()? 
+ .into_iter() + .map(|plugin| { + let enabled = self.is_enabled(plugin.metadata()); + RegisteredPlugin::new(plugin, enabled) + }) + .collect(), + )) + } + + fn sync_bundled_plugins(&self) -> Result<(), PluginError> { + let bundled_root = self + .config + .bundled_root + .clone() + .unwrap_or_else(Self::bundled_root); + let bundled_plugins = discover_plugin_dirs(&bundled_root)?; + let mut registry = self.load_registry()?; + let mut changed = false; + let install_root = self.install_root(); + let mut active_bundled_ids = BTreeSet::new(); + + for source_root in bundled_plugins { + let manifest = load_plugin_from_directory(&source_root)?; + let plugin_id = plugin_id(&manifest.name, BUNDLED_MARKETPLACE); + active_bundled_ids.insert(plugin_id.clone()); + let install_path = install_root.join(sanitize_plugin_id(&plugin_id)); + let now = unix_time_ms(); + let existing_record = registry.plugins.get(&plugin_id); + let needs_sync = existing_record.is_none_or(|record| { + record.kind != PluginKind::Bundled + || record.version != manifest.version + || record.name != manifest.name + || record.description != manifest.description + || record.install_path != install_path + || !record.install_path.exists() + }); + + if !needs_sync { + continue; + } + + if install_path.exists() { + fs::remove_dir_all(&install_path)?; + } + copy_dir_all(&source_root, &install_path)?; + + let installed_at_unix_ms = + existing_record.map_or(now, |record| record.installed_at_unix_ms); + registry.plugins.insert( + plugin_id.clone(), + InstalledPluginRecord { + kind: PluginKind::Bundled, + id: plugin_id, + name: manifest.name, + version: manifest.version, + description: manifest.description, + install_path, + source: PluginInstallSource::LocalPath { path: source_root }, + installed_at_unix_ms, + updated_at_unix_ms: now, + }, + ); + changed = true; + } + + let stale_bundled_ids = registry + .plugins + .iter() + .filter_map(|(plugin_id, record)| { + (record.kind == PluginKind::Bundled && 
!active_bundled_ids.contains(plugin_id)) + .then_some(plugin_id.clone()) + }) + .collect::>(); + + for plugin_id in stale_bundled_ids { + if let Some(record) = registry.plugins.remove(&plugin_id) { + if record.install_path.exists() { + fs::remove_dir_all(&record.install_path)?; + } + changed = true; + } + } + + if changed { + self.store_registry(®istry)?; + } + + Ok(()) + } + + fn is_enabled(&self, metadata: &PluginMetadata) -> bool { + self.config + .enabled_plugins + .get(&metadata.id) + .copied() + .unwrap_or(match metadata.kind { + PluginKind::External => false, + PluginKind::Builtin | PluginKind::Bundled => metadata.default_enabled, + }) + } + + fn ensure_known_plugin(&self, plugin_id: &str) -> Result<(), PluginError> { + if self.plugin_registry()?.contains(plugin_id) { + Ok(()) + } else { + Err(PluginError::NotFound(format!( + "plugin `{plugin_id}` is not installed or discoverable" + ))) + } + } + + fn load_registry(&self) -> Result { + let path = self.registry_path(); + match fs::read_to_string(&path) { + Ok(contents) => Ok(serde_json::from_str(&contents)?), + Err(error) if error.kind() == std::io::ErrorKind::NotFound => { + Ok(InstalledPluginRegistry::default()) + } + Err(error) => Err(PluginError::Io(error)), + } + } + + fn store_registry(&self, registry: &InstalledPluginRegistry) -> Result<(), PluginError> { + let path = self.registry_path(); + if let Some(parent) = path.parent() { + fs::create_dir_all(parent)?; + } + fs::write(path, serde_json::to_string_pretty(registry)?)?; + Ok(()) + } + + fn write_enabled_state( + &self, + plugin_id: &str, + enabled: Option, + ) -> Result<(), PluginError> { + update_settings_json(&self.settings_path(), |root| { + let enabled_plugins = ensure_object(root, "enabledPlugins"); + match enabled { + Some(value) => { + enabled_plugins.insert(plugin_id.to_string(), Value::Bool(value)); + } + None => { + enabled_plugins.remove(plugin_id); + } + } + }) + } +} + +#[must_use] +pub fn builtin_plugins() -> Vec { + 
vec![PluginDefinition::Builtin(BuiltinPlugin { + metadata: PluginMetadata { + id: plugin_id("example-builtin", BUILTIN_MARKETPLACE), + name: "example-builtin".to_string(), + version: "0.1.0".to_string(), + description: "Example built-in plugin scaffold for the Rust plugin system".to_string(), + kind: PluginKind::Builtin, + source: BUILTIN_MARKETPLACE.to_string(), + default_enabled: false, + root: None, + }, + hooks: PluginHooks::default(), + lifecycle: PluginLifecycle::default(), + tools: Vec::new(), + })] +} + +fn load_plugin_definition( + root: &Path, + kind: PluginKind, + source: String, + marketplace: &str, +) -> Result { + let manifest = load_plugin_from_directory(root)?; + let metadata = PluginMetadata { + id: plugin_id(&manifest.name, marketplace), + name: manifest.name, + version: manifest.version, + description: manifest.description, + kind, + source, + default_enabled: manifest.default_enabled, + root: Some(root.to_path_buf()), + }; + let hooks = resolve_hooks(root, &manifest.hooks); + let lifecycle = resolve_lifecycle(root, &manifest.lifecycle); + let tools = resolve_tools(root, &metadata.id, &metadata.name, &manifest.tools); + Ok(match kind { + PluginKind::Builtin => PluginDefinition::Builtin(BuiltinPlugin { + metadata, + hooks, + lifecycle, + tools, + }), + PluginKind::Bundled => PluginDefinition::Bundled(BundledPlugin { + metadata, + hooks, + lifecycle, + tools, + }), + PluginKind::External => PluginDefinition::External(ExternalPlugin { + metadata, + hooks, + lifecycle, + tools, + }), + }) +} + +pub fn load_plugin_from_directory(root: &Path) -> Result { + load_manifest_from_directory(root) +} + +fn load_manifest_from_directory(root: &Path) -> Result { + let manifest_path = plugin_manifest_path(root)?; + load_manifest_from_path(root, &manifest_path) +} + +fn load_manifest_from_path( + root: &Path, + manifest_path: &Path, +) -> Result { + let contents = fs::read_to_string(manifest_path).map_err(|error| { + PluginError::NotFound(format!( + "plugin 
manifest not found at {}: {error}", + manifest_path.display() + )) + })?; + let raw_manifest: RawPluginManifest = serde_json::from_str(&contents)?; + build_plugin_manifest(root, raw_manifest) +} + +fn plugin_manifest_path(root: &Path) -> Result { + let direct_path = root.join(MANIFEST_FILE_NAME); + if direct_path.exists() { + return Ok(direct_path); + } + + let packaged_path = root.join(MANIFEST_RELATIVE_PATH); + if packaged_path.exists() { + return Ok(packaged_path); + } + + Err(PluginError::NotFound(format!( + "plugin manifest not found at {} or {}", + direct_path.display(), + packaged_path.display() + ))) +} + +fn build_plugin_manifest( + root: &Path, + raw: RawPluginManifest, +) -> Result { + let mut errors = Vec::new(); + + validate_required_manifest_field("name", &raw.name, &mut errors); + validate_required_manifest_field("version", &raw.version, &mut errors); + validate_required_manifest_field("description", &raw.description, &mut errors); + + let permissions = build_manifest_permissions(&raw.permissions, &mut errors); + validate_command_entries(root, raw.hooks.pre_tool_use.iter(), "hook", &mut errors); + validate_command_entries(root, raw.hooks.post_tool_use.iter(), "hook", &mut errors); + validate_command_entries( + root, + raw.lifecycle.init.iter(), + "lifecycle command", + &mut errors, + ); + validate_command_entries( + root, + raw.lifecycle.shutdown.iter(), + "lifecycle command", + &mut errors, + ); + let tools = build_manifest_tools(root, raw.tools, &mut errors); + let commands = build_manifest_commands(root, raw.commands, &mut errors); + + if !errors.is_empty() { + return Err(PluginError::ManifestValidation(errors)); + } + + Ok(PluginManifest { + name: raw.name, + version: raw.version, + description: raw.description, + permissions, + default_enabled: raw.default_enabled, + hooks: raw.hooks, + lifecycle: raw.lifecycle, + tools, + commands, + }) +} + +fn validate_required_manifest_field( + field: &'static str, + value: &str, + errors: &mut Vec, +) { + 
if value.trim().is_empty() { + errors.push(PluginManifestValidationError::EmptyField { field }); + } +} + +fn build_manifest_permissions( + permissions: &[String], + errors: &mut Vec, +) -> Vec { + let mut seen = BTreeSet::new(); + let mut validated = Vec::new(); + + for permission in permissions { + let permission = permission.trim(); + if permission.is_empty() { + errors.push(PluginManifestValidationError::EmptyEntryField { + kind: "permission", + field: "value", + name: None, + }); + continue; + } + if !seen.insert(permission.to_string()) { + errors.push(PluginManifestValidationError::DuplicatePermission { + permission: permission.to_string(), + }); + continue; + } + match PluginPermission::parse(permission) { + Some(permission) => validated.push(permission), + None => errors.push(PluginManifestValidationError::InvalidPermission { + permission: permission.to_string(), + }), + } + } + + validated +} + +fn build_manifest_tools( + root: &Path, + tools: Vec, + errors: &mut Vec, +) -> Vec { + let mut seen = BTreeSet::new(); + let mut validated = Vec::new(); + + for tool in tools { + let name = tool.name.trim().to_string(); + if name.is_empty() { + errors.push(PluginManifestValidationError::EmptyEntryField { + kind: "tool", + field: "name", + name: None, + }); + continue; + } + if !seen.insert(name.clone()) { + errors.push(PluginManifestValidationError::DuplicateEntry { kind: "tool", name }); + continue; + } + if tool.description.trim().is_empty() { + errors.push(PluginManifestValidationError::EmptyEntryField { + kind: "tool", + field: "description", + name: Some(name.clone()), + }); + } + if tool.command.trim().is_empty() { + errors.push(PluginManifestValidationError::EmptyEntryField { + kind: "tool", + field: "command", + name: Some(name.clone()), + }); + } else { + validate_command_entry(root, &tool.command, "tool", errors); + } + if !tool.input_schema.is_object() { + errors.push(PluginManifestValidationError::InvalidToolInputSchema { + tool_name: name.clone(), + 
}); + } + let Some(required_permission) = + PluginToolPermission::parse(tool.required_permission.trim()) + else { + errors.push( + PluginManifestValidationError::InvalidToolRequiredPermission { + tool_name: name.clone(), + permission: tool.required_permission.trim().to_string(), + }, + ); + continue; + }; + + validated.push(PluginToolManifest { + name, + description: tool.description, + input_schema: tool.input_schema, + command: tool.command, + args: tool.args, + required_permission, + }); + } + + validated +} + +fn build_manifest_commands( + root: &Path, + commands: Vec, + errors: &mut Vec, +) -> Vec { + let mut seen = BTreeSet::new(); + let mut validated = Vec::new(); + + for command in commands { + let name = command.name.trim().to_string(); + if name.is_empty() { + errors.push(PluginManifestValidationError::EmptyEntryField { + kind: "command", + field: "name", + name: None, + }); + continue; + } + if !seen.insert(name.clone()) { + errors.push(PluginManifestValidationError::DuplicateEntry { + kind: "command", + name, + }); + continue; + } + if command.description.trim().is_empty() { + errors.push(PluginManifestValidationError::EmptyEntryField { + kind: "command", + field: "description", + name: Some(name.clone()), + }); + } + if command.command.trim().is_empty() { + errors.push(PluginManifestValidationError::EmptyEntryField { + kind: "command", + field: "command", + name: Some(name.clone()), + }); + } else { + validate_command_entry(root, &command.command, "command", errors); + } + validated.push(command); + } + + validated +} + +fn validate_command_entries<'a>( + root: &Path, + entries: impl Iterator, + kind: &'static str, + errors: &mut Vec, +) { + for entry in entries { + validate_command_entry(root, entry, kind, errors); + } +} + +fn validate_command_entry( + root: &Path, + entry: &str, + kind: &'static str, + errors: &mut Vec, +) { + if entry.trim().is_empty() { + errors.push(PluginManifestValidationError::EmptyEntryField { + kind, + field: "command", + 
name: None, + }); + return; + } + if is_literal_command(entry) { + return; + } + + let path = if Path::new(entry).is_absolute() { + PathBuf::from(entry) + } else { + root.join(entry) + }; + if !path.exists() { + errors.push(PluginManifestValidationError::MissingPath { kind, path }); + } +} + +fn resolve_hooks(root: &Path, hooks: &PluginHooks) -> PluginHooks { + PluginHooks { + pre_tool_use: hooks + .pre_tool_use + .iter() + .map(|entry| resolve_hook_entry(root, entry)) + .collect(), + post_tool_use: hooks + .post_tool_use + .iter() + .map(|entry| resolve_hook_entry(root, entry)) + .collect(), + } +} + +fn resolve_lifecycle(root: &Path, lifecycle: &PluginLifecycle) -> PluginLifecycle { + PluginLifecycle { + init: lifecycle + .init + .iter() + .map(|entry| resolve_hook_entry(root, entry)) + .collect(), + shutdown: lifecycle + .shutdown + .iter() + .map(|entry| resolve_hook_entry(root, entry)) + .collect(), + } +} + +fn resolve_tools( + root: &Path, + plugin_id: &str, + plugin_name: &str, + tools: &[PluginToolManifest], +) -> Vec { + tools + .iter() + .map(|tool| { + PluginTool::new( + plugin_id, + plugin_name, + PluginToolDefinition { + name: tool.name.clone(), + description: Some(tool.description.clone()), + input_schema: tool.input_schema.clone(), + }, + resolve_hook_entry(root, &tool.command), + tool.args.clone(), + tool.required_permission, + Some(root.to_path_buf()), + ) + }) + .collect() +} + +fn validate_hook_paths(root: Option<&Path>, hooks: &PluginHooks) -> Result<(), PluginError> { + let Some(root) = root else { + return Ok(()); + }; + for entry in hooks.pre_tool_use.iter().chain(hooks.post_tool_use.iter()) { + validate_command_path(root, entry, "hook")?; + } + Ok(()) +} + +fn validate_lifecycle_paths( + root: Option<&Path>, + lifecycle: &PluginLifecycle, +) -> Result<(), PluginError> { + let Some(root) = root else { + return Ok(()); + }; + for entry in lifecycle.init.iter().chain(lifecycle.shutdown.iter()) { + validate_command_path(root, entry, "lifecycle 
command")?; + } + Ok(()) +} + +fn validate_tool_paths(root: Option<&Path>, tools: &[PluginTool]) -> Result<(), PluginError> { + let Some(root) = root else { + return Ok(()); + }; + for tool in tools { + validate_command_path(root, &tool.command, "tool")?; + } + Ok(()) +} + +fn validate_command_path(root: &Path, entry: &str, kind: &str) -> Result<(), PluginError> { + if is_literal_command(entry) { + return Ok(()); + } + let path = if Path::new(entry).is_absolute() { + PathBuf::from(entry) + } else { + root.join(entry) + }; + if !path.exists() { + return Err(PluginError::InvalidManifest(format!( + "{kind} path `{}` does not exist", + path.display() + ))); + } + Ok(()) +} + +fn resolve_hook_entry(root: &Path, entry: &str) -> String { + if is_literal_command(entry) { + entry.to_string() + } else { + root.join(entry).display().to_string() + } +} + +fn is_literal_command(entry: &str) -> bool { + !entry.starts_with("./") && !entry.starts_with("../") && !Path::new(entry).is_absolute() +} + +fn run_lifecycle_commands( + metadata: &PluginMetadata, + lifecycle: &PluginLifecycle, + phase: &str, + commands: &[String], +) -> Result<(), PluginError> { + if lifecycle.is_empty() || commands.is_empty() { + return Ok(()); + } + + for command in commands { + let mut process = if Path::new(command).exists() { + if cfg!(windows) { + let mut process = Command::new("cmd"); + process.arg("/C").arg(command); + process + } else { + let mut process = Command::new("sh"); + process.arg(command); + process + } + } else if cfg!(windows) { + let mut process = Command::new("cmd"); + process.arg("/C").arg(command); + process + } else { + let mut process = Command::new("sh"); + process.arg("-lc").arg(command); + process + }; + if let Some(root) = &metadata.root { + process.current_dir(root); + } + let output = process.output()?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); + return Err(PluginError::CommandFailed(format!( + "plugin `{}` 
{} failed for `{}`: {}", + metadata.id, + phase, + command, + if stderr.is_empty() { + format!("exit status {}", output.status) + } else { + stderr + } + ))); + } + } + + Ok(()) +} + +fn resolve_local_source(source: &str) -> Result { + let path = PathBuf::from(source); + if path.exists() { + Ok(path) + } else { + Err(PluginError::NotFound(format!( + "plugin source `{source}` was not found" + ))) + } +} + +fn parse_install_source(source: &str) -> Result { + if source.starts_with("http://") + || source.starts_with("https://") + || source.starts_with("git@") + || Path::new(source) + .extension() + .is_some_and(|extension| extension.eq_ignore_ascii_case("git")) + { + Ok(PluginInstallSource::GitUrl { + url: source.to_string(), + }) + } else { + Ok(PluginInstallSource::LocalPath { + path: resolve_local_source(source)?, + }) + } +} + +fn materialize_source( + source: &PluginInstallSource, + temp_root: &Path, +) -> Result { + fs::create_dir_all(temp_root)?; + match source { + PluginInstallSource::LocalPath { path } => Ok(path.clone()), + PluginInstallSource::GitUrl { url } => { + let destination = temp_root.join(format!("plugin-{}", unix_time_ms())); + let output = Command::new("git") + .arg("clone") + .arg("--depth") + .arg("1") + .arg(url) + .arg(&destination) + .output()?; + if !output.status.success() { + return Err(PluginError::CommandFailed(format!( + "git clone failed for `{url}`: {}", + String::from_utf8_lossy(&output.stderr).trim() + ))); + } + Ok(destination) + } + } +} + +fn discover_plugin_dirs(root: &Path) -> Result, PluginError> { + match fs::read_dir(root) { + Ok(entries) => { + let mut paths = Vec::new(); + for entry in entries { + let path = entry?.path(); + if path.is_dir() && plugin_manifest_path(&path).is_ok() { + paths.push(path); + } + } + paths.sort(); + Ok(paths) + } + Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(Vec::new()), + Err(error) => Err(PluginError::Io(error)), + } +} + +fn plugin_id(name: &str, marketplace: &str) -> 
String { + format!("{name}@{marketplace}") +} + +fn sanitize_plugin_id(plugin_id: &str) -> String { + plugin_id + .chars() + .map(|ch| match ch { + '/' | '\\' | '@' | ':' => '-', + other => other, + }) + .collect() +} + +fn describe_install_source(source: &PluginInstallSource) -> String { + match source { + PluginInstallSource::LocalPath { path } => path.display().to_string(), + PluginInstallSource::GitUrl { url } => url.clone(), + } +} + +fn unix_time_ms() -> u128 { + SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("time should be after epoch") + .as_millis() +} + +fn copy_dir_all(source: &Path, destination: &Path) -> Result<(), PluginError> { + fs::create_dir_all(destination)?; + for entry in fs::read_dir(source)? { + let entry = entry?; + let target = destination.join(entry.file_name()); + if entry.file_type()?.is_dir() { + copy_dir_all(&entry.path(), &target)?; + } else { + fs::copy(entry.path(), target)?; + } + } + Ok(()) +} + +fn update_settings_json( + path: &Path, + mut update: impl FnMut(&mut Map), +) -> Result<(), PluginError> { + if let Some(parent) = path.parent() { + fs::create_dir_all(parent)?; + } + let mut root = match fs::read_to_string(path) { + Ok(contents) if !contents.trim().is_empty() => serde_json::from_str::(&contents)?, + Ok(_) => Value::Object(Map::new()), + Err(error) if error.kind() == std::io::ErrorKind::NotFound => Value::Object(Map::new()), + Err(error) => return Err(PluginError::Io(error)), + }; + + let object = root.as_object_mut().ok_or_else(|| { + PluginError::InvalidManifest(format!( + "settings file {} must contain a JSON object", + path.display() + )) + })?; + update(object); + fs::write(path, serde_json::to_string_pretty(&root)?)?; + Ok(()) +} + +fn ensure_object<'a>(root: &'a mut Map, key: &str) -> &'a mut Map { + if !root.get(key).is_some_and(Value::is_object) { + root.insert(key.to_string(), Value::Object(Map::new())); + } + root.get_mut(key) + .and_then(Value::as_object_mut) + .expect("object should exist") +} + 
// Unit tests for the plugin manager: manifest loading/validation, plugin
// discovery, install/enable/update/uninstall lifecycle, bundled-plugin sync,
// and tool/hook aggregation. Garbled extraction artifacts restored here:
// `collect::<Vec<_>>()`, `BTreeMap<String, bool>`, and three occurrences of
// `&registry` that had been mangled into an HTML `&reg;` entity.
// NOTE(review): the tail of the original span carried diff headers for other
// files (runtime/Cargo.toml, compact.rs); they belong to other files' diffs
// and are intentionally not reproduced here.
#[cfg(test)]
mod tests {
    use super::*;

    // Unique per-invocation temp directory so tests never share state.
    fn temp_dir(label: &str) -> PathBuf {
        std::env::temp_dir().join(format!("plugins-{label}-{}", unix_time_ms()))
    }

    // Writes `contents` to `path`, creating parent directories as needed.
    fn write_file(path: &Path, contents: &str) {
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).expect("parent dir");
        }
        fs::write(path, contents).expect("write file");
    }

    // Fixture: plugin with hooks, a tool, and a command, manifest at the root.
    fn write_loader_plugin(root: &Path) {
        write_file(
            root.join("hooks").join("pre.sh").as_path(),
            "#!/bin/sh\nprintf 'pre'\n",
        );
        write_file(
            root.join("tools").join("echo-tool.sh").as_path(),
            "#!/bin/sh\ncat\n",
        );
        write_file(
            root.join("commands").join("sync.sh").as_path(),
            "#!/bin/sh\nprintf 'sync'\n",
        );
        write_file(
            root.join(MANIFEST_FILE_NAME).as_path(),
            r#"{
  "name": "loader-demo",
  "version": "1.2.3",
  "description": "Manifest loader test plugin",
  "permissions": ["read", "write"],
  "hooks": {
    "PreToolUse": ["./hooks/pre.sh"]
  },
  "tools": [
    {
      "name": "echo_tool",
      "description": "Echoes JSON input",
      "inputSchema": {
        "type": "object"
      },
      "command": "./tools/echo-tool.sh",
      "requiredPermission": "workspace-write"
    }
  ],
  "commands": [
    {
      "name": "sync",
      "description": "Sync command",
      "command": "./commands/sync.sh"
    }
  ]
}"#,
        );
    }

    // Fixture: installable plugin with pre/post hooks, packaged manifest path.
    fn write_external_plugin(root: &Path, name: &str, version: &str) {
        write_file(
            root.join("hooks").join("pre.sh").as_path(),
            "#!/bin/sh\nprintf 'pre'\n",
        );
        write_file(
            root.join("hooks").join("post.sh").as_path(),
            "#!/bin/sh\nprintf 'post'\n",
        );
        write_file(
            root.join(MANIFEST_RELATIVE_PATH).as_path(),
            format!(
                "{{\n  \"name\": \"{name}\",\n  \"version\": \"{version}\",\n  \"description\": \"test plugin\",\n  \"hooks\": {{\n    \"PreToolUse\": [\"./hooks/pre.sh\"],\n    \"PostToolUse\": [\"./hooks/post.sh\"]\n  }}\n}}"
            )
            .as_str(),
        );
    }

    // Fixture: plugin whose hook script is deliberately missing.
    fn write_broken_plugin(root: &Path, name: &str) {
        write_file(
            root.join(MANIFEST_RELATIVE_PATH).as_path(),
            format!(
                "{{\n  \"name\": \"{name}\",\n  \"version\": \"1.0.0\",\n  \"description\": \"broken plugin\",\n  \"hooks\": {{\n    \"PreToolUse\": [\"./hooks/missing.sh\"]\n  }}\n}}"
            )
            .as_str(),
        );
    }

    // Fixture: plugin with init/shutdown lifecycle scripts that append to a
    // log file; returns the log path for assertions.
    fn write_lifecycle_plugin(root: &Path, name: &str, version: &str) -> PathBuf {
        let log_path = root.join("lifecycle.log");
        write_file(
            root.join("lifecycle").join("init.sh").as_path(),
            "#!/bin/sh\nprintf 'init\\n' >> lifecycle.log\n",
        );
        write_file(
            root.join("lifecycle").join("shutdown.sh").as_path(),
            "#!/bin/sh\nprintf 'shutdown\\n' >> lifecycle.log\n",
        );
        write_file(
            root.join(MANIFEST_RELATIVE_PATH).as_path(),
            format!(
                "{{\n  \"name\": \"{name}\",\n  \"version\": \"{version}\",\n  \"description\": \"lifecycle plugin\",\n  \"lifecycle\": {{\n    \"Init\": [\"./lifecycle/init.sh\"],\n    \"Shutdown\": [\"./lifecycle/shutdown.sh\"]\n  }}\n}}"
            )
            .as_str(),
        );
        log_path
    }

    fn write_tool_plugin(root: &Path, name: &str, version: &str) {
        write_tool_plugin_with_name(root, name, version, "plugin_echo");
    }

    // Fixture: plugin exposing one executable tool that echoes its JSON input
    // plus the CLAWD_* environment the runner injects.
    fn write_tool_plugin_with_name(root: &Path, name: &str, version: &str, tool_name: &str) {
        let script_path = root.join("tools").join("echo-json.sh");
        write_file(
            &script_path,
            "#!/bin/sh\nINPUT=$(cat)\nprintf '{\"plugin\":\"%s\",\"tool\":\"%s\",\"input\":%s}\\n' \"$CLAWD_PLUGIN_ID\" \"$CLAWD_TOOL_NAME\" \"$INPUT\"\n",
        );
        #[cfg(unix)]
        {
            use std::os::unix::fs::PermissionsExt;

            let mut permissions = fs::metadata(&script_path).expect("metadata").permissions();
            permissions.set_mode(0o755);
            fs::set_permissions(&script_path, permissions).expect("chmod");
        }
        write_file(
            root.join(MANIFEST_RELATIVE_PATH).as_path(),
            format!(
                "{{\n  \"name\": \"{name}\",\n  \"version\": \"{version}\",\n  \"description\": \"tool plugin\",\n  \"tools\": [\n    {{\n      \"name\": \"{tool_name}\",\n      \"description\": \"Echo JSON input\",\n      \"inputSchema\": {{\"type\": \"object\", \"properties\": {{\"message\": {{\"type\": \"string\"}}}}, \"required\": [\"message\"], \"additionalProperties\": false}},\n      \"command\": \"./tools/echo-json.sh\",\n      \"requiredPermission\": \"workspace-write\"\n    }}\n  ]\n}}"
            )
            .as_str(),
        );
    }

    // Fixture: minimal bundled plugin with an explicit defaultEnabled flag.
    fn write_bundled_plugin(root: &Path, name: &str, version: &str, default_enabled: bool) {
        write_file(
            root.join(MANIFEST_RELATIVE_PATH).as_path(),
            format!(
                "{{\n  \"name\": \"{name}\",\n  \"version\": \"{version}\",\n  \"description\": \"bundled plugin\",\n  \"defaultEnabled\": {}\n}}",
                if default_enabled { "true" } else { "false" }
            )
            .as_str(),
        );
    }

    // Reads the `enabledPlugins` map back out of a settings.json file.
    fn load_enabled_plugins(path: &Path) -> BTreeMap<String, bool> {
        let contents = fs::read_to_string(path).expect("settings should exist");
        let root: Value = serde_json::from_str(&contents).expect("settings json");
        root.get("enabledPlugins")
            .and_then(Value::as_object)
            .map(|enabled_plugins| {
                enabled_plugins
                    .iter()
                    .map(|(plugin_id, value)| {
                        (
                            plugin_id.clone(),
                            value.as_bool().expect("plugin state should be a bool"),
                        )
                    })
                    .collect()
            })
            .unwrap_or_default()
    }

    #[test]
    fn load_plugin_from_directory_validates_required_fields() {
        let root = temp_dir("manifest-required");
        write_file(
            root.join(MANIFEST_FILE_NAME).as_path(),
            r#"{"name":"","version":"1.0.0","description":"desc"}"#,
        );

        let error = load_plugin_from_directory(&root).expect_err("empty name should fail");
        assert!(error.to_string().contains("name cannot be empty"));

        let _ = fs::remove_dir_all(root);
    }

    #[test]
    fn load_plugin_from_directory_reads_root_manifest_and_validates_entries() {
        let root = temp_dir("manifest-root");
        write_loader_plugin(&root);

        let manifest = load_plugin_from_directory(&root).expect("manifest should load");
        assert_eq!(manifest.name, "loader-demo");
        assert_eq!(manifest.version, "1.2.3");
        assert_eq!(
            manifest
                .permissions
                .iter()
                .map(|permission| permission.as_str())
                .collect::<Vec<_>>(),
            vec!["read", "write"]
        );
        assert_eq!(manifest.hooks.pre_tool_use, vec!["./hooks/pre.sh"]);
        assert_eq!(manifest.tools.len(), 1);
        assert_eq!(manifest.tools[0].name, "echo_tool");
        assert_eq!(
            manifest.tools[0].required_permission,
            PluginToolPermission::WorkspaceWrite
        );
        assert_eq!(manifest.commands.len(), 1);
        assert_eq!(manifest.commands[0].name, "sync");

        let _ = fs::remove_dir_all(root);
    }

    #[test]
    fn load_plugin_from_directory_supports_packaged_manifest_path() {
        let root = temp_dir("manifest-packaged");
        write_external_plugin(&root, "packaged-demo", "1.0.0");

        let manifest = load_plugin_from_directory(&root).expect("packaged manifest should load");
        assert_eq!(manifest.name, "packaged-demo");
        assert!(manifest.tools.is_empty());
        assert!(manifest.commands.is_empty());

        let _ = fs::remove_dir_all(root);
    }

    #[test]
    fn load_plugin_from_directory_defaults_optional_fields() {
        let root = temp_dir("manifest-defaults");
        write_file(
            root.join(MANIFEST_FILE_NAME).as_path(),
            r#"{
  "name": "minimal",
  "version": "0.1.0",
  "description": "Minimal manifest"
}"#,
        );

        let manifest = load_plugin_from_directory(&root).expect("minimal manifest should load");
        assert!(manifest.permissions.is_empty());
        assert!(manifest.hooks.is_empty());
        assert!(manifest.tools.is_empty());
        assert!(manifest.commands.is_empty());

        let _ = fs::remove_dir_all(root);
    }

    #[test]
    fn load_plugin_from_directory_rejects_duplicate_permissions_and_commands() {
        let root = temp_dir("manifest-duplicates");
        write_file(
            root.join("commands").join("sync.sh").as_path(),
            "#!/bin/sh\nprintf 'sync'\n",
        );
        write_file(
            root.join(MANIFEST_FILE_NAME).as_path(),
            r#"{
  "name": "duplicate-manifest",
  "version": "1.0.0",
  "description": "Duplicate validation",
  "permissions": ["read", "read"],
  "commands": [
    {"name": "sync", "description": "Sync one", "command": "./commands/sync.sh"},
    {"name": "sync", "description": "Sync two", "command": "./commands/sync.sh"}
  ]
}"#,
        );

        let error = load_plugin_from_directory(&root).expect_err("duplicates should fail");
        match error {
            PluginError::ManifestValidation(errors) => {
                assert!(errors.iter().any(|error| matches!(
                    error,
                    PluginManifestValidationError::DuplicatePermission { permission }
                        if permission == "read"
                )));
                assert!(errors.iter().any(|error| matches!(
                    error,
                    PluginManifestValidationError::DuplicateEntry { kind, name }
                        if *kind == "command" && name == "sync"
                )));
            }
            other => panic!("expected manifest validation errors, got {other}"),
        }

        let _ = fs::remove_dir_all(root);
    }

    #[test]
    fn load_plugin_from_directory_rejects_missing_tool_or_command_paths() {
        let root = temp_dir("manifest-paths");
        write_file(
            root.join(MANIFEST_FILE_NAME).as_path(),
            r#"{
  "name": "missing-paths",
  "version": "1.0.0",
  "description": "Missing path validation",
  "tools": [
    {
      "name": "tool_one",
      "description": "Missing tool script",
      "inputSchema": {"type": "object"},
      "command": "./tools/missing.sh"
    }
  ]
}"#,
        );

        let error = load_plugin_from_directory(&root).expect_err("missing paths should fail");
        assert!(error.to_string().contains("does not exist"));

        let _ = fs::remove_dir_all(root);
    }

    #[test]
    fn load_plugin_from_directory_rejects_invalid_permissions() {
        let root = temp_dir("manifest-invalid-permissions");
        write_file(
            root.join(MANIFEST_FILE_NAME).as_path(),
            r#"{
  "name": "invalid-permissions",
  "version": "1.0.0",
  "description": "Invalid permission validation",
  "permissions": ["admin"]
}"#,
        );

        let error = load_plugin_from_directory(&root).expect_err("invalid permissions should fail");
        match error {
            PluginError::ManifestValidation(errors) => {
                assert!(errors.iter().any(|error| matches!(
                    error,
                    PluginManifestValidationError::InvalidPermission { permission }
                        if permission == "admin"
                )));
            }
            other => panic!("expected manifest validation errors, got {other}"),
        }

        let _ = fs::remove_dir_all(root);
    }

    #[test]
    fn load_plugin_from_directory_rejects_invalid_tool_required_permission() {
        let root = temp_dir("manifest-invalid-tool-permission");
        write_file(
            root.join("tools").join("echo.sh").as_path(),
            "#!/bin/sh\ncat\n",
        );
        write_file(
            root.join(MANIFEST_FILE_NAME).as_path(),
            r#"{
  "name": "invalid-tool-permission",
  "version": "1.0.0",
  "description": "Invalid tool permission validation",
  "tools": [
    {
      "name": "echo_tool",
      "description": "Echo tool",
      "inputSchema": {"type": "object"},
      "command": "./tools/echo.sh",
      "requiredPermission": "admin"
    }
  ]
}"#,
        );

        let error =
            load_plugin_from_directory(&root).expect_err("invalid tool permission should fail");
        match error {
            PluginError::ManifestValidation(errors) => {
                assert!(errors.iter().any(|error| matches!(
                    error,
                    PluginManifestValidationError::InvalidToolRequiredPermission {
                        tool_name,
                        permission
                    } if tool_name == "echo_tool" && permission == "admin"
                )));
            }
            other => panic!("expected manifest validation errors, got {other}"),
        }

        let _ = fs::remove_dir_all(root);
    }

    #[test]
    fn load_plugin_from_directory_accumulates_multiple_validation_errors() {
        let root = temp_dir("manifest-multi-error");
        write_file(
            root.join(MANIFEST_FILE_NAME).as_path(),
            r#"{
  "name": "",
  "version": "1.0.0",
  "description": "",
  "permissions": ["admin"],
  "commands": [
    {"name": "", "description": "", "command": "./commands/missing.sh"}
  ]
}"#,
        );

        let error =
            load_plugin_from_directory(&root).expect_err("multiple manifest errors should fail");
        match error {
            PluginError::ManifestValidation(errors) => {
                assert!(errors.len() >= 4);
                assert!(errors.iter().any(|error| matches!(
                    error,
                    PluginManifestValidationError::EmptyField { field } if *field == "name"
                )));
                assert!(errors.iter().any(|error| matches!(
                    error,
                    PluginManifestValidationError::EmptyField { field }
                        if *field == "description"
                )));
                assert!(errors.iter().any(|error| matches!(
                    error,
                    PluginManifestValidationError::InvalidPermission { permission }
                        if permission == "admin"
                )));
            }
            other => panic!("expected manifest validation errors, got {other}"),
        }

        let _ = fs::remove_dir_all(root);
    }

    #[test]
    fn discovers_builtin_and_bundled_plugins() {
        let manager = PluginManager::new(PluginManagerConfig::new(temp_dir("discover")));
        let plugins = manager.list_plugins().expect("plugins should list");
        assert!(plugins
            .iter()
            .any(|plugin| plugin.metadata.kind == PluginKind::Builtin));
        assert!(plugins
            .iter()
            .any(|plugin| plugin.metadata.kind == PluginKind::Bundled));
    }

    #[test]
    fn installs_enables_updates_and_uninstalls_external_plugins() {
        let config_home = temp_dir("home");
        let source_root = temp_dir("source");
        write_external_plugin(&source_root, "demo", "1.0.0");

        let mut manager = PluginManager::new(PluginManagerConfig::new(&config_home));
        let install = manager
            .install(source_root.to_str().expect("utf8 path"))
            .expect("install should succeed");
        assert_eq!(install.plugin_id, "demo@external");
        assert!(manager
            .list_plugins()
            .expect("list plugins")
            .iter()
            .any(|plugin| plugin.metadata.id == "demo@external" && plugin.enabled));

        let hooks = manager.aggregated_hooks().expect("hooks should aggregate");
        assert_eq!(hooks.pre_tool_use.len(), 1);
        assert!(hooks.pre_tool_use[0].contains("pre.sh"));

        manager
            .disable("demo@external")
            .expect("disable should work");
        assert!(manager
            .aggregated_hooks()
            .expect("hooks after disable")
            .is_empty());
        manager.enable("demo@external").expect("enable should work");

        write_external_plugin(&source_root, "demo", "2.0.0");
        let update = manager.update("demo@external").expect("update should work");
        assert_eq!(update.old_version, "1.0.0");
        assert_eq!(update.new_version, "2.0.0");

        manager
            .uninstall("demo@external")
            .expect("uninstall should work");
        assert!(!manager
            .list_plugins()
            .expect("list plugins")
            .iter()
            .any(|plugin| plugin.metadata.id == "demo@external"));

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(source_root);
    }

    #[test]
    fn auto_installs_bundled_plugins_into_the_registry() {
        let config_home = temp_dir("bundled-home");
        let bundled_root = temp_dir("bundled-root");
        write_bundled_plugin(&bundled_root.join("starter"), "starter", "0.1.0", false);

        let mut config = PluginManagerConfig::new(&config_home);
        config.bundled_root = Some(bundled_root.clone());
        let manager = PluginManager::new(config);

        let installed = manager
            .list_installed_plugins()
            .expect("bundled plugins should auto-install");
        assert!(installed.iter().any(|plugin| {
            plugin.metadata.id == "starter@bundled"
                && plugin.metadata.kind == PluginKind::Bundled
                && !plugin.enabled
        }));

        let registry = manager.load_registry().expect("registry should exist");
        let record = registry
            .plugins
            .get("starter@bundled")
            .expect("bundled plugin should be recorded");
        assert_eq!(record.kind, PluginKind::Bundled);
        assert!(record.install_path.exists());

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(bundled_root);
    }

    #[test]
    fn default_bundled_root_loads_repo_bundles_as_installed_plugins() {
        let config_home = temp_dir("default-bundled-home");
        let manager = PluginManager::new(PluginManagerConfig::new(&config_home));

        let installed = manager
            .list_installed_plugins()
            .expect("default bundled plugins should auto-install");
        assert!(installed
            .iter()
            .any(|plugin| plugin.metadata.id == "example-bundled@bundled"));
        assert!(installed
            .iter()
            .any(|plugin| plugin.metadata.id == "sample-hooks@bundled"));

        let _ = fs::remove_dir_all(config_home);
    }

    #[test]
    fn bundled_sync_prunes_removed_bundled_registry_entries() {
        let config_home = temp_dir("bundled-prune-home");
        let bundled_root = temp_dir("bundled-prune-root");
        let stale_install_path = config_home
            .join("plugins")
            .join("installed")
            .join("stale-bundled-external");
        write_bundled_plugin(&bundled_root.join("active"), "active", "0.1.0", false);
        write_file(
            stale_install_path.join(MANIFEST_RELATIVE_PATH).as_path(),
            r#"{
  "name": "stale",
  "version": "0.1.0",
  "description": "stale bundled plugin"
}"#,
        );

        let mut config = PluginManagerConfig::new(&config_home);
        config.bundled_root = Some(bundled_root.clone());
        config.install_root = Some(config_home.join("plugins").join("installed"));
        let manager = PluginManager::new(config);

        let mut registry = InstalledPluginRegistry::default();
        registry.plugins.insert(
            "stale@bundled".to_string(),
            InstalledPluginRecord {
                kind: PluginKind::Bundled,
                id: "stale@bundled".to_string(),
                name: "stale".to_string(),
                version: "0.1.0".to_string(),
                description: "stale bundled plugin".to_string(),
                install_path: stale_install_path.clone(),
                source: PluginInstallSource::LocalPath {
                    path: bundled_root.join("stale"),
                },
                installed_at_unix_ms: 1,
                updated_at_unix_ms: 1,
            },
        );
        manager.store_registry(&registry).expect("store registry");
        manager
            .write_enabled_state("stale@bundled", Some(true))
            .expect("seed bundled enabled state");

        let installed = manager
            .list_installed_plugins()
            .expect("bundled sync should succeed");
        assert!(installed
            .iter()
            .any(|plugin| plugin.metadata.id == "active@bundled"));
        assert!(!installed
            .iter()
            .any(|plugin| plugin.metadata.id == "stale@bundled"));

        let registry = manager.load_registry().expect("load registry");
        assert!(!registry.plugins.contains_key("stale@bundled"));
        assert!(!stale_install_path.exists());

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(bundled_root);
    }

    #[test]
    fn installed_plugin_discovery_keeps_registry_entries_outside_install_root() {
        let config_home = temp_dir("registry-fallback-home");
        let bundled_root = temp_dir("registry-fallback-bundled");
        let install_root = config_home.join("plugins").join("installed");
        let external_install_path = temp_dir("registry-fallback-external");
        write_file(
            external_install_path.join(MANIFEST_FILE_NAME).as_path(),
            r#"{
  "name": "registry-fallback",
  "version": "1.0.0",
  "description": "Registry fallback plugin"
}"#,
        );

        let mut config = PluginManagerConfig::new(&config_home);
        config.bundled_root = Some(bundled_root.clone());
        config.install_root = Some(install_root.clone());
        let manager = PluginManager::new(config);

        let mut registry = InstalledPluginRegistry::default();
        registry.plugins.insert(
            "registry-fallback@external".to_string(),
            InstalledPluginRecord {
                kind: PluginKind::External,
                id: "registry-fallback@external".to_string(),
                name: "registry-fallback".to_string(),
                version: "1.0.0".to_string(),
                description: "Registry fallback plugin".to_string(),
                install_path: external_install_path.clone(),
                source: PluginInstallSource::LocalPath {
                    path: external_install_path.clone(),
                },
                installed_at_unix_ms: 1,
                updated_at_unix_ms: 1,
            },
        );
        manager.store_registry(&registry).expect("store registry");
        manager
            .write_enabled_state("stale-external@external", Some(true))
            .expect("seed stale external enabled state");

        let installed = manager
            .list_installed_plugins()
            .expect("registry fallback plugin should load");
        assert!(installed
            .iter()
            .any(|plugin| plugin.metadata.id == "registry-fallback@external"));

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(bundled_root);
        let _ = fs::remove_dir_all(external_install_path);
    }

    #[test]
    fn installed_plugin_discovery_prunes_stale_registry_entries() {
        let config_home = temp_dir("registry-prune-home");
        let bundled_root = temp_dir("registry-prune-bundled");
        let install_root = config_home.join("plugins").join("installed");
        let missing_install_path = temp_dir("registry-prune-missing");

        let mut config = PluginManagerConfig::new(&config_home);
        config.bundled_root = Some(bundled_root.clone());
        config.install_root = Some(install_root);
        let manager = PluginManager::new(config);

        let mut registry = InstalledPluginRegistry::default();
        registry.plugins.insert(
            "stale-external@external".to_string(),
            InstalledPluginRecord {
                kind: PluginKind::External,
                id: "stale-external@external".to_string(),
                name: "stale-external".to_string(),
                version: "1.0.0".to_string(),
                description: "stale external plugin".to_string(),
                install_path: missing_install_path.clone(),
                source: PluginInstallSource::LocalPath {
                    path: missing_install_path.clone(),
                },
                installed_at_unix_ms: 1,
                updated_at_unix_ms: 1,
            },
        );
        manager.store_registry(&registry).expect("store registry");

        let installed = manager
            .list_installed_plugins()
            .expect("stale registry entries should be pruned");
        assert!(!installed
            .iter()
            .any(|plugin| plugin.metadata.id == "stale-external@external"));

        let registry = manager.load_registry().expect("load registry");
        assert!(!registry.plugins.contains_key("stale-external@external"));

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(bundled_root);
    }

    #[test]
    fn persists_bundled_plugin_enable_state_across_reloads() {
        let config_home = temp_dir("bundled-state-home");
        let bundled_root = temp_dir("bundled-state-root");
        write_bundled_plugin(&bundled_root.join("starter"), "starter", "0.1.0", false);

        let mut config = PluginManagerConfig::new(&config_home);
        config.bundled_root = Some(bundled_root.clone());
        let mut manager = PluginManager::new(config.clone());

        manager
            .enable("starter@bundled")
            .expect("enable bundled plugin should succeed");
        assert_eq!(
            load_enabled_plugins(&manager.settings_path()).get("starter@bundled"),
            Some(&true)
        );

        let mut reloaded_config = PluginManagerConfig::new(&config_home);
        reloaded_config.bundled_root = Some(bundled_root.clone());
        reloaded_config.enabled_plugins = load_enabled_plugins(&manager.settings_path());
        let reloaded_manager = PluginManager::new(reloaded_config);
        let reloaded = reloaded_manager
            .list_installed_plugins()
            .expect("bundled plugins should still be listed");
        assert!(reloaded
            .iter()
            .any(|plugin| { plugin.metadata.id == "starter@bundled" && plugin.enabled }));

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(bundled_root);
    }

    #[test]
    fn persists_bundled_plugin_disable_state_across_reloads() {
        let config_home = temp_dir("bundled-disabled-home");
        let bundled_root = temp_dir("bundled-disabled-root");
        write_bundled_plugin(&bundled_root.join("starter"), "starter", "0.1.0", true);

        let mut config = PluginManagerConfig::new(&config_home);
        config.bundled_root = Some(bundled_root.clone());
        let mut manager = PluginManager::new(config);

        manager
            .disable("starter@bundled")
            .expect("disable bundled plugin should succeed");
        assert_eq!(
            load_enabled_plugins(&manager.settings_path()).get("starter@bundled"),
            Some(&false)
        );

        let mut reloaded_config = PluginManagerConfig::new(&config_home);
        reloaded_config.bundled_root = Some(bundled_root.clone());
        reloaded_config.enabled_plugins = load_enabled_plugins(&manager.settings_path());
        let reloaded_manager = PluginManager::new(reloaded_config);
        let reloaded = reloaded_manager
            .list_installed_plugins()
            .expect("bundled plugins should still be listed");
        assert!(reloaded
            .iter()
            .any(|plugin| { plugin.metadata.id == "starter@bundled" && !plugin.enabled }));

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(bundled_root);
    }

    #[test]
    fn validates_plugin_source_before_install() {
        let config_home = temp_dir("validate-home");
        let source_root = temp_dir("validate-source");
        write_external_plugin(&source_root, "validator", "1.0.0");
        let manager = PluginManager::new(PluginManagerConfig::new(&config_home));
        let manifest = manager
            .validate_plugin_source(source_root.to_str().expect("utf8 path"))
            .expect("manifest should validate");
        assert_eq!(manifest.name, "validator");
        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(source_root);
    }

    #[test]
    fn plugin_registry_tracks_enabled_state_and_lookup() {
        let config_home = temp_dir("registry-home");
        let source_root = temp_dir("registry-source");
        write_external_plugin(&source_root, "registry-demo", "1.0.0");

        let mut manager = PluginManager::new(PluginManagerConfig::new(&config_home));
        manager
            .install(source_root.to_str().expect("utf8 path"))
            .expect("install should succeed");
        manager
            .disable("registry-demo@external")
            .expect("disable should succeed");

        let registry = manager.plugin_registry().expect("registry should build");
        let plugin = registry
            .get("registry-demo@external")
            .expect("installed plugin should be discoverable");
        assert_eq!(plugin.metadata().name, "registry-demo");
        assert!(!plugin.is_enabled());
        assert!(registry.contains("registry-demo@external"));
        assert!(!registry.contains("missing@external"));

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(source_root);
    }

    #[test]
    fn rejects_plugin_sources_with_missing_hook_paths() {
        let config_home = temp_dir("broken-home");
        let source_root = temp_dir("broken-source");
        write_broken_plugin(&source_root, "broken");

        let manager = PluginManager::new(PluginManagerConfig::new(&config_home));
        let error = manager
            .validate_plugin_source(source_root.to_str().expect("utf8 path"))
            .expect_err("missing hook file should fail validation");
        assert!(error.to_string().contains("does not exist"));

        let mut manager = PluginManager::new(PluginManagerConfig::new(&config_home));
        let install_error = manager
            .install(source_root.to_str().expect("utf8 path"))
            .expect_err("install should reject invalid hook paths");
        assert!(install_error.to_string().contains("does not exist"));

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(source_root);
    }

    #[test]
    fn plugin_registry_runs_initialize_and_shutdown_for_enabled_plugins() {
        let config_home = temp_dir("lifecycle-home");
        let source_root = temp_dir("lifecycle-source");
        let _ = write_lifecycle_plugin(&source_root, "lifecycle-demo", "1.0.0");

        let mut manager = PluginManager::new(PluginManagerConfig::new(&config_home));
        let install = manager
            .install(source_root.to_str().expect("utf8 path"))
            .expect("install should succeed");
        let log_path = install.install_path.join("lifecycle.log");

        let registry = manager.plugin_registry().expect("registry should build");
        registry.initialize().expect("init should succeed");
        registry.shutdown().expect("shutdown should succeed");

        let log = fs::read_to_string(&log_path).expect("lifecycle log should exist");
        assert_eq!(log, "init\nshutdown\n");

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(source_root);
    }

    #[test]
    fn aggregates_and_executes_plugin_tools() {
        let config_home = temp_dir("tool-home");
        let source_root = temp_dir("tool-source");
        write_tool_plugin(&source_root, "tool-demo", "1.0.0");

        let mut manager = PluginManager::new(PluginManagerConfig::new(&config_home));
        manager
            .install(source_root.to_str().expect("utf8 path"))
            .expect("install should succeed");

        let tools = manager.aggregated_tools().expect("tools should aggregate");
        assert_eq!(tools.len(), 1);
        assert_eq!(tools[0].definition().name, "plugin_echo");
        assert_eq!(tools[0].required_permission(), "workspace-write");

        let output = tools[0]
            .execute(&serde_json::json!({ "message": "hello" }))
            .expect("plugin tool should execute");
        let payload: Value = serde_json::from_str(&output).expect("valid json");
        assert_eq!(payload["plugin"], "tool-demo@external");
        assert_eq!(payload["tool"], "plugin_echo");
        assert_eq!(payload["input"]["message"], "hello");

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(source_root);
    }

    #[test]
    fn list_installed_plugins_scans_install_root_without_registry_entries() {
        let config_home = temp_dir("installed-scan-home");
        let bundled_root = temp_dir("installed-scan-bundled");
        let install_root = config_home.join("plugins").join("installed");
        let installed_plugin_root = install_root.join("scan-demo");
        write_file(
            installed_plugin_root.join(MANIFEST_FILE_NAME).as_path(),
            r#"{
  "name": "scan-demo",
  "version": "1.0.0",
  "description": "Scanned from install root"
}"#,
        );

        let mut config = PluginManagerConfig::new(&config_home);
        config.bundled_root = Some(bundled_root.clone());
        config.install_root = Some(install_root);
        let manager = PluginManager::new(config);

        let installed = manager
            .list_installed_plugins()
            .expect("installed plugins should scan directories");
        assert!(installed
            .iter()
            .any(|plugin| plugin.metadata.id == "scan-demo@external"));

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(bundled_root);
    }

    #[test]
    fn list_installed_plugins_scans_packaged_manifests_in_install_root() {
        let config_home = temp_dir("installed-packaged-scan-home");
        let bundled_root = temp_dir("installed-packaged-scan-bundled");
        let install_root = config_home.join("plugins").join("installed");
        let installed_plugin_root = install_root.join("scan-packaged");
        write_file(
            installed_plugin_root.join(MANIFEST_RELATIVE_PATH).as_path(),
            r#"{
  "name": "scan-packaged",
  "version": "1.0.0",
  "description": "Packaged manifest in install root"
}"#,
        );

        let mut config = PluginManagerConfig::new(&config_home);
        config.bundled_root = Some(bundled_root.clone());
        config.install_root = Some(install_root);
        let manager = PluginManager::new(config);

        let installed = manager
            .list_installed_plugins()
            .expect("installed plugins should scan packaged manifests");
        assert!(installed
            .iter()
            .any(|plugin| plugin.metadata.id == "scan-packaged@external"));

        let _ = fs::remove_dir_all(config_home);
        let _ = fs::remove_dir_all(bundled_root);
    }
}
Resume directly — do not acknowledge the summary, do not recap what was happening, and do not preface with continuation text."; + #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct CompactionConfig { pub preserve_recent_messages: usize, @@ -30,8 +35,15 @@ pub fn estimate_session_tokens(session: &Session) -> usize { #[must_use] pub fn should_compact(session: &Session, config: CompactionConfig) -> bool { - session.messages.len() > config.preserve_recent_messages - && estimate_session_tokens(session) >= config.max_estimated_tokens + let start = compacted_summary_prefix_len(session); + let compactable = &session.messages[start..]; + + compactable.len() > config.preserve_recent_messages + && compactable + .iter() + .map(estimate_message_tokens) + .sum::() + >= config.max_estimated_tokens } #[must_use] @@ -56,16 +68,18 @@ pub fn get_compact_continuation_message( recent_messages_preserved: bool, ) -> String { let mut base = format!( - "This session is being continued from a previous conversation that ran out of context. The summary below covers the earlier portion of the conversation.\n\n{}", + "{COMPACT_CONTINUATION_PREAMBLE}{}", format_compact_summary(summary) ); if recent_messages_preserved { - base.push_str("\n\nRecent messages are preserved verbatim."); + base.push_str("\n\n"); + base.push_str(COMPACT_RECENT_MESSAGES_NOTE); } if suppress_follow_up_questions { - base.push_str("\nContinue the conversation from where it left off without asking the user any further questions. 
Resume directly — do not acknowledge the summary, do not recap what was happening, and do not preface with continuation text."); + base.push('\n'); + base.push_str(COMPACT_DIRECT_RESUME_INSTRUCTION); } base @@ -82,13 +96,19 @@ pub fn compact_session(session: &Session, config: CompactionConfig) -> Compactio }; } + let existing_summary = session + .messages + .first() + .and_then(extract_existing_compacted_summary); + let compacted_prefix_len = usize::from(existing_summary.is_some()); let keep_from = session .messages .len() .saturating_sub(config.preserve_recent_messages); - let removed = &session.messages[..keep_from]; + let removed = &session.messages[compacted_prefix_len..keep_from]; let preserved = session.messages[keep_from..].to_vec(); - let summary = summarize_messages(removed); + let summary = + merge_compact_summaries(existing_summary.as_deref(), &summarize_messages(removed)); let formatted_summary = format_compact_summary(&summary); let continuation = get_compact_continuation_message(&summary, true, !preserved.is_empty()); @@ -110,6 +130,16 @@ pub fn compact_session(session: &Session, config: CompactionConfig) -> Compactio } } +fn compacted_summary_prefix_len(session: &Session) -> usize { + usize::from( + session + .messages + .first() + .and_then(extract_existing_compacted_summary) + .is_some(), + ) +} + fn summarize_messages(messages: &[ConversationMessage]) -> String { let user_messages = messages .iter() @@ -197,6 +227,41 @@ fn summarize_messages(messages: &[ConversationMessage]) -> String { lines.join("\n") } +fn merge_compact_summaries(existing_summary: Option<&str>, new_summary: &str) -> String { + let Some(existing_summary) = existing_summary else { + return new_summary.to_string(); + }; + + let previous_highlights = extract_summary_highlights(existing_summary); + let new_formatted_summary = format_compact_summary(new_summary); + let new_highlights = extract_summary_highlights(&new_formatted_summary); + let new_timeline = 
extract_summary_timeline(&new_formatted_summary); + + let mut lines = vec!["".to_string(), "Conversation summary:".to_string()]; + + if !previous_highlights.is_empty() { + lines.push("- Previously compacted context:".to_string()); + lines.extend( + previous_highlights + .into_iter() + .map(|line| format!(" {line}")), + ); + } + + if !new_highlights.is_empty() { + lines.push("- Newly compacted context:".to_string()); + lines.extend(new_highlights.into_iter().map(|line| format!(" {line}"))); + } + + if !new_timeline.is_empty() { + lines.push("- Key timeline:".to_string()); + lines.extend(new_timeline.into_iter().map(|line| format!(" {line}"))); + } + + lines.push("".to_string()); + lines.join("\n") +} + fn summarize_block(block: &ContentBlock) -> String { let raw = match block { ContentBlock::Text { text } => text.clone(), @@ -374,11 +439,71 @@ fn collapse_blank_lines(content: &str) -> String { result } +fn extract_existing_compacted_summary(message: &ConversationMessage) -> Option { + if message.role != MessageRole::System { + return None; + } + + let text = first_text_block(message)?; + let summary = text.strip_prefix(COMPACT_CONTINUATION_PREAMBLE)?; + let summary = summary + .split_once(&format!("\n\n{COMPACT_RECENT_MESSAGES_NOTE}")) + .map_or(summary, |(value, _)| value); + let summary = summary + .split_once(&format!("\n{COMPACT_DIRECT_RESUME_INSTRUCTION}")) + .map_or(summary, |(value, _)| value); + Some(summary.trim().to_string()) +} + +fn extract_summary_highlights(summary: &str) -> Vec { + let mut lines = Vec::new(); + let mut in_timeline = false; + + for line in format_compact_summary(summary).lines() { + let trimmed = line.trim_end(); + if trimmed.is_empty() || trimmed == "Summary:" || trimmed == "Conversation summary:" { + continue; + } + if trimmed == "- Key timeline:" { + in_timeline = true; + continue; + } + if in_timeline { + continue; + } + lines.push(trimmed.to_string()); + } + + lines +} + +fn extract_summary_timeline(summary: &str) -> Vec { + let 
mut lines = Vec::new(); + let mut in_timeline = false; + + for line in format_compact_summary(summary).lines() { + let trimmed = line.trim_end(); + if trimmed == "- Key timeline:" { + in_timeline = true; + continue; + } + if !in_timeline { + continue; + } + if trimmed.is_empty() { + break; + } + lines.push(trimmed.to_string()); + } + + lines +} + #[cfg(test)] mod tests { use super::{ collect_key_files, compact_session, estimate_session_tokens, format_compact_summary, - infer_pending_work, should_compact, CompactionConfig, + get_compact_continuation_message, infer_pending_work, should_compact, CompactionConfig, }; use crate::session::{ContentBlock, ConversationMessage, MessageRole, Session}; @@ -453,6 +578,98 @@ mod tests { ); } + #[test] + fn keeps_previous_compacted_context_when_compacting_again() { + let initial_session = Session { + version: 1, + messages: vec![ + ConversationMessage::user_text("Investigate rust/crates/runtime/src/compact.rs"), + ConversationMessage::assistant(vec![ContentBlock::Text { + text: "I will inspect the compact flow.".to_string(), + }]), + ConversationMessage::user_text( + "Also update rust/crates/runtime/src/conversation.rs", + ), + ConversationMessage::assistant(vec![ContentBlock::Text { + text: "Next: preserve prior summary context during auto compact.".to_string(), + }]), + ], + }; + let config = CompactionConfig { + preserve_recent_messages: 2, + max_estimated_tokens: 1, + }; + + let first = compact_session(&initial_session, config); + let mut follow_up_messages = first.compacted_session.messages.clone(); + follow_up_messages.extend([ + ConversationMessage::user_text("Please add regression tests for compaction."), + ConversationMessage::assistant(vec![ContentBlock::Text { + text: "Working on regression coverage now.".to_string(), + }]), + ]); + + let second = compact_session( + &Session { + version: 1, + messages: follow_up_messages, + }, + config, + ); + + assert!(second + .formatted_summary + .contains("Previously compacted 
context:")); + assert!(second + .formatted_summary + .contains("Scope: 2 earlier messages compacted")); + assert!(second + .formatted_summary + .contains("Newly compacted context:")); + assert!(second + .formatted_summary + .contains("Also update rust/crates/runtime/src/conversation.rs")); + assert!(matches!( + &second.compacted_session.messages[0].blocks[0], + ContentBlock::Text { text } + if text.contains("Previously compacted context:") + && text.contains("Newly compacted context:") + )); + assert!(matches!( + &second.compacted_session.messages[1].blocks[0], + ContentBlock::Text { text } if text.contains("Please add regression tests for compaction.") + )); + } + + #[test] + fn ignores_existing_compacted_summary_when_deciding_to_recompact() { + let summary = "Conversation summary:\n- Scope: earlier work preserved.\n- Key timeline:\n - user: large preserved context\n"; + let session = Session { + version: 1, + messages: vec![ + ConversationMessage { + role: MessageRole::System, + blocks: vec![ContentBlock::Text { + text: get_compact_continuation_message(summary, true, true), + }], + usage: None, + }, + ConversationMessage::user_text("tiny"), + ConversationMessage::assistant(vec![ContentBlock::Text { + text: "recent".to_string(), + }]), + ], + }; + + assert!(!should_compact( + &session, + CompactionConfig { + preserve_recent_messages: 2, + max_estimated_tokens: 1, + } + )); + } + #[test] fn truncates_long_blocks_in_summary() { let summary = super::summarize_block(&ContentBlock::Text { diff --git a/rust/crates/runtime/src/config.rs b/rust/crates/runtime/src/config.rs index 368e7c5..dfc4d1a 100644 --- a/rust/crates/runtime/src/config.rs +++ b/rust/crates/runtime/src/config.rs @@ -35,9 +35,19 @@ pub struct RuntimeConfig { feature_config: RuntimeFeatureConfig, } +#[derive(Debug, Clone, PartialEq, Eq, Default)] +pub struct RuntimePluginConfig { + enabled_plugins: BTreeMap, + external_directories: Vec, + install_root: Option, + registry_path: Option, + bundled_root: 
Option, +} + #[derive(Debug, Clone, PartialEq, Eq, Default)] pub struct RuntimeFeatureConfig { hooks: RuntimeHookConfig, + plugins: RuntimePluginConfig, mcp: McpConfigCollection, oauth: Option, model: Option, @@ -174,13 +184,15 @@ impl ConfigLoader { #[must_use] pub fn default_for(cwd: impl Into) -> Self { let cwd = cwd.into(); - let config_home = std::env::var_os("CLAUDE_CONFIG_HOME") - .map(PathBuf::from) - .or_else(|| std::env::var_os("HOME").map(|home| PathBuf::from(home).join(".claude"))) - .unwrap_or_else(|| PathBuf::from(".claude")); + let config_home = default_config_home(); Self { cwd, config_home } } + #[must_use] + pub fn config_home(&self) -> &Path { + &self.config_home + } + #[must_use] pub fn discover(&self) -> Vec { let user_legacy_path = self.config_home.parent().map_or_else( @@ -229,6 +241,7 @@ impl ConfigLoader { let feature_config = RuntimeFeatureConfig { hooks: parse_optional_hooks_config(&merged_value)?, + plugins: parse_optional_plugin_config(&merged_value)?, mcp: McpConfigCollection { servers: mcp_servers, }, @@ -291,6 +304,11 @@ impl RuntimeConfig { &self.feature_config.hooks } + #[must_use] + pub fn plugins(&self) -> &RuntimePluginConfig { + &self.feature_config.plugins + } + #[must_use] pub fn oauth(&self) -> Option<&OAuthConfig> { self.feature_config.oauth.as_ref() @@ -319,11 +337,22 @@ impl RuntimeFeatureConfig { self } + #[must_use] + pub fn with_plugins(mut self, plugins: RuntimePluginConfig) -> Self { + self.plugins = plugins; + self + } + #[must_use] pub fn hooks(&self) -> &RuntimeHookConfig { &self.hooks } + #[must_use] + pub fn plugins(&self) -> &RuntimePluginConfig { + &self.plugins + } + #[must_use] pub fn mcp(&self) -> &McpConfigCollection { &self.mcp @@ -350,6 +379,53 @@ impl RuntimeFeatureConfig { } } +impl RuntimePluginConfig { + #[must_use] + pub fn enabled_plugins(&self) -> &BTreeMap { + &self.enabled_plugins + } + + #[must_use] + pub fn external_directories(&self) -> &[String] { + &self.external_directories + } + + 
#[must_use] + pub fn install_root(&self) -> Option<&str> { + self.install_root.as_deref() + } + + #[must_use] + pub fn registry_path(&self) -> Option<&str> { + self.registry_path.as_deref() + } + + #[must_use] + pub fn bundled_root(&self) -> Option<&str> { + self.bundled_root.as_deref() + } + + pub fn set_plugin_state(&mut self, plugin_id: String, enabled: bool) { + self.enabled_plugins.insert(plugin_id, enabled); + } + + #[must_use] + pub fn state_for(&self, plugin_id: &str, default_enabled: bool) -> bool { + self.enabled_plugins + .get(plugin_id) + .copied() + .unwrap_or(default_enabled) + } +} + +#[must_use] +pub fn default_config_home() -> PathBuf { + std::env::var_os("CLAUDE_CONFIG_HOME") + .map(PathBuf::from) + .or_else(|| std::env::var_os("HOME").map(|home| PathBuf::from(home).join(".claude"))) + .unwrap_or_else(|| PathBuf::from(".claude")) +} + impl RuntimeHookConfig { #[must_use] pub fn new(pre_tool_use: Vec, post_tool_use: Vec) -> Self { @@ -368,6 +444,18 @@ impl RuntimeHookConfig { pub fn post_tool_use(&self) -> &[String] { &self.post_tool_use } + + #[must_use] + pub fn merged(&self, other: &Self) -> Self { + let mut merged = self.clone(); + merged.extend(other); + merged + } + + pub fn extend(&mut self, other: &Self) { + extend_unique(&mut self.pre_tool_use, other.pre_tool_use()); + extend_unique(&mut self.post_tool_use, other.post_tool_use()); + } } impl McpConfigCollection { @@ -484,6 +572,36 @@ fn parse_optional_hooks_config(root: &JsonValue) -> Result Result { + let Some(object) = root.as_object() else { + return Ok(RuntimePluginConfig::default()); + }; + + let mut config = RuntimePluginConfig::default(); + if let Some(enabled_plugins) = object.get("enabledPlugins") { + config.enabled_plugins = parse_bool_map(enabled_plugins, "merged settings.enabledPlugins")?; + } + + let Some(plugins_value) = object.get("plugins") else { + return Ok(config); + }; + let plugins = expect_object(plugins_value, "merged settings.plugins")?; + + if let 
Some(enabled_value) = plugins.get("enabled") { + config.enabled_plugins = parse_bool_map(enabled_value, "merged settings.plugins.enabled")?; + } + config.external_directories = + optional_string_array(plugins, "externalDirectories", "merged settings.plugins")? + .unwrap_or_default(); + config.install_root = + optional_string(plugins, "installRoot", "merged settings.plugins")?.map(str::to_string); + config.registry_path = + optional_string(plugins, "registryPath", "merged settings.plugins")?.map(str::to_string); + config.bundled_root = + optional_string(plugins, "bundledRoot", "merged settings.plugins")?.map(str::to_string); + Ok(config) +} + fn parse_optional_permission_mode( root: &JsonValue, ) -> Result, ConfigError> { @@ -716,6 +834,24 @@ fn optional_u16( } } +fn parse_bool_map(value: &JsonValue, context: &str) -> Result, ConfigError> { + let Some(map) = value.as_object() else { + return Err(ConfigError::Parse(format!( + "{context}: expected JSON object" + ))); + }; + map.iter() + .map(|(key, value)| { + value + .as_bool() + .map(|enabled| (key.clone(), enabled)) + .ok_or_else(|| { + ConfigError::Parse(format!("{context}: field {key} must be a boolean")) + }) + }) + .collect() +} + fn optional_string_array( object: &BTreeMap, key: &str, @@ -790,6 +926,18 @@ fn deep_merge_objects( } } +fn extend_unique(target: &mut Vec, values: &[String]) { + for value in values { + push_unique(target, value.clone()); + } +} + +fn push_unique(target: &mut Vec, value: String) { + if !target.iter().any(|existing| existing == &value) { + target.push(value); + } +} + #[cfg(test)] mod tests { use super::{ @@ -1033,6 +1181,96 @@ mod tests { fs::remove_dir_all(root).expect("cleanup temp dir"); } + #[test] + fn parses_plugin_config_from_enabled_plugins() { + let root = temp_dir(); + let cwd = root.join("project"); + let home = root.join("home").join(".claude"); + fs::create_dir_all(cwd.join(".claude")).expect("project config dir"); + fs::create_dir_all(&home).expect("home config dir"); + 
+ fs::write( + home.join("settings.json"), + r#"{ + "enabledPlugins": { + "tool-guard@builtin": true, + "sample-plugin@external": false + } + }"#, + ) + .expect("write user settings"); + + let loaded = ConfigLoader::new(&cwd, &home) + .load() + .expect("config should load"); + + assert_eq!( + loaded.plugins().enabled_plugins().get("tool-guard@builtin"), + Some(&true) + ); + assert_eq!( + loaded + .plugins() + .enabled_plugins() + .get("sample-plugin@external"), + Some(&false) + ); + + fs::remove_dir_all(root).expect("cleanup temp dir"); + } + + #[test] + fn parses_plugin_config() { + let root = temp_dir(); + let cwd = root.join("project"); + let home = root.join("home").join(".claude"); + fs::create_dir_all(cwd.join(".claude")).expect("project config dir"); + fs::create_dir_all(&home).expect("home config dir"); + + fs::write( + home.join("settings.json"), + r#"{ + "enabledPlugins": { + "core-helpers@builtin": true + }, + "plugins": { + "externalDirectories": ["./external-plugins"], + "installRoot": "plugin-cache/installed", + "registryPath": "plugin-cache/installed.json", + "bundledRoot": "./bundled-plugins" + } + }"#, + ) + .expect("write plugin settings"); + + let loaded = ConfigLoader::new(&cwd, &home) + .load() + .expect("config should load"); + + assert_eq!( + loaded + .plugins() + .enabled_plugins() + .get("core-helpers@builtin"), + Some(&true) + ); + assert_eq!( + loaded.plugins().external_directories(), + &["./external-plugins".to_string()] + ); + assert_eq!( + loaded.plugins().install_root(), + Some("plugin-cache/installed") + ); + assert_eq!( + loaded.plugins().registry_path(), + Some("plugin-cache/installed.json") + ); + assert_eq!(loaded.plugins().bundled_root(), Some("./bundled-plugins")); + + fs::remove_dir_all(root).expect("cleanup temp dir"); + } + #[test] fn rejects_invalid_mcp_server_shapes() { let root = temp_dir(); diff --git a/rust/crates/runtime/src/conversation.rs b/rust/crates/runtime/src/conversation.rs index f7a7741..a73f2f4 100644 --- 
a/rust/crates/runtime/src/conversation.rs +++ b/rust/crates/runtime/src/conversation.rs @@ -1,11 +1,13 @@ use std::collections::BTreeMap; use std::fmt::{Display, Formatter}; +use plugins::{HookRunner as PluginHookRunner, PluginRegistry}; + use crate::compact::{ compact_session, estimate_session_tokens, CompactionConfig, CompactionResult, }; use crate::config::RuntimeFeatureConfig; -use crate::hooks::{HookRunResult, HookRunner}; +use crate::hooks::HookRunner; use crate::permissions::{PermissionOutcome, PermissionPolicy, PermissionPrompter}; use crate::session::{ContentBlock, ConversationMessage, Session}; use crate::usage::{TokenUsage, UsageTracker}; @@ -107,6 +109,24 @@ pub struct ConversationRuntime { usage_tracker: UsageTracker, hook_runner: HookRunner, auto_compaction_input_tokens_threshold: u32, + plugin_hook_runner: Option, + plugin_registry: Option, + plugins_shutdown: bool, +} + +impl ConversationRuntime { + fn shutdown_registered_plugins(&mut self) -> Result<(), RuntimeError> { + if self.plugins_shutdown { + return Ok(()); + } + if let Some(registry) = &self.plugin_registry { + registry + .shutdown() + .map_err(|error| RuntimeError::new(format!("plugin shutdown failed: {error}")))?; + } + self.plugins_shutdown = true; + Ok(()) + } } impl ConversationRuntime @@ -133,6 +153,7 @@ where } #[must_use] + #[allow(clippy::needless_pass_by_value)] pub fn new_with_features( session: Session, api_client: C, @@ -152,9 +173,42 @@ where usage_tracker, hook_runner: HookRunner::from_feature_config(&feature_config), auto_compaction_input_tokens_threshold: auto_compaction_threshold_from_env(), + plugin_hook_runner: None, + plugin_registry: None, + plugins_shutdown: false, } } + #[allow(clippy::needless_pass_by_value)] + pub fn new_with_plugins( + session: Session, + api_client: C, + tool_executor: T, + permission_policy: PermissionPolicy, + system_prompt: Vec, + feature_config: RuntimeFeatureConfig, + plugin_registry: PluginRegistry, + ) -> Result { + let plugin_hook_runner 
= + PluginHookRunner::from_registry(&plugin_registry).map_err(|error| { + RuntimeError::new(format!("plugin hook registration failed: {error}")) + })?; + plugin_registry + .initialize() + .map_err(|error| RuntimeError::new(format!("plugin initialization failed: {error}")))?; + let mut runtime = Self::new_with_features( + session, + api_client, + tool_executor, + permission_policy, + system_prompt, + feature_config, + ); + runtime.plugin_hook_runner = Some(plugin_hook_runner); + runtime.plugin_registry = Some(plugin_registry); + Ok(runtime) + } + #[must_use] pub fn with_max_iterations(mut self, max_iterations: usize) -> Self { self.max_iterations = max_iterations; @@ -167,6 +221,7 @@ where self } + #[allow(clippy::too_many_lines)] pub fn run_turn( &mut self, user_input: impl Into, @@ -179,6 +234,7 @@ where let mut assistant_messages = Vec::new(); let mut tool_results = Vec::new(); let mut iterations = 0; + let mut max_turn_input_tokens = 0; loop { iterations += 1; @@ -195,6 +251,7 @@ where let events = self.api_client.stream(request)?; let (assistant_message, usage) = build_assistant_message(events)?; if let Some(usage) = usage { + max_turn_input_tokens = max_turn_input_tokens.max(usage.input_tokens); self.usage_tracker.record(usage); } let pending_tool_uses = assistant_message @@ -231,35 +288,74 @@ where ConversationMessage::tool_result( tool_use_id, tool_name, - format_hook_message(&pre_hook_result, &deny_message), + format_hook_message(pre_hook_result.messages(), &deny_message), true, ) } else { - let (mut output, mut is_error) = - match self.tool_executor.execute(&tool_name, &input) { - Ok(output) => (output, false), - Err(error) => (error.to_string(), true), - }; - output = merge_hook_feedback(pre_hook_result.messages(), output, false); + let plugin_pre_hook_result = + self.run_plugin_pre_tool_use(&tool_name, &input); + if plugin_pre_hook_result.is_denied() { + let deny_message = + format!("PreToolUse hook denied tool `{tool_name}`"); + let mut messages = 
pre_hook_result.messages().to_vec(); + messages.extend(plugin_pre_hook_result.messages().iter().cloned()); + ConversationMessage::tool_result( + tool_use_id, + tool_name, + format_hook_message(&messages, &deny_message), + true, + ) + } else { + let (mut output, mut is_error) = + match self.tool_executor.execute(&tool_name, &input) { + Ok(output) => (output, false), + Err(error) => (error.to_string(), true), + }; + output = + merge_hook_feedback(pre_hook_result.messages(), output, false); + output = merge_hook_feedback( + plugin_pre_hook_result.messages(), + output, + false, + ); - let post_hook_result = self - .hook_runner - .run_post_tool_use(&tool_name, &input, &output, is_error); - if post_hook_result.is_denied() { - is_error = true; + let hook_output = output.clone(); + let post_hook_result = self.hook_runner.run_post_tool_use( + &tool_name, + &input, + &hook_output, + is_error, + ); + let plugin_post_hook_result = self.run_plugin_post_tool_use( + &tool_name, + &input, + &hook_output, + is_error, + ); + if post_hook_result.is_denied() { + is_error = true; + } + if plugin_post_hook_result.is_denied() { + is_error = true; + } + output = merge_hook_feedback( + post_hook_result.messages(), + output, + post_hook_result.is_denied(), + ); + output = merge_hook_feedback( + plugin_post_hook_result.messages(), + output, + plugin_post_hook_result.is_denied(), + ); + + ConversationMessage::tool_result( + tool_use_id, + tool_name, + output, + is_error, + ) } - output = merge_hook_feedback( - post_hook_result.messages(), - output, - post_hook_result.is_denied(), - ); - - ConversationMessage::tool_result( - tool_use_id, - tool_name, - output, - is_error, - ) } } PermissionOutcome::Deny { reason } => { @@ -271,7 +367,7 @@ where } } - let auto_compaction = self.maybe_auto_compact(); + let auto_compaction = self.maybe_auto_compact(max_turn_input_tokens); Ok(TurnSummary { assistant_messages, @@ -303,21 +399,45 @@ where } #[must_use] - pub fn into_session(self) -> Session { - 
self.session + pub fn into_session(mut self) -> Session { + let _ = self.shutdown_registered_plugins(); + std::mem::take(&mut self.session) } - fn maybe_auto_compact(&mut self) -> Option { - if self.usage_tracker.cumulative_usage().input_tokens - < self.auto_compaction_input_tokens_threshold - { + pub fn shutdown_plugins(&mut self) -> Result<(), RuntimeError> { + self.shutdown_registered_plugins() + } + + fn run_plugin_pre_tool_use(&self, tool_name: &str, input: &str) -> plugins::HookRunResult { + self.plugin_hook_runner.as_ref().map_or_else( + || plugins::HookRunResult::allow(Vec::new()), + |runner| runner.run_pre_tool_use(tool_name, input), + ) + } + + fn run_plugin_post_tool_use( + &self, + tool_name: &str, + input: &str, + output: &str, + is_error: bool, + ) -> plugins::HookRunResult { + self.plugin_hook_runner.as_ref().map_or_else( + || plugins::HookRunResult::allow(Vec::new()), + |runner| runner.run_post_tool_use(tool_name, input, output, is_error), + ) + } + + fn maybe_auto_compact(&mut self, turn_input_tokens: u32) -> Option { + if turn_input_tokens < self.auto_compaction_input_tokens_threshold { return None; } let result = compact_session( &self.session, CompactionConfig { - max_estimated_tokens: 0, + max_estimated_tokens: usize::try_from(self.auto_compaction_input_tokens_threshold) + .unwrap_or(usize::MAX), ..CompactionConfig::default() }, ); @@ -333,6 +453,12 @@ where } } +impl Drop for ConversationRuntime { + fn drop(&mut self) { + let _ = self.shutdown_registered_plugins(); + } +} + #[must_use] pub fn auto_compaction_threshold_from_env() -> u32 { parse_auto_compaction_threshold( @@ -397,11 +523,11 @@ fn flush_text_block(text: &mut String, blocks: &mut Vec) { } } -fn format_hook_message(result: &HookRunResult, fallback: &str) -> String { - if result.messages().is_empty() { +fn format_hook_message(messages: &[String], fallback: &str) -> String { + if messages.is_empty() { fallback.to_string() } else { - result.messages().join("\n") + messages.join("\n") 
} } @@ -471,7 +597,13 @@ mod tests { use crate::prompt::{ProjectContext, SystemPromptBuilder}; use crate::session::{ContentBlock, MessageRole, Session}; use crate::usage::TokenUsage; + use plugins::{PluginManager, PluginManagerConfig}; + use std::fs; + #[cfg(unix)] + use std::os::unix::fs::PermissionsExt; + use std::path::Path; use std::path::PathBuf; + use std::time::{SystemTime, UNIX_EPOCH}; struct ScriptedApiClient { call_count: usize, @@ -533,6 +665,68 @@ mod tests { } } + fn temp_dir(label: &str) -> PathBuf { + let nanos = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("time should be after epoch") + .as_nanos(); + std::env::temp_dir().join(format!("runtime-plugin-{label}-{nanos}")) + } + + fn write_lifecycle_plugin(root: &Path, name: &str) -> PathBuf { + fs::create_dir_all(root.join(".claude-plugin")).expect("manifest dir"); + fs::create_dir_all(root.join("lifecycle")).expect("lifecycle dir"); + let log_path = root.join("lifecycle.log"); + fs::write( + root.join("lifecycle").join("init.sh"), + "#!/bin/sh\nprintf 'init\\n' >> lifecycle.log\n", + ) + .expect("write init script"); + fs::write( + root.join("lifecycle").join("shutdown.sh"), + "#!/bin/sh\nprintf 'shutdown\\n' >> lifecycle.log\n", + ) + .expect("write shutdown script"); + fs::write( + root.join(".claude-plugin").join("plugin.json"), + format!( + "{{\n \"name\": \"{name}\",\n \"version\": \"1.0.0\",\n \"description\": \"runtime lifecycle plugin\",\n \"lifecycle\": {{\n \"Init\": [\"./lifecycle/init.sh\"],\n \"Shutdown\": [\"./lifecycle/shutdown.sh\"]\n }}\n}}" + ), + ) + .expect("write plugin manifest"); + log_path + } + + fn write_hook_plugin(root: &Path, name: &str, pre_message: &str, post_message: &str) { + fs::create_dir_all(root.join(".claude-plugin")).expect("manifest dir"); + fs::create_dir_all(root.join("hooks")).expect("hooks dir"); + fs::write( + root.join("hooks").join("pre.sh"), + format!("#!/bin/sh\nprintf '%s\\n' '{pre_message}'\n"), + ) + .expect("write pre hook"); + 
fs::write( + root.join("hooks").join("post.sh"), + format!("#!/bin/sh\nprintf '%s\\n' '{post_message}'\n"), + ) + .expect("write post hook"); + #[cfg(unix)] + { + let exec_mode = fs::Permissions::from_mode(0o755); + fs::set_permissions(root.join("hooks").join("pre.sh"), exec_mode.clone()) + .expect("chmod pre hook"); + fs::set_permissions(root.join("hooks").join("post.sh"), exec_mode) + .expect("chmod post hook"); + } + fs::write( + root.join(".claude-plugin").join("plugin.json"), + format!( + "{{\n \"name\": \"{name}\",\n \"version\": \"1.0.0\",\n \"description\": \"runtime hook plugin\",\n \"hooks\": {{\n \"PreToolUse\": [\"./hooks/pre.sh\"],\n \"PostToolUse\": [\"./hooks/post.sh\"]\n }}\n}}" + ), + ) + .expect("write plugin manifest"); + } + #[test] fn runs_user_to_tool_to_result_loop_end_to_end_and_tracks_usage() { let api_client = ScriptedApiClient { call_count: 0 }; @@ -761,7 +955,7 @@ mod tests { "post hook should preserve non-error result: {output:?}" ); assert!( - output.contains("4"), + output.contains('4'), "tool output missing value: {output:?}" ); assert!( @@ -774,6 +968,153 @@ mod tests { ); } + #[test] + fn initializes_and_shuts_down_plugins_with_runtime_lifecycle() { + let config_home = temp_dir("config"); + let source_root = temp_dir("source"); + let _ = write_lifecycle_plugin(&source_root, "runtime-lifecycle"); + + let mut manager = PluginManager::new(PluginManagerConfig::new(&config_home)); + let install = manager + .install(source_root.to_str().expect("utf8 path")) + .expect("install should succeed"); + let log_path = install.install_path.join("lifecycle.log"); + let registry = manager.plugin_registry().expect("registry should load"); + + { + let runtime = ConversationRuntime::new_with_plugins( + Session::new(), + ScriptedApiClient { call_count: 0 }, + StaticToolExecutor::new().register("add", |_input| Ok("4".to_string())), + PermissionPolicy::new(PermissionMode::WorkspaceWrite), + vec!["system".to_string()], + RuntimeFeatureConfig::default(), + 
registry, + ) + .expect("runtime should initialize plugins"); + + let log = fs::read_to_string(&log_path).expect("init log should exist"); + assert_eq!(log, "init\n"); + drop(runtime); + } + + let log = fs::read_to_string(&log_path).expect("shutdown log should exist"); + assert_eq!(log, "init\nshutdown\n"); + + let _ = fs::remove_dir_all(config_home); + let _ = fs::remove_dir_all(source_root); + } + + #[test] + fn executes_hooks_from_installed_plugins_during_tool_use() { + struct TwoCallApiClient { + calls: usize, + } + + impl ApiClient for TwoCallApiClient { + fn stream(&mut self, request: ApiRequest) -> Result, RuntimeError> { + self.calls += 1; + match self.calls { + 1 => Ok(vec![ + AssistantEvent::ToolUse { + id: "tool-1".to_string(), + name: "add".to_string(), + input: r#"{"lhs":2,"rhs":2}"#.to_string(), + }, + AssistantEvent::MessageStop, + ]), + 2 => { + assert!(request + .messages + .iter() + .any(|message| message.role == MessageRole::Tool)); + Ok(vec![ + AssistantEvent::TextDelta("done".to_string()), + AssistantEvent::MessageStop, + ]) + } + _ => Err(RuntimeError::new("unexpected extra API call")), + } + } + } + + let config_home = temp_dir("hook-config"); + let first_source_root = temp_dir("hook-source-a"); + let second_source_root = temp_dir("hook-source-b"); + write_hook_plugin( + &first_source_root, + "first", + "plugin pre one", + "plugin post one", + ); + write_hook_plugin( + &second_source_root, + "second", + "plugin pre two", + "plugin post two", + ); + + let mut manager = PluginManager::new(PluginManagerConfig::new(&config_home)); + manager + .install(first_source_root.to_str().expect("utf8 path")) + .expect("first plugin install should succeed"); + manager + .install(second_source_root.to_str().expect("utf8 path")) + .expect("second plugin install should succeed"); + let registry = manager.plugin_registry().expect("registry should load"); + + let mut runtime = ConversationRuntime::new_with_plugins( + Session::new(), + TwoCallApiClient { calls: 0 
}, + StaticToolExecutor::new().register("add", |_input| Ok("4".to_string())), + PermissionPolicy::new(PermissionMode::DangerFullAccess), + vec!["system".to_string()], + RuntimeFeatureConfig::default(), + registry, + ) + .expect("runtime should load plugin hooks"); + + let summary = runtime + .run_turn("use add", None) + .expect("tool loop succeeds"); + + assert_eq!(summary.tool_results.len(), 1); + let ContentBlock::ToolResult { + is_error, output, .. + } = &summary.tool_results[0].blocks[0] + else { + panic!("expected tool result block"); + }; + assert!( + !*is_error, + "plugin hooks should not force an error: {output:?}" + ); + assert!( + output.contains('4'), + "tool output missing value: {output:?}" + ); + assert!( + output.contains("plugin pre one"), + "tool output missing first pre hook feedback: {output:?}" + ); + assert!( + output.contains("plugin pre two"), + "tool output missing second pre hook feedback: {output:?}" + ); + assert!( + output.contains("plugin post one"), + "tool output missing first post hook feedback: {output:?}" + ); + assert!( + output.contains("plugin post two"), + "tool output missing second post hook feedback: {output:?}" + ); + + let _ = fs::remove_dir_all(config_home); + let _ = fs::remove_dir_all(first_source_root); + let _ = fs::remove_dir_all(second_source_root); + } + #[test] fn reconstructs_usage_tracker_from_restored_session() { struct SimpleApi; @@ -864,7 +1205,7 @@ mod tests { } #[test] - fn auto_compacts_when_cumulative_input_threshold_is_crossed() { + fn auto_compacts_when_turn_input_threshold_is_crossed() { struct SimpleApi; impl ApiClient for SimpleApi { fn stream( @@ -887,13 +1228,13 @@ mod tests { let session = Session { version: 1, messages: vec![ - crate::session::ConversationMessage::user_text("one"), + crate::session::ConversationMessage::user_text("one ".repeat(30_000)), crate::session::ConversationMessage::assistant(vec![ContentBlock::Text { - text: "two".to_string(), + text: "two ".repeat(30_000), }]), - 
crate::session::ConversationMessage::user_text("three"), + crate::session::ConversationMessage::user_text("three ".repeat(30_000)), crate::session::ConversationMessage::assistant(vec![ContentBlock::Text { - text: "four".to_string(), + text: "four ".repeat(30_000), }]), ], }; @@ -920,6 +1261,72 @@ mod tests { assert_eq!(runtime.session().messages[0].role, MessageRole::System); } + #[test] + fn auto_compaction_does_not_repeat_after_context_is_already_compacted() { + struct SequentialUsageApi { + call_count: usize, + } + + impl ApiClient for SequentialUsageApi { + fn stream( + &mut self, + _request: ApiRequest, + ) -> Result, RuntimeError> { + self.call_count += 1; + let input_tokens = if self.call_count == 1 { 120_000 } else { 64 }; + Ok(vec![ + AssistantEvent::TextDelta("done".to_string()), + AssistantEvent::Usage(TokenUsage { + input_tokens, + output_tokens: 4, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }), + AssistantEvent::MessageStop, + ]) + } + } + + let session = Session { + version: 1, + messages: vec![ + crate::session::ConversationMessage::user_text("one ".repeat(30_000)), + crate::session::ConversationMessage::assistant(vec![ContentBlock::Text { + text: "two ".repeat(30_000), + }]), + crate::session::ConversationMessage::user_text("three ".repeat(30_000)), + crate::session::ConversationMessage::assistant(vec![ContentBlock::Text { + text: "four ".repeat(30_000), + }]), + ], + }; + + let mut runtime = ConversationRuntime::new( + session, + SequentialUsageApi { call_count: 0 }, + StaticToolExecutor::new(), + PermissionPolicy::new(PermissionMode::DangerFullAccess), + vec!["system".to_string()], + ) + .with_auto_compaction_input_tokens_threshold(100_000); + + let first = runtime + .run_turn("trigger", None) + .expect("first turn should succeed"); + assert_eq!( + first.auto_compaction, + Some(AutoCompactionEvent { + removed_message_count: 2, + }) + ); + + let second = runtime + .run_turn("continue", None) + .expect("second turn should 
succeed"); + assert_eq!(second.auto_compaction, None); + assert_eq!(runtime.session().messages[0].role, MessageRole::System); + } + #[test] fn skips_auto_compaction_below_threshold() { struct SimpleApi; diff --git a/rust/crates/runtime/src/hooks.rs b/rust/crates/runtime/src/hooks.rs index 36756a0..4aff002 100644 --- a/rust/crates/runtime/src/hooks.rs +++ b/rust/crates/runtime/src/hooks.rs @@ -1,4 +1,5 @@ use std::ffi::OsStr; +use std::path::Path; use std::process::Command; use serde_json::json; @@ -149,6 +150,7 @@ impl HookRunner { HookRunResult::allow(messages) } + #[allow(clippy::too_many_arguments, clippy::unused_self)] fn run_command( &self, command: &str, @@ -237,7 +239,11 @@ fn shell_command(command: &str) -> CommandWithStdin { }; #[cfg(not(windows))] - let command_builder = { + let command_builder = if Path::new(command).exists() { + let mut command_builder = Command::new("sh"); + command_builder.arg(command); + CommandWithStdin::new(command_builder) + } else { let mut command_builder = Command::new("sh"); command_builder.arg("-lc").arg(command); CommandWithStdin::new(command_builder) diff --git a/rust/crates/runtime/src/lib.rs b/rust/crates/runtime/src/lib.rs index 618923f..edac666 100644 --- a/rust/crates/runtime/src/lib.rs +++ b/rust/crates/runtime/src/lib.rs @@ -28,7 +28,7 @@ pub use config::{ McpConfigCollection, McpOAuthConfig, McpRemoteServerConfig, McpSdkServerConfig, McpServerConfig, McpStdioServerConfig, McpTransport, McpWebSocketServerConfig, OAuthConfig, ResolvedPermissionMode, RuntimeConfig, RuntimeFeatureConfig, RuntimeHookConfig, - ScopedMcpServerConfig, CLAUDE_CODE_SETTINGS_SCHEMA_NAME, + RuntimePluginConfig, ScopedMcpServerConfig, CLAUDE_CODE_SETTINGS_SCHEMA_NAME, }; pub use conversation::{ auto_compaction_threshold_from_env, ApiClient, ApiRequest, AssistantEvent, AutoCompactionEvent, diff --git a/rust/crates/runtime/src/prompt.rs b/rust/crates/runtime/src/prompt.rs index 6cfda44..91a3afc 100644 --- a/rust/crates/runtime/src/prompt.rs +++ 
b/rust/crates/runtime/src/prompt.rs @@ -421,7 +421,7 @@ fn render_config_section(config: &RuntimeConfig) -> String { let mut lines = vec!["# Runtime config".to_string()]; if config.loaded_entries().is_empty() { lines.extend(prepend_bullets(vec![ - "No Claw Code settings files loaded.".to_string(), + "No Claw Code settings files loaded.".to_string() ])); return lines.join("\n"); } diff --git a/rust/crates/rusty-claude-cli/Cargo.toml b/rust/crates/rusty-claude-cli/Cargo.toml index 2ac6701..242ec0f 100644 --- a/rust/crates/rusty-claude-cli/Cargo.toml +++ b/rust/crates/rusty-claude-cli/Cargo.toml @@ -17,6 +17,7 @@ crossterm = "0.28" pulldown-cmark = "0.13" rustyline = "15" runtime = { path = "../runtime" } +plugins = { path = "../plugins" } serde_json = "1" syntect = "5" tokio = { version = "1", features = ["rt-multi-thread", "time"] } diff --git a/rust/crates/rusty-claude-cli/src/main.rs b/rust/crates/rusty-claude-cli/src/main.rs index 7cb70de..b00951f 100644 --- a/rust/crates/rusty-claude-cli/src/main.rs +++ b/rust/crates/rusty-claude-cli/src/main.rs @@ -2,14 +2,18 @@ mod init; mod input; mod render; -use std::collections::{BTreeMap, BTreeSet}; +use std::collections::BTreeSet; use std::env; +use std::fmt::Write as _; use std::fs; use std::io::{self, Read, Write}; use std::net::TcpListener; use std::path::{Path, PathBuf}; use std::process::Command; -use std::time::{SystemTime, UNIX_EPOCH}; +use std::sync::mpsc::{self, RecvTimeoutError}; +use std::sync::{Arc, Mutex}; +use std::thread; +use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; use api::{ resolve_startup_auth_source, AnthropicClient, AuthSource, ContentBlockDelta, InputContentBlock, @@ -18,10 +22,12 @@ use api::{ }; use commands::{ - render_slash_command_help, resume_supported_slash_commands, slash_command_specs, SlashCommand, + handle_plugins_slash_command, render_slash_command_help, resume_supported_slash_commands, + slash_command_specs, SlashCommand, }; use compat_harness::{extract_manifest, 
UpstreamPaths}; use init::initialize_repo; +use plugins::{PluginManager, PluginManagerConfig, PluginRegistry}; use render::{MarkdownStreamState, Spinner, TerminalRenderer}; use runtime::{ clear_oauth_credentials, generate_pkce_pair, generate_state, load_system_prompt, @@ -32,7 +38,7 @@ use runtime::{ Session, TokenUsage, ToolError, ToolExecutor, UsageTracker, }; use serde_json::json; -use tools::{execute_tool, mvp_tool_specs, ToolSpec}; +use tools::GlobalToolRegistry; const DEFAULT_MODEL: &str = "claude-opus-4-6"; fn max_tokens_for_model(model: &str) -> u32 { @@ -47,6 +53,7 @@ const DEFAULT_OAUTH_CALLBACK_PORT: u16 = 4545; const VERSION: &str = env!("CARGO_PKG_VERSION"); const BUILD_TARGET: Option<&str> = option_env!("TARGET"); const GIT_SHA: Option<&str> = option_env!("GIT_SHA"); +const INTERNAL_PROGRESS_HEARTBEAT_INTERVAL: Duration = Duration::from_secs(3); type AllowedToolSet = BTreeSet; @@ -298,51 +305,18 @@ fn resolve_model_alias(model: &str) -> &str { } fn normalize_allowed_tools(values: &[String]) -> Result, String> { - if values.is_empty() { - return Ok(None); - } - - let canonical_names = mvp_tool_specs() - .into_iter() - .map(|spec| spec.name.to_string()) - .collect::>(); - let mut name_map = canonical_names - .iter() - .map(|name| (normalize_tool_name(name), name.clone())) - .collect::>(); - - for (alias, canonical) in [ - ("read", "read_file"), - ("write", "write_file"), - ("edit", "edit_file"), - ("glob", "glob_search"), - ("grep", "grep_search"), - ] { - name_map.insert(alias.to_string(), canonical.to_string()); - } - - let mut allowed = AllowedToolSet::new(); - for value in values { - for token in value - .split(|ch: char| ch == ',' || ch.is_whitespace()) - .filter(|token| !token.is_empty()) - { - let normalized = normalize_tool_name(token); - let canonical = name_map.get(&normalized).ok_or_else(|| { - format!( - "unsupported tool in --allowedTools: {token} (expected one of: {})", - canonical_names.join(", ") - ) - })?; - 
allowed.insert(canonical.clone()); - } - } - - Ok(Some(allowed)) + current_tool_registry()?.normalize_allowed_tools(values) } -fn normalize_tool_name(value: &str) -> String { - value.trim().replace('-', "_").to_ascii_lowercase() +fn current_tool_registry() -> Result { + let cwd = env::current_dir().map_err(|error| error.to_string())?; + let loader = ConfigLoader::default_for(&cwd); + let runtime_config = loader.load().map_err(|error| error.to_string())?; + let plugin_manager = build_plugin_manager(&cwd, &loader, &runtime_config); + let plugin_tools = plugin_manager + .aggregated_tools() + .map_err(|error| error.to_string())?; + GlobalToolRegistry::with_plugin_tools(plugin_tools) } fn parse_permission_mode_arg(value: &str) -> Result { @@ -372,11 +346,11 @@ fn default_permission_mode() -> PermissionMode { .map_or(PermissionMode::DangerFullAccess, permission_mode_from_label) } -fn filter_tool_specs(allowed_tools: Option<&AllowedToolSet>) -> Vec { - mvp_tool_specs() - .into_iter() - .filter(|spec| allowed_tools.is_none_or(|allowed| allowed.contains(spec.name))) - .collect() +fn filter_tool_specs( + tool_registry: &GlobalToolRegistry, + allowed_tools: Option<&AllowedToolSet>, +) -> Vec { + tool_registry.definitions(allowed_tools) } fn parse_system_prompt_args(args: &[String]) -> Result { @@ -928,6 +902,7 @@ fn run_resume_command( | SlashCommand::Model { .. } | SlashCommand::Permissions { .. } | SlashCommand::Session { .. } + | SlashCommand::Plugins { .. 
} | SlashCommand::Unknown(_) => Err("unsupported resumed slash command".into()), } } @@ -1012,6 +987,7 @@ impl LiveCli { true, allowed_tools.clone(), permission_mode, + None, )?; let cli = Self { model, @@ -1109,6 +1085,7 @@ impl LiveCli { false, self.allowed_tools.clone(), self.permission_mode, + None, )?; let mut permission_prompter = CliPermissionPrompter::new(self.permission_mode); let summary = runtime.run_turn(input, Some(&mut permission_prompter))?; @@ -1217,6 +1194,9 @@ impl LiveCli { SlashCommand::Session { action, target } => { self.handle_session_command(action.as_deref(), target.as_deref())? } + SlashCommand::Plugins { action, target } => { + self.handle_plugins_command(action.as_deref(), target.as_deref())? + } SlashCommand::Unknown(name) => { eprintln!("unknown slash command: /{name}"); false @@ -1287,6 +1267,7 @@ impl LiveCli { true, self.allowed_tools.clone(), self.permission_mode, + None, )?; self.model.clone_from(&model); println!( @@ -1330,6 +1311,7 @@ impl LiveCli { true, self.allowed_tools.clone(), self.permission_mode, + None, )?; println!( "{}", @@ -1355,6 +1337,7 @@ impl LiveCli { true, self.allowed_tools.clone(), self.permission_mode, + None, )?; println!( "Session cleared\n Mode fresh session\n Preserved model {}\n Permission mode {}\n Session {}", @@ -1390,6 +1373,7 @@ impl LiveCli { true, self.allowed_tools.clone(), self.permission_mode, + None, )?; self.session = handle; println!( @@ -1462,6 +1446,7 @@ impl LiveCli { true, self.allowed_tools.clone(), self.permission_mode, + None, )?; self.session = handle; println!( @@ -1479,6 +1464,37 @@ impl LiveCli { } } + fn handle_plugins_command( + &mut self, + action: Option<&str>, + target: Option<&str>, + ) -> Result> { + let cwd = env::current_dir()?; + let loader = ConfigLoader::default_for(&cwd); + let runtime_config = loader.load()?; + let mut manager = build_plugin_manager(&cwd, &loader, &runtime_config); + let result = handle_plugins_slash_command(action, target, &mut manager)?; + 
println!("{}", result.message); + if result.reload_runtime { + self.reload_runtime_features()?; + } + Ok(false) + } + + fn reload_runtime_features(&mut self) -> Result<(), Box> { + self.runtime = build_runtime( + self.runtime.session().clone(), + self.model.clone(), + self.system_prompt.clone(), + true, + true, + self.allowed_tools.clone(), + self.permission_mode, + None, + )?; + self.persist_session() + } + fn compact(&mut self) -> Result<(), Box> { let result = self.runtime.compact(CompactionConfig::default()); let removed = result.removed_message_count; @@ -1492,16 +1508,18 @@ impl LiveCli { true, self.allowed_tools.clone(), self.permission_mode, + None, )?; self.persist_session()?; println!("{}", format_compact_report(removed, kept, skipped)); Ok(()) } - fn run_internal_prompt_text( + fn run_internal_prompt_text_with_progress( &self, prompt: &str, enable_tools: bool, + progress: Option, ) -> Result> { let session = self.runtime.session().clone(); let mut runtime = build_runtime( @@ -1512,12 +1530,21 @@ impl LiveCli { false, self.allowed_tools.clone(), self.permission_mode, + progress, )?; let mut permission_prompter = CliPermissionPrompter::new(self.permission_mode); let summary = runtime.run_turn(prompt, Some(&mut permission_prompter))?; Ok(final_assistant_text(&summary).trim().to_string()) } + fn run_internal_prompt_text( + &self, + prompt: &str, + enable_tools: bool, + ) -> Result> { + self.run_internal_prompt_text_with_progress(prompt, enable_tools, None) + } + fn run_bughunter(&self, scope: Option<&str>) -> Result<(), Box> { let scope = scope.unwrap_or("the current repository"); let prompt = format!( @@ -1532,10 +1559,22 @@ impl LiveCli { let prompt = format!( "You are /ultraplan. Produce a deep multi-step execution plan for {task}. Include goals, risks, implementation sequence, verification steps, and rollback considerations. Use tools if needed." 
); - println!("{}", self.run_internal_prompt_text(&prompt, true)?); - Ok(()) + let mut progress = InternalPromptProgressRun::start_ultraplan(task); + match self.run_internal_prompt_text_with_progress(&prompt, true, Some(progress.reporter())) + { + Ok(plan) => { + progress.finish_success(); + println!("{plan}"); + Ok(()) + } + Err(error) => { + progress.finish_failure(&error.to_string()); + Err(error) + } + } } + #[allow(clippy::unused_self)] fn run_teleport(&self, target: Option<&str>) -> Result<(), Box> { let Some(target) = target.map(str::trim).filter(|value| !value.is_empty()) else { println!("Usage: /teleport "); @@ -1894,9 +1933,12 @@ fn render_config_report(section: Option<&str>) -> Result runtime_config.get("env"), "hooks" => runtime_config.get("hooks"), "model" => runtime_config.get("model"), + "plugins" => runtime_config + .get("plugins") + .or_else(|| runtime_config.get("enabledPlugins")), other => { lines.push(format!( - " Unsupported config section '{other}'. Use env, hooks, or model." + " Unsupported config section '{other}'. Use env, hooks, model, or plugins." )); return Ok(lines.join( " @@ -2306,15 +2348,392 @@ fn build_system_prompt() -> Result, Box> { )?) } -fn build_runtime_feature_config( -) -> Result> { +fn build_runtime_plugin_state() -> Result< + ( + runtime::RuntimeFeatureConfig, + PluginRegistry, + GlobalToolRegistry, + ), + Box, +> { let cwd = env::current_dir()?; - Ok(ConfigLoader::default_for(cwd) - .load()? 
- .feature_config() - .clone()) + let loader = ConfigLoader::default_for(&cwd); + let runtime_config = loader.load()?; + let plugin_manager = build_plugin_manager(&cwd, &loader, &runtime_config); + let plugin_registry = plugin_manager.plugin_registry()?; + let tool_registry = GlobalToolRegistry::with_plugin_tools(plugin_registry.aggregated_tools()?)?; + Ok(( + runtime_config.feature_config().clone(), + plugin_registry, + tool_registry, + )) } +fn build_plugin_manager( + cwd: &Path, + loader: &ConfigLoader, + runtime_config: &runtime::RuntimeConfig, +) -> PluginManager { + let plugin_settings = runtime_config.plugins(); + let mut plugin_config = PluginManagerConfig::new(loader.config_home().to_path_buf()); + plugin_config.enabled_plugins = plugin_settings.enabled_plugins().clone(); + plugin_config.external_dirs = plugin_settings + .external_directories() + .iter() + .map(|path| resolve_plugin_path(cwd, loader.config_home(), path)) + .collect(); + plugin_config.install_root = plugin_settings + .install_root() + .map(|path| resolve_plugin_path(cwd, loader.config_home(), path)); + plugin_config.registry_path = plugin_settings + .registry_path() + .map(|path| resolve_plugin_path(cwd, loader.config_home(), path)); + plugin_config.bundled_root = plugin_settings + .bundled_root() + .map(|path| resolve_plugin_path(cwd, loader.config_home(), path)); + PluginManager::new(plugin_config) +} + +fn resolve_plugin_path(cwd: &Path, config_home: &Path, value: &str) -> PathBuf { + let path = PathBuf::from(value); + if path.is_absolute() { + path + } else if value.starts_with('.') { + cwd.join(path) + } else { + config_home.join(path) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct InternalPromptProgressState { + command_label: &'static str, + task_label: String, + step: usize, + phase: String, + detail: Option, + saw_final_text: bool, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum InternalPromptProgressEvent { + Started, + Update, + Heartbeat, + Complete, + Failed, 
+} + +#[derive(Debug)] +struct InternalPromptProgressShared { + state: Mutex, + output_lock: Mutex<()>, + started_at: Instant, +} + +#[derive(Debug, Clone)] +struct InternalPromptProgressReporter { + shared: Arc, +} + +#[derive(Debug)] +struct InternalPromptProgressRun { + reporter: InternalPromptProgressReporter, + heartbeat_stop: Option>, + heartbeat_handle: Option>, +} + +impl InternalPromptProgressReporter { + fn ultraplan(task: &str) -> Self { + Self { + shared: Arc::new(InternalPromptProgressShared { + state: Mutex::new(InternalPromptProgressState { + command_label: "Ultraplan", + task_label: task.to_string(), + step: 0, + phase: "planning started".to_string(), + detail: Some(format!("task: {task}")), + saw_final_text: false, + }), + output_lock: Mutex::new(()), + started_at: Instant::now(), + }), + } + } + + fn emit(&self, event: InternalPromptProgressEvent, error: Option<&str>) { + let snapshot = self.snapshot(); + let line = format_internal_prompt_progress_line(event, &snapshot, self.elapsed(), error); + self.write_line(&line); + } + + fn mark_model_phase(&self) { + let snapshot = { + let mut state = self + .shared + .state + .lock() + .expect("internal prompt progress state poisoned"); + state.step += 1; + state.phase = if state.step == 1 { + "analyzing request".to_string() + } else { + "reviewing findings".to_string() + }; + state.detail = Some(format!("task: {}", state.task_label)); + state.clone() + }; + self.write_line(&format_internal_prompt_progress_line( + InternalPromptProgressEvent::Update, + &snapshot, + self.elapsed(), + None, + )); + } + + fn mark_tool_phase(&self, name: &str, input: &str) { + let detail = describe_tool_progress(name, input); + let snapshot = { + let mut state = self + .shared + .state + .lock() + .expect("internal prompt progress state poisoned"); + state.step += 1; + state.phase = format!("running {name}"); + state.detail = Some(detail); + state.clone() + }; + self.write_line(&format_internal_prompt_progress_line( + 
InternalPromptProgressEvent::Update, + &snapshot, + self.elapsed(), + None, + )); + } + + fn mark_text_phase(&self, text: &str) { + let trimmed = text.trim(); + if trimmed.is_empty() { + return; + } + let detail = truncate_for_summary(first_visible_line(trimmed), 120); + let snapshot = { + let mut state = self + .shared + .state + .lock() + .expect("internal prompt progress state poisoned"); + if state.saw_final_text { + return; + } + state.saw_final_text = true; + state.step += 1; + state.phase = "drafting final plan".to_string(); + state.detail = (!detail.is_empty()).then_some(detail); + state.clone() + }; + self.write_line(&format_internal_prompt_progress_line( + InternalPromptProgressEvent::Update, + &snapshot, + self.elapsed(), + None, + )); + } + + fn emit_heartbeat(&self) { + let snapshot = self.snapshot(); + self.write_line(&format_internal_prompt_progress_line( + InternalPromptProgressEvent::Heartbeat, + &snapshot, + self.elapsed(), + None, + )); + } + + fn snapshot(&self) -> InternalPromptProgressState { + self.shared + .state + .lock() + .expect("internal prompt progress state poisoned") + .clone() + } + + fn elapsed(&self) -> Duration { + self.shared.started_at.elapsed() + } + + fn write_line(&self, line: &str) { + let _guard = self + .shared + .output_lock + .lock() + .expect("internal prompt progress output lock poisoned"); + let mut stdout = io::stdout(); + let _ = writeln!(stdout, "{line}"); + let _ = stdout.flush(); + } +} + +impl InternalPromptProgressRun { + fn start_ultraplan(task: &str) -> Self { + let reporter = InternalPromptProgressReporter::ultraplan(task); + reporter.emit(InternalPromptProgressEvent::Started, None); + + let (heartbeat_stop, heartbeat_rx) = mpsc::channel(); + let heartbeat_reporter = reporter.clone(); + let heartbeat_handle = thread::spawn(move || loop { + match heartbeat_rx.recv_timeout(INTERNAL_PROGRESS_HEARTBEAT_INTERVAL) { + Ok(()) | Err(RecvTimeoutError::Disconnected) => break, + Err(RecvTimeoutError::Timeout) => 
heartbeat_reporter.emit_heartbeat(), + } + }); + + Self { + reporter, + heartbeat_stop: Some(heartbeat_stop), + heartbeat_handle: Some(heartbeat_handle), + } + } + + fn reporter(&self) -> InternalPromptProgressReporter { + self.reporter.clone() + } + + fn finish_success(&mut self) { + self.stop_heartbeat(); + self.reporter + .emit(InternalPromptProgressEvent::Complete, None); + } + + fn finish_failure(&mut self, error: &str) { + self.stop_heartbeat(); + self.reporter + .emit(InternalPromptProgressEvent::Failed, Some(error)); + } + + fn stop_heartbeat(&mut self) { + if let Some(sender) = self.heartbeat_stop.take() { + let _ = sender.send(()); + } + if let Some(handle) = self.heartbeat_handle.take() { + let _ = handle.join(); + } + } +} + +impl Drop for InternalPromptProgressRun { + fn drop(&mut self) { + self.stop_heartbeat(); + } +} + +fn format_internal_prompt_progress_line( + event: InternalPromptProgressEvent, + snapshot: &InternalPromptProgressState, + elapsed: Duration, + error: Option<&str>, +) -> String { + let elapsed_seconds = elapsed.as_secs(); + let step_label = if snapshot.step == 0 { + "current step pending".to_string() + } else { + format!("current step {}", snapshot.step) + }; + let mut status_bits = vec![step_label, format!("phase {}", snapshot.phase)]; + if let Some(detail) = snapshot + .detail + .as_deref() + .filter(|detail| !detail.is_empty()) + { + status_bits.push(detail.to_string()); + } + let status = status_bits.join(" · "); + match event { + InternalPromptProgressEvent::Started => { + format!( + "🧭 {} status · planning started · {status}", + snapshot.command_label + ) + } + InternalPromptProgressEvent::Update => { + format!("… {} status · {status}", snapshot.command_label) + } + InternalPromptProgressEvent::Heartbeat => format!( + "… {} heartbeat · {elapsed_seconds}s elapsed · {status}", + snapshot.command_label + ), + InternalPromptProgressEvent::Complete => format!( + "✔ {} status · completed · {elapsed_seconds}s elapsed · {} steps 
total", + snapshot.command_label, snapshot.step + ), + InternalPromptProgressEvent::Failed => format!( + "✘ {} status · failed · {elapsed_seconds}s elapsed · {}", + snapshot.command_label, + error.unwrap_or("unknown error") + ), + } +} + +fn describe_tool_progress(name: &str, input: &str) -> String { + let parsed: serde_json::Value = + serde_json::from_str(input).unwrap_or(serde_json::Value::String(input.to_string())); + match name { + "bash" | "Bash" => { + let command = parsed + .get("command") + .and_then(|value| value.as_str()) + .unwrap_or_default(); + if command.is_empty() { + "running shell command".to_string() + } else { + format!("command {}", truncate_for_summary(command.trim(), 100)) + } + } + "read_file" | "Read" => format!("reading {}", extract_tool_path(&parsed)), + "write_file" | "Write" => format!("writing {}", extract_tool_path(&parsed)), + "edit_file" | "Edit" => format!("editing {}", extract_tool_path(&parsed)), + "glob_search" | "Glob" => { + let pattern = parsed + .get("pattern") + .and_then(|value| value.as_str()) + .unwrap_or("?"); + let scope = parsed + .get("path") + .and_then(|value| value.as_str()) + .unwrap_or("."); + format!("glob `{pattern}` in {scope}") + } + "grep_search" | "Grep" => { + let pattern = parsed + .get("pattern") + .and_then(|value| value.as_str()) + .unwrap_or("?"); + let scope = parsed + .get("path") + .and_then(|value| value.as_str()) + .unwrap_or("."); + format!("grep `{pattern}` in {scope}") + } + "web_search" | "WebSearch" => parsed + .get("query") + .and_then(|value| value.as_str()) + .map_or_else( + || "running web search".to_string(), + |query| format!("query {}", truncate_for_summary(query, 100)), + ), + _ => { + let summary = summarize_tool_payload(input); + if summary.is_empty() { + format!("running {name}") + } else { + format!("{name}: {summary}") + } + } + } +} + +#[allow(clippy::needless_pass_by_value)] fn build_runtime( session: Session, model: String, @@ -2323,16 +2742,26 @@ fn build_runtime( 
emit_output: bool, allowed_tools: Option, permission_mode: PermissionMode, + progress_reporter: Option, ) -> Result, Box> { - Ok(ConversationRuntime::new_with_features( + let (feature_config, plugin_registry, tool_registry) = build_runtime_plugin_state()?; + Ok(ConversationRuntime::new_with_plugins( session, - AnthropicRuntimeClient::new(model, enable_tools, emit_output, allowed_tools.clone())?, - CliToolExecutor::new(allowed_tools, emit_output), - permission_policy(permission_mode), + AnthropicRuntimeClient::new( + model, + enable_tools, + emit_output, + allowed_tools.clone(), + tool_registry.clone(), + progress_reporter, + )?, + CliToolExecutor::new(allowed_tools.clone(), emit_output, tool_registry.clone()), + permission_policy(permission_mode, &tool_registry), system_prompt, - build_runtime_feature_config()?, - )) + feature_config, + plugin_registry, + )?) } struct CliPermissionPrompter { @@ -2388,6 +2817,8 @@ struct AnthropicRuntimeClient { enable_tools: bool, emit_output: bool, allowed_tools: Option, + tool_registry: GlobalToolRegistry, + progress_reporter: Option, } impl AnthropicRuntimeClient { @@ -2396,6 +2827,8 @@ impl AnthropicRuntimeClient { enable_tools: bool, emit_output: bool, allowed_tools: Option, + tool_registry: GlobalToolRegistry, + progress_reporter: Option, ) -> Result> { Ok(Self { runtime: tokio::runtime::Runtime::new()?, @@ -2405,6 +2838,8 @@ impl AnthropicRuntimeClient { enable_tools, emit_output, allowed_tools, + tool_registry, + progress_reporter, }) } } @@ -2422,21 +2857,17 @@ fn resolve_cli_auth_source() -> Result> { impl ApiClient for AnthropicRuntimeClient { #[allow(clippy::too_many_lines)] fn stream(&mut self, request: ApiRequest) -> Result, RuntimeError> { + if let Some(progress_reporter) = &self.progress_reporter { + progress_reporter.mark_model_phase(); + } let message_request = MessageRequest { model: self.model.clone(), max_tokens: max_tokens_for_model(&self.model), messages: convert_messages(&request.messages), system: 
(!request.system_prompt.is_empty()).then(|| request.system_prompt.join("\n\n")), - tools: self.enable_tools.then(|| { - filter_tool_specs(self.allowed_tools.as_ref()) - .into_iter() - .map(|spec| ToolDefinition { - name: spec.name.to_string(), - description: Some(spec.description.to_string()), - input_schema: spec.input_schema, - }) - .collect() - }), + tools: self + .enable_tools + .then(|| filter_tool_specs(&self.tool_registry, self.allowed_tools.as_ref())), tool_choice: self.enable_tools.then_some(ToolChoice::Auto), stream: true, }; @@ -2483,6 +2914,9 @@ impl ApiClient for AnthropicRuntimeClient { ApiStreamEvent::ContentBlockDelta(delta) => match delta.delta { ContentBlockDelta::TextDelta { text } => { if !text.is_empty() { + if let Some(progress_reporter) = &self.progress_reporter { + progress_reporter.mark_text_phase(&text); + } if let Some(rendered) = markdown_stream.push(&renderer, &text) { write!(out, "{rendered}") .and_then(|()| out.flush()) @@ -2496,6 +2930,8 @@ impl ApiClient for AnthropicRuntimeClient { input.push_str(&partial_json); } } + ContentBlockDelta::ThinkingDelta { .. } + | ContentBlockDelta::SignatureDelta { .. 
} => {} }, ApiStreamEvent::ContentBlockStop(_) => { if let Some(rendered) = markdown_stream.flush(&renderer) { @@ -2504,6 +2940,9 @@ impl ApiClient for AnthropicRuntimeClient { .map_err(|error| RuntimeError::new(error.to_string()))?; } if let Some((id, name, input)) = pending_tool.take() { + if let Some(progress_reporter) = &self.progress_reporter { + progress_reporter.mark_tool_phase(&name, &input); + } // Display tool call now that input is fully accumulated writeln!(out, "\n{}", format_tool_call_start(&name, &input)) .and_then(|()| out.flush()) @@ -2700,13 +3139,17 @@ fn format_tool_result(name: &str, output: &str, is_error: bool) -> String { "edit_file" | "Edit" => format_edit_result(icon, &parsed), "glob_search" | "Glob" => format_glob_result(icon, &parsed), "grep_search" | "Grep" => format_grep_result(icon, &parsed), - _ => { - let summary = truncate_for_summary(output.trim(), 200); - format!("{icon} \x1b[38;5;245m{name}:\x1b[0m {summary}") - } + _ => format_generic_tool_result(icon, name, &parsed), } } +const DISPLAY_TRUNCATION_NOTICE: &str = + "\x1b[2m… output truncated for display; full result preserved in session.\x1b[0m"; +const READ_DISPLAY_MAX_LINES: usize = 80; +const READ_DISPLAY_MAX_CHARS: usize = 6_000; +const TOOL_OUTPUT_DISPLAY_MAX_LINES: usize = 60; +const TOOL_OUTPUT_DISPLAY_MAX_CHARS: usize = 4_000; + fn extract_tool_path(parsed: &serde_json::Value) -> String { parsed .get("file_path") @@ -2767,23 +3210,34 @@ fn format_bash_result(icon: &str, parsed: &serde_json::Value) -> String { .get("backgroundTaskId") .and_then(|value| value.as_str()) { - lines[0].push_str(&format!(" backgrounded ({task_id})")); + write!(&mut lines[0], " backgrounded ({task_id})").expect("write to string"); } else if let Some(status) = parsed .get("returnCodeInterpretation") .and_then(|value| value.as_str()) .filter(|status| !status.is_empty()) { - lines[0].push_str(&format!(" {status}")); + write!(&mut lines[0], " {status}").expect("write to string"); } if let 
Some(stdout) = parsed.get("stdout").and_then(|value| value.as_str()) { if !stdout.trim().is_empty() { - lines.push(stdout.trim_end().to_string()); + lines.push(truncate_output_for_display( + stdout, + TOOL_OUTPUT_DISPLAY_MAX_LINES, + TOOL_OUTPUT_DISPLAY_MAX_CHARS, + )); } } if let Some(stderr) = parsed.get("stderr").and_then(|value| value.as_str()) { if !stderr.trim().is_empty() { - lines.push(format!("\x1b[38;5;203m{}\x1b[0m", stderr.trim_end())); + lines.push(format!( + "\x1b[38;5;203m{}\x1b[0m", + truncate_output_for_display( + stderr, + TOOL_OUTPUT_DISPLAY_MAX_LINES, + TOOL_OUTPUT_DISPLAY_MAX_CHARS, + ) + )); } } @@ -2795,15 +3249,15 @@ fn format_read_result(icon: &str, parsed: &serde_json::Value) -> String { let path = extract_tool_path(file); let start_line = file .get("startLine") - .and_then(|value| value.as_u64()) + .and_then(serde_json::Value::as_u64) .unwrap_or(1); let num_lines = file .get("numLines") - .and_then(|value| value.as_u64()) + .and_then(serde_json::Value::as_u64) .unwrap_or(0); let total_lines = file .get("totalLines") - .and_then(|value| value.as_u64()) + .and_then(serde_json::Value::as_u64) .unwrap_or(num_lines); let content = file .get("content") @@ -2816,7 +3270,7 @@ fn format_read_result(icon: &str, parsed: &serde_json::Value) -> String { start_line, end_line.max(start_line), total_lines, - content + truncate_output_for_display(content, READ_DISPLAY_MAX_LINES, READ_DISPLAY_MAX_CHARS) ) } @@ -2829,8 +3283,7 @@ fn format_write_result(icon: &str, parsed: &serde_json::Value) -> String { let line_count = parsed .get("content") .and_then(|value| value.as_str()) - .map(|content| content.lines().count()) - .unwrap_or(0); + .map_or(0, |content| content.lines().count()); format!( "{icon} \x1b[1;32m✏️ {} {path}\x1b[0m \x1b[2m({line_count} lines)\x1b[0m", if kind == "create" { "Wrote" } else { "Updated" }, @@ -2861,7 +3314,7 @@ fn format_edit_result(icon: &str, parsed: &serde_json::Value) -> String { let path = extract_tool_path(parsed); let suffix 
= if parsed .get("replaceAll") - .and_then(|value| value.as_bool()) + .and_then(serde_json::Value::as_bool) .unwrap_or(false) { " (replace all)" @@ -2889,7 +3342,7 @@ fn format_edit_result(icon: &str, parsed: &serde_json::Value) -> String { fn format_glob_result(icon: &str, parsed: &serde_json::Value) -> String { let num_files = parsed .get("numFiles") - .and_then(|value| value.as_u64()) + .and_then(serde_json::Value::as_u64) .unwrap_or(0); let filenames = parsed .get("filenames") @@ -2913,11 +3366,11 @@ fn format_glob_result(icon: &str, parsed: &serde_json::Value) -> String { fn format_grep_result(icon: &str, parsed: &serde_json::Value) -> String { let num_matches = parsed .get("numMatches") - .and_then(|value| value.as_u64()) + .and_then(serde_json::Value::as_u64) .unwrap_or(0); let num_files = parsed .get("numFiles") - .and_then(|value| value.as_u64()) + .and_then(serde_json::Value::as_u64) .unwrap_or(0); let content = parsed .get("content") @@ -2939,7 +3392,14 @@ fn format_grep_result(icon: &str, parsed: &serde_json::Value) -> String { "{icon} \x1b[38;5;245mgrep_search\x1b[0m {num_matches} matches across {num_files} files" ); if !content.trim().is_empty() { - format!("{summary}\n{}", content.trim_end()) + format!( + "{summary}\n{}", + truncate_output_for_display( + content, + TOOL_OUTPUT_DISPLAY_MAX_LINES, + TOOL_OUTPUT_DISPLAY_MAX_CHARS, + ) + ) } else if !filenames.is_empty() { format!("{summary}\n{filenames}") } else { @@ -2947,6 +3407,30 @@ fn format_grep_result(icon: &str, parsed: &serde_json::Value) -> String { } } +fn format_generic_tool_result(icon: &str, name: &str, parsed: &serde_json::Value) -> String { + let rendered_output = match parsed { + serde_json::Value::String(text) => text.clone(), + serde_json::Value::Null => String::new(), + serde_json::Value::Object(_) | serde_json::Value::Array(_) => { + serde_json::to_string_pretty(parsed).unwrap_or_else(|_| parsed.to_string()) + } + _ => parsed.to_string(), + }; + let preview = 
truncate_output_for_display( + &rendered_output, + TOOL_OUTPUT_DISPLAY_MAX_LINES, + TOOL_OUTPUT_DISPLAY_MAX_CHARS, + ); + + if preview.is_empty() { + format!("{icon} \x1b[38;5;245m{name}\x1b[0m") + } else if preview.contains('\n') { + format!("{icon} \x1b[38;5;245m{name}\x1b[0m\n{preview}") + } else { + format!("{icon} \x1b[38;5;245m{name}:\x1b[0m {preview}") + } +} + fn summarize_tool_payload(payload: &str) -> String { let compact = match serde_json::from_str::(payload) { Ok(value) => value.to_string(), @@ -2965,6 +3449,50 @@ fn truncate_for_summary(value: &str, limit: usize) -> String { } } +fn truncate_output_for_display(content: &str, max_lines: usize, max_chars: usize) -> String { + let original = content.trim_end_matches('\n'); + if original.is_empty() { + return String::new(); + } + + let mut preview_lines = Vec::new(); + let mut used_chars = 0usize; + let mut truncated = false; + + for (index, line) in original.lines().enumerate() { + if index >= max_lines { + truncated = true; + break; + } + + let newline_cost = usize::from(!preview_lines.is_empty()); + let available = max_chars.saturating_sub(used_chars + newline_cost); + if available == 0 { + truncated = true; + break; + } + + let line_chars = line.chars().count(); + if line_chars > available { + preview_lines.push(line.chars().take(available).collect::()); + truncated = true; + break; + } + + preview_lines.push(line.to_string()); + used_chars += newline_cost + line_chars; + } + + let mut preview = preview_lines.join("\n"); + if truncated { + if !preview.is_empty() { + preview.push('\n'); + } + preview.push_str(DISPLAY_TRUNCATION_NOTICE); + } + preview +} + fn push_output_block( block: OutputContentBlock, out: &mut (impl Write + ?Sized), @@ -2996,6 +3524,7 @@ fn push_output_block( }; *pending_tool = Some((id, name, initial_input)); } + OutputContentBlock::Thinking { .. } | OutputContentBlock::RedactedThinking { .. 
} => {} } Ok(()) } @@ -3028,14 +3557,20 @@ struct CliToolExecutor { renderer: TerminalRenderer, emit_output: bool, allowed_tools: Option, + tool_registry: GlobalToolRegistry, } impl CliToolExecutor { - fn new(allowed_tools: Option, emit_output: bool) -> Self { + fn new( + allowed_tools: Option, + emit_output: bool, + tool_registry: GlobalToolRegistry, + ) -> Self { Self { renderer: TerminalRenderer::new(), emit_output, allowed_tools, + tool_registry, } } } @@ -3053,7 +3588,7 @@ impl ToolExecutor for CliToolExecutor { } let value = serde_json::from_str(input) .map_err(|error| ToolError::new(format!("invalid tool input JSON: {error}")))?; - match execute_tool(tool_name, &value) { + match self.tool_registry.execute(tool_name, &value) { Ok(output) => { if self.emit_output { let markdown = format_tool_result(tool_name, &output, false); @@ -3076,16 +3611,13 @@ impl ToolExecutor for CliToolExecutor { } } -fn permission_policy(mode: PermissionMode) -> PermissionPolicy { - tool_permission_specs() - .into_iter() - .fold(PermissionPolicy::new(mode), |policy, spec| { - policy.with_tool_requirement(spec.name, spec.required_permission) - }) -} - -fn tool_permission_specs() -> Vec { - mvp_tool_specs() +fn permission_policy(mode: PermissionMode, tool_registry: &GlobalToolRegistry) -> PermissionPolicy { + tool_registry.permission_specs(None).into_iter().fold( + PermissionPolicy::new(mode), + |policy, (name, required_permission)| { + policy.with_tool_requirement(name, required_permission) + }, + ) } fn convert_messages(messages: &[ConversationMessage]) -> Vec { @@ -3224,18 +3756,47 @@ fn print_help() { #[cfg(test)] mod tests { use super::{ - filter_tool_specs, format_compact_report, format_cost_report, format_model_report, - format_model_switch_report, format_permissions_report, format_permissions_switch_report, - format_resume_report, format_status_report, format_tool_call_start, format_tool_result, - normalize_permission_mode, parse_args, parse_git_status_metadata, print_help_to, 
- push_output_block, render_config_report, render_memory_report, render_repl_help, - resolve_model_alias, response_to_events, resume_supported_slash_commands, status_context, - CliAction, CliOutputFormat, SlashCommand, StatusUsage, DEFAULT_MODEL, + describe_tool_progress, filter_tool_specs, format_compact_report, format_cost_report, + format_internal_prompt_progress_line, format_model_report, format_model_switch_report, + format_permissions_report, format_permissions_switch_report, format_resume_report, + format_status_report, format_tool_call_start, format_tool_result, + normalize_permission_mode, parse_args, parse_git_status_metadata, permission_policy, + print_help_to, push_output_block, render_config_report, render_memory_report, + render_repl_help, resolve_model_alias, response_to_events, resume_supported_slash_commands, + status_context, CliAction, CliOutputFormat, InternalPromptProgressEvent, + InternalPromptProgressState, SlashCommand, StatusUsage, DEFAULT_MODEL, }; use api::{MessageResponse, OutputContentBlock, Usage}; + use plugins::{PluginTool, PluginToolDefinition, PluginToolPermission}; use runtime::{AssistantEvent, ContentBlock, ConversationMessage, MessageRole, PermissionMode}; use serde_json::json; use std::path::PathBuf; + use std::time::Duration; + use tools::GlobalToolRegistry; + + fn registry_with_plugin_tool() -> GlobalToolRegistry { + GlobalToolRegistry::with_plugin_tools(vec![PluginTool::new( + "plugin-demo@external", + "plugin-demo", + PluginToolDefinition { + name: "plugin_echo".to_string(), + description: Some("Echo plugin payload".to_string()), + input_schema: json!({ + "type": "object", + "properties": { + "message": { "type": "string" } + }, + "required": ["message"], + "additionalProperties": false + }), + }, + "echo".to_string(), + Vec::new(), + PluginToolPermission::WorkspaceWrite, + None, + )]) + .expect("plugin tool registry should build") + } #[test] fn defaults_to_repl_when_no_args() { @@ -3449,7 +4010,7 @@ mod tests { 
.into_iter() .map(str::to_string) .collect(); - let filtered = filter_tool_specs(Some(&allowed)); + let filtered = filter_tool_specs(&GlobalToolRegistry::builtin(), Some(&allowed)); let names = filtered .into_iter() .map(|spec| spec.name) @@ -3457,6 +4018,24 @@ mod tests { assert_eq!(names, vec!["read_file", "grep_search"]); } + #[test] + fn filtered_tool_specs_include_plugin_tools() { + let filtered = filter_tool_specs(®istry_with_plugin_tool(), None); + let names = filtered + .into_iter() + .map(|definition| definition.name) + .collect::>(); + assert!(names.contains(&"bash".to_string())); + assert!(names.contains(&"plugin_echo".to_string())); + } + + #[test] + fn permission_policy_uses_plugin_tool_permissions() { + let policy = permission_policy(PermissionMode::ReadOnly, ®istry_with_plugin_tool()); + let required = policy.required_mode_for("plugin_echo"); + assert_eq!(required, PermissionMode::WorkspaceWrite); + } + #[test] fn shared_help_uses_resume_annotation_copy() { let help = commands::render_slash_command_help(); @@ -3475,13 +4054,16 @@ mod tests { assert!(help.contains("/clear [--confirm]")); assert!(help.contains("/cost")); assert!(help.contains("/resume ")); - assert!(help.contains("/config [env|hooks|model]")); + assert!(help.contains("/config [env|hooks|model|plugins]")); assert!(help.contains("/memory")); assert!(help.contains("/init")); assert!(help.contains("/diff")); assert!(help.contains("/version")); assert!(help.contains("/export [file]")); assert!(help.contains("/session [list|switch ]")); + assert!(help.contains( + "/plugins [list|install |enable |disable |uninstall |update ]" + )); assert!(help.contains("/exit")); } @@ -3632,6 +4214,9 @@ mod tests { fn config_report_supports_section_views() { let report = render_config_report(Some("env")).expect("config report should render"); assert!(report.contains("Merged section: env")); + let plugins_report = + render_config_report(Some("plugins")).expect("plugins config report should render"); + 
assert!(plugins_report.contains("Merged section: plugins")); } #[test] @@ -3777,6 +4362,156 @@ mod tests { assert!(done.contains("hello")); } + #[test] + fn tool_rendering_truncates_large_read_output_for_display_only() { + let content = (0..200) + .map(|index| format!("line {index:03}")) + .collect::>() + .join("\n"); + let output = json!({ + "file": { + "filePath": "src/main.rs", + "content": content, + "numLines": 200, + "startLine": 1, + "totalLines": 200 + } + }) + .to_string(); + + let rendered = format_tool_result("read_file", &output, false); + + assert!(rendered.contains("line 000")); + assert!(rendered.contains("line 079")); + assert!(!rendered.contains("line 199")); + assert!(rendered.contains("full result preserved in session")); + assert!(output.contains("line 199")); + } + + #[test] + fn tool_rendering_truncates_large_bash_output_for_display_only() { + let stdout = (0..120) + .map(|index| format!("stdout {index:03}")) + .collect::>() + .join("\n"); + let output = json!({ + "stdout": stdout, + "stderr": "", + "returnCodeInterpretation": "completed successfully" + }) + .to_string(); + + let rendered = format_tool_result("bash", &output, false); + + assert!(rendered.contains("stdout 000")); + assert!(rendered.contains("stdout 059")); + assert!(!rendered.contains("stdout 119")); + assert!(rendered.contains("full result preserved in session")); + assert!(output.contains("stdout 119")); + } + + #[test] + fn tool_rendering_truncates_generic_long_output_for_display_only() { + let items = (0..120) + .map(|index| format!("payload {index:03}")) + .collect::>(); + let output = json!({ + "summary": "plugin payload", + "items": items, + }) + .to_string(); + + let rendered = format_tool_result("plugin_echo", &output, false); + + assert!(rendered.contains("plugin_echo")); + assert!(rendered.contains("payload 000")); + assert!(rendered.contains("payload 040")); + assert!(!rendered.contains("payload 080")); + assert!(!rendered.contains("payload 119")); + 
assert!(rendered.contains("full result preserved in session")); + assert!(output.contains("payload 119")); + } + + #[test] + fn tool_rendering_truncates_raw_generic_output_for_display_only() { + let output = (0..120) + .map(|index| format!("raw {index:03}")) + .collect::>() + .join("\n"); + + let rendered = format_tool_result("plugin_echo", &output, false); + + assert!(rendered.contains("plugin_echo")); + assert!(rendered.contains("raw 000")); + assert!(rendered.contains("raw 059")); + assert!(!rendered.contains("raw 119")); + assert!(rendered.contains("full result preserved in session")); + assert!(output.contains("raw 119")); + } + + #[test] + fn ultraplan_progress_lines_include_phase_step_and_elapsed_status() { + let snapshot = InternalPromptProgressState { + command_label: "Ultraplan", + task_label: "ship plugin progress".to_string(), + step: 3, + phase: "running read_file".to_string(), + detail: Some("reading rust/crates/rusty-claude-cli/src/main.rs".to_string()), + saw_final_text: false, + }; + + let started = format_internal_prompt_progress_line( + InternalPromptProgressEvent::Started, + &snapshot, + Duration::from_secs(0), + None, + ); + let heartbeat = format_internal_prompt_progress_line( + InternalPromptProgressEvent::Heartbeat, + &snapshot, + Duration::from_secs(9), + None, + ); + let completed = format_internal_prompt_progress_line( + InternalPromptProgressEvent::Complete, + &snapshot, + Duration::from_secs(12), + None, + ); + let failed = format_internal_prompt_progress_line( + InternalPromptProgressEvent::Failed, + &snapshot, + Duration::from_secs(12), + Some("network timeout"), + ); + + assert!(started.contains("planning started")); + assert!(started.contains("current step 3")); + assert!(heartbeat.contains("heartbeat")); + assert!(heartbeat.contains("9s elapsed")); + assert!(heartbeat.contains("phase running read_file")); + assert!(completed.contains("completed")); + assert!(completed.contains("3 steps total")); + 
assert!(failed.contains("failed")); + assert!(failed.contains("network timeout")); + } + + #[test] + fn describe_tool_progress_summarizes_known_tools() { + assert_eq!( + describe_tool_progress("read_file", r#"{"path":"src/main.rs"}"#), + "reading src/main.rs" + ); + assert!( + describe_tool_progress("bash", r#"{"command":"cargo test -p rusty-claude-cli"}"#) + .contains("cargo test -p rusty-claude-cli") + ); + assert_eq!( + describe_tool_progress("grep_search", r#"{"pattern":"ultraplan","path":"rust"}"#), + "grep `ultraplan` in rust" + ); + } + #[test] fn push_output_block_renders_markdown_text() { let mut out = Vec::new(); @@ -3894,4 +4629,43 @@ mod tests { if name == "read_file" && input == "{\"path\":\"rust/Cargo.toml\"}" )); } + + #[test] + fn response_to_events_ignores_thinking_blocks() { + let mut out = Vec::new(); + let events = response_to_events( + MessageResponse { + id: "msg-3".to_string(), + kind: "message".to_string(), + model: "claude-opus-4-6".to_string(), + role: "assistant".to_string(), + content: vec![ + OutputContentBlock::Thinking { + thinking: "step 1".to_string(), + signature: Some("sig_123".to_string()), + }, + OutputContentBlock::Text { + text: "Final answer".to_string(), + }, + ], + stop_reason: Some("end_turn".to_string()), + stop_sequence: None, + usage: Usage { + input_tokens: 1, + output_tokens: 1, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }, + request_id: None, + }, + &mut out, + ) + .expect("response conversion should succeed"); + + assert!(matches!( + &events[0], + AssistantEvent::TextDelta(text) if text == "Final answer" + )); + assert!(!String::from_utf8(out).expect("utf8").contains("step 1")); + } } diff --git a/rust/crates/rusty-claude-cli/src/render.rs b/rust/crates/rusty-claude-cli/src/render.rs index 465c5a4..01751fd 100644 --- a/rust/crates/rusty-claude-cli/src/render.rs +++ b/rust/crates/rusty-claude-cli/src/render.rs @@ -286,7 +286,7 @@ impl TerminalRenderer { ) { match event { 
Event::Start(Tag::Heading { level, .. }) => { - self.start_heading(state, level as u8, output) + self.start_heading(state, level as u8, output); } Event::End(TagEnd::Paragraph) => output.push_str("\n\n"), Event::Start(Tag::BlockQuote(..)) => self.start_quote(state, output), @@ -426,6 +426,7 @@ impl TerminalRenderer { } } + #[allow(clippy::unused_self)] fn start_heading(&self, state: &mut RenderState, level: u8, output: &mut String) { state.heading_level = Some(level); if !output.is_empty() { diff --git a/rust/crates/tools/Cargo.toml b/rust/crates/tools/Cargo.toml index dfa003d..9ecbb06 100644 --- a/rust/crates/tools/Cargo.toml +++ b/rust/crates/tools/Cargo.toml @@ -7,6 +7,7 @@ publish.workspace = true [dependencies] api = { path = "../api" } +plugins = { path = "../plugins" } runtime = { path = "../runtime" } reqwest = { version = "0.12", default-features = false, features = ["blocking", "rustls-tls"] } serde = { version = "1", features = ["derive"] } diff --git a/rust/crates/tools/src/lib.rs b/rust/crates/tools/src/lib.rs index 4071c9b..38fafe9 100644 --- a/rust/crates/tools/src/lib.rs +++ b/rust/crates/tools/src/lib.rs @@ -8,12 +8,13 @@ use api::{ MessageRequest, MessageResponse, OutputContentBlock, StreamEvent as ApiStreamEvent, ToolChoice, ToolDefinition, ToolResultContentBlock, }; +use plugins::{PluginManager, PluginManagerConfig, PluginTool}; use reqwest::blocking::Client; use runtime::{ edit_file, execute_bash, glob_search, grep_search, load_system_prompt, read_file, write_file, - ApiClient, ApiRequest, AssistantEvent, BashCommandInput, ContentBlock, ConversationMessage, - ConversationRuntime, GrepSearchInput, MessageRole, PermissionMode, PermissionPolicy, - RuntimeError, Session, TokenUsage, ToolError, ToolExecutor, + ApiClient, ApiRequest, AssistantEvent, BashCommandInput, ConfigLoader, ContentBlock, + ConversationMessage, ConversationRuntime, GrepSearchInput, MessageRole, PermissionMode, + PermissionPolicy, RuntimeConfig, RuntimeError, Session, 
TokenUsage, ToolError, ToolExecutor, }; use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; @@ -55,6 +56,239 @@ pub struct ToolSpec { pub required_permission: PermissionMode, } +#[derive(Debug, Clone, PartialEq)] +pub struct RegisteredTool { + pub definition: ToolDefinition, + pub required_permission: PermissionMode, + handler: RegisteredToolHandler, +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, Clone, PartialEq)] +enum RegisteredToolHandler { + Builtin, + Plugin(PluginTool), +} + +#[derive(Debug, Clone, PartialEq)] +pub struct GlobalToolRegistry { + entries: Vec, +} + +impl GlobalToolRegistry { + #[must_use] + pub fn builtin() -> Self { + Self { + entries: mvp_tool_specs() + .into_iter() + .map(|spec| RegisteredTool { + definition: ToolDefinition { + name: spec.name.to_string(), + description: Some(spec.description.to_string()), + input_schema: spec.input_schema, + }, + required_permission: spec.required_permission, + handler: RegisteredToolHandler::Builtin, + }) + .collect(), + } + } + + pub fn with_plugin_tools(plugin_tools: Vec) -> Result { + let mut registry = Self::builtin(); + let mut seen = registry + .entries + .iter() + .map(|entry| { + ( + normalize_registry_tool_name(&entry.definition.name), + entry.definition.name.clone(), + ) + }) + .collect::>(); + + for tool in plugin_tools { + let normalized = normalize_registry_tool_name(&tool.definition().name); + if let Some(existing) = seen.get(&normalized) { + return Err(format!( + "plugin tool `{}` from `{}` conflicts with already-registered tool `{existing}`", + tool.definition().name, + tool.plugin_id() + )); + } + seen.insert(normalized, tool.definition().name.clone()); + registry.entries.push(RegisteredTool { + definition: ToolDefinition { + name: tool.definition().name.clone(), + description: tool.definition().description.clone(), + input_schema: tool.definition().input_schema.clone(), + }, + required_permission: permission_mode_from_plugin_tool(tool.required_permission())?, 
+ handler: RegisteredToolHandler::Plugin(tool), + }); + } + + Ok(registry) + } + + #[must_use] + pub fn entries(&self) -> &[RegisteredTool] { + &self.entries + } + + fn find_entry(&self, name: &str) -> Option<&RegisteredTool> { + let normalized = normalize_registry_tool_name(name); + self.entries.iter().find(|entry| { + normalize_registry_tool_name(entry.definition.name.as_str()) == normalized + }) + } + + #[must_use] + pub fn definitions(&self, allowed_tools: Option<&BTreeSet>) -> Vec { + self.entries + .iter() + .filter(|entry| { + allowed_tools.is_none_or(|allowed| allowed.contains(entry.definition.name.as_str())) + }) + .map(|entry| entry.definition.clone()) + .collect() + } + + #[must_use] + pub fn permission_specs( + &self, + allowed_tools: Option<&BTreeSet>, + ) -> Vec<(String, PermissionMode)> { + self.entries + .iter() + .filter(|entry| { + allowed_tools.is_none_or(|allowed| allowed.contains(entry.definition.name.as_str())) + }) + .map(|entry| (entry.definition.name.clone(), entry.required_permission)) + .collect() + } + + pub fn normalize_allowed_tools( + &self, + values: &[String], + ) -> Result>, String> { + if values.is_empty() { + return Ok(None); + } + + let canonical_names = self + .entries + .iter() + .map(|entry| entry.definition.name.clone()) + .collect::>(); + let mut name_map = canonical_names + .iter() + .map(|name| (normalize_registry_tool_name(name), name.clone())) + .collect::>(); + + for (alias, canonical) in [ + ("read", "read_file"), + ("write", "write_file"), + ("edit", "edit_file"), + ("glob", "glob_search"), + ("grep", "grep_search"), + ] { + if canonical_names.iter().any(|name| name == canonical) { + name_map.insert(alias.to_string(), canonical.to_string()); + } + } + + let mut allowed = BTreeSet::new(); + for value in values { + for token in value + .split(|ch: char| ch == ',' || ch.is_whitespace()) + .filter(|token| !token.is_empty()) + { + let normalized = normalize_registry_tool_name(token); + let canonical = 
name_map.get(&normalized).ok_or_else(|| { + format!( + "unsupported tool in --allowedTools: {token} (expected one of: {})", + canonical_names.join(", ") + ) + })?; + allowed.insert(canonical.clone()); + } + } + + Ok(Some(allowed)) + } + + pub fn execute(&self, name: &str, input: &Value) -> Result { + let entry = self + .find_entry(name) + .ok_or_else(|| format!("unsupported tool: {name}"))?; + match &entry.handler { + RegisteredToolHandler::Builtin => execute_tool(&entry.definition.name, input), + RegisteredToolHandler::Plugin(tool) => { + tool.execute(input).map_err(|error| error.to_string()) + } + } + } +} + +impl Default for GlobalToolRegistry { + fn default() -> Self { + Self::builtin() + } +} + +fn normalize_registry_tool_name(value: &str) -> String { + let trimmed = value.trim(); + let chars = trimmed.chars().collect::>(); + let mut normalized = String::new(); + + for (index, ch) in chars.iter().copied().enumerate() { + if matches!(ch, '-' | ' ' | '\t' | '\n') { + if !normalized.ends_with('_') { + normalized.push('_'); + } + continue; + } + + if ch == '_' { + if !normalized.ends_with('_') { + normalized.push('_'); + } + continue; + } + + if ch.is_uppercase() { + let prev = chars.get(index.wrapping_sub(1)).copied(); + let next = chars.get(index + 1).copied(); + let needs_separator = index > 0 + && !normalized.ends_with('_') + && (prev.is_some_and(|prev| prev.is_lowercase() || prev.is_ascii_digit()) + || (prev.is_some_and(char::is_uppercase) + && next.is_some_and(char::is_lowercase))); + if needs_separator { + normalized.push('_'); + } + normalized.extend(ch.to_lowercase()); + continue; + } + + normalized.push(ch.to_ascii_lowercase()); + } + + normalized.trim_matches('_').to_string() +} + +fn permission_mode_from_plugin_tool(value: &str) -> Result { + match value { + "read-only" => Ok(PermissionMode::ReadOnly), + "workspace-write" => Ok(PermissionMode::WorkspaceWrite), + "danger-full-access" => Ok(PermissionMode::DangerFullAccess), + other => Err(format!( + 
"unsupported plugin tool permission `{other}` (expected read-only, workspace-write, or danger-full-access)" + )), + } +} + #[must_use] #[allow(clippy::too_many_lines)] pub fn mvp_tool_specs() -> Vec { @@ -1466,13 +1700,15 @@ fn build_agent_runtime( .clone() .unwrap_or_else(|| DEFAULT_AGENT_MODEL.to_string()); let allowed_tools = job.allowed_tools.clone(); - let api_client = AnthropicRuntimeClient::new(model, allowed_tools.clone())?; - let tool_executor = SubagentToolExecutor::new(allowed_tools); + let tool_registry = current_tool_registry()?; + let api_client = + AnthropicRuntimeClient::new(model, allowed_tools.clone(), tool_registry.clone())?; + let tool_executor = SubagentToolExecutor::new(allowed_tools, tool_registry.clone()); Ok(ConversationRuntime::new( Session::new(), api_client, tool_executor, - agent_permission_policy(), + agent_permission_policy(&tool_registry), job.system_prompt.clone(), )) } @@ -1581,10 +1817,12 @@ fn allowed_tools_for_subagent(subagent_type: &str) -> BTreeSet { tools.into_iter().map(str::to_string).collect() } -fn agent_permission_policy() -> PermissionPolicy { - mvp_tool_specs().into_iter().fold( +fn agent_permission_policy(tool_registry: &GlobalToolRegistry) -> PermissionPolicy { + tool_registry.permission_specs(None).into_iter().fold( PermissionPolicy::new(PermissionMode::DangerFullAccess), - |policy, spec| policy.with_tool_requirement(spec.name, spec.required_permission), + |policy, (name, required_permission)| { + policy.with_tool_requirement(name, required_permission) + }, ) } @@ -1640,10 +1878,15 @@ struct AnthropicRuntimeClient { client: AnthropicClient, model: String, allowed_tools: BTreeSet, + tool_registry: GlobalToolRegistry, } impl AnthropicRuntimeClient { - fn new(model: String, allowed_tools: BTreeSet) -> Result { + fn new( + model: String, + allowed_tools: BTreeSet, + tool_registry: GlobalToolRegistry, + ) -> Result { let client = AnthropicClient::from_env() .map_err(|error| error.to_string())? 
.with_base_url(read_base_url()); @@ -1652,20 +1895,14 @@ impl AnthropicRuntimeClient { client, model, allowed_tools, + tool_registry, }) } } impl ApiClient for AnthropicRuntimeClient { fn stream(&mut self, request: ApiRequest) -> Result, RuntimeError> { - let tools = tool_specs_for_allowed_tools(Some(&self.allowed_tools)) - .into_iter() - .map(|spec| ToolDefinition { - name: spec.name.to_string(), - description: Some(spec.description.to_string()), - input_schema: spec.input_schema, - }) - .collect::>(); + let tools = self.tool_registry.definitions(Some(&self.allowed_tools)); let message_request = MessageRequest { model: self.model.clone(), max_tokens: 32_000, @@ -1716,6 +1953,8 @@ impl ApiClient for AnthropicRuntimeClient { input.push_str(&partial_json); } } + ContentBlockDelta::ThinkingDelta { .. } + | ContentBlockDelta::SignatureDelta { .. } => {} }, ApiStreamEvent::ContentBlockStop(_) => { if let Some((id, name, input)) = pending_tool.take() { @@ -1768,32 +2007,82 @@ impl ApiClient for AnthropicRuntimeClient { struct SubagentToolExecutor { allowed_tools: BTreeSet, + tool_registry: GlobalToolRegistry, } impl SubagentToolExecutor { - fn new(allowed_tools: BTreeSet) -> Self { - Self { allowed_tools } + fn new(allowed_tools: BTreeSet, tool_registry: GlobalToolRegistry) -> Self { + Self { + allowed_tools, + tool_registry, + } } } impl ToolExecutor for SubagentToolExecutor { fn execute(&mut self, tool_name: &str, input: &str) -> Result { - if !self.allowed_tools.contains(tool_name) { + let entry = self + .tool_registry + .find_entry(tool_name) + .ok_or_else(|| ToolError::new(format!("unsupported tool: {tool_name}")))?; + if !self.allowed_tools.contains(entry.definition.name.as_str()) { return Err(ToolError::new(format!( "tool `{tool_name}` is not enabled for this sub-agent" ))); } let value = serde_json::from_str(input) .map_err(|error| ToolError::new(format!("invalid tool input JSON: {error}")))?; - execute_tool(tool_name, &value).map_err(ToolError::new) + 
self.tool_registry + .execute(tool_name, &value) + .map_err(ToolError::new) } } -fn tool_specs_for_allowed_tools(allowed_tools: Option<&BTreeSet>) -> Vec { - mvp_tool_specs() - .into_iter() - .filter(|spec| allowed_tools.is_none_or(|allowed| allowed.contains(spec.name))) - .collect() +fn current_tool_registry() -> Result { + let cwd = std::env::current_dir().map_err(|error| error.to_string())?; + let loader = ConfigLoader::default_for(&cwd); + let runtime_config = loader.load().map_err(|error| error.to_string())?; + let plugin_manager = build_plugin_manager(&cwd, &loader, &runtime_config); + let plugin_tools = plugin_manager + .aggregated_tools() + .map_err(|error| error.to_string())?; + GlobalToolRegistry::with_plugin_tools(plugin_tools) +} + +fn build_plugin_manager( + cwd: &Path, + loader: &ConfigLoader, + runtime_config: &RuntimeConfig, +) -> PluginManager { + let plugin_settings = runtime_config.plugins(); + let mut plugin_config = PluginManagerConfig::new(loader.config_home().to_path_buf()); + plugin_config.enabled_plugins = plugin_settings.enabled_plugins().clone(); + plugin_config.external_dirs = plugin_settings + .external_directories() + .iter() + .map(|path| resolve_plugin_path(cwd, loader.config_home(), path)) + .collect(); + plugin_config.install_root = plugin_settings + .install_root() + .map(|path| resolve_plugin_path(cwd, loader.config_home(), path)); + plugin_config.registry_path = plugin_settings + .registry_path() + .map(|path| resolve_plugin_path(cwd, loader.config_home(), path)); + plugin_config.bundled_root = plugin_settings + .bundled_root() + .map(|path| resolve_plugin_path(cwd, loader.config_home(), path)); + PluginManager::new(plugin_config) +} + +fn resolve_plugin_path(cwd: &Path, config_home: &Path, value: &str) -> PathBuf { + let path = PathBuf::from(value); + if path.is_absolute() { + path + } else if value.starts_with('.') { + cwd.join(path) + } else { + config_home.join(path) + } } fn convert_messages(messages: 
&[ConversationMessage]) -> Vec { @@ -1860,6 +2149,7 @@ fn push_output_block( }; *pending_tool = Some((id, name, initial_input)); } + OutputContentBlock::Thinking { .. } | OutputContentBlock::RedactedThinking { .. } => {} } } @@ -2905,9 +3195,13 @@ mod tests { use super::{ agent_permission_policy, allowed_tools_for_subagent, execute_agent_with_spawn, execute_tool, final_assistant_text, mvp_tool_specs, persist_agent_terminal_state, - AgentInput, AgentJob, SubagentToolExecutor, + response_to_events, AgentInput, AgentJob, GlobalToolRegistry, SubagentToolExecutor, + }; + use api::{MessageResponse, OutputContentBlock, Usage}; + use plugins::{PluginTool, PluginToolDefinition, PluginToolPermission}; + use runtime::{ + ApiRequest, AssistantEvent, ConversationRuntime, RuntimeError, Session, ToolExecutor, }; - use runtime::{ApiRequest, AssistantEvent, ConversationRuntime, RuntimeError, Session}; use serde_json::json; fn env_lock() -> &'static Mutex<()> { @@ -2923,6 +3217,17 @@ mod tests { std::env::temp_dir().join(format!("clawd-tools-{unique}-{name}")) } + fn make_executable(path: &PathBuf) { + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + + let mut permissions = std::fs::metadata(path).expect("metadata").permissions(); + permissions.set_mode(0o755); + std::fs::set_permissions(path, permissions).expect("chmod"); + } + } + #[test] fn exposes_mvp_tools() { let names = mvp_tool_specs() @@ -2952,6 +3257,170 @@ mod tests { assert!(error.contains("unsupported tool")); } + #[test] + fn global_registry_registers_and_executes_plugin_tools() { + let script = temp_path("plugin-tool.sh"); + std::fs::write( + &script, + "#!/bin/sh\nINPUT=$(cat)\nprintf '{\"plugin\":\"%s\",\"tool\":\"%s\",\"input\":%s}\\n' \"$CLAWD_PLUGIN_ID\" \"$CLAWD_TOOL_NAME\" \"$INPUT\"\n", + ) + .expect("write script"); + make_executable(&script); + + let registry = GlobalToolRegistry::with_plugin_tools(vec![PluginTool::new( + "demo@external", + "demo", + PluginToolDefinition { + name: 
"plugin_echo".to_string(), + description: Some("Echo plugin input".to_string()), + input_schema: json!({ + "type": "object", + "properties": { "message": { "type": "string" } }, + "required": ["message"], + "additionalProperties": false + }), + }, + "sh".to_string(), + vec![script.display().to_string()], + PluginToolPermission::WorkspaceWrite, + script.parent().map(PathBuf::from), + )]) + .expect("registry should build"); + + let names = registry + .definitions(None) + .into_iter() + .map(|definition| definition.name) + .collect::>(); + assert!(names.contains(&"bash".to_string())); + assert!(names.contains(&"plugin_echo".to_string())); + + let output = registry + .execute("plugin_echo", &json!({ "message": "hello" })) + .expect("plugin tool should execute"); + let payload: serde_json::Value = serde_json::from_str(&output).expect("valid json"); + assert_eq!(payload["plugin"], "demo@external"); + assert_eq!(payload["tool"], "plugin_echo"); + assert_eq!(payload["input"]["message"], "hello"); + + let _ = std::fs::remove_file(script); + } + + #[test] + fn global_registry_normalizes_plugin_tool_names_for_allowlists_and_execution() { + let script = temp_path("plugin-tool-normalized.sh"); + std::fs::write( + &script, + "#!/bin/sh\nINPUT=$(cat)\nprintf '{\"tool\":\"%s\",\"input\":%s}\\n' \"$CLAWD_TOOL_NAME\" \"$INPUT\"\n", + ) + .expect("write script"); + make_executable(&script); + + let registry = GlobalToolRegistry::with_plugin_tools(vec![PluginTool::new( + "demo@external", + "demo", + PluginToolDefinition { + name: "plugin_echo".to_string(), + description: Some("Echo plugin input".to_string()), + input_schema: json!({ + "type": "object", + "properties": { "message": { "type": "string" } }, + "required": ["message"], + "additionalProperties": false + }), + }, + script.display().to_string(), + Vec::new(), + PluginToolPermission::WorkspaceWrite, + script.parent().map(PathBuf::from), + )]) + .expect("registry should build"); + + let allowed = registry + 
.normalize_allowed_tools(&[String::from("PLUGIN-ECHO")]) + .expect("plugin tool allowlist should normalize") + .expect("allowlist should be present"); + assert!(allowed.contains("plugin_echo")); + + let output = registry + .execute("plugin-echo", &json!({ "message": "hello" })) + .expect("normalized plugin tool name should execute"); + let payload: serde_json::Value = serde_json::from_str(&output).expect("valid json"); + assert_eq!(payload["tool"], "plugin_echo"); + assert_eq!(payload["input"]["message"], "hello"); + + let builtin_output = GlobalToolRegistry::builtin() + .execute("structured-output", &json!({ "ok": true })) + .expect("normalized builtin tool name should execute"); + let builtin_payload: serde_json::Value = + serde_json::from_str(&builtin_output).expect("valid json"); + assert_eq!(builtin_payload["structured_output"]["ok"], true); + + let _ = std::fs::remove_file(script); + } + + #[test] + fn subagent_executor_executes_allowed_plugin_tools() { + let script = temp_path("subagent-plugin-tool.sh"); + std::fs::write( + &script, + "#!/bin/sh\nINPUT=$(cat)\nprintf '{\"tool\":\"%s\",\"input\":%s}\\n' \"$CLAWD_TOOL_NAME\" \"$INPUT\"\n", + ) + .expect("write script"); + make_executable(&script); + + let registry = GlobalToolRegistry::with_plugin_tools(vec![PluginTool::new( + "demo@external", + "demo", + PluginToolDefinition { + name: "plugin_echo".to_string(), + description: Some("Echo plugin input".to_string()), + input_schema: json!({ + "type": "object", + "properties": { "message": { "type": "string" } }, + "required": ["message"], + "additionalProperties": false + }), + }, + script.display().to_string(), + Vec::new(), + PluginToolPermission::WorkspaceWrite, + script.parent().map(PathBuf::from), + )]) + .expect("registry should build"); + + let mut executor = + SubagentToolExecutor::new(BTreeSet::from([String::from("plugin_echo")]), registry); + let output = executor + .execute("plugin-echo", r#"{"message":"hello"}"#) + .expect("plugin tool should execute 
for subagent"); + let payload: serde_json::Value = serde_json::from_str(&output).expect("valid json"); + assert_eq!(payload["tool"], "plugin_echo"); + assert_eq!(payload["input"]["message"], "hello"); + + let _ = std::fs::remove_file(script); + } + + #[test] + fn global_registry_rejects_conflicting_plugin_tool_names() { + let error = GlobalToolRegistry::with_plugin_tools(vec![PluginTool::new( + "demo@external", + "demo", + PluginToolDefinition { + name: "read-file".to_string(), + description: Some("Conflicts with builtin".to_string()), + input_schema: json!({ "type": "object" }), + }, + "echo".to_string(), + Vec::new(), + PluginToolPermission::ReadOnly, + None, + )]) + .expect_err("conflicting plugin tool should fail"); + + assert!(error.contains("conflicts with already-registered tool `read_file`")); + } + #[test] fn web_fetch_returns_prompt_aware_summary() { let server = TestServer::spawn(Arc::new(|request_line: &str| { @@ -3531,8 +4000,11 @@ mod tests { calls: 0, input_path: path.display().to_string(), }, - SubagentToolExecutor::new(BTreeSet::from([String::from("read_file")])), - agent_permission_policy(), + SubagentToolExecutor::new( + BTreeSet::from([String::from("read_file")]), + GlobalToolRegistry::builtin(), + ), + agent_permission_policy(&GlobalToolRegistry::builtin()), vec![String::from("system prompt")], ); @@ -3558,6 +4030,42 @@ mod tests { let _ = std::fs::remove_file(path); } + #[test] + fn response_to_events_ignores_thinking_blocks() { + let events = response_to_events(MessageResponse { + id: "msg-1".to_string(), + kind: "message".to_string(), + model: "claude-opus-4-6".to_string(), + role: "assistant".to_string(), + content: vec![ + OutputContentBlock::Thinking { + thinking: "step 1".to_string(), + signature: Some("sig_123".to_string()), + }, + OutputContentBlock::Text { + text: "Final answer".to_string(), + }, + ], + stop_reason: Some("end_turn".to_string()), + stop_sequence: None, + usage: Usage { + input_tokens: 1, + output_tokens: 1, + 
cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }, + request_id: None, + }); + + assert!(matches!( + &events[0], + AssistantEvent::TextDelta(text) if text == "Final answer" + )); + assert!(!events + .iter() + .any(|event| matches!(event, AssistantEvent::ToolUse { .. }))); + } + #[test] fn agent_rejects_blank_required_fields() { let missing_description = execute_tool(