mirror of
https://github.com/instructkr/claw-code.git
synced 2026-04-03 12:14:49 +08:00
feat: LSP client integration with diagnostics, definitions, and references
This commit is contained in:
69
rust/Cargo.lock
generated
69
rust/Cargo.lock
generated
@@ -123,6 +123,12 @@ dependencies = [
|
|||||||
"serde",
|
"serde",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bitflags"
|
||||||
|
version = "1.3.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bitflags"
|
name = "bitflags"
|
||||||
version = "2.11.0"
|
version = "2.11.0"
|
||||||
@@ -241,7 +247,7 @@ version = "0.28.1"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6"
|
checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags 2.11.0",
|
||||||
"crossterm_winapi",
|
"crossterm_winapi",
|
||||||
"mio",
|
"mio",
|
||||||
"parking_lot",
|
"parking_lot",
|
||||||
@@ -355,6 +361,15 @@ dependencies = [
|
|||||||
"miniz_oxide",
|
"miniz_oxide",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fluent-uri"
|
||||||
|
version = "0.1.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "17c704e9dbe1ddd863da1e6ff3567795087b1eb201ce80d8fa81162e1516500d"
|
||||||
|
dependencies = [
|
||||||
|
"bitflags 1.3.2",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "fnv"
|
name = "fnv"
|
||||||
version = "1.0.7"
|
version = "1.0.7"
|
||||||
@@ -801,6 +816,30 @@ version = "0.1.2"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
|
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "lsp"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"lsp-types",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"tokio",
|
||||||
|
"url",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "lsp-types"
|
||||||
|
version = "0.97.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "53353550a17c04ac46c585feb189c2db82154fc84b79c7a66c96c2c644f66071"
|
||||||
|
dependencies = [
|
||||||
|
"bitflags 1.3.2",
|
||||||
|
"fluent-uri",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"serde_repr",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "matchit"
|
name = "matchit"
|
||||||
version = "0.8.4"
|
version = "0.8.4"
|
||||||
@@ -856,7 +895,7 @@ version = "0.29.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
|
checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags 2.11.0",
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
"cfg_aliases",
|
"cfg_aliases",
|
||||||
"libc",
|
"libc",
|
||||||
@@ -880,7 +919,7 @@ version = "6.5.1"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "336b9c63443aceef14bea841b899035ae3abe89b7c486aaf4c5bd8aafedac3f0"
|
checksum = "336b9c63443aceef14bea841b899035ae3abe89b7c486aaf4c5bd8aafedac3f0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags 2.11.0",
|
||||||
"libc",
|
"libc",
|
||||||
"once_cell",
|
"once_cell",
|
||||||
"onig_sys",
|
"onig_sys",
|
||||||
@@ -997,7 +1036,7 @@ version = "0.13.3"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7c3a14896dfa883796f1cb410461aef38810ea05f2b2c33c5aded3649095fdad"
|
checksum = "7c3a14896dfa883796f1cb410461aef38810ea05f2b2c33c5aded3649095fdad"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags 2.11.0",
|
||||||
"getopts",
|
"getopts",
|
||||||
"memchr",
|
"memchr",
|
||||||
"pulldown-cmark-escape",
|
"pulldown-cmark-escape",
|
||||||
@@ -1134,7 +1173,7 @@ version = "0.5.18"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
|
checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags 2.11.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -1227,6 +1266,7 @@ name = "runtime"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"glob",
|
"glob",
|
||||||
|
"lsp",
|
||||||
"plugins",
|
"plugins",
|
||||||
"regex",
|
"regex",
|
||||||
"serde",
|
"serde",
|
||||||
@@ -1248,7 +1288,7 @@ version = "0.38.44"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154"
|
checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags 2.11.0",
|
||||||
"errno",
|
"errno",
|
||||||
"libc",
|
"libc",
|
||||||
"linux-raw-sys 0.4.15",
|
"linux-raw-sys 0.4.15",
|
||||||
@@ -1261,7 +1301,7 @@ version = "1.1.4"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190"
|
checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags 2.11.0",
|
||||||
"errno",
|
"errno",
|
||||||
"libc",
|
"libc",
|
||||||
"linux-raw-sys 0.12.1",
|
"linux-raw-sys 0.12.1",
|
||||||
@@ -1315,7 +1355,7 @@ version = "15.0.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "2ee1e066dc922e513bda599c6ccb5f3bb2b0ea5870a579448f2622993f0a9a2f"
|
checksum = "2ee1e066dc922e513bda599c6ccb5f3bb2b0ea5870a579448f2622993f0a9a2f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags 2.11.0",
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
"clipboard-win",
|
"clipboard-win",
|
||||||
"fd-lock",
|
"fd-lock",
|
||||||
@@ -1406,6 +1446,17 @@ dependencies = [
|
|||||||
"serde_core",
|
"serde_core",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde_repr"
|
||||||
|
version = "0.1.20"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_urlencoded"
|
name = "serde_urlencoded"
|
||||||
version = "0.7.1"
|
version = "0.7.1"
|
||||||
@@ -1732,7 +1783,7 @@ version = "0.6.8"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
|
checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags 2.11.0",
|
||||||
"bytes",
|
"bytes",
|
||||||
"futures-util",
|
"futures-util",
|
||||||
"http",
|
"http",
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ license = "MIT"
|
|||||||
publish = false
|
publish = false
|
||||||
|
|
||||||
[workspace.dependencies]
|
[workspace.dependencies]
|
||||||
|
lsp-types = "0.97"
|
||||||
serde_json = "1"
|
serde_json = "1"
|
||||||
|
|
||||||
[workspace.lints.rust]
|
[workspace.lints.rust]
|
||||||
|
|||||||
16
rust/crates/lsp/Cargo.toml
Normal file
16
rust/crates/lsp/Cargo.toml
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
[package]
|
||||||
|
name = "lsp"
|
||||||
|
version.workspace = true
|
||||||
|
edition.workspace = true
|
||||||
|
license.workspace = true
|
||||||
|
publish.workspace = true
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
lsp-types.workspace = true
|
||||||
|
serde = { version = "1", features = ["derive"] }
|
||||||
|
serde_json.workspace = true
|
||||||
|
tokio = { version = "1", features = ["io-util", "macros", "process", "rt", "rt-multi-thread", "sync", "time"] }
|
||||||
|
url = "2"
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
463
rust/crates/lsp/src/client.rs
Normal file
463
rust/crates/lsp/src/client.rs
Normal file
@@ -0,0 +1,463 @@
|
|||||||
|
use std::collections::BTreeMap;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::process::Stdio;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use std::sync::atomic::{AtomicI64, Ordering};
|
||||||
|
|
||||||
|
use lsp_types::{
|
||||||
|
Diagnostic, GotoDefinitionResponse, Location, LocationLink, Position, PublishDiagnosticsParams,
|
||||||
|
};
|
||||||
|
use serde_json::{json, Value};
|
||||||
|
use tokio::io::{AsyncBufReadExt, AsyncRead, AsyncReadExt, AsyncWriteExt, BufReader, BufWriter};
|
||||||
|
use tokio::process::{Child, ChildStdin, ChildStdout, Command};
|
||||||
|
use tokio::sync::{oneshot, Mutex};
|
||||||
|
|
||||||
|
use crate::error::LspError;
|
||||||
|
use crate::types::{LspServerConfig, SymbolLocation};
|
||||||
|
|
||||||
|
pub(crate) struct LspClient {
|
||||||
|
config: LspServerConfig,
|
||||||
|
writer: Mutex<BufWriter<ChildStdin>>,
|
||||||
|
child: Mutex<Child>,
|
||||||
|
pending_requests: Arc<Mutex<BTreeMap<i64, oneshot::Sender<Result<Value, LspError>>>>>,
|
||||||
|
diagnostics: Arc<Mutex<BTreeMap<String, Vec<Diagnostic>>>>,
|
||||||
|
open_documents: Mutex<BTreeMap<PathBuf, i32>>,
|
||||||
|
next_request_id: AtomicI64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LspClient {
|
||||||
|
pub(crate) async fn connect(config: LspServerConfig) -> Result<Self, LspError> {
|
||||||
|
let mut command = Command::new(&config.command);
|
||||||
|
command
|
||||||
|
.args(&config.args)
|
||||||
|
.current_dir(&config.workspace_root)
|
||||||
|
.stdin(Stdio::piped())
|
||||||
|
.stdout(Stdio::piped())
|
||||||
|
.stderr(Stdio::piped())
|
||||||
|
.envs(config.env.clone());
|
||||||
|
|
||||||
|
let mut child = command.spawn()?;
|
||||||
|
let stdin = child
|
||||||
|
.stdin
|
||||||
|
.take()
|
||||||
|
.ok_or_else(|| LspError::Protocol("missing LSP stdin pipe".to_string()))?;
|
||||||
|
let stdout = child
|
||||||
|
.stdout
|
||||||
|
.take()
|
||||||
|
.ok_or_else(|| LspError::Protocol("missing LSP stdout pipe".to_string()))?;
|
||||||
|
let stderr = child.stderr.take();
|
||||||
|
|
||||||
|
let client = Self {
|
||||||
|
config,
|
||||||
|
writer: Mutex::new(BufWriter::new(stdin)),
|
||||||
|
child: Mutex::new(child),
|
||||||
|
pending_requests: Arc::new(Mutex::new(BTreeMap::new())),
|
||||||
|
diagnostics: Arc::new(Mutex::new(BTreeMap::new())),
|
||||||
|
open_documents: Mutex::new(BTreeMap::new()),
|
||||||
|
next_request_id: AtomicI64::new(1),
|
||||||
|
};
|
||||||
|
|
||||||
|
client.spawn_reader(stdout);
|
||||||
|
if let Some(stderr) = stderr {
|
||||||
|
client.spawn_stderr_drain(stderr);
|
||||||
|
}
|
||||||
|
client.initialize().await?;
|
||||||
|
Ok(client)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn ensure_document_open(&self, path: &Path) -> Result<(), LspError> {
|
||||||
|
if self.is_document_open(path).await {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let contents = std::fs::read_to_string(path)?;
|
||||||
|
self.open_document(path, &contents).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn open_document(&self, path: &Path, text: &str) -> Result<(), LspError> {
|
||||||
|
let uri = file_url(path)?;
|
||||||
|
let language_id = self
|
||||||
|
.config
|
||||||
|
.language_id_for(path)
|
||||||
|
.ok_or_else(|| LspError::UnsupportedDocument(path.to_path_buf()))?;
|
||||||
|
|
||||||
|
self.notify(
|
||||||
|
"textDocument/didOpen",
|
||||||
|
json!({
|
||||||
|
"textDocument": {
|
||||||
|
"uri": uri,
|
||||||
|
"languageId": language_id,
|
||||||
|
"version": 1,
|
||||||
|
"text": text,
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
self.open_documents
|
||||||
|
.lock()
|
||||||
|
.await
|
||||||
|
.insert(path.to_path_buf(), 1);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn change_document(&self, path: &Path, text: &str) -> Result<(), LspError> {
|
||||||
|
if !self.is_document_open(path).await {
|
||||||
|
return self.open_document(path, text).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
let uri = file_url(path)?;
|
||||||
|
let next_version = {
|
||||||
|
let mut open_documents = self.open_documents.lock().await;
|
||||||
|
let version = open_documents
|
||||||
|
.entry(path.to_path_buf())
|
||||||
|
.and_modify(|value| *value += 1)
|
||||||
|
.or_insert(1);
|
||||||
|
*version
|
||||||
|
};
|
||||||
|
|
||||||
|
self.notify(
|
||||||
|
"textDocument/didChange",
|
||||||
|
json!({
|
||||||
|
"textDocument": {
|
||||||
|
"uri": uri,
|
||||||
|
"version": next_version,
|
||||||
|
},
|
||||||
|
"contentChanges": [{
|
||||||
|
"text": text,
|
||||||
|
}],
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn save_document(&self, path: &Path) -> Result<(), LspError> {
|
||||||
|
if !self.is_document_open(path).await {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
self.notify(
|
||||||
|
"textDocument/didSave",
|
||||||
|
json!({
|
||||||
|
"textDocument": {
|
||||||
|
"uri": file_url(path)?,
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn close_document(&self, path: &Path) -> Result<(), LspError> {
|
||||||
|
if !self.is_document_open(path).await {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
self.notify(
|
||||||
|
"textDocument/didClose",
|
||||||
|
json!({
|
||||||
|
"textDocument": {
|
||||||
|
"uri": file_url(path)?,
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
self.open_documents.lock().await.remove(path);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn is_document_open(&self, path: &Path) -> bool {
|
||||||
|
self.open_documents.lock().await.contains_key(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn go_to_definition(
|
||||||
|
&self,
|
||||||
|
path: &Path,
|
||||||
|
position: Position,
|
||||||
|
) -> Result<Vec<SymbolLocation>, LspError> {
|
||||||
|
self.ensure_document_open(path).await?;
|
||||||
|
let response = self
|
||||||
|
.request::<Option<GotoDefinitionResponse>>(
|
||||||
|
"textDocument/definition",
|
||||||
|
json!({
|
||||||
|
"textDocument": { "uri": file_url(path)? },
|
||||||
|
"position": position,
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(match response {
|
||||||
|
Some(GotoDefinitionResponse::Scalar(location)) => {
|
||||||
|
location_to_symbol_locations(vec![location])
|
||||||
|
}
|
||||||
|
Some(GotoDefinitionResponse::Array(locations)) => location_to_symbol_locations(locations),
|
||||||
|
Some(GotoDefinitionResponse::Link(links)) => location_links_to_symbol_locations(links),
|
||||||
|
None => Vec::new(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn find_references(
|
||||||
|
&self,
|
||||||
|
path: &Path,
|
||||||
|
position: Position,
|
||||||
|
include_declaration: bool,
|
||||||
|
) -> Result<Vec<SymbolLocation>, LspError> {
|
||||||
|
self.ensure_document_open(path).await?;
|
||||||
|
let response = self
|
||||||
|
.request::<Option<Vec<Location>>>(
|
||||||
|
"textDocument/references",
|
||||||
|
json!({
|
||||||
|
"textDocument": { "uri": file_url(path)? },
|
||||||
|
"position": position,
|
||||||
|
"context": {
|
||||||
|
"includeDeclaration": include_declaration,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(location_to_symbol_locations(response.unwrap_or_default()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn diagnostics_snapshot(&self) -> BTreeMap<String, Vec<Diagnostic>> {
|
||||||
|
self.diagnostics.lock().await.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn shutdown(&self) -> Result<(), LspError> {
|
||||||
|
let _ = self.request::<Value>("shutdown", json!({})).await;
|
||||||
|
let _ = self.notify("exit", Value::Null).await;
|
||||||
|
|
||||||
|
let mut child = self.child.lock().await;
|
||||||
|
if child.kill().await.is_err() {
|
||||||
|
let _ = child.wait().await;
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
let _ = child.wait().await;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn spawn_reader(&self, stdout: ChildStdout) {
|
||||||
|
let diagnostics = &self.diagnostics;
|
||||||
|
let pending_requests = &self.pending_requests;
|
||||||
|
|
||||||
|
let diagnostics = diagnostics.clone();
|
||||||
|
let pending_requests = pending_requests.clone();
|
||||||
|
tokio::spawn(async move {
|
||||||
|
let mut reader = BufReader::new(stdout);
|
||||||
|
let result = async {
|
||||||
|
while let Some(message) = read_message(&mut reader).await? {
|
||||||
|
if let Some(id) = message.get("id").and_then(Value::as_i64) {
|
||||||
|
let response = if let Some(error) = message.get("error") {
|
||||||
|
Err(LspError::Protocol(error.to_string()))
|
||||||
|
} else {
|
||||||
|
Ok(message.get("result").cloned().unwrap_or(Value::Null))
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(sender) = pending_requests.lock().await.remove(&id) {
|
||||||
|
let _ = sender.send(response);
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let Some(method) = message.get("method").and_then(Value::as_str) else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
if method != "textDocument/publishDiagnostics" {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let params = message.get("params").cloned().unwrap_or(Value::Null);
|
||||||
|
let notification = serde_json::from_value::<PublishDiagnosticsParams>(params)?;
|
||||||
|
let mut diagnostics_map = diagnostics.lock().await;
|
||||||
|
if notification.diagnostics.is_empty() {
|
||||||
|
diagnostics_map.remove(¬ification.uri.to_string());
|
||||||
|
} else {
|
||||||
|
diagnostics_map.insert(notification.uri.to_string(), notification.diagnostics);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok::<(), LspError>(())
|
||||||
|
}
|
||||||
|
.await;
|
||||||
|
|
||||||
|
if let Err(error) = result {
|
||||||
|
let mut pending = pending_requests.lock().await;
|
||||||
|
let drained = pending
|
||||||
|
.iter()
|
||||||
|
.map(|(id, _)| *id)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
for id in drained {
|
||||||
|
if let Some(sender) = pending.remove(&id) {
|
||||||
|
let _ = sender.send(Err(LspError::Protocol(error.to_string())));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn spawn_stderr_drain<R>(&self, stderr: R)
|
||||||
|
where
|
||||||
|
R: AsyncRead + Unpin + Send + 'static,
|
||||||
|
{
|
||||||
|
tokio::spawn(async move {
|
||||||
|
let mut reader = BufReader::new(stderr);
|
||||||
|
let mut sink = Vec::new();
|
||||||
|
let _ = reader.read_to_end(&mut sink).await;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn initialize(&self) -> Result<(), LspError> {
|
||||||
|
let workspace_uri = file_url(&self.config.workspace_root)?;
|
||||||
|
let _ = self
|
||||||
|
.request::<Value>(
|
||||||
|
"initialize",
|
||||||
|
json!({
|
||||||
|
"processId": std::process::id(),
|
||||||
|
"rootUri": workspace_uri,
|
||||||
|
"rootPath": self.config.workspace_root,
|
||||||
|
"workspaceFolders": [{
|
||||||
|
"uri": workspace_uri,
|
||||||
|
"name": self.config.name,
|
||||||
|
}],
|
||||||
|
"initializationOptions": self.config.initialization_options.clone().unwrap_or(Value::Null),
|
||||||
|
"capabilities": {
|
||||||
|
"textDocument": {
|
||||||
|
"publishDiagnostics": {
|
||||||
|
"relatedInformation": true,
|
||||||
|
},
|
||||||
|
"definition": {
|
||||||
|
"linkSupport": true,
|
||||||
|
},
|
||||||
|
"references": {}
|
||||||
|
},
|
||||||
|
"workspace": {
|
||||||
|
"configuration": false,
|
||||||
|
"workspaceFolders": true,
|
||||||
|
},
|
||||||
|
"general": {
|
||||||
|
"positionEncodings": ["utf-16"],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
self.notify("initialized", json!({})).await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn request<T>(&self, method: &str, params: Value) -> Result<T, LspError>
|
||||||
|
where
|
||||||
|
T: for<'de> serde::Deserialize<'de>,
|
||||||
|
{
|
||||||
|
let id = self.next_request_id.fetch_add(1, Ordering::Relaxed);
|
||||||
|
let (sender, receiver) = oneshot::channel();
|
||||||
|
self.pending_requests.lock().await.insert(id, sender);
|
||||||
|
|
||||||
|
if let Err(error) = self
|
||||||
|
.send_message(&json!({
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": id,
|
||||||
|
"method": method,
|
||||||
|
"params": params,
|
||||||
|
}))
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
self.pending_requests.lock().await.remove(&id);
|
||||||
|
return Err(error);
|
||||||
|
}
|
||||||
|
|
||||||
|
let response = receiver
|
||||||
|
.await
|
||||||
|
.map_err(|_| LspError::Protocol(format!("request channel closed for {method}")))??;
|
||||||
|
Ok(serde_json::from_value(response)?)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn notify(&self, method: &str, params: Value) -> Result<(), LspError> {
|
||||||
|
self.send_message(&json!({
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"method": method,
|
||||||
|
"params": params,
|
||||||
|
}))
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn send_message(&self, payload: &Value) -> Result<(), LspError> {
|
||||||
|
let body = serde_json::to_vec(payload)?;
|
||||||
|
let mut writer = self.writer.lock().await;
|
||||||
|
writer
|
||||||
|
.write_all(format!("Content-Length: {}\r\n\r\n", body.len()).as_bytes())
|
||||||
|
.await?;
|
||||||
|
writer.write_all(&body).await?;
|
||||||
|
writer.flush().await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn read_message<R>(reader: &mut BufReader<R>) -> Result<Option<Value>, LspError>
|
||||||
|
where
|
||||||
|
R: AsyncRead + Unpin,
|
||||||
|
{
|
||||||
|
let mut content_length = None;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
let mut line = String::new();
|
||||||
|
let read = reader.read_line(&mut line).await?;
|
||||||
|
if read == 0 {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
if line == "\r\n" {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
let trimmed = line.trim_end_matches(['\r', '\n']);
|
||||||
|
if let Some((name, value)) = trimmed.split_once(':') {
|
||||||
|
if name.eq_ignore_ascii_case("Content-Length") {
|
||||||
|
let value = value.trim().to_string();
|
||||||
|
content_length = Some(
|
||||||
|
value
|
||||||
|
.parse::<usize>()
|
||||||
|
.map_err(|_| LspError::InvalidContentLength(value.clone()))?,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Err(LspError::InvalidHeader(trimmed.to_string()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let content_length = content_length.ok_or(LspError::MissingContentLength)?;
|
||||||
|
let mut body = vec![0_u8; content_length];
|
||||||
|
reader.read_exact(&mut body).await?;
|
||||||
|
Ok(Some(serde_json::from_slice(&body)?))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn file_url(path: &Path) -> Result<String, LspError> {
|
||||||
|
url::Url::from_file_path(path)
|
||||||
|
.map(|url| url.to_string())
|
||||||
|
.map_err(|()| LspError::PathToUrl(path.to_path_buf()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn location_to_symbol_locations(locations: Vec<Location>) -> Vec<SymbolLocation> {
|
||||||
|
locations
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|location| {
|
||||||
|
uri_to_path(&location.uri.to_string()).map(|path| SymbolLocation {
|
||||||
|
path,
|
||||||
|
range: location.range,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn location_links_to_symbol_locations(links: Vec<LocationLink>) -> Vec<SymbolLocation> {
|
||||||
|
links.into_iter()
|
||||||
|
.filter_map(|link| {
|
||||||
|
uri_to_path(&link.target_uri.to_string()).map(|path| SymbolLocation {
|
||||||
|
path,
|
||||||
|
range: link.target_selection_range,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn uri_to_path(uri: &str) -> Option<PathBuf> {
|
||||||
|
url::Url::parse(uri).ok()?.to_file_path().ok()
|
||||||
|
}
|
||||||
62
rust/crates/lsp/src/error.rs
Normal file
62
rust/crates/lsp/src/error.rs
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
use std::fmt::{Display, Formatter};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum LspError {
|
||||||
|
Io(std::io::Error),
|
||||||
|
Json(serde_json::Error),
|
||||||
|
InvalidHeader(String),
|
||||||
|
MissingContentLength,
|
||||||
|
InvalidContentLength(String),
|
||||||
|
UnsupportedDocument(PathBuf),
|
||||||
|
UnknownServer(String),
|
||||||
|
DuplicateExtension {
|
||||||
|
extension: String,
|
||||||
|
existing_server: String,
|
||||||
|
new_server: String,
|
||||||
|
},
|
||||||
|
PathToUrl(PathBuf),
|
||||||
|
Protocol(String),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Display for LspError {
|
||||||
|
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||||
|
match self {
|
||||||
|
Self::Io(error) => write!(f, "{error}"),
|
||||||
|
Self::Json(error) => write!(f, "{error}"),
|
||||||
|
Self::InvalidHeader(header) => write!(f, "invalid LSP header: {header}"),
|
||||||
|
Self::MissingContentLength => write!(f, "missing LSP Content-Length header"),
|
||||||
|
Self::InvalidContentLength(value) => {
|
||||||
|
write!(f, "invalid LSP Content-Length value: {value}")
|
||||||
|
}
|
||||||
|
Self::UnsupportedDocument(path) => {
|
||||||
|
write!(f, "no LSP server configured for {}", path.display())
|
||||||
|
}
|
||||||
|
Self::UnknownServer(name) => write!(f, "unknown LSP server: {name}"),
|
||||||
|
Self::DuplicateExtension {
|
||||||
|
extension,
|
||||||
|
existing_server,
|
||||||
|
new_server,
|
||||||
|
} => write!(
|
||||||
|
f,
|
||||||
|
"duplicate LSP extension mapping for {extension}: {existing_server} and {new_server}"
|
||||||
|
),
|
||||||
|
Self::PathToUrl(path) => write!(f, "failed to convert path to file URL: {}", path.display()),
|
||||||
|
Self::Protocol(message) => write!(f, "LSP protocol error: {message}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::error::Error for LspError {}
|
||||||
|
|
||||||
|
impl From<std::io::Error> for LspError {
|
||||||
|
fn from(value: std::io::Error) -> Self {
|
||||||
|
Self::Io(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<serde_json::Error> for LspError {
|
||||||
|
fn from(value: serde_json::Error) -> Self {
|
||||||
|
Self::Json(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
283
rust/crates/lsp/src/lib.rs
Normal file
283
rust/crates/lsp/src/lib.rs
Normal file
@@ -0,0 +1,283 @@
|
|||||||
|
mod client;
|
||||||
|
mod error;
|
||||||
|
mod manager;
|
||||||
|
mod types;
|
||||||
|
|
||||||
|
pub use error::LspError;
|
||||||
|
pub use manager::LspManager;
|
||||||
|
pub use types::{
|
||||||
|
FileDiagnostics, LspContextEnrichment, LspServerConfig, SymbolLocation, WorkspaceDiagnostics,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
use std::fs;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::process::Command;
|
||||||
|
use std::time::{Duration, SystemTime, UNIX_EPOCH};
|
||||||
|
|
||||||
|
use lsp_types::{DiagnosticSeverity, Position};
|
||||||
|
|
||||||
|
use crate::{LspManager, LspServerConfig};
|
||||||
|
|
||||||
|
fn temp_dir(label: &str) -> PathBuf {
|
||||||
|
let nanos = SystemTime::now()
|
||||||
|
.duration_since(UNIX_EPOCH)
|
||||||
|
.expect("time should be after epoch")
|
||||||
|
.as_nanos();
|
||||||
|
std::env::temp_dir().join(format!("lsp-{label}-{nanos}"))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn python3_path() -> Option<String> {
|
||||||
|
let candidates = ["python3", "/usr/bin/python3"];
|
||||||
|
candidates.iter().find_map(|candidate| {
|
||||||
|
Command::new(candidate)
|
||||||
|
.arg("--version")
|
||||||
|
.output()
|
||||||
|
.ok()
|
||||||
|
.filter(|output| output.status.success())
|
||||||
|
.map(|_| (*candidate).to_string())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_mock_server_script(root: &std::path::Path) -> PathBuf {
|
||||||
|
let script_path = root.join("mock_lsp_server.py");
|
||||||
|
fs::write(
|
||||||
|
&script_path,
|
||||||
|
r#"import json
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def read_message():
|
||||||
|
headers = {}
|
||||||
|
while True:
|
||||||
|
line = sys.stdin.buffer.readline()
|
||||||
|
if not line:
|
||||||
|
return None
|
||||||
|
if line == b"\r\n":
|
||||||
|
break
|
||||||
|
key, value = line.decode("utf-8").split(":", 1)
|
||||||
|
headers[key.lower()] = value.strip()
|
||||||
|
length = int(headers["content-length"])
|
||||||
|
body = sys.stdin.buffer.read(length)
|
||||||
|
return json.loads(body)
|
||||||
|
|
||||||
|
|
||||||
|
def write_message(payload):
|
||||||
|
raw = json.dumps(payload).encode("utf-8")
|
||||||
|
sys.stdout.buffer.write(f"Content-Length: {len(raw)}\r\n\r\n".encode("utf-8"))
|
||||||
|
sys.stdout.buffer.write(raw)
|
||||||
|
sys.stdout.buffer.flush()
|
||||||
|
|
||||||
|
|
||||||
|
while True:
|
||||||
|
message = read_message()
|
||||||
|
if message is None:
|
||||||
|
break
|
||||||
|
|
||||||
|
method = message.get("method")
|
||||||
|
if method == "initialize":
|
||||||
|
write_message({
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": message["id"],
|
||||||
|
"result": {
|
||||||
|
"capabilities": {
|
||||||
|
"definitionProvider": True,
|
||||||
|
"referencesProvider": True,
|
||||||
|
"textDocumentSync": 1,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
})
|
||||||
|
elif method == "initialized":
|
||||||
|
continue
|
||||||
|
elif method == "textDocument/didOpen":
|
||||||
|
document = message["params"]["textDocument"]
|
||||||
|
write_message({
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"method": "textDocument/publishDiagnostics",
|
||||||
|
"params": {
|
||||||
|
"uri": document["uri"],
|
||||||
|
"diagnostics": [
|
||||||
|
{
|
||||||
|
"range": {
|
||||||
|
"start": {"line": 0, "character": 0},
|
||||||
|
"end": {"line": 0, "character": 3},
|
||||||
|
},
|
||||||
|
"severity": 1,
|
||||||
|
"source": "mock-server",
|
||||||
|
"message": "mock error",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
})
|
||||||
|
elif method == "textDocument/didChange":
|
||||||
|
continue
|
||||||
|
elif method == "textDocument/didSave":
|
||||||
|
continue
|
||||||
|
elif method == "textDocument/definition":
|
||||||
|
uri = message["params"]["textDocument"]["uri"]
|
||||||
|
write_message({
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": message["id"],
|
||||||
|
"result": [
|
||||||
|
{
|
||||||
|
"uri": uri,
|
||||||
|
"range": {
|
||||||
|
"start": {"line": 0, "character": 0},
|
||||||
|
"end": {"line": 0, "character": 3},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
],
|
||||||
|
})
|
||||||
|
elif method == "textDocument/references":
|
||||||
|
uri = message["params"]["textDocument"]["uri"]
|
||||||
|
write_message({
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": message["id"],
|
||||||
|
"result": [
|
||||||
|
{
|
||||||
|
"uri": uri,
|
||||||
|
"range": {
|
||||||
|
"start": {"line": 0, "character": 0},
|
||||||
|
"end": {"line": 0, "character": 3},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"uri": uri,
|
||||||
|
"range": {
|
||||||
|
"start": {"line": 1, "character": 4},
|
||||||
|
"end": {"line": 1, "character": 7},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})
|
||||||
|
elif method == "shutdown":
|
||||||
|
write_message({"jsonrpc": "2.0", "id": message["id"], "result": None})
|
||||||
|
elif method == "exit":
|
||||||
|
break
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.expect("mock server should be written");
|
||||||
|
script_path
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn wait_for_diagnostics(manager: &LspManager) {
|
||||||
|
tokio::time::timeout(Duration::from_secs(2), async {
|
||||||
|
loop {
|
||||||
|
if manager
|
||||||
|
.collect_workspace_diagnostics()
|
||||||
|
.await
|
||||||
|
.expect("diagnostics snapshot should load")
|
||||||
|
.total_diagnostics()
|
||||||
|
> 0
|
||||||
|
{
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
tokio::time::sleep(Duration::from_millis(10)).await;
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.await
|
||||||
|
.expect("diagnostics should arrive from mock server");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test(flavor = "current_thread")]
async fn collects_diagnostics_and_symbol_navigation_from_mock_server() {
    // Skip silently when python3 is unavailable to run the mock LSP server.
    let Some(python) = python3_path() else {
        return;
    };

    // given: a temp workspace with one Rust file, managed by a single
    // server config whose command is the Python mock server script.
    let root = temp_dir("manager");
    fs::create_dir_all(root.join("src")).expect("workspace root should exist");
    let script_path = write_mock_server_script(&root);
    let source_path = root.join("src").join("main.rs");
    fs::write(&source_path, "fn main() {}\nlet value = 1;\n").expect("source file should exist");
    let manager = LspManager::new(vec![LspServerConfig {
        name: "rust-analyzer".to_string(),
        command: python,
        args: vec![script_path.display().to_string()],
        env: BTreeMap::new(),
        workspace_root: root.clone(),
        initialization_options: None,
        extension_to_language: BTreeMap::from([(".rs".to_string(), "rust".to_string())]),
    }])
    .expect("manager should build");
    manager
        .open_document(&source_path, &fs::read_to_string(&source_path).expect("source read should succeed"))
        .await
        .expect("document should open");
    // Diagnostics are published asynchronously after didOpen; block until seen.
    wait_for_diagnostics(&manager).await;

    // when: query the three navigation/diagnostic surfaces.
    let diagnostics = manager
        .collect_workspace_diagnostics()
        .await
        .expect("diagnostics should be available");
    let definitions = manager
        .go_to_definition(&source_path, Position::new(0, 0))
        .await
        .expect("definition request should succeed");
    let references = manager
        .find_references(&source_path, Position::new(0, 0), true)
        .await
        .expect("references request should succeed");

    // then: counts and values mirror the mock server's fixed payloads
    // (one error diagnostic, one definition, two references).
    assert_eq!(diagnostics.files.len(), 1);
    assert_eq!(diagnostics.total_diagnostics(), 1);
    assert_eq!(diagnostics.files[0].diagnostics[0].severity, Some(DiagnosticSeverity::ERROR));
    assert_eq!(definitions.len(), 1);
    assert_eq!(definitions[0].start_line(), 1);
    assert_eq!(references.len(), 2);

    manager.shutdown().await.expect("shutdown should succeed");
    fs::remove_dir_all(root).expect("temp workspace should be removed");
}
|
||||||
|
|
||||||
|
#[tokio::test(flavor = "current_thread")]
async fn renders_runtime_context_enrichment_for_prompt_usage() {
    // Skip silently when python3 is unavailable to run the mock LSP server.
    let Some(python) = python3_path() else {
        return;
    };

    // given: a temp workspace with one Rust file served by the mock server.
    let root = temp_dir("prompt");
    fs::create_dir_all(root.join("src")).expect("workspace root should exist");
    let script_path = write_mock_server_script(&root);
    let source_path = root.join("src").join("lib.rs");
    fs::write(&source_path, "pub fn answer() -> i32 { 42 }\n").expect("source file should exist");
    let manager = LspManager::new(vec![LspServerConfig {
        name: "rust-analyzer".to_string(),
        command: python,
        args: vec![script_path.display().to_string()],
        env: BTreeMap::new(),
        workspace_root: root.clone(),
        initialization_options: None,
        extension_to_language: BTreeMap::from([(".rs".to_string(), "rust".to_string())]),
    }])
    .expect("manager should build");
    manager
        .open_document(&source_path, &fs::read_to_string(&source_path).expect("source read should succeed"))
        .await
        .expect("document should open");
    // Ensure the published diagnostic has landed before rendering.
    wait_for_diagnostics(&manager).await;

    // when: build the enrichment bundle and render it as a prompt section.
    let enrichment = manager
        .context_enrichment(&source_path, Position::new(0, 0))
        .await
        .expect("context enrichment should succeed");
    let rendered = enrichment.render_prompt_section();

    // then: the rendered section contains the header, the summary line, both
    // navigation subsections, and the mock diagnostic message.
    assert!(rendered.contains("# LSP context"));
    assert!(rendered.contains("Workspace diagnostics: 1 across 1 file(s)"));
    assert!(rendered.contains("Definitions:"));
    assert!(rendered.contains("References:"));
    assert!(rendered.contains("mock error"));

    manager.shutdown().await.expect("shutdown should succeed");
    fs::remove_dir_all(root).expect("temp workspace should be removed");
}
|
||||||
|
}
|
||||||
191
rust/crates/lsp/src/manager.rs
Normal file
191
rust/crates/lsp/src/manager.rs
Normal file
@@ -0,0 +1,191 @@
|
|||||||
|
use std::collections::{BTreeMap, BTreeSet};
|
||||||
|
use std::path::Path;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use lsp_types::Position;
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
|
use crate::client::LspClient;
|
||||||
|
use crate::error::LspError;
|
||||||
|
use crate::types::{
|
||||||
|
normalize_extension, FileDiagnostics, LspContextEnrichment, LspServerConfig, SymbolLocation,
|
||||||
|
WorkspaceDiagnostics,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Routes LSP operations to per-server clients, connecting each configured
/// server lazily the first time a file it owns is touched.
pub struct LspManager {
    /// Server configurations keyed by server name.
    server_configs: BTreeMap<String, LspServerConfig>,
    /// Maps a normalized file extension (e.g. ".rs") to the name of the
    /// server that owns it; populated and validated in `new`.
    extension_map: BTreeMap<String, String>,
    /// Lazily-connected clients keyed by server name; guarded by an async
    /// mutex because clients are created inside async methods.
    clients: Mutex<BTreeMap<String, Arc<LspClient>>>,
}
|
||||||
|
|
||||||
|
impl LspManager {
    /// Builds a manager from server configs, indexing them by name and
    /// mapping each declared file extension to its owning server.
    ///
    /// # Errors
    /// Returns [`LspError::DuplicateExtension`] when two configs claim the
    /// same normalized extension.
    pub fn new(server_configs: Vec<LspServerConfig>) -> Result<Self, LspError> {
        let mut configs_by_name = BTreeMap::new();
        let mut extension_map = BTreeMap::new();

        for config in server_configs {
            for extension in config.extension_to_language.keys() {
                let normalized = normalize_extension(extension);
                // `insert` returns the previous owner, which signals a clash.
                if let Some(existing_server) = extension_map.insert(normalized.clone(), config.name.clone()) {
                    return Err(LspError::DuplicateExtension {
                        extension: normalized,
                        existing_server,
                        new_server: config.name.clone(),
                    });
                }
            }
            configs_by_name.insert(config.name.clone(), config);
        }

        Ok(Self {
            server_configs: configs_by_name,
            extension_map,
            clients: Mutex::new(BTreeMap::new()),
        })
    }

    /// Returns true when `path` has an extension mapped to some configured
    /// server; extensionless paths are unsupported.
    #[must_use]
    pub fn supports_path(&self, path: &Path) -> bool {
        path.extension().is_some_and(|extension| {
            let normalized = normalize_extension(extension.to_string_lossy().as_ref());
            self.extension_map.contains_key(&normalized)
        })
    }

    /// Opens `path` with the given text in the owning server's client,
    /// connecting the client on first use.
    pub async fn open_document(&self, path: &Path, text: &str) -> Result<(), LspError> {
        self.client_for_path(path).await?.open_document(path, text).await
    }

    /// Re-reads `path` from disk, then forwards a change followed by a save
    /// so the server's view matches the on-disk contents.
    pub async fn sync_document_from_disk(&self, path: &Path) -> Result<(), LspError> {
        let contents = std::fs::read_to_string(path)?;
        self.change_document(path, &contents).await?;
        self.save_document(path).await
    }

    /// Forwards a document change (full new text) for `path`.
    pub async fn change_document(&self, path: &Path, text: &str) -> Result<(), LspError> {
        self.client_for_path(path).await?.change_document(path, text).await
    }

    /// Forwards a document save notification for `path`.
    pub async fn save_document(&self, path: &Path) -> Result<(), LspError> {
        self.client_for_path(path).await?.save_document(path).await
    }

    /// Forwards a document close for `path`.
    pub async fn close_document(&self, path: &Path) -> Result<(), LspError> {
        self.client_for_path(path).await?.close_document(path).await
    }

    /// Resolves definition locations for the symbol at `position` in `path`,
    /// with exact (path, range) duplicates removed.
    pub async fn go_to_definition(
        &self,
        path: &Path,
        position: Position,
    ) -> Result<Vec<SymbolLocation>, LspError> {
        let mut locations = self.client_for_path(path).await?.go_to_definition(path, position).await?;
        dedupe_locations(&mut locations);
        Ok(locations)
    }

    /// Resolves reference locations for the symbol at `position` in `path`,
    /// with exact (path, range) duplicates removed.
    pub async fn find_references(
        &self,
        path: &Path,
        position: Position,
        include_declaration: bool,
    ) -> Result<Vec<SymbolLocation>, LspError> {
        let mut locations = self
            .client_for_path(path)
            .await?
            .find_references(path, position, include_declaration)
            .await?;
        dedupe_locations(&mut locations);
        Ok(locations)
    }

    /// Gathers the latest diagnostics snapshot from every connected client.
    /// URIs that do not parse to local file paths are skipped, as are files
    /// with no diagnostics; results are sorted by path for stable output.
    pub async fn collect_workspace_diagnostics(&self) -> Result<WorkspaceDiagnostics, LspError> {
        // Clone the client handles up front so the lock is not held across
        // the per-client awaits below.
        let clients = self.clients.lock().await.values().cloned().collect::<Vec<_>>();
        let mut files = Vec::new();

        for client in clients {
            for (uri, diagnostics) in client.diagnostics_snapshot().await {
                // Map the file:// URI back to a filesystem path; any other
                // scheme (or parse failure) is dropped silently.
                let Ok(path) = url::Url::parse(&uri)
                    .and_then(|url| url.to_file_path().map_err(|()| url::ParseError::RelativeUrlWithoutBase))
                else {
                    continue;
                };
                if diagnostics.is_empty() {
                    continue;
                }
                files.push(FileDiagnostics {
                    path,
                    uri,
                    diagnostics,
                });
            }
        }

        files.sort_by(|left, right| left.path.cmp(&right.path));
        Ok(WorkspaceDiagnostics { files })
    }

    /// Bundles workspace diagnostics plus definition and reference lookups
    /// for `path` at `position` into one enrichment for prompt rendering.
    pub async fn context_enrichment(
        &self,
        path: &Path,
        position: Position,
    ) -> Result<LspContextEnrichment, LspError> {
        Ok(LspContextEnrichment {
            file_path: path.to_path_buf(),
            diagnostics: self.collect_workspace_diagnostics().await?,
            definitions: self.go_to_definition(path, position).await?,
            references: self.find_references(path, position, true).await?,
        })
    }

    /// Clears the client map and shuts down every previously-connected
    /// client in turn.
    ///
    /// # Errors
    /// Propagates the first client shutdown error; later clients are not
    /// shut down in that case (all have already been removed from the map).
    pub async fn shutdown(&self) -> Result<(), LspError> {
        let mut clients = self.clients.lock().await;
        let drained = clients.values().cloned().collect::<Vec<_>>();
        clients.clear();
        // Release the lock before awaiting the individual shutdowns.
        drop(clients);

        for client in drained {
            client.shutdown().await?;
        }
        Ok(())
    }

    /// Returns the cached client for `path`'s extension, connecting and
    /// caching a new one on first use.
    ///
    /// # Errors
    /// `UnsupportedDocument` when the path has no extension or the extension
    /// is unmapped; `UnknownServer` when the mapped name has no config.
    async fn client_for_path(&self, path: &Path) -> Result<Arc<LspClient>, LspError> {
        let extension = path
            .extension()
            .map(|extension| normalize_extension(extension.to_string_lossy().as_ref()))
            .ok_or_else(|| LspError::UnsupportedDocument(path.to_path_buf()))?;
        let server_name = self
            .extension_map
            .get(&extension)
            .cloned()
            .ok_or_else(|| LspError::UnsupportedDocument(path.to_path_buf()))?;

        let mut clients = self.clients.lock().await;
        if let Some(client) = clients.get(&server_name) {
            return Ok(client.clone());
        }

        let config = self
            .server_configs
            .get(&server_name)
            .cloned()
            .ok_or_else(|| LspError::UnknownServer(server_name.clone()))?;
        // The (tokio) lock is deliberately held across `connect`: this
        // serializes first-time connections but prevents two callers from
        // spawning the same server twice.
        let client = Arc::new(LspClient::connect(config).await?);
        clients.insert(server_name, client.clone());
        Ok(client)
    }
}
|
||||||
|
|
||||||
|
fn dedupe_locations(locations: &mut Vec<SymbolLocation>) {
|
||||||
|
let mut seen = BTreeSet::new();
|
||||||
|
locations.retain(|location| {
|
||||||
|
seen.insert((
|
||||||
|
location.path.clone(),
|
||||||
|
location.range.start.line,
|
||||||
|
location.range.start.character,
|
||||||
|
location.range.end.line,
|
||||||
|
location.range.end.character,
|
||||||
|
))
|
||||||
|
});
|
||||||
|
}
|
||||||
186
rust/crates/lsp/src/types.rs
Normal file
186
rust/crates/lsp/src/types.rs
Normal file
@@ -0,0 +1,186 @@
|
|||||||
|
use std::collections::BTreeMap;
|
||||||
|
use std::fmt::{Display, Formatter};
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
|
use lsp_types::{Diagnostic, Range};
|
||||||
|
use serde_json::Value;
|
||||||
|
|
||||||
|
/// Static configuration for one language server process.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct LspServerConfig {
    /// Unique server name used as the lookup key inside the manager.
    pub name: String,
    /// Executable to spawn for this server.
    pub command: String,
    /// Arguments passed to `command`.
    pub args: Vec<String>,
    /// Extra environment variables for the spawned process.
    pub env: BTreeMap<String, String>,
    /// Workspace root the server is initialized against.
    pub workspace_root: PathBuf,
    /// Optional raw JSON passed as LSP `initializationOptions`.
    pub initialization_options: Option<Value>,
    /// Maps a file extension (with or without a leading dot) to the LSP
    /// language id reported for matching documents.
    pub extension_to_language: BTreeMap<String, String>,
}
|
||||||
|
|
||||||
|
impl LspServerConfig {
|
||||||
|
#[must_use]
|
||||||
|
pub fn language_id_for(&self, path: &Path) -> Option<&str> {
|
||||||
|
let extension = normalize_extension(path.extension()?.to_string_lossy().as_ref());
|
||||||
|
self.extension_to_language
|
||||||
|
.get(&extension)
|
||||||
|
.map(String::as_str)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Diagnostics reported for a single file, carrying both the resolved
/// filesystem path and the original URI the server used.
#[derive(Debug, Clone, PartialEq)]
pub struct FileDiagnostics {
    /// Local filesystem path derived from `uri`.
    pub path: PathBuf,
    /// URI string exactly as published by the server.
    pub uri: String,
    /// The diagnostics currently attached to this file.
    pub diagnostics: Vec<Diagnostic>,
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Default, PartialEq)]
|
||||||
|
pub struct WorkspaceDiagnostics {
|
||||||
|
pub files: Vec<FileDiagnostics>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl WorkspaceDiagnostics {
|
||||||
|
#[must_use]
|
||||||
|
pub fn is_empty(&self) -> bool {
|
||||||
|
self.files.is_empty()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn total_diagnostics(&self) -> usize {
|
||||||
|
self.files.iter().map(|file| file.diagnostics.len()).sum()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
|
pub struct SymbolLocation {
|
||||||
|
pub path: PathBuf,
|
||||||
|
pub range: Range,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SymbolLocation {
|
||||||
|
#[must_use]
|
||||||
|
pub fn start_line(&self) -> u32 {
|
||||||
|
self.range.start.line + 1
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn start_character(&self) -> u32 {
|
||||||
|
self.range.start.character + 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Display for SymbolLocation {
|
||||||
|
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"{}:{}:{}",
|
||||||
|
self.path.display(),
|
||||||
|
self.start_line(),
|
||||||
|
self.start_character()
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// LSP-derived context for one focus file, suitable for rendering into a
/// system prompt: workspace diagnostics plus definition/reference locations.
#[derive(Debug, Clone, Default, PartialEq)]
pub struct LspContextEnrichment {
    /// File the definition/reference lookups were issued against.
    pub file_path: PathBuf,
    /// Diagnostics for the whole workspace, not only `file_path`.
    pub diagnostics: WorkspaceDiagnostics,
    /// Definition locations for the queried position.
    pub definitions: Vec<SymbolLocation>,
    /// Reference locations for the queried position.
    pub references: Vec<SymbolLocation>,
}

impl LspContextEnrichment {
    /// True when there is nothing worth rendering: no diagnostics,
    /// definitions, or references.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.diagnostics.is_empty() && self.definitions.is_empty() && self.references.is_empty()
    }

    /// Renders the enrichment as a text section headed "# LSP context",
    /// truncating each list so the prompt stays bounded in noisy workspaces.
    #[must_use]
    pub fn render_prompt_section(&self) -> String {
        // Caps on rendered entries; extras are replaced by an omission note.
        const MAX_RENDERED_DIAGNOSTICS: usize = 12;
        const MAX_RENDERED_LOCATIONS: usize = 12;

        let mut lines = vec!["# LSP context".to_string()];
        lines.push(format!(" - Focus file: {}", self.file_path.display()));
        lines.push(format!(
            " - Workspace diagnostics: {} across {} file(s)",
            self.diagnostics.total_diagnostics(),
            self.diagnostics.files.len()
        ));

        if !self.diagnostics.files.is_empty() {
            lines.push(String::new());
            lines.push("Diagnostics:".to_string());
            let mut rendered = 0usize;
            for file in &self.diagnostics.files {
                for diagnostic in &file.diagnostics {
                    // Emit the omission note only when an entry beyond the
                    // cap is actually encountered.
                    if rendered == MAX_RENDERED_DIAGNOSTICS {
                        lines.push(" - Additional diagnostics omitted for brevity.".to_string());
                        break;
                    }
                    let severity = diagnostic_severity_label(diagnostic.severity);
                    // LSP positions are zero-based; render them one-based,
                    // and flatten multi-line messages onto one line.
                    lines.push(format!(
                        " - {}:{}:{} [{}] {}",
                        file.path.display(),
                        diagnostic.range.start.line + 1,
                        diagnostic.range.start.character + 1,
                        severity,
                        diagnostic.message.replace('\n', " ")
                    ));
                    rendered += 1;
                }
                if rendered == MAX_RENDERED_DIAGNOSTICS {
                    break;
                }
            }
        }

        if !self.definitions.is_empty() {
            lines.push(String::new());
            lines.push("Definitions:".to_string());
            lines.extend(
                self.definitions
                    .iter()
                    .take(MAX_RENDERED_LOCATIONS)
                    .map(|location| format!(" - {location}")),
            );
            if self.definitions.len() > MAX_RENDERED_LOCATIONS {
                lines.push(" - Additional definitions omitted for brevity.".to_string());
            }
        }

        if !self.references.is_empty() {
            lines.push(String::new());
            lines.push("References:".to_string());
            lines.extend(
                self.references
                    .iter()
                    .take(MAX_RENDERED_LOCATIONS)
                    .map(|location| format!(" - {location}")),
            );
            if self.references.len() > MAX_RENDERED_LOCATIONS {
                lines.push(" - Additional references omitted for brevity.".to_string());
            }
        }

        lines.join("\n")
    }
}
|
||||||
|
|
||||||
|
/// Lower-cases `extension` and guarantees a leading dot, so map lookups use
/// one canonical form (e.g. "RS" and ".rs" both become ".rs").
#[must_use]
pub(crate) fn normalize_extension(extension: &str) -> String {
    let lowered = extension.to_ascii_lowercase();
    if lowered.starts_with('.') {
        lowered
    } else {
        format!(".{lowered}")
    }
}
|
||||||
|
|
||||||
|
/// Maps an LSP severity to a short lowercase label for prompt rendering.
/// `None` and any severity outside the four standard values become
/// "unknown".
fn diagnostic_severity_label(severity: Option<lsp_types::DiagnosticSeverity>) -> &'static str {
    match severity {
        Some(lsp_types::DiagnosticSeverity::ERROR) => "error",
        Some(lsp_types::DiagnosticSeverity::WARNING) => "warning",
        Some(lsp_types::DiagnosticSeverity::INFORMATION) => "info",
        Some(lsp_types::DiagnosticSeverity::HINT) => "hint",
        // Catch-all covers `None` and non-standard severity values.
        _ => "unknown",
    }
}
|
||||||
@@ -8,6 +8,7 @@ publish.workspace = true
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
sha2 = "0.10"
|
sha2 = "0.10"
|
||||||
glob = "0.3"
|
glob = "0.3"
|
||||||
|
lsp = { path = "../lsp" }
|
||||||
plugins = { path = "../plugins" }
|
plugins = { path = "../plugins" }
|
||||||
regex = "1"
|
regex = "1"
|
||||||
serde = { version = "1", features = ["derive"] }
|
serde = { version = "1", features = ["derive"] }
|
||||||
|
|||||||
@@ -17,6 +17,10 @@ pub mod sandbox;
|
|||||||
mod session;
|
mod session;
|
||||||
mod usage;
|
mod usage;
|
||||||
|
|
||||||
|
pub use lsp::{
|
||||||
|
FileDiagnostics, LspContextEnrichment, LspError, LspManager, LspServerConfig,
|
||||||
|
SymbolLocation, WorkspaceDiagnostics,
|
||||||
|
};
|
||||||
pub use bash::{execute_bash, BashCommandInput, BashCommandOutput};
|
pub use bash::{execute_bash, BashCommandInput, BashCommandOutput};
|
||||||
pub use bootstrap::{BootstrapPhase, BootstrapPlan};
|
pub use bootstrap::{BootstrapPhase, BootstrapPlan};
|
||||||
pub use compact::{
|
pub use compact::{
|
||||||
@@ -24,8 +28,8 @@ pub use compact::{
|
|||||||
get_compact_continuation_message, should_compact, CompactionConfig, CompactionResult,
|
get_compact_continuation_message, should_compact, CompactionConfig, CompactionResult,
|
||||||
};
|
};
|
||||||
pub use config::{
|
pub use config::{
|
||||||
ConfigEntry, ConfigError, ConfigLoader, ConfigSource, McpConfigCollection,
|
ConfigEntry, ConfigError, ConfigLoader, ConfigSource, McpManagedProxyServerConfig,
|
||||||
McpManagedProxyServerConfig, McpOAuthConfig, McpRemoteServerConfig, McpSdkServerConfig,
|
McpConfigCollection, McpOAuthConfig, McpRemoteServerConfig, McpSdkServerConfig,
|
||||||
McpServerConfig, McpStdioServerConfig, McpTransport, McpWebSocketServerConfig, OAuthConfig,
|
McpServerConfig, McpStdioServerConfig, McpTransport, McpWebSocketServerConfig, OAuthConfig,
|
||||||
ResolvedPermissionMode, RuntimeConfig, RuntimeFeatureConfig, RuntimeHookConfig,
|
ResolvedPermissionMode, RuntimeConfig, RuntimeFeatureConfig, RuntimeHookConfig,
|
||||||
RuntimePluginConfig, ScopedMcpServerConfig, CLAW_SETTINGS_SCHEMA_NAME,
|
RuntimePluginConfig, ScopedMcpServerConfig, CLAW_SETTINGS_SCHEMA_NAME,
|
||||||
@@ -45,7 +49,7 @@ pub use mcp::{
|
|||||||
scoped_mcp_config_hash, unwrap_ccr_proxy_url,
|
scoped_mcp_config_hash, unwrap_ccr_proxy_url,
|
||||||
};
|
};
|
||||||
pub use mcp_client::{
|
pub use mcp_client::{
|
||||||
McpClientAuth, McpClientBootstrap, McpClientTransport, McpManagedProxyTransport,
|
McpManagedProxyTransport, McpClientAuth, McpClientBootstrap, McpClientTransport,
|
||||||
McpRemoteTransport, McpSdkTransport, McpStdioTransport,
|
McpRemoteTransport, McpSdkTransport, McpStdioTransport,
|
||||||
};
|
};
|
||||||
pub use mcp_stdio::{
|
pub use mcp_stdio::{
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ use std::path::{Path, PathBuf};
|
|||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
|
||||||
use crate::config::{ConfigError, ConfigLoader, RuntimeConfig};
|
use crate::config::{ConfigError, ConfigLoader, RuntimeConfig};
|
||||||
|
use lsp::LspContextEnrichment;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum PromptBuildError {
|
pub enum PromptBuildError {
|
||||||
@@ -130,6 +131,15 @@ impl SystemPromptBuilder {
|
|||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Appends the rendered LSP context section to the prompt's trailing
/// sections, doing nothing when the enrichment is empty so the prompt
/// never gains a bare "# LSP context" header.
#[must_use]
pub fn with_lsp_context(mut self, enrichment: &LspContextEnrichment) -> Self {
    if !enrichment.is_empty() {
        self.append_sections
            .push(enrichment.render_prompt_section());
    }
    self
}
|
||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn build(&self) -> Vec<String> {
|
pub fn build(&self) -> Vec<String> {
|
||||||
let mut sections = Vec::new();
|
let mut sections = Vec::new();
|
||||||
|
|||||||
@@ -91,10 +91,7 @@ impl GlobalToolRegistry {
|
|||||||
Ok(Self { plugin_tools })
|
Ok(Self { plugin_tools })
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn normalize_allowed_tools(
|
pub fn normalize_allowed_tools(&self, values: &[String]) -> Result<Option<BTreeSet<String>>, String> {
|
||||||
&self,
|
|
||||||
values: &[String],
|
|
||||||
) -> Result<Option<BTreeSet<String>>, String> {
|
|
||||||
if values.is_empty() {
|
if values.is_empty() {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
@@ -103,11 +100,7 @@ impl GlobalToolRegistry {
|
|||||||
let canonical_names = builtin_specs
|
let canonical_names = builtin_specs
|
||||||
.iter()
|
.iter()
|
||||||
.map(|spec| spec.name.to_string())
|
.map(|spec| spec.name.to_string())
|
||||||
.chain(
|
.chain(self.plugin_tools.iter().map(|tool| tool.definition().name.clone()))
|
||||||
self.plugin_tools
|
|
||||||
.iter()
|
|
||||||
.map(|tool| tool.definition().name.clone()),
|
|
||||||
)
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
let mut name_map = canonical_names
|
let mut name_map = canonical_names
|
||||||
.iter()
|
.iter()
|
||||||
@@ -158,8 +151,7 @@ impl GlobalToolRegistry {
|
|||||||
.plugin_tools
|
.plugin_tools
|
||||||
.iter()
|
.iter()
|
||||||
.filter(|tool| {
|
.filter(|tool| {
|
||||||
allowed_tools
|
allowed_tools.is_none_or(|allowed| allowed.contains(tool.definition().name.as_str()))
|
||||||
.is_none_or(|allowed| allowed.contains(tool.definition().name.as_str()))
|
|
||||||
})
|
})
|
||||||
.map(|tool| ToolDefinition {
|
.map(|tool| ToolDefinition {
|
||||||
name: tool.definition().name.clone(),
|
name: tool.definition().name.clone(),
|
||||||
@@ -182,8 +174,7 @@ impl GlobalToolRegistry {
|
|||||||
.plugin_tools
|
.plugin_tools
|
||||||
.iter()
|
.iter()
|
||||||
.filter(|tool| {
|
.filter(|tool| {
|
||||||
allowed_tools
|
allowed_tools.is_none_or(|allowed| allowed.contains(tool.definition().name.as_str()))
|
||||||
.is_none_or(|allowed| allowed.contains(tool.definition().name.as_str()))
|
|
||||||
})
|
})
|
||||||
.map(|tool| {
|
.map(|tool| {
|
||||||
(
|
(
|
||||||
@@ -3458,6 +3449,9 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn skill_loads_local_skill_prompt() {
|
fn skill_loads_local_skill_prompt() {
|
||||||
|
let _guard = env_lock()
|
||||||
|
.lock()
|
||||||
|
.unwrap_or_else(std::sync::PoisonError::into_inner);
|
||||||
let result = execute_tool(
|
let result = execute_tool(
|
||||||
"Skill",
|
"Skill",
|
||||||
&json!({
|
&json!({
|
||||||
|
|||||||
Reference in New Issue
Block a user