feat: LSP client integration with diagnostics, definitions, and references

This commit is contained in:
Sisyphus
2026-04-01 21:34:36 +09:00
parent 821199640a
commit 12182d8b3c
12 changed files with 1287 additions and 25 deletions

View File

@@ -0,0 +1,463 @@
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::process::Stdio;
use std::sync::Arc;
use std::sync::atomic::{AtomicI64, Ordering};
use lsp_types::{
Diagnostic, GotoDefinitionResponse, Location, LocationLink, Position, PublishDiagnosticsParams,
};
use serde_json::{json, Value};
use tokio::io::{AsyncBufReadExt, AsyncRead, AsyncReadExt, AsyncWriteExt, BufReader, BufWriter};
use tokio::process::{Child, ChildStdin, ChildStdout, Command};
use tokio::sync::{oneshot, Mutex};
use crate::error::LspError;
use crate::types::{LspServerConfig, SymbolLocation};
/// A single running LSP server process plus the JSON-RPC plumbing
/// (request correlation, diagnostics cache, open-document tracking)
/// needed to talk to it over stdio.
pub(crate) struct LspClient {
    // Configuration the server was spawned from (command, workspace, language map).
    config: LspServerConfig,
    // Buffered writer over the child's stdin; the mutex keeps concurrent
    // sends from interleaving message frames.
    writer: Mutex<BufWriter<ChildStdin>>,
    // Process handle, used to kill/reap the server on shutdown.
    child: Mutex<Child>,
    // In-flight requests keyed by JSON-RPC id; resolved by the reader task.
    pending_requests: Arc<Mutex<BTreeMap<i64, oneshot::Sender<Result<Value, LspError>>>>>,
    // Latest `publishDiagnostics` payloads keyed by document URI string.
    diagnostics: Arc<Mutex<BTreeMap<String, Vec<Diagnostic>>>>,
    // Documents currently open on the server, mapped to their didChange version.
    open_documents: Mutex<BTreeMap<PathBuf, i32>>,
    // Monotonic JSON-RPC request id generator.
    next_request_id: AtomicI64,
}
impl LspClient {
    /// Spawns the configured language-server process, wires up the stdio
    /// transport tasks, and performs the LSP `initialize`/`initialized`
    /// handshake.
    ///
    /// # Errors
    /// Fails when the process cannot be spawned, a stdio pipe is missing,
    /// or the initialize handshake fails.
    pub(crate) async fn connect(config: LspServerConfig) -> Result<Self, LspError> {
        let mut command = Command::new(&config.command);
        command
            .args(&config.args)
            .current_dir(&config.workspace_root)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .envs(config.env.clone());
        let mut child = command.spawn()?;
        let stdin = child
            .stdin
            .take()
            .ok_or_else(|| LspError::Protocol("missing LSP stdin pipe".to_string()))?;
        let stdout = child
            .stdout
            .take()
            .ok_or_else(|| LspError::Protocol("missing LSP stdout pipe".to_string()))?;
        let stderr = child.stderr.take();
        let client = Self {
            config,
            writer: Mutex::new(BufWriter::new(stdin)),
            child: Mutex::new(child),
            pending_requests: Arc::new(Mutex::new(BTreeMap::new())),
            diagnostics: Arc::new(Mutex::new(BTreeMap::new())),
            open_documents: Mutex::new(BTreeMap::new()),
            next_request_id: AtomicI64::new(1),
        };
        // The reader task must be running before `initialize`, otherwise the
        // handshake response could never resolve the pending request.
        client.spawn_reader(stdout);
        if let Some(stderr) = stderr {
            client.spawn_stderr_drain(stderr);
        }
        client.initialize().await?;
        Ok(client)
    }

    /// Opens `path` from disk if it is not already open on the server.
    ///
    /// # Errors
    /// Fails when the file cannot be read or the `didOpen` notification
    /// cannot be sent.
    pub(crate) async fn ensure_document_open(&self, path: &Path) -> Result<(), LspError> {
        if self.is_document_open(path).await {
            return Ok(());
        }
        // NOTE(review): blocking read on the async runtime; acceptable for
        // small sources, but consider tokio::fs if large files show up.
        let contents = std::fs::read_to_string(path)?;
        self.open_document(path, &contents).await
    }

    /// Sends `textDocument/didOpen` for `path` with the given contents and
    /// starts version tracking at 1.
    ///
    /// # Errors
    /// Fails when no language id is configured for the path's extension or
    /// the notification cannot be sent.
    pub(crate) async fn open_document(&self, path: &Path, text: &str) -> Result<(), LspError> {
        let uri = file_url(path)?;
        let language_id = self
            .config
            .language_id_for(path)
            .ok_or_else(|| LspError::UnsupportedDocument(path.to_path_buf()))?;
        self.notify(
            "textDocument/didOpen",
            json!({
                "textDocument": {
                    "uri": uri,
                    "languageId": language_id,
                    "version": 1,
                    "text": text,
                }
            }),
        )
        .await?;
        self.open_documents
            .lock()
            .await
            .insert(path.to_path_buf(), 1);
        Ok(())
    }

    /// Sends a full-text `textDocument/didChange` for `path`, bumping the
    /// document version. Falls back to `didOpen` if the document is not open.
    ///
    /// # Errors
    /// Fails when the notification cannot be sent.
    pub(crate) async fn change_document(&self, path: &Path, text: &str) -> Result<(), LspError> {
        if !self.is_document_open(path).await {
            return self.open_document(path, text).await;
        }
        let uri = file_url(path)?;
        let next_version = {
            let mut open_documents = self.open_documents.lock().await;
            let version = open_documents
                .entry(path.to_path_buf())
                .and_modify(|value| *value += 1)
                .or_insert(1);
            *version
        };
        self.notify(
            "textDocument/didChange",
            json!({
                "textDocument": {
                    "uri": uri,
                    "version": next_version,
                },
                // Full-document sync: a single change event carrying the
                // complete new text (no range => whole document).
                "contentChanges": [{
                    "text": text,
                }],
            }),
        )
        .await
    }

    /// Sends `textDocument/didSave` for `path`; a no-op if the document is
    /// not open.
    ///
    /// # Errors
    /// Fails when the notification cannot be sent.
    pub(crate) async fn save_document(&self, path: &Path) -> Result<(), LspError> {
        if !self.is_document_open(path).await {
            return Ok(());
        }
        self.notify(
            "textDocument/didSave",
            json!({
                "textDocument": {
                    "uri": file_url(path)?,
                }
            }),
        )
        .await
    }

    /// Sends `textDocument/didClose` for `path` and stops version tracking;
    /// a no-op if the document is not open.
    ///
    /// # Errors
    /// Fails when the notification cannot be sent.
    pub(crate) async fn close_document(&self, path: &Path) -> Result<(), LspError> {
        if !self.is_document_open(path).await {
            return Ok(());
        }
        self.notify(
            "textDocument/didClose",
            json!({
                "textDocument": {
                    "uri": file_url(path)?,
                }
            }),
        )
        .await?;
        self.open_documents.lock().await.remove(path);
        Ok(())
    }

    /// Returns whether `path` is currently open on this server.
    pub(crate) async fn is_document_open(&self, path: &Path) -> bool {
        self.open_documents.lock().await.contains_key(path)
    }

    /// Issues `textDocument/definition` and flattens the three possible
    /// response shapes (scalar, array, links) into `SymbolLocation`s.
    ///
    /// # Errors
    /// Fails when the document cannot be opened or the request fails.
    pub(crate) async fn go_to_definition(
        &self,
        path: &Path,
        position: Position,
    ) -> Result<Vec<SymbolLocation>, LspError> {
        self.ensure_document_open(path).await?;
        let response = self
            .request::<Option<GotoDefinitionResponse>>(
                "textDocument/definition",
                json!({
                    "textDocument": { "uri": file_url(path)? },
                    "position": position,
                }),
            )
            .await?;
        Ok(match response {
            Some(GotoDefinitionResponse::Scalar(location)) => {
                location_to_symbol_locations(vec![location])
            }
            Some(GotoDefinitionResponse::Array(locations)) => location_to_symbol_locations(locations),
            Some(GotoDefinitionResponse::Link(links)) => location_links_to_symbol_locations(links),
            None => Vec::new(),
        })
    }

    /// Issues `textDocument/references` at `position`, optionally including
    /// the declaration itself.
    ///
    /// # Errors
    /// Fails when the document cannot be opened or the request fails.
    pub(crate) async fn find_references(
        &self,
        path: &Path,
        position: Position,
        include_declaration: bool,
    ) -> Result<Vec<SymbolLocation>, LspError> {
        self.ensure_document_open(path).await?;
        let response = self
            .request::<Option<Vec<Location>>>(
                "textDocument/references",
                json!({
                    "textDocument": { "uri": file_url(path)? },
                    "position": position,
                    "context": {
                        "includeDeclaration": include_declaration,
                    },
                }),
            )
            .await?;
        Ok(location_to_symbol_locations(response.unwrap_or_default()))
    }

    /// Returns a point-in-time copy of the diagnostics published so far,
    /// keyed by document URI.
    pub(crate) async fn diagnostics_snapshot(&self) -> BTreeMap<String, Vec<Diagnostic>> {
        self.diagnostics.lock().await.clone()
    }

    /// Performs a best-effort graceful shutdown (`shutdown` request then
    /// `exit` notification) and always reaps the child process.
    ///
    /// # Errors
    /// Currently infallible; the signature leaves room for stricter
    /// shutdown semantics.
    pub(crate) async fn shutdown(&self) -> Result<(), LspError> {
        // The server may already be gone; protocol errors here are expected
        // and deliberately ignored.
        let _ = self.request::<Value>("shutdown", json!({})).await;
        let _ = self.notify("exit", Value::Null).await;
        let mut child = self.child.lock().await;
        // `kill` fails when the process has already exited; either way we
        // must `wait` so the child does not linger as a zombie.
        let _ = child.kill().await;
        let _ = child.wait().await;
        Ok(())
    }

    /// Spawns the background task that consumes server->client traffic:
    /// responses resolve their pending request; `publishDiagnostics`
    /// notifications update the diagnostics cache; everything else is
    /// ignored. On a transport error every in-flight request is failed so
    /// callers do not hang forever.
    fn spawn_reader(&self, stdout: ChildStdout) {
        let diagnostics = Arc::clone(&self.diagnostics);
        let pending_requests = Arc::clone(&self.pending_requests);
        tokio::spawn(async move {
            let mut reader = BufReader::new(stdout);
            let result = async {
                while let Some(message) = read_message(&mut reader).await? {
                    if let Some(id) = message.get("id").and_then(Value::as_i64) {
                        let response = if let Some(error) = message.get("error") {
                            Err(LspError::Protocol(error.to_string()))
                        } else {
                            Ok(message.get("result").cloned().unwrap_or(Value::Null))
                        };
                        if let Some(sender) = pending_requests.lock().await.remove(&id) {
                            // The requester may have been dropped; a failed
                            // send is harmless.
                            let _ = sender.send(response);
                        }
                        continue;
                    }
                    let Some(method) = message.get("method").and_then(Value::as_str) else {
                        continue;
                    };
                    if method != "textDocument/publishDiagnostics" {
                        continue;
                    }
                    let params = message.get("params").cloned().unwrap_or(Value::Null);
                    let notification = serde_json::from_value::<PublishDiagnosticsParams>(params)?;
                    let mut diagnostics_map = diagnostics.lock().await;
                    if notification.diagnostics.is_empty() {
                        // An empty publish clears previous diagnostics for the file.
                        diagnostics_map.remove(&notification.uri.to_string());
                    } else {
                        diagnostics_map.insert(notification.uri.to_string(), notification.diagnostics);
                    }
                }
                Ok::<(), LspError>(())
            }
            .await;
            if let Err(error) = result {
                // Fail everything still in flight with the transport error.
                let mut pending = pending_requests.lock().await;
                for (_, sender) in std::mem::take(&mut *pending) {
                    let _ = sender.send(Err(LspError::Protocol(error.to_string())));
                }
            }
        });
    }

    /// Spawns a task that drains the server's stderr so the pipe never
    /// fills up and blocks the child. Output is discarded.
    fn spawn_stderr_drain<R>(&self, stderr: R)
    where
        R: AsyncRead + Unpin + Send + 'static,
    {
        tokio::spawn(async move {
            let mut reader = BufReader::new(stderr);
            let mut sink = Vec::new();
            let _ = reader.read_to_end(&mut sink).await;
        });
    }

    /// Sends the `initialize` request advertising the client capabilities
    /// this crate actually consumes, then the `initialized` notification.
    ///
    /// # Errors
    /// Fails when the workspace root cannot be converted to a URL or the
    /// handshake messages cannot be exchanged.
    async fn initialize(&self) -> Result<(), LspError> {
        let workspace_uri = file_url(&self.config.workspace_root)?;
        let _ = self
            .request::<Value>(
                "initialize",
                json!({
                    "processId": std::process::id(),
                    // rootUri/rootPath are deprecated in favor of
                    // workspaceFolders but kept for older servers.
                    "rootUri": workspace_uri,
                    "rootPath": self.config.workspace_root,
                    "workspaceFolders": [{
                        "uri": workspace_uri,
                        "name": self.config.name,
                    }],
                    "initializationOptions": self.config.initialization_options.clone().unwrap_or(Value::Null),
                    "capabilities": {
                        "textDocument": {
                            "publishDiagnostics": {
                                "relatedInformation": true,
                            },
                            "definition": {
                                "linkSupport": true,
                            },
                            "references": {}
                        },
                        "workspace": {
                            "configuration": false,
                            "workspaceFolders": true,
                        },
                        "general": {
                            "positionEncodings": ["utf-16"],
                        }
                    }
                }),
            )
            .await?;
        self.notify("initialized", json!({})).await
    }

    /// Sends a JSON-RPC request and awaits its response, deserializing the
    /// `result` field into `T`. The pending entry is removed again if the
    /// write itself fails.
    ///
    /// # Errors
    /// Fails on transport errors, a server `error` response, a closed
    /// response channel, or a `result` that does not deserialize to `T`.
    async fn request<T>(&self, method: &str, params: Value) -> Result<T, LspError>
    where
        T: for<'de> serde::Deserialize<'de>,
    {
        // Relaxed is sufficient: only uniqueness of ids matters, not ordering.
        let id = self.next_request_id.fetch_add(1, Ordering::Relaxed);
        let (sender, receiver) = oneshot::channel();
        self.pending_requests.lock().await.insert(id, sender);
        if let Err(error) = self
            .send_message(&json!({
                "jsonrpc": "2.0",
                "id": id,
                "method": method,
                "params": params,
            }))
            .await
        {
            // The request never reached the server; drop the orphaned entry.
            self.pending_requests.lock().await.remove(&id);
            return Err(error);
        }
        let response = receiver
            .await
            .map_err(|_| LspError::Protocol(format!("request channel closed for {method}")))??;
        Ok(serde_json::from_value(response)?)
    }

    /// Sends a JSON-RPC notification (no id, no response expected).
    async fn notify(&self, method: &str, params: Value) -> Result<(), LspError> {
        self.send_message(&json!({
            "jsonrpc": "2.0",
            "method": method,
            "params": params,
        }))
        .await
    }

    /// Frames `payload` with the LSP base-protocol `Content-Length` header
    /// and writes it to the server's stdin, flushing immediately.
    async fn send_message(&self, payload: &Value) -> Result<(), LspError> {
        let body = serde_json::to_vec(payload)?;
        let mut writer = self.writer.lock().await;
        writer
            .write_all(format!("Content-Length: {}\r\n\r\n", body.len()).as_bytes())
            .await?;
        writer.write_all(&body).await?;
        writer.flush().await?;
        Ok(())
    }
}
/// Reads one LSP base-protocol message (`Content-Length` headers followed by
/// a JSON body) from `reader`. Returns `Ok(None)` on clean EOF.
async fn read_message<R>(reader: &mut BufReader<R>) -> Result<Option<Value>, LspError>
where
    R: AsyncRead + Unpin,
{
    let mut body_len: Option<usize> = None;
    loop {
        let mut header = String::new();
        if reader.read_line(&mut header).await? == 0 {
            // EOF before a complete header section: the server closed its pipe.
            return Ok(None);
        }
        if header == "\r\n" {
            // A blank line terminates the header section.
            break;
        }
        let trimmed = header.trim_end_matches(['\r', '\n']);
        let Some((name, value)) = trimmed.split_once(':') else {
            return Err(LspError::InvalidHeader(trimmed.to_string()));
        };
        if name.eq_ignore_ascii_case("Content-Length") {
            let value = value.trim().to_string();
            match value.parse::<usize>() {
                Ok(length) => body_len = Some(length),
                Err(_) => return Err(LspError::InvalidContentLength(value)),
            }
        }
    }
    let length = body_len.ok_or(LspError::MissingContentLength)?;
    let mut body = vec![0_u8; length];
    reader.read_exact(&mut body).await?;
    Ok(Some(serde_json::from_slice(&body)?))
}
/// Converts a filesystem path into a `file://` URL string.
fn file_url(path: &Path) -> Result<String, LspError> {
    match url::Url::from_file_path(path) {
        Ok(url) => Ok(url.to_string()),
        // `from_file_path` rejects relative paths and non-file schemes.
        Err(()) => Err(LspError::PathToUrl(path.to_path_buf())),
    }
}
/// Converts LSP `Location`s into `SymbolLocation`s, silently dropping any
/// entry whose URI does not map back to a local filesystem path.
fn location_to_symbol_locations(locations: Vec<Location>) -> Vec<SymbolLocation> {
    let mut symbols = Vec::with_capacity(locations.len());
    for location in locations {
        if let Some(path) = uri_to_path(&location.uri.to_string()) {
            symbols.push(SymbolLocation {
                path,
                range: location.range,
            });
        }
    }
    symbols
}
/// Converts LSP `LocationLink`s into `SymbolLocation`s using each link's
/// target selection range; links with non-file URIs are skipped.
fn location_links_to_symbol_locations(links: Vec<LocationLink>) -> Vec<SymbolLocation> {
    let mut symbols = Vec::with_capacity(links.len());
    for link in links {
        if let Some(path) = uri_to_path(&link.target_uri.to_string()) {
            symbols.push(SymbolLocation {
                path,
                range: link.target_selection_range,
            });
        }
    }
    symbols
}
/// Parses `uri` and converts it to a local path; `None` for unparsable or
/// non-`file://` URIs.
fn uri_to_path(uri: &str) -> Option<PathBuf> {
    let url = url::Url::parse(uri).ok()?;
    url.to_file_path().ok()
}

View File

@@ -0,0 +1,62 @@
use std::fmt::{Display, Formatter};
use std::path::PathBuf;
/// Errors produced by the LSP client, manager, and transport layers.
#[derive(Debug)]
pub enum LspError {
    /// Underlying I/O failure (process pipes, file reads).
    Io(std::io::Error),
    /// JSON (de)serialization failure.
    Json(serde_json::Error),
    /// A transport header line was not `Name: value` shaped.
    InvalidHeader(String),
    /// The header section ended without a `Content-Length` header.
    MissingContentLength,
    /// `Content-Length` was present but did not parse as `usize`.
    InvalidContentLength(String),
    /// No configured server handles the given document path.
    UnsupportedDocument(PathBuf),
    /// A server name was referenced that has no configuration.
    UnknownServer(String),
    /// Two server configs claim the same file extension.
    DuplicateExtension {
        extension: String,
        existing_server: String,
        new_server: String,
    },
    /// A path could not be converted into a `file://` URL.
    PathToUrl(PathBuf),
    /// A JSON-RPC level failure (error response, closed channel, ...).
    Protocol(String),
}
impl Display for LspError {
    /// Renders a human-readable, single-line description of the error.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let message = match self {
            Self::Io(error) => error.to_string(),
            Self::Json(error) => error.to_string(),
            Self::InvalidHeader(header) => format!("invalid LSP header: {header}"),
            Self::MissingContentLength => "missing LSP Content-Length header".to_string(),
            Self::InvalidContentLength(value) => {
                format!("invalid LSP Content-Length value: {value}")
            }
            Self::UnsupportedDocument(path) => {
                format!("no LSP server configured for {}", path.display())
            }
            Self::UnknownServer(name) => format!("unknown LSP server: {name}"),
            Self::DuplicateExtension {
                extension,
                existing_server,
                new_server,
            } => format!(
                "duplicate LSP extension mapping for {extension}: {existing_server} and {new_server}"
            ),
            Self::PathToUrl(path) => {
                format!("failed to convert path to file URL: {}", path.display())
            }
            Self::Protocol(message) => format!("LSP protocol error: {message}"),
        };
        f.write_str(&message)
    }
}
// Marker impl: `LspError` provides its message via `Display`, no source chain.
impl std::error::Error for LspError {}
/// Lets `?` convert I/O failures into `LspError` automatically.
impl From<std::io::Error> for LspError {
    fn from(value: std::io::Error) -> Self {
        Self::Io(value)
    }
}
/// Lets `?` convert serde_json failures into `LspError` automatically.
impl From<serde_json::Error> for LspError {
    fn from(value: serde_json::Error) -> Self {
        Self::Json(value)
    }
}

283
rust/crates/lsp/src/lib.rs Normal file
View File

@@ -0,0 +1,283 @@
mod client;
mod error;
mod manager;
mod types;
pub use error::LspError;
pub use manager::LspManager;
pub use types::{
FileDiagnostics, LspContextEnrichment, LspServerConfig, SymbolLocation, WorkspaceDiagnostics,
};
#[cfg(test)]
mod tests {
    //! Integration tests that drive [`LspManager`] against a mock LSP server
    //! implemented as a small Python script speaking the base protocol over
    //! stdio. Each test silently skips when no python3 binary is available.
    use std::collections::BTreeMap;
    use std::fs;
    use std::path::PathBuf;
    use std::process::Command;
    use std::time::{Duration, SystemTime, UNIX_EPOCH};
    use lsp_types::{DiagnosticSeverity, Position};
    use crate::{LspManager, LspServerConfig};
    // Builds a unique temp-directory path for one test run (nanosecond suffix
    // avoids collisions between concurrently running tests).
    fn temp_dir(label: &str) -> PathBuf {
        let nanos = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("time should be after epoch")
            .as_nanos();
        std::env::temp_dir().join(format!("lsp-{label}-{nanos}"))
    }
    // Locates a runnable python3 binary; `None` makes the caller skip.
    fn python3_path() -> Option<String> {
        let candidates = ["python3", "/usr/bin/python3"];
        candidates.iter().find_map(|candidate| {
            Command::new(candidate)
                .arg("--version")
                .output()
                .ok()
                .filter(|output| output.status.success())
                .map(|_| (*candidate).to_string())
        })
    }
    // Writes the mock server script into the workspace and returns its path.
    // The script answers initialize/definition/references and publishes one
    // fixed diagnostic on didOpen.
    fn write_mock_server_script(root: &std::path::Path) -> PathBuf {
        let script_path = root.join("mock_lsp_server.py");
        fs::write(
            &script_path,
            r#"import json
import sys
def read_message():
    headers = {}
    while True:
        line = sys.stdin.buffer.readline()
        if not line:
            return None
        if line == b"\r\n":
            break
        key, value = line.decode("utf-8").split(":", 1)
        headers[key.lower()] = value.strip()
    length = int(headers["content-length"])
    body = sys.stdin.buffer.read(length)
    return json.loads(body)
def write_message(payload):
    raw = json.dumps(payload).encode("utf-8")
    sys.stdout.buffer.write(f"Content-Length: {len(raw)}\r\n\r\n".encode("utf-8"))
    sys.stdout.buffer.write(raw)
    sys.stdout.buffer.flush()
while True:
    message = read_message()
    if message is None:
        break
    method = message.get("method")
    if method == "initialize":
        write_message({
            "jsonrpc": "2.0",
            "id": message["id"],
            "result": {
                "capabilities": {
                    "definitionProvider": True,
                    "referencesProvider": True,
                    "textDocumentSync": 1,
                }
            },
        })
    elif method == "initialized":
        continue
    elif method == "textDocument/didOpen":
        document = message["params"]["textDocument"]
        write_message({
            "jsonrpc": "2.0",
            "method": "textDocument/publishDiagnostics",
            "params": {
                "uri": document["uri"],
                "diagnostics": [
                    {
                        "range": {
                            "start": {"line": 0, "character": 0},
                            "end": {"line": 0, "character": 3},
                        },
                        "severity": 1,
                        "source": "mock-server",
                        "message": "mock error",
                    }
                ],
            },
        })
    elif method == "textDocument/didChange":
        continue
    elif method == "textDocument/didSave":
        continue
    elif method == "textDocument/definition":
        uri = message["params"]["textDocument"]["uri"]
        write_message({
            "jsonrpc": "2.0",
            "id": message["id"],
            "result": [
                {
                    "uri": uri,
                    "range": {
                        "start": {"line": 0, "character": 0},
                        "end": {"line": 0, "character": 3},
                    },
                }
            ],
        })
    elif method == "textDocument/references":
        uri = message["params"]["textDocument"]["uri"]
        write_message({
            "jsonrpc": "2.0",
            "id": message["id"],
            "result": [
                {
                    "uri": uri,
                    "range": {
                        "start": {"line": 0, "character": 0},
                        "end": {"line": 0, "character": 3},
                    },
                },
                {
                    "uri": uri,
                    "range": {
                        "start": {"line": 1, "character": 4},
                        "end": {"line": 1, "character": 7},
                    },
                },
            ],
        })
    elif method == "shutdown":
        write_message({"jsonrpc": "2.0", "id": message["id"], "result": None})
    elif method == "exit":
        break
"#,
        )
        .expect("mock server should be written");
        script_path
    }
    // Polls until the mock server's didOpen diagnostic has been received,
    // bounded by a 2s timeout so a broken transport fails fast.
    async fn wait_for_diagnostics(manager: &LspManager) {
        tokio::time::timeout(Duration::from_secs(2), async {
            loop {
                if manager
                    .collect_workspace_diagnostics()
                    .await
                    .expect("diagnostics snapshot should load")
                    .total_diagnostics()
                    > 0
                {
                    break;
                }
                tokio::time::sleep(Duration::from_millis(10)).await;
            }
        })
        .await
        .expect("diagnostics should arrive from mock server");
    }
    #[tokio::test(flavor = "current_thread")]
    async fn collects_diagnostics_and_symbol_navigation_from_mock_server() {
        let Some(python) = python3_path() else {
            // No python3 on this machine: skip rather than fail.
            return;
        };
        // given
        let root = temp_dir("manager");
        fs::create_dir_all(root.join("src")).expect("workspace root should exist");
        let script_path = write_mock_server_script(&root);
        let source_path = root.join("src").join("main.rs");
        fs::write(&source_path, "fn main() {}\nlet value = 1;\n").expect("source file should exist");
        let manager = LspManager::new(vec![LspServerConfig {
            name: "rust-analyzer".to_string(),
            command: python,
            args: vec![script_path.display().to_string()],
            env: BTreeMap::new(),
            workspace_root: root.clone(),
            initialization_options: None,
            extension_to_language: BTreeMap::from([(".rs".to_string(), "rust".to_string())]),
        }])
        .expect("manager should build");
        manager
            .open_document(&source_path, &fs::read_to_string(&source_path).expect("source read should succeed"))
            .await
            .expect("document should open");
        wait_for_diagnostics(&manager).await;
        // when
        let diagnostics = manager
            .collect_workspace_diagnostics()
            .await
            .expect("diagnostics should be available");
        let definitions = manager
            .go_to_definition(&source_path, Position::new(0, 0))
            .await
            .expect("definition request should succeed");
        let references = manager
            .find_references(&source_path, Position::new(0, 0), true)
            .await
            .expect("references request should succeed");
        // then
        assert_eq!(diagnostics.files.len(), 1);
        assert_eq!(diagnostics.total_diagnostics(), 1);
        assert_eq!(diagnostics.files[0].diagnostics[0].severity, Some(DiagnosticSeverity::ERROR));
        assert_eq!(definitions.len(), 1);
        assert_eq!(definitions[0].start_line(), 1);
        assert_eq!(references.len(), 2);
        manager.shutdown().await.expect("shutdown should succeed");
        fs::remove_dir_all(root).expect("temp workspace should be removed");
    }
    #[tokio::test(flavor = "current_thread")]
    async fn renders_runtime_context_enrichment_for_prompt_usage() {
        let Some(python) = python3_path() else {
            // No python3 on this machine: skip rather than fail.
            return;
        };
        // given
        let root = temp_dir("prompt");
        fs::create_dir_all(root.join("src")).expect("workspace root should exist");
        let script_path = write_mock_server_script(&root);
        let source_path = root.join("src").join("lib.rs");
        fs::write(&source_path, "pub fn answer() -> i32 { 42 }\n").expect("source file should exist");
        let manager = LspManager::new(vec![LspServerConfig {
            name: "rust-analyzer".to_string(),
            command: python,
            args: vec![script_path.display().to_string()],
            env: BTreeMap::new(),
            workspace_root: root.clone(),
            initialization_options: None,
            extension_to_language: BTreeMap::from([(".rs".to_string(), "rust".to_string())]),
        }])
        .expect("manager should build");
        manager
            .open_document(&source_path, &fs::read_to_string(&source_path).expect("source read should succeed"))
            .await
            .expect("document should open");
        wait_for_diagnostics(&manager).await;
        // when
        let enrichment = manager
            .context_enrichment(&source_path, Position::new(0, 0))
            .await
            .expect("context enrichment should succeed");
        let rendered = enrichment.render_prompt_section();
        // then
        assert!(rendered.contains("# LSP context"));
        assert!(rendered.contains("Workspace diagnostics: 1 across 1 file(s)"));
        assert!(rendered.contains("Definitions:"));
        assert!(rendered.contains("References:"));
        assert!(rendered.contains("mock error"));
        manager.shutdown().await.expect("shutdown should succeed");
        fs::remove_dir_all(root).expect("temp workspace should be removed");
    }
}

View File

@@ -0,0 +1,191 @@
use std::collections::{BTreeMap, BTreeSet};
use std::path::Path;
use std::sync::Arc;
use lsp_types::Position;
use tokio::sync::Mutex;
use crate::client::LspClient;
use crate::error::LspError;
use crate::types::{
normalize_extension, FileDiagnostics, LspContextEnrichment, LspServerConfig, SymbolLocation,
WorkspaceDiagnostics,
};
/// Routes document operations to per-server [`LspClient`]s, lazily starting
/// a server the first time a file with one of its extensions is touched.
pub struct LspManager {
    // Server configurations keyed by server name.
    server_configs: BTreeMap<String, LspServerConfig>,
    // Normalized extension (e.g. ".rs") -> owning server name.
    extension_map: BTreeMap<String, String>,
    // Running clients keyed by server name; populated on first use.
    clients: Mutex<BTreeMap<String, Arc<LspClient>>>,
}
impl LspManager {
    /// Builds a manager from server configurations, indexing each server by
    /// name and each of its file extensions back to that server.
    ///
    /// # Errors
    /// Returns [`LspError::DuplicateExtension`] when two configurations
    /// claim the same (normalized) extension.
    pub fn new(server_configs: Vec<LspServerConfig>) -> Result<Self, LspError> {
        let mut configs_by_name = BTreeMap::new();
        let mut extension_map = BTreeMap::new();
        for config in server_configs {
            for extension in config.extension_to_language.keys() {
                let normalized = normalize_extension(extension);
                if let Some(existing_server) = extension_map.insert(normalized.clone(), config.name.clone()) {
                    return Err(LspError::DuplicateExtension {
                        extension: normalized,
                        existing_server,
                        new_server: config.name.clone(),
                    });
                }
            }
            configs_by_name.insert(config.name.clone(), config);
        }
        Ok(Self {
            server_configs: configs_by_name,
            extension_map,
            clients: Mutex::new(BTreeMap::new()),
        })
    }

    /// Returns whether some configured server handles `path`'s extension.
    #[must_use]
    pub fn supports_path(&self, path: &Path) -> bool {
        path.extension().is_some_and(|extension| {
            let normalized = normalize_extension(extension.to_string_lossy().as_ref());
            self.extension_map.contains_key(&normalized)
        })
    }

    /// Opens `path` with the given contents on the responsible server.
    ///
    /// # Errors
    /// Fails when no server handles the path or the client operation fails.
    pub async fn open_document(&self, path: &Path, text: &str) -> Result<(), LspError> {
        self.client_for_path(path).await?.open_document(path, text).await
    }

    /// Re-reads `path` from disk, pushes the contents as a change, and then
    /// notifies the server that the document was saved.
    ///
    /// # Errors
    /// Fails when the file cannot be read or either notification fails.
    pub async fn sync_document_from_disk(&self, path: &Path) -> Result<(), LspError> {
        // NOTE(review): blocking read on the async runtime; fine for small
        // sources — revisit if large files become common.
        let contents = std::fs::read_to_string(path)?;
        self.change_document(path, &contents).await?;
        self.save_document(path).await
    }

    /// Sends a full-text change for `path` to the responsible server.
    ///
    /// # Errors
    /// Fails when no server handles the path or the client operation fails.
    pub async fn change_document(&self, path: &Path, text: &str) -> Result<(), LspError> {
        self.client_for_path(path).await?.change_document(path, text).await
    }

    /// Notifies the responsible server that `path` was saved.
    ///
    /// # Errors
    /// Fails when no server handles the path or the client operation fails.
    pub async fn save_document(&self, path: &Path) -> Result<(), LspError> {
        self.client_for_path(path).await?.save_document(path).await
    }

    /// Closes `path` on the responsible server.
    ///
    /// # Errors
    /// Fails when no server handles the path or the client operation fails.
    pub async fn close_document(&self, path: &Path) -> Result<(), LspError> {
        self.client_for_path(path).await?.close_document(path).await
    }

    /// Resolves the definition(s) of the symbol at `position`, deduplicated
    /// by (path, range).
    ///
    /// # Errors
    /// Fails when no server handles the path or the request fails.
    pub async fn go_to_definition(
        &self,
        path: &Path,
        position: Position,
    ) -> Result<Vec<SymbolLocation>, LspError> {
        let mut locations = self.client_for_path(path).await?.go_to_definition(path, position).await?;
        dedupe_locations(&mut locations);
        Ok(locations)
    }

    /// Finds references to the symbol at `position`, deduplicated by
    /// (path, range).
    ///
    /// # Errors
    /// Fails when no server handles the path or the request fails.
    pub async fn find_references(
        &self,
        path: &Path,
        position: Position,
        include_declaration: bool,
    ) -> Result<Vec<SymbolLocation>, LspError> {
        let mut locations = self
            .client_for_path(path)
            .await?
            .find_references(path, position, include_declaration)
            .await?;
        dedupe_locations(&mut locations);
        Ok(locations)
    }

    /// Aggregates the latest published diagnostics from every running
    /// client into a single path-sorted snapshot. Entries whose URI is not
    /// a local file, or with no diagnostics, are skipped.
    ///
    /// # Errors
    /// Currently infallible; the signature leaves room for stricter
    /// collection semantics.
    pub async fn collect_workspace_diagnostics(&self) -> Result<WorkspaceDiagnostics, LspError> {
        // Clone the client handles out so the registry lock is not held
        // across the per-client awaits below.
        let clients = self.clients.lock().await.values().cloned().collect::<Vec<_>>();
        let mut files = Vec::new();
        for client in clients {
            for (uri, diagnostics) in client.diagnostics_snapshot().await {
                // `to_file_path` errors with `()`; map it onto an arbitrary
                // url::ParseError solely to keep one Result type in the chain.
                let Ok(path) = url::Url::parse(&uri)
                    .and_then(|url| url.to_file_path().map_err(|()| url::ParseError::RelativeUrlWithoutBase))
                else {
                    continue;
                };
                if diagnostics.is_empty() {
                    continue;
                }
                files.push(FileDiagnostics {
                    path,
                    uri,
                    diagnostics,
                });
            }
        }
        files.sort_by(|left, right| left.path.cmp(&right.path));
        Ok(WorkspaceDiagnostics { files })
    }

    /// Bundles workspace diagnostics plus definition/reference lookups for
    /// `position` into one prompt-ready enrichment value.
    ///
    /// # Errors
    /// Fails when no server handles the path or any underlying request fails.
    pub async fn context_enrichment(
        &self,
        path: &Path,
        position: Position,
    ) -> Result<LspContextEnrichment, LspError> {
        Ok(LspContextEnrichment {
            file_path: path.to_path_buf(),
            diagnostics: self.collect_workspace_diagnostics().await?,
            definitions: self.go_to_definition(path, position).await?,
            references: self.find_references(path, position, true).await?,
        })
    }

    /// Shuts down every running client. Unlike a fail-fast loop, every
    /// client is attempted even when an earlier one errors, so no server
    /// process is left running; the first error (if any) is returned.
    ///
    /// # Errors
    /// Returns the first shutdown error encountered.
    pub async fn shutdown(&self) -> Result<(), LspError> {
        let drained = {
            let mut clients = self.clients.lock().await;
            let drained = clients.values().cloned().collect::<Vec<_>>();
            clients.clear();
            drained
        };
        let mut first_error = None;
        for client in drained {
            if let Err(error) = client.shutdown().await {
                first_error.get_or_insert(error);
            }
        }
        match first_error {
            Some(error) => Err(error),
            None => Ok(()),
        }
    }

    /// Returns the client responsible for `path`'s extension, spawning and
    /// initializing the server on first use.
    ///
    /// # Errors
    /// Fails when the path has no extension, no server claims it, or the
    /// server cannot be started.
    async fn client_for_path(&self, path: &Path) -> Result<Arc<LspClient>, LspError> {
        let extension = path
            .extension()
            .map(|extension| normalize_extension(extension.to_string_lossy().as_ref()))
            .ok_or_else(|| LspError::UnsupportedDocument(path.to_path_buf()))?;
        let server_name = self
            .extension_map
            .get(&extension)
            .cloned()
            .ok_or_else(|| LspError::UnsupportedDocument(path.to_path_buf()))?;
        // NOTE(review): the registry lock is held across `connect`, which
        // serializes all client lookups during a server's startup. That also
        // guarantees at most one spawn per server; acceptable trade-off here.
        let mut clients = self.clients.lock().await;
        if let Some(client) = clients.get(&server_name) {
            return Ok(client.clone());
        }
        let config = self
            .server_configs
            .get(&server_name)
            .cloned()
            .ok_or_else(|| LspError::UnknownServer(server_name.clone()))?;
        let client = Arc::new(LspClient::connect(config).await?);
        clients.insert(server_name, client.clone());
        Ok(client)
    }
}
/// Removes duplicate locations in place, keeping the first occurrence of
/// each (path, range) pair; the relative order of survivors is preserved.
fn dedupe_locations(locations: &mut Vec<SymbolLocation>) {
    let mut seen = BTreeSet::new();
    locations.retain(|location| {
        let start = location.range.start;
        let end = location.range.end;
        // `insert` returns false for an already-seen key, dropping the entry.
        seen.insert((
            location.path.clone(),
            start.line,
            start.character,
            end.line,
            end.character,
        ))
    });
}

View File

@@ -0,0 +1,186 @@
use std::collections::BTreeMap;
use std::fmt::{Display, Formatter};
use std::path::{Path, PathBuf};
use lsp_types::{Diagnostic, Range};
use serde_json::Value;
/// Static configuration for one language server.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct LspServerConfig {
    /// Unique server name, used as the registry key and workspace-folder name.
    pub name: String,
    /// Executable to spawn.
    pub command: String,
    /// Arguments passed to the executable.
    pub args: Vec<String>,
    /// Extra environment variables for the server process.
    pub env: BTreeMap<String, String>,
    /// Workspace root; also the server process's working directory.
    pub workspace_root: PathBuf,
    /// Raw JSON forwarded as `initializationOptions` during the handshake.
    pub initialization_options: Option<Value>,
    /// File extension (e.g. ".rs") -> LSP `languageId` (e.g. "rust").
    pub extension_to_language: BTreeMap<String, String>,
}
impl LspServerConfig {
    /// Returns the LSP `languageId` configured for `path`'s extension, or
    /// `None` when the path has no extension or no mapping exists.
    #[must_use]
    pub fn language_id_for(&self, path: &Path) -> Option<&str> {
        let raw = path.extension()?.to_string_lossy();
        let key = normalize_extension(raw.as_ref());
        self.extension_to_language.get(&key).map(String::as_str)
    }
}
/// Diagnostics published for a single file.
#[derive(Debug, Clone, PartialEq)]
pub struct FileDiagnostics {
    /// Local filesystem path the diagnostics apply to.
    pub path: PathBuf,
    /// Original document URI string as published by the server.
    pub uri: String,
    /// Diagnostics for the file; never empty when produced by the manager.
    pub diagnostics: Vec<Diagnostic>,
}
/// Snapshot of diagnostics across the whole workspace, sorted by path.
#[derive(Debug, Clone, Default, PartialEq)]
pub struct WorkspaceDiagnostics {
    /// Per-file diagnostic groups.
    pub files: Vec<FileDiagnostics>,
}
impl WorkspaceDiagnostics {
    /// True when no file reported any diagnostics.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.files.is_empty()
    }
    /// Total diagnostic count summed across every file in the snapshot.
    #[must_use]
    pub fn total_diagnostics(&self) -> usize {
        let mut total = 0;
        for file in &self.files {
            total += file.diagnostics.len();
        }
        total
    }
}
/// A symbol's location: a file path plus the zero-based LSP range within it.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SymbolLocation {
    /// Local filesystem path of the containing file.
    pub path: PathBuf,
    /// Zero-based range of the symbol within the file.
    pub range: Range,
}
impl SymbolLocation {
    /// One-based start line (LSP positions are zero-based). Uses a
    /// saturating add so a pathological `u32::MAX` line cannot panic in
    /// debug builds.
    #[must_use]
    pub fn start_line(&self) -> u32 {
        self.range.start.line.saturating_add(1)
    }
    /// One-based start column, mirroring [`Self::start_line`].
    #[must_use]
    pub fn start_character(&self) -> u32 {
        self.range.start.character.saturating_add(1)
    }
}
impl Display for SymbolLocation {
    /// Formats as `path:line:column` with one-based coordinates.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let path = self.path.display();
        write!(f, "{path}:{}:{}", self.start_line(), self.start_character())
    }
}
/// Prompt-ready bundle of LSP context for one focus file/position:
/// workspace diagnostics plus definition and reference lookups.
#[derive(Debug, Clone, Default, PartialEq)]
pub struct LspContextEnrichment {
    /// The file the definition/reference lookups were issued against.
    pub file_path: PathBuf,
    /// Workspace-wide diagnostics snapshot.
    pub diagnostics: WorkspaceDiagnostics,
    /// Definition locations for the symbol at the queried position.
    pub definitions: Vec<SymbolLocation>,
    /// Reference locations for the symbol at the queried position.
    pub references: Vec<SymbolLocation>,
}
impl LspContextEnrichment {
#[must_use]
pub fn is_empty(&self) -> bool {
self.diagnostics.is_empty() && self.definitions.is_empty() && self.references.is_empty()
}
#[must_use]
pub fn render_prompt_section(&self) -> String {
const MAX_RENDERED_DIAGNOSTICS: usize = 12;
const MAX_RENDERED_LOCATIONS: usize = 12;
let mut lines = vec!["# LSP context".to_string()];
lines.push(format!(" - Focus file: {}", self.file_path.display()));
lines.push(format!(
" - Workspace diagnostics: {} across {} file(s)",
self.diagnostics.total_diagnostics(),
self.diagnostics.files.len()
));
if !self.diagnostics.files.is_empty() {
lines.push(String::new());
lines.push("Diagnostics:".to_string());
let mut rendered = 0usize;
for file in &self.diagnostics.files {
for diagnostic in &file.diagnostics {
if rendered == MAX_RENDERED_DIAGNOSTICS {
lines.push(" - Additional diagnostics omitted for brevity.".to_string());
break;
}
let severity = diagnostic_severity_label(diagnostic.severity);
lines.push(format!(
" - {}:{}:{} [{}] {}",
file.path.display(),
diagnostic.range.start.line + 1,
diagnostic.range.start.character + 1,
severity,
diagnostic.message.replace('\n', " ")
));
rendered += 1;
}
if rendered == MAX_RENDERED_DIAGNOSTICS {
break;
}
}
}
if !self.definitions.is_empty() {
lines.push(String::new());
lines.push("Definitions:".to_string());
lines.extend(
self.definitions
.iter()
.take(MAX_RENDERED_LOCATIONS)
.map(|location| format!(" - {location}")),
);
if self.definitions.len() > MAX_RENDERED_LOCATIONS {
lines.push(" - Additional definitions omitted for brevity.".to_string());
}
}
if !self.references.is_empty() {
lines.push(String::new());
lines.push("References:".to_string());
lines.extend(
self.references
.iter()
.take(MAX_RENDERED_LOCATIONS)
.map(|location| format!(" - {location}")),
);
if self.references.len() > MAX_RENDERED_LOCATIONS {
lines.push(" - Additional references omitted for brevity.".to_string());
}
}
lines.join("\n")
}
}
/// Lower-cases `extension` and guarantees a leading dot, so extension
/// lookups are case- and dot-insensitive ("RS", ".rs", ".Rs" → ".rs").
#[must_use]
pub(crate) fn normalize_extension(extension: &str) -> String {
    let lowered = extension.to_ascii_lowercase();
    if lowered.starts_with('.') {
        lowered
    } else {
        format!(".{lowered}")
    }
}
fn diagnostic_severity_label(severity: Option<lsp_types::DiagnosticSeverity>) -> &'static str {
match severity {
Some(lsp_types::DiagnosticSeverity::ERROR) => "error",
Some(lsp_types::DiagnosticSeverity::WARNING) => "warning",
Some(lsp_types::DiagnosticSeverity::INFORMATION) => "info",
Some(lsp_types::DiagnosticSeverity::HINT) => "hint",
_ => "unknown",
}
}