From a215a2116aa69c0d69768841a1573931619cf7e6 Mon Sep 17 00:00:00 2001 From: sofia Date: Sun, 3 Aug 2025 21:29:40 +0300 Subject: [PATCH] Add reference-finding --- reid-lsp/src/analysis.rs | 16 +++++++++ reid-lsp/src/main.rs | 73 +++++++++++++++++++++++++++++++--------- 2 files changed, 74 insertions(+), 15 deletions(-) diff --git a/reid-lsp/src/analysis.rs b/reid-lsp/src/analysis.rs index bd2e9e4..f712c3f 100644 --- a/reid-lsp/src/analysis.rs +++ b/reid-lsp/src/analysis.rs @@ -53,6 +53,22 @@ impl StaticAnalysis { let def_token_idx = self.state.symbol_to_token.get(&definition_id)?; self.tokens.get(*def_token_idx) } + + pub fn find_references(&self, token_idx: usize) -> Option<Vec<SymbolId>> { + let mut references = Vec::new(); + let semantic_token = self.state.map.get(&token_idx)?; + let symbol_id = semantic_token.symbol?; + let definition_id = self.state.find_definition(&symbol_id); + references.push(definition_id); + + for semantic_symbol in &self.state.symbol_table { + if let SemanticKind::Reference(idx) = semantic_symbol.kind { + references.push(idx); + } + } + + Some(references) + } } #[derive(Debug, Clone)] diff --git a/reid-lsp/src/main.rs b/reid-lsp/src/main.rs index dcc757e..9bef85c 100644 --- a/reid-lsp/src/main.rs +++ b/reid-lsp/src/main.rs @@ -9,11 +9,11 @@ use tower_lsp::lsp_types::{ self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity, DidChangeTextDocumentParams, DidOpenTextDocumentParams, DocumentFilter, GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverContents, HoverParams, HoverProviderCapability, InitializeParams, - InitializeResult, InitializedParams, MarkupContent, MarkupKind, MessageType, OneOf, Range, SemanticToken, - SemanticTokensLegend, SemanticTokensOptions, SemanticTokensParams, SemanticTokensResult, - SemanticTokensServerCapabilities, ServerCapabilities, TextDocumentItem, TextDocumentRegistrationOptions, - TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, 
WorkspaceFoldersServerCapabilities, - WorkspaceServerCapabilities, + InitializeResult, InitializedParams, Location, MarkupContent, MarkupKind, MessageType, OneOf, Range, + ReferenceParams, SemanticToken, SemanticTokensLegend, SemanticTokensOptions, SemanticTokensParams, + SemanticTokensResult, SemanticTokensServerCapabilities, ServerCapabilities, TextDocumentItem, + TextDocumentRegistrationOptions, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, + WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities, }; use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc}; @@ -75,6 +75,7 @@ impl LanguageServer for Backend { }, )), definition_provider: Some(OneOf::Left(true)), + references_provider: Some(OneOf::Left(true)), ..Default::default() }; Ok(InitializeResult { @@ -266,16 +267,7 @@ impl LanguageServer for Backend { if let Some(def_token) = analysis.find_definition(token.0) { return Ok(Some(GotoDefinitionResponse::Scalar(lsp_types::Location { uri: params.text_document_position_params.text_document.uri, - range: Range { - start: lsp_types::Position { - line: def_token.position.1.max(1) - 1, - character: def_token.position.0.max(1) - 1, - }, - end: lsp_types::Position { - line: def_token.position.1.max(1) - 1, - character: def_token.position.0.max(1) - 1 + def_token.token.len() as u32, - }, - }, + range: token_to_range(def_token), }))); } } @@ -283,6 +275,57 @@ impl LanguageServer for Backend { Ok(None) } + + async fn references(&self, params: ReferenceParams) -> jsonrpc::Result<Option<Vec<Location>>> { + let path = PathBuf::from(params.text_document_position.text_document.uri.path()); + let file_name = path.file_name().unwrap().to_str().unwrap().to_owned(); + let analysis = self.analysis.get(&file_name); + let position = params.text_document_position.position; + + if let Some(analysis) = &analysis { + let token = analysis.tokens.iter().enumerate().find(|(_, tok)| { + tok.position.1 == position.line + 1 + && (tok.position.0 <= position.character 
+ 1 && (tok.position.0 + tok.token.len() as u32) > position.character + 1) }); if let Some(token) = token { + let tokens = analysis.find_references(token.0).map(|symbols| { + symbols + .iter() + .map(|symbol_id| analysis.state.symbol_to_token.get(&symbol_id).cloned().unwrap()) + .collect::<Vec<_>>() + }); + let mut locations = Vec::new(); + if let Some(tokens) = tokens { + for token_idx in tokens { + let token = analysis.tokens.get(token_idx).unwrap(); + locations.push(Location { + uri: params.text_document_position.text_document.uri.clone(), + range: token_to_range(token), + }); + } + } + Ok(Some(locations)) + } else { + Ok(None) + } + } else { + Ok(None) + } + } +} + +fn token_to_range(token: &FullToken) -> lsp_types::Range { + Range { + start: lsp_types::Position { + line: token.position.1.max(1) - 1, + character: token.position.0.max(1) - 1, + }, + end: lsp_types::Position { + line: token.position.1.max(1) - 1, + character: token.position.0.max(1) - 1 + token.token.len() as u32, + }, + } } impl Backend {