Add reference-finding

Sofia 2025-08-03 21:29:40 +03:00
parent 0abeb0c4cd
commit a215a2116a
2 changed files with 74 additions and 15 deletions


@@ -53,6 +53,22 @@ impl StaticAnalysis {
        let def_token_idx = self.state.symbol_to_token.get(&definition_id)?;
        self.tokens.get(*def_token_idx)
    }

    pub fn find_references(&self, token_idx: usize) -> Option<Vec<SymbolId>> {
        let mut references = Vec::new();
        let semantic_token = self.state.map.get(&token_idx)?;
        let symbol_id = semantic_token.symbol?;
        let definition_id = self.state.find_definition(&symbol_id);
        references.push(definition_id);
        for semantic_symbol in &self.state.symbol_table {
            if let SemanticKind::Reference(idx) = semantic_symbol.kind {
                references.push(idx);
            }
        }
        Some(references)
    }
}
#[derive(Debug, Clone)]
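
For orientation, here is a minimal usage sketch of the new find_references method. It is not part of the commit: the helper name reference_token_indices is hypothetical, and it assumes the shapes visible in the hunk above (find_references returning Option<Vec<SymbolId>> and state.symbol_to_token mapping each SymbolId to a token index), resolving symbol ids back to token indices the same way the LSP handler in the second file does.

// Hypothetical helper, not part of the commit: map the SymbolIds returned by
// find_references back to token indices via the symbol_to_token map, assuming
// the field shapes shown in the diff above.
fn reference_token_indices(analysis: &StaticAnalysis, token_idx: usize) -> Vec<usize> {
    analysis
        .find_references(token_idx)          // Option<Vec<SymbolId>>
        .unwrap_or_default()
        .iter()
        .filter_map(|symbol_id| analysis.state.symbol_to_token.get(symbol_id).cloned())
        .collect()
}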


@@ -9,11 +9,11 @@ use tower_lsp::lsp_types::{
    self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity,
    DidChangeTextDocumentParams, DidOpenTextDocumentParams, DocumentFilter, GotoDefinitionParams,
    GotoDefinitionResponse, Hover, HoverContents, HoverParams, HoverProviderCapability, InitializeParams,
    InitializeResult, InitializedParams, MarkupContent, MarkupKind, MessageType, OneOf, Range, SemanticToken,
    SemanticTokensLegend, SemanticTokensOptions, SemanticTokensParams, SemanticTokensResult,
    SemanticTokensServerCapabilities, ServerCapabilities, TextDocumentItem, TextDocumentRegistrationOptions,
    TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFoldersServerCapabilities,
    WorkspaceServerCapabilities,
    InitializeResult, InitializedParams, Location, MarkupContent, MarkupKind, MessageType, OneOf, Range,
    ReferenceParams, SemanticToken, SemanticTokensLegend, SemanticTokensOptions, SemanticTokensParams,
    SemanticTokensResult, SemanticTokensServerCapabilities, ServerCapabilities, TextDocumentItem,
    TextDocumentRegistrationOptions, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
    WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
};
use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc};
@@ -75,6 +75,7 @@ impl LanguageServer for Backend {
                },
            )),
            definition_provider: Some(OneOf::Left(true)),
            references_provider: Some(OneOf::Left(true)),
            ..Default::default()
        };
        Ok(InitializeResult {
@@ -266,16 +267,7 @@ impl LanguageServer for Backend {
                if let Some(def_token) = analysis.find_definition(token.0) {
                    return Ok(Some(GotoDefinitionResponse::Scalar(lsp_types::Location {
                        uri: params.text_document_position_params.text_document.uri,
                        range: Range {
                            start: lsp_types::Position {
                                line: def_token.position.1.max(1) - 1,
                                character: def_token.position.0.max(1) - 1,
                            },
                            end: lsp_types::Position {
                                line: def_token.position.1.max(1) - 1,
                                character: def_token.position.0.max(1) - 1 + def_token.token.len() as u32,
                            },
                        },
                        range: token_to_range(def_token),
                    })));
                }
            }
@@ -283,6 +275,57 @@ impl LanguageServer for Backend {
        Ok(None)
    }

    async fn references(&self, params: ReferenceParams) -> jsonrpc::Result<Option<Vec<Location>>> {
        let path = PathBuf::from(params.text_document_position.text_document.uri.path());
        let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
        let analysis = self.analysis.get(&file_name);
        let position = params.text_document_position.position;
        if let Some(analysis) = &analysis {
            let token = analysis.tokens.iter().enumerate().find(|(_, tok)| {
                tok.position.1 == position.line + 1
                    && (tok.position.0 <= position.character + 1
                        && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
            });
            if let Some(token) = token {
                let tokens = analysis.find_references(token.0).map(|symbols| {
                    symbols
                        .iter()
                        .map(|symbol_id| analysis.state.symbol_to_token.get(&symbol_id).cloned().unwrap())
                        .collect::<Vec<_>>()
                });
                let mut locations = Vec::new();
                if let Some(tokens) = tokens {
                    for token_idx in tokens {
                        let token = analysis.tokens.get(token_idx).unwrap();
                        locations.push(Location {
                            uri: params.text_document_position.text_document.uri.clone(),
                            range: token_to_range(token),
                        });
                    }
                }
                Ok(Some(locations))
            } else {
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
}
fn token_to_range(token: &FullToken) -> lsp_types::Range {
    Range {
        start: lsp_types::Position {
            line: token.position.1.max(1) - 1,
            character: token.position.0.max(1) - 1,
        },
        end: lsp_types::Position {
            line: token.position.1.max(1) - 1,
            character: token.position.0.max(1) - 1 + token.token.len() as u32,
        },
    }
}

impl Backend {
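
As a sanity check on the 1-based to 0-based conversion that token_to_range performs, here is a minimal test sketch. It is not part of the commit and assumes FullToken exposes a token string and a position tuple laid out as (column, line), both 1-based; the diff only shows how those fields are read, not how they are defined.

// Hypothetical test, not part of the commit. Assumes
// FullToken { token: String, position: (u32, u32) } with position = (column, line), 1-based.
#[cfg(test)]
mod token_to_range_tests {
    use super::*;

    #[test]
    fn converts_one_based_token_position_to_zero_based_lsp_range() {
        let tok = FullToken {
            token: "foo".into(),
            position: (5, 3), // column 5, line 3 (1-based)
        };
        let range = token_to_range(&tok);
        assert_eq!(range.start.line, 2);        // 0-based line
        assert_eq!(range.start.character, 4);   // 0-based column
        assert_eq!(range.end.line, 2);
        assert_eq!(range.end.character, 4 + 3); // start column + token length
    }
}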