Add go-to-definition

Sofia 2025-08-03 20:57:56 +03:00
parent 79b3c6b3ef
commit 82537224e7
2 changed files with 99 additions and 27 deletions

View File

@@ -45,7 +45,7 @@ pub struct StaticAnalysis {
 }

 #[derive(Debug, Clone)]
-pub struct TokenAnalysis {
+pub struct SemanticToken {
     pub ty: Option<TypeKind>,
     pub autocomplete: Vec<Autocomplete>,
     pub symbol: Option<SymbolId>,
@@ -80,15 +80,17 @@ impl ToString for AutocompleteKind {
     }
 }

-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct SymbolId(usize);

 #[derive(Debug, Clone)]
 pub struct AnalysisState {
     /// TokenID -> Analysis map, containing SymbolIDs
-    pub map: HashMap<usize, TokenAnalysis>,
+    pub map: HashMap<usize, SemanticToken>,
     /// SymbolID -> Symbol
     symbol_table: Vec<Symbol>,
+    /// SymbolID -> Symbol
+    pub symbol_to_token: HashMap<SymbolId, usize>,
 }

 impl AnalysisState {
@@ -97,18 +99,12 @@ impl AnalysisState {
     }
 }

-#[derive(Debug, Clone)]
-pub struct Symbol {
-    pub kind: SemanticKind,
-    pub definition: usize,
-}
-
 impl AnalysisState {
     pub fn init_types(&mut self, meta: &mir::Metadata, ty: Option<TypeKind>) {
         for token in meta.range.start..=meta.range.end {
             self.map.insert(
                 token,
-                TokenAnalysis {
+                SemanticToken {
                     ty: ty.clone(),
                     autocomplete: Vec::new(),
                     symbol: Default::default(),
@@ -123,7 +119,7 @@ impl AnalysisState {
         } else {
             self.map.insert(
                 token_idx,
-                TokenAnalysis {
+                SemanticToken {
                     ty: None,
                     autocomplete: autocomplete.clone(),
                     symbol: Default::default(),
@@ -133,12 +129,13 @@ impl AnalysisState {
     }

     pub fn set_symbol(&mut self, idx: usize, symbol: SymbolId) {
+        self.symbol_to_token.insert(symbol, idx);
         if let Some(token) = self.map.get_mut(&idx) {
             token.symbol = Some(symbol);
         } else {
             self.map.insert(
                 idx,
-                TokenAnalysis {
+                SemanticToken {
                     ty: None,
                     autocomplete: Vec::new(),
                     symbol: Some(symbol),
@@ -152,6 +149,20 @@ impl AnalysisState {
         self.symbol_table.push(Symbol { kind, definition });
         id
     }
+
+    pub fn find_definition(&self, id: &SymbolId) -> SymbolId {
+        let symbol = self.get_symbol(*id);
+        match symbol.kind {
+            SemanticKind::Reference(idx) => self.find_definition(&idx),
+            _ => *id,
+        }
+    }
 }
+
+#[derive(Debug, Clone)]
+pub struct Symbol {
+    pub kind: SemanticKind,
+    pub definition: usize,
+}

 pub struct AnalysisScope<'a> {
@@ -298,6 +309,7 @@ pub fn analyze_context(context: &mir::Context, module: &mir::Module, error: Opti
     let mut state = AnalysisState {
         map: HashMap::new(),
         symbol_table: Vec::new(),
+        symbol_to_token: HashMap::new(),
     };

     let mut scope = AnalysisScope {
@@ -615,19 +627,32 @@ pub fn analyze_expr(
                 analyze_expr(context, source_module, expr, scope);
             }
         }
-        mir::ExprKind::Struct(_, items) => {
-            let idx = scope
+        mir::ExprKind::Struct(struct_name, items) => {
+            let struct_type = TypeKind::CustomType(CustomTypeKey(struct_name.clone(), source_module.module_id));
+            let struct_idx = scope
                 .token_idx(&expr.1, |t| matches!(t, Token::Identifier(_)))
                 .unwrap_or(expr.1.range.end);
-            let symbol = scope.state.new_symbol(idx, SemanticKind::Struct);
-            scope.state.set_symbol(idx, symbol);
-            for (_, expr, field_meta) in items {
-                let idx = scope
+            let struct_symbol = if let Some(symbol_id) = scope.types.get(&struct_type) {
+                scope.state.new_symbol(struct_idx, SemanticKind::Reference(*symbol_id))
+            } else {
+                scope.state.new_symbol(struct_idx, SemanticKind::Struct)
+            };
+            scope.state.set_symbol(struct_idx, struct_symbol);
+            for (field_name, expr, field_meta) in items {
+                let field_idx = scope
                     .token_idx(&field_meta, |t| matches!(t, Token::Identifier(_)))
                     .unwrap_or(field_meta.range.end);
-                let symbol = scope.state.new_symbol(idx, SemanticKind::Property);
-                scope.state.set_symbol(idx, symbol);
+                let field_symbol =
+                    if let Some(symbol_id) = scope.properties.get(&(struct_type.clone(), field_name.clone())) {
+                        scope.state.new_symbol(field_idx, SemanticKind::Reference(*symbol_id))
+                    } else {
+                        scope.state.new_symbol(field_idx, SemanticKind::Property)
+                    };
+                scope.state.set_symbol(field_idx, field_symbol);
                 analyze_expr(context, source_module, expr, scope);
             }

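Taken together, the changes in this file form the lookup chain that go-to-definition needs: every token gets a SemanticToken entry, a token that refers to something carries a SymbolId, SemanticKind::Reference links a use site to the symbol it points at, and the new symbol_to_token map leads from a SymbolId back to the token index where it was introduced. The following is a minimal standalone sketch of that chain, not the real module: SemanticKind is reduced to the two variants needed here, get_symbol is assumed to index straight into symbol_table, and the symbol_to_token bookkeeping that set_symbol performs in the diff is folded into new_symbol for brevity.

// Minimal sketch of the symbol-resolution chain; simplified stand-ins, not the real analysis module.
use std::collections::HashMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct SymbolId(usize);

#[derive(Debug, Clone)]
enum SemanticKind {
    Variable,
    Reference(SymbolId),
}

#[derive(Debug, Clone)]
struct Symbol {
    kind: SemanticKind,
    definition: usize, // token index where the symbol is introduced
}

#[derive(Default)]
struct AnalysisState {
    symbol_table: Vec<Symbol>,
    symbol_to_token: HashMap<SymbolId, usize>,
}

impl AnalysisState {
    // Register a symbol at a token index; in the diff the symbol_to_token
    // entry is written by set_symbol, folded in here for brevity.
    fn new_symbol(&mut self, definition: usize, kind: SemanticKind) -> SymbolId {
        let id = SymbolId(self.symbol_table.len());
        self.symbol_table.push(Symbol { kind, definition });
        self.symbol_to_token.insert(id, definition);
        id
    }

    fn get_symbol(&self, id: SymbolId) -> &Symbol {
        &self.symbol_table[id.0]
    }

    // Follow Reference links until a non-reference symbol is reached,
    // mirroring find_definition in the diff above.
    fn find_definition(&self, id: &SymbolId) -> SymbolId {
        match self.get_symbol(*id).kind {
            SemanticKind::Reference(idx) => self.find_definition(&idx),
            _ => *id,
        }
    }
}

fn main() {
    let mut state = AnalysisState::default();
    // Token 3 defines a variable; tokens 10 and 17 refer to it, forming a chain.
    let def = state.new_symbol(3, SemanticKind::Variable);
    let use1 = state.new_symbol(10, SemanticKind::Reference(def));
    let use2 = state.new_symbol(17, SemanticKind::Reference(use1));

    // Resolving either use site lands on the defining symbol...
    let target = state.find_definition(&use2);
    assert_eq!(target, def);
    // ...and symbol_to_token yields the token index to jump to.
    assert_eq!(state.symbol_to_token[&target], 3);
    println!("definition token: {}", state.get_symbol(target).definition);
}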
View File

@@ -7,13 +7,12 @@ use reid::mir::SourceModuleId;
 use reid::parse_module;
 use tower_lsp::lsp_types::{
     self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity,
-    DidChangeTextDocumentParams, DidOpenTextDocumentParams, DocumentFilter, DocumentSelector, Hover, HoverContents,
-    HoverParams, HoverProviderCapability, InitializeParams, InitializeResult, InitializedParams, MarkupContent,
-    MarkupKind, MessageType, OneOf, Range, SemanticToken, SemanticTokenModifier, SemanticTokenType,
-    SemanticTokensLegend, SemanticTokensOptions, SemanticTokensParams, SemanticTokensRangeParams,
-    SemanticTokensRangeResult, SemanticTokensResult, SemanticTokensServerCapabilities, ServerCapabilities,
-    StaticRegistrationOptions, TextDocumentItem, TextDocumentRegistrationOptions, TextDocumentSyncCapability,
-    TextDocumentSyncKind, TextDocumentSyncOptions, WorkDoneProgressOptions, WorkspaceFoldersServerCapabilities,
+    DidChangeTextDocumentParams, DidOpenTextDocumentParams, DocumentFilter, GotoDefinitionParams,
+    GotoDefinitionResponse, Hover, HoverContents, HoverParams, HoverProviderCapability, InitializeParams,
+    InitializeResult, InitializedParams, MarkupContent, MarkupKind, MessageType, OneOf, Range, SemanticToken,
+    SemanticTokensLegend, SemanticTokensOptions, SemanticTokensParams, SemanticTokensResult,
+    SemanticTokensServerCapabilities, ServerCapabilities, TextDocumentItem, TextDocumentRegistrationOptions,
+    TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFoldersServerCapabilities,
     WorkspaceServerCapabilities,
 };
 use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc};
@@ -75,6 +74,7 @@ impl LanguageServer for Backend {
                     static_registration_options: Default::default(),
                 },
             )),
+            definition_provider: Some(OneOf::Left(true)),
             ..Default::default()
         };
         Ok(InitializeResult {
@@ -248,6 +248,53 @@ impl LanguageServer for Backend {
             data: semantic_tokens,
         })))
     }
+
+    async fn goto_definition(&self, params: GotoDefinitionParams) -> jsonrpc::Result<Option<GotoDefinitionResponse>> {
+        let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
+        let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
+        let analysis = self.analysis.get(&file_name);
+
+        let position = params.text_document_position_params.position;
+        if let Some(analysis) = &analysis {
+            let token = analysis.tokens.iter().enumerate().find(|(_, tok)| {
+                tok.position.1 == position.line + 1
+                    && (tok.position.0 <= position.character + 1
+                        && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
+            });
+
+            if let Some(token) = token {
+                dbg!(token);
+                if let Some(semantic_token) = analysis.state.map.get(&token.0) {
+                    dbg!(semantic_token);
+                    if let Some(symbol_id) = semantic_token.symbol {
+                        dbg!(symbol_id);
+                        let definition_id = analysis.state.find_definition(&symbol_id);
+                        if let Some(def_token_idx) = analysis.state.symbol_to_token.get(&definition_id) {
+                            dbg!(def_token_idx);
+                            if let Some(def_token) = analysis.tokens.get(*def_token_idx) {
+                                dbg!(def_token);
+                                return Ok(Some(GotoDefinitionResponse::Scalar(lsp_types::Location {
+                                    uri: params.text_document_position_params.text_document.uri,
+                                    range: Range {
+                                        start: lsp_types::Position {
+                                            line: def_token.position.1.max(1) - 1,
+                                            character: def_token.position.0.max(1) - 1,
+                                        },
+                                        end: lsp_types::Position {
+                                            line: def_token.position.1.max(1) - 1,
+                                            character: def_token.position.0.max(1) - 1 + def_token.token.len() as u32,
+                                        },
+                                    },
+                                })));
+                            }
+                        }
+                    }
+                }
+            }
+        };
+
+        Ok(None)
+    }
}
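The only subtle part of the handler above is coordinate conversion: the `+ 1` comparisons and the `max(1) - 1` arithmetic imply that the lexer's token.position is a 1-based (column, line) pair, while LSP positions are 0-based (line, character), so the cursor check adds 1 and the returned Range subtracts 1, clamped with max(1) so a stray zero cannot underflow. A small sketch of the same arithmetic, using hypothetical helper names (token_range, cursor_in_token) that are not part of the diff:

use tower_lsp::lsp_types::{Position, Range};

// Hypothetical helper: convert a 1-based (column, line) token position plus a
// token length into a 0-based LSP Range, mirroring the handler's arithmetic.
fn token_range(position: (u32, u32), len: u32) -> Range {
    let line = position.1.max(1) - 1; // 1-based line -> 0-based
    let character = position.0.max(1) - 1; // 1-based column -> 0-based
    Range {
        start: Position { line, character },
        end: Position { line, character: character + len },
    }
}

// Hypothetical helper: does a 0-based LSP cursor fall inside a token that
// starts at a 1-based (column, line) position and spans `len` characters?
fn cursor_in_token(cursor: Position, position: (u32, u32), len: u32) -> bool {
    position.1 == cursor.line + 1
        && position.0 <= cursor.character + 1
        && position.0 + len > cursor.character + 1
}

fn main() {
    // A token starting at column 5 on line 12 (1-based), 3 characters long.
    let range = token_range((5, 12), 3);
    assert_eq!(range.start, Position { line: 11, character: 4 });
    assert!(cursor_in_token(Position { line: 11, character: 6 }, (5, 12), 3));
}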
impl Backend {