Add definition and reference modifiers
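This teaches the Reid language server to emit semantic token modifiers: definitions are tagged with the standard `definition` modifier, and use sites (variable reads, function calls) are tagged with a custom `reference` modifier and linked to their defining symbol. Along the way it renames `StaticAnalysis::token_analysis` to `state`, tracks function symbols per scope, and deletes leftover `dbg!` calls plus the unused `semantic_tokens_range` handler.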

Sofia 2025-08-03 19:01:51 +03:00
parent 018f3e2561
commit dcc53498e7
2 changed files with 53 additions and 68 deletions

First changed file — the analysis module:

@@ -15,7 +15,7 @@ use reid::{
     },
     perform_all_passes,
 };
-use tower_lsp::lsp_types::SemanticTokenType;
+use tower_lsp::lsp_types::{SemanticTokenModifier, SemanticTokenType};

 pub const TOKEN_LEGEND: [SemanticTokenType; 9] = [
     SemanticTokenType::VARIABLE,
@@ -29,10 +29,14 @@ pub const TOKEN_LEGEND: [SemanticTokenType; 9] = [
     SemanticTokenType::PROPERTY,
 ];

+const SEMANTIC_REFERENCE: SemanticTokenModifier = SemanticTokenModifier::new("reference");
+
+pub const MODIFIER_LEGEND: [SemanticTokenModifier; 2] = [SemanticTokenModifier::DEFINITION, SEMANTIC_REFERENCE];
+
 #[derive(Debug, Clone)]
 pub struct StaticAnalysis {
     pub tokens: Vec<FullToken>,
-    pub token_analysis: AnalysisState,
+    pub state: AnalysisState,
     pub error: Option<ReidError>,
 }
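A note on the two modifiers: `SemanticTokenModifier::DEFINITION` is one of the modifiers predefined by the LSP specification, while `SemanticTokenModifier::new("reference")` mints a custom one. Both are valid, because clients interpret modifiers purely by their position in the legend the server advertises. That makes the order of `MODIFIER_LEGEND` significant: bit i of a token's `token_modifiers_bitset` refers to the modifier at index i of this array.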
@@ -150,6 +154,7 @@ pub struct AnalysisScope<'a> {
     state: &'a mut AnalysisState,
     tokens: &'a Vec<FullToken>,
     variables: HashMap<String, SymbolId>,
+    functions: HashMap<String, SymbolId>,
 }

 impl<'a> AnalysisScope<'a> {
@@ -158,6 +163,7 @@ impl<'a> AnalysisScope<'a> {
             state: self.state,
             tokens: self.tokens,
             variables: self.variables.clone(),
+            functions: self.functions.clone(),
         }
     }
@@ -182,6 +188,8 @@ pub enum SemanticKind {
     Default,
     Variable,
     Function,
+    Reference(SymbolId),
+    Type,
 }

 impl Default for SemanticKind {
@@ -191,11 +199,13 @@ impl Default for SemanticKind {
 }

 impl SemanticKind {
-    pub fn into_token_idx(&self) -> Option<u32> {
+    pub fn into_token_idx(&self, state: &AnalysisState) -> Option<u32> {
         let token_type = match self {
             SemanticKind::Variable => SemanticTokenType::VARIABLE,
             SemanticKind::Function => SemanticTokenType::FUNCTION,
+            SemanticKind::Type => SemanticTokenType::TYPE,
             SemanticKind::Default => return None,
+            SemanticKind::Reference(symbol_id) => return state.get_symbol(*symbol_id).kind.into_token_idx(state),
         };
         TOKEN_LEGEND
             .iter()
@@ -203,9 +213,22 @@ impl SemanticKind {
             .find(|(_, t)| token_type == **t)
             .map(|(i, _)| i as u32)
     }
-}

-type TokenAnalysisMap = HashMap<usize, TokenAnalysis>;
+    pub fn get_modifier(&self) -> Option<u32> {
+        let token_type = match self {
+            SemanticKind::Variable => SemanticTokenModifier::DEFINITION,
+            SemanticKind::Function => SemanticTokenModifier::DEFINITION,
+            SemanticKind::Type => return None,
+            SemanticKind::Default => return None,
+            SemanticKind::Reference(_) => SEMANTIC_REFERENCE,
+        };
+        MODIFIER_LEGEND
+            .iter()
+            .enumerate()
+            .find(|(_, t)| token_type == **t)
+            .map(|(i, _)| 1 << i)
+    }
+}

 pub fn analyze(
     module_id: SourceModuleId,
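A note on the shape of `get_modifier`: it returns the modifier's *bit* (`1 << i`), not its index, so results for several modifiers could later be OR-ed into a single bitset. A minimal decoding sketch from the client's point of view (the legend array here is a stand-in mirroring `MODIFIER_LEGEND`'s order, not real client code):

```rust
/// Bit i of `token_modifiers_bitset` selects the modifier at index i of the
/// advertised legend, which is why `get_modifier` returns `1 << i` rather
/// than `i` itself.
fn decode_modifiers(bitset: u32) -> Vec<&'static str> {
    const LEGEND: [&str; 2] = ["definition", "reference"]; // mirrors MODIFIER_LEGEND
    LEGEND
        .iter()
        .enumerate()
        .filter(|&(i, _)| bitset & (1 << i) != 0)
        .map(|(_, name)| *name)
        .collect()
}

fn main() {
    assert_eq!(decode_modifiers(0b01), vec!["definition"]);
    assert_eq!(decode_modifiers(0b10), vec!["reference"]);
    assert_eq!(decode_modifiers(0b11), vec!["definition", "reference"]);
}
```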
@@ -250,6 +273,7 @@ pub fn analyze_context(context: &mir::Context, module: &mir::Module, error: Option<ReidError>)
         state: &mut state,
         tokens: &module.tokens,
         variables: HashMap::new(),
+        functions: HashMap::new(),
     };

     for import in &module.imports {
         scope.state.init_types(&import.1, None);
@@ -327,12 +351,10 @@ pub fn analyze_context(context: &mir::Context, module: &mir::Module, error: Option<ReidError>)
             .state
             .init_types(&function.signature(), Some(function.return_type.clone()));

-        dbg!(&function.signature());
-        dbg!(&scope.tokens.get(function.signature().range.start));
         let idx = scope.token_idx(&function.signature(), |t| matches!(t, Token::Identifier(_)));
-        dbg!(idx, scope.tokens.get(idx));
-        let symbol = scope.state.new_symbol(idx, SemanticKind::Function);
-        scope.state.set_symbol(idx, symbol);
+        let function_symbol = scope.state.new_symbol(idx, SemanticKind::Function);
+        scope.state.set_symbol(idx, function_symbol);
+        scope.functions.insert(function.name.clone(), function_symbol);

         for param in &function.parameters {
             scope.state.init_types(&param.meta, Some(param.ty.clone()));
@@ -351,7 +373,7 @@ pub fn analyze_context(context: &mir::Context, module: &mir::Module, error: Option<ReidError>)
     StaticAnalysis {
         tokens: module.tokens.clone(),
-        token_analysis: state,
+        state,
         error,
     }
 }
@@ -420,7 +442,7 @@ pub fn analyze_expr(
             let idx = scope.token_idx(&var_ref.2, |t| matches!(t, Token::Identifier(_)));
             let symbol = if let Some(symbol_id) = scope.variables.get(&var_ref.1) {
-                *symbol_id
+                scope.state.new_symbol(idx, SemanticKind::Reference(*symbol_id))
             } else {
                 scope.state.new_symbol(idx, SemanticKind::Variable)
             };
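With this change a variable read no longer reuses the definition's `SymbolId` directly; it gets its own symbol of kind `Reference(definition_id)`, and the updated `into_token_idx` above chases that link so the use site is colored like its target. A self-contained sketch of that resolution, with invented stand-in types rather than Reid's own:

```rust
// Simplified stand-ins for AnalysisState/SemanticKind, showing how a
// reference resolves its token kind by following the definition's id.
#[derive(Clone, Copy)]
struct SymbolId(usize);

enum Kind {
    Variable,
    Function,
    Reference(SymbolId), // points at the defining symbol
}

struct State {
    symbols: Vec<Kind>,
}

impl State {
    fn token_kind(&self, id: SymbolId) -> &'static str {
        match self.symbols[id.0] {
            Kind::Variable => "variable",
            Kind::Function => "function",
            // One hop per Reference; terminates because definitions are
            // never themselves created with Kind::Reference.
            Kind::Reference(def) => self.token_kind(def),
        }
    }
}

fn main() {
    let state = State {
        symbols: vec![Kind::Function, Kind::Reference(SymbolId(0))],
    };
    // The use site (symbol 1) inherits the definition's kind.
    assert_eq!(state.token_kind(SymbolId(1)), "function");
}
```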
@@ -489,10 +511,20 @@ pub fn analyze_expr(
             analyze_expr(context, source_module, &lhs, scope);
             analyze_expr(context, source_module, &rhs, scope);
         }
-        mir::ExprKind::FunctionCall(FunctionCall { parameters, .. }) => {
+        mir::ExprKind::FunctionCall(FunctionCall {
+            parameters, meta, name, ..
+        }) => {
             for expr in parameters {
                 analyze_expr(context, source_module, expr, scope);
             }
+
+            let idx = scope.token_idx(&meta, |t| matches!(t, Token::Identifier(_)));
+            let symbol = if let Some(symbol_id) = scope.functions.get(name) {
+                scope.state.new_symbol(idx, SemanticKind::Reference(*symbol_id))
+            } else {
+                scope.state.new_symbol(idx, SemanticKind::Function)
+            };
+            scope.state.set_symbol(idx, symbol);
         }
         mir::ExprKind::AssociatedFunctionCall(
             ty,
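Call sites now resolve through the new `scope.functions` table exactly the way variable reads go through `scope.variables`: a hit yields a `Reference` to the definition's symbol, while a miss (for instance a function this pass never registered, such as one coming from an import) falls back to a standalone `Function` symbol with no back-link.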

Second changed file — the LSP server backend (the file that declares `mod analysis;`):

@@ -18,7 +18,7 @@ use tower_lsp::lsp_types::{
 };
 use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc};

-use crate::analysis::{StaticAnalysis, TOKEN_LEGEND, analyze};
+use crate::analysis::{MODIFIER_LEGEND, StaticAnalysis, TOKEN_LEGEND, analyze};

 mod analysis;
@@ -67,7 +67,7 @@ impl LanguageServer for Backend {
                     work_done_progress_options: Default::default(),
                     legend: SemanticTokensLegend {
                         token_types: TOKEN_LEGEND.into(),
-                        token_modifiers: vec![],
+                        token_modifiers: MODIFIER_LEGEND.into(),
                     },
                     range: None,
                     full: Some(lsp_types::SemanticTokensFullOptions::Bool(true)),
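Design note: `MODIFIER_LEGEND.into()` must list the modifier names in the same order `get_modifier` indexes them, or clients will silently apply the wrong decoration. Also worth noting: standard modifiers such as `definition` typically get default styling from editors, while the custom `reference` modifier only has a visible effect once a client or theme maps it to something.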
@@ -112,7 +112,7 @@ impl LanguageServer for Backend {
         // dbg!(position, token);

         let list = if let Some((idx, _)) = token {
-            if let Some(analysis) = self.analysis.get(&file_name).unwrap().token_analysis.map.get(&idx) {
+            if let Some(analysis) = self.analysis.get(&file_name).unwrap().state.map.get(&idx) {
                 dbg!(&analysis);
                 analysis
                     .autocomplete
@@ -147,7 +147,7 @@ impl LanguageServer for Backend {
         };

         let (range, ty) = if let Some((idx, token)) = token {
-            if let Some(analysis) = self.analysis.get(&file_name).unwrap().token_analysis.map.get(&idx) {
+            if let Some(analysis) = self.analysis.get(&file_name).unwrap().state.map.get(&idx) {
                 let start = token.position;
                 let end = token.position.add(token.token.len() as u32);
                 let range = Range {
@@ -223,16 +223,16 @@ impl LanguageServer for Backend {
                     vscode_col
                 };

-                if let Some(token_analysis) = analysis.token_analysis.map.get(&i) {
+                if let Some(token_analysis) = analysis.state.map.get(&i) {
                     if let Some(symbol_id) = token_analysis.symbol {
-                        let symbol = analysis.token_analysis.get_symbol(symbol_id);
-                        if let Some(idx) = symbol.kind.into_token_idx() {
+                        let symbol = analysis.state.get_symbol(symbol_id);
+                        if let Some(idx) = symbol.kind.into_token_idx(&analysis.state) {
                             let semantic_token = SemanticToken {
                                 delta_line,
                                 delta_start,
                                 length: token.token.len() as u32,
                                 token_type: idx,
-                                token_modifiers_bitset: 0,
+                                token_modifiers_bitset: symbol.kind.get_modifier().unwrap_or(0),
                             };
                             semantic_tokens.push(semantic_token);
                             dbg!(semantic_token, prev_line, prev_start, token);
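For context on the fields being filled in here: semantic tokens reach the client as a flat, position-delta-encoded array. A worked example with assumed values (positions, legend indices, and the helper name are illustrative, not from the commit) — per the LSP spec, `delta_start` is relative to the previous token's start when both tokens share a line, and absolute on a fresh line:

```rust
use tower_lsp::lsp_types::SemanticToken;

// Two identifier tokens on one line: a definition at column 4 and a
// reference to it at column 11, both 3 characters long.
fn example_tokens() -> Vec<SemanticToken> {
    vec![
        SemanticToken {
            delta_line: 0,                  // first token: line delta from 0
            delta_start: 4,                 // first token on its line: absolute column
            length: 3,
            token_type: 1,                  // assumed index of FUNCTION in TOKEN_LEGEND
            token_modifiers_bitset: 1 << 0, // bit 0 = DEFINITION in MODIFIER_LEGEND
        },
        SemanticToken {
            delta_line: 0,                  // same line as the previous token
            delta_start: 7,                 // 11 - 4: relative to the previous start
            length: 3,
            token_type: 1,                  // a reference inherits its target's type
            token_modifiers_bitset: 1 << 1, // bit 1 = the custom "reference" modifier
        },
    ]
}
```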
@@ -249,53 +249,6 @@ impl LanguageServer for Backend {
             data: semantic_tokens,
         })))
     }
-
-    async fn semantic_tokens_range(
-        &self,
-        params: SemanticTokensRangeParams,
-    ) -> jsonrpc::Result<Option<SemanticTokensRangeResult>> {
-        let path = PathBuf::from(params.text_document.uri.path());
-        let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
-        let analysis = self.analysis.get(&file_name);
-
-        dbg!("semantic_token_range");
-
-        let mut semantic_tokens = Vec::new();
-        if let Some(analysis) = analysis {
-            let mut prev_line = 0;
-            let mut prev_start = 0;
-            for (i, token) in analysis.tokens.iter().enumerate() {
-                let delta_line = token.position.1 - prev_line;
-                let delta_start = if delta_line == 0 {
-                    token.position.0
-                } else {
-                    token.position.0 - prev_start
-                };
-                prev_line = token.position.1;
-                prev_start = token.position.0;
-
-                if let Some(token_analysis) = analysis.token_analysis.map.get(&i) {
-                    if let Some(symbol_id) = token_analysis.symbol {
-                        let symbol = analysis.token_analysis.get_symbol(symbol_id);
-                        if let Some(idx) = symbol.kind.into_token_idx() {
-                            semantic_tokens.push(SemanticToken {
-                                delta_line,
-                                delta_start,
-                                length: token.token.len() as u32,
-                                token_type: idx,
-                                token_modifiers_bitset: 0,
-                            });
-                        }
-                    }
-                }
-            }
-        }
-
-        dbg!(&semantic_tokens);
-
-        Ok(Some(SemanticTokensRangeResult::Tokens(lsp_types::SemanticTokens {
-            result_id: None,
-            data: semantic_tokens,
-        })))
-    }
 }

 impl Backend {
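Removing `semantic_tokens_range` is consistent with the capabilities registered above: `range: None` means the server never advertises support for `textDocument/semanticTokens/range`, so the handler was dead code duplicating the full-document delta-encoding loop, and tower-lsp's default response for unimplemented methods covers any stray request.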