Add refactoring and go-to-definition for multifile
This commit is contained in:
parent 109fedb624 · commit 766a853b48
@@ -27,7 +27,7 @@ mod analysis;
 struct Backend {
     client: Client,
     analysis: DashMap<PathBuf, StaticAnalysis>,
-    module_to_url: DashMap<SourceModuleId, PathBuf>,
+    module_to_path: DashMap<SourceModuleId, PathBuf>,
     path_to_module: DashMap<PathBuf, SourceModuleId>,
     module_id_counter: Mutex<SourceModuleId>,
 }
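
Note: the rename from `module_to_url` to `module_to_path` matches what the map actually stores: filesystem paths, with LSP `Url`s derived on demand via `Url::from_file_path`. A minimal standalone sketch of the bidirectional module ⇄ path bookkeeping, assuming the `dashmap` crate and simplifying `SourceModuleId` to a `u32` newtype (the file path is hypothetical):

```rust
use std::path::PathBuf;

use dashmap::DashMap;

// Simplified stand-in for the crate's SourceModuleId.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct SourceModuleId(u32);

fn main() {
    let module_to_path: DashMap<SourceModuleId, PathBuf> = DashMap::new();
    let path_to_module: DashMap<PathBuf, SourceModuleId> = DashMap::new();

    let id = SourceModuleId(1);
    let path = PathBuf::from("/project/src/main.rs"); // hypothetical file
    module_to_path.insert(id, path.clone());
    path_to_module.insert(path.clone(), id);

    // Cross-file features need lookups in both directions: an analysis result
    // names a module id, which resolves to the file to report; an opened file
    // resolves back to its module id.
    assert_eq!(*module_to_path.get(&id).unwrap().value(), path);
    assert_eq!(*path_to_module.get(&path).unwrap().value(), id);
}
```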
@@ -80,6 +80,8 @@ impl LanguageServer for Backend {
                 },
             )),
             references_provider: Some(OneOf::Left(true)),
+            definition_provider: Some(OneOf::Left(true)),
+            rename_provider: Some(OneOf::Left(true)),
             ..Default::default()
         };
         Ok(InitializeResult {
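
Note: these two flags are what make clients start sending `textDocument/definition` and `textDocument/rename` requests at all; a server that does not advertise a capability never receives the request. A minimal sketch of the capability block, assuming tower-lsp's re-exported `lsp_types`:

```rust
use tower_lsp::lsp_types::{OneOf, ServerCapabilities};

// Each provider flag advertises one request the server is willing to answer.
fn capabilities() -> ServerCapabilities {
    ServerCapabilities {
        references_provider: Some(OneOf::Left(true)), // textDocument/references
        definition_provider: Some(OneOf::Left(true)), // textDocument/definition
        rename_provider: Some(OneOf::Left(true)),     // textDocument/rename
        ..Default::default()
    }
}

fn main() {
    let caps = capabilities();
    assert!(matches!(caps.rename_provider, Some(OneOf::Left(true))));
}
```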
@@ -250,34 +252,34 @@ impl LanguageServer for Backend {
         })))
     }

-    // async fn goto_definition(&self, params: GotoDefinitionParams) -> jsonrpc::Result<Option<GotoDefinitionResponse>> {
-    //     let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
-    //     let analysis = self.analysis.get(&path);
-    //     let position = params.text_document_position_params.position;
+    async fn goto_definition(&self, params: GotoDefinitionParams) -> jsonrpc::Result<Option<GotoDefinitionResponse>> {
+        let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
+        let analysis = self.analysis.get(&path);
+        let position = params.text_document_position_params.position;

-    //     if let Some(analysis) = &analysis {
-    //         let token = analysis.tokens.iter().enumerate().find(|(_, tok)| {
-    //             tok.position.1 == position.line + 1
-    //                 && (tok.position.0 <= position.character + 1
-    //                     && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
-    //         });
+        if let Some(analysis) = &analysis {
+            let token = analysis.tokens.iter().enumerate().find(|(_, tok)| {
+                tok.position.1 == position.line + 1
+                    && (tok.position.0 <= position.character + 1
+                        && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
+            });

-    //         if let Some(token) = token {
-    //             if let Some((module_id, def_token)) = analysis.find_definition(token.0, &self.state_map()) {
-    //                 return if let Some(path) = self.module_to_url.get(&module_id) {
-    //                     Ok(Some(GotoDefinitionResponse::Scalar(lsp_types::Location {
-    //                         uri: Url::from_file_path(path.value()).unwrap(),
-    //                         range: token_to_range(def_token),
-    //                     })))
-    //                 } else {
-    //                     Ok(None)
-    //                 };
-    //             }
-    //         }
-    //     };
+            if let Some(token) = token {
+                if let Some((module_id, def_token)) = analysis.find_definition(token.0, &self.state_map()) {
+                    return if let Some(path) = self.module_to_path.get(&module_id) {
+                        Ok(Some(GotoDefinitionResponse::Scalar(lsp_types::Location {
+                            uri: Url::from_file_path(path.value()).unwrap(),
+                            range: token_to_range(def_token),
+                        })))
+                    } else {
+                        Ok(None)
+                    };
+                }
+            }
+        };

-    //     Ok(None)
-    // }
+        Ok(None)
+    }

     async fn references(&self, params: ReferenceParams) -> jsonrpc::Result<Option<Vec<Location>>> {
         let path = PathBuf::from(params.text_document_position.text_document.uri.path());
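
Note on the `+ 1` adjustments in the hit test above: LSP positions are 0-based, while the analyzer's token positions appear to be 1-based `(column, line)` pairs, so the cursor is shifted into the analyzer's coordinate space before comparing. A standalone sketch under those assumptions, with `FullToken` simplified to a plain struct carrying its length:

```rust
struct Position { line: u32, character: u32 }       // LSP-style, 0-based
struct FullToken { position: (u32, u32), len: u32 } // (column, line), 1-based

// True when the 0-based LSP cursor falls inside the 1-based token span.
fn hits(tok: &FullToken, pos: &Position) -> bool {
    tok.position.1 == pos.line + 1                      // same line
        && tok.position.0 <= pos.character + 1          // at or after token start
        && tok.position.0 + tok.len > pos.character + 1 // strictly before token end
}

fn main() {
    // A 3-character token starting at column 5 of line 1 covers columns 5..8.
    let tok = FullToken { position: (5, 1), len: 3 };
    assert!(hits(&tok, &Position { line: 0, character: 4 }));  // first char
    assert!(hits(&tok, &Position { line: 0, character: 6 }));  // last char
    assert!(!hits(&tok, &Position { line: 0, character: 7 })); // one past end
}
```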
@@ -296,7 +298,7 @@ impl LanguageServer for Backend {
         let mut locations = Vec::new();
         if let Some(reference_tokens) = reference_tokens {
             for (module_id, symbol_idx) in reference_tokens {
-                if let Some(path) = self.module_to_url.get(&module_id) {
+                if let Some(path) = self.module_to_path.get(&module_id) {
                     let url = Url::from_file_path(path.value()).unwrap();
                     if let Some(inner_analysis) = self.analysis.get(path.value()) {
                         if let Some(token_idx) = inner_analysis.state.symbol_to_token.get(&symbol_idx) {
@@ -320,48 +322,59 @@ impl LanguageServer for Backend {
         }
     }

-    // async fn rename(&self, params: RenameParams) -> jsonrpc::Result<Option<WorkspaceEdit>> {
-    //     let path = PathBuf::from(params.text_document_position.text_document.uri.path());
-    //     let analysis = self.analysis.get(&path);
-    //     let position = params.text_document_position.position;
+    async fn rename(&self, params: RenameParams) -> jsonrpc::Result<Option<WorkspaceEdit>> {
+        let path = PathBuf::from(params.text_document_position.text_document.uri.path());
+        let analysis = self.analysis.get(&path);
+        let position = params.text_document_position.position;

-    //     if let Some(analysis) = &analysis {
-    //         let token = analysis.tokens.iter().enumerate().find(|(_, tok)| {
-    //             tok.position.1 == position.line + 1
-    //                 && (tok.position.0 <= position.character + 1
-    //                     && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
-    //         });
-    //         if let Some(token) = token {
-    //             let tokens = analysis.find_references(token.0, &self.state_map()).map(|symbols| {
-    //                 symbols
-    //                     .iter()
-    //                     .map(|symbol_id| analysis.state.symbol_to_token.get(&symbol_id).cloned().unwrap())
-    //                     .collect::<Vec<_>>()
-    //             });
-    //             let mut edits = Vec::new();
-    //             if let Some(tokens) = tokens {
-    //                 for token_idx in tokens {
-    //                     let token = analysis.tokens.get(token_idx).unwrap();
-    //                     edits.push(TextEdit {
-    //                         range: token_to_range(token),
-    //                         new_text: params.new_name.clone(),
-    //                     });
-    //                 }
-    //             }
-    //             let mut changes = HashMap::new();
-    //             changes.insert(params.text_document_position.text_document.uri, edits);
-    //             Ok(Some(WorkspaceEdit {
-    //                 changes: Some(changes),
-    //                 document_changes: None,
-    //                 change_annotations: None,
-    //             }))
-    //         } else {
-    //             Ok(None)
-    //         }
-    //     } else {
-    //         Ok(None)
-    //     }
-    // }
+        if let Some(analysis) = &analysis {
+            let token = analysis.tokens.iter().enumerate().find(|(_, tok)| {
+                tok.position.1 == position.line + 1
+                    && (tok.position.0 <= position.character + 1
+                        && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
+            });
+            if let Some(token) = token {
+                let symbols = analysis.find_references(token.0, &self.state_map());
+                let mut changes: HashMap<Url, Vec<TextEdit>> = HashMap::new();
+                if let Some(symbols) = symbols {
+                    for (module_id, symbol_id) in symbols {
+                        let path = self.module_to_path.get(&module_id);
+                        if let Some(path) = path {
+                            let url = Url::from_file_path(path.value()).unwrap();
+                            let analysis = self.analysis.get(&path.clone());
+
+                            if let Some(analysis) = analysis {
+                                if let Some(token_idx) = analysis.state.symbol_to_token.get(&symbol_id) {
+                                    let token = analysis.tokens.get(*token_idx).unwrap();
+
+                                    // edits = changes.get(k)
+                                    let edit = TextEdit {
+                                        range: token_to_range(token),
+                                        new_text: params.new_name.clone(),
+                                    };
+                                    if let Some(edits) = changes.get_mut(&url) {
+                                        edits.push(edit);
+                                    } else {
+                                        changes.insert(url, vec![edit]);
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+
+                Ok(Some(WorkspaceEdit {
+                    changes: Some(changes),
+                    document_changes: None,
+                    change_annotations: None,
+                }))
+            } else {
+                Ok(None)
+            }
+        } else {
+            Ok(None)
+        }
+    }
 }

 fn token_to_range(token: &FullToken) -> lsp_types::Range {
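
Note: the key change over the commented-out draft is that edits are now grouped per file `Url` rather than collected into a single `Vec` keyed by the requesting document, since renamed references can span modules. The `get_mut`/`insert` pair could equally be written with the `HashMap` entry API; a minimal sketch, assuming tower-lsp's re-exported `lsp_types` (the path is hypothetical):

```rust
use std::collections::HashMap;

use tower_lsp::lsp_types::{Position, Range, TextEdit, Url};

fn main() {
    let mut changes: HashMap<Url, Vec<TextEdit>> = HashMap::new();
    let url = Url::from_file_path("/project/src/main.rs").unwrap(); // hypothetical
    let edit = TextEdit {
        range: Range::new(Position::new(0, 4), Position::new(0, 7)),
        new_text: "new_name".to_string(),
    };
    // entry() folds the "push, or insert a fresh Vec" branch into one call,
    // collecting every edit for the same file under a single key.
    changes.entry(url).or_default().push(edit);
    assert_eq!(changes.len(), 1);
}
```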
@@ -409,7 +422,7 @@ impl Backend {
             let module_id = lock.increment();
             drop(lock);
             self.path_to_module.insert(file_path.clone(), module_id);
-            self.module_to_url.insert(module_id, file_path.clone());
+            self.module_to_path.insert(module_id, file_path.clone());
             module_id
         };

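
Note: module ids come from a counter behind a `Mutex`, and the lock is dropped before the DashMaps are touched, keeping the critical section to the id allocation alone. A sketch of that pattern, assuming `increment` bumps the counter and returns the fresh id (the diff does not show its body):

```rust
use std::sync::Mutex;

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct SourceModuleId(u32); // simplified stand-in

impl SourceModuleId {
    // Assumed semantics: advance the counter and hand back the new id.
    fn increment(&mut self) -> SourceModuleId {
        self.0 += 1;
        *self
    }
}

fn main() {
    let module_id_counter = Mutex::new(SourceModuleId(0));
    let mut lock = module_id_counter.lock().unwrap();
    let module_id = lock.increment();
    drop(lock); // release early, as in the diff, before touching the maps
    assert_eq!(module_id, SourceModuleId(1));
}
```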
@@ -510,7 +523,7 @@ async fn main() {
     let (service, socket) = LspService::new(|client| Backend {
         client,
         analysis: DashMap::new(),
-        module_to_url: DashMap::new(),
+        module_to_path: DashMap::new(),
         path_to_module: DashMap::new(),
         module_id_counter: Mutex::new(SourceModuleId(0)),
     });