Start adding type-information to tooltips
This commit is contained in:
parent 6619f1f0a9
commit 7d3aaa143a
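
In short: the backend now keeps a per-file map from each FullToken to an
Option<TypeKind>, filled in during recompilation via the new
find_type_in_context helper, and the hover handler resolves the token under
the cursor against that map instead of echoing the raw token. The token
lookup adds 1 to the LSP's 0-based cursor position, which suggests the
lexer's Position is 1-based. A minimal, self-contained sketch of that lookup
(hypothetical Tok/token_at names, not the reid API):

    // Sketch only: mirrors the hover handler's cursor-to-token test in the diff below.
    // LSP cursor positions are 0-based; token positions are taken as 1-based.
    #[derive(Debug)]
    struct Tok {
        line: u32, // 1-based line of the token's first character
        col: u32,  // 1-based column of the token's first character
        len: u32,  // token length in characters
    }

    fn token_at(tokens: &[Tok], cursor_line: u32, cursor_col: u32) -> Option<&Tok> {
        tokens.iter().find(|t| {
            t.line == cursor_line + 1
                && t.col <= cursor_col + 1
                && t.col + t.len > cursor_col + 1
        })
    }

    fn main() {
        let tokens = vec![
            Tok { line: 1, col: 1, len: 3 }, // covers 1-based columns 1..=3
            Tok { line: 1, col: 5, len: 4 }, // covers 1-based columns 5..=8
        ];
        // A 0-based cursor at (0, 5) is 1-based column 6, i.e. inside the second token.
        println!("{:?}", token_at(&tokens, 0, 5));
    }
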
@@ -1,8 +1,10 @@
+use std::collections::HashMap;
 use std::path::PathBuf;
 
 use dashmap::DashMap;
 use reid::ast::lexer::{FullToken, Position};
-use reid::{compile_module, parse_module};
+use reid::mir::{self, Context, StructType, TypeKind};
+use reid::{compile_module, parse_module, perform_all_passes};
 use tower_lsp::jsonrpc::Result;
 use tower_lsp::lsp_types::{
     self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity,
@@ -18,6 +20,7 @@ struct Backend {
     client: Client,
     tokens: DashMap<String, Vec<FullToken>>,
     ast: DashMap<String, reid::ast::Module>,
+    types: DashMap<String, DashMap<FullToken, Option<TypeKind>>>,
 }
 
 #[tower_lsp::async_trait]
@@ -72,7 +75,7 @@ impl LanguageServer for Backend {
     async fn hover(&self, params: HoverParams) -> Result<Option<Hover>> {
         let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
         let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
-        let tokens = self.tokens.get(&file_name).unwrap();
+        let tokens = self.tokens.get(&file_name);
         let position = params.text_document_position_params.position;
 
         self.client
@@ -81,20 +84,33 @@ impl LanguageServer for Backend {
                 format!("line {}, col {}", position.line, position.character),
             )
             .await;
-        let token = tokens.iter().find(|tok| {
+        let token = if let Some(tokens) = &tokens {
+            tokens.iter().find(|tok| {
             tok.position.1 == position.line + 1
                 && (tok.position.0 <= position.character + 1
                     && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
-        });
+            })
+        } else {
+            None
+        };
+
+        let ty = if let Some(token) = token {
+            if let Some(ty) = self.types.get(&file_name).unwrap().get(token) {
+                ty.clone()
+            } else {
+                None
+            }
+        } else {
+            None
+        };
 
         Ok(Some(Hover {
-            contents: HoverContents::Scalar(MarkedString::String(format!("{:?}", token))),
+            contents: HoverContents::Scalar(MarkedString::String(format!("{:?}", ty))),
             range: None,
         }))
     }
 
     async fn did_open(&self, params: DidOpenTextDocumentParams) {
-        self.client.log_message(MessageType::INFO, "opened!").await;
         self.recompile(TextDocumentItem {
             uri: params.text_document.uri,
             language_id: params.text_document.language_id,
@@ -105,7 +121,6 @@ impl LanguageServer for Backend {
     }
 
     async fn did_change(&self, params: DidChangeTextDocumentParams) {
-        self.client.log_message(MessageType::INFO, "changed!").await;
         self.recompile(TextDocumentItem {
             text: params.content_changes[0].text.clone(),
             uri: params.text_document.uri,
@@ -124,6 +139,7 @@ impl Backend {
 
         let mut reid_error = None;
         let mut tokens = None;
+        let token_types = DashMap::new();
 
         match parse_module(&params.text, file_name.clone(), &mut map) {
             Ok(module) => {
@@ -131,14 +147,34 @@ impl Backend {
                     .log_message(MessageType::INFO, format!("successfully parsed!"))
                     .await;
                 tokens = Some(module.1.clone());
-                match compile_module(module.0, module.1, &mut map, Some(path), true) {
-                    Ok(_) => {}
+                match compile_module(module.0, module.1, &mut map, Some(path.clone()), true) {
+                    Ok(module) => {
+                        let module_id = module.module_id;
+                        let mut context = Context::from(vec![module], path.parent().unwrap().to_owned());
+                        match perform_all_passes(&mut context, &mut map) {
+                            Ok(_) => {
+                                for module in context.modules.values() {
+                                    if module.module_id != module_id {
+                                        continue;
+                                    }
+                                    for (idx, token) in module.tokens.iter().enumerate() {
+                                        token_types.insert(token.clone(), find_type_in_context(&module, idx));
+                                    }
+                                }
+                            }
                     Err(e) => {
                         reid_error = Some(e);
                     }
                 }
             }
-            Err(_) => {}
+                    Err(e) => {
+                        reid_error = Some(e);
+                    }
+                }
+            }
+            Err(e) => {
+                reid_error = Some(e);
+            }
         }
 
         if let Some(tokens) = &tokens {
@@ -151,9 +187,6 @@ impl Backend {
                 .into_position(&tokens)
                 .unwrap_or((Position(0, 0), Position(0, 0)));
             self.client.log_message(MessageType::INFO, format!("{:?}", &meta)).await;
-            self.client
-                .log_message(MessageType::INFO, format!("{:?}", &tokens))
-                .await;
             self.client
                 .log_message(MessageType::INFO, format!("{:?}", &positions))
                 .await;
@@ -193,6 +226,7 @@ impl Backend {
         if let Some(tokens) = tokens.take() {
             self.tokens.insert(file_name.clone(), tokens);
         }
+        self.types.insert(file_name.clone(), token_types);
     }
 }
 
@@ -205,6 +239,55 @@ async fn main() {
         client,
         ast: DashMap::new(),
         tokens: DashMap::new(),
+        types: DashMap::new(),
     });
     Server::new(stdin, stdout, socket).serve(service).await;
 }
+
+pub fn find_type_in_context(module: &mir::Module, token_idx: usize) -> Option<TypeKind> {
+    for import in &module.imports {
+        if import.1.contains(token_idx) {
+            return None;
+        }
+    }
+    for typedef in &module.typedefs {
+        if !typedef.meta.contains(token_idx) {
+            continue;
+        }
+
+        match &typedef.kind {
+            mir::TypeDefinitionKind::Struct(StructType(fields)) => {
+                for field in fields {
+                    if field.2.contains(token_idx) {
+                        return Some(field.1.clone());
+                    }
+                }
+            }
+        }
+    }
+
+    for function in &module.functions {
+        if !(function.signature() + function.block_meta()).contains(token_idx) {
+            continue;
+        }
+
+        for param in &function.parameters {
+            if param.meta.contains(token_idx) {
+                return Some(param.ty.clone());
+            }
+        }
+
+        return match &function.kind {
+            mir::FunctionDefinitionKind::Local(block, _) => find_type_in_block(&block, token_idx),
+            mir::FunctionDefinitionKind::Extern(_) => None,
+            mir::FunctionDefinitionKind::Intrinsic(_) => None,
+        };
+    }
+    None
+}
+
+pub fn find_type_in_block(block: &mir::Block, token_idx: usize) -> Option<TypeKind> {
+    for statement in &block.statements {}
+
+    None
+}
@@ -11,6 +11,7 @@ default = ["color"]
 
 color = ["colored"]
 log_output = []
+context_debug = []
 
 [dependencies]
 ## Make it easier to generate errors
@@ -7,7 +7,7 @@ static HEXADECIMAL_NUMERICS: &[char] = &[
     '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f',
 ];
 
-#[derive(Eq, PartialEq, Clone, PartialOrd, Ord)]
+#[derive(Eq, PartialEq, Clone, PartialOrd, Ord, Hash)]
 pub enum Token {
     /// Values
     Identifier(String),
@@ -211,7 +211,7 @@ impl std::fmt::Debug for Token {
 }
 
 /// A token with a position
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct FullToken {
     pub token: Token,
     pub position: Position,
@@ -193,7 +193,7 @@ pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub To
 pub struct FunctionSignature {
     pub name: String,
     pub self_kind: SelfKind,
-    pub params: Vec<(String, Type)>,
+    pub params: Vec<(String, Type, TokenRange)>,
     pub return_type: Option<Type>,
     #[allow(dead_code)]
     pub range: TokenRange,
@@ -668,7 +668,7 @@ impl Parse for FunctionDefinition {
 }
 
 #[derive(Debug)]
-struct FunctionParam(String, Type);
+struct FunctionParam(String, Type, TokenRange);
 
 impl Parse for FunctionParam {
     fn parse(mut stream: TokenStream) -> Result<Self, Error> {
@@ -676,7 +676,7 @@ impl Parse for FunctionParam {
             return Err(stream.expected_err("parameter name")?);
         };
         stream.expect(Token::Colon)?;
-        Ok(FunctionParam(arg_name, stream.parse()?))
+        Ok(FunctionParam(arg_name, stream.parse()?, stream.get_range().unwrap()))
     }
 }
 
@@ -738,11 +738,11 @@ impl Parse for FunctionSignature {
         match &self_kind {
             SelfKind::None => {
                 if let Ok(param) = stream.parse::<FunctionParam>() {
-                    params.push((param.0, param.1));
+                    params.push((param.0, param.1, param.2));
                     while let Some(Token::Comma) = stream.peek() {
                         stream.next();
                         let param = stream.parse::<FunctionParam>()?;
-                        params.push((param.0, param.1));
+                        params.push((param.0, param.1, param.2));
                     }
                 }
             }
@@ -750,7 +750,7 @@ impl Parse for FunctionSignature {
                 while let Some(Token::Comma) = stream.peek() {
                     stream.next();
                     let param = stream.parse::<FunctionParam>()?;
-                    params.push((param.0, param.1));
+                    params.push((param.0, param.1, param.2));
                 }
             }
         }
@@ -51,7 +51,7 @@ impl ast::Module {
                     .map(|p| mir::FunctionParam {
                         name: p.0,
                         ty: p.1 .0.into_mir(module_id),
-                        meta: p.1 .1.as_meta(module_id),
+                        meta: p.2.as_meta(module_id),
                     })
                     .collect(),
                 kind: mir::FunctionDefinitionKind::Extern(false),
@@ -164,7 +164,7 @@ impl ast::FunctionDefinition {
         params.extend(signature.params.iter().cloned().map(|p| FunctionParam {
             name: p.0,
             ty: p.1 .0.into_mir(module_id),
-            meta: p.1 .1.as_meta(module_id),
+            meta: p.2.as_meta(module_id),
         }));
         mir::FunctionDefinition {
             name: signature.name.clone(),
@@ -273,6 +273,9 @@ pub fn perform_all_passes<'map>(
         ));
     }
 
+    #[cfg(feature = "context_debug")]
+    dbg!(&context);
+
     Ok(())
 }
 
@@ -43,6 +43,18 @@ impl Metadata {
     pub fn into_positions(&self, tokens: &Vec<FullToken>) -> Option<(Position, Position)> {
         self.range.into_position(tokens)
     }
+
+    pub fn is_after(&self, token_idx: usize) -> bool {
+        return token_idx < self.range.start;
+    }
+
+    pub fn is_before(&self, token_idx: usize) -> bool {
+        return token_idx > self.range.end;
+    }
+
+    pub fn contains(&self, token_idx: usize) -> bool {
+        return token_idx >= self.range.start && token_idx <= self.range.end;
+    }
 }
 
 impl std::ops::Add for Metadata {