Make LSP use a more general analysis structure
parent dae39bc9d2
commit 8595da0c30

reid-lsp/src/analysis.rs | 294 (new file)
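For orientation, here is a minimal sketch (not part of the commit) of the shape of the per-token analysis data introduced in reid-lsp/src/analysis.rs below, and of how a consumer such as the hover handler reads it. The String fields are stand-ins for FullToken, TypeKind and ReidError so the snippet is self-contained; the function name hover_text is hypothetical.

// Sketch only: mirrors the StaticAnalysis / SemanticAnalysis structs added in this commit.
use std::collections::HashMap;

#[derive(Debug, Clone)]
struct SemanticAnalysis {
    ty: Option<String>, // stand-in for Option<TypeKind>
}

#[derive(Debug, Clone)]
struct StaticAnalysis {
    tokens: Vec<String>,                              // stand-in for Vec<FullToken>
    token_analysis: HashMap<usize, SemanticAnalysis>, // one entry per token index
    error: Option<String>,                            // stand-in for Option<ReidError>
}

// What the hover handler does with the map: look up the analysis entry for the
// token's index and print its type, falling back to a "None type" label.
fn hover_text(analysis: &StaticAnalysis, token_idx: usize) -> String {
    match analysis.token_analysis.get(&token_idx).and_then(|sem| sem.ty.clone()) {
        Some(ty) => format!("{}", ty),
        None => String::from("None type"),
    }
}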
reid-lsp/src/analysis.rs (new file)
@@ -0,0 +1,294 @@
use std::{collections::HashMap, path::PathBuf};

use dashmap::DashMap;
use reid::{
    ast::lexer::FullToken,
    compile_module,
    error_raporting::{ErrorModules, ReidError},
    mir::{self, Context, FunctionCall, IfExpression, SourceModuleId, StructType, TypeKind, WhileStatement},
    perform_all_passes,
};

use crate::CompileResult;

#[derive(Debug, Clone)]
pub struct StaticAnalysis {
    pub tokens: Vec<FullToken>,
    pub token_analysis: HashMap<usize, SemanticAnalysis>,
    pub error: Option<ReidError>,
}

#[derive(Debug, Clone)]
pub struct SemanticAnalysis {
    pub ty: Option<TypeKind>,
}

pub fn analyze(
    module_id: SourceModuleId,
    tokens: Vec<FullToken>,
    path: PathBuf,
    map: &mut ErrorModules,
) -> Result<Option<StaticAnalysis>, ReidError> {
    let mut token_analysis = HashMap::new();

    let (module, error) = match compile_module(module_id, tokens, map, Some(path.clone()), true)? {
        Ok(module) => (module, None),
        Err((m, err)) => (m.process(module_id), Some(err)),
    };

    let module_id = module.module_id;
    let mut context = Context::from(vec![module], path.parent().unwrap().to_owned());
    perform_all_passes(&mut context, map)?;

    for module in context.modules.into_values() {
        if module.module_id != module_id {
            continue;
        }
        for idx in 0..module.tokens.len() {
            token_analysis.insert(
                idx,
                SemanticAnalysis {
                    ty: find_type_in_context(&module, idx),
                },
            );
        }

        return Ok(Some(StaticAnalysis {
            tokens: module.tokens,
            token_analysis: token_analysis,
            error,
        }));
    }
    return Ok(None);
}

pub fn find_type_in_context(module: &mir::Module, token_idx: usize) -> Option<TypeKind> {
    for import in &module.imports {
        if import.1.contains(token_idx) {
            return Some(TypeKind::CustomType(mir::CustomTypeKey(
                "d".to_owned(),
                SourceModuleId(1),
            )));
        }
    }
    for typedef in &module.typedefs {
        if !typedef.meta.contains(token_idx) {
            continue;
        }

        match &typedef.kind {
            mir::TypeDefinitionKind::Struct(StructType(fields)) => {
                for field in fields {
                    if field.2.contains(token_idx) {
                        return Some(field.1.clone());
                    }
                }
            }
        }
    }

    for binop in &module.binop_defs {
        if let Some(meta) = binop.block_meta() {
            if !meta.contains(token_idx) {
                continue;
            }
        } else {
            continue;
        }

        return match &binop.fn_kind {
            mir::FunctionDefinitionKind::Local(block, _) => find_type_in_block(&block, module.module_id, token_idx),
            mir::FunctionDefinitionKind::Extern(_) => None,
            mir::FunctionDefinitionKind::Intrinsic(_) => None,
        };
    }

    for (_, function) in &module.associated_functions {
        if !(function.signature() + function.block_meta()).contains(token_idx) {
            continue;
        }

        for param in &function.parameters {
            if param.meta.contains(token_idx) {
                return Some(param.ty.clone());
            }
        }

        return match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => find_type_in_block(&block, module.module_id, token_idx),
            mir::FunctionDefinitionKind::Extern(_) => None,
            mir::FunctionDefinitionKind::Intrinsic(_) => None,
        };
    }

    for function in &module.functions {
        if !(function.signature() + function.block_meta()).contains(token_idx) {
            continue;
        }

        for param in &function.parameters {
            if param.meta.contains(token_idx) {
                return Some(param.ty.clone());
            }
        }

        return match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => find_type_in_block(&block, module.module_id, token_idx),
            mir::FunctionDefinitionKind::Extern(_) => None,
            mir::FunctionDefinitionKind::Intrinsic(_) => None,
        };
    }
    None
}

pub fn find_type_in_block(block: &mir::Block, module_id: SourceModuleId, token_idx: usize) -> Option<TypeKind> {
    if !block.meta.contains(token_idx) {
        return Some(TypeKind::Bool);
    }

    for statement in &block.statements {
        if !statement.1.contains(token_idx) {
            continue;
        }
        match &statement.0 {
            mir::StmtKind::Let(named_variable_ref, _, expression) => {
                if named_variable_ref.2.contains(token_idx) {
                    return expression
                        .return_type(&Default::default(), module_id)
                        .ok()
                        .map(|(_, ty)| ty);
                } else {
                    return find_type_in_expr(&expression, module_id, token_idx);
                }
            }
            mir::StmtKind::Set(lhs, rhs) => {
                return find_type_in_expr(lhs, module_id, token_idx).or(find_type_in_expr(rhs, module_id, token_idx));
            }
            mir::StmtKind::Import(_) => {}
            mir::StmtKind::Expression(expression) => return find_type_in_expr(expression, module_id, token_idx),
            mir::StmtKind::While(WhileStatement { condition, block, .. }) => {
                return find_type_in_expr(condition, module_id, token_idx)
                    .or(find_type_in_block(block, module_id, token_idx));
            }
        }
    }

    if let Some((_, Some(return_exp))) = &block.return_expression {
        if let Some(ty) = find_type_in_expr(return_exp, module_id, token_idx) {
            return Some(ty);
        }
    }

    None
}

pub fn find_type_in_expr(expr: &mir::Expression, module_id: SourceModuleId, token_idx: usize) -> Option<TypeKind> {
    if !expr.1.contains(token_idx) {
        return None;
    }

    match &expr.0 {
        mir::ExprKind::Variable(named_variable_ref) => Some(named_variable_ref.0.clone()),
        mir::ExprKind::Indexed(value, type_kind, index_expr) => Some(
            find_type_in_expr(&value, module_id, token_idx)
                .or(find_type_in_expr(&index_expr, module_id, token_idx))
                .unwrap_or(type_kind.clone()),
        ),
        mir::ExprKind::Accessed(expression, type_kind, _, meta) => {
            if meta.contains(token_idx) {
                Some(type_kind.clone())
            } else {
                find_type_in_expr(&expression, module_id, token_idx)
            }
        }
        mir::ExprKind::Array(expressions) => {
            for expr in expressions {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            None
        }
        mir::ExprKind::Struct(name, items) => {
            for (_, expr, meta) in items {
                if meta.contains(token_idx) {
                    return expr.return_type(&Default::default(), module_id).map(|(_, t)| t).ok();
                }
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(TypeKind::CustomType(mir::CustomTypeKey(name.clone(), module_id)))
        }
        mir::ExprKind::Literal(literal) => Some(literal.as_type()),
        mir::ExprKind::BinOp(_, lhs, rhs, type_kind) => {
            if let Some(ty) = find_type_in_expr(lhs, module_id, token_idx) {
                return Some(ty);
            }
            if let Some(ty) = find_type_in_expr(rhs, module_id, token_idx) {
                return Some(ty);
            }
            Some(type_kind.clone())
        }
        mir::ExprKind::FunctionCall(FunctionCall {
            return_type,
            parameters,
            ..
        }) => {
            for expr in parameters {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(return_type.clone())
        }
        mir::ExprKind::AssociatedFunctionCall(
            _,
            FunctionCall {
                return_type,
                parameters,
                ..
            },
        ) => {
            for expr in parameters {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(return_type.clone())
        }
        mir::ExprKind::If(IfExpression(cond, then_e, else_e)) => find_type_in_expr(&cond, module_id, token_idx)
            .or(find_type_in_expr(&then_e, module_id, token_idx))
            .or(else_e.clone().and_then(|e| find_type_in_expr(&e, module_id, token_idx))),
        mir::ExprKind::Block(block) => find_type_in_block(block, module_id, token_idx),
        mir::ExprKind::Borrow(expression, mutable) => {
            if let Some(ty) = find_type_in_expr(&expression, module_id, token_idx) {
                return Some(ty);
            }
            if let Ok(inner) = expression.return_type(&Default::default(), module_id).map(|(_, ty)| ty) {
                Some(TypeKind::Borrow(Box::new(inner.clone()), *mutable))
            } else {
                None
            }
        }
        mir::ExprKind::Deref(expression) => {
            if let Some(ty) = find_type_in_expr(&expression, module_id, token_idx) {
                return Some(ty);
            }
            if let Ok(TypeKind::Borrow(inner, _)) =
                expression.return_type(&Default::default(), module_id).map(|(_, ty)| ty)
            {
                Some(*inner.clone())
            } else {
                Some(TypeKind::CustomType(mir::CustomTypeKey(
                    "ä".to_owned(),
                    SourceModuleId(1),
                )))
            }
        }
        mir::ExprKind::CastTo(expression, type_kind) => {
            Some(find_type_in_expr(&expression, module_id, token_idx).unwrap_or(type_kind.clone()))
        }
        mir::ExprKind::GlobalRef(_, type_kind) => Some(type_kind.clone()),
    }
}
@@ -1,29 +1,28 @@
use std::collections::HashMap;
use std::path::PathBuf;

use dashmap::DashMap;
use reid::ast::lexer::{FullToken, Position};
use reid::error_raporting::{ErrorModules, ReidError};
use reid::mir::{
    self, Context, FunctionCall, FunctionDefinition, FunctionParam, IfExpression, SourceModuleId, StructType, TypeKind,
    WhileStatement,
};
use reid::{compile_module, parse_module, perform_all_passes};
use reid::error_raporting::{self, ErrorModules, ReidError};
use reid::mir::{SourceModuleId, TypeKind};
use reid::parse_module;
use tower_lsp::lsp_types::{
    self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity,
    DidChangeTextDocumentParams, DidOpenTextDocumentParams, Hover, HoverContents, HoverParams, HoverProviderCapability,
    InitializeParams, InitializeResult, InitializedParams, MarkedString, MarkupContent, MarkupKind, MessageType, OneOf,
    Range, ServerCapabilities, TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind,
    TextDocumentSyncOptions, WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
    InitializeParams, InitializeResult, InitializedParams, MarkupContent, MarkupKind, MessageType, OneOf, Range,
    ServerCapabilities, TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
    WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
};
use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc};

use crate::analysis::{StaticAnalysis, analyze};

mod analysis;

#[derive(Debug)]
struct Backend {
    client: Client,
    tokens: DashMap<String, Vec<FullToken>>,
    analysis: DashMap<String, StaticAnalysis>,
    ast: DashMap<String, reid::ast::Module>,
    types: DashMap<String, DashMap<FullToken, Option<TypeKind>>>,
}

#[tower_lsp::async_trait]
@@ -78,11 +77,11 @@ impl LanguageServer for Backend {
    async fn hover(&self, params: HoverParams) -> jsonrpc::Result<Option<Hover>> {
        let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
        let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
        let tokens = self.tokens.get(&file_name);
        let analysis = self.analysis.get(&file_name);
        let position = params.text_document_position_params.position;

        let token = if let Some(tokens) = &tokens {
            tokens.iter().find(|tok| {
        let token = if let Some(analysis) = &analysis {
            analysis.tokens.iter().enumerate().find(|(_, tok)| {
                tok.position.1 == position.line + 1
                    && (tok.position.0 <= position.character + 1
                        && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
@@ -91,8 +90,8 @@ impl LanguageServer for Backend {
            None
        };

        let (range, ty) = if let Some(token) = token {
            if let Some(possible_ty) = self.types.get(&file_name).unwrap().get(token) {
        let (range, ty) = if let Some((idx, token)) = token {
            if let Some(possible_ty) = self.analysis.get(&file_name).unwrap().token_analysis.get(&idx) {
                let start = token.position;
                let end = token.position.add(token.token.len() as u32);
                let range = Range {
@@ -105,7 +104,7 @@ impl LanguageServer for Backend {
                        character: (end.0 as i32 - 1).max(0) as u32,
                    },
                };
                if let Some(ty) = possible_ty.clone() {
                if let Some(ty) = possible_ty.ty.clone() {
                    (Some(range), format!("{}", ty))
                } else {
                    (Some(range), String::from("None type"))
@@ -154,50 +153,33 @@ impl Backend {
        let mut map = Default::default();
        let parse_res = parse(&params.text, path.clone(), &mut map);
        let (tokens, result) = match parse_res {
            Ok((module_id, tokens)) => (tokens.clone(), compile(module_id, tokens, path, &mut map)),
            Ok((module_id, tokens)) => (tokens.clone(), analyze(module_id, tokens, path, &mut map)),
            Err(e) => (Vec::new(), Err(e)),
        };

        let mut diagnostics = Vec::new();
        match result {
            Ok(Some(result)) => {
                self.tokens.insert(file_name.clone(), result.tokens);
                self.types.insert(file_name.clone(), result.types);
            Ok(Some(mut analysis)) => {
                if let Some(reid_error) = &mut analysis.error {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!("Successfully compiled despite parsing errors!"),
                        )
                        .await;
                    reid_error.errors.dedup();
                    for error in &reid_error.errors {
                        diagnostics.push(reid_error_into_diagnostic(error, &tokens));
                        self.client.log_message(MessageType::INFO, format!("{}", error)).await;
                    }
                }
                self.analysis.insert(file_name.clone(), analysis);
            }
            Ok(_) => {}
            Err(mut reid_error) => {
                reid_error.errors.dedup();
                for error in reid_error.errors {
                    let meta = error.get_meta();
                    let positions = meta
                        .range
                        .into_position(&tokens)
                        .unwrap_or((Position(0, 0), Position(0, 0)));
                    self.client.log_message(MessageType::INFO, format!("{:?}", &meta)).await;
                    self.client
                        .log_message(MessageType::INFO, format!("{:?}", &positions))
                        .await;

                    diagnostics.push(Diagnostic {
                        range: Range {
                            start: lsp_types::Position {
                                line: ((positions.0.1 as i32) - 1).max(0) as u32,
                                character: ((positions.0.0 as i32) - 1).max(0) as u32,
                            },
                            end: lsp_types::Position {
                                line: ((positions.1.1 as i32) - 1).max(0) as u32,
                                character: ((positions.1.0 as i32) - 1).max(0) as u32,
                            },
                        },
                        severity: Some(DiagnosticSeverity::ERROR),
                        code: None,
                        code_description: None,
                        source: Some(error.get_type_str().to_owned()),
                        message: format!("{}", error),
                        related_information: None,
                        tags: None,
                        data: None,
                    });
                for error in &reid_error.errors {
                    diagnostics.push(reid_error_into_diagnostic(error, &tokens));
                    self.client.log_message(MessageType::INFO, format!("{}", error)).await;
                }
            }
@@ -209,6 +191,35 @@ impl Backend {
        }
    }
}

fn reid_error_into_diagnostic(error: &error_raporting::ErrorKind, tokens: &Vec<FullToken>) -> Diagnostic {
    let meta = error.get_meta();
    let positions = meta
        .range
        .into_position(&tokens)
        .unwrap_or((Position(0, 0), Position(0, 0)));

    Diagnostic {
        range: Range {
            start: lsp_types::Position {
                line: ((positions.0.1 as i32) - 1).max(0) as u32,
                character: ((positions.0.0 as i32) - 1).max(0) as u32,
            },
            end: lsp_types::Position {
                line: ((positions.1.1 as i32) - 1).max(0) as u32,
                character: ((positions.1.0 as i32) - 1).max(0) as u32,
            },
        },
        severity: Some(DiagnosticSeverity::ERROR),
        code: None,
        code_description: None,
        source: Some(error.get_type_str().to_owned()),
        message: format!("{}", error),
        related_information: None,
        tags: None,
        data: None,
    }
}

struct CompileResult {
    tokens: Vec<FullToken>,
    types: DashMap<FullToken, Option<TypeKind>>,
@@ -220,36 +231,6 @@ fn parse(source: &str, path: PathBuf, map: &mut ErrorModules) -> Result<(SourceM
    Ok(parse_module(source, file_name.clone(), map)?)
}

fn compile(
    module_id: SourceModuleId,
    tokens: Vec<FullToken>,
    path: PathBuf,
    map: &mut ErrorModules,
) -> Result<Option<CompileResult>, ReidError> {
    let token_types = DashMap::new();

    let module = compile_module(module_id, tokens, map, Some(path.clone()), true)?;

    let module_id = module.module_id;
    let mut context = Context::from(vec![module], path.parent().unwrap().to_owned());
    perform_all_passes(&mut context, map)?;

    for module in context.modules.into_values() {
        if module.module_id != module_id {
            continue;
        }
        for (idx, token) in module.tokens.iter().enumerate() {
            token_types.insert(token.clone(), find_type_in_context(&module, idx));
        }

        return Ok(Some(CompileResult {
            tokens: module.tokens,
            types: token_types,
        }));
    }
    return Ok(None);
}

#[tokio::main]
async fn main() {
    let stdin = tokio::io::stdin();
@@ -258,239 +239,7 @@ async fn main() {
    let (service, socket) = LspService::new(|client| Backend {
        client,
        ast: DashMap::new(),
        tokens: DashMap::new(),
        types: DashMap::new(),
        analysis: DashMap::new(),
    });
    Server::new(stdin, stdout, socket).serve(service).await;
}

pub fn find_type_in_context(module: &mir::Module, token_idx: usize) -> Option<TypeKind> {
    for import in &module.imports {
        if import.1.contains(token_idx) {
            return Some(TypeKind::CustomType(mir::CustomTypeKey(
                "d".to_owned(),
                SourceModuleId(1),
            )));
        }
    }
    for typedef in &module.typedefs {
        if !typedef.meta.contains(token_idx) {
            continue;
        }

        match &typedef.kind {
            mir::TypeDefinitionKind::Struct(StructType(fields)) => {
                for field in fields {
                    if field.2.contains(token_idx) {
                        return Some(field.1.clone());
                    }
                }
            }
        }
    }

    for binop in &module.binop_defs {
        if let Some(meta) = binop.block_meta() {
            if !meta.contains(token_idx) {
                continue;
            }
        } else {
            continue;
        }

        return match &binop.fn_kind {
            mir::FunctionDefinitionKind::Local(block, _) => find_type_in_block(&block, module.module_id, token_idx),
            mir::FunctionDefinitionKind::Extern(_) => None,
            mir::FunctionDefinitionKind::Intrinsic(_) => None,
        };
    }

    for (_, function) in &module.associated_functions {
        if !(function.signature() + function.block_meta()).contains(token_idx) {
            continue;
        }

        for param in &function.parameters {
            if param.meta.contains(token_idx) {
                return Some(param.ty.clone());
            }
        }

        return match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => find_type_in_block(&block, module.module_id, token_idx),
            mir::FunctionDefinitionKind::Extern(_) => None,
            mir::FunctionDefinitionKind::Intrinsic(_) => None,
        };
    }

    for function in &module.functions {
        if !(function.signature() + function.block_meta()).contains(token_idx) {
            continue;
        }

        for param in &function.parameters {
            if param.meta.contains(token_idx) {
                return Some(param.ty.clone());
            }
        }

        return match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => find_type_in_block(&block, module.module_id, token_idx),
            mir::FunctionDefinitionKind::Extern(_) => None,
            mir::FunctionDefinitionKind::Intrinsic(_) => None,
        };
    }
    None
}

pub fn find_type_in_block(block: &mir::Block, module_id: SourceModuleId, token_idx: usize) -> Option<TypeKind> {
    if !block.meta.contains(token_idx) {
        return Some(TypeKind::Bool);
    }

    for statement in &block.statements {
        if !statement.1.contains(token_idx) {
            continue;
        }
        match &statement.0 {
            mir::StmtKind::Let(named_variable_ref, _, expression) => {
                if named_variable_ref.2.contains(token_idx) {
                    return expression
                        .return_type(&Default::default(), module_id)
                        .ok()
                        .map(|(_, ty)| ty);
                } else {
                    return find_type_in_expr(&expression, module_id, token_idx);
                }
            }
            mir::StmtKind::Set(lhs, rhs) => {
                return find_type_in_expr(lhs, module_id, token_idx).or(find_type_in_expr(rhs, module_id, token_idx));
            }
            mir::StmtKind::Import(_) => {}
            mir::StmtKind::Expression(expression) => return find_type_in_expr(expression, module_id, token_idx),
            mir::StmtKind::While(WhileStatement { condition, block, .. }) => {
                return find_type_in_expr(condition, module_id, token_idx)
                    .or(find_type_in_block(block, module_id, token_idx));
            }
        }
    }

    if let Some((_, Some(return_exp))) = &block.return_expression {
        if let Some(ty) = find_type_in_expr(return_exp, module_id, token_idx) {
            return Some(ty);
        }
    }

    None
}

pub fn find_type_in_expr(expr: &mir::Expression, module_id: SourceModuleId, token_idx: usize) -> Option<TypeKind> {
    if !expr.1.contains(token_idx) {
        return None;
    }

    match &expr.0 {
        mir::ExprKind::Variable(named_variable_ref) => Some(named_variable_ref.0.clone()),
        mir::ExprKind::Indexed(value, type_kind, index_expr) => Some(
            find_type_in_expr(&value, module_id, token_idx)
                .or(find_type_in_expr(&index_expr, module_id, token_idx))
                .unwrap_or(type_kind.clone()),
        ),
        mir::ExprKind::Accessed(expression, type_kind, _, meta) => {
            if meta.contains(token_idx) {
                Some(type_kind.clone())
            } else {
                find_type_in_expr(&expression, module_id, token_idx)
            }
        }
        mir::ExprKind::Array(expressions) => {
            for expr in expressions {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            None
        }
        mir::ExprKind::Struct(name, items) => {
            for (_, expr, meta) in items {
                if meta.contains(token_idx) {
                    return expr.return_type(&Default::default(), module_id).map(|(_, t)| t).ok();
                }
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(TypeKind::CustomType(mir::CustomTypeKey(name.clone(), module_id)))
        }
        mir::ExprKind::Literal(literal) => Some(literal.as_type()),
        mir::ExprKind::BinOp(_, lhs, rhs, type_kind) => {
            if let Some(ty) = find_type_in_expr(lhs, module_id, token_idx) {
                return Some(ty);
            }
            if let Some(ty) = find_type_in_expr(rhs, module_id, token_idx) {
                return Some(ty);
            }
            Some(type_kind.clone())
        }
        mir::ExprKind::FunctionCall(FunctionCall {
            return_type,
            parameters,
            ..
        }) => {
            for expr in parameters {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(return_type.clone())
        }
        mir::ExprKind::AssociatedFunctionCall(
            _,
            FunctionCall {
                return_type,
                parameters,
                ..
            },
        ) => {
            for expr in parameters {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(return_type.clone())
        }
        mir::ExprKind::If(IfExpression(cond, then_e, else_e)) => find_type_in_expr(&cond, module_id, token_idx)
            .or(find_type_in_expr(&then_e, module_id, token_idx))
            .or(else_e.clone().and_then(|e| find_type_in_expr(&e, module_id, token_idx))),
        mir::ExprKind::Block(block) => find_type_in_block(block, module_id, token_idx),
        mir::ExprKind::Borrow(expression, mutable) => {
            if let Some(ty) = find_type_in_expr(&expression, module_id, token_idx) {
                return Some(ty);
            }
            if let Ok(inner) = expression.return_type(&Default::default(), module_id).map(|(_, ty)| ty) {
                Some(TypeKind::Borrow(Box::new(inner.clone()), *mutable))
            } else {
                None
            }
        }
        mir::ExprKind::Deref(expression) => {
            if let Some(ty) = find_type_in_expr(&expression, module_id, token_idx) {
                return Some(ty);
            }
            if let Ok(TypeKind::Borrow(inner, _)) =
                expression.return_type(&Default::default(), module_id).map(|(_, ty)| ty)
            {
                Some(*inner.clone())
            } else {
                Some(TypeKind::CustomType(mir::CustomTypeKey(
                    "ä".to_owned(),
                    SourceModuleId(1),
                )))
            }
        }
        mir::ExprKind::CastTo(expression, type_kind) => {
            Some(find_type_in_expr(&expression, module_id, token_idx).unwrap_or(type_kind.clone()))
        }
        mir::ExprKind::GlobalRef(_, type_kind) => Some(type_kind.clone()),
    }
}
@@ -610,7 +610,7 @@ impl Parse for LetStatement {
        stream.expect(Token::Equals)?;

        let expression = stream.parse()?;
        stream.expect(Token::Semi)?;
        stream.expect_nonfatal(Token::Semi);
        Ok(LetStatement {
            name: variable,
            ty,
@@ -643,7 +643,7 @@ impl Parse for ImportStatement {
            Err(stream.expected_err("identifier")?)?
        }

        stream.expect(Token::Semi)?;
        stream.expect_nonfatal(Token::Semi);

        Ok(ImportStatement(import_list, stream.get_range().unwrap()))
    }
@@ -788,7 +788,7 @@ impl Parse for Block {
                    // if semicolon is missing.
                    if !matches!(e, Expression(ExpressionKind::IfExpr(_), _)) {
                        // In theory could ignore the missing semicolon..
                        return Err(stream.expected_err("semicolon to complete statement")?);
                        stream.expected_err_nonfatal("semicolon to complete statement");
                    }

                    statements.push(BlockLevelStatement::Expression(e));
@@ -997,7 +997,7 @@ impl Parse for SetStatement {
        let var_ref = stream.parse()?;
        stream.expect(Token::Equals)?;
        let expr = stream.parse()?;
        stream.expect(Token::Semi)?;
        stream.expect_nonfatal(Token::Semi);
        Ok(SetStatement(var_ref, expr, stream.get_range().unwrap()))
    }
}
@@ -1038,7 +1038,7 @@ impl Parse for TopLevelStatement {
                stream.next(); // Consume Extern
                stream.expect(Token::FnKeyword)?;
                let extern_fn = Stmt::ExternFunction(stream.parse()?);
                stream.expect(Token::Semi)?;
                stream.expect_nonfatal(Token::Semi);
                extern_fn
            }
            Some(Token::FnKeyword) | Some(Token::PubKeyword) => Stmt::FunctionDefinition(stream.parse()?),
@@ -1,6 +1,8 @@
//! Contains relevant code for parsing tokens received from
//! Lexing/Tokenizing-stage.

use std::{cell::RefCell, rc::Rc};

use crate::{
    ast::parse::Parse,
    lexer::{FullToken, Token},
@@ -12,6 +14,7 @@ use crate::{
pub struct TokenStream<'a, 'b> {
    ref_position: Option<&'b mut usize>,
    tokens: &'a [FullToken],
    errors: Rc<RefCell<Vec<Error>>>,
    pub position: usize,
}

@@ -20,6 +23,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
        TokenStream {
            ref_position: None,
            tokens,
            errors: Rc::new(RefCell::new(Vec::new())),
            position: 0,
        }
    }
@@ -38,6 +42,16 @@ impl<'a, 'b> TokenStream<'a, 'b> {
        ))
    }

    /// Returns expected-error for the next token in-line. Useful in conjunction
    /// with [`TokenStream::peek`]
    pub fn expected_err_nonfatal<T: Into<String>>(&mut self, expected: T) {
        let err = match self.expected_err(expected) {
            Ok(e) => e,
            Err(e) => e,
        };
        self.errors.borrow_mut().push(err);
    }

    /// Returns expected-error for the previous token that was already consumed.
    /// Useful in conjunction with [`TokenStream::next`]
    pub fn expecting_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> {
@@ -50,6 +64,16 @@ impl<'a, 'b> TokenStream<'a, 'b> {
        ))
    }

    /// Returns expected-error for the previous token that was already consumed.
    /// Useful in conjunction with [`TokenStream::next`]
    pub fn expecting_err_nonfatal<T: Into<String>>(&mut self, expected: T) {
        let err = match self.expecting_err(expected) {
            Ok(e) => e,
            Err(e) => e,
        };
        self.errors.borrow_mut().push(err);
    }

    pub fn expect(&mut self, token: Token) -> Result<(), Error> {
        if let (pos, Some(peeked)) = self.next_token(self.position) {
            if token == peeked.token {
@@ -63,6 +87,18 @@ impl<'a, 'b> TokenStream<'a, 'b> {
        }
    }

    pub fn expect_nonfatal(&mut self, token: Token) {
        if let (pos, Some(peeked)) = self.next_token(self.position) {
            if token == peeked.token {
                self.position = pos + 1;
            } else {
                self.expecting_err_nonfatal(token);
            }
        } else {
            self.expecting_err_nonfatal(token);
        }
    }

    pub fn next(&mut self) -> Option<Token> {
        let (position, token) = self.next_token(self.position);
        self.position = position + 1;
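Aside (not part of the diff): a self-contained sketch of the error-collection pattern that expect_nonfatal and expecting_err_nonfatal above implement. MiniStream and its fields are hypothetical stand-ins for the real TokenStream, Token and Error types in reid.

#[derive(Debug, Clone, PartialEq)]
enum Token {
    Semi,
}

#[derive(Default)]
struct MiniStream {
    tokens: Vec<Token>,
    position: usize,
    errors: Vec<String>, // collected non-fatal parse errors
}

impl MiniStream {
    // Fatal variant: the caller must bail out when the expected token is missing.
    fn expect(&mut self, token: Token) -> Result<(), String> {
        if self.tokens.get(self.position) == Some(&token) {
            self.position += 1;
            Ok(())
        } else {
            Err(format!("expected {:?}", token))
        }
    }

    // Non-fatal variant: record the error and keep parsing, so a partial
    // module can still be produced (and surfaced as diagnostics by the LSP).
    fn expect_nonfatal(&mut self, token: Token) {
        if self.tokens.get(self.position) == Some(&token) {
            self.position += 1;
        } else {
            self.errors.push(format!("expected {:?}", token));
        }
    }
}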
@@ -147,6 +183,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
        let clone = TokenStream {
            ref_position: Some(&mut ref_pos),
            tokens: self.tokens,
            errors: self.errors.clone(),
            position,
        };

@@ -197,6 +234,10 @@ impl<'a, 'b> TokenStream<'a, 'b> {
        }
        (from, self.tokens.get(from))
    }

    pub fn errors(&self) -> Vec<Error> {
        self.errors.borrow().clone().clone()
    }
}

impl Drop for TokenStream<'_, '_> {
@@ -1326,8 +1326,6 @@ impl mir::Expression {
            (val.1.clone(), type_kind.clone())
        };

        dbg!(&ty, type_kind);

        match (&ty, type_kind) {
            (TypeKind::UserPtr(_), TypeKind::UserPtr(_)) => Some(StackValue(
                val.0.derive(
@@ -105,7 +105,7 @@ pub fn compile_module<'map>(
    map: &'map mut ErrorModules,
    path: Option<PathBuf>,
    is_main: bool,
) -> Result<mir::Module, ReidError> {
) -> Result<Result<mir::Module, (ast::Module, ReidError)>, ReidError> {
    let module = map.module(&module_id).cloned().unwrap();

    let mut token_stream = TokenStream::from(&tokens);
@@ -117,6 +117,8 @@ pub fn compile_module<'map>(
        statements.push(statement);
    }

    let errors = token_stream.errors();

    drop(token_stream);

    let ast_module = ast::Module {
@@ -127,11 +129,33 @@ pub fn compile_module<'map>(
        is_main,
    };

    if errors.len() > 0 {
        return Ok(Err((
            ast_module,
            ReidError::from_kind(
                errors
                    .into_iter()
                    .map(|e| {
                        error_raporting::ErrorKind::from(mir::pass::Error {
                            metadata: mir::Metadata {
                                source_module_id: module_id,
                                range: *e.get_range().unwrap_or(&Default::default()),
                                position: None,
                            },
                            kind: e,
                        })
                    })
                    .collect(),
                map.clone(),
            ),
        )));
    }

    #[cfg(debug_assertions)]
    #[cfg(feature = "log_output")]
    dbg!(&ast_module);

    Ok(ast_module.process(module_id))
    Ok(Ok(ast_module.process(module_id)))
}

pub fn perform_all_passes<'map>(
@@ -293,7 +317,7 @@ pub fn compile_and_pass<'map>(
    let name = path.file_name().unwrap().to_str().unwrap().to_owned();

    let (id, tokens) = parse_module(source, name, module_map)?;
    let module = compile_module(id, tokens, module_map, Some(path.clone()), true)?;
    let module = compile_module(id, tokens, module_map, Some(path.clone()), true)?.map_err(|(_, e)| e)?;

    let mut mir_context = mir::Context::from(vec![module], path.parent().unwrap().to_owned());

@@ -52,7 +52,7 @@ pub enum ErrorKind {

pub fn compile_std(module_map: &mut ErrorModules) -> Result<Module, ReidError> {
    let (id, tokens) = parse_module(STD_SOURCE, STD_NAME, module_map)?;
    let module = compile_module(id, tokens, module_map, None, false)?;
    let module = compile_module(id, tokens, module_map, None, false)?.map_err(|(_, e)| e)?;

    let module_id = module.module_id;
    let mut mir_context = super::Context::from(vec![module], Default::default());
@@ -156,21 +156,33 @@ impl<'map> Pass for LinkerPass<'map> {
            };

            match compile_module(id, tokens, &mut self.module_map, Some(file_path), false) {
                Ok(imported_module) => {
                    if imported_module.is_main {
                Ok(res) => match res {
                    Ok(imported_module) => {
                        if imported_module.is_main {
                            state.ok::<_, Infallible>(
                                Err(ErrorKind::TriedLinkingMain(module_name.clone())),
                                import.1,
                            );
                            continue;
                        }
                        let module_id = imported_module.module_id;
                        module_ids.insert(imported_module.name.clone(), imported_module.module_id);
                        modules.insert(module_id, Rc::new(RefCell::new(imported_module)));
                        let imported = modules.get_mut(&module_id).unwrap();
                        modules_to_process.push(imported.clone());
                        imported
                    }
                    Err((_, err)) => {
                        state.ok::<_, Infallible>(
                            Err(ErrorKind::TriedLinkingMain(module_name.clone())),
                            Err(ErrorKind::ModuleCompilationError(
                                module_name.clone(),
                                format!("{}", err),
                            )),
                            import.1,
                        );
                        continue;
                    }
                    let module_id = imported_module.module_id;
                    module_ids.insert(imported_module.name.clone(), imported_module.module_id);
                    modules.insert(module_id, Rc::new(RefCell::new(imported_module)));
                    let imported = modules.get_mut(&module_id).unwrap();
                    modules_to_process.push(imported.clone());
                    imported
                }
                },
                Err(err) => {
                    state.ok::<_, Infallible>(
                        Err(ErrorKind::ModuleCompilationError(
@@ -16,7 +16,7 @@ fn test_compile(source: &str, name: &str) -> CompileOutput {
    let mut map = Default::default();
    let (id, tokens) = assert_err(parse_module(source, name, &mut map));

    let module = assert_err(compile_module(id, tokens, &mut map, None, true));
    let module = assert_err(assert_err(compile_module(id, tokens, &mut map, None, true)).map_err(|(_, e)| e));
    let mut mir_context = mir::Context::from(vec![module], Default::default());
    assert_err(perform_all_passes(&mut mir_context, &mut map));