Compare commits

..

No commits in common. "101ee2d8e5865756c7f442990f3635781ce405ba" and "658450993a841f718d6f751f2ba0a4c58db2a16a" have entirely different histories.

20 changed files with 385 additions and 732 deletions

View File

@ -1,12 +1,13 @@
// Entry point: checks that the 12th Fibonacci number is 144.
// (Reconstructed from a garbled diff view: stray single-character
// artifacts from the rendered page were removed.)
fn main() -> bool {
    return 144 == fibonacci(0xc);
}

/// Naive recursive Fibonacci.
/// Returns 1 for `value <= 2`, so the sequence is 1, 1, 2, 3, 5, ...
fn fibonacci(value: u16) -> u16 {
    if value <= 2 {
        return 1;
    }
    fibonacci(value - 1) + fibonacci(value - 2)
}

View File

@ -36,7 +36,6 @@ export function activate(context: ExtensionContext) {
env: { env: {
...process.env, ...process.env,
RUST_LOG: "debug", RUST_LOG: "debug",
RUST_BACKTRACE: 1,
} }
} }
}; };

View File

@ -1,384 +0,0 @@
use std::{collections::HashMap, fmt::format, path::PathBuf};
use reid::{
ast::{self, FunctionDefinition, lexer::FullToken, token_stream::TokenRange},
codegen::intrinsics::get_intrinsic_assoc_functions,
compile_module,
error_raporting::{ErrorModules, ReidError},
mir::{
self, Context, FunctionCall, FunctionParam, IfExpression, SourceModuleId, StructType, TypeKind, WhileStatement,
typecheck::typerefs::TypeRefs,
},
perform_all_passes,
};
/// Maps a token index (its position in the module's token stream) to the
/// semantic information discovered for that token.
type TokenAnalysisMap = HashMap<usize, SemanticAnalysis>;

/// Full static-analysis result for one source module.
#[derive(Debug, Clone)]
pub struct StaticAnalysis {
    /// All lexed tokens of the module, in source order.
    pub tokens: Vec<FullToken>,
    /// Per-token semantic info, keyed by token index.
    pub token_analysis: TokenAnalysisMap,
    /// Error produced during parsing or later passes, if any; analysis data
    /// may still be present (partial) alongside an error.
    pub error: Option<ReidError>,
}
/// Semantic information attached to a single token.
#[derive(Debug, Clone)]
pub struct SemanticAnalysis {
    /// Type of the expression covering this token, when one could be resolved.
    pub ty: Option<TypeKind>,
    /// Completion candidates offered when the cursor is at this token.
    pub autocomplete: Vec<Autocomplete>,
}
/// A single completion candidate shown to the editor.
#[derive(Debug, Clone)]
pub struct Autocomplete {
    /// Text inserted/displayed for the completion.
    pub text: String,
    /// What kind of item this completes to (type, field, or function).
    pub kind: AutocompleteKind,
}
/// Category of a completion candidate, carrying enough type information to
/// render a detail string for it.
#[derive(Debug, Clone)]
pub enum AutocompleteKind {
    /// A type name (e.g. a struct from an imported module).
    Type,
    /// A struct field, with the field's type.
    Field(TypeKind),
    /// A function, with its parameter list and return type.
    Function(Vec<FunctionParam>, TypeKind),
}
/// Renders the completion detail text: `"type"` for types,
/// `"(a: T, b: U) -> R"` for functions, and the field's type for fields.
///
/// Implemented as `Display` rather than a direct `ToString` impl (the
/// idiomatic form; `to_string()` is still available to callers through the
/// blanket `impl<T: Display> ToString for T`).
impl std::fmt::Display for AutocompleteKind {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            AutocompleteKind::Type => write!(f, "type"),
            AutocompleteKind::Function(params, ret_ty) => {
                // Render each parameter as "name: type", comma-separated.
                let params = params
                    .iter()
                    .map(|p| format!("{}: {}", p.name, p.ty))
                    .collect::<Vec<_>>();
                write!(f, "({}) -> {}", params.join(", "), ret_ty)
            }
            AutocompleteKind::Field(type_kind) => write!(f, "{}", type_kind),
        }
    }
}
/// Parse and compile a single module, then run static analysis over it.
///
/// Returns `Ok(None)` when the compiled context no longer contains the
/// requested module. Parse/pass errors are folded into the returned
/// `StaticAnalysis` instead of aborting, so partial results survive.
pub fn analyze(
    module_id: SourceModuleId,
    tokens: Vec<FullToken>,
    path: PathBuf,
    map: &mut ErrorModules,
) -> Result<Option<StaticAnalysis>, ReidError> {
    // Parse the module, keeping any recoverable parse error around.
    let (module, mut parse_error) = match compile_module(module_id, tokens, map, Some(path.clone()), true)? {
        Ok(module) => (module, None),
        Err((m, err)) => (m.process(module_id), Some(err)),
    };
    let module_id = module.module_id;
    let mut context = Context::from(vec![module], path.parent().unwrap().to_owned());

    // Run the remaining compiler passes, merging any pass error into the
    // parse error so the caller sees a single combined error value.
    if let Err(pass_error) = perform_all_passes(&mut context, map) {
        match &mut parse_error {
            Some(err) => err.extend(pass_error),
            None => parse_error = Some(pass_error),
        }
    }

    // Analyze only the module we were asked about.
    Ok(context
        .modules
        .values()
        .find(|m| m.module_id == module_id)
        .map(|module| analyze_context(&context, module, parse_error)))
}
/// Record `ty` as the semantic type for every token index covered by `meta`,
/// resetting any autocompletion data previously stored for those tokens.
pub fn init_types(map: &mut TokenAnalysisMap, meta: &mir::Metadata, ty: Option<TypeKind>) {
    let covered = meta.range.start..=meta.range.end;
    for token_idx in covered {
        let analysis = SemanticAnalysis {
            ty: ty.clone(),
            autocomplete: Vec::new(),
        };
        map.insert(token_idx, analysis);
    }
}
/// Store `autocomplete` candidates at `token_idx`, creating an entry with no
/// type information if the token has none yet.
///
/// Uses the map entry API so the key is looked up once, and moves the
/// by-value `Vec` into place instead of cloning it (the original performed a
/// second lookup and cloned the vector on both branches).
pub fn set_autocomplete(map: &mut TokenAnalysisMap, token_idx: usize, autocomplete: Vec<Autocomplete>) {
    map.entry(token_idx)
        .or_insert_with(|| SemanticAnalysis {
            ty: None,
            autocomplete: Vec::new(),
        })
        .autocomplete = autocomplete;
}
/// Build the per-token analysis map for `module`: import completions, struct
/// field types, and the types inside every function body.
/// `error` is carried through unchanged into the returned `StaticAnalysis`.
pub fn analyze_context(context: &mir::Context, module: &mir::Module, error: Option<ReidError>) -> StaticAnalysis {
    let mut map = HashMap::new();
    // Imports: offer completions for items of the imported module at the
    // position of the (possibly still-empty) imported-item segment.
    for import in &module.imports {
        init_types(&mut map, &import.1, None);
        if let Some((module_name, _)) = import.0.get(0) {
            // If only the module segment was written (`import foo::`), fall
            // back to an empty prefix anchored at the import's last token.
            // NOTE(review): `range.end - 1` would underflow if end == 0 —
            // presumably an import always has at least one token; confirm.
            let (import_name, import_meta) = import.0.get(1).cloned().unwrap_or((
                String::new(),
                mir::Metadata {
                    source_module_id: module.module_id,
                    range: reid::ast::token_stream::TokenRange {
                        start: import.1.range.end - 1,
                        end: import.1.range.end - 1,
                    },
                    position: None,
                },
            ));
            let mut autocompletes = Vec::new();
            if let Some((_, module)) = context.modules.iter().find(|m| m.1.name == *module_name) {
                // Public functions of the imported module matching the prefix.
                for function in &module.functions {
                    if !function.is_pub {
                        continue;
                    }
                    if function.name.starts_with(&import_name) {
                        autocompletes.push(Autocomplete {
                            text: function.name.clone(),
                            kind: AutocompleteKind::Function(function.parameters.clone(), function.return_type.clone()),
                        });
                    }
                }
                // Type definitions matching the prefix (no pub-check here).
                for typedef in &module.typedefs {
                    if typedef.name.starts_with(&import_name) {
                        autocompletes.push(Autocomplete {
                            text: typedef.name.clone(),
                            kind: AutocompleteKind::Type,
                        });
                    }
                }
            }
            set_autocomplete(&mut map, import_meta.range.end, autocompletes);
        }
    }
    // Struct definitions: record each field's type over its token range.
    for typedef in &module.typedefs {
        match &typedef.kind {
            mir::TypeDefinitionKind::Struct(StructType(fields)) => {
                for field in fields {
                    init_types(&mut map, &field.2, Some(field.1.clone()));
                }
            }
        }
    }
    // Binary-operator definitions: analyze local bodies only.
    for binop in &module.binop_defs {
        match &binop.fn_kind {
            mir::FunctionDefinitionKind::Local(block, _) => analyze_block(context, module, block, &mut map),
            mir::FunctionDefinitionKind::Extern(_) => {}
            mir::FunctionDefinitionKind::Intrinsic(_) => {}
        };
    }
    // Associated functions: parameter types, then local bodies.
    for (_, function) in &module.associated_functions {
        for param in &function.parameters {
            init_types(&mut map, &param.meta, Some(param.ty.clone()));
        }
        match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => analyze_block(context, module, block, &mut map),
            mir::FunctionDefinitionKind::Extern(_) => {}
            mir::FunctionDefinitionKind::Intrinsic(_) => {}
        };
    }
    // Free functions: parameter types, then local bodies.
    for function in &module.functions {
        for param in &function.parameters {
            init_types(&mut map, &param.meta, Some(param.ty.clone()));
        }
        match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => analyze_block(context, module, block, &mut map),
            mir::FunctionDefinitionKind::Extern(_) => {}
            mir::FunctionDefinitionKind::Intrinsic(_) => {}
        };
    }
    StaticAnalysis {
        tokens: module.tokens.clone(),
        token_analysis: map,
        error,
    }
}
/// Walk every statement of `block` — and its trailing return expression, if
/// present — recording type and autocompletion data into `map`.
pub fn analyze_block(
    context: &mir::Context,
    source_module: &mir::Module,
    block: &mir::Block,
    map: &mut TokenAnalysisMap,
) {
    for stmt in &block.statements {
        match &stmt.0 {
            mir::StmtKind::Let(named_variable_ref, _, expression) => {
                // Type of the bound name is the initializer's return type.
                let binding_ty = expression
                    .return_type(&TypeRefs::unknown(), source_module.module_id)
                    .ok()
                    .map(|(_, ty)| ty);
                init_types(map, &named_variable_ref.2, binding_ty);
            }
            mir::StmtKind::Set(lhs, rhs) => {
                analyze_expr(context, source_module, lhs, map);
                analyze_expr(context, source_module, rhs, map);
            }
            mir::StmtKind::Import(_) => {}
            mir::StmtKind::Expression(expression) => analyze_expr(context, source_module, expression, map),
            mir::StmtKind::While(WhileStatement { condition, block, .. }) => {
                analyze_expr(context, source_module, condition, map);
                analyze_block(context, source_module, block, map);
            }
        }
    }

    match &block.return_expression {
        Some((_, Some(return_exp))) => analyze_expr(context, source_module, return_exp, map),
        _ => {}
    }
}
/// Recursively walk an expression tree, recording the type of every token it
/// covers and autocompletion candidates for field accesses and associated
/// function calls.
///
/// Fixes over the previous version: removed two leftover `dbg!` debug calls,
/// a redundant `.clone()` of the candidate list on its last use, and a
/// needless intermediate `collect::<Vec<_>>()` feeding `extend`.
pub fn analyze_expr(
    context: &mir::Context,
    source_module: &mir::Module,
    expr: &mir::Expression,
    map: &mut TokenAnalysisMap,
) {
    // Record this expression's own type over its whole token range first;
    // nested sub-expressions below overwrite their narrower sub-ranges.
    init_types(
        map,
        &expr.1,
        expr.return_type(&TypeRefs::unknown(), source_module.module_id)
            .ok()
            .map(|(_, t)| t),
    );
    match &expr.0 {
        mir::ExprKind::Variable(_) => {}
        mir::ExprKind::Indexed(value, _, index_expr) => {
            analyze_expr(context, source_module, &value, map);
            analyze_expr(context, source_module, &index_expr, map);
        }
        mir::ExprKind::Accessed(expression, _, name, meta) => {
            analyze_expr(context, source_module, &expression, map);
            let accessed_type = expression.return_type(&TypeRefs::unknown(), source_module.module_id);
            let mut autocompletes = Vec::new();
            match accessed_type {
                Ok((_, accessed_type)) => {
                    // Associated functions on the accessed type whose name
                    // matches the typed prefix.
                    autocompletes.extend(
                        source_module
                            .associated_functions
                            .iter()
                            .filter(|(t, fun)| *t == accessed_type && fun.name.starts_with(name))
                            .map(|(_, fun)| Autocomplete {
                                text: fun.name.clone(),
                                kind: AutocompleteKind::Function(fun.parameters.clone(), fun.return_type.clone()),
                            }),
                    );
                    // Struct fields, for custom types defined in this module.
                    match accessed_type {
                        TypeKind::CustomType(ty_key) => {
                            let typedef = source_module
                                .typedefs
                                .iter()
                                .find(|t| t.name == ty_key.0 && t.source_module == ty_key.1);
                            if let Some(typedef) = typedef {
                                autocompletes.extend(match &typedef.kind {
                                    mir::TypeDefinitionKind::Struct(StructType(fields)) => {
                                        fields.iter().filter(|f| f.0.starts_with(name)).map(|f| Autocomplete {
                                            text: f.0.clone(),
                                            kind: AutocompleteKind::Field(f.1.clone()),
                                        })
                                    }
                                });
                            }
                        }
                        _ => {}
                    }
                }
                _ => {}
            }
            set_autocomplete(map, meta.range.end, autocompletes);
        }
        mir::ExprKind::Array(expressions) => {
            for expr in expressions {
                analyze_expr(context, source_module, expr, map);
            }
        }
        mir::ExprKind::Struct(_, items) => {
            for (_, expr, _) in items {
                analyze_expr(context, source_module, expr, map);
            }
        }
        mir::ExprKind::Literal(_) => {}
        mir::ExprKind::BinOp(_, lhs, rhs, _) => {
            analyze_expr(context, source_module, &lhs, map);
            analyze_expr(context, source_module, &rhs, map);
        }
        mir::ExprKind::FunctionCall(FunctionCall { parameters, .. }) => {
            for expr in parameters {
                analyze_expr(context, source_module, expr, map);
            }
        }
        mir::ExprKind::AssociatedFunctionCall(
            ty,
            FunctionCall {
                parameters, name, meta, ..
            },
        ) => {
            for expr in parameters {
                analyze_expr(context, source_module, expr, map);
            }
            // Candidates from user-defined associated functions on `ty`...
            let mut function_autocomplete = source_module
                .associated_functions
                .iter()
                .filter(|(t, fun)| t == ty && fun.name.starts_with(name))
                .map(|(_, fun)| Autocomplete {
                    text: fun.name.clone(),
                    kind: AutocompleteKind::Function(fun.parameters.clone(), fun.return_type.clone()),
                })
                .collect::<Vec<_>>();
            // ...plus compiler-intrinsic associated functions for this type.
            function_autocomplete.extend(
                get_intrinsic_assoc_functions(ty)
                    .iter()
                    .filter_map(|(s, f)| f.as_ref().map(|f| (s, f)))
                    .filter(|(_, fun)| fun.name.starts_with(name))
                    .map(|(_, fun)| Autocomplete {
                        text: fun.name.clone(),
                        kind: AutocompleteKind::Function(fun.parameters.clone(), fun.return_type.clone()),
                    }),
            );
            set_autocomplete(map, meta.range.end, function_autocomplete);
        }
        mir::ExprKind::If(IfExpression(cond, then_e, else_e)) => {
            analyze_expr(context, source_module, &cond, map);
            analyze_expr(context, source_module, &then_e, map);
            if let Some(else_e) = else_e.as_ref() {
                analyze_expr(context, source_module, &else_e, map);
            }
        }
        mir::ExprKind::Block(block) => analyze_block(context, source_module, block, map),
        mir::ExprKind::Borrow(expression, _) => {
            analyze_expr(context, source_module, &expression, map);
        }
        mir::ExprKind::Deref(expression) => {
            analyze_expr(context, source_module, &expression, map);
        }
        mir::ExprKind::CastTo(expression, _) => {
            analyze_expr(context, source_module, &expression, map);
        }
        mir::ExprKind::GlobalRef(_, _) => {}
    }
}

View File

@ -1,27 +1,29 @@
use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
use dashmap::DashMap; use dashmap::DashMap;
use reid::ast::lexer::{FullToken, Position}; use reid::ast::lexer::{FullToken, Position};
use reid::error_raporting::{self, ErrorModules, ReidError}; use reid::error_raporting::{ErrorModules, ReidError};
use reid::mir::{SourceModuleId, TypeKind}; use reid::mir::{
use reid::parse_module; self, Context, FunctionCall, FunctionDefinition, FunctionParam, IfExpression, SourceModuleId, StructType, TypeKind,
WhileStatement,
};
use reid::{compile_module, parse_module, perform_all_passes};
use tower_lsp::lsp_types::{ use tower_lsp::lsp_types::{
self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity, self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity,
DidChangeTextDocumentParams, DidOpenTextDocumentParams, Hover, HoverContents, HoverParams, HoverProviderCapability, DidChangeTextDocumentParams, DidOpenTextDocumentParams, Hover, HoverContents, HoverParams, HoverProviderCapability,
InitializeParams, InitializeResult, InitializedParams, MarkupContent, MarkupKind, MessageType, OneOf, Range, InitializeParams, InitializeResult, InitializedParams, MarkedString, MarkupContent, MarkupKind, MessageType, OneOf,
ServerCapabilities, TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, Range, ServerCapabilities, TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind,
WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities, TextDocumentSyncOptions, WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
}; };
use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc}; use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc};
use crate::analysis::{StaticAnalysis, analyze};
mod analysis;
#[derive(Debug)] #[derive(Debug)]
struct Backend { struct Backend {
client: Client, client: Client,
analysis: DashMap<String, StaticAnalysis>, tokens: DashMap<String, Vec<FullToken>>,
ast: DashMap<String, reid::ast::Module>,
types: DashMap<String, DashMap<FullToken, Option<TypeKind>>>,
} }
#[tower_lsp::async_trait] #[tower_lsp::async_trait]
@ -67,50 +69,20 @@ impl LanguageServer for Backend {
} }
async fn completion(&self, params: CompletionParams) -> jsonrpc::Result<Option<CompletionResponse>> { async fn completion(&self, params: CompletionParams) -> jsonrpc::Result<Option<CompletionResponse>> {
let path = PathBuf::from(params.text_document_position.text_document.uri.path()); Ok(Some(CompletionResponse::Array(vec![
let file_name = path.file_name().unwrap().to_str().unwrap().to_owned(); CompletionItem::new_simple("Hello".to_string(), "Some detail".to_string()),
let analysis = self.analysis.get(&file_name); CompletionItem::new_simple("Bye".to_string(), "More detail".to_string()),
let position = params.text_document_position.position; ])))
let token = if let Some(analysis) = &analysis {
analysis.tokens.iter().enumerate().find(|(_, tok)| {
tok.position.1 == position.line + 1
&& (tok.position.0 <= position.character
&& (tok.position.0 + tok.token.len() as u32) > position.character)
})
} else {
None
};
dbg!(position, token);
let list = if let Some((idx, _)) = token {
if let Some(analysis) = self.analysis.get(&file_name).unwrap().token_analysis.get(&idx) {
dbg!(&analysis);
analysis
.autocomplete
.iter()
.map(|s| CompletionItem::new_simple(s.text.to_string(), s.kind.to_string()))
.collect()
} else {
Vec::new()
}
} else {
Vec::new()
};
dbg!(&list);
Ok(Some(CompletionResponse::Array(list)))
} }
async fn hover(&self, params: HoverParams) -> jsonrpc::Result<Option<Hover>> { async fn hover(&self, params: HoverParams) -> jsonrpc::Result<Option<Hover>> {
let path = PathBuf::from(params.text_document_position_params.text_document.uri.path()); let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
let file_name = path.file_name().unwrap().to_str().unwrap().to_owned(); let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
let analysis = self.analysis.get(&file_name); let tokens = self.tokens.get(&file_name);
let position = params.text_document_position_params.position; let position = params.text_document_position_params.position;
let token = if let Some(analysis) = &analysis { let token = if let Some(tokens) = &tokens {
analysis.tokens.iter().enumerate().find(|(_, tok)| { tokens.iter().find(|tok| {
tok.position.1 == position.line + 1 tok.position.1 == position.line + 1
&& (tok.position.0 <= position.character + 1 && (tok.position.0 <= position.character + 1
&& (tok.position.0 + tok.token.len() as u32) > position.character + 1) && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
@ -119,8 +91,8 @@ impl LanguageServer for Backend {
None None
}; };
let (range, ty) = if let Some((idx, token)) = token { let (range, ty) = if let Some(token) = token {
if let Some(analysis) = self.analysis.get(&file_name).unwrap().token_analysis.get(&idx) { if let Some(possible_ty) = self.types.get(&file_name).unwrap().get(token) {
let start = token.position; let start = token.position;
let end = token.position.add(token.token.len() as u32); let end = token.position.add(token.token.len() as u32);
let range = Range { let range = Range {
@ -133,7 +105,7 @@ impl LanguageServer for Backend {
character: (end.0 as i32 - 1).max(0) as u32, character: (end.0 as i32 - 1).max(0) as u32,
}, },
}; };
if let Some(ty) = analysis.ty.clone() { if let Some(ty) = possible_ty.clone() {
(Some(range), format!("{}", ty)) (Some(range), format!("{}", ty))
} else { } else {
(Some(range), String::from("None type")) (Some(range), String::from("None type"))
@ -182,52 +154,31 @@ impl Backend {
let mut map = Default::default(); let mut map = Default::default();
let parse_res = parse(&params.text, path.clone(), &mut map); let parse_res = parse(&params.text, path.clone(), &mut map);
let (tokens, result) = match parse_res { let (tokens, result) = match parse_res {
Ok((module_id, tokens)) => (tokens.clone(), analyze(module_id, tokens, path, &mut map)), Ok((module_id, tokens)) => (tokens.clone(), compile(module_id, tokens, path, &mut map)),
Err(e) => (Vec::new(), Err(e)), Err(e) => (Vec::new(), Err(e)),
}; };
let mut diagnostics = Vec::new(); let mut diagnostics = Vec::new();
match result { match result {
Ok(Some(mut analysis)) => { Ok(Some(result)) => {
if let Some(reid_error) = &mut analysis.error { self.tokens.insert(file_name.clone(), result.tokens);
self.client self.types.insert(file_name.clone(), result.types);
.log_message(
MessageType::INFO,
format!("Successfully compiled despite parsing errors!"),
)
.await;
reid_error.errors.dedup();
for error in &reid_error.errors {
diagnostics.push(reid_error_into_diagnostic(error, &tokens));
self.client.log_message(MessageType::INFO, format!("{}", error)).await;
}
}
self.analysis.insert(file_name.clone(), analysis);
} }
Ok(_) => {} Ok(_) => {}
Err(mut reid_error) => { Err(mut reid_error) => {
reid_error.errors.dedup(); reid_error.errors.dedup();
for error in &reid_error.errors { for error in reid_error.errors {
diagnostics.push(reid_error_into_diagnostic(error, &tokens));
self.client.log_message(MessageType::INFO, format!("{}", error)).await;
}
}
}
self.client
.publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
.await;
}
}
fn reid_error_into_diagnostic(error: &error_raporting::ErrorKind, tokens: &Vec<FullToken>) -> Diagnostic {
let meta = error.get_meta(); let meta = error.get_meta();
let positions = meta let positions = meta
.range .range
.into_position(&tokens) .into_position(&tokens)
.unwrap_or((Position(0, 0), Position(0, 0))); .unwrap_or((Position(0, 0), Position(0, 0)));
self.client.log_message(MessageType::INFO, format!("{:?}", &meta)).await;
self.client
.log_message(MessageType::INFO, format!("{:?}", &positions))
.await;
Diagnostic { diagnostics.push(Diagnostic {
range: Range { range: Range {
start: lsp_types::Position { start: lsp_types::Position {
line: ((positions.0.1 as i32) - 1).max(0) as u32, line: ((positions.0.1 as i32) - 1).max(0) as u32,
@ -246,7 +197,21 @@ fn reid_error_into_diagnostic(error: &error_raporting::ErrorKind, tokens: &Vec<F
related_information: None, related_information: None,
tags: None, tags: None,
data: None, data: None,
});
self.client.log_message(MessageType::INFO, format!("{}", error)).await;
} }
}
}
self.client
.publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
.await;
}
}
struct CompileResult {
tokens: Vec<FullToken>,
types: DashMap<FullToken, Option<TypeKind>>,
} }
fn parse(source: &str, path: PathBuf, map: &mut ErrorModules) -> Result<(SourceModuleId, Vec<FullToken>), ReidError> { fn parse(source: &str, path: PathBuf, map: &mut ErrorModules) -> Result<(SourceModuleId, Vec<FullToken>), ReidError> {
@ -255,6 +220,36 @@ fn parse(source: &str, path: PathBuf, map: &mut ErrorModules) -> Result<(SourceM
Ok(parse_module(source, file_name.clone(), map)?) Ok(parse_module(source, file_name.clone(), map)?)
} }
/// Compile one module, run all passes, and collect a token -> type map for
/// the requested module. Returns `Ok(None)` when the compiled context no
/// longer contains a module with `module_id`.
fn compile(
    module_id: SourceModuleId,
    tokens: Vec<FullToken>,
    path: PathBuf,
    map: &mut ErrorModules,
) -> Result<Option<CompileResult>, ReidError> {
    let token_types = DashMap::new();
    let module = compile_module(module_id, tokens, map, Some(path.clone()), true)?;
    let module_id = module.module_id;
    let mut context = Context::from(vec![module], path.parent().unwrap().to_owned());
    perform_all_passes(&mut context, map)?;
    for module in context.modules.into_values() {
        if module.module_id != module_id {
            continue;
        }
        // NOTE(review): keyed by the token value itself, so identical tokens
        // at different positions share one entry (last index wins) — confirm
        // this is intended.
        for (idx, token) in module.tokens.iter().enumerate() {
            token_types.insert(token.clone(), find_type_in_context(&module, idx));
        }
        return Ok(Some(CompileResult {
            tokens: module.tokens,
            types: token_types,
        }));
    }
    return Ok(None);
}
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
let stdin = tokio::io::stdin(); let stdin = tokio::io::stdin();
@ -262,7 +257,240 @@ async fn main() {
let (service, socket) = LspService::new(|client| Backend { let (service, socket) = LspService::new(|client| Backend {
client, client,
analysis: DashMap::new(), ast: DashMap::new(),
tokens: DashMap::new(),
types: DashMap::new(),
}); });
Server::new(stdin, stdout, socket).serve(service).await; Server::new(stdin, stdout, socket).serve(service).await;
} }
/// Locate the type displayed for the token at `token_idx` by searching the
/// module's imports, typedefs, binop definitions, and functions in order.
pub fn find_type_in_context(module: &mir::Module, token_idx: usize) -> Option<TypeKind> {
    for import in &module.imports {
        if import.1.contains(token_idx) {
            // NOTE(review): "d" looks like a debug placeholder, not a real
            // type for import tokens — confirm before shipping.
            return Some(TypeKind::CustomType(mir::CustomTypeKey(
                "d".to_owned(),
                SourceModuleId(1),
            )));
        }
    }
    // Struct fields: return the field's declared type.
    for typedef in &module.typedefs {
        if !typedef.meta.contains(token_idx) {
            continue;
        }
        match &typedef.kind {
            mir::TypeDefinitionKind::Struct(StructType(fields)) => {
                for field in fields {
                    if field.2.contains(token_idx) {
                        return Some(field.1.clone());
                    }
                }
            }
        }
    }
    // Binary-operator bodies (local definitions only).
    for binop in &module.binop_defs {
        if let Some(meta) = binop.block_meta() {
            if !meta.contains(token_idx) {
                continue;
            }
        } else {
            continue;
        }
        return match &binop.fn_kind {
            mir::FunctionDefinitionKind::Local(block, _) => find_type_in_block(&block, module.module_id, token_idx),
            mir::FunctionDefinitionKind::Extern(_) => None,
            mir::FunctionDefinitionKind::Intrinsic(_) => None,
        };
    }
    // Associated functions: parameters first, then the body.
    for (_, function) in &module.associated_functions {
        if !(function.signature() + function.block_meta()).contains(token_idx) {
            continue;
        }
        for param in &function.parameters {
            if param.meta.contains(token_idx) {
                return Some(param.ty.clone());
            }
        }
        return match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => find_type_in_block(&block, module.module_id, token_idx),
            mir::FunctionDefinitionKind::Extern(_) => None,
            mir::FunctionDefinitionKind::Intrinsic(_) => None,
        };
    }
    // Free functions: parameters first, then the body.
    for function in &module.functions {
        if !(function.signature() + function.block_meta()).contains(token_idx) {
            continue;
        }
        for param in &function.parameters {
            if param.meta.contains(token_idx) {
                return Some(param.ty.clone());
            }
        }
        return match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => find_type_in_block(&block, module.module_id, token_idx),
            mir::FunctionDefinitionKind::Extern(_) => None,
            mir::FunctionDefinitionKind::Intrinsic(_) => None,
        };
    }
    None
}
/// Find the type for the token at `token_idx` inside `block`, descending into
/// the first statement whose range contains the token.
pub fn find_type_in_block(block: &mir::Block, module_id: SourceModuleId, token_idx: usize) -> Option<TypeKind> {
    if !block.meta.contains(token_idx) {
        // NOTE(review): returning Bool for a token *outside* the block looks
        // like a leftover debug value — `None` seems more plausible; confirm.
        return Some(TypeKind::Bool);
    }
    for statement in &block.statements {
        if !statement.1.contains(token_idx) {
            continue;
        }
        match &statement.0 {
            mir::StmtKind::Let(named_variable_ref, _, expression) => {
                // On the bound name itself: report the initializer's type;
                // otherwise search inside the initializer expression.
                if named_variable_ref.2.contains(token_idx) {
                    return expression
                        .return_type(&Default::default(), module_id)
                        .ok()
                        .map(|(_, ty)| ty);
                } else {
                    return find_type_in_expr(&expression, module_id, token_idx);
                }
            }
            mir::StmtKind::Set(lhs, rhs) => {
                return find_type_in_expr(lhs, module_id, token_idx).or(find_type_in_expr(rhs, module_id, token_idx));
            }
            mir::StmtKind::Import(_) => {}
            mir::StmtKind::Expression(expression) => return find_type_in_expr(expression, module_id, token_idx),
            mir::StmtKind::While(WhileStatement { condition, block, .. }) => {
                return find_type_in_expr(condition, module_id, token_idx)
                    .or(find_type_in_block(block, module_id, token_idx));
            }
        }
    }
    // Fall back to the block's trailing return expression, if any.
    if let Some((_, Some(return_exp))) = &block.return_expression {
        if let Some(ty) = find_type_in_expr(return_exp, module_id, token_idx) {
            return Some(ty);
        }
    }
    None
}
/// Find the type for the token at `token_idx` within `expr`, preferring the
/// innermost sub-expression that contains the token and falling back to the
/// expression's own type.
pub fn find_type_in_expr(expr: &mir::Expression, module_id: SourceModuleId, token_idx: usize) -> Option<TypeKind> {
    if !expr.1.contains(token_idx) {
        return None;
    }
    match &expr.0 {
        mir::ExprKind::Variable(named_variable_ref) => Some(named_variable_ref.0.clone()),
        mir::ExprKind::Indexed(value, type_kind, index_expr) => Some(
            find_type_in_expr(&value, module_id, token_idx)
                .or(find_type_in_expr(&index_expr, module_id, token_idx))
                .unwrap_or(type_kind.clone()),
        ),
        mir::ExprKind::Accessed(expression, type_kind, _, meta) => {
            // On the accessed member itself: the member's type; otherwise
            // descend into the base expression.
            if meta.contains(token_idx) {
                Some(type_kind.clone())
            } else {
                find_type_in_expr(&expression, module_id, token_idx)
            }
        }
        mir::ExprKind::Array(expressions) => {
            for expr in expressions {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            None
        }
        mir::ExprKind::Struct(name, items) => {
            for (_, expr, meta) in items {
                // On a field name: the field initializer's type.
                if meta.contains(token_idx) {
                    return expr.return_type(&Default::default(), module_id).map(|(_, t)| t).ok();
                }
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(TypeKind::CustomType(mir::CustomTypeKey(name.clone(), module_id)))
        }
        mir::ExprKind::Literal(literal) => Some(literal.as_type()),
        mir::ExprKind::BinOp(_, lhs, rhs, type_kind) => {
            if let Some(ty) = find_type_in_expr(lhs, module_id, token_idx) {
                return Some(ty);
            }
            if let Some(ty) = find_type_in_expr(rhs, module_id, token_idx) {
                return Some(ty);
            }
            Some(type_kind.clone())
        }
        mir::ExprKind::FunctionCall(FunctionCall {
            return_type,
            parameters,
            ..
        }) => {
            for expr in parameters {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(return_type.clone())
        }
        mir::ExprKind::AssociatedFunctionCall(
            _,
            FunctionCall {
                return_type,
                parameters,
                ..
            },
        ) => {
            for expr in parameters {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(return_type.clone())
        }
        mir::ExprKind::If(IfExpression(cond, then_e, else_e)) => find_type_in_expr(&cond, module_id, token_idx)
            .or(find_type_in_expr(&then_e, module_id, token_idx))
            .or(else_e.clone().and_then(|e| find_type_in_expr(&e, module_id, token_idx))),
        mir::ExprKind::Block(block) => find_type_in_block(block, module_id, token_idx),
        mir::ExprKind::Borrow(expression, mutable) => {
            if let Some(ty) = find_type_in_expr(&expression, module_id, token_idx) {
                return Some(ty);
            }
            // Token is on the borrow itself: wrap the inner type.
            if let Ok(inner) = expression.return_type(&Default::default(), module_id).map(|(_, ty)| ty) {
                Some(TypeKind::Borrow(Box::new(inner.clone()), *mutable))
            } else {
                None
            }
        }
        mir::ExprKind::Deref(expression) => {
            if let Some(ty) = find_type_in_expr(&expression, module_id, token_idx) {
                return Some(ty);
            }
            if let Ok(TypeKind::Borrow(inner, _)) =
                expression.return_type(&Default::default(), module_id).map(|(_, ty)| ty)
            {
                Some(*inner.clone())
            } else {
                // NOTE(review): "ä" looks like a debug placeholder for an
                // unresolvable deref target — confirm before shipping.
                Some(TypeKind::CustomType(mir::CustomTypeKey(
                    "ä".to_owned(),
                    SourceModuleId(1),
                )))
            }
        }
        mir::ExprKind::CastTo(expression, type_kind) => {
            Some(find_type_in_expr(&expression, module_id, token_idx).unwrap_or(type_kind.clone()))
        }
        mir::ExprKind::GlobalRef(_, type_kind) => Some(type_kind.clone()),
    }
}

View File

@ -184,7 +184,7 @@ pub struct LetStatement {
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ImportStatement(pub Vec<(String, TokenRange)>, pub TokenRange); pub struct ImportStatement(pub Vec<String>, pub TokenRange);
#[derive(Debug)] #[derive(Debug)]
pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub TokenRange); pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub TokenRange);

View File

@ -175,36 +175,7 @@ impl Parse for AssociatedFunctionCall {
let ty = stream.parse()?; let ty = stream.parse()?;
stream.expect(Token::Colon)?; stream.expect(Token::Colon)?;
stream.expect(Token::Colon)?; stream.expect(Token::Colon)?;
match stream.parse() { Ok(AssociatedFunctionCall(ty, stream.parse()?))
Ok(fn_call) => Ok(AssociatedFunctionCall(ty, fn_call)),
_ => {
if let Some(Token::Identifier(fn_name)) = stream.peek() {
stream.next();
stream.expected_err_nonfatal("associated function call");
Ok(AssociatedFunctionCall(
ty,
FunctionCallExpression {
name: fn_name,
params: Vec::new(),
range: stream.get_range_prev_single().unwrap(),
is_macro: false,
},
))
} else {
stream.expected_err_nonfatal("associated function name");
Ok(AssociatedFunctionCall(
ty,
FunctionCallExpression {
name: String::new(),
params: Vec::new(),
range: stream.get_range_prev_single().unwrap(),
is_macro: false,
},
))
}
}
}
} }
} }
@ -639,7 +610,7 @@ impl Parse for LetStatement {
stream.expect(Token::Equals)?; stream.expect(Token::Equals)?;
let expression = stream.parse()?; let expression = stream.parse()?;
stream.expect_nonfatal(Token::Semi).ok(); stream.expect(Token::Semi)?;
Ok(LetStatement { Ok(LetStatement {
name: variable, name: variable,
ty, ty,
@ -660,21 +631,19 @@ impl Parse for ImportStatement {
let mut import_list = Vec::new(); let mut import_list = Vec::new();
if let Some(Token::Identifier(name)) = stream.next() { if let Some(Token::Identifier(name)) = stream.next() {
import_list.push((name, stream.get_range_prev_single().unwrap())); import_list.push(name);
while stream.expect(Token::Colon).is_ok() && stream.expect(Token::Colon).is_ok() { while stream.expect(Token::Colon).is_ok() && stream.expect(Token::Colon).is_ok() {
if let Some(Token::Identifier(name)) = stream.peek() { if let Some(Token::Identifier(name)) = stream.next() {
stream.next(); // Consume identifier import_list.push(name);
import_list.push((name, stream.get_range_prev_single().unwrap()));
} else { } else {
stream.expected_err_nonfatal("identifier"); Err(stream.expected_err("identifier")?)?
break;
} }
} }
} else { } else {
Err(stream.expected_err("identifier")?)? Err(stream.expected_err("identifier")?)?
} }
stream.expect_nonfatal(Token::Semi).ok(); stream.expect(Token::Semi)?;
Ok(ImportStatement(import_list, stream.get_range().unwrap())) Ok(ImportStatement(import_list, stream.get_range().unwrap()))
} }
@ -819,7 +788,7 @@ impl Parse for Block {
// if semicolon is missing. // if semicolon is missing.
if !matches!(e, Expression(ExpressionKind::IfExpr(_), _)) { if !matches!(e, Expression(ExpressionKind::IfExpr(_), _)) {
// In theory could ignore the missing semicolon.. // In theory could ignore the missing semicolon..
stream.expected_err_nonfatal("semicolon to complete statement"); return Err(stream.expected_err("semicolon to complete statement")?);
} }
statements.push(BlockLevelStatement::Expression(e)); statements.push(BlockLevelStatement::Expression(e));
@ -937,8 +906,7 @@ pub enum DotIndexKind {
impl Parse for DotIndexKind { impl Parse for DotIndexKind {
fn parse(mut stream: TokenStream) -> Result<Self, Error> { fn parse(mut stream: TokenStream) -> Result<Self, Error> {
stream.expect(Token::Dot)?; stream.expect(Token::Dot)?;
if let Some(Token::Identifier(name)) = stream.peek() { if let Some(Token::Identifier(name)) = stream.next() {
stream.next(); // Consume identifer
if let Ok(args) = stream.parse::<FunctionArgs>() { if let Ok(args) = stream.parse::<FunctionArgs>() {
Ok(Self::FunctionCall(FunctionCallExpression { Ok(Self::FunctionCall(FunctionCallExpression {
name, name,
@ -947,18 +915,10 @@ impl Parse for DotIndexKind {
is_macro: false, is_macro: false,
})) }))
} else { } else {
Ok(Self::StructValueIndex(name, stream.get_range_prev().unwrap())) Ok(Self::StructValueIndex(name, stream.get_range().unwrap()))
} }
} else { } else {
if stream.next_is_whitespace() { return Err(stream.expected_err("struct index (number)")?);
stream.expecting_err_nonfatal("struct index");
Ok(Self::StructValueIndex(
String::new(),
stream.get_range_prev_single().unwrap(),
))
} else {
Err(stream.expecting_err("struct index")?)
}
} }
} }
} }
@ -972,7 +932,7 @@ impl Parse for BlockLevelStatement {
Some(Token::ReturnKeyword) => { Some(Token::ReturnKeyword) => {
stream.next(); stream.next();
let exp = stream.parse().ok(); let exp = stream.parse().ok();
stream.expect_nonfatal(Token::Semi).ok(); stream.expect(Token::Semi)?;
Stmt::Return(ReturnType::Hard, exp) Stmt::Return(ReturnType::Hard, exp)
} }
Some(Token::For) => { Some(Token::For) => {
@ -1037,7 +997,7 @@ impl Parse for SetStatement {
let var_ref = stream.parse()?; let var_ref = stream.parse()?;
stream.expect(Token::Equals)?; stream.expect(Token::Equals)?;
let expr = stream.parse()?; let expr = stream.parse()?;
stream.expect_nonfatal(Token::Semi).ok(); stream.expect(Token::Semi)?;
Ok(SetStatement(var_ref, expr, stream.get_range().unwrap())) Ok(SetStatement(var_ref, expr, stream.get_range().unwrap()))
} }
} }
@ -1078,7 +1038,7 @@ impl Parse for TopLevelStatement {
stream.next(); // Consume Extern stream.next(); // Consume Extern
stream.expect(Token::FnKeyword)?; stream.expect(Token::FnKeyword)?;
let extern_fn = Stmt::ExternFunction(stream.parse()?); let extern_fn = Stmt::ExternFunction(stream.parse()?);
stream.expect_nonfatal(Token::Semi).ok(); stream.expect(Token::Semi)?;
extern_fn extern_fn
} }
Some(Token::FnKeyword) | Some(Token::PubKeyword) => Stmt::FunctionDefinition(stream.parse()?), Some(Token::FnKeyword) | Some(Token::PubKeyword) => Stmt::FunctionDefinition(stream.parse()?),

View File

@ -30,14 +30,7 @@ impl ast::Module {
for stmt in &self.top_level_statements { for stmt in &self.top_level_statements {
match stmt { match stmt {
Import(import) => { Import(import) => {
imports.push(mir::Import( imports.push(mir::Import(import.0.clone(), import.1.as_meta(module_id)));
import
.0
.iter()
.map(|(s, range)| (s.clone(), range.as_meta(module_id)))
.collect(),
import.1.as_meta(module_id),
));
} }
FunctionDefinition(function_def) => functions.push(function_def.into_mir(module_id)), FunctionDefinition(function_def) => functions.push(function_def.into_mir(module_id)),
ExternFunction(signature) => { ExternFunction(signature) => {

View File

@ -1,8 +1,6 @@
//! Contains relevant code for parsing tokens received from //! Contains relevant code for parsing tokens received from
//! Lexing/Tokenizing-stage. //! Lexing/Tokenizing-stage.
use std::{cell::RefCell, rc::Rc};
use crate::{ use crate::{
ast::parse::Parse, ast::parse::Parse,
lexer::{FullToken, Token}, lexer::{FullToken, Token},
@ -14,7 +12,6 @@ use crate::{
pub struct TokenStream<'a, 'b> { pub struct TokenStream<'a, 'b> {
ref_position: Option<&'b mut usize>, ref_position: Option<&'b mut usize>,
tokens: &'a [FullToken], tokens: &'a [FullToken],
errors: Rc<RefCell<Vec<Error>>>,
pub position: usize, pub position: usize,
} }
@ -23,7 +20,6 @@ impl<'a, 'b> TokenStream<'a, 'b> {
TokenStream { TokenStream {
ref_position: None, ref_position: None,
tokens, tokens,
errors: Rc::new(RefCell::new(Vec::new())),
position: 0, position: 0,
} }
} }
@ -42,16 +38,6 @@ impl<'a, 'b> TokenStream<'a, 'b> {
)) ))
} }
/// Returns expected-error for the next token in-line. Useful in conjunction
/// with [`TokenStream::peek`]
pub fn expected_err_nonfatal<T: Into<String>>(&mut self, expected: T) {
let err = match self.expected_err(expected) {
Ok(e) => e,
Err(e) => e,
};
self.errors.borrow_mut().push(err);
}
/// Returns expected-error for the previous token that was already consumed. /// Returns expected-error for the previous token that was already consumed.
/// Useful in conjunction with [`TokenStream::next`] /// Useful in conjunction with [`TokenStream::next`]
pub fn expecting_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> { pub fn expecting_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> {
@ -64,16 +50,6 @@ impl<'a, 'b> TokenStream<'a, 'b> {
)) ))
} }
/// Returns expected-error for the previous token that was already consumed.
/// Useful in conjunction with [`TokenStream::next`]
pub fn expecting_err_nonfatal<T: Into<String>>(&mut self, expected: T) {
let err = match self.expecting_err(expected) {
Ok(e) => e,
Err(e) => e,
};
self.errors.borrow_mut().push(err);
}
pub fn expect(&mut self, token: Token) -> Result<(), Error> { pub fn expect(&mut self, token: Token) -> Result<(), Error> {
if let (pos, Some(peeked)) = self.next_token(self.position) { if let (pos, Some(peeked)) = self.next_token(self.position) {
if token == peeked.token { if token == peeked.token {
@ -87,21 +63,6 @@ impl<'a, 'b> TokenStream<'a, 'b> {
} }
} }
pub fn expect_nonfatal(&mut self, token: Token) -> Result<(), ()> {
if let (pos, Some(peeked)) = self.next_token(self.position) {
if token == peeked.token {
self.position = pos + 1;
Ok(())
} else {
self.expecting_err_nonfatal(token);
Err(())
}
} else {
self.expecting_err_nonfatal(token);
Err(())
}
}
pub fn next(&mut self) -> Option<Token> { pub fn next(&mut self) -> Option<Token> {
let (position, token) = self.next_token(self.position); let (position, token) = self.next_token(self.position);
self.position = position + 1; self.position = position + 1;
@ -186,7 +147,6 @@ impl<'a, 'b> TokenStream<'a, 'b> {
let clone = TokenStream { let clone = TokenStream {
ref_position: Some(&mut ref_pos), ref_position: Some(&mut ref_pos),
tokens: self.tokens, tokens: self.tokens,
errors: self.errors.clone(),
position, position,
}; };
@ -215,14 +175,6 @@ impl<'a, 'b> TokenStream<'a, 'b> {
}) })
} }
/// Gets range of the previous token only.
pub fn get_range_prev_single(&self) -> Option<TokenRange> {
self.ref_position.as_ref().map(|ref_pos| TokenRange {
start: self.previous_token(self.position).0,
end: self.previous_token(self.position).0,
})
}
fn previous_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) { fn previous_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
from -= 1; from -= 1;
while let Some(token) = self.tokens.get(from) { while let Some(token) = self.tokens.get(from) {
@ -245,22 +197,6 @@ impl<'a, 'b> TokenStream<'a, 'b> {
} }
(from, self.tokens.get(from)) (from, self.tokens.get(from))
} }
pub fn errors(&self) -> Vec<Error> {
self.errors.borrow().clone().clone()
}
pub fn next_is_whitespace(&self) -> bool {
if let Some(token) = self.tokens.get(self.position) {
if let Token::Whitespace(_) = token.token {
true
} else {
false
}
} else {
true
}
}
} }
impl Drop for TokenStream<'_, '_> { impl Drop for TokenStream<'_, '_> {

View File

@ -1,5 +1,3 @@
use std::{collections::HashMap, hash::Hash};
use reid_lib::{builder::InstructionValue, CmpPredicate, ConstValueKind, Instr, Type}; use reid_lib::{builder::InstructionValue, CmpPredicate, ConstValueKind, Instr, Type};
use crate::{ use crate::{
@ -59,15 +57,6 @@ pub fn form_intrinsics() -> Vec<FunctionDefinition> {
intrinsics intrinsics
} }
pub fn get_intrinsic_assoc_functions(ty: &TypeKind) -> HashMap<String, Option<FunctionDefinition>> {
let mut map = HashMap::new();
map.insert("length".to_owned(), get_intrinsic_assoc_func(ty, "length"));
map.insert("sizeof".to_owned(), get_intrinsic_assoc_func(ty, "sizeof"));
map.insert("malloc".to_owned(), get_intrinsic_assoc_func(ty, "malloc"));
map.insert("null".to_owned(), get_intrinsic_assoc_func(ty, "null"));
map
}
pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDefinition> { pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDefinition> {
if let TypeKind::Array(_, len) = ty { if let TypeKind::Array(_, len) = ty {
match name { match name {

View File

@ -1326,6 +1326,8 @@ impl mir::Expression {
(val.1.clone(), type_kind.clone()) (val.1.clone(), type_kind.clone())
}; };
dbg!(&ty, type_kind);
match (&ty, type_kind) { match (&ty, type_kind) {
(TypeKind::UserPtr(_), TypeKind::UserPtr(_)) => Some(StackValue( (TypeKind::UserPtr(_), TypeKind::UserPtr(_)) => Some(StackValue(
val.0.derive( val.0.derive(

View File

@ -181,10 +181,6 @@ impl ReidError {
pub fn from_kind(errors: Vec<ErrorKind>, map: ErrorModules) -> ReidError { pub fn from_kind(errors: Vec<ErrorKind>, map: ErrorModules) -> ReidError {
ReidError { map, errors } ReidError { map, errors }
} }
pub fn extend(&mut self, other: ReidError) {
self.errors.extend(other.errors);
}
} }
impl std::error::Error for ReidError {} impl std::error::Error for ReidError {}

View File

@ -73,7 +73,7 @@ use crate::{
}; };
pub mod ast; pub mod ast;
pub mod codegen; mod codegen;
pub mod error_raporting; pub mod error_raporting;
pub mod ld; pub mod ld;
pub mod mir; pub mod mir;
@ -105,7 +105,7 @@ pub fn compile_module<'map>(
map: &'map mut ErrorModules, map: &'map mut ErrorModules,
path: Option<PathBuf>, path: Option<PathBuf>,
is_main: bool, is_main: bool,
) -> Result<Result<mir::Module, (ast::Module, ReidError)>, ReidError> { ) -> Result<mir::Module, ReidError> {
let module = map.module(&module_id).cloned().unwrap(); let module = map.module(&module_id).cloned().unwrap();
let mut token_stream = TokenStream::from(&tokens); let mut token_stream = TokenStream::from(&tokens);
@ -117,8 +117,6 @@ pub fn compile_module<'map>(
statements.push(statement); statements.push(statement);
} }
let errors = token_stream.errors();
drop(token_stream); drop(token_stream);
let ast_module = ast::Module { let ast_module = ast::Module {
@ -129,34 +127,11 @@ pub fn compile_module<'map>(
is_main, is_main,
}; };
if errors.len() > 0 {
// dbg!(&ast_module);
return Ok(Err((
ast_module,
ReidError::from_kind(
errors
.into_iter()
.map(|e| {
error_raporting::ErrorKind::from(mir::pass::Error {
metadata: mir::Metadata {
source_module_id: module_id,
range: *e.get_range().unwrap_or(&Default::default()),
position: None,
},
kind: e,
})
})
.collect(),
map.clone(),
),
)));
}
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")] #[cfg(feature = "log_output")]
dbg!(&ast_module); dbg!(&ast_module);
Ok(Ok(ast_module.process(module_id))) Ok(ast_module.process(module_id))
} }
pub fn perform_all_passes<'map>( pub fn perform_all_passes<'map>(
@ -318,7 +293,7 @@ pub fn compile_and_pass<'map>(
let name = path.file_name().unwrap().to_str().unwrap().to_owned(); let name = path.file_name().unwrap().to_str().unwrap().to_owned();
let (id, tokens) = parse_module(source, name, module_map)?; let (id, tokens) = parse_module(source, name, module_map)?;
let module = compile_module(id, tokens, module_map, Some(path.clone()), true)?.map_err(|(_, e)| e)?; let module = compile_module(id, tokens, module_map, Some(path.clone()), true)?;
let mut mir_context = mir::Context::from(vec![module], path.parent().unwrap().to_owned()); let mut mir_context = mir::Context::from(vec![module], path.parent().unwrap().to_owned());

View File

@ -84,11 +84,7 @@ impl Display for GlobalKind {
impl Display for Import { impl Display for Import {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!( write!(f, "import {}", self.0.join("::"))
f,
"import {}",
self.0.iter().map(|(s, _)| s.clone()).collect::<Vec<_>>().join("::")
)
} }
} }

View File

@ -52,7 +52,7 @@ pub enum ErrorKind {
pub fn compile_std(module_map: &mut ErrorModules) -> Result<Module, ReidError> { pub fn compile_std(module_map: &mut ErrorModules) -> Result<Module, ReidError> {
let (id, tokens) = parse_module(STD_SOURCE, STD_NAME, module_map)?; let (id, tokens) = parse_module(STD_SOURCE, STD_NAME, module_map)?;
let module = compile_module(id, tokens, module_map, None, false)?.map_err(|(_, e)| e)?; let module = compile_module(id, tokens, module_map, None, false)?;
let module_id = module.module_id; let module_id = module.module_id;
let mut mir_context = super::Context::from(vec![module], Default::default()); let mut mir_context = super::Context::from(vec![module], Default::default());
@ -124,9 +124,7 @@ impl<'map> Pass for LinkerPass<'map> {
state.ok::<_, Infallible>(Err(ErrorKind::InnerModulesNotYetSupported(import.clone())), import.1); state.ok::<_, Infallible>(Err(ErrorKind::InnerModulesNotYetSupported(import.clone())), import.1);
} }
let Some((module_name, _)) = path.get(0) else { let module_name = unsafe { path.get_unchecked(0) };
continue;
};
let mut imported = if let Some(mod_id) = module_ids.get(module_name) { let mut imported = if let Some(mod_id) = module_ids.get(module_name) {
modules.get(mod_id).unwrap() modules.get(mod_id).unwrap()
@ -158,7 +156,6 @@ impl<'map> Pass for LinkerPass<'map> {
}; };
match compile_module(id, tokens, &mut self.module_map, Some(file_path), false) { match compile_module(id, tokens, &mut self.module_map, Some(file_path), false) {
Ok(res) => match res {
Ok(imported_module) => { Ok(imported_module) => {
if imported_module.is_main { if imported_module.is_main {
state.ok::<_, Infallible>( state.ok::<_, Infallible>(
@ -174,17 +171,6 @@ impl<'map> Pass for LinkerPass<'map> {
modules_to_process.push(imported.clone()); modules_to_process.push(imported.clone());
imported imported
} }
Err((_, err)) => {
state.ok::<_, Infallible>(
Err(ErrorKind::ModuleCompilationError(
module_name.clone(),
format!("{}", err),
)),
import.1,
);
continue;
}
},
Err(err) => { Err(err) => {
state.ok::<_, Infallible>( state.ok::<_, Infallible>(
Err(ErrorKind::ModuleCompilationError( Err(ErrorKind::ModuleCompilationError(
@ -199,9 +185,7 @@ impl<'map> Pass for LinkerPass<'map> {
} }
.borrow_mut(); .borrow_mut();
let Some((import_name, _)) = path.get(1) else { let import_name = unsafe { path.get_unchecked(1) };
continue;
};
let import_id = imported.module_id; let import_id = imported.module_id;
let mut imported_types = Vec::new(); let mut imported_types = Vec::new();

View File

@ -256,7 +256,7 @@ pub enum ReturnKind {
pub struct NamedVariableRef(pub TypeKind, pub String, pub Metadata); pub struct NamedVariableRef(pub TypeKind, pub String, pub Metadata);
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct Import(pub Vec<(String, Metadata)>, pub Metadata); pub struct Import(pub Vec<String>, pub Metadata);
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum ExprKind { pub enum ExprKind {

View File

@ -621,7 +621,7 @@ impl Expression {
// Update possibly resolved type // Update possibly resolved type
Ok(true_ty) Ok(true_ty)
} else { } else {
Err(ErrorKind::NoSuchField(key.0.clone())) Err(ErrorKind::NoSuchField(field_name.clone()))
} }
} else { } else {
Err(ErrorKind::TriedAccessingNonStruct(expr_ty)) Err(ErrorKind::TriedAccessingNonStruct(expr_ty))
@ -727,10 +727,7 @@ impl Expression {
type_kind.clone(), type_kind.clone(),
function_call.name.clone(), function_call.name.clone(),
)) ))
.ok_or(ErrorKind::AssocFunctionNotDefined( .ok_or(ErrorKind::FunctionNotDefined(function_call.name.clone()));
function_call.name.clone(),
type_kind.clone(),
));
if let Some(f) = state.ok(true_function, self.1) { if let Some(f) = state.ok(true_function, self.1) {
let param_len_given = function_call.parameters.len(); let param_len_given = function_call.parameters.len();

View File

@ -14,7 +14,7 @@ use crate::{
mir::{ mir::{
pass::{AssociatedFunctionKey, ScopeVariable}, pass::{AssociatedFunctionKey, ScopeVariable},
BinopDefinition, Block, CustomTypeKey, ExprKind, Expression, FunctionDefinition, FunctionDefinitionKind, BinopDefinition, Block, CustomTypeKey, ExprKind, Expression, FunctionDefinition, FunctionDefinitionKind,
IfExpression, Module, ReturnKind, StmtKind, TypeKind, VagueType, WhileStatement, IfExpression, Module, ReturnKind, StmtKind, TypeKind, WhileStatement,
}, },
util::try_all, util::try_all,
}; };
@ -546,10 +546,10 @@ impl Expression {
*type_kind = elem_ty.as_type().clone(); *type_kind = elem_ty.as_type().clone();
Ok(elem_ty) Ok(elem_ty)
} }
None => Ok(type_refs.from_type(&TypeKind::Vague(VagueType::Unknown)).unwrap()), None => Err(ErrorKind::NoSuchField(field_name.clone())),
} }
} }
_ => Ok(type_refs.from_type(&TypeKind::Vague(VagueType::Unknown)).unwrap()), _ => Err(ErrorKind::TriedAccessingNonStruct(kind)),
} }
} }
ExprKind::Struct(struct_name, fields) => { ExprKind::Struct(struct_name, fields) => {
@ -655,13 +655,9 @@ impl Expression {
.ok_or(ErrorKind::AssocFunctionNotDefined( .ok_or(ErrorKind::AssocFunctionNotDefined(
function_call.name.clone(), function_call.name.clone(),
type_kind.clone(), type_kind.clone(),
)) ))?
.clone(); .clone();
let Ok(fn_call) = fn_call else {
return Ok(type_refs.from_type(&Vague(Unknown)).unwrap());
};
// Infer param expression types and narrow them to the // Infer param expression types and narrow them to the
// expected function parameters (or Unknown types if too // expected function parameters (or Unknown types if too
// many were provided) // many were provided)

View File

@ -97,9 +97,6 @@ pub struct TypeRefs {
/// Indirect ID-references, referring to hints-vec /// Indirect ID-references, referring to hints-vec
pub(super) type_refs: RefCell<Vec<TypeIdRef>>, pub(super) type_refs: RefCell<Vec<TypeIdRef>>,
pub(super) binop_types: BinopMap, pub(super) binop_types: BinopMap,
/// Used when the real typerefs are not available, and any TypeRefs need to
/// be resolved as Unknown.
pub unknown_typerefs: bool,
} }
impl std::fmt::Display for TypeRefs { impl std::fmt::Display for TypeRefs {
@ -125,14 +122,6 @@ impl TypeRefs {
hints: Default::default(), hints: Default::default(),
type_refs: Default::default(), type_refs: Default::default(),
binop_types: binops, binop_types: binops,
unknown_typerefs: false,
}
}
pub fn unknown() -> TypeRefs {
TypeRefs {
unknown_typerefs: true,
..Default::default()
} }
} }
@ -188,12 +177,8 @@ impl TypeRefs {
} }
pub fn retrieve_typeref(&self, idx: usize) -> Option<TypeRefKind> { pub fn retrieve_typeref(&self, idx: usize) -> Option<TypeRefKind> {
if !self.unknown_typerefs {
let inner_idx = unsafe { *self.recurse_type_ref(idx).borrow() }; let inner_idx = unsafe { *self.recurse_type_ref(idx).borrow() };
self.hints.borrow().get(inner_idx).cloned() self.hints.borrow().get(inner_idx).cloned()
} else {
Some(TypeRefKind::Direct(TypeKind::Vague(VagueType::Unknown)))
}
} }
pub fn retrieve_wide_type(&self, idx: usize, seen: &mut HashSet<usize>) -> Option<TypeKind> { pub fn retrieve_wide_type(&self, idx: usize, seen: &mut HashSet<usize>) -> Option<TypeKind> {

View File

@ -16,7 +16,7 @@ fn test_compile(source: &str, name: &str) -> CompileOutput {
let mut map = Default::default(); let mut map = Default::default();
let (id, tokens) = assert_err(parse_module(source, name, &mut map)); let (id, tokens) = assert_err(parse_module(source, name, &mut map));
let module = assert_err(assert_err(compile_module(id, tokens, &mut map, None, true)).map_err(|(_, e)| e)); let module = assert_err(compile_module(id, tokens, &mut map, None, true));
let mut mir_context = mir::Context::from(vec![module], Default::default()); let mut mir_context = mir::Context::from(vec![module], Default::default());
assert_err(perform_all_passes(&mut mir_context, &mut map)); assert_err(perform_all_passes(&mut mir_context, &mut map));