Compare commits

...

21 Commits
lsp ... main

Author SHA1 Message Date
101ee2d8e5 Account for intrinsic associated functions with autocomplete 2025-08-03 01:00:02 +03:00
a6844b919b Fix array_structs.reid 2025-08-03 00:16:47 +03:00
4ea0913842 Add autocomplete for associated functions and struct fields 2025-08-03 00:13:53 +03:00
bb9f69ee53 Add autocomplete for imports 2025-08-02 23:03:11 +03:00
97a5c3a65e Optimize LSP analysis a Lot 2025-08-02 21:47:20 +03:00
8595da0c30 Make LSP use a more general analysis structure 2025-08-02 21:11:33 +03:00
dae39bc9d2 Fix fibonacci.reid 2025-08-02 20:21:57 +03:00
658450993a Fix hover types for for-loops 2025-08-02 20:10:48 +03:00
3f6d26679d Update README.md, all TODOs done 2025-08-02 19:24:31 +03:00
16082752e2 Update language server client and configs 2025-08-02 19:19:29 +03:00
8a71ce3629 Update LSP client 2025-08-02 15:02:39 +03:00
81d418c6d8 Update version number 2025-08-02 14:36:56 +03:00
8d0e3d03d5 Improve syntax highlighting 2025-08-02 03:41:08 +03:00
34e31549b3 add some syntax highlighting 2025-08-02 03:09:21 +03:00
0ba25db4c8 Start adding syntax highlighting 2025-08-02 00:14:20 +03:00
314f44304a Update README.md 2025-08-01 23:59:05 +03:00
08f7725ce7 Compile cpu_raytracer example in e2e tests, but don't run it 2025-08-01 22:46:46 +03:00
f89b26bf74 Improve LSP hover typing 2025-08-01 22:41:46 +03:00
4fada0036c Fix debug info for structs 2025-07-31 23:25:46 +03:00
4f0ee72c83 Edit example a bit, fix macro generation in function parameters 2025-07-31 22:48:16 +03:00
deed96bbfd Fix bitwise operators requiring U64 for rhs 2025-07-31 22:17:58 +03:00
37 changed files with 1582 additions and 584 deletions

20
Cargo.lock generated
View File

@ -654,7 +654,7 @@ checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a"
[[package]] [[package]]
name = "reid" name = "reid"
version = "1.0.0-beta.2" version = "1.0.0-beta.3"
dependencies = [ dependencies = [
"colored", "colored",
"reid-lib", "reid-lib",
@ -662,15 +662,7 @@ dependencies = [
] ]
[[package]] [[package]]
name = "reid-lib" name = "reid-language-server"
version = "1.0.0-beta.2"
dependencies = [
"llvm-sys",
"thiserror",
]
[[package]]
name = "reid-lsp"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"dashmap 6.1.0", "dashmap 6.1.0",
@ -680,6 +672,14 @@ dependencies = [
"tower-lsp", "tower-lsp",
] ]
[[package]]
name = "reid-lib"
version = "1.0.0-beta.3"
dependencies = [
"llvm-sys",
"thiserror",
]
[[package]] [[package]]
name = "rustc-demangle" name = "rustc-demangle"
version = "0.1.26" version = "0.1.26"

View File

@ -71,9 +71,9 @@ Currently missing big features (TODOs) are:
Big features that I want later but are not necessary: Big features that I want later but are not necessary:
- ~~User-defined binary operations~~ (DONE) - ~~User-defined binary operations~~ (DONE)
- ~~Asymmetric binary operations (e.g. string + u32)~~ (DONE) - ~~Asymmetric binary operations (e.g. string + u32)~~ (DONE)
- Error handling - ~~Error handling~~ (Not Doing It)
- Lexing & parsing of whitespace and comments as well - ~~Lexing & parsing of whitespace and comments as well~~ (DONE)
- LSP implementation - ~~LSP implementation~~ (CRUCIAL FEATURES DONE)
Smaller features: Smaller features:
- ~~Hex-numbers~~ (DONE) - ~~Hex-numbers~~ (DONE)

View File

@ -4,5 +4,6 @@ import std::print;
fn main() -> u8 { fn main() -> u8 {
let bytes = include_bytes!("./macro_easy_file.txt"); let bytes = include_bytes!("./macro_easy_file.txt");
print(String::new() + bytes.length()); print(String::new() + bytes.length());
return (bytes as *u8)[0]; print(String::new() + (include_bytes!("./macro_easy_file.txt") as *u8)[1] as u64);
return (include_bytes!("./macro_easy_file.txt") as *u8)[0];
} }

View File

@ -1,6 +1,6 @@
[package] [package]
name = "reid-lib" name = "reid-lib"
version = "1.0.0-beta.2" version = "1.0.0-beta.3"
edition = "2024" edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@ -610,30 +610,9 @@ impl Builder {
Instr::PtrToInt(instr, ty) => instr.cast_to(self, &ty).map(|_| ()), Instr::PtrToInt(instr, ty) => instr.cast_to(self, &ty).map(|_| ()),
Instr::IntToPtr(instr, ty) => instr.cast_to(self, &ty).map(|_| ()), Instr::IntToPtr(instr, ty) => instr.cast_to(self, &ty).map(|_| ()),
Instr::BitCast(..) => Ok(()), Instr::BitCast(..) => Ok(()),
Instr::ShiftRightLogical(_, rhs) => { Instr::ShiftRightLogical(lhs, rhs) => match_types(&lhs, &rhs, &self).map(|_| ()),
let rhs_ty = rhs.get_type(&self)?; Instr::ShiftRightArithmetic(lhs, rhs) => match_types(&lhs, &rhs, &self).map(|_| ()),
if rhs_ty.category() == TypeCategory::UnsignedInteger { Instr::ShiftLeft(lhs, rhs) => match_types(&lhs, &rhs, &self).map(|_| ()),
Ok(())
} else {
Err(ErrorKind::Null)
}
}
Instr::ShiftRightArithmetic(_, rhs) => {
let rhs_ty = rhs.get_type(&self)?;
if rhs_ty.category() == TypeCategory::UnsignedInteger {
Ok(())
} else {
Err(ErrorKind::Null)
}
}
Instr::ShiftLeft(_, rhs) => {
let rhs_ty = rhs.get_type(&self)?;
if rhs_ty.category() == TypeCategory::UnsignedInteger {
Ok(())
} else {
Err(ErrorKind::Null)
}
}
Instr::GetGlobal(_) => Ok(()), Instr::GetGlobal(_) => Ok(()),
} }
} }

View File

@ -540,7 +540,7 @@ impl DebugTypeHolder {
field.pos.map(|p| p.line).unwrap_or(1), field.pos.map(|p| p.line).unwrap_or(1),
field.size_bits, field.size_bits,
0, 0,
1, field.offset,
field.flags.as_llvm(), field.flags.as_llvm(),
*debug.types.get(&field.ty).unwrap(), *debug.types.get(&field.ty).unwrap(),
) )

1
reid-lsp/.gitignore vendored
View File

@ -4,3 +4,4 @@ dist
package-lock.json package-lock.json
pnpm-lock.yaml pnpm-lock.yaml
tsconfig.tsbuildinfo tsconfig.tsbuildinfo
*.vsix

View File

@ -1,5 +1,5 @@
[package] [package]
name = "reid-lsp" name = "reid-language-server"
version = "0.1.0" version = "0.1.0"
edition = "2024" edition = "2024"
@ -7,5 +7,5 @@ edition = "2024"
socket = "0.0.7" socket = "0.0.7"
tokio = { version = "1.47.0", features = ["full"] } tokio = { version = "1.47.0", features = ["full"] }
tower-lsp = "0.20.0" tower-lsp = "0.20.0"
reid = { path = "../reid", version = "1.0.0-beta.2", registry="gitea-teascade", features=[] } reid = { path = "../reid", version = "1.0.0-beta.3", registry="gitea-teascade", features=[] }
dashmap = "6.1.0" dashmap = "6.1.0"

View File

@ -1,71 +1 @@
# reid-lsp README # Reid Language Server
This is the README for your extension "reid-lsp". After writing up a brief description, we recommend including the following sections.
## Features
Describe specific features of your extension including screenshots of your extension in action. Image paths are relative to this README file.
For example if there is an image subfolder under your extension project workspace:
\!\[feature X\]\(images/feature-x.png\)
> Tip: Many popular extensions utilize animations. This is an excellent way to show off your extension! We recommend short, focused animations that are easy to follow.
## Requirements
If you have any requirements or dependencies, add a section describing those and how to install and configure them.
## Extension Settings
Include if your extension adds any VS Code settings through the `contributes.configuration` extension point.
For example:
This extension contributes the following settings:
* `myExtension.enable`: Enable/disable this extension.
* `myExtension.thing`: Set to `blah` to do something.
## Known Issues
Calling out known issues can help limit users opening duplicate issues against your extension.
## Release Notes
Users appreciate release notes as you update your extension.
### 1.0.0
Initial release of ...
### 1.0.1
Fixed issue #.
### 1.1.0
Added features X, Y, and Z.
---
## Following extension guidelines
Ensure that you've read through the extensions guidelines and follow the best practices for creating your extension.
* [Extension Guidelines](https://code.visualstudio.com/api/references/extension-guidelines)
## Working with Markdown
You can author your README using Visual Studio Code. Here are some useful editor keyboard shortcuts:
* Split the editor (`Cmd+\` on macOS or `Ctrl+\` on Windows and Linux).
* Toggle preview (`Shift+Cmd+V` on macOS or `Shift+Ctrl+V` on Windows and Linux).
* Press `Ctrl+Space` (Windows, Linux, macOS) to see a list of Markdown snippets.
## For more information
* [Visual Studio Code's Markdown Support](http://code.visualstudio.com/docs/languages/markdown)
* [Markdown Syntax Reference](https://help.github.com/articles/markdown-basics/)
**Enjoy!**

View File

@ -18,16 +18,25 @@ import {
let client: LanguageClient; let client: LanguageClient;
export function activate(context: ExtensionContext) { export function activate(context: ExtensionContext) {
const traceOutputChannel = window.createOutputChannel("Reid Language Server trace"); const configuration = workspace.getConfiguration('reid-language-server');
const command = process.env.SERVER_PATH || "reid-language-server"; let server_path: string = process.env.SERVER_PATH ?? configuration.get("language-server-path") ?? 'reid-language-server';
const regex = /\$(\w+)/;
while (regex.test(server_path)) {
let envVar = regex.exec(server_path)?.[1];
const envVal = envVar ? process.env[envVar] : undefined;
if (envVar === undefined || envVal === undefined) {
console.error(`No such environment variables as ${envVar}`);
}
server_path = server_path.replaceAll(`$${envVar}`, envVal ?? '');
}
const run: Executable = { const run: Executable = {
command, command: server_path,
options: { options: {
env: { env: {
...process.env, ...process.env,
RUST_LOG: "debug", RUST_LOG: "debug",
RUST_BACKTRACE: 1,
} }
} }
}; };
@ -49,13 +58,15 @@ export function activate(context: ExtensionContext) {
// Create the language client and start the client. // Create the language client and start the client.
client = new LanguageClient( client = new LanguageClient(
'reid-lsp', 'reid-language-server',
'Reid Language Server', 'Reid Language Server',
serverOptions, serverOptions,
clientOptions clientOptions
); );
client.info(JSON.stringify(server_path));
client.info(`Loaded Reid Language Server from ${server_path}`);
client.info("hello");
workspace.onDidOpenTextDocument((e) => { workspace.onDidOpenTextDocument((e) => {
}); });

View File

@ -1,8 +1,11 @@
{ {
"name": "reid-lsp", "name": "reid-language-server",
"displayName": "Reid Language Server", "displayName": "Reid Language Server",
"description": "Language Server Extension for Reid", "description": "Language Server Extension for Reid",
"version": "0.0.1", "version": "0.1.0",
"repository": {
"url": "https://git.teascade.net/teascade"
},
"engines": { "engines": {
"vscode": "^1.102.0" "vscode": "^1.102.0"
}, },
@ -19,6 +22,9 @@
"id": "reid", "id": "reid",
"extensions": [ "extensions": [
".reid" ".reid"
],
"aliases": [
"Reid"
] ]
} }
], ],
@ -26,28 +32,25 @@
"type": "object", "type": "object",
"title": "reid-language-server", "title": "reid-language-server",
"properties": { "properties": {
"nrs-language-server.trace.server": { "reid-language-server.language-server-path": {
"type": "string", "type": "string",
"scope": "window", "scope": "window",
"enum": [ "default": "$HOME/.cargo/bin/reid-lsp",
"off", "description": "Path to the Reid Language Server executable"
"messages",
"verbose"
],
"enumDescriptions": [
"No traces",
"Error only",
"Full log"
],
"default": "off",
"description": "Traces the communication between VS Code and the language server."
}
} }
} }
}, },
"grammars": [
{
"language": "reid",
"scopeName": "source.reid",
"path": "./syntaxes/grammar.json"
}
]
},
"scripts": { "scripts": {
"vscode:prepublish": "pnpm run package", "vscode:prepublish": "pnpm run package",
"compile": "webpack", "compile": "npx js-yaml syntaxes/grammar.yaml > syntaxes/grammar.json && webpack",
"watch": "webpack --watch", "watch": "webpack --watch",
"package": "webpack --mode production --devtool hidden-source-map", "package": "webpack --mode production --devtool hidden-source-map",
"compile-tests": "tsc -p . --outDir out", "compile-tests": "tsc -p . --outDir out",
@ -65,6 +68,7 @@
"@vscode/test-cli": "^0.0.11", "@vscode/test-cli": "^0.0.11",
"@vscode/test-electron": "^2.5.2", "@vscode/test-electron": "^2.5.2",
"eslint": "^9.25.1", "eslint": "^9.25.1",
"js-yaml": "^4.1.0",
"ts-loader": "^9.5.2", "ts-loader": "^9.5.2",
"typescript": "^5.8.3", "typescript": "^5.8.3",
"webpack": "^5.99.7", "webpack": "^5.99.7",

384
reid-lsp/src/analysis.rs Normal file
View File

@ -0,0 +1,384 @@
use std::{collections::HashMap, fmt::format, path::PathBuf};
use reid::{
ast::{self, FunctionDefinition, lexer::FullToken, token_stream::TokenRange},
codegen::intrinsics::get_intrinsic_assoc_functions,
compile_module,
error_raporting::{ErrorModules, ReidError},
mir::{
self, Context, FunctionCall, FunctionParam, IfExpression, SourceModuleId, StructType, TypeKind, WhileStatement,
typecheck::typerefs::TypeRefs,
},
perform_all_passes,
};
/// Maps a token's index (into the module's token stream) to the
/// semantic analysis recorded for that token.
type TokenAnalysisMap = HashMap<usize, SemanticAnalysis>;
/// Result of statically analyzing one module for the LSP: the module's
/// full token stream, per-token semantic data, and any compile error.
#[derive(Debug, Clone)]
pub struct StaticAnalysis {
    /// All lexed tokens of the analyzed module.
    pub tokens: Vec<FullToken>,
    /// Per-token analysis, keyed by index into `tokens`.
    pub token_analysis: TokenAnalysisMap,
    /// Parse/pass error, if compilation did not fully succeed.
    pub error: Option<ReidError>,
}
/// Semantic facts attached to a single token: its inferred type (when
/// known) and the autocomplete suggestions valid at that position.
#[derive(Debug, Clone)]
pub struct SemanticAnalysis {
    /// Inferred type covering this token, `None` when inference failed.
    pub ty: Option<TypeKind>,
    /// Completion items offered when the cursor is on this token.
    pub autocomplete: Vec<Autocomplete>,
}
/// One completion suggestion surfaced to the LSP client.
#[derive(Debug, Clone)]
pub struct Autocomplete {
    /// Text inserted/displayed for this suggestion.
    pub text: String,
    /// What the suggestion refers to (type, struct field, or function).
    pub kind: AutocompleteKind,
}
/// Kind of a completion item, carrying the data needed to render its
/// detail text (field type, or function signature).
#[derive(Debug, Clone)]
pub enum AutocompleteKind {
    Type,
    Field(TypeKind),
    Function(Vec<FunctionParam>, TypeKind),
}

/// Render the completion kind as the detail string shown by the client.
///
/// Implemented as `Display` rather than a direct `ToString` impl (the
/// idiomatic form); the blanket `impl<T: Display> ToString for T` keeps
/// every existing `.to_string()` caller working unchanged.
impl std::fmt::Display for AutocompleteKind {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            AutocompleteKind::Type => write!(f, "type"),
            AutocompleteKind::Function(params, ret_ty) => {
                // Renders e.g. "(lhs: u32, rhs: u32) -> u32".
                let params = params
                    .iter()
                    .map(|p| format!("{}: {}", p.name, p.ty))
                    .collect::<Vec<_>>();
                write!(f, "({}) -> {}", params.join(", "), ret_ty)
            }
            AutocompleteKind::Field(type_kind) => write!(f, "{}", type_kind),
        }
    }
}
pub fn analyze(
module_id: SourceModuleId,
tokens: Vec<FullToken>,
path: PathBuf,
map: &mut ErrorModules,
) -> Result<Option<StaticAnalysis>, ReidError> {
let (module, mut parse_error) = match compile_module(module_id, tokens, map, Some(path.clone()), true)? {
Ok(module) => (module, None),
Err((m, err)) => (m.process(module_id), Some(err)),
};
let module_id = module.module_id;
let mut context = Context::from(vec![module], path.parent().unwrap().to_owned());
match perform_all_passes(&mut context, map) {
Ok(_) => {}
Err(pass_error) => {
if let Some(err) = &mut parse_error {
err.extend(pass_error);
} else {
parse_error = Some(pass_error)
}
}
}
for module in context.modules.values() {
if module.module_id != module_id {
continue;
}
return Ok(Some(analyze_context(&context, &module, parse_error)));
}
return Ok(None);
}
/// Record `ty` as the inferred type for every token index covered by
/// `meta`'s range (inclusive), replacing any prior analysis for those
/// tokens with a fresh entry that has no autocomplete data.
pub fn init_types(map: &mut TokenAnalysisMap, meta: &mir::Metadata, ty: Option<TypeKind>) {
    let template = SemanticAnalysis {
        ty,
        autocomplete: Vec::new(),
    };
    // `extend` overwrites existing keys, matching repeated `insert`s.
    map.extend((meta.range.start..=meta.range.end).map(|idx| (idx, template.clone())));
}
/// Attach `autocomplete` suggestions to the token at `token_idx`,
/// creating an entry with no type information if none exists yet.
pub fn set_autocomplete(map: &mut TokenAnalysisMap, token_idx: usize, autocomplete: Vec<Autocomplete>) {
    // Entry API: a single map lookup, and the owned vector is moved into
    // place instead of being cloned (the old code cloned it in both
    // branches and then dropped the original).
    map.entry(token_idx)
        .or_insert_with(|| SemanticAnalysis {
            ty: None,
            autocomplete: Vec::new(),
        })
        .autocomplete = autocomplete;
}
/// Build the per-token [`StaticAnalysis`] for `module`: walks imports,
/// type definitions, binary-operator definitions, associated functions
/// and free functions, recording types and completion data. `error` (if
/// any) is carried through so the caller can surface diagnostics.
pub fn analyze_context(context: &mir::Context, module: &mir::Module, error: Option<ReidError>) -> StaticAnalysis {
    let mut map = HashMap::new();
    // Imports: offer the imported module's public items as completions
    // at the position of the (possibly still partial) item name.
    for import in &module.imports {
        init_types(&mut map, &import.1, None);
        // NOTE(review): import.0 looks like the path segments, e.g.
        // [module, item]; the item segment may be absent mid-typing —
        // confirm against the MIR definition.
        if let Some((module_name, _)) = import.0.get(0) {
            // No item segment yet: use an empty prefix (matches every
            // item) anchored at the last token of the import statement.
            let (import_name, import_meta) = import.0.get(1).cloned().unwrap_or((
                String::new(),
                mir::Metadata {
                    source_module_id: module.module_id,
                    range: reid::ast::token_stream::TokenRange {
                        start: import.1.range.end - 1,
                        end: import.1.range.end - 1,
                    },
                    position: None,
                },
            ));
            let mut autocompletes = Vec::new();
            if let Some((_, module)) = context.modules.iter().find(|m| m.1.name == *module_name) {
                // Public functions whose name starts with the typed prefix.
                for function in &module.functions {
                    if !function.is_pub {
                        continue;
                    }
                    if function.name.starts_with(&import_name) {
                        autocompletes.push(Autocomplete {
                            text: function.name.clone(),
                            kind: AutocompleteKind::Function(function.parameters.clone(), function.return_type.clone()),
                        });
                    }
                }
                // Type definitions matching the prefix.
                for typedef in &module.typedefs {
                    if typedef.name.starts_with(&import_name) {
                        autocompletes.push(Autocomplete {
                            text: typedef.name.clone(),
                            kind: AutocompleteKind::Type,
                        });
                    }
                }
            }
            set_autocomplete(&mut map, import_meta.range.end, autocompletes);
        }
    }
    // Struct fields: record each field's declared type at its tokens.
    // (field tuple: .0 name, .1 type, .2 metadata — per usage below)
    for typedef in &module.typedefs {
        match &typedef.kind {
            mir::TypeDefinitionKind::Struct(StructType(fields)) => {
                for field in fields {
                    init_types(&mut map, &field.2, Some(field.1.clone()));
                }
            }
        }
    }
    // Binary-operator definitions: only locally-defined bodies are
    // walked; extern and intrinsic definitions carry no tokens here.
    for binop in &module.binop_defs {
        match &binop.fn_kind {
            mir::FunctionDefinitionKind::Local(block, _) => analyze_block(context, module, block, &mut map),
            mir::FunctionDefinitionKind::Extern(_) => {}
            mir::FunctionDefinitionKind::Intrinsic(_) => {}
        };
    }
    // Associated functions: parameter types first, then the body.
    for (_, function) in &module.associated_functions {
        for param in &function.parameters {
            init_types(&mut map, &param.meta, Some(param.ty.clone()));
        }
        match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => analyze_block(context, module, block, &mut map),
            mir::FunctionDefinitionKind::Extern(_) => {}
            mir::FunctionDefinitionKind::Intrinsic(_) => {}
        };
    }
    // Free functions: same treatment as associated functions.
    for function in &module.functions {
        for param in &function.parameters {
            init_types(&mut map, &param.meta, Some(param.ty.clone()));
        }
        match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => analyze_block(context, module, block, &mut map),
            mir::FunctionDefinitionKind::Extern(_) => {}
            mir::FunctionDefinitionKind::Intrinsic(_) => {}
        };
    }
    StaticAnalysis {
        tokens: module.tokens.clone(),
        token_analysis: map,
        error,
    }
}
/// Walk a MIR block, recording type information and autocomplete data
/// for every statement and for the trailing return expression.
pub fn analyze_block(
    context: &mir::Context,
    source_module: &mir::Module,
    block: &mir::Block,
    map: &mut TokenAnalysisMap,
) {
    for stmt in &block.statements {
        match &stmt.0 {
            mir::StmtKind::Let(var_ref, _, value) => init_types(
                map,
                &var_ref.2,
                value
                    .return_type(&TypeRefs::unknown(), source_module.module_id)
                    .ok()
                    .map(|(_, ty)| ty),
            ),
            mir::StmtKind::Set(target, value) => {
                analyze_expr(context, source_module, target, map);
                analyze_expr(context, source_module, value, map);
            }
            mir::StmtKind::Import(_) => {}
            mir::StmtKind::Expression(inner) => analyze_expr(context, source_module, inner, map),
            mir::StmtKind::While(WhileStatement { condition, block, .. }) => {
                analyze_expr(context, source_module, condition, map);
                analyze_block(context, source_module, block, map);
            }
        }
    }
    // A block may end with an optional return expression; analyze it too.
    if let Some((_, Some(return_expr))) = &block.return_expression {
        analyze_expr(context, source_module, return_expr, map);
    }
}
/// Recursively analyze an expression: record its inferred type for the
/// tokens it covers, descend into sub-expressions, and attach
/// autocomplete suggestions at field-access and associated-function-call
/// positions.
pub fn analyze_expr(
    context: &mir::Context,
    source_module: &mir::Module,
    expr: &mir::Expression,
    map: &mut TokenAnalysisMap,
) {
    // Best-effort type for this expression; inference failures are
    // simply recorded as "no type".
    init_types(
        map,
        &expr.1,
        expr.return_type(&TypeRefs::unknown(), source_module.module_id)
            .ok()
            .map(|(_, t)| t),
    );
    match &expr.0 {
        mir::ExprKind::Variable(_) => {}
        mir::ExprKind::Indexed(value, _, index_expr) => {
            analyze_expr(context, source_module, &value, map);
            analyze_expr(context, source_module, &index_expr, map);
        }
        mir::ExprKind::Accessed(expression, _, name, meta) => {
            analyze_expr(context, source_module, &expression, map);
            // Offer associated functions of the accessed value's type,
            // and (for structs) its fields, filtered by the partial
            // name typed so far.
            let accessed_type = expression.return_type(&TypeRefs::unknown(), source_module.module_id);
            let mut autocompletes = Vec::new();
            match accessed_type {
                Ok((_, accessed_type)) => {
                    autocompletes.extend(
                        source_module
                            .associated_functions
                            .iter()
                            .filter(|(t, fun)| *t == accessed_type && fun.name.starts_with(name))
                            .map(|(_, fun)| Autocomplete {
                                text: fun.name.clone(),
                                kind: AutocompleteKind::Function(fun.parameters.clone(), fun.return_type.clone()),
                            }),
                    );
                    match accessed_type {
                        TypeKind::CustomType(ty_key) => {
                            let typedef = source_module
                                .typedefs
                                .iter()
                                .find(|t| t.name == ty_key.0 && t.source_module == ty_key.1);
                            if let Some(typedef) = typedef {
                                autocompletes.extend(match &typedef.kind {
                                    mir::TypeDefinitionKind::Struct(StructType(fields)) => {
                                        fields.iter().filter(|f| f.0.starts_with(name)).map(|f| Autocomplete {
                                            text: f.0.clone(),
                                            kind: AutocompleteKind::Field(f.1.clone()),
                                        })
                                    }
                                });
                            }
                        }
                        _ => {}
                    }
                }
                _ => {}
            }
            set_autocomplete(map, meta.range.end, autocompletes);
        }
        mir::ExprKind::Array(expressions) => {
            for expr in expressions {
                analyze_expr(context, source_module, expr, map);
            }
        }
        mir::ExprKind::Struct(_, items) => {
            for (_, expr, _) in items {
                analyze_expr(context, source_module, expr, map);
            }
        }
        mir::ExprKind::Literal(_) => {}
        mir::ExprKind::BinOp(_, lhs, rhs, _) => {
            analyze_expr(context, source_module, &lhs, map);
            analyze_expr(context, source_module, &rhs, map);
        }
        mir::ExprKind::FunctionCall(FunctionCall { parameters, .. }) => {
            for expr in parameters {
                analyze_expr(context, source_module, expr, map);
            }
        }
        mir::ExprKind::AssociatedFunctionCall(
            ty,
            FunctionCall {
                parameters, name, meta, ..
            },
        ) => {
            for expr in parameters {
                analyze_expr(context, source_module, expr, map);
            }
            // Candidates: user-defined associated functions of `ty`…
            let mut function_autocomplete = source_module
                .associated_functions
                .iter()
                .filter(|(t, fun)| t == ty && fun.name.starts_with(name))
                .map(|(_, fun)| Autocomplete {
                    text: fun.name.clone(),
                    kind: AutocompleteKind::Function(fun.parameters.clone(), fun.return_type.clone()),
                })
                .collect::<Vec<_>>();
            // …plus the compiler's intrinsic associated functions.
            // (extend takes the iterator directly; no intermediate Vec,
            // and the leftover dbg!() debug output has been removed.)
            function_autocomplete.extend(
                get_intrinsic_assoc_functions(ty)
                    .iter()
                    .filter_map(|(s, f)| f.as_ref().map(|f| (s, f)))
                    .filter(|(_, fun)| fun.name.starts_with(name))
                    .map(|(_, fun)| Autocomplete {
                        text: fun.name.clone(),
                        kind: AutocompleteKind::Function(fun.parameters.clone(), fun.return_type.clone()),
                    }),
            );
            // Last use: move the vector instead of cloning it.
            set_autocomplete(map, meta.range.end, function_autocomplete);
        }
        mir::ExprKind::If(IfExpression(cond, then_e, else_e)) => {
            analyze_expr(context, source_module, &cond, map);
            analyze_expr(context, source_module, &then_e, map);
            if let Some(else_e) = else_e.as_ref() {
                analyze_expr(context, source_module, &else_e, map);
            }
        }
        mir::ExprKind::Block(block) => analyze_block(context, source_module, block, map),
        mir::ExprKind::Borrow(expression, _) => {
            analyze_expr(context, source_module, &expression, map);
        }
        mir::ExprKind::Deref(expression) => {
            analyze_expr(context, source_module, &expression, map);
        }
        mir::ExprKind::CastTo(expression, _) => {
            analyze_expr(context, source_module, &expression, map);
        }
        mir::ExprKind::GlobalRef(_, _) => {}
    }
}

View File

@ -1,29 +1,27 @@
use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
use dashmap::DashMap; use dashmap::DashMap;
use reid::ast::lexer::{FullToken, Position}; use reid::ast::lexer::{FullToken, Position};
use reid::error_raporting::{ErrorModules, ReidError}; use reid::error_raporting::{self, ErrorModules, ReidError};
use reid::mir::{ use reid::mir::{SourceModuleId, TypeKind};
self, Context, FunctionCall, FunctionDefinition, FunctionParam, IfExpression, SourceModuleId, StructType, TypeKind, use reid::parse_module;
WhileStatement,
};
use reid::{compile_module, parse_module, perform_all_passes};
use tower_lsp::lsp_types::{ use tower_lsp::lsp_types::{
self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity, self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity,
DidChangeTextDocumentParams, DidOpenTextDocumentParams, Hover, HoverContents, HoverParams, HoverProviderCapability, DidChangeTextDocumentParams, DidOpenTextDocumentParams, Hover, HoverContents, HoverParams, HoverProviderCapability,
InitializeParams, InitializeResult, InitializedParams, MarkedString, MessageType, OneOf, Range, ServerCapabilities, InitializeParams, InitializeResult, InitializedParams, MarkupContent, MarkupKind, MessageType, OneOf, Range,
TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, ServerCapabilities, TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities, WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
}; };
use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc}; use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc};
use crate::analysis::{StaticAnalysis, analyze};
mod analysis;
#[derive(Debug)] #[derive(Debug)]
struct Backend { struct Backend {
client: Client, client: Client,
tokens: DashMap<String, Vec<FullToken>>, analysis: DashMap<String, StaticAnalysis>,
ast: DashMap<String, reid::ast::Module>,
types: DashMap<String, DashMap<FullToken, Option<TypeKind>>>,
} }
#[tower_lsp::async_trait] #[tower_lsp::async_trait]
@ -69,20 +67,50 @@ impl LanguageServer for Backend {
} }
async fn completion(&self, params: CompletionParams) -> jsonrpc::Result<Option<CompletionResponse>> { async fn completion(&self, params: CompletionParams) -> jsonrpc::Result<Option<CompletionResponse>> {
Ok(Some(CompletionResponse::Array(vec![ let path = PathBuf::from(params.text_document_position.text_document.uri.path());
CompletionItem::new_simple("Hello".to_string(), "Some detail".to_string()), let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
CompletionItem::new_simple("Bye".to_string(), "More detail".to_string()), let analysis = self.analysis.get(&file_name);
]))) let position = params.text_document_position.position;
let token = if let Some(analysis) = &analysis {
analysis.tokens.iter().enumerate().find(|(_, tok)| {
tok.position.1 == position.line + 1
&& (tok.position.0 <= position.character
&& (tok.position.0 + tok.token.len() as u32) > position.character)
})
} else {
None
};
dbg!(position, token);
let list = if let Some((idx, _)) = token {
if let Some(analysis) = self.analysis.get(&file_name).unwrap().token_analysis.get(&idx) {
dbg!(&analysis);
analysis
.autocomplete
.iter()
.map(|s| CompletionItem::new_simple(s.text.to_string(), s.kind.to_string()))
.collect()
} else {
Vec::new()
}
} else {
Vec::new()
};
dbg!(&list);
Ok(Some(CompletionResponse::Array(list)))
} }
async fn hover(&self, params: HoverParams) -> jsonrpc::Result<Option<Hover>> { async fn hover(&self, params: HoverParams) -> jsonrpc::Result<Option<Hover>> {
let path = PathBuf::from(params.text_document_position_params.text_document.uri.path()); let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
let file_name = path.file_name().unwrap().to_str().unwrap().to_owned(); let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
let tokens = self.tokens.get(&file_name); let analysis = self.analysis.get(&file_name);
let position = params.text_document_position_params.position; let position = params.text_document_position_params.position;
let token = if let Some(tokens) = &tokens { let token = if let Some(analysis) = &analysis {
tokens.iter().find(|tok| { analysis.tokens.iter().enumerate().find(|(_, tok)| {
tok.position.1 == position.line + 1 tok.position.1 == position.line + 1
&& (tok.position.0 <= position.character + 1 && (tok.position.0 <= position.character + 1
&& (tok.position.0 + tok.token.len() as u32) > position.character + 1) && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
@ -91,24 +119,38 @@ impl LanguageServer for Backend {
None None
}; };
let ty = if let Some(token) = token { let (range, ty) = if let Some((idx, token)) = token {
if let Some(possible_ty) = self.types.get(&file_name).unwrap().get(token) { if let Some(analysis) = self.analysis.get(&file_name).unwrap().token_analysis.get(&idx) {
if let Some(ty) = possible_ty.clone() { let start = token.position;
format!("{}", ty) let end = token.position.add(token.token.len() as u32);
let range = Range {
start: lsp_types::Position {
line: (start.1 as i32 - 1).max(0) as u32,
character: (start.0 as i32 - 1).max(0) as u32,
},
end: lsp_types::Position {
line: (end.1 as i32 - 1).max(0) as u32,
character: (end.0 as i32 - 1).max(0) as u32,
},
};
if let Some(ty) = analysis.ty.clone() {
(Some(range), format!("{}", ty))
} else { } else {
String::from("no type") (Some(range), String::from("None type"))
} }
} else { } else {
String::from("no token") (None, String::from("no type"))
} }
} else { } else {
String::from("no token") (None, String::from("no token"))
}; };
Ok(Some(Hover { let contents = HoverContents::Markup(MarkupContent {
contents: HoverContents::Scalar(MarkedString::String(format!("{}", ty))), kind: MarkupKind::Markdown,
range: None, value: format!("`{ty}`"),
})) });
Ok(Some(Hover { contents, range }))
} }
async fn did_open(&self, params: DidOpenTextDocumentParams) { async fn did_open(&self, params: DidOpenTextDocumentParams) {
@ -140,31 +182,52 @@ impl Backend {
let mut map = Default::default(); let mut map = Default::default();
let parse_res = parse(&params.text, path.clone(), &mut map); let parse_res = parse(&params.text, path.clone(), &mut map);
let (tokens, result) = match parse_res { let (tokens, result) = match parse_res {
Ok((module_id, tokens)) => (tokens.clone(), compile(module_id, tokens, path, &mut map)), Ok((module_id, tokens)) => (tokens.clone(), analyze(module_id, tokens, path, &mut map)),
Err(e) => (Vec::new(), Err(e)), Err(e) => (Vec::new(), Err(e)),
}; };
let mut diagnostics = Vec::new(); let mut diagnostics = Vec::new();
match result { match result {
Ok(Some(result)) => { Ok(Some(mut analysis)) => {
self.tokens.insert(file_name.clone(), result.tokens); if let Some(reid_error) = &mut analysis.error {
self.types.insert(file_name.clone(), result.types); self.client
.log_message(
MessageType::INFO,
format!("Successfully compiled despite parsing errors!"),
)
.await;
reid_error.errors.dedup();
for error in &reid_error.errors {
diagnostics.push(reid_error_into_diagnostic(error, &tokens));
self.client.log_message(MessageType::INFO, format!("{}", error)).await;
}
}
self.analysis.insert(file_name.clone(), analysis);
} }
Ok(_) => {} Ok(_) => {}
Err(mut reid_error) => { Err(mut reid_error) => {
reid_error.errors.dedup(); reid_error.errors.dedup();
for error in reid_error.errors { for error in &reid_error.errors {
diagnostics.push(reid_error_into_diagnostic(error, &tokens));
self.client.log_message(MessageType::INFO, format!("{}", error)).await;
}
}
}
self.client
.publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
.await;
}
}
fn reid_error_into_diagnostic(error: &error_raporting::ErrorKind, tokens: &Vec<FullToken>) -> Diagnostic {
let meta = error.get_meta(); let meta = error.get_meta();
let positions = meta let positions = meta
.range .range
.into_position(&tokens) .into_position(&tokens)
.unwrap_or((Position(0, 0), Position(0, 0))); .unwrap_or((Position(0, 0), Position(0, 0)));
self.client.log_message(MessageType::INFO, format!("{:?}", &meta)).await;
self.client
.log_message(MessageType::INFO, format!("{:?}", &positions))
.await;
diagnostics.push(Diagnostic { Diagnostic {
range: Range { range: Range {
start: lsp_types::Position { start: lsp_types::Position {
line: ((positions.0.1 as i32) - 1).max(0) as u32, line: ((positions.0.1 as i32) - 1).max(0) as u32,
@ -183,22 +246,8 @@ impl Backend {
related_information: None, related_information: None,
tags: None, tags: None,
data: None, data: None,
});
self.client.log_message(MessageType::INFO, format!("{}", error)).await;
} }
} }
}
self.client
.publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
.await;
}
}
struct CompileResult {
tokens: Vec<FullToken>,
types: DashMap<FullToken, Option<TypeKind>>,
}
fn parse(source: &str, path: PathBuf, map: &mut ErrorModules) -> Result<(SourceModuleId, Vec<FullToken>), ReidError> { fn parse(source: &str, path: PathBuf, map: &mut ErrorModules) -> Result<(SourceModuleId, Vec<FullToken>), ReidError> {
let file_name = path.file_name().unwrap().to_str().unwrap().to_owned(); let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
@ -206,36 +255,6 @@ fn parse(source: &str, path: PathBuf, map: &mut ErrorModules) -> Result<(SourceM
Ok(parse_module(source, file_name.clone(), map)?) Ok(parse_module(source, file_name.clone(), map)?)
} }
fn compile(
module_id: SourceModuleId,
tokens: Vec<FullToken>,
path: PathBuf,
map: &mut ErrorModules,
) -> Result<Option<CompileResult>, ReidError> {
let token_types = DashMap::new();
let module = compile_module(module_id, tokens, map, Some(path.clone()), true)?;
let module_id = module.module_id;
let mut context = Context::from(vec![module], path.parent().unwrap().to_owned());
perform_all_passes(&mut context, map)?;
for module in context.modules.into_values() {
if module.module_id != module_id {
continue;
}
for (idx, token) in module.tokens.iter().enumerate() {
token_types.insert(token.clone(), find_type_in_context(&module, idx));
}
return Ok(Some(CompileResult {
tokens: module.tokens,
types: token_types,
}));
}
return Ok(None);
}
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
let stdin = tokio::io::stdin(); let stdin = tokio::io::stdin();
@ -243,200 +262,7 @@ async fn main() {
let (service, socket) = LspService::new(|client| Backend { let (service, socket) = LspService::new(|client| Backend {
client, client,
ast: DashMap::new(), analysis: DashMap::new(),
tokens: DashMap::new(),
types: DashMap::new(),
}); });
Server::new(stdin, stdout, socket).serve(service).await; Server::new(stdin, stdout, socket).serve(service).await;
} }
/// Resolves the type of the token at `token_idx`, searching the module's
/// top-level items (imports, type definitions, functions).
///
/// Returns `None` when the token is inside an import (imports carry no type
/// information), or when no surrounding item yields a type for it.
pub fn find_type_in_context(module: &mir::Module, token_idx: usize) -> Option<TypeKind> {
    // Tokens that belong to an import statement have no type information.
    if module.imports.iter().any(|import| import.1.contains(token_idx)) {
        return None;
    }

    // The token may name a struct field; if so, report the field's declared type.
    for typedef in module.typedefs.iter().filter(|t| t.meta.contains(token_idx)) {
        match &typedef.kind {
            mir::TypeDefinitionKind::Struct(StructType(fields)) => {
                if let Some(field) = fields.iter().find(|f| f.2.contains(token_idx)) {
                    return Some(field.1.clone());
                }
            }
        }
    }

    // Otherwise find the function whose signature or body spans the token.
    for function in &module.functions {
        let function_span = function.signature() + function.block_meta();
        if !function_span.contains(token_idx) {
            continue;
        }
        // Parameter tokens resolve to the declared parameter type.
        if let Some(param) = function.parameters.iter().find(|p| p.meta.contains(token_idx)) {
            return Some(param.ty.clone());
        }
        // Only locally-defined functions have a body to descend into.
        return match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => {
                find_type_in_block(block, module.module_id, token_idx)
            }
            mir::FunctionDefinitionKind::Extern(_) | mir::FunctionDefinitionKind::Intrinsic(_) => None,
        };
    }

    None
}
/// Resolves the type of the token at `token_idx` within a block, descending
/// into its statements and, failing that, the optional trailing return
/// expression.
///
/// Returns `None` if the token is outside this block's source range or no
/// contained statement/expression produces a type for it.
pub fn find_type_in_block(block: &mir::Block, module_id: SourceModuleId, token_idx: usize) -> Option<TypeKind> {
    if !block.meta.contains(token_idx) {
        return None;
    }
    for statement in &block.statements {
        // Skip statements whose source range does not cover the token.
        if !statement.1.contains(token_idx) {
            continue;
        }
        match &statement.0 {
            mir::StmtKind::Let(named_variable_ref, _, expression) => {
                if named_variable_ref.2.contains(token_idx) {
                    // Token is the bound name itself: its type is whatever the
                    // initializer expression evaluates to.
                    return expression
                        .return_type(&Default::default(), module_id)
                        .ok()
                        .map(|(_, ty)| ty);
                } else {
                    // Token lies somewhere inside the initializer expression.
                    return find_type_in_expr(&expression, module_id, token_idx);
                }
            }
            mir::StmtKind::Set(lhs, rhs) => {
                // Check both sides of the assignment, left-hand side first.
                return find_type_in_expr(lhs, module_id, token_idx).or(find_type_in_expr(rhs, module_id, token_idx));
            }
            // Imports carry no type information.
            mir::StmtKind::Import(_) => {}
            mir::StmtKind::Expression(expression) => return find_type_in_expr(expression, module_id, token_idx),
            mir::StmtKind::While(WhileStatement { condition, block, .. }) => {
                return find_type_in_expr(condition, module_id, token_idx)
                    .or(find_type_in_block(block, module_id, token_idx));
            }
        }
    }
    // Fall back to the block's trailing return expression, if present.
    if let Some((_, Some(return_exp))) = &block.return_expression {
        if let Some(ty) = find_type_in_expr(return_exp, module_id, token_idx) {
            return Some(ty);
        }
    }
    None
}
/// Resolves the type of the token at `token_idx` within an expression tree.
///
/// Recurses into sub-expressions first; when the token belongs to this
/// expression but not to any child, the expression's own (already-inferred)
/// type is reported. Returns `None` if the token lies outside this
/// expression's source range.
pub fn find_type_in_expr(expr: &mir::Expression, module_id: SourceModuleId, token_idx: usize) -> Option<TypeKind> {
    if !expr.1.contains(token_idx) {
        return None;
    }
    match &expr.0 {
        mir::ExprKind::Variable(named_variable_ref) => Some(named_variable_ref.0.clone()),
        mir::ExprKind::Indexed(value, type_kind, index_expr) => Some(
            // Prefer a type found inside the indexed value or the index
            // expression; otherwise fall back to the element type.
            find_type_in_expr(&value, module_id, token_idx)
                .or(find_type_in_expr(&index_expr, module_id, token_idx))
                .unwrap_or(type_kind.clone()),
        ),
        mir::ExprKind::Accessed(expression, type_kind, _, meta) => {
            // `meta` covers the accessed member itself; the member's type
            // applies there, otherwise recurse into the base expression.
            if meta.contains(token_idx) {
                Some(type_kind.clone())
            } else {
                find_type_in_expr(&expression, module_id, token_idx)
            }
        }
        mir::ExprKind::Array(expressions) => {
            for expr in expressions {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            None
        }
        mir::ExprKind::Struct(name, items) => {
            for (_, expr, meta) in items {
                // Token on the field name: report the field value's type.
                if meta.contains(token_idx) {
                    return expr.return_type(&Default::default(), module_id).map(|(_, t)| t).ok();
                }
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            // Token on the struct expression itself: the custom type.
            Some(TypeKind::CustomType(mir::CustomTypeKey(name.clone(), module_id)))
        }
        mir::ExprKind::Literal(literal) => Some(literal.as_type()),
        mir::ExprKind::BinOp(binary_operator, lhs, rhs, type_kind) => {
            if let Some(ty) = find_type_in_expr(lhs, module_id, token_idx) {
                return Some(ty);
            }
            if let Some(ty) = find_type_in_expr(rhs, module_id, token_idx) {
                return Some(ty);
            }
            // Token on the operator: report the binop's result type.
            Some(type_kind.clone())
        }
        mir::ExprKind::FunctionCall(FunctionCall {
            return_type,
            parameters,
            ..
        }) => {
            for expr in parameters {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(return_type.clone())
        }
        mir::ExprKind::AssociatedFunctionCall(
            _,
            FunctionCall {
                return_type,
                parameters,
                ..
            },
        ) => {
            for expr in parameters {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(return_type.clone())
        }
        // Search condition, then-branch, then the optional else-branch in order.
        mir::ExprKind::If(IfExpression(cond, then_e, else_e)) => find_type_in_expr(&cond, module_id, token_idx)
            .or(find_type_in_expr(&then_e, module_id, token_idx))
            .or(else_e.clone().and_then(|e| find_type_in_expr(&e, module_id, token_idx))),
        mir::ExprKind::Block(block) => find_type_in_block(block, module_id, token_idx),
        mir::ExprKind::Borrow(expression, mutable) => {
            if let Some(ty) = find_type_in_expr(&expression, module_id, token_idx) {
                return Some(ty);
            }
            // Token on the borrow itself: wrap the inner type in a Borrow.
            if let Ok(inner) = expression.return_type(&Default::default(), module_id).map(|(_, ty)| ty) {
                Some(TypeKind::Borrow(Box::new(inner.clone()), *mutable))
            } else {
                None
            }
        }
        mir::ExprKind::Deref(expression) => {
            if let Some(ty) = find_type_in_expr(&expression, module_id, token_idx) {
                return Some(ty);
            }
            // Token on the deref itself: unwrap one layer of Borrow.
            if let Ok(TypeKind::Borrow(inner, _)) =
                expression.return_type(&Default::default(), module_id).map(|(_, ty)| ty)
            {
                Some(*inner.clone())
            } else {
                None
            }
        }
        mir::ExprKind::CastTo(expression, type_kind) => {
            // Inner expression wins; token on the cast reports the target type.
            Some(find_type_in_expr(&expression, module_id, token_idx).unwrap_or(type_kind.clone()))
        }
        mir::ExprKind::GlobalRef(_, type_kind) => Some(type_kind.clone()),
    }
}

View File

@ -0,0 +1,395 @@
{
"scopeName": "source.reid",
"patterns": [
{
"include": "#import"
},
{
"include": "#expression"
}
],
"repository": {
"import": {
"begin": "(import)\\s*",
"end": ";",
"beginCaptures": {
"1": {
"name": "keyword"
}
},
"endCaptures": {
"0": {
"name": "punctuation.semi.reid"
}
},
"patterns": [
{
"include": "#identifier"
},
{
"include": "#punctuation"
}
]
},
"punctuation": {
"patterns": [
{
"match": "::",
"name": "keyword.operator.namespace.reid"
},
{
"match": ";",
"name": "punctuation.semi.reid"
},
{
"match": "\\.",
"name": "punctuation.dot.reid"
},
{
"match": ",",
"name": "punctuation.comma.reid"
}
]
},
"expression": {
"patterns": [
{
"include": "#comment"
},
{
"include": "#fn-signature"
},
{
"include": "#common-type"
},
{
"include": "#binop-impl"
},
{
"include": "#type-impl"
},
{
"include": "#struct-definition"
},
{
"include": "#block"
},
{
"include": "#binop"
},
{
"include": "#namespace"
},
{
"include": "#cast"
},
{
"include": "#function-call"
},
{
"include": "#parenthesis"
},
{
"include": "#array"
},
{
"include": "#keywords"
},
{
"include": "#struct-expression"
},
{
"include": "#number-literal"
},
{
"include": "#string-literal"
},
{
"include": "#identifier"
},
{
"include": "#punctuation"
}
]
},
"comment": {
"match": "\\/\\/(.|\\/)*",
"name": "comment.line.double-slash.reid"
},
"fn-signature": {
"begin": "(fn)\\s*(\\w+)\\(",
"beginCaptures": {
"1": {
"name": "keyword.fn.reid"
},
"2": {
"name": "entity.name.function.reid"
}
},
"end": "\\)",
"patterns": [
{
"include": "#annotated-identifier"
},
{
"include": "#keywords"
},
{
"include": "#binop"
}
],
"endCaptures": {
"2": {
"name": "entity.name.type.reid"
}
}
},
"type-impl": {
"begin": "(impl)\\s* (\\w+)\\s* \\{\n",
"end": "\\}",
"captures": {
"1": {
"name": "keyword.impl.reid"
},
"2": {
"name": "entity.name.type"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"binop-impl": {
"begin": "(impl)\\s+(binop)\\s+\\(((.*)\\s*:\\s*(.*))\\)(.*)\\(((.*)\\s*:\\s*(.*))\\)\\s*->\\s*(\\w+)\\s*\\{",
"end": "\\}",
"beginCaptures": {
"1": {
"name": "keyword.impl.reid"
},
"2": {
"name": "keyword.impl.reid"
},
"4": {
"name": "variable.parameter.binop.reid"
},
"5": {
"name": "entity.name.type.parameter.binop.reid"
},
"6": {
"name": "keyword.operator.math.reid"
},
"8": {
"name": "variable.parameter.binop.reid"
},
"9": {
"name": "entity.name.type.parameter.binop.reid"
},
"10": {
"name": "entity.name.type.return.binop.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"struct-definition": {
"begin": "(struct)\\s*(\\w+)\\s*\\{",
"end": "\\}",
"captures": {
"1": {
"name": "keyword.struct.reid"
},
"2": {
"name": "entity.name.type"
}
},
"patterns": [
{
"include": "#annotated-identifier"
}
]
},
"struct-expression": {
"begin": "([A-Z]\\w*)\\s*\\{",
"end": "\\}",
"captures": {
"1": {
"name": "entity.name.type.struct.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"number-literal": {
"patterns": [
{
"match": "0x[0-9a-fA-F]+(\\.[0-9a-fA-F]+)?",
"name": "constant.hexadecimal"
},
{
"match": "0o[0-7]+(\\.[0-7]+)?",
"name": "constant.octal"
},
{
"match": "0b[01]+(\\.[01]+)?",
"name": "constant.binary"
},
{
"match": "[0-9]+(\\.[0-9]+)?",
"name": "constant.numeric"
}
]
},
"string-literal": {
"begin": "\"",
"end": "\"",
"name": "string.quoted.double",
"patterns": [
{
"match": "\\\\.",
"name": "constant.character.escape"
}
]
},
"block": {
"begin": "\\{",
"end": "\\}",
"patterns": [
{
"include": "#expression"
}
]
},
"namespace": {
"match": "(\\w+)(\\:\\:)",
"captures": {
"1": {
"name": "entity.name.function.reid"
},
"2": {
"name": "keyword.operator.namespace.reid"
}
}
},
"cast": {
"match": "(as)\\s+(\\w+)",
"captures": {
"1": {
"name": "keyword.cast.reid"
},
"2": {
"name": "entity.name.type.reid"
}
}
},
"function-call": {
"begin": "(\\w+)?\\(",
"end": "\\)",
"beginCaptures": {
"1": {
"name": "entity.name.function.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"parenthesis": {
"begin": "\\(",
"end": "\\)",
"beginCaptures": {
"0": {
"name": "keyword.operator.parenthesis.reid"
}
},
"endCaptures": {
"0": {
"name": "keyword.operator.parenthesis.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"annotated-identifier": {
"begin": "(\\w+)\\:",
"end": ",",
"beginCaptures": {
"1": {
"name": "variable.language.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"identifier": {
"patterns": [
{
"match": "[A-Z]\\w*",
"name": "entity.name.type.reid"
},
{
"match": "\\w+",
"name": "variable.language.reid"
}
]
},
"keywords": {
"patterns": [
{
"match": "let|mut|pub|extern",
"name": "storage.type.reid"
},
{
"match": "if|return",
"name": "keyword.control"
},
{
"match": "self",
"name": "variable.language.self.reid"
}
]
},
"binop": {
"match": "\\<\\=|\\>\\=|\\=\\=|\\<|\\>|\\*|\\+|\\-|\\^|\\&\\&|\\&",
"name": "keyword.operator.math.reid"
},
"array": {
"begin": "\\[",
"end": "\\]",
"beginCaptures": {
"0": {
"name": "entity.name.type.array.reid"
}
},
"endCaptures": {
"0": {
"name": "entity.name.type.array.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"common-type": {
"match": "u8|u16|u32|u64|u128|i8|i16|i32|i64|i128|bool",
"name": "entity.name.type.common.reid"
}
}
}

View File

@ -0,0 +1,232 @@
scopeName: source.reid
patterns:
- include: "#import"
- include: "#expression"
repository:
# function-definition:
# begin: "(fn)\\s*(\\w+)\\(((\\w+)\\s*\\:\\s*(\\w+),?)*\\)\\s*->\\s*(\\w+)\\s*\\{"
# end: "\\}"
# beginCaptures:
# 1:
# name: "keyword.other"
# 2:
# name: "entity.name.function"
# 4:
# name: "entity.name.parameter"
# 5:
# name: "entity.name.type"
# 6:
# name: "entity.name.type"
# patterns:
# - include: "#type"
# - include: "#expression"
import:
begin: "(import)\\s*"
end: ";"
beginCaptures:
1:
name: keyword
endCaptures:
0:
name: punctuation.semi.reid
patterns:
- include: "#identifier"
- include: "#punctuation"
punctuation:
patterns:
- match: "::"
name: keyword.operator.namespace.reid
- match: ";"
name: punctuation.semi.reid
- match: "\\."
name: punctuation.dot.reid
- match: ","
name: punctuation.comma.reid
expression:
patterns:
- include: "#comment"
- include: "#fn-signature"
- include: "#common-type"
- include: "#binop-impl"
- include: "#type-impl"
- include: "#struct-definition"
- include: "#block"
- include: "#binop"
- include: "#namespace"
- include: "#cast"
- include: "#function-call"
- include: "#parenthesis"
- include: "#array"
- include: "#keywords"
- include: "#struct-expression"
- include: "#number-literal"
- include: "#string-literal"
- include: "#identifier"
- include: "#punctuation"
comment:
match: "\\/\\/(.|\\/)*"
name: comment.line.double-slash.reid
fn-signature:
begin: "(fn)\\s*(\\w+)\\("
beginCaptures:
1:
name: keyword.fn.reid
2:
name: entity.name.function.reid
end: "\\)"
patterns:
- include: "#annotated-identifier"
- include: "#keywords"
- include: "#binop"
endCaptures:
2:
name: entity.name.type.reid
type-impl:
begin: >
(impl)\s*
(\w+)\s*
\{
end: "\\}"
captures:
1:
name: keyword.impl.reid
2:
name: entity.name.type
patterns:
- include: "#expression"
binop-impl:
begin: "(impl)\\s+(binop)\\s+\\(((.*)\\s*:\\s*(.*))\\)(.*)\\(((.*)\\s*:\\s*(.*))\\)\\s*->\\s*(\\w+)\\s*\\{"
end: "\\}"
beginCaptures:
1:
name: keyword.impl.reid
2:
name: keyword.impl.reid
4:
name: variable.parameter.binop.reid
5:
name: entity.name.type.parameter.binop.reid
6:
name: keyword.operator.math.reid
8:
name: variable.parameter.binop.reid
9:
name: entity.name.type.parameter.binop.reid
10:
name: entity.name.type.return.binop.reid
patterns:
- include: "#expression"
struct-definition:
begin: "(struct)\\s*(\\w+)\\s*\\{"
end: "\\}"
captures:
1:
name: keyword.struct.reid
2:
name: entity.name.type
patterns:
- include: "#annotated-identifier"
struct-expression:
begin: "([A-Z]\\w*)\\s*\\{"
end: "\\}"
captures:
1:
name: entity.name.type.struct.reid
patterns:
- include: "#expression"
number-literal:
patterns:
- match: "0x[0-9a-fA-F]+(\\.[0-9a-fA-F]+)?"
name: "constant.hexadecimal"
- match: "0o[0-7]+(\\.[0-7]+)?"
name: "constant.octal"
- match: "0b[01]+(\\.[01]+)?"
name: "constant.binary"
- match: "[0-9]+(\\.[0-9]+)?"
name: "constant.numeric"
string-literal:
begin: '"'
end: '"'
name: string.quoted.double
patterns:
- match: "\\\\."
name: constant.character.escape
block:
begin: "\\{"
end: "\\}"
patterns:
- include: "#expression"
namespace:
match: "(\\w+)(\\:\\:)"
captures:
1:
name: entity.name.function.reid
2:
name: keyword.operator.namespace.reid
cast:
match: "(as)\\s+(\\w+)"
captures:
1:
name: keyword.cast.reid
2:
name: entity.name.type.reid
function-call:
begin: "(\\w+)?\\("
end: "\\)"
beginCaptures:
1:
name: entity.name.function.reid
patterns:
- include: "#expression"
parenthesis:
begin: "\\("
end: "\\)"
beginCaptures:
0:
name: keyword.operator.parenthesis.reid
endCaptures:
0:
name: keyword.operator.parenthesis.reid
patterns:
- include: "#expression"
annotated-identifier:
begin: "(\\w+)\\:"
end: ","
beginCaptures:
1:
name: variable.language.reid
patterns:
- include: "#expression"
identifier:
patterns:
- match: "[A-Z]\\w*"
name: entity.name.type.reid
- match: "\\w+"
name: variable.language.reid
keywords:
patterns:
- match: "let|mut|pub|extern"
name: "storage.type.reid"
- match: "if|return"
name: "keyword.control"
- match: "self"
name: "variable.language.self.reid"
binop:
match: "\\<\\=|\\>\\=|\\=\\=|\\<|\\>|\\*|\\+|\\-|\\^|\\&\\&|\\&"
name: keyword.operator.math.reid
array:
begin: "\\["
end: "\\]"
beginCaptures:
0:
name: entity.name.type.array.reid
endCaptures:
0:
name: entity.name.type.array.reid
patterns:
- include: "#expression"
common-type:
match: "u8|u16|u32|u64|u128|i8|i16|i32|i64|i128|bool"
name: entity.name.type.common.reid

View File

@ -1,6 +1,6 @@
[package] [package]
name = "reid" name = "reid"
version = "1.0.0-beta.2" version = "1.0.0-beta.3"
edition = "2021" edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -16,6 +16,6 @@ context_debug = []
[dependencies] [dependencies]
## Make it easier to generate errors ## Make it easier to generate errors
thiserror = "1.0.44" thiserror = "1.0.44"
reid-lib = { path = "../reid-llvm-lib", version = "1.0.0-beta.1", registry="gitea-teascade" } reid-lib = { path = "../reid-llvm-lib", version = "1.0.0-beta.3", registry="gitea-teascade" }
colored = {version = "3.0.0", optional = true} colored = {version = "3.0.0", optional = true}

View File

@ -184,7 +184,7 @@ pub struct LetStatement {
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ImportStatement(pub Vec<String>, pub TokenRange); pub struct ImportStatement(pub Vec<(String, TokenRange)>, pub TokenRange);
#[derive(Debug)] #[derive(Debug)]
pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub TokenRange); pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub TokenRange);

View File

@ -175,7 +175,36 @@ impl Parse for AssociatedFunctionCall {
let ty = stream.parse()?; let ty = stream.parse()?;
stream.expect(Token::Colon)?; stream.expect(Token::Colon)?;
stream.expect(Token::Colon)?; stream.expect(Token::Colon)?;
Ok(AssociatedFunctionCall(ty, stream.parse()?)) match stream.parse() {
Ok(fn_call) => Ok(AssociatedFunctionCall(ty, fn_call)),
_ => {
if let Some(Token::Identifier(fn_name)) = stream.peek() {
stream.next();
stream.expected_err_nonfatal("associated function call");
Ok(AssociatedFunctionCall(
ty,
FunctionCallExpression {
name: fn_name,
params: Vec::new(),
range: stream.get_range_prev_single().unwrap(),
is_macro: false,
},
))
} else {
stream.expected_err_nonfatal("associated function name");
Ok(AssociatedFunctionCall(
ty,
FunctionCallExpression {
name: String::new(),
params: Vec::new(),
range: stream.get_range_prev_single().unwrap(),
is_macro: false,
},
))
}
}
}
} }
} }
@ -610,7 +639,7 @@ impl Parse for LetStatement {
stream.expect(Token::Equals)?; stream.expect(Token::Equals)?;
let expression = stream.parse()?; let expression = stream.parse()?;
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
Ok(LetStatement { Ok(LetStatement {
name: variable, name: variable,
ty, ty,
@ -631,19 +660,21 @@ impl Parse for ImportStatement {
let mut import_list = Vec::new(); let mut import_list = Vec::new();
if let Some(Token::Identifier(name)) = stream.next() { if let Some(Token::Identifier(name)) = stream.next() {
import_list.push(name); import_list.push((name, stream.get_range_prev_single().unwrap()));
while stream.expect(Token::Colon).is_ok() && stream.expect(Token::Colon).is_ok() { while stream.expect(Token::Colon).is_ok() && stream.expect(Token::Colon).is_ok() {
if let Some(Token::Identifier(name)) = stream.next() { if let Some(Token::Identifier(name)) = stream.peek() {
import_list.push(name); stream.next(); // Consume identifier
import_list.push((name, stream.get_range_prev_single().unwrap()));
} else { } else {
Err(stream.expected_err("identifier")?)? stream.expected_err_nonfatal("identifier");
break;
} }
} }
} else { } else {
Err(stream.expected_err("identifier")?)? Err(stream.expected_err("identifier")?)?
} }
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
Ok(ImportStatement(import_list, stream.get_range().unwrap())) Ok(ImportStatement(import_list, stream.get_range().unwrap()))
} }
@ -788,7 +819,7 @@ impl Parse for Block {
// if semicolon is missing. // if semicolon is missing.
if !matches!(e, Expression(ExpressionKind::IfExpr(_), _)) { if !matches!(e, Expression(ExpressionKind::IfExpr(_), _)) {
// In theory could ignore the missing semicolon.. // In theory could ignore the missing semicolon..
return Err(stream.expected_err("semicolon to complete statement")?); stream.expected_err_nonfatal("semicolon to complete statement");
} }
statements.push(BlockLevelStatement::Expression(e)); statements.push(BlockLevelStatement::Expression(e));
@ -906,7 +937,8 @@ pub enum DotIndexKind {
impl Parse for DotIndexKind { impl Parse for DotIndexKind {
fn parse(mut stream: TokenStream) -> Result<Self, Error> { fn parse(mut stream: TokenStream) -> Result<Self, Error> {
stream.expect(Token::Dot)?; stream.expect(Token::Dot)?;
if let Some(Token::Identifier(name)) = stream.next() { if let Some(Token::Identifier(name)) = stream.peek() {
stream.next(); // Consume identifer
if let Ok(args) = stream.parse::<FunctionArgs>() { if let Ok(args) = stream.parse::<FunctionArgs>() {
Ok(Self::FunctionCall(FunctionCallExpression { Ok(Self::FunctionCall(FunctionCallExpression {
name, name,
@ -915,10 +947,18 @@ impl Parse for DotIndexKind {
is_macro: false, is_macro: false,
})) }))
} else { } else {
Ok(Self::StructValueIndex(name, stream.get_range().unwrap())) Ok(Self::StructValueIndex(name, stream.get_range_prev().unwrap()))
} }
} else { } else {
return Err(stream.expected_err("struct index (number)")?); if stream.next_is_whitespace() {
stream.expecting_err_nonfatal("struct index");
Ok(Self::StructValueIndex(
String::new(),
stream.get_range_prev_single().unwrap(),
))
} else {
Err(stream.expecting_err("struct index")?)
}
} }
} }
} }
@ -932,7 +972,7 @@ impl Parse for BlockLevelStatement {
Some(Token::ReturnKeyword) => { Some(Token::ReturnKeyword) => {
stream.next(); stream.next();
let exp = stream.parse().ok(); let exp = stream.parse().ok();
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
Stmt::Return(ReturnType::Hard, exp) Stmt::Return(ReturnType::Hard, exp)
} }
Some(Token::For) => { Some(Token::For) => {
@ -997,7 +1037,7 @@ impl Parse for SetStatement {
let var_ref = stream.parse()?; let var_ref = stream.parse()?;
stream.expect(Token::Equals)?; stream.expect(Token::Equals)?;
let expr = stream.parse()?; let expr = stream.parse()?;
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
Ok(SetStatement(var_ref, expr, stream.get_range().unwrap())) Ok(SetStatement(var_ref, expr, stream.get_range().unwrap()))
} }
} }
@ -1038,7 +1078,7 @@ impl Parse for TopLevelStatement {
stream.next(); // Consume Extern stream.next(); // Consume Extern
stream.expect(Token::FnKeyword)?; stream.expect(Token::FnKeyword)?;
let extern_fn = Stmt::ExternFunction(stream.parse()?); let extern_fn = Stmt::ExternFunction(stream.parse()?);
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
extern_fn extern_fn
} }
Some(Token::FnKeyword) | Some(Token::PubKeyword) => Stmt::FunctionDefinition(stream.parse()?), Some(Token::FnKeyword) | Some(Token::PubKeyword) => Stmt::FunctionDefinition(stream.parse()?),

View File

@ -30,7 +30,14 @@ impl ast::Module {
for stmt in &self.top_level_statements { for stmt in &self.top_level_statements {
match stmt { match stmt {
Import(import) => { Import(import) => {
imports.push(mir::Import(import.0.clone(), import.1.as_meta(module_id))); imports.push(mir::Import(
import
.0
.iter()
.map(|(s, range)| (s.clone(), range.as_meta(module_id)))
.collect(),
import.1.as_meta(module_id),
));
} }
FunctionDefinition(function_def) => functions.push(function_def.into_mir(module_id)), FunctionDefinition(function_def) => functions.push(function_def.into_mir(module_id)),
ExternFunction(signature) => { ExternFunction(signature) => {
@ -228,33 +235,34 @@ impl ast::Block {
StmtKind::Let(counter_var.clone(), true, start.process(module_id)), StmtKind::Let(counter_var.clone(), true, start.process(module_id)),
counter_range.as_meta(module_id), counter_range.as_meta(module_id),
); );
let statement_range = counter_range.clone() + start.1 + end.1 + block.2;
let set_new = mir::Statement( let set_new = mir::Statement(
StmtKind::Set( StmtKind::Set(
mir::Expression( mir::Expression(
mir::ExprKind::Variable(counter_var.clone()), mir::ExprKind::Variable(counter_var.clone()),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
), ),
mir::Expression( mir::Expression(
mir::ExprKind::BinOp( mir::ExprKind::BinOp(
mir::BinaryOperator::Add, mir::BinaryOperator::Add,
Box::new(mir::Expression( Box::new(mir::Expression(
mir::ExprKind::Variable(counter_var.clone()), mir::ExprKind::Variable(counter_var.clone()),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
)), )),
Box::new(mir::Expression( Box::new(mir::Expression(
mir::ExprKind::Literal(mir::Literal::Vague(mir::VagueLiteral::Number(1))), mir::ExprKind::Literal(mir::Literal::Vague(mir::VagueLiteral::Number(1))),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
)), )),
mir::TypeKind::Vague(mir::VagueType::Unknown), mir::TypeKind::Vague(mir::VagueType::Unknown),
), ),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
), ),
), ),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
); );
let mut block = block.into_mir(module_id); let mut mir_block = block.into_mir(module_id);
block.statements.push(set_new); mir_block.statements.push(set_new);
let while_statement = mir::Statement( let while_statement = mir::Statement(
StmtKind::While(WhileStatement { StmtKind::While(WhileStatement {
condition: mir::Expression( condition: mir::Expression(
@ -262,28 +270,28 @@ impl ast::Block {
mir::BinaryOperator::Cmp(mir::CmpOperator::LT), mir::BinaryOperator::Cmp(mir::CmpOperator::LT),
Box::new(mir::Expression( Box::new(mir::Expression(
mir::ExprKind::Variable(counter_var), mir::ExprKind::Variable(counter_var),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
)), )),
Box::new(end.process(module_id)), Box::new(end.process(module_id)),
mir::TypeKind::Vague(mir::VagueType::Unknown), mir::TypeKind::Vague(mir::VagueType::Unknown),
), ),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
), ),
block, block: mir_block.clone(),
meta: self.2.as_meta(module_id), meta: (start.1 + end.1 + block.2).as_meta(module_id),
}), }),
self.2.as_meta(module_id), (start.1 + end.1 + block.2).as_meta(module_id),
); );
let inner_scope = StmtKind::Expression(mir::Expression( let inner_scope = StmtKind::Expression(mir::Expression(
mir::ExprKind::Block(mir::Block { mir::ExprKind::Block(mir::Block {
statements: vec![let_statement, while_statement], statements: vec![let_statement, while_statement],
return_expression: None, return_expression: None,
meta: counter_range.as_meta(module_id) + end.1.as_meta(module_id), meta: statement_range.as_meta(module_id),
}), }),
counter_range.as_meta(module_id) + end.1.as_meta(module_id), statement_range.as_meta(module_id),
)); ));
(inner_scope, self.2) (inner_scope, statement_range)
} }
ast::BlockLevelStatement::WhileLoop(expression, block) => ( ast::BlockLevelStatement::WhileLoop(expression, block) => (
StmtKind::While(WhileStatement { StmtKind::While(WhileStatement {
@ -291,7 +299,7 @@ impl ast::Block {
block: block.into_mir(module_id), block: block.into_mir(module_id),
meta: self.2.as_meta(module_id), meta: self.2.as_meta(module_id),
}), }),
self.2, expression.1 + block.2,
), ),
}; };

View File

@ -1,6 +1,8 @@
//! Contains relevant code for parsing tokens received from //! Contains relevant code for parsing tokens received from
//! Lexing/Tokenizing-stage. //! Lexing/Tokenizing-stage.
use std::{cell::RefCell, rc::Rc};
use crate::{ use crate::{
ast::parse::Parse, ast::parse::Parse,
lexer::{FullToken, Token}, lexer::{FullToken, Token},
@ -12,6 +14,7 @@ use crate::{
pub struct TokenStream<'a, 'b> { pub struct TokenStream<'a, 'b> {
ref_position: Option<&'b mut usize>, ref_position: Option<&'b mut usize>,
tokens: &'a [FullToken], tokens: &'a [FullToken],
errors: Rc<RefCell<Vec<Error>>>,
pub position: usize, pub position: usize,
} }
@ -20,6 +23,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
TokenStream { TokenStream {
ref_position: None, ref_position: None,
tokens, tokens,
errors: Rc::new(RefCell::new(Vec::new())),
position: 0, position: 0,
} }
} }
@ -38,6 +42,16 @@ impl<'a, 'b> TokenStream<'a, 'b> {
)) ))
} }
/// Records (without aborting the parse) an expected-error for the next
/// token in-line. Useful in conjunction with [`TokenStream::peek`]
pub fn expected_err_nonfatal<T: Into<String>>(&mut self, expected: T) {
    // `expected_err` carries the same error on either side; flatten it
    // and stash it in the shared non-fatal error list.
    let err = self.expected_err(expected).unwrap_or_else(|e| e);
    self.errors.borrow_mut().push(err);
}
/// Returns expected-error for the previous token that was already consumed. /// Returns expected-error for the previous token that was already consumed.
/// Useful in conjunction with [`TokenStream::next`] /// Useful in conjunction with [`TokenStream::next`]
pub fn expecting_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> { pub fn expecting_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> {
@ -50,6 +64,16 @@ impl<'a, 'b> TokenStream<'a, 'b> {
)) ))
} }
/// Records (without aborting the parse) an expected-error for the previous,
/// already-consumed token. Useful in conjunction with [`TokenStream::next`]
pub fn expecting_err_nonfatal<T: Into<String>>(&mut self, expected: T) {
    // Both the Ok and Err sides carry the constructed error; flatten it
    // and stash it in the shared non-fatal error list.
    let err = self.expecting_err(expected).unwrap_or_else(|e| e);
    self.errors.borrow_mut().push(err);
}
pub fn expect(&mut self, token: Token) -> Result<(), Error> { pub fn expect(&mut self, token: Token) -> Result<(), Error> {
if let (pos, Some(peeked)) = self.next_token(self.position) { if let (pos, Some(peeked)) = self.next_token(self.position) {
if token == peeked.token { if token == peeked.token {
@ -63,6 +87,21 @@ impl<'a, 'b> TokenStream<'a, 'b> {
} }
} }
/// Like `expect`, but a mismatch (or end of input) only records a
/// non-fatal error on the stream instead of failing the parse.
pub fn expect_nonfatal(&mut self, token: Token) -> Result<(), ()> {
    if let (pos, Some(peeked)) = self.next_token(self.position) {
        if token == peeked.token {
            // Matched: consume the token and succeed.
            self.position = pos + 1;
            return Ok(());
        }
    }
    // Wrong token or end of stream: record and let parsing continue.
    self.expecting_err_nonfatal(token);
    Err(())
}
pub fn next(&mut self) -> Option<Token> { pub fn next(&mut self) -> Option<Token> {
let (position, token) = self.next_token(self.position); let (position, token) = self.next_token(self.position);
self.position = position + 1; self.position = position + 1;
@ -147,6 +186,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
let clone = TokenStream { let clone = TokenStream {
ref_position: Some(&mut ref_pos), ref_position: Some(&mut ref_pos),
tokens: self.tokens, tokens: self.tokens,
errors: self.errors.clone(),
position, position,
}; };
@ -175,6 +215,14 @@ impl<'a, 'b> TokenStream<'a, 'b> {
}) })
} }
/// Gets range of the previous token only.
///
/// Returns `None` when this stream has no `ref_position` (i.e. it is not a
/// forked sub-stream) — presumably mirroring the other `get_range*`
/// helpers; confirm against `get_range_prev`.
pub fn get_range_prev_single(&self) -> Option<TokenRange> {
    self.ref_position.as_ref().map(|_| {
        // Compute the previous-token index once (the original called
        // `previous_token` twice and left the closure binding unused).
        let (prev, _) = self.previous_token(self.position);
        TokenRange { start: prev, end: prev }
    })
}
fn previous_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) { fn previous_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
from -= 1; from -= 1;
while let Some(token) = self.tokens.get(from) { while let Some(token) = self.tokens.get(from) {
@ -197,6 +245,22 @@ impl<'a, 'b> TokenStream<'a, 'b> {
} }
(from, self.tokens.get(from)) (from, self.tokens.get(from))
} }
pub fn errors(&self) -> Vec<Error> {
self.errors.borrow().clone().clone()
}
/// Returns whether the token at the current position is whitespace.
/// End-of-stream counts as whitespace.
pub fn next_is_whitespace(&self) -> bool {
    self.tokens
        .get(self.position)
        .map(|t| matches!(t.token, Token::Whitespace(_)))
        .unwrap_or(true)
}
} }
impl Drop for TokenStream<'_, '_> { impl Drop for TokenStream<'_, '_> {

View File

@ -1,3 +1,5 @@
use std::{collections::HashMap, hash::Hash};
use reid_lib::{builder::InstructionValue, CmpPredicate, ConstValueKind, Instr, Type}; use reid_lib::{builder::InstructionValue, CmpPredicate, ConstValueKind, Instr, Type};
use crate::{ use crate::{
@ -57,6 +59,15 @@ pub fn form_intrinsics() -> Vec<FunctionDefinition> {
intrinsics intrinsics
} }
/// Collects the intrinsic associated functions available for `ty`, keyed by
/// function name. Names with no intrinsic for this type map to `None`.
pub fn get_intrinsic_assoc_functions(ty: &TypeKind) -> HashMap<String, Option<FunctionDefinition>> {
    ["length", "sizeof", "malloc", "null"]
        .into_iter()
        .map(|name| (name.to_owned(), get_intrinsic_assoc_func(ty, name)))
        .collect()
}
pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDefinition> { pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDefinition> {
if let TypeKind::Array(_, len) = ty { if let TypeKind::Array(_, len) = ty {
match name { match name {
@ -74,7 +85,7 @@ pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDef
}], }],
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicConst(*len))), kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicConst(*len))),
source: None, source: None,
}) });
} }
_ => {} _ => {}
} }
@ -247,26 +258,17 @@ pub fn form_intrinsic_binops() -> Vec<BinopDefinition> {
scope.block.build(Instr::XOr(lhs, rhs)).unwrap() scope.block.build(Instr::XOr(lhs, rhs)).unwrap()
})); }));
if ty.signed() { if ty.signed() {
intrinsics.push(complex_binop_def( intrinsics.push(complex_binop_def(BitshiftRight, &ty, &ty, |scope, lhs, rhs| {
BitshiftRight, scope.block.build(Instr::ShiftRightArithmetic(lhs, rhs)).unwrap()
&ty, }));
&TypeKind::U64,
|scope, lhs, rhs| scope.block.build(Instr::ShiftRightArithmetic(lhs, rhs)).unwrap(),
));
} else { } else {
intrinsics.push(complex_binop_def( intrinsics.push(complex_binop_def(BitshiftRight, &ty, &ty, |scope, lhs, rhs| {
BitshiftRight, scope.block.build(Instr::ShiftRightLogical(lhs, rhs)).unwrap()
&ty, }));
&TypeKind::U64,
|scope, lhs, rhs| scope.block.build(Instr::ShiftRightLogical(lhs, rhs)).unwrap(),
));
} }
intrinsics.push(complex_binop_def( intrinsics.push(complex_binop_def(BitshiftLeft, &ty, &ty, |scope, lhs, rhs| {
BitshiftLeft, scope.block.build(Instr::ShiftLeft(lhs, rhs)).unwrap()
&ty, }));
&TypeKind::U64,
|scope, lhs, rhs| scope.block.build(Instr::ShiftLeft(lhs, rhs)).unwrap(),
));
} }
for ty in INTEGERS.iter().chain(&[TypeKind::Bool, TypeKind::Char]) { for ty in INTEGERS.iter().chain(&[TypeKind::Bool, TypeKind::Char]) {
intrinsics.push(boolean_binop_def(Cmp(CmpOperator::EQ), &ty, |scope, lhs, rhs| { intrinsics.push(boolean_binop_def(Cmp(CmpOperator::EQ), &ty, |scope, lhs, rhs| {
@ -386,7 +388,9 @@ impl IntrinsicFunction for IntrinsicSizeOf {
fn codegen<'ctx, 'a>(&self, scope: &mut Scope<'ctx, 'a>, _: &[StackValue]) -> Result<StackValue, ErrorKind> { fn codegen<'ctx, 'a>(&self, scope: &mut Scope<'ctx, 'a>, _: &[StackValue]) -> Result<StackValue, ErrorKind> {
let instr = scope let instr = scope
.block .block
.build(Instr::Constant(reid_lib::ConstValueKind::U64(self.0.size_of() / 8))) .build(Instr::Constant(reid_lib::ConstValueKind::U64(
self.0.size_of(&scope.type_map) / 8,
)))
.unwrap(); .unwrap();
Ok(StackValue(StackValueKind::Literal(instr), self.0.clone())) Ok(StackValue(StackValueKind::Literal(instr), self.0.clone()))
} }
@ -404,7 +408,9 @@ impl IntrinsicFunction for IntrinsicMalloc {
let sizeof = scope let sizeof = scope
.block .block
.build(Instr::Constant(ConstValueKind::U64(self.0.size_of() / 8))) .build(Instr::Constant(ConstValueKind::U64(
self.0.size_of(&scope.type_map) / 8,
)))
.unwrap(); .unwrap();
let bytes = scope.block.build(Instr::Mul(sizeof, amount.instr())).unwrap(); let bytes = scope.block.build(Instr::Mul(sizeof, amount.instr())).unwrap();
let instr = scope.block.build(Instr::FunctionCall(function, vec![bytes])).unwrap(); let instr = scope.block.build(Instr::FunctionCall(function, vec![bytes])).unwrap();

View File

@ -144,6 +144,7 @@ impl mir::Module {
let mut types = HashMap::new(); let mut types = HashMap::new();
let mut type_values = HashMap::new(); let mut type_values = HashMap::new();
let mut debug_types = HashMap::new(); let mut debug_types = HashMap::new();
let mut type_map = HashMap::new();
macro_rules! insert_debug { macro_rules! insert_debug {
($kind:expr) => { ($kind:expr) => {
@ -153,8 +154,7 @@ impl mir::Module {
&compile_unit, &compile_unit,
&debug, &debug,
&debug_types, &debug_types,
&type_values, &type_map,
&types,
self.module_id, self.module_id,
&self.tokens, &self.tokens,
&modules, &modules,
@ -182,6 +182,8 @@ impl mir::Module {
for typedef in typedefs { for typedef in typedefs {
let type_key = CustomTypeKey(typedef.name.clone(), typedef.source_module); let type_key = CustomTypeKey(typedef.name.clone(), typedef.source_module);
type_map.insert(type_key.clone(), typedef.clone());
let type_value = match &typedef.kind { let type_value = match &typedef.kind {
TypeDefinitionKind::Struct(StructType(fields)) => { TypeDefinitionKind::Struct(StructType(fields)) => {
module.custom_type(CustomTypeKind::NamedStruct(NamedStruct( module.custom_type(CustomTypeKind::NamedStruct(NamedStruct(
@ -198,6 +200,7 @@ impl mir::Module {
}; };
types.insert(type_value, typedef.clone()); types.insert(type_value, typedef.clone());
type_values.insert(type_key.clone(), type_value); type_values.insert(type_key.clone(), type_value);
insert_debug!(&TypeKind::CustomType(type_key.clone())); insert_debug!(&TypeKind::CustomType(type_key.clone()));
} }
@ -380,6 +383,7 @@ impl mir::Module {
functions: &functions, functions: &functions,
types: &types, types: &types,
type_values: &type_values, type_values: &type_values,
type_map: &type_map,
globals: &globals, globals: &globals,
stack_values: HashMap::new(), stack_values: HashMap::new(),
debug: Some(Debug { debug: Some(Debug {
@ -457,6 +461,7 @@ impl mir::Module {
functions: &functions, functions: &functions,
types: &types, types: &types,
type_values: &type_values, type_values: &type_values,
type_map: &type_map,
stack_values: HashMap::new(), stack_values: HashMap::new(),
debug: Some(Debug { debug: Some(Debug {
info: &debug, info: &debug,
@ -518,6 +523,7 @@ impl mir::Module {
functions: &functions, functions: &functions,
types: &types, types: &types,
type_values: &type_values, type_values: &type_values,
type_map: &type_map,
stack_values: HashMap::new(), stack_values: HashMap::new(),
debug: Some(Debug { debug: Some(Debug {
info: &debug, info: &debug,
@ -1311,45 +1317,45 @@ impl mir::Expression {
if val.1 == *type_kind { if val.1 == *type_kind {
Some(val) Some(val)
} else { } else {
match (&val.1, type_kind) { let (ty, other) = if !state.should_load {
(TypeKind::CodegenPtr(inner), TypeKind::UserPtr(ty2)) => match *inner.clone() { let TypeKind::CodegenPtr(inner) = &val.1 else {
TypeKind::UserPtr(_) => Some(StackValue( panic!();
};
(*inner.clone(), TypeKind::CodegenPtr(Box::new(type_kind.clone())))
} else {
(val.1.clone(), type_kind.clone())
};
match (&ty, type_kind) {
(TypeKind::UserPtr(_), TypeKind::UserPtr(_)) => Some(StackValue(
val.0.derive( val.0.derive(
scope scope
.block .block
.build(Instr::BitCast( .build(Instr::BitCast(val.instr(), other.get_type(scope.type_values)))
val.instr(),
Type::Ptr(Box::new(type_kind.get_type(scope.type_values))),
))
.unwrap(), .unwrap(),
), ),
TypeKind::CodegenPtr(Box::new(type_kind.clone())), other.clone(),
)), )),
TypeKind::Borrow(ty1, _) => match *ty1.clone() { (TypeKind::Borrow(ty1, _), TypeKind::UserPtr(ty2)) => {
TypeKind::Array(ty1, _) => { if let TypeKind::Array(ty1, _) = ty1.as_ref() {
if ty1 == *ty2 { if ty1 == ty2 {
Some(StackValue( Some(StackValue(
val.0.derive( val.0.derive(
scope scope
.block .block
.build(Instr::BitCast( .build(Instr::BitCast(val.instr(), other.get_type(scope.type_values)))
val.instr(),
Type::Ptr(Box::new(type_kind.get_type(scope.type_values))),
))
.unwrap(), .unwrap(),
), ),
TypeKind::CodegenPtr(Box::new(type_kind.clone())), other,
)) ))
} else { } else {
return Err(ErrorKind::Null); return Err(ErrorKind::Null).unwrap();
}
} else {
return Err(ErrorKind::Null).unwrap();
} }
} }
_ => return Err(ErrorKind::Null), (TypeKind::Char, TypeKind::U8)
},
_ => panic!(),
},
(TypeKind::UserPtr(_), TypeKind::UserPtr(_))
| (TypeKind::Char, TypeKind::U8)
| (TypeKind::U8, TypeKind::Char) | (TypeKind::U8, TypeKind::Char)
| (TypeKind::U8, TypeKind::I8) => Some(StackValue( | (TypeKind::U8, TypeKind::I8) => Some(StackValue(
val.0.derive( val.0.derive(
@ -1361,8 +1367,7 @@ impl mir::Expression {
type_kind.clone(), type_kind.clone(),
)), )),
_ => { _ => {
let cast_instr = val let cast_instr = ty
.1
.get_type(scope.type_values) .get_type(scope.type_values)
.cast_instruction(val.instr(), &type_kind.get_type(scope.type_values)) .cast_instruction(val.instr(), &type_kind.get_type(scope.type_values))
.unwrap(); .unwrap();
@ -1378,11 +1383,30 @@ impl mir::Expression {
mir::ExprKind::AssociatedFunctionCall(ty, call) => codegen_function_call(Some(ty), call, scope, state)?, mir::ExprKind::AssociatedFunctionCall(ty, call) => codegen_function_call(Some(ty), call, scope, state)?,
mir::ExprKind::GlobalRef(global_name, ty) => { mir::ExprKind::GlobalRef(global_name, ty) => {
let global_value = scope.globals.get(global_name).unwrap(); let global_value = scope.globals.get(global_name).unwrap();
let value = scope.block.build(Instr::GetGlobal(global_value.clone())).unwrap();
if !state.should_load {
let allocated = scope
.block
.build(Instr::Alloca(ty.get_type(scope.type_values)))
.unwrap();
scope
.block
.build(Instr::Store(allocated, value))
.unwrap()
.maybe_location(&mut scope.block, location.clone());
let a = Some(StackValue( let a = Some(StackValue(
StackValueKind::Literal(scope.block.build(Instr::GetGlobal(global_value.clone())).unwrap()), StackValueKind::Literal(allocated),
ty.clone(), TypeKind::CodegenPtr(Box::new(ty.clone())),
)); ));
a a
} else {
let a = Some(StackValue(StackValueKind::Literal(value), ty.clone()));
a
}
} }
}; };
if let Some(value) = &value { if let Some(value) = &value {

View File

@ -26,6 +26,7 @@ pub struct Scope<'ctx, 'scope> {
pub(super) block: Block<'ctx>, pub(super) block: Block<'ctx>,
pub(super) types: &'scope HashMap<TypeValue, TypeDefinition>, pub(super) types: &'scope HashMap<TypeValue, TypeDefinition>,
pub(super) type_values: &'scope HashMap<CustomTypeKey, TypeValue>, pub(super) type_values: &'scope HashMap<CustomTypeKey, TypeValue>,
pub(super) type_map: &'scope HashMap<CustomTypeKey, TypeDefinition>,
pub(super) assoc_functions: &'scope HashMap<AssociatedFunctionKey, ScopeFunctionKind<'ctx>>, pub(super) assoc_functions: &'scope HashMap<AssociatedFunctionKey, ScopeFunctionKind<'ctx>>,
pub(super) functions: &'scope HashMap<String, ScopeFunctionKind<'ctx>>, pub(super) functions: &'scope HashMap<String, ScopeFunctionKind<'ctx>>,
pub(super) binops: &'scope HashMap<BinopKey, StackBinopDefinition<'ctx>>, pub(super) binops: &'scope HashMap<BinopKey, StackBinopDefinition<'ctx>>,
@ -49,6 +50,7 @@ impl<'ctx, 'a> Scope<'ctx, 'a> {
functions: self.functions, functions: self.functions,
types: self.types, types: self.types,
type_values: self.type_values, type_values: self.type_values,
type_map: self.type_map,
stack_values: self.stack_values.clone(), stack_values: self.stack_values.clone(),
debug: self.debug.clone(), debug: self.debug.clone(),
allocator: self.allocator.clone(), allocator: self.allocator.clone(),

View File

@ -109,8 +109,7 @@ impl TypeKind {
&debug.scope, &debug.scope,
debug.info, debug.info,
debug.types, debug.types,
scope.type_values, scope.type_map,
scope.types,
scope.module_id, scope.module_id,
scope.tokens, scope.tokens,
scope.modules, scope.modules,
@ -122,8 +121,7 @@ impl TypeKind {
scope: &DebugScopeValue, scope: &DebugScopeValue,
debug_info: &DebugInformation, debug_info: &DebugInformation,
debug_types: &HashMap<TypeKind, DebugTypeValue>, debug_types: &HashMap<TypeKind, DebugTypeValue>,
type_values: &HashMap<CustomTypeKey, TypeValue>, type_map: &HashMap<CustomTypeKey, TypeDefinition>,
types: &HashMap<TypeValue, TypeDefinition>,
local_mod: SourceModuleId, local_mod: SourceModuleId,
tokens: &Vec<FullToken>, tokens: &Vec<FullToken>,
modules: &HashMap<SourceModuleId, ModuleCodegen>, modules: &HashMap<SourceModuleId, ModuleCodegen>,
@ -142,13 +140,12 @@ impl TypeKind {
scope, scope,
debug_info, debug_info,
debug_types, debug_types,
type_values, type_map,
types,
local_mod, local_mod,
tokens, tokens,
modules, modules,
), ),
size_bits: self.size_of(), size_bits: self.size_of(type_map),
}) })
} }
TypeKind::Array(elem_ty, len) => { TypeKind::Array(elem_ty, len) => {
@ -156,21 +153,20 @@ impl TypeKind {
scope, scope,
debug_info, debug_info,
debug_types, debug_types,
type_values, type_map,
types,
local_mod, local_mod,
tokens, tokens,
modules, modules,
); );
DebugTypeData::Array(DebugArrayType { DebugTypeData::Array(DebugArrayType {
size_bits: self.size_of(), size_bits: self.size_of(type_map),
align_bits: self.alignment(), align_bits: self.alignment(),
element_type: elem_ty, element_type: elem_ty,
length: *len, length: *len,
}) })
} }
TypeKind::CustomType(key) => { TypeKind::CustomType(key) => {
let typedef = types.get(type_values.get(key).unwrap()).unwrap(); let typedef = type_map.get(key).unwrap();
match &typedef.kind { match &typedef.kind {
TypeDefinitionKind::Struct(struct_type) => { TypeDefinitionKind::Struct(struct_type) => {
let mut fields = Vec::new(); let mut fields = Vec::new();
@ -186,21 +182,20 @@ impl TypeKind {
name: field.0.clone(), name: field.0.clone(),
scope: scope.clone(), scope: scope.clone(),
pos: location.map(|l| l.pos), pos: location.map(|l| l.pos),
size_bits: field.1.size_of(), size_bits: field.1.size_of(type_map),
offset: size_bits, offset: size_bits,
flags: DwarfFlags, flags: DwarfFlags,
ty: field.1.get_debug_type_hard( ty: field.1.get_debug_type_hard(
scope, scope,
debug_info, debug_info,
debug_types, debug_types,
type_values, type_map,
types,
local_mod, local_mod,
tokens, tokens,
modules, modules,
), ),
}); });
size_bits += field.1.size_of(); size_bits += field.1.size_of(type_map);
} }
{ {
let location = if typedef.source_module != local_mod { let location = if typedef.source_module != local_mod {
@ -222,7 +217,7 @@ impl TypeKind {
} }
_ => DebugTypeData::Basic(DebugBasicType { _ => DebugTypeData::Basic(DebugBasicType {
name, name,
size_bits: self.size_of(), size_bits: self.size_of(type_map),
encoding: match self { encoding: match self {
TypeKind::Bool => DwarfEncoding::Boolean, TypeKind::Bool => DwarfEncoding::Boolean,
TypeKind::I8 => DwarfEncoding::SignedChar, TypeKind::I8 => DwarfEncoding::SignedChar,

View File

@ -181,6 +181,10 @@ impl ReidError {
pub fn from_kind(errors: Vec<ErrorKind>, map: ErrorModules) -> ReidError { pub fn from_kind(errors: Vec<ErrorKind>, map: ErrorModules) -> ReidError {
ReidError { map, errors } ReidError { map, errors }
} }
pub fn extend(&mut self, other: ReidError) {
self.errors.extend(other.errors);
}
} }
impl std::error::Error for ReidError {} impl std::error::Error for ReidError {}

View File

@ -73,7 +73,7 @@ use crate::{
}; };
pub mod ast; pub mod ast;
mod codegen; pub mod codegen;
pub mod error_raporting; pub mod error_raporting;
pub mod ld; pub mod ld;
pub mod mir; pub mod mir;
@ -105,7 +105,7 @@ pub fn compile_module<'map>(
map: &'map mut ErrorModules, map: &'map mut ErrorModules,
path: Option<PathBuf>, path: Option<PathBuf>,
is_main: bool, is_main: bool,
) -> Result<mir::Module, ReidError> { ) -> Result<Result<mir::Module, (ast::Module, ReidError)>, ReidError> {
let module = map.module(&module_id).cloned().unwrap(); let module = map.module(&module_id).cloned().unwrap();
let mut token_stream = TokenStream::from(&tokens); let mut token_stream = TokenStream::from(&tokens);
@ -117,6 +117,8 @@ pub fn compile_module<'map>(
statements.push(statement); statements.push(statement);
} }
let errors = token_stream.errors();
drop(token_stream); drop(token_stream);
let ast_module = ast::Module { let ast_module = ast::Module {
@ -127,11 +129,34 @@ pub fn compile_module<'map>(
is_main, is_main,
}; };
if errors.len() > 0 {
// dbg!(&ast_module);
return Ok(Err((
ast_module,
ReidError::from_kind(
errors
.into_iter()
.map(|e| {
error_raporting::ErrorKind::from(mir::pass::Error {
metadata: mir::Metadata {
source_module_id: module_id,
range: *e.get_range().unwrap_or(&Default::default()),
position: None,
},
kind: e,
})
})
.collect(),
map.clone(),
),
)));
}
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")] #[cfg(feature = "log_output")]
dbg!(&ast_module); dbg!(&ast_module);
Ok(ast_module.process(module_id)) Ok(Ok(ast_module.process(module_id)))
} }
pub fn perform_all_passes<'map>( pub fn perform_all_passes<'map>(
@ -293,7 +318,7 @@ pub fn compile_and_pass<'map>(
let name = path.file_name().unwrap().to_str().unwrap().to_owned(); let name = path.file_name().unwrap().to_str().unwrap().to_owned();
let (id, tokens) = parse_module(source, name, module_map)?; let (id, tokens) = parse_module(source, name, module_map)?;
let module = compile_module(id, tokens, module_map, Some(path.clone()), true)?; let module = compile_module(id, tokens, module_map, Some(path.clone()), true)?.map_err(|(_, e)| e)?;
let mut mir_context = mir::Context::from(vec![module], path.parent().unwrap().to_owned()); let mut mir_context = mir::Context::from(vec![module], path.parent().unwrap().to_owned());

View File

@ -84,7 +84,11 @@ impl Display for GlobalKind {
impl Display for Import { impl Display for Import {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "import {}", self.0.join("::")) write!(
f,
"import {}",
self.0.iter().map(|(s, _)| s.clone()).collect::<Vec<_>>().join("::")
)
} }
} }

View File

@ -1,3 +1,5 @@
use reid_lib::builder::TypeValue;
use crate::util::maybe; use crate::util::maybe;
use super::{typecheck::typerefs::TypeRefs, *}; use super::{typecheck::typerefs::TypeRefs, *};
@ -57,7 +59,7 @@ impl TypeKind {
} }
} }
pub fn size_of(&self) -> u64 { pub fn size_of(&self, map: &HashMap<CustomTypeKey, TypeDefinition>) -> u64 {
match self { match self {
TypeKind::Bool => 1, TypeKind::Bool => 1,
TypeKind::I8 => 8, TypeKind::I8 => 8,
@ -72,8 +74,16 @@ impl TypeKind {
TypeKind::U128 => 128, TypeKind::U128 => 128,
TypeKind::Void => 0, TypeKind::Void => 0,
TypeKind::Char => 8, TypeKind::Char => 8,
TypeKind::Array(type_kind, len) => type_kind.size_of() * (*len as u64), TypeKind::Array(type_kind, len) => type_kind.size_of(map) * (*len as u64),
TypeKind::CustomType(..) => 32, TypeKind::CustomType(key) => match &map.get(key).unwrap().kind {
TypeDefinitionKind::Struct(struct_type) => {
let mut size = 0;
for field in &struct_type.0 {
size += field.1.size_of(map)
}
size
}
},
TypeKind::CodegenPtr(_) => 64, TypeKind::CodegenPtr(_) => 64,
TypeKind::Vague(_) => panic!("Tried to sizeof a vague type!"), TypeKind::Vague(_) => panic!("Tried to sizeof a vague type!"),
TypeKind::Borrow(..) => 64, TypeKind::Borrow(..) => 64,

View File

@ -52,7 +52,7 @@ pub enum ErrorKind {
pub fn compile_std(module_map: &mut ErrorModules) -> Result<Module, ReidError> { pub fn compile_std(module_map: &mut ErrorModules) -> Result<Module, ReidError> {
let (id, tokens) = parse_module(STD_SOURCE, STD_NAME, module_map)?; let (id, tokens) = parse_module(STD_SOURCE, STD_NAME, module_map)?;
let module = compile_module(id, tokens, module_map, None, false)?; let module = compile_module(id, tokens, module_map, None, false)?.map_err(|(_, e)| e)?;
let module_id = module.module_id; let module_id = module.module_id;
let mut mir_context = super::Context::from(vec![module], Default::default()); let mut mir_context = super::Context::from(vec![module], Default::default());
@ -124,7 +124,9 @@ impl<'map> Pass for LinkerPass<'map> {
state.ok::<_, Infallible>(Err(ErrorKind::InnerModulesNotYetSupported(import.clone())), import.1); state.ok::<_, Infallible>(Err(ErrorKind::InnerModulesNotYetSupported(import.clone())), import.1);
} }
let module_name = unsafe { path.get_unchecked(0) }; let Some((module_name, _)) = path.get(0) else {
continue;
};
let mut imported = if let Some(mod_id) = module_ids.get(module_name) { let mut imported = if let Some(mod_id) = module_ids.get(module_name) {
modules.get(mod_id).unwrap() modules.get(mod_id).unwrap()
@ -156,6 +158,7 @@ impl<'map> Pass for LinkerPass<'map> {
}; };
match compile_module(id, tokens, &mut self.module_map, Some(file_path), false) { match compile_module(id, tokens, &mut self.module_map, Some(file_path), false) {
Ok(res) => match res {
Ok(imported_module) => { Ok(imported_module) => {
if imported_module.is_main { if imported_module.is_main {
state.ok::<_, Infallible>( state.ok::<_, Infallible>(
@ -171,6 +174,17 @@ impl<'map> Pass for LinkerPass<'map> {
modules_to_process.push(imported.clone()); modules_to_process.push(imported.clone());
imported imported
} }
Err((_, err)) => {
state.ok::<_, Infallible>(
Err(ErrorKind::ModuleCompilationError(
module_name.clone(),
format!("{}", err),
)),
import.1,
);
continue;
}
},
Err(err) => { Err(err) => {
state.ok::<_, Infallible>( state.ok::<_, Infallible>(
Err(ErrorKind::ModuleCompilationError( Err(ErrorKind::ModuleCompilationError(
@ -185,7 +199,9 @@ impl<'map> Pass for LinkerPass<'map> {
} }
.borrow_mut(); .borrow_mut();
let import_name = unsafe { path.get_unchecked(1) }; let Some((import_name, _)) = path.get(1) else {
continue;
};
let import_id = imported.module_id; let import_id = imported.module_id;
let mut imported_types = Vec::new(); let mut imported_types = Vec::new();

View File

@ -118,10 +118,13 @@ impl mir::Expression {
let mut globals = Vec::new(); let mut globals = Vec::new();
match &mut self.0 { match &mut self.0 {
mir::ExprKind::FunctionCall(function_call) => { mir::ExprKind::FunctionCall(function_call) => {
for param in &mut function_call.parameters {
globals.extend(param.gen_macros(data, state, map));
}
if function_call.is_macro { if function_call.is_macro {
if let Some(existing_macro) = data.macros.get(&function_call.name) { if let Some(existing_macro) = data.macros.get(&function_call.name) {
let mut literals = Vec::new(); let mut literals = Vec::new();
for param in &function_call.parameters { for param in &mut function_call.parameters {
match &param.0 { match &param.0 {
super::ExprKind::Literal(literal) => literals.push(literal.clone()), super::ExprKind::Literal(literal) => literals.push(literal.clone()),
_ => state.note_errors(&vec![ErrorKind::InvalidMacroArgs], param.1), _ => state.note_errors(&vec![ErrorKind::InvalidMacroArgs], param.1),

View File

@ -256,7 +256,7 @@ pub enum ReturnKind {
pub struct NamedVariableRef(pub TypeKind, pub String, pub Metadata); pub struct NamedVariableRef(pub TypeKind, pub String, pub Metadata);
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct Import(pub Vec<String>, pub Metadata); pub struct Import(pub Vec<(String, Metadata)>, pub Metadata);
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum ExprKind { pub enum ExprKind {

View File

@ -621,7 +621,7 @@ impl Expression {
// Update possibly resolved type // Update possibly resolved type
Ok(true_ty) Ok(true_ty)
} else { } else {
Err(ErrorKind::NoSuchField(field_name.clone())) Err(ErrorKind::NoSuchField(key.0.clone()))
} }
} else { } else {
Err(ErrorKind::TriedAccessingNonStruct(expr_ty)) Err(ErrorKind::TriedAccessingNonStruct(expr_ty))
@ -727,7 +727,10 @@ impl Expression {
type_kind.clone(), type_kind.clone(),
function_call.name.clone(), function_call.name.clone(),
)) ))
.ok_or(ErrorKind::FunctionNotDefined(function_call.name.clone())); .ok_or(ErrorKind::AssocFunctionNotDefined(
function_call.name.clone(),
type_kind.clone(),
));
if let Some(f) = state.ok(true_function, self.1) { if let Some(f) = state.ok(true_function, self.1) {
let param_len_given = function_call.parameters.len(); let param_len_given = function_call.parameters.len();

View File

@ -14,7 +14,7 @@ use crate::{
mir::{ mir::{
pass::{AssociatedFunctionKey, ScopeVariable}, pass::{AssociatedFunctionKey, ScopeVariable},
BinopDefinition, Block, CustomTypeKey, ExprKind, Expression, FunctionDefinition, FunctionDefinitionKind, BinopDefinition, Block, CustomTypeKey, ExprKind, Expression, FunctionDefinition, FunctionDefinitionKind,
IfExpression, Module, ReturnKind, StmtKind, TypeKind, WhileStatement, IfExpression, Module, ReturnKind, StmtKind, TypeKind, VagueType, WhileStatement,
}, },
util::try_all, util::try_all,
}; };
@ -546,10 +546,10 @@ impl Expression {
*type_kind = elem_ty.as_type().clone(); *type_kind = elem_ty.as_type().clone();
Ok(elem_ty) Ok(elem_ty)
} }
None => Err(ErrorKind::NoSuchField(field_name.clone())), None => Ok(type_refs.from_type(&TypeKind::Vague(VagueType::Unknown)).unwrap()),
} }
} }
_ => Err(ErrorKind::TriedAccessingNonStruct(kind)), _ => Ok(type_refs.from_type(&TypeKind::Vague(VagueType::Unknown)).unwrap()),
} }
} }
ExprKind::Struct(struct_name, fields) => { ExprKind::Struct(struct_name, fields) => {
@ -655,9 +655,13 @@ impl Expression {
.ok_or(ErrorKind::AssocFunctionNotDefined( .ok_or(ErrorKind::AssocFunctionNotDefined(
function_call.name.clone(), function_call.name.clone(),
type_kind.clone(), type_kind.clone(),
))? ))
.clone(); .clone();
let Ok(fn_call) = fn_call else {
return Ok(type_refs.from_type(&Vague(Unknown)).unwrap());
};
// Infer param expression types and narrow them to the // Infer param expression types and narrow them to the
// expected function parameters (or Unknown types if too // expected function parameters (or Unknown types if too
// many were provided) // many were provided)

View File

@ -97,6 +97,9 @@ pub struct TypeRefs {
/// Indirect ID-references, referring to hints-vec /// Indirect ID-references, referring to hints-vec
pub(super) type_refs: RefCell<Vec<TypeIdRef>>, pub(super) type_refs: RefCell<Vec<TypeIdRef>>,
pub(super) binop_types: BinopMap, pub(super) binop_types: BinopMap,
/// Used when the real typerefs are not available, and any TypeRefs need to
/// be resolved as Unknown.
pub unknown_typerefs: bool,
} }
impl std::fmt::Display for TypeRefs { impl std::fmt::Display for TypeRefs {
@ -122,6 +125,14 @@ impl TypeRefs {
hints: Default::default(), hints: Default::default(),
type_refs: Default::default(), type_refs: Default::default(),
binop_types: binops, binop_types: binops,
unknown_typerefs: false,
}
}
pub fn unknown() -> TypeRefs {
TypeRefs {
unknown_typerefs: true,
..Default::default()
} }
} }
@ -177,8 +188,12 @@ impl TypeRefs {
} }
pub fn retrieve_typeref(&self, idx: usize) -> Option<TypeRefKind> { pub fn retrieve_typeref(&self, idx: usize) -> Option<TypeRefKind> {
if !self.unknown_typerefs {
let inner_idx = unsafe { *self.recurse_type_ref(idx).borrow() }; let inner_idx = unsafe { *self.recurse_type_ref(idx).borrow() };
self.hints.borrow().get(inner_idx).cloned() self.hints.borrow().get(inner_idx).cloned()
} else {
Some(TypeRefKind::Direct(TypeKind::Vague(VagueType::Unknown)))
}
} }
pub fn retrieve_wide_type(&self, idx: usize, seen: &mut HashSet<usize>) -> Option<TypeKind> { pub fn retrieve_wide_type(&self, idx: usize, seen: &mut HashSet<usize>) -> Option<TypeKind> {

View File

@ -6,17 +6,17 @@ use reid::{
mir::{self}, mir::{self},
parse_module, perform_all_passes, parse_module, perform_all_passes,
}; };
use reid_lib::Context; use reid_lib::{compile::CompileOutput, Context};
use util::assert_err; use util::assert_err;
mod util; mod util;
fn test(source: &str, name: &str, expected_exit_code: Option<i32>) { fn test_compile(source: &str, name: &str) -> CompileOutput {
assert_err(assert_err(std::panic::catch_unwind(|| { assert_err(assert_err(std::panic::catch_unwind(|| {
let mut map = Default::default(); let mut map = Default::default();
let (id, tokens) = assert_err(parse_module(source, name, &mut map)); let (id, tokens) = assert_err(parse_module(source, name, &mut map));
let module = assert_err(compile_module(id, tokens, &mut map, None, true)); let module = assert_err(assert_err(compile_module(id, tokens, &mut map, None, true)).map_err(|(_, e)| e));
let mut mir_context = mir::Context::from(vec![module], Default::default()); let mut mir_context = mir::Context::from(vec![module], Default::default());
assert_err(perform_all_passes(&mut mir_context, &mut map)); assert_err(perform_all_passes(&mut mir_context, &mut map));
@ -24,7 +24,14 @@ fn test(source: &str, name: &str, expected_exit_code: Option<i32>) {
let codegen = assert_err(mir_context.codegen(&context)); let codegen = assert_err(mir_context.codegen(&context));
let output = codegen.compile(None, Vec::new()).output(); Ok::<_, ()>(codegen.compile(None, Vec::new()).output())
})))
}
fn test(source: &str, name: &str, expected_exit_code: Option<i32>) {
assert_err(assert_err(std::panic::catch_unwind(|| {
let output = test_compile(source, name);
let time = SystemTime::now(); let time = SystemTime::now();
let in_path = PathBuf::from(format!( let in_path = PathBuf::from(format!(
"/tmp/temp-{}.o", "/tmp/temp-{}.o",
@ -157,3 +164,8 @@ fn associated_functions() {
fn mutable_inner_functions() { fn mutable_inner_functions() {
test(include_str!("../../examples/mutable_inner.reid"), "test", Some(0)); test(include_str!("../../examples/mutable_inner.reid"), "test", Some(0));
} }
#[test]
fn cpu_raytracer_compiles() {
test_compile(include_str!("../../examples/cpu_raytracer.reid"), "test");
}