Compare commits

...

36 Commits
macros ... main

Author SHA1 Message Date
c7f1b81c9d Improve associated functions so you can now call them on numbers too 2025-08-03 01:33:52 +03:00
a51a2c8f56 Remove useless prints 2025-08-03 01:00:30 +03:00
101ee2d8e5 Account for intrinsic associated functions with autocomplete 2025-08-03 01:00:02 +03:00
a6844b919b Fix array_structs.reid 2025-08-03 00:16:47 +03:00
4ea0913842 Add autocomplete for associated functions and struct fields 2025-08-03 00:13:53 +03:00
bb9f69ee53 Add autocomplete for imports 2025-08-02 23:03:11 +03:00
97a5c3a65e Optimize LSP analysis a Lot 2025-08-02 21:47:20 +03:00
8595da0c30 Make LSP use a more general analysis structure 2025-08-02 21:11:33 +03:00
dae39bc9d2 Fix fibonacci.reid 2025-08-02 20:21:57 +03:00
658450993a Fix hover types for for-loops 2025-08-02 20:10:48 +03:00
3f6d26679d Update README.md, all TODOs done 2025-08-02 19:24:31 +03:00
16082752e2 Update language server client and configs 2025-08-02 19:19:29 +03:00
8a71ce3629 Update LSP client 2025-08-02 15:02:39 +03:00
81d418c6d8 Update version number 2025-08-02 14:36:56 +03:00
8d0e3d03d5 Improve syntax highlighting 2025-08-02 03:41:08 +03:00
34e31549b3 add some syntax highlighting 2025-08-02 03:09:21 +03:00
0ba25db4c8 Start adding syntax highlighting 2025-08-02 00:14:20 +03:00
314f44304a Update README.md 2025-08-01 23:59:05 +03:00
08f7725ce7 Compile cpu_raytracer example in e2e tests, but don't run it 2025-08-01 22:46:46 +03:00
f89b26bf74 Improve LSP hover typing 2025-08-01 22:41:46 +03:00
4fada0036c Fix debug info for structs 2025-07-31 23:25:46 +03:00
4f0ee72c83 Edit example a bit, fix macro generation in function parameters 2025-07-31 22:48:16 +03:00
deed96bbfd Fix bitwise operators requiring U64 for rhs 2025-07-31 22:17:58 +03:00
1e094eeea0 Allow wider expressions for when self is not taken as borrow 2025-07-29 23:55:31 +03:00
3adb745576 Fix struct recursion testing 2025-07-29 23:38:26 +03:00
8f7b785664 Fix two small bugs, add new example to test 2025-07-29 23:16:56 +03:00
c7aacfe756 Refactor code a little bit 2025-07-29 21:56:50 +03:00
b71c253942 Add types to hovers in LSP, fix around and add metas 2025-07-29 21:39:14 +03:00
7d3aaa143a Start adding type-information to tooltips 2025-07-29 20:44:15 +03:00
6619f1f0a9 Add simple error diagnostic from parser 2025-07-29 19:53:12 +03:00
bc59b6f575 Start adding LSP implementation 2025-07-29 17:48:45 +03:00
c262418f88 Add comments and whitespace to lexer 2025-07-29 16:41:07 +03:00
2dd3a5904b Add whitespace to lexer 2025-07-29 16:37:58 +03:00
ff1da716e9 Update README.md 2025-07-29 16:08:54 +03:00
7c6d634287 Update README.md 2025-07-29 16:04:26 +03:00
b0442e5685 Add documentation for include_bytes! 2025-07-29 16:02:54 +03:00
59 changed files with 3386 additions and 352 deletions

963
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,5 +1,6 @@
[workspace] [workspace]
members = [ members = [
"reid", "reid",
"reid-llvm-lib" "reid-llvm-lib",
] "reid-lsp"
]

View File

@ -71,17 +71,17 @@ Currently missing big features (TODOs) are:
Big features that I want later but are not necessary: Big features that I want later but are not necessary:
- ~~User-defined binary operations~~ (DONE) - ~~User-defined binary operations~~ (DONE)
- ~~Asymmetric binary operations (e.g. string + u32)~~ (DONE) - ~~Asymmetric binary operations (e.g. string + u32)~~ (DONE)
- Error handling - ~~Error handling~~ (Not Doing It)
- Lexing & parsing of whitespace and comments as well - ~~Lexing & parsing of whitespace and comments as well~~ (DONE)
- LSP implementation - ~~LSP implementation~~ (CRUCIAL FEATURES DONE)
Smaller features: Smaller features:
- ~~Hex-numbers~~ - ~~Hex-numbers~~ (DONE)
- ~~Bitwise operations~~ - ~~Bitwise operations~~ (DONE)
- ~~Easier way to initialize arrays with a single value~~ - ~~Easier way to initialize arrays with a single value~~ (DONE)
- ~~Void-returns (`return;` for void-returning functions)~~ - ~~Void-returns (`return;` for void-returning functions)~~ (DONE)
- ~~Only include standard library at all if it is imported~~ - ~~Only include standard library at all if it is imported~~ (DONE)
- Lexical scopes for Debug Information - ~~Lexical scopes for Debug Information~~ (DONE)
### Why "Reid" ### Why "Reid"
@ -157,10 +157,6 @@ cmake llvm -B build -DCMAKE_BUILD_TYPE=MinSizeRel -DLLVM_ENABLE_ASSERTIONS=ON -D
ninja -j23 ninja -j23
``` ```
*Also Note:* Building LLVM with `Ninja` was not successful for me, but this
method was. Ninja may be successful with you, to try it, add `-G Ninja` to the
`cmake`-command, and instead of `make` run `ninja install`.
### Building this crate itself ### Building this crate itself
Assuming `llvm-project` from the previous step was at Assuming `llvm-project` from the previous step was at
@ -170,6 +166,5 @@ Assuming `llvm-project` from the previous step was at
LLVM_SYS_201_PREFIX=/path/llvm-project/build cargo build LLVM_SYS_201_PREFIX=/path/llvm-project/build cargo build
``` ```
## In conclusion Alternatively assuming you have LLVM 20.1 or newer installed you may use omit
Good luck! It took me a good 10 hours to figure this out for myself, I sure hope the environment variable entirely and use dynamic linking instead
these instructions help both myself and someone else in the future!

View File

@ -261,6 +261,9 @@ calls, literals, or if-expressions. Types of supported expressions include:
*associated type* with given parameters. *associated type* with given parameters.
- **Accessing function calls**, a shorthand to call associated function calls - **Accessing function calls**, a shorthand to call associated function calls
which have `&self` or `&mut self` as their first parameter. which have `&self` or `&mut self` as their first parameter.
- **Macro invocations** for invoking **macros** which are evaluated at
compile-time rather than runtime. Currently it is not possible to define
your own macros, but there are some pre-defined in the intrinsics.
- **Block-expressions**, which can return a value to the higher-level expression - **Block-expressions**, which can return a value to the higher-level expression
if they have a statement with a soft-return. Otherwise they return void. if they have a statement with a soft-return. Otherwise they return void.
- **If-expressions**, which can execute one of two expressions depending on the - **If-expressions**, which can execute one of two expressions depending on the
@ -278,7 +281,7 @@ In formal grammar:
<indexing> | <accessing> | <indexing> | <accessing> |
<binary-exp> | <unary-exp> | <binary-exp> | <unary-exp> |
<function-call> | <accessing-function-call> | <assoc-function-call> <function-call> | <accessing-function-call> | <assoc-function-call>
<block> | <if-expr> | <cast> | <macro-invocation> | <block> | <if-expr> | <cast> |
( "(" <expression> ")" ) ( "(" <expression> ")" )
<variable> :: <ident> <variable> :: <ident>
@ -294,6 +297,7 @@ In formal grammar:
<function-call> :: <expression> "(" [ <expression> ( "," <expression> )* ] ")" <function-call> :: <expression> "(" [ <expression> ( "," <expression> )* ] ")"
<accessing-function-call> :: <accessing> "(" [ <expression> ( "," <expression> )* ] ")" <accessing-function-call> :: <accessing> "(" [ <expression> ( "," <expression> )* ] ")"
<assoc-function-call> :: <type> "::" <function-call> <assoc-function-call> :: <type> "::" <function-call>
<macro-invocation> :: <expression> "!(" [ <expression> ( "," <expression> )* ] ")"
<if-expr> :: "if" <expression> <expression> [ "else" <expression> ] <if-expr> :: "if" <expression> <expression> [ "else" <expression> ]
<cast> :: <expression> "as" <type> <cast> :: <expression> "as" <type>
``` ```
@ -312,6 +316,7 @@ test.first // Accessing
func(value, 14) // Function call func(value, 14) // Function call
Test::get_field(&test); // Associated function call Test::get_field(&test); // Associated function call
test.get_field(); // Same, but using a the dot-form shorthand test.get_field(); // Same, but using a the dot-form shorthand
include_bytes!("./test"); // Macro invocation
if varname {} else {} // If-expression if varname {} else {} // If-expression
value as u32 // cast value as u32 // cast
(value + 2) // Binop within parenthesis (value + 2) // Binop within parenthesis

View File

@ -17,6 +17,14 @@ Allocates `size` bytes and returns a pointer of `u8` of length `size`.
i32::malloc(40); // Reserves 40 bytes i32::malloc(40); // Reserves 40 bytes
``` ```
### Macro Intrinsics
#### `include_bytes!(path: *char) -> &[u8; _]`
Attempts to load file from `path` (relative to module) into memory and includes
it into the compiled binary directly. Returns a borrow to an array containing
bytes from the file. Array length varies depending on the file contents.
### Associated Intrinsics ### Associated Intrinsics
#### `<T>::sizeof() -> u64` #### `<T>::sizeof() -> u64`

View File

@ -20,7 +20,7 @@ fn main() -> u32 {
let boop: f32 = 3; let boop: f32 = 3;
let mut a = &mut value; let mut a = &mut value;
*a.second[2] = 5; *a.second[2] = 5;
return *a.second[2]; return *a.second[2];

View File

@ -9,4 +9,4 @@ fn fibonacci(value: u16) -> u16 {
return 1; return 1;
} }
fibonacci(value - 1) + fibonacci(value - 2) fibonacci(value - 1) + fibonacci(value - 2)
} }

View File

@ -4,5 +4,6 @@ import std::print;
fn main() -> u8 { fn main() -> u8 {
let bytes = include_bytes!("./macro_easy_file.txt"); let bytes = include_bytes!("./macro_easy_file.txt");
print(String::new() + bytes.length()); print(String::new() + bytes.length());
return (bytes as *u8)[0]; print(String::new() + (include_bytes!("./macro_easy_file.txt") as *u8)[1] as u64);
return (include_bytes!("./macro_easy_file.txt") as *u8)[0];
} }

View File

@ -0,0 +1,25 @@
struct Game {}
impl Game {
pub fn run_frame(&mut self) {}
}
struct Platform {
game: Game,
}
impl Platform {
pub fn new() -> Platform {
return Platform { game: Game {} };
}
pub fn run_frame(&mut self) {
*self.game.run_frame();
}
}
fn main() -> i32 {
let mut platform = Platform::new();
platform.run_frame();
return 0;
}

View File

@ -16,7 +16,7 @@ BINARY="$(echo $1 | cut -d'.' -f1)"".out"
echo $1 echo $1
cargo run --example cli $@ && \ cargo run --example cli $@ && \
./$BINARY ; echo "Return value: ""$?" ./$BINARY ; echo "Return value: ""$?"
## Command from: clang -v hello.o -o test ## Command from: clang -v hello.o -o test

View File

@ -1,6 +1,6 @@
[package] [package]
name = "reid-lib" name = "reid-lib"
version = "1.0.0-beta.2" version = "1.0.0-beta.3"
edition = "2024" edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@ -52,7 +52,5 @@ fn main() {
else_b.terminate(TerminatorKind::Ret(add)).unwrap(); else_b.terminate(TerminatorKind::Ret(add)).unwrap();
dbg!(&context);
context.compile(None, Vec::new()); context.compile(None, Vec::new());
} }

View File

@ -223,7 +223,6 @@ impl Builder {
unsafe { unsafe {
let mut modules = self.modules.borrow_mut(); let mut modules = self.modules.borrow_mut();
let module = modules.get_unchecked_mut(module.0); let module = modules.get_unchecked_mut(module.0);
dbg!(module.functions.iter().map(|f| f.data.name.clone()).collect::<Vec<_>>());
module.functions.iter().find(|f| f.data.name == *name).map(|f| f.value) module.functions.iter().find(|f| f.data.name == *name).map(|f| f.value)
} }
} }
@ -611,30 +610,9 @@ impl Builder {
Instr::PtrToInt(instr, ty) => instr.cast_to(self, &ty).map(|_| ()), Instr::PtrToInt(instr, ty) => instr.cast_to(self, &ty).map(|_| ()),
Instr::IntToPtr(instr, ty) => instr.cast_to(self, &ty).map(|_| ()), Instr::IntToPtr(instr, ty) => instr.cast_to(self, &ty).map(|_| ()),
Instr::BitCast(..) => Ok(()), Instr::BitCast(..) => Ok(()),
Instr::ShiftRightLogical(_, rhs) => { Instr::ShiftRightLogical(lhs, rhs) => match_types(&lhs, &rhs, &self).map(|_| ()),
let rhs_ty = rhs.get_type(&self)?; Instr::ShiftRightArithmetic(lhs, rhs) => match_types(&lhs, &rhs, &self).map(|_| ()),
if rhs_ty.category() == TypeCategory::UnsignedInteger { Instr::ShiftLeft(lhs, rhs) => match_types(&lhs, &rhs, &self).map(|_| ()),
Ok(())
} else {
Err(ErrorKind::Null)
}
}
Instr::ShiftRightArithmetic(_, rhs) => {
let rhs_ty = rhs.get_type(&self)?;
if rhs_ty.category() == TypeCategory::UnsignedInteger {
Ok(())
} else {
Err(ErrorKind::Null)
}
}
Instr::ShiftLeft(_, rhs) => {
let rhs_ty = rhs.get_type(&self)?;
if rhs_ty.category() == TypeCategory::UnsignedInteger {
Ok(())
} else {
Err(ErrorKind::Null)
}
}
Instr::GetGlobal(_) => Ok(()), Instr::GetGlobal(_) => Ok(()),
} }
} }

View File

@ -123,8 +123,6 @@ impl CompiledModule {
let llvm_ir = let llvm_ir =
from_cstring(LLVMPrintModuleToString(self.module_ref)).expect("Unable to print LLVM IR to string"); from_cstring(LLVMPrintModuleToString(self.module_ref)).expect("Unable to print LLVM IR to string");
println!("{}", llvm_ir);
let mut err = ErrorMessageHolder::null(); let mut err = ErrorMessageHolder::null();
LLVMVerifyModule( LLVMVerifyModule(
self.module_ref, self.module_ref,
@ -542,7 +540,7 @@ impl DebugTypeHolder {
field.pos.map(|p| p.line).unwrap_or(1), field.pos.map(|p| p.line).unwrap_or(1),
field.size_bits, field.size_bits,
0, 0,
1, field.offset,
field.flags.as_llvm(), field.flags.as_llvm(),
*debug.types.get(&field.ty).unwrap(), *debug.types.get(&field.ty).unwrap(),
) )

7
reid-lsp/.gitignore vendored Normal file
View File

@ -0,0 +1,7 @@
.vscode
node_modules
dist
package-lock.json
pnpm-lock.yaml
tsconfig.tsbuildinfo
*.vsix

1
reid-lsp/.npmrc Normal file
View File

@ -0,0 +1 @@
enable-pre-post-scripts = true

View File

@ -0,0 +1,5 @@
import { defineConfig } from '@vscode/test-cli';
// VS Code extension test-runner config: run the compiled test files from out/.
export default defineConfig({
	files: 'out/test/**/*.test.js',
});

15
reid-lsp/.vscodeignore Normal file
View File

@ -0,0 +1,15 @@
.vscode/**
.vscode-test/**
out/**
node_modules/**
src/**
client/**
.gitignore
.yarnrc
webpack.config.js
vsc-extension-quickstart.md
**/tsconfig.json
**/eslint.config.mjs
**/*.map
**/*.ts
**/.vscode-test.*

9
reid-lsp/CHANGELOG.md Normal file
View File

@ -0,0 +1,9 @@
# Change Log
All notable changes to the "reid-lsp" extension will be documented in this file.
Check [Keep a Changelog](http://keepachangelog.com/) for recommendations on how to structure this file.
## [Unreleased]
- Initial release

11
reid-lsp/Cargo.toml Normal file
View File

@ -0,0 +1,11 @@
[package]
name = "reid-language-server"
version = "0.1.0"
edition = "2024"
[dependencies]
socket = "0.0.7"
tokio = { version = "1.47.0", features = ["full"] }
tower-lsp = "0.20.0"
reid = { path = "../reid", version = "1.0.0-beta.3", registry="gitea-teascade", features=[] }
dashmap = "6.1.0"

1
reid-lsp/README.md Normal file
View File

@ -0,0 +1 @@
# Reid Language Server

View File

@ -0,0 +1,27 @@
{
"name": "reid-lsp",
"displayName": "Reid Language Server",
"description": "Language Server Extension for Reid",
"version": "0.0.1",
"engines": {
"vscode": "^1.102.0"
},
"main": "../out/extension.js",
"devDependencies": {
"@types/mocha": "^10.0.10",
"@types/node": "20.x",
"@types/vscode": "^1.102.0",
"@typescript-eslint/eslint-plugin": "^8.31.1",
"@typescript-eslint/parser": "^8.31.1",
"@vscode/test-cli": "^0.0.11",
"@vscode/test-electron": "^2.5.2",
"eslint": "^9.25.1",
"ts-loader": "^9.5.2",
"typescript": "^5.8.3",
"webpack": "^5.99.7",
"webpack-cli": "^6.0.1"
},
"dependencies": {
"vscode-languageclient": "^9.0.1"
}
}

View File

@ -0,0 +1,83 @@
/* --------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
* ------------------------------------------------------------------------------------------ */
import * as path from 'path';
import { workspace, ExtensionContext, window } from 'vscode';
import {
Executable,
LanguageClient,
LanguageClientOptions,
ServerOptions,
TransportKind
} from 'vscode-languageclient/node';
let client: LanguageClient;
/**
 * Extension entry point: resolves the path to the Reid language-server binary
 * (expanding `$ENV_VAR` placeholders), then starts a LanguageClient for
 * `.reid` files.
 *
 * Changes vs. previous revision: removed a dead, empty
 * `workspace.onDidOpenTextDocument` listener and a redundant debug log that
 * duplicated the "Loaded ... from" message.
 */
export function activate(context: ExtensionContext) {
	const configuration = workspace.getConfiguration('reid-language-server');
	// Resolution order: explicit env override, then user setting, then PATH lookup.
	let server_path: string = process.env.SERVER_PATH ?? configuration.get("language-server-path") ?? 'reid-language-server';
	// Expand `$VAR` placeholders. Each iteration replaces every occurrence of
	// one variable (with '' when unset), so the loop always makes progress.
	const regex = /\$(\w+)/;
	while (regex.test(server_path)) {
		let envVar = regex.exec(server_path)?.[1];
		const envVal = envVar ? process.env[envVar] : undefined;
		if (envVar === undefined || envVal === undefined) {
			console.error(`No such environment variables as ${envVar}`);
		}
		server_path = server_path.replaceAll(`$${envVar}`, envVal ?? '');
	}
	const run: Executable = {
		command: server_path,
		options: {
			env: {
				...process.env,
				RUST_LOG: "debug",
				RUST_BACKTRACE: 1,
			}
		}
	};
	// Same executable is used for normal runs and debug sessions.
	const serverOptions: ServerOptions = {
		run,
		debug: run,
	};
	// Options to control the language client
	const clientOptions: LanguageClientOptions = {
		// Register the server for Reid source files only.
		documentSelector: [{ scheme: 'file', language: 'reid' }],
		synchronize: {
			// Notify the server about file changes to '.clientrc files contained in the workspace
			fileEvents: workspace.createFileSystemWatcher('**/.clientrc')
		}
	};
	// Create the language client and start the client.
	client = new LanguageClient(
		'reid-language-server',
		'Reid Language Server',
		serverOptions,
		clientOptions
	);
	client.info(`Loaded Reid Language Server from ${server_path}`);
	// Start the client. This will also launch the server
	client.start();
}
/** Stops the language client on extension shutdown, if one was ever started. */
export function deactivate(): Thenable<void> | undefined {
	return client ? client.stop() : undefined;
}

View File

@ -0,0 +1,15 @@
import * as assert from 'assert';
// You can import and use all API from the 'vscode' module
// as well as import your extension to test it
import * as vscode from 'vscode';
// import * as myExtension from '../../extension';
// Smoke-test suite for the extension host; currently only verifies the
// harness itself runs (no extension behavior is exercised yet).
suite('Extension Test Suite', () => {
	vscode.window.showInformationMessage('Start all tests.');
	test('Sample test', () => {
		// indexOf returns -1 for absent elements.
		assert.strictEqual(-1, [1, 2, 3].indexOf(5));
		assert.strictEqual(-1, [1, 2, 3].indexOf(0));
	});
});

View File

@ -0,0 +1,24 @@
{
"compilerOptions": {
"module": "Node16",
"target": "ES2022",
"lib": [
"ES2022"
],
"sourceMap": true,
"rootDir": "src",
"outDir": "../dist",
"strict": true /* enable all strict type-checking options */
/* Additional Checks */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
},
"include": [
"src"
],
"exclude": [
"node_modules",
".vscode-test"
]
}

View File

@ -0,0 +1,28 @@
import typescriptEslint from "@typescript-eslint/eslint-plugin";
import tsParser from "@typescript-eslint/parser";
// ESLint flat config for the extension's TypeScript sources.
export default [{
    // First entry scopes all following config to TypeScript files.
    files: ["**/*.ts"],
}, {
    plugins: {
        "@typescript-eslint": typescriptEslint,
    },
    languageOptions: {
        parser: tsParser,
        ecmaVersion: 2022,
        sourceType: "module",
    },
    rules: {
        // Imported names must be camelCase or PascalCase (warning only).
        "@typescript-eslint/naming-convention": ["warn", {
            selector: "import",
            format: ["camelCase", "PascalCase"],
        }],
        curly: "warn",
        eqeqeq: "warn",
        "no-throw-literal": "warn",
        semi: "warn",
    },
}];

80
reid-lsp/package.json Normal file
View File

@ -0,0 +1,80 @@
{
"name": "reid-language-server",
"displayName": "Reid Language Server",
"description": "Language Server Extension for Reid",
"version": "0.1.0",
"repository": {
"url": "https://git.teascade.net/teascade"
},
"engines": {
"vscode": "^1.102.0"
},
"categories": [
"Other"
],
"activationEvents": [
"onLanguage:reid"
],
"main": "./dist/extension.js",
"contributes": {
"languages": [
{
"id": "reid",
"extensions": [
".reid"
],
"aliases": [
"Reid"
]
}
],
"configuration": {
"type": "object",
"title": "reid-language-server",
"properties": {
"reid-language-server.language-server-path": {
"type": "string",
"scope": "window",
"default": "$HOME/.cargo/bin/reid-lsp",
"description": "Path to the Reid Language Server executable"
}
}
},
"grammars": [
{
"language": "reid",
"scopeName": "source.reid",
"path": "./syntaxes/grammar.json"
}
]
},
"scripts": {
"vscode:prepublish": "pnpm run package",
"compile": "npx js-yaml syntaxes/grammar.yaml > syntaxes/grammar.json && webpack",
"watch": "webpack --watch",
"package": "webpack --mode production --devtool hidden-source-map",
"compile-tests": "tsc -p . --outDir out",
"watch-tests": "tsc -p . -w --outDir out",
"pretest": "pnpm run compile-tests && pnpm run compile && pnpm run lint",
"lint": "eslint src",
"test": "vscode-test"
},
"devDependencies": {
"@types/mocha": "^10.0.10",
"@types/node": "20.x",
"@types/vscode": "^1.102.0",
"@typescript-eslint/eslint-plugin": "^8.31.1",
"@typescript-eslint/parser": "^8.31.1",
"@vscode/test-cli": "^0.0.11",
"@vscode/test-electron": "^2.5.2",
"eslint": "^9.25.1",
"js-yaml": "^4.1.0",
"ts-loader": "^9.5.2",
"typescript": "^5.8.3",
"webpack": "^5.99.7",
"webpack-cli": "^6.0.1"
},
"dependencies": {
"vscode-languageclient": "^9.0.1"
}
}

383
reid-lsp/src/analysis.rs Normal file
View File

@ -0,0 +1,383 @@
use std::{collections::HashMap, fmt::format, path::PathBuf};
use reid::{
ast::{self, FunctionDefinition, lexer::FullToken, token_stream::TokenRange},
codegen::intrinsics::get_intrinsic_assoc_functions,
compile_module,
error_raporting::{ErrorModules, ReidError},
mir::{
self, Context, FunctionCall, FunctionParam, IfExpression, SourceModuleId, StructType, TypeKind, WhileStatement,
typecheck::typerefs::TypeRefs,
},
perform_all_passes,
};
/// Per-token semantic information, keyed by the token's index in the module's
/// token stream.
type TokenAnalysisMap = HashMap<usize, SemanticAnalysis>;

/// Result of statically analyzing one module for the LSP: the lexed tokens,
/// per-token semantic data, and any error produced while compiling.
#[derive(Debug, Clone)]
pub struct StaticAnalysis {
    pub tokens: Vec<FullToken>,
    pub token_analysis: TokenAnalysisMap,
    pub error: Option<ReidError>,
}
/// Semantic data attached to a single token: its resolved type (when
/// inference succeeded) and the autocomplete suggestions valid there.
#[derive(Debug, Clone)]
pub struct SemanticAnalysis {
    pub ty: Option<TypeKind>,
    pub autocomplete: Vec<Autocomplete>,
}

/// One autocomplete suggestion: the text to insert plus the item kind.
#[derive(Debug, Clone)]
pub struct Autocomplete {
    pub text: String,
    pub kind: AutocompleteKind,
}
/// What kind of item an autocomplete suggestion refers to.
#[derive(Debug, Clone)]
pub enum AutocompleteKind {
    Type,
    /// A struct field together with its type.
    Field(TypeKind),
    /// A function with its parameters and return type.
    Function(Vec<FunctionParam>, TypeKind),
}
/// Human-readable rendering of a suggestion kind, used as detail text for
/// completion items.
///
/// Implements `Display` instead of a hand-written `ToString`: the standard
/// blanket impl keeps `.to_string()` working for existing callers, while also
/// allowing direct use in `format!`-style interpolation (clippy:
/// `inherent_to_string` discourages manual `ToString` impls).
impl std::fmt::Display for AutocompleteKind {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            AutocompleteKind::Type => write!(f, "type"),
            AutocompleteKind::Function(params, ret_ty) => {
                // Render as `(name: ty, ...) -> ret`.
                let params = params
                    .iter()
                    .map(|p| format!("{}: {}", p.name, p.ty))
                    .collect::<Vec<_>>();
                write!(f, "({}) -> {}", params.join(", "), ret_ty)
            }
            AutocompleteKind::Field(type_kind) => write!(f, "{}", type_kind),
        }
    }
}
/// Compiles a single module and runs all compiler passes over it, producing a
/// `StaticAnalysis` for the LSP. Parse and pass errors are folded into the
/// returned analysis instead of aborting, so the editor still receives token
/// data for broken code. Returns `Ok(None)` when the module vanished from the
/// resulting context.
pub fn analyze(
    module_id: SourceModuleId,
    tokens: Vec<FullToken>,
    path: PathBuf,
    map: &mut ErrorModules,
) -> Result<Option<StaticAnalysis>, ReidError> {
    // A failed compile still yields a partial module alongside its error, so
    // analysis can proceed on whatever parsed.
    let (module, mut parse_error) = match compile_module(module_id, tokens, map, Some(path.clone()), true)? {
        Ok(m) => (m, None),
        Err((partial, err)) => (partial.process(module_id), Some(err)),
    };
    let module_id = module.module_id;
    let mut context = Context::from(vec![module], path.parent().unwrap().to_owned());
    // Accumulate pass errors onto any earlier parse error.
    if let Err(pass_error) = perform_all_passes(&mut context, map) {
        match &mut parse_error {
            Some(existing) => existing.extend(pass_error),
            None => parse_error = Some(pass_error),
        }
    }
    // Only the module we were asked about is analyzed; other modules pulled
    // into the context (e.g. imports) are ignored here.
    Ok(context
        .modules
        .values()
        .find(|m| m.module_id == module_id)
        .map(|m| analyze_context(&context, m, parse_error)))
}
/// Records `ty` as the resolved type for every token covered by `meta`,
/// overwriting (and thus clearing autocomplete data of) any previous entry.
pub fn init_types(map: &mut TokenAnalysisMap, meta: &mir::Metadata, ty: Option<TypeKind>) {
    for token_idx in meta.range.start..=meta.range.end {
        let analysis = SemanticAnalysis {
            ty: ty.clone(),
            autocomplete: Vec::new(),
        };
        map.insert(token_idx, analysis);
    }
}
/// Stores autocomplete suggestions for the token at `token_idx`, preserving a
/// previously recorded type if one exists.
///
/// Uses the `HashMap` entry API: one lookup instead of the previous
/// `get_mut`-then-`insert` pair, and the already-owned `autocomplete` vector
/// is moved into place rather than cloned in both branches.
pub fn set_autocomplete(map: &mut TokenAnalysisMap, token_idx: usize, autocomplete: Vec<Autocomplete>) {
    map.entry(token_idx)
        .or_insert_with(|| SemanticAnalysis {
            ty: None,
            autocomplete: Vec::new(),
        })
        .autocomplete = autocomplete;
}
/// Builds the per-token analysis map for one module: import autocompletes,
/// struct-field types, and a full walk of every function body. `error` is any
/// compile/pass error collected earlier, carried through unchanged.
pub fn analyze_context(context: &mir::Context, module: &mir::Module, error: Option<ReidError>) -> StaticAnalysis {
    let mut map = HashMap::new();
    // Imports: suggest public functions and typedefs of the imported module
    // whose names start with the (possibly partial) second path segment.
    for import in &module.imports {
        init_types(&mut map, &import.1, None);
        if let Some((module_name, _)) = import.0.get(0) {
            // When the import has no second segment yet (user is mid-typing),
            // fall back to an empty prefix anchored at the import's last token.
            let (import_name, import_meta) = import.0.get(1).cloned().unwrap_or((
                String::new(),
                mir::Metadata {
                    source_module_id: module.module_id,
                    range: reid::ast::token_stream::TokenRange {
                        start: import.1.range.end - 1,
                        end: import.1.range.end - 1,
                    },
                    position: None,
                },
            ));
            let mut autocompletes = Vec::new();
            // NOTE: `module` is shadowed here by the *imported* module.
            if let Some((_, module)) = context.modules.iter().find(|m| m.1.name == *module_name) {
                for function in &module.functions {
                    // Only public functions are importable.
                    if !function.is_pub {
                        continue;
                    }
                    if function.name.starts_with(&import_name) {
                        autocompletes.push(Autocomplete {
                            text: function.name.clone(),
                            kind: AutocompleteKind::Function(function.parameters.clone(), function.return_type.clone()),
                        });
                    }
                }
                for typedef in &module.typedefs {
                    if typedef.name.starts_with(&import_name) {
                        autocompletes.push(Autocomplete {
                            text: typedef.name.clone(),
                            kind: AutocompleteKind::Type,
                        });
                    }
                }
            }
            set_autocomplete(&mut map, import_meta.range.end, autocompletes);
        }
    }
    // Struct definitions: record each field's declared type at its tokens.
    for typedef in &module.typedefs {
        match &typedef.kind {
            mir::TypeDefinitionKind::Struct(StructType(fields)) => {
                for field in fields {
                    init_types(&mut map, &field.2, Some(field.1.clone()));
                }
            }
        }
    }
    // Walk locally-defined binop bodies; extern/intrinsic ones have no body.
    for binop in &module.binop_defs {
        match &binop.fn_kind {
            mir::FunctionDefinitionKind::Local(block, _) => analyze_block(context, module, block, &mut map),
            mir::FunctionDefinitionKind::Extern(_) => {}
            mir::FunctionDefinitionKind::Intrinsic(_) => {}
        };
    }
    // Associated functions: record parameter types, then walk local bodies.
    for (_, function) in &module.associated_functions {
        for param in &function.parameters {
            init_types(&mut map, &param.meta, Some(param.ty.clone()));
        }
        match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => analyze_block(context, module, block, &mut map),
            mir::FunctionDefinitionKind::Extern(_) => {}
            mir::FunctionDefinitionKind::Intrinsic(_) => {}
        };
    }
    // Free functions: same treatment as associated functions.
    for function in &module.functions {
        for param in &function.parameters {
            init_types(&mut map, &param.meta, Some(param.ty.clone()));
        }
        match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => analyze_block(context, module, block, &mut map),
            mir::FunctionDefinitionKind::Extern(_) => {}
            mir::FunctionDefinitionKind::Intrinsic(_) => {}
        };
    }
    StaticAnalysis {
        tokens: module.tokens.clone(),
        token_analysis: map,
        error,
    }
}
/// Walks every statement of a block (and its trailing return expression),
/// recording types and autocomplete data into `map`.
pub fn analyze_block(
    context: &mir::Context,
    source_module: &mir::Module,
    block: &mir::Block,
    map: &mut TokenAnalysisMap,
) {
    for stmt in &block.statements {
        match &stmt.0 {
            mir::StmtKind::Let(var_ref, _, expr) => {
                // Record the inferred type of the initializer at the
                // variable's tokens; `None` when inference failed.
                let inferred = expr
                    .return_type(&TypeRefs::unknown(), source_module.module_id)
                    .ok()
                    .map(|(_, ty)| ty);
                init_types(map, &var_ref.2, inferred);
            }
            mir::StmtKind::Set(target, value) => {
                analyze_expr(context, source_module, target, map);
                analyze_expr(context, source_module, value, map);
            }
            mir::StmtKind::Import(_) => {}
            mir::StmtKind::Expression(expr) => analyze_expr(context, source_module, expr, map),
            mir::StmtKind::While(WhileStatement { condition, block: body, .. }) => {
                analyze_expr(context, source_module, condition, map);
                analyze_block(context, source_module, body, map);
            }
        }
    }
    // A soft-return expression at the end of the block is analyzed too.
    if let Some((_, Some(ret))) = &block.return_expression {
        analyze_expr(context, source_module, ret, map);
    }
}
/// Recursively walks an expression tree, recording each sub-expression's
/// resolved type for the tokens it spans and computing autocomplete
/// suggestions at member-access and associated-call positions.
pub fn analyze_expr(
    context: &mir::Context,
    source_module: &mir::Module,
    expr: &mir::Expression,
    map: &mut TokenAnalysisMap,
) {
    // Record this expression's type across its whole token range.
    init_types(
        map,
        &expr.1,
        expr.return_type(&TypeRefs::unknown(), source_module.module_id)
            .ok()
            .map(|(_, t)| t),
    );
    match &expr.0 {
        mir::ExprKind::Variable(_) => {}
        mir::ExprKind::Indexed(value, _, index_expr) => {
            analyze_expr(context, source_module, &value, map);
            analyze_expr(context, source_module, &index_expr, map);
        }
        mir::ExprKind::Accessed(expression, _, name, meta) => {
            analyze_expr(context, source_module, &expression, map);
            // Suggest associated functions and struct fields whose names
            // start with the (possibly partial) accessed `name`.
            let accessed_type = expression.return_type(&TypeRefs::unknown(), source_module.module_id);
            let mut autocompletes = Vec::new();
            match accessed_type {
                Ok((_, accessed_type)) => {
                    // Associated functions defined on the accessed type.
                    autocompletes.extend(
                        source_module
                            .associated_functions
                            .iter()
                            .filter(|(t, fun)| *t == accessed_type && fun.name.starts_with(name))
                            .map(|(_, fun)| Autocomplete {
                                text: fun.name.clone(),
                                kind: AutocompleteKind::Function(fun.parameters.clone(), fun.return_type.clone()),
                            }),
                    );
                    match accessed_type {
                        TypeKind::CustomType(ty_key) => {
                            // Struct fields, when the accessed type is a
                            // locally-known custom type.
                            let typedef = source_module
                                .typedefs
                                .iter()
                                .find(|t| t.name == ty_key.0 && t.source_module == ty_key.1);
                            if let Some(typedef) = typedef {
                                autocompletes.extend(match &typedef.kind {
                                    mir::TypeDefinitionKind::Struct(StructType(fields)) => {
                                        fields.iter().filter(|f| f.0.starts_with(name)).map(|f| Autocomplete {
                                            text: f.0.clone(),
                                            kind: AutocompleteKind::Field(f.1.clone()),
                                        })
                                    }
                                });
                            }
                        }
                        _ => {}
                    }
                }
                // Type inference failed: no suggestions.
                _ => {}
            }
            set_autocomplete(map, meta.range.end, autocompletes);
        }
        mir::ExprKind::Array(expressions) => {
            for expr in expressions {
                analyze_expr(context, source_module, expr, map);
            }
        }
        mir::ExprKind::Struct(_, items) => {
            for (_, expr, _) in items {
                analyze_expr(context, source_module, expr, map);
            }
        }
        mir::ExprKind::Literal(_) => {}
        mir::ExprKind::BinOp(_, lhs, rhs, _) => {
            analyze_expr(context, source_module, &lhs, map);
            analyze_expr(context, source_module, &rhs, map);
        }
        mir::ExprKind::FunctionCall(FunctionCall { parameters, .. }) => {
            for expr in parameters {
                analyze_expr(context, source_module, expr, map);
            }
        }
        mir::ExprKind::AssociatedFunctionCall(
            ty,
            FunctionCall {
                parameters, name, meta, ..
            },
        ) => {
            for expr in parameters {
                analyze_expr(context, source_module, expr, map);
            }
            // Module-local associated functions matching the partial name...
            let mut function_autocomplete = source_module
                .associated_functions
                .iter()
                .filter(|(t, fun)| t == ty && fun.name.starts_with(name))
                .map(|(_, fun)| Autocomplete {
                    text: fun.name.clone(),
                    kind: AutocompleteKind::Function(fun.parameters.clone(), fun.return_type.clone()),
                })
                .collect::<Vec<_>>();
            // ...plus compiler-intrinsic associated functions for the type.
            function_autocomplete.extend(
                get_intrinsic_assoc_functions(ty)
                    .iter()
                    .filter_map(|(s, f)| f.as_ref().map(|f| (s, f)))
                    .filter(|(_, fun)| fun.name.starts_with(name))
                    .map(|(_, fun)| Autocomplete {
                        text: fun.name.clone(),
                        kind: AutocompleteKind::Function(fun.parameters.clone(), fun.return_type.clone()),
                    })
                    .collect::<Vec<_>>(),
            );
            // Attach the suggestions at both ends of the call's token range.
            set_autocomplete(map, meta.range.start, function_autocomplete.clone());
            set_autocomplete(map, meta.range.end, function_autocomplete.clone());
        }
        mir::ExprKind::If(IfExpression(cond, then_e, else_e)) => {
            analyze_expr(context, source_module, &cond, map);
            analyze_expr(context, source_module, &then_e, map);
            if let Some(else_e) = else_e.as_ref() {
                analyze_expr(context, source_module, &else_e, map);
            }
        }
        mir::ExprKind::Block(block) => analyze_block(context, source_module, block, map),
        mir::ExprKind::Borrow(expression, _) => {
            analyze_expr(context, source_module, &expression, map);
        }
        mir::ExprKind::Deref(expression) => {
            analyze_expr(context, source_module, &expression, map);
        }
        mir::ExprKind::CastTo(expression, _) => {
            analyze_expr(context, source_module, &expression, map);
        }
        mir::ExprKind::GlobalRef(_, _) => {}
    }
}

268
reid-lsp/src/main.rs Normal file
View File

@ -0,0 +1,268 @@
use std::path::PathBuf;
use dashmap::DashMap;
use reid::ast::lexer::{FullToken, Position};
use reid::error_raporting::{self, ErrorModules, ReidError};
use reid::mir::{SourceModuleId, TypeKind};
use reid::parse_module;
use tower_lsp::lsp_types::{
self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity,
DidChangeTextDocumentParams, DidOpenTextDocumentParams, Hover, HoverContents, HoverParams, HoverProviderCapability,
InitializeParams, InitializeResult, InitializedParams, MarkupContent, MarkupKind, MessageType, OneOf, Range,
ServerCapabilities, TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
};
use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc};
use crate::analysis::{StaticAnalysis, analyze};
mod analysis;
/// Shared state of the Reid language server.
#[derive(Debug)]
struct Backend {
    // Handle for sending log messages and notifications back to the editor.
    client: Client,
    // Latest static analysis per open document, keyed by file name
    // (see `completion`, which looks up by `file_name`).
    analysis: DashMap<String, StaticAnalysis>,
}
#[tower_lsp::async_trait]
impl LanguageServer for Backend {
    /// Advertise server capabilities: hover, completion, and full-document sync.
    async fn initialize(&self, _: InitializeParams) -> jsonrpc::Result<InitializeResult> {
        self.client
            .log_message(MessageType::INFO, "Initializing Reid Language Server")
            .await;

        let sync = TextDocumentSyncOptions {
            open_close: Some(true),
            // FULL sync: the client resends the entire document on every change.
            change: Some(TextDocumentSyncKind::FULL),
            will_save: None,
            will_save_wait_until: None,
            save: None,
        };

        Ok(InitializeResult {
            capabilities: ServerCapabilities {
                hover_provider: Some(HoverProviderCapability::Simple(true)),
                completion_provider: Some(CompletionOptions { ..Default::default() }),
                text_document_sync: Some(TextDocumentSyncCapability::Options(sync)),
                workspace: Some(WorkspaceServerCapabilities {
                    workspace_folders: Some(WorkspaceFoldersServerCapabilities {
                        supported: Some(true),
                        change_notifications: Some(OneOf::Left(true)),
                    }),
                    file_operations: None,
                }),
                ..Default::default()
            },
            ..Default::default()
        })
    }

    async fn initialized(&self, _: InitializedParams) {
        self.client
            .log_message(MessageType::INFO, "Reid Language Server initialized hello!")
            .await;
    }

    async fn shutdown(&self) -> jsonrpc::Result<()> {
        Ok(())
    }

    /// Offer the completion items pre-computed by static analysis for the
    /// token under the cursor.
    async fn completion(&self, params: CompletionParams) -> jsonrpc::Result<Option<CompletionResponse>> {
        let path = PathBuf::from(params.text_document_position.text_document.uri.path());
        let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
        let analysis = self.analysis.get(&file_name);
        let position = params.text_document_position.position;

        // Locate the token whose span contains the cursor. Token positions are
        // one-based (column, line); LSP positions are zero-based.
        let token = if let Some(analysis) = &analysis {
            analysis.tokens.iter().enumerate().find(|(_, tok)| {
                tok.position.1 == position.line + 1
                    && (tok.position.0 <= position.character
                        && (tok.position.0 + tok.token.len() as u32) > position.character)
            })
        } else {
            None
        };

        let list = if let Some((idx, _)) = token {
            // Reuse the guard acquired above instead of re-locking the map;
            // `token` being Some guarantees `analysis` is Some.
            if let Some(token_analysis) = analysis.as_ref().unwrap().token_analysis.get(&idx) {
                token_analysis
                    .autocomplete
                    .iter()
                    .map(|s| CompletionItem::new_simple(s.text.to_string(), s.kind.to_string()))
                    .collect()
            } else {
                Vec::new()
            }
        } else {
            Vec::new()
        };

        Ok(Some(CompletionResponse::Array(list)))
    }

    /// Report the analyzed type of the token under the cursor as markdown
    /// hover text, together with the token's source range.
    async fn hover(&self, params: HoverParams) -> jsonrpc::Result<Option<Hover>> {
        let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
        let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
        let analysis = self.analysis.get(&file_name);
        let position = params.text_document_position_params.position;

        // NOTE(review): hover probes at `character + 1`, unlike completion —
        // presumably to favour the token the cursor sits at the end of; confirm.
        let token = if let Some(analysis) = &analysis {
            analysis.tokens.iter().enumerate().find(|(_, tok)| {
                tok.position.1 == position.line + 1
                    && (tok.position.0 <= position.character + 1
                        && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
            })
        } else {
            None
        };

        let (range, ty) = if let Some((idx, token)) = token {
            // Reuse the guard acquired above; `token` being Some guarantees
            // `analysis` is Some.
            if let Some(token_analysis) = analysis.as_ref().unwrap().token_analysis.get(&idx) {
                let start = token.position;
                let end = token.position.add(token.token.len() as u32);
                // Convert one-based token coordinates to zero-based LSP
                // positions, clamping at zero to avoid underflow.
                let range = Range {
                    start: lsp_types::Position {
                        line: (start.1 as i32 - 1).max(0) as u32,
                        character: (start.0 as i32 - 1).max(0) as u32,
                    },
                    end: lsp_types::Position {
                        line: (end.1 as i32 - 1).max(0) as u32,
                        character: (end.0 as i32 - 1).max(0) as u32,
                    },
                };
                if let Some(ty) = token_analysis.ty.clone() {
                    (Some(range), format!("{}", ty))
                } else {
                    (Some(range), String::from("None type"))
                }
            } else {
                (None, String::from("no type"))
            }
        } else {
            (None, String::from("no token"))
        };

        let contents = HoverContents::Markup(MarkupContent {
            kind: MarkupKind::Markdown,
            value: format!("`{ty}`"),
        });
        Ok(Some(Hover { contents, range }))
    }

    async fn did_open(&self, params: DidOpenTextDocumentParams) {
        self.recompile(TextDocumentItem {
            uri: params.text_document.uri,
            language_id: params.text_document.language_id,
            version: params.text_document.version,
            text: params.text_document.text,
        })
        .await
    }

    async fn did_change(&self, params: DidChangeTextDocumentParams) {
        // With FULL sync the client sends one change containing the whole
        // document text; guard against an empty change list instead of
        // panicking on `content_changes[0]`.
        let Some(change) = params.content_changes.into_iter().next() else {
            return;
        };
        self.recompile(TextDocumentItem {
            text: change.text,
            uri: params.text_document.uri,
            version: params.text_document.version,
            language_id: String::new(),
        })
        .await
    }
}
impl Backend {
async fn recompile(&self, params: TextDocumentItem) {
let path = PathBuf::from(params.uri.clone().path());
let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
let mut map = Default::default();
let parse_res = parse(&params.text, path.clone(), &mut map);
let (tokens, result) = match parse_res {
Ok((module_id, tokens)) => (tokens.clone(), analyze(module_id, tokens, path, &mut map)),
Err(e) => (Vec::new(), Err(e)),
};
let mut diagnostics = Vec::new();
match result {
Ok(Some(mut analysis)) => {
if let Some(reid_error) = &mut analysis.error {
self.client
.log_message(
MessageType::INFO,
format!("Successfully compiled despite parsing errors!"),
)
.await;
reid_error.errors.dedup();
for error in &reid_error.errors {
diagnostics.push(reid_error_into_diagnostic(error, &tokens));
self.client.log_message(MessageType::INFO, format!("{}", error)).await;
}
}
self.analysis.insert(file_name.clone(), analysis);
}
Ok(_) => {}
Err(mut reid_error) => {
reid_error.errors.dedup();
for error in &reid_error.errors {
diagnostics.push(reid_error_into_diagnostic(error, &tokens));
self.client.log_message(MessageType::INFO, format!("{}", error)).await;
}
}
}
self.client
.publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
.await;
}
}
fn reid_error_into_diagnostic(error: &error_raporting::ErrorKind, tokens: &Vec<FullToken>) -> Diagnostic {
let meta = error.get_meta();
let positions = meta
.range
.into_position(&tokens)
.unwrap_or((Position(0, 0), Position(0, 0)));
Diagnostic {
range: Range {
start: lsp_types::Position {
line: ((positions.0.1 as i32) - 1).max(0) as u32,
character: ((positions.0.0 as i32) - 1).max(0) as u32,
},
end: lsp_types::Position {
line: ((positions.1.1 as i32) - 1).max(0) as u32,
character: ((positions.1.0 as i32) - 1).max(0) as u32,
},
},
severity: Some(DiagnosticSeverity::ERROR),
code: None,
code_description: None,
source: Some(error.get_type_str().to_owned()),
message: format!("{}", error),
related_information: None,
tags: None,
data: None,
}
}
/// Tokenize and parse `source` into a module registered in `map` under the
/// file name derived from `path`.
///
/// Returns the new module's id together with its full token stream, or a
/// [`ReidError`] when parsing fails.
fn parse(source: &str, path: PathBuf, map: &mut ErrorModules) -> Result<(SourceModuleId, Vec<FullToken>), ReidError> {
    let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
    // `file_name` is moved directly — the previous `.clone()` was redundant.
    Ok(parse_module(source, file_name, map)?)
}
/// Entry point: run the Reid language server over stdin/stdout, as expected
/// by LSP clients spawning the server as a child process.
#[tokio::main]
async fn main() {
    let (service, socket) = LspService::new(|client| Backend {
        client,
        analysis: DashMap::new(),
    });
    Server::new(tokio::io::stdin(), tokio::io::stdout(), socket)
        .serve(service)
        .await;
}

View File

@ -0,0 +1,395 @@
{
"scopeName": "source.reid",
"patterns": [
{
"include": "#import"
},
{
"include": "#expression"
}
],
"repository": {
"import": {
"begin": "(import)\\s*",
"end": ";",
"beginCaptures": {
"1": {
"name": "keyword"
}
},
"endCaptures": {
"0": {
"name": "punctuation.semi.reid"
}
},
"patterns": [
{
"include": "#identifier"
},
{
"include": "#punctiation"
}
]
},
"punctuation": {
"patterns": [
{
"match": "::",
"name": "keyword.operator.namespace.reid"
},
{
"match": ";",
"name": "punctuation.semi.reid"
},
{
"match": ".",
"name": "punctuation.dot.reid"
},
{
"match": ",",
"name": "punctuation.comma.reid"
}
]
},
"expression": {
"patterns": [
{
"include": "#comment"
},
{
"include": "#fn-signature"
},
{
"include": "#common-type"
},
{
"include": "#binop-impl"
},
{
"include": "#type-impl"
},
{
"include": "#struct-definition"
},
{
"include": "#block"
},
{
"include": "#binop"
},
{
"include": "#namespace"
},
{
"include": "#cast"
},
{
"include": "#function-call"
},
{
"include": "#parenthesis"
},
{
"include": "#array"
},
{
"include": "#keywords"
},
{
"include": "#struct-expression"
},
{
"include": "#number-literal"
},
{
"include": "#string-literal"
},
{
"include": "#identifier"
},
{
"include": "#punctuation"
}
]
},
"comment": {
"match": "\\/\\/(.|\\/)*",
"name": "comment.line.double-slash.reid"
},
"fn-signature": {
"begin": "(fn)\\s*(\\w+)\\(",
"beginCaptures": {
"1": {
"name": "keyword.fn.reid"
},
"2": {
"name": "entity.name.function.reid"
}
},
"end": "\\)",
"patterns": [
{
"include": "#annotated-identifier"
},
{
"include": "#keywords"
},
{
"include": "#binop"
}
],
"endCaptures": {
"2": {
"name": "entity.name.type.reid"
}
}
},
"type-impl": {
"begin": "(impl)\\s* (\\w+)\\s* \\{\n",
"end": "\\}",
"captures": {
"1": {
"name": "keyword.impl.reid"
},
"2": {
"name": "entity.name.type"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"binop-impl": {
"begin": "(impl)\\s+(binop)\\s+\\(((.*)\\s*:\\s*(.*))\\)(.*)\\(((.*)\\s*:\\s*(.*))\\)\\s*->\\s*(\\w+)\\s*\\{",
"end": "\\}",
"beginCaptures": {
"1": {
"name": "keyword.impl.reid"
},
"2": {
"name": "keyword.impl.reid"
},
"4": {
"name": "variable.parameter.binop.reid"
},
"5": {
"name": "entity.name.type.parameter.binop.reid"
},
"6": {
"name": "keyword.operator.math.reid"
},
"8": {
"name": "variable.parameter.binop.reid"
},
"9": {
"name": "entity.name.type.parameter.binop.reid"
},
"10": {
"name": "entity.name.type.return.binop.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"struct-definition": {
"begin": "(struct)\\s*(\\w+)\\s*\\{",
"end": "\\}",
"captures": {
"1": {
"name": "keyword.struct.reid"
},
"2": {
"name": "entity.name.type"
}
},
"patterns": [
{
"include": "#annotated-identifier"
}
]
},
"struct-expression": {
"begin": "([A-Z]\\w*)\\s*\\{",
"end": "\\}",
"captures": {
"1": {
"name": "entity.name.type.struct.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"number-literal": {
"patterns": [
{
"match": "0x[0-9a-fA-F]+(\\.[0-9a-fA-F]+)?",
"name": "constant.hexadecimal"
},
{
"match": "0o[0-7]+(\\.[0-7]+)?",
"name": "constant.octal"
},
{
"match": "0b[01]+(\\.[01]+)?",
"name": "constant.binary"
},
{
"match": "[0-9]+(\\.[0-9]+)?",
"name": "constant.numeric"
}
]
},
"string-literal": {
"begin": "\"",
"end": "\"",
"name": "string.quoted.double",
"patterns": [
{
"match": "\\.",
"name": "constant.character.escape"
}
]
},
"block": {
"begin": "\\{",
"end": "\\}",
"patterns": [
{
"include": "#expression"
}
]
},
"namespace": {
"match": "(\\w+)(\\:\\:)",
"captures": {
"1": {
"name": "entity.name.function.reid"
},
"2": {
"name": "keyword.operator.namespace.reid"
}
}
},
"cast": {
"match": "(as)\\s+(\\w+)",
"captures": {
"1": {
"name": "keyword.cast.reid"
},
"2": {
"name": "entity.name.type.reid"
}
}
},
"function-call": {
"begin": "(\\w+)?\\(",
"end": "\\)",
"beginCaptures": {
"1": {
"name": "entity.name.function.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"parenthesis": {
"begin": "\\(",
"end": "\\)",
"beginCaptures": {
"0": {
"name": "keyword.operator.parenthesis.reid"
}
},
"endCaptures": {
"0": {
"name": "keyword.operator.parenthesis.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"annotated-identifier": {
"begin": "(\\w+)\\:",
"end": ",",
"beginCaptures": {
"1": {
"name": "variable.language.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"identifier": {
"patterns": [
{
"match": "[A-Z]\\w*",
"name": "entity.name.type.reid"
},
{
"match": "\\w+",
"name": "variable.language.reid"
}
]
},
"keywords": {
"patterns": [
{
"match": "let|mut|pub|extern",
"name": "storage.type.reid"
},
{
"match": "if|return",
"name": "keyword.control"
},
{
"match": "self",
"name": "variable.language.self.reid"
}
]
},
"binop": {
"match": "\\<\\=|\\>\\=|\\=\\=|\\<|\\>|\\*|\\+|\\-|\\^|\\&\\&|\\&",
"name": "keyword.operator.math.reid"
},
"array": {
"begin": "\\[",
"end": "\\]",
"beginCaptures": {
"0": {
"name": "entity.name.type.array.reid"
}
},
"endCaptures": {
"0": {
"name": "entity.name.type.array.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"common-type": {
"match": "u8|u16|u32|u64|u128|i8|i16|i32|i64|i128|bool",
"name": "entity.name.type.common.reid"
}
}
}

View File

@ -0,0 +1,232 @@
scopeName: source.reid
patterns:
- include: "#import"
- include: "#expression"
repository:
# function-definition:
# begin: "(fn)\\s*(\\w+)\\(((\\w+)\\s*\\:\\s*(\\w+),?)*\\)\\s*->\\s*(\\w+)\\s*\\{"
# end: "\\}"
# beginCaptures:
# 1:
# name: "keyword.other"
# 2:
# name: "entity.name.function"
# 4:
# name: "entity.name.parameter"
# 5:
# name: "entity.name.type"
# 6:
# name: "entity.name.type"
# patterns:
# - include: "#type"
# - include: "#expression"
import:
begin: "(import)\\s*"
end: ";"
beginCaptures:
1:
name: keyword
endCaptures:
0:
name: punctuation.semi.reid
patterns:
- include: "#identifier"
- include: "#punctiation"
punctuation:
patterns:
- match: "::"
name: keyword.operator.namespace.reid
- match: ";"
name: punctuation.semi.reid
- match: "."
name: punctuation.dot.reid
- match: ","
name: punctuation.comma.reid
expression:
patterns:
- include: "#comment"
- include: "#fn-signature"
- include: "#common-type"
- include: "#binop-impl"
- include: "#type-impl"
- include: "#struct-definition"
- include: "#block"
- include: "#binop"
- include: "#namespace"
- include: "#cast"
- include: "#function-call"
- include: "#parenthesis"
- include: "#array"
- include: "#keywords"
- include: "#struct-expression"
- include: "#number-literal"
- include: "#string-literal"
- include: "#identifier"
- include: "#punctuation"
comment:
match: "\\/\\/(.|\\/)*"
name: comment.line.double-slash.reid
fn-signature:
begin: "(fn)\\s*(\\w+)\\("
beginCaptures:
1:
name: keyword.fn.reid
2:
name: entity.name.function.reid
end: "\\)"
patterns:
- include: "#annotated-identifier"
- include: "#keywords"
- include: "#binop"
endCaptures:
2:
name: entity.name.type.reid
type-impl:
begin: "(impl)\\s*(\\w+)\\s*\\{"
end: "\\}"
captures:
1:
name: keyword.impl.reid
2:
name: entity.name.type
patterns:
- include: "#expression"
binop-impl:
begin: "(impl)\\s+(binop)\\s+\\(((.*)\\s*:\\s*(.*))\\)(.*)\\(((.*)\\s*:\\s*(.*))\\)\\s*->\\s*(\\w+)\\s*\\{"
end: "\\}"
beginCaptures:
1:
name: keyword.impl.reid
2:
name: keyword.impl.reid
4:
name: variable.parameter.binop.reid
5:
name: entity.name.type.parameter.binop.reid
6:
name: keyword.operator.math.reid
8:
name: variable.parameter.binop.reid
9:
name: entity.name.type.parameter.binop.reid
10:
name: entity.name.type.return.binop.reid
patterns:
- include: "#expression"
struct-definition:
begin: "(struct)\\s*(\\w+)\\s*\\{"
end: "\\}"
captures:
1:
name: keyword.struct.reid
2:
name: entity.name.type
patterns:
- include: "#annotated-identifier"
struct-expression:
begin: "([A-Z]\\w*)\\s*\\{"
end: "\\}"
captures:
1:
name: entity.name.type.struct.reid
patterns:
- include: "#expression"
number-literal:
patterns:
- match: "0x[0-9a-fA-F]+(\\.[0-9a-fA-F]+)?"
name: "constant.hexadecimal"
- match: "0o[0-7]+(\\.[0-7]+)?"
name: "constant.octal"
- match: "0b[01]+(\\.[01]+)?"
name: "constant.binary"
- match: "[0-9]+(\\.[0-9]+)?"
name: "constant.numeric"
string-literal:
begin: '"'
end: '"'
name: string.quoted.double
patterns:
- match: "\\."
name: constant.character.escape
block:
begin: "\\{"
end: "\\}"
patterns:
- include: "#expression"
namespace:
match: "(\\w+)(\\:\\:)"
captures:
1:
name: entity.name.function.reid
2:
name: keyword.operator.namespace.reid
cast:
match: "(as)\\s+(\\w+)"
captures:
1:
name: keyword.cast.reid
2:
name: entity.name.type.reid
function-call:
begin: "(\\w+)?\\("
end: "\\)"
beginCaptures:
1:
name: entity.name.function.reid
patterns:
- include: "#expression"
parenthesis:
begin: "\\("
end: "\\)"
beginCaptures:
0:
name: keyword.operator.parenthesis.reid
endCaptures:
0:
name: keyword.operator.parenthesis.reid
patterns:
- include: "#expression"
annotated-identifier:
begin: "(\\w+)\\:"
end: ","
beginCaptures:
1:
name: variable.language.reid
patterns:
- include: "#expression"
identifier:
patterns:
- match: "[A-Z]\\w*"
name: entity.name.type.reid
- match: "\\w+"
name: variable.language.reid
keywords:
patterns:
- match: "let|mut|pub|extern"
name: "storage.type.reid"
- match: "if|return"
name: "keyword.control"
- match: "self"
name: "variable.language.self.reid"
binop:
match: "\\<\\=|\\>\\=|\\=\\=|\\<|\\>|\\*|\\+|\\-|\\^|\\&\\&|\\&"
name: keyword.operator.math.reid
array:
begin: "\\["
end: "\\]"
beginCaptures:
0:
name: entity.name.type.array.reid
endCaptures:
0:
name: entity.name.type.array.reid
patterns:
- include: "#expression"
common-type:
match: "u8|u16|u32|u64|u128|i8|i16|i32|i64|i128|bool"
name: entity.name.type.common.reid

29
reid-lsp/tsconfig.json Normal file
View File

@ -0,0 +1,29 @@
{
"compilerOptions": {
"module": "Node16",
"target": "ES2022",
"lib": [
"ES2022"
],
"sourceMap": true,
"rootDir": "src",
"outDir": "out",
"strict": true /* enable all strict type-checking options */
/* Additional Checks */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
},
"include": [
"src"
],
"exclude": [
"node_modules",
".vscode-test"
],
"references": [
{
"path": "./client/"
},
]
}

View File

@ -0,0 +1,48 @@
# Welcome to your VS Code Extension
## What's in the folder
* This folder contains all of the files necessary for your extension.
* `package.json` - this is the manifest file in which you declare your extension and command.
* The sample plugin registers a command and defines its title and command name. With this information VS Code can show the command in the command palette. It doesn't yet need to load the plugin.
* `src/extension.ts` - this is the main file where you will provide the implementation of your command.
* The file exports one function, `activate`, which is called the very first time your extension is activated (in this case by executing the command). Inside the `activate` function we call `registerCommand`.
* We pass the function containing the implementation of the command as the second parameter to `registerCommand`.
## Setup
* install the recommended extensions (amodio.tsl-problem-matcher, ms-vscode.extension-test-runner, and dbaeumer.vscode-eslint)
## Get up and running straight away
* Press `F5` to open a new window with your extension loaded.
* Run your command from the command palette by pressing (`Ctrl+Shift+P` or `Cmd+Shift+P` on Mac) and typing `Hello World`.
* Set breakpoints in your code inside `src/extension.ts` to debug your extension.
* Find output from your extension in the debug console.
## Make changes
* You can relaunch the extension from the debug toolbar after changing code in `src/extension.ts`.
* You can also reload (`Ctrl+R` or `Cmd+R` on Mac) the VS Code window with your extension to load your changes.
## Explore the API
* You can open the full set of our API when you open the file `node_modules/@types/vscode/index.d.ts`.
## Run tests
* Install the [Extension Test Runner](https://marketplace.visualstudio.com/items?itemName=ms-vscode.extension-test-runner)
* Run the "watch" task via the **Tasks: Run Task** command. Make sure this is running, or tests might not be discovered.
* Open the Testing view from the activity bar and click the "Run Test" button, or use the hotkey `Ctrl/Cmd + ; A`
* See the output of the test result in the Test Results view.
* Make changes to `src/test/extension.test.ts` or create new test files inside the `test` folder.
* The provided test runner will only consider files matching the name pattern `**.test.ts`.
* You can create folders inside the `test` folder to structure your tests any way you want.
## Go further
* Reduce the extension size and improve the startup time by [bundling your extension](https://code.visualstudio.com/api/working-with-extensions/bundling-extension).
* [Publish your extension](https://code.visualstudio.com/api/working-with-extensions/publishing-extension) on the VS Code extension marketplace.
* Automate builds by setting up [Continuous Integration](https://code.visualstudio.com/api/working-with-extensions/continuous-integration).

View File

@ -0,0 +1,48 @@
//@ts-check
'use strict';

const path = require('path');

/** @typedef {import('webpack').Configuration} WebpackConfig **/

/**
 * Webpack configuration bundling the VS Code extension client into a single
 * CommonJS file under `dist/`.
 * @type WebpackConfig
 */
const extensionConfig = {
  target: 'node', // VS Code extensions run in a Node.js-context 📖 -> https://webpack.js.org/configuration/node/
  mode: 'none', // this leaves the source code as close as possible to the original (when packaging we set this to 'production')

  entry: './client/src/extension.ts', // the entry point of this extension, 📖 -> https://webpack.js.org/configuration/entry-context/
  output: {
    // the bundle is stored in the 'dist' folder (check package.json), 📖 -> https://webpack.js.org/configuration/output/
    path: path.resolve(__dirname, 'dist'),
    filename: 'extension.js',
    libraryTarget: 'commonjs2'
  },
  externals: {
    vscode: 'commonjs vscode' // the vscode-module is created on-the-fly and must be excluded. Add other modules that cannot be webpack'ed, 📖 -> https://webpack.js.org/configuration/externals/
    // modules added here also need to be added in the .vscodeignore file
  },
  resolve: {
    // support reading TypeScript and JavaScript files, 📖 -> https://github.com/TypeStrong/ts-loader
    extensions: ['.ts', '.js']
  },
  module: {
    rules: [
      {
        test: /\.ts$/,
        exclude: /node_modules/,
        use: [
          {
            loader: 'ts-loader'
          }
        ]
      }
    ]
  },
  devtool: 'nosources-source-map',
  infrastructureLogging: {
    level: "log", // enables logging required for problem matchers
  },
};
module.exports = [extensionConfig];

View File

@ -1,6 +1,6 @@
[package] [package]
name = "reid" name = "reid"
version = "1.0.0-beta.2" version = "1.0.0-beta.3"
edition = "2021" edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -10,10 +10,12 @@ edition = "2021"
default = ["color"] default = ["color"]
color = ["colored"] color = ["colored"]
log_output = []
context_debug = []
[dependencies] [dependencies]
## Make it easier to generate errors ## Make it easier to generate errors
thiserror = "1.0.44" thiserror = "1.0.44"
reid-lib = { path = "../reid-llvm-lib", version = "1.0.0-beta.1", registry="gitea-teascade" } reid-lib = { path = "../reid-llvm-lib", version = "1.0.0-beta.3", registry="gitea-teascade" }
colored = {version = "3.0.0", optional = true} colored = {version = "3.0.0", optional = true}

View File

@ -12,7 +12,6 @@ fn main() -> Result<(), std::io::Error> {
libraries.push(libname); libraries.push(libname);
} }
dbg!(&filename);
let path = PathBuf::from(filename).canonicalize().unwrap(); let path = PathBuf::from(filename).canonicalize().unwrap();
let parent = path.with_extension(""); let parent = path.with_extension("");
let llvm_ir_path = parent.with_extension("ll"); let llvm_ir_path = parent.with_extension("ll");
@ -21,6 +20,7 @@ fn main() -> Result<(), std::io::Error> {
let mir_path = parent.with_extension("mir"); let mir_path = parent.with_extension("mir");
let asm_path = parent.with_extension("asm"); let asm_path = parent.with_extension("asm");
#[cfg(feature = "log_output")]
let before = std::time::SystemTime::now(); let before = std::time::SystemTime::now();
let text = fs::read_to_string(&path)?; let text = fs::read_to_string(&path)?;
@ -31,33 +31,39 @@ fn main() -> Result<(), std::io::Error> {
match compile_simple(&text, PathBuf::from(&path), Some(cpu), vec![features]) { match compile_simple(&text, PathBuf::from(&path), Some(cpu), vec![features]) {
Ok(( Ok((
CompileOutput { CompileOutput {
triple, triple: _triple,
assembly, assembly,
obj_buffer, obj_buffer,
llvm_ir, llvm_ir: _llvm_ir,
}, },
CustomIRs { llir, mir }, CustomIRs { llir, mir },
)) => { )) => {
println!("{}", llvm_ir); #[cfg(feature = "log_output")]
{
println!("{}", _llvm_ir);
println!("Compiled with triple: {}\n", &_triple);
println!("Output LLVM IR to {:?}", llvm_ir_path);
println!("Output Assembly to {:?}", asm_path);
println!("Output Object-file to {:?}\n", object_path);
println!("Output LLIR-file to {:?}\n", llir_path);
println!("Output MIR-file to {:?}\n", mir_path);
}
let after = std::time::SystemTime::now(); fs::write(&llvm_ir_path, &_llvm_ir).expect("Could not write LLVM IR -file!");
println!("Compiled with triple: {}\n", &triple);
fs::write(&llvm_ir_path, &llvm_ir).expect("Could not write LLVM IR -file!");
println!("Output LLVM IR to {:?}", llvm_ir_path);
fs::write(&asm_path, &assembly).expect("Could not write Assembly-file!"); fs::write(&asm_path, &assembly).expect("Could not write Assembly-file!");
println!("Output Assembly to {:?}", asm_path);
fs::write(&object_path, &obj_buffer).expect("Could not write Object-file!"); fs::write(&object_path, &obj_buffer).expect("Could not write Object-file!");
println!("Output Object-file to {:?}\n", object_path);
fs::write(&llir_path, &llir).expect("Could not write LLIR-file!"); fs::write(&llir_path, &llir).expect("Could not write LLIR-file!");
println!("Output LLIR-file to {:?}\n", llir_path);
fs::write(&mir_path, &mir).expect("Could not write MIR-file!"); fs::write(&mir_path, &mir).expect("Could not write MIR-file!");
println!("Output MIR-file to {:?}\n", mir_path); #[cfg(feature = "log_output")]
println!( {
"Compilation took: {:.2}ms\n", let after = std::time::SystemTime::now();
(after.duration_since(before).unwrap().as_micros() as f32) / 1000. println!(
); "Compilation took: {:.2}ms\n",
(after.duration_since(before).unwrap().as_micros() as f32) / 1000.
);
println!("Linking {:?}", &object_path); println!("Linking {:?}", &object_path);
}
let linker = std::env::var("LD").unwrap_or("ld".to_owned()); let linker = std::env::var("LD").unwrap_or("ld".to_owned());
let mut linker = LDRunner::from_command(&linker).with_library("c"); let mut linker = LDRunner::from_command(&linker).with_library("c");
@ -69,6 +75,7 @@ fn main() -> Result<(), std::io::Error> {
Err(e) => panic!("{}", e), Err(e) => panic!("{}", e),
}; };
} else { } else {
#[cfg(feature = "log_output")]
println!("Please input compiled file path!") println!("Please input compiled file path!")
} }
Ok(()) Ok(())

View File

@ -7,7 +7,7 @@ static HEXADECIMAL_NUMERICS: &[char] = &[
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f',
]; ];
#[derive(Eq, PartialEq, Clone, PartialOrd, Ord)] #[derive(Eq, PartialEq, Clone, PartialOrd, Ord, Hash)]
pub enum Token { pub enum Token {
/// Values /// Values
Identifier(String), Identifier(String),
@ -114,6 +114,8 @@ pub enum Token {
Unknown(char), Unknown(char),
Whitespace(String),
Comment(String),
Eof, Eof,
} }
@ -192,6 +194,8 @@ impl ToString for Token {
Token::Eof => String::new(), Token::Eof => String::new(),
Token::Slash => String::from('/'), Token::Slash => String::from('/'),
Token::Percent => String::from('%'), Token::Percent => String::from('%'),
Token::Whitespace(val) => val.clone(),
Token::Comment(val) => format!("//{}", val.clone()),
Token::Unknown(val) => val.to_string(), Token::Unknown(val) => val.to_string(),
} }
} }
@ -207,7 +211,7 @@ impl std::fmt::Debug for Token {
} }
/// A token with a position /// A token with a position
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FullToken { pub struct FullToken {
pub token: Token, pub token: Token,
pub position: Position, pub position: Position,
@ -293,13 +297,25 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Error
let variant = match character { let variant = match character {
// Whitespace // Whitespace
w if w.is_whitespace() => continue, w if w.is_whitespace() => {
let mut whitespace = String::from(*w);
while let Some(w) = cursor.first() {
if !w.is_whitespace() {
break;
}
whitespace.push(cursor.next().unwrap());
}
Token::Whitespace(whitespace)
}
// Comments // Comments
'/' if cursor.first() == Some('/') => { '/' if cursor.first() == Some('/') => {
let mut comment = String::new();
while !matches!(cursor.first(), Some('\n') | None) { while !matches!(cursor.first(), Some('\n') | None) {
cursor.next(); if let Some(c) = cursor.next() {
comment.push(c);
}
} }
continue; Token::Comment(comment)
} }
'\"' | '\'' => { '\"' | '\'' => {
let mut value = String::new(); let mut value = String::new();

View File

@ -1,7 +1,7 @@
//! This is the module that contains relevant code to parsing Reid, that is to //! This is the module that contains relevant code to parsing Reid, that is to
//! say transforming a Vec of FullTokens into a loose parsed AST that can be //! say transforming a Vec of FullTokens into a loose parsed AST that can be
//! used for unwrapping syntax sugar, and then be transformed into Reid MIR. //! used for unwrapping syntax sugar, and then be transformed into Reid MIR.
use std::path::PathBuf; use std::{fs::Metadata, path::PathBuf};
use token_stream::TokenRange; use token_stream::TokenRange;
@ -88,7 +88,7 @@ pub enum ExpressionKind {
/// Array-indexed, e.g. <expr>[<expr>] /// Array-indexed, e.g. <expr>[<expr>]
Indexed(Box<Expression>, Box<Expression>), Indexed(Box<Expression>, Box<Expression>),
/// Struct-accessed, e.g. <expr>.<expr> /// Struct-accessed, e.g. <expr>.<expr>
Accessed(Box<Expression>, String), Accessed(Box<Expression>, String, TokenRange),
/// Associated function call, but with a shorthand /// Associated function call, but with a shorthand
AccessCall(Box<Expression>, Box<FunctionCallExpression>), AccessCall(Box<Expression>, Box<FunctionCallExpression>),
Binop(BinaryOperator, Box<Expression>, Box<Expression>), Binop(BinaryOperator, Box<Expression>, Box<Expression>),
@ -184,7 +184,7 @@ pub struct LetStatement {
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ImportStatement(pub Vec<String>, pub TokenRange); pub struct ImportStatement(pub Vec<(String, TokenRange)>, pub TokenRange);
#[derive(Debug)] #[derive(Debug)]
pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub TokenRange); pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub TokenRange);
@ -193,7 +193,7 @@ pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub To
pub struct FunctionSignature { pub struct FunctionSignature {
pub name: String, pub name: String,
pub self_kind: SelfKind, pub self_kind: SelfKind,
pub params: Vec<(String, Type)>, pub params: Vec<(String, Type, TokenRange)>,
pub return_type: Option<Type>, pub return_type: Option<Type>,
#[allow(dead_code)] #[allow(dead_code)]
pub range: TokenRange, pub range: TokenRange,
@ -216,7 +216,7 @@ pub enum ReturnType {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct StructExpression { pub struct StructExpression {
name: String, name: String,
fields: Vec<(String, Expression)>, fields: Vec<(String, Expression, TokenRange)>,
range: TokenRange, range: TokenRange,
} }

View File

@ -175,7 +175,50 @@ impl Parse for AssociatedFunctionCall {
let ty = stream.parse()?; let ty = stream.parse()?;
stream.expect(Token::Colon)?; stream.expect(Token::Colon)?;
stream.expect(Token::Colon)?; stream.expect(Token::Colon)?;
Ok(AssociatedFunctionCall(ty, stream.parse()?))
if stream.next_is_whitespace() {
stream.expecting_err_nonfatal("associated function name");
return Ok(AssociatedFunctionCall(
ty,
FunctionCallExpression {
name: String::new(),
params: Vec::new(),
range: stream.get_range_prev_curr().unwrap(),
is_macro: false,
},
));
}
match stream.parse() {
Ok(fn_call) => Ok(AssociatedFunctionCall(ty, fn_call)),
_ => {
if let Some(Token::Identifier(fn_name)) = stream.peek() {
stream.next();
stream.expected_err_nonfatal("associated function call");
Ok(AssociatedFunctionCall(
ty,
FunctionCallExpression {
name: fn_name,
params: Vec::new(),
range: stream.get_range_prev_curr().unwrap(),
is_macro: false,
},
))
} else {
stream.expected_err_nonfatal("associated function name");
Ok(AssociatedFunctionCall(
ty,
FunctionCallExpression {
name: String::new(),
params: Vec::new(),
range: stream.get_range_prev_curr().unwrap(),
is_macro: false,
},
))
}
}
}
} }
} }
@ -191,10 +234,11 @@ where
), ),
expr.0 .1, expr.0 .1,
), ),
ExpressionKind::Accessed(value_expr, index_name) => Expression( ExpressionKind::Accessed(value_expr, index_name, range) => Expression(
ExpressionKind::Accessed( ExpressionKind::Accessed(
Box::new(apply_inner(PrimaryExpression(*value_expr.clone()), fun)), Box::new(apply_inner(PrimaryExpression(*value_expr.clone()), fun)),
index_name.clone(), index_name.clone(),
*range,
), ),
expr.0 .1, expr.0 .1,
), ),
@ -399,9 +443,9 @@ impl Parse for PrimaryExpression {
); );
} }
ValueIndex::Dot(val) => match val { ValueIndex::Dot(val) => match val {
DotIndexKind::StructValueIndex(name) => { DotIndexKind::StructValueIndex(name, range) => {
expr = Expression( expr = Expression(
ExpressionKind::Accessed(Box::new(expr), name), ExpressionKind::Accessed(Box::new(expr), name, range),
stream.get_range().unwrap(), stream.get_range().unwrap(),
); );
} }
@ -609,7 +653,7 @@ impl Parse for LetStatement {
stream.expect(Token::Equals)?; stream.expect(Token::Equals)?;
let expression = stream.parse()?; let expression = stream.parse()?;
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
Ok(LetStatement { Ok(LetStatement {
name: variable, name: variable,
ty, ty,
@ -630,19 +674,21 @@ impl Parse for ImportStatement {
let mut import_list = Vec::new(); let mut import_list = Vec::new();
if let Some(Token::Identifier(name)) = stream.next() { if let Some(Token::Identifier(name)) = stream.next() {
import_list.push(name); import_list.push((name, stream.get_range_prev_curr().unwrap()));
while stream.expect(Token::Colon).is_ok() && stream.expect(Token::Colon).is_ok() { while stream.expect(Token::Colon).is_ok() && stream.expect(Token::Colon).is_ok() {
if let Some(Token::Identifier(name)) = stream.next() { if let Some(Token::Identifier(name)) = stream.peek() {
import_list.push(name); stream.next(); // Consume identifier
import_list.push((name, stream.get_range_prev_curr().unwrap()));
} else { } else {
Err(stream.expected_err("identifier")?)? stream.expected_err_nonfatal("identifier");
break;
} }
} }
} else { } else {
Err(stream.expected_err("identifier")?)? Err(stream.expected_err("identifier")?)?
} }
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
Ok(ImportStatement(import_list, stream.get_range().unwrap())) Ok(ImportStatement(import_list, stream.get_range().unwrap()))
} }
@ -668,7 +714,7 @@ impl Parse for FunctionDefinition {
} }
#[derive(Debug)] #[derive(Debug)]
struct FunctionParam(String, Type); struct FunctionParam(String, Type, TokenRange);
impl Parse for FunctionParam { impl Parse for FunctionParam {
fn parse(mut stream: TokenStream) -> Result<Self, Error> { fn parse(mut stream: TokenStream) -> Result<Self, Error> {
@ -676,7 +722,7 @@ impl Parse for FunctionParam {
return Err(stream.expected_err("parameter name")?); return Err(stream.expected_err("parameter name")?);
}; };
stream.expect(Token::Colon)?; stream.expect(Token::Colon)?;
Ok(FunctionParam(arg_name, stream.parse()?)) Ok(FunctionParam(arg_name, stream.parse()?, stream.get_range().unwrap()))
} }
} }
@ -738,11 +784,11 @@ impl Parse for FunctionSignature {
match &self_kind { match &self_kind {
SelfKind::None => { SelfKind::None => {
if let Ok(param) = stream.parse::<FunctionParam>() { if let Ok(param) = stream.parse::<FunctionParam>() {
params.push((param.0, param.1)); params.push((param.0, param.1, param.2));
while let Some(Token::Comma) = stream.peek() { while let Some(Token::Comma) = stream.peek() {
stream.next(); stream.next();
let param = stream.parse::<FunctionParam>()?; let param = stream.parse::<FunctionParam>()?;
params.push((param.0, param.1)); params.push((param.0, param.1, param.2));
} }
} }
} }
@ -750,7 +796,7 @@ impl Parse for FunctionSignature {
while let Some(Token::Comma) = stream.peek() { while let Some(Token::Comma) = stream.peek() {
stream.next(); stream.next();
let param = stream.parse::<FunctionParam>()?; let param = stream.parse::<FunctionParam>()?;
params.push((param.0, param.1)); params.push((param.0, param.1, param.2));
} }
} }
} }
@ -787,7 +833,7 @@ impl Parse for Block {
// if semicolon is missing. // if semicolon is missing.
if !matches!(e, Expression(ExpressionKind::IfExpr(_), _)) { if !matches!(e, Expression(ExpressionKind::IfExpr(_), _)) {
// In theory could ignore the missing semicolon.. // In theory could ignore the missing semicolon..
return Err(stream.expected_err("semicolon to complete statement")?); stream.expected_err_nonfatal("semicolon to complete statement");
} }
statements.push(BlockLevelStatement::Expression(e)); statements.push(BlockLevelStatement::Expression(e));
@ -818,9 +864,10 @@ impl Parse for StructExpression {
let Some(Token::Identifier(name)) = stream.next() else { let Some(Token::Identifier(name)) = stream.next() else {
return Err(stream.expected_err("struct identifier")?); return Err(stream.expected_err("struct identifier")?);
}; };
stream.expect(Token::BraceOpen)?; stream.expect(Token::BraceOpen)?;
let named_list = stream.parse::<NamedFieldList<Expression>>()?; let named_list = stream.parse::<NamedFieldList<Expression>>()?;
let fields = named_list.0.into_iter().map(|f| (f.0, f.1)).collect(); let fields = named_list.0.into_iter().map(|f| (f.0, f.1, f.2)).collect();
stream.expect(Token::BraceClose)?; stream.expect(Token::BraceClose)?;
@ -897,14 +944,15 @@ impl Parse for ArrayValueIndex {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum DotIndexKind { pub enum DotIndexKind {
StructValueIndex(String), StructValueIndex(String, TokenRange),
FunctionCall(FunctionCallExpression), FunctionCall(FunctionCallExpression),
} }
impl Parse for DotIndexKind { impl Parse for DotIndexKind {
fn parse(mut stream: TokenStream) -> Result<Self, Error> { fn parse(mut stream: TokenStream) -> Result<Self, Error> {
stream.expect(Token::Dot)?; stream.expect(Token::Dot)?;
if let Some(Token::Identifier(name)) = stream.next() { if let Some(Token::Identifier(name)) = stream.peek() {
stream.next(); // Consume identifer
if let Ok(args) = stream.parse::<FunctionArgs>() { if let Ok(args) = stream.parse::<FunctionArgs>() {
Ok(Self::FunctionCall(FunctionCallExpression { Ok(Self::FunctionCall(FunctionCallExpression {
name, name,
@ -913,10 +961,18 @@ impl Parse for DotIndexKind {
is_macro: false, is_macro: false,
})) }))
} else { } else {
Ok(Self::StructValueIndex(name)) Ok(Self::StructValueIndex(name, stream.get_range_prev().unwrap()))
} }
} else { } else {
return Err(stream.expected_err("struct index (number)")?); if stream.next_is_whitespace() {
stream.expecting_err_nonfatal("struct index");
Ok(Self::StructValueIndex(
String::new(),
stream.get_range_prev_curr().unwrap(),
))
} else {
Err(stream.expecting_err("struct index")?)
}
} }
} }
} }
@ -930,7 +986,7 @@ impl Parse for BlockLevelStatement {
Some(Token::ReturnKeyword) => { Some(Token::ReturnKeyword) => {
stream.next(); stream.next();
let exp = stream.parse().ok(); let exp = stream.parse().ok();
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
Stmt::Return(ReturnType::Hard, exp) Stmt::Return(ReturnType::Hard, exp)
} }
Some(Token::For) => { Some(Token::For) => {
@ -995,7 +1051,7 @@ impl Parse for SetStatement {
let var_ref = stream.parse()?; let var_ref = stream.parse()?;
stream.expect(Token::Equals)?; stream.expect(Token::Equals)?;
let expr = stream.parse()?; let expr = stream.parse()?;
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
Ok(SetStatement(var_ref, expr, stream.get_range().unwrap())) Ok(SetStatement(var_ref, expr, stream.get_range().unwrap()))
} }
} }
@ -1036,7 +1092,7 @@ impl Parse for TopLevelStatement {
stream.next(); // Consume Extern stream.next(); // Consume Extern
stream.expect(Token::FnKeyword)?; stream.expect(Token::FnKeyword)?;
let extern_fn = Stmt::ExternFunction(stream.parse()?); let extern_fn = Stmt::ExternFunction(stream.parse()?);
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
extern_fn extern_fn
} }
Some(Token::FnKeyword) | Some(Token::PubKeyword) => Stmt::FunctionDefinition(stream.parse()?), Some(Token::FnKeyword) | Some(Token::PubKeyword) => Stmt::FunctionDefinition(stream.parse()?),

View File

@ -30,7 +30,14 @@ impl ast::Module {
for stmt in &self.top_level_statements { for stmt in &self.top_level_statements {
match stmt { match stmt {
Import(import) => { Import(import) => {
imports.push(mir::Import(import.0.clone(), import.1.as_meta(module_id))); imports.push(mir::Import(
import
.0
.iter()
.map(|(s, range)| (s.clone(), range.as_meta(module_id)))
.collect(),
import.1.as_meta(module_id),
));
} }
FunctionDefinition(function_def) => functions.push(function_def.into_mir(module_id)), FunctionDefinition(function_def) => functions.push(function_def.into_mir(module_id)),
ExternFunction(signature) => { ExternFunction(signature) => {
@ -51,7 +58,7 @@ impl ast::Module {
.map(|p| mir::FunctionParam { .map(|p| mir::FunctionParam {
name: p.0, name: p.0,
ty: p.1 .0.into_mir(module_id), ty: p.1 .0.into_mir(module_id),
meta: p.1 .1.as_meta(module_id), meta: p.2.as_meta(module_id),
}) })
.collect(), .collect(),
kind: mir::FunctionDefinitionKind::Extern(false), kind: mir::FunctionDefinitionKind::Extern(false),
@ -164,7 +171,7 @@ impl ast::FunctionDefinition {
params.extend(signature.params.iter().cloned().map(|p| FunctionParam { params.extend(signature.params.iter().cloned().map(|p| FunctionParam {
name: p.0, name: p.0,
ty: p.1 .0.into_mir(module_id), ty: p.1 .0.into_mir(module_id),
meta: p.1 .1.as_meta(module_id), meta: p.2.as_meta(module_id),
})); }));
mir::FunctionDefinition { mir::FunctionDefinition {
name: signature.name.clone(), name: signature.name.clone(),
@ -228,33 +235,34 @@ impl ast::Block {
StmtKind::Let(counter_var.clone(), true, start.process(module_id)), StmtKind::Let(counter_var.clone(), true, start.process(module_id)),
counter_range.as_meta(module_id), counter_range.as_meta(module_id),
); );
let statement_range = counter_range.clone() + start.1 + end.1 + block.2;
let set_new = mir::Statement( let set_new = mir::Statement(
StmtKind::Set( StmtKind::Set(
mir::Expression( mir::Expression(
mir::ExprKind::Variable(counter_var.clone()), mir::ExprKind::Variable(counter_var.clone()),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
), ),
mir::Expression( mir::Expression(
mir::ExprKind::BinOp( mir::ExprKind::BinOp(
mir::BinaryOperator::Add, mir::BinaryOperator::Add,
Box::new(mir::Expression( Box::new(mir::Expression(
mir::ExprKind::Variable(counter_var.clone()), mir::ExprKind::Variable(counter_var.clone()),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
)), )),
Box::new(mir::Expression( Box::new(mir::Expression(
mir::ExprKind::Literal(mir::Literal::Vague(mir::VagueLiteral::Number(1))), mir::ExprKind::Literal(mir::Literal::Vague(mir::VagueLiteral::Number(1))),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
)), )),
mir::TypeKind::Vague(mir::VagueType::Unknown), mir::TypeKind::Vague(mir::VagueType::Unknown),
), ),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
), ),
), ),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
); );
let mut block = block.into_mir(module_id); let mut mir_block = block.into_mir(module_id);
block.statements.push(set_new); mir_block.statements.push(set_new);
let while_statement = mir::Statement( let while_statement = mir::Statement(
StmtKind::While(WhileStatement { StmtKind::While(WhileStatement {
condition: mir::Expression( condition: mir::Expression(
@ -262,28 +270,28 @@ impl ast::Block {
mir::BinaryOperator::Cmp(mir::CmpOperator::LT), mir::BinaryOperator::Cmp(mir::CmpOperator::LT),
Box::new(mir::Expression( Box::new(mir::Expression(
mir::ExprKind::Variable(counter_var), mir::ExprKind::Variable(counter_var),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
)), )),
Box::new(end.process(module_id)), Box::new(end.process(module_id)),
mir::TypeKind::Vague(mir::VagueType::Unknown), mir::TypeKind::Vague(mir::VagueType::Unknown),
), ),
counter_range.as_meta(module_id), (start.1 + end.1).as_meta(module_id),
), ),
block, block: mir_block.clone(),
meta: self.2.as_meta(module_id), meta: (start.1 + end.1 + block.2).as_meta(module_id),
}), }),
self.2.as_meta(module_id), (start.1 + end.1 + block.2).as_meta(module_id),
); );
let inner_scope = StmtKind::Expression(mir::Expression( let inner_scope = StmtKind::Expression(mir::Expression(
mir::ExprKind::Block(mir::Block { mir::ExprKind::Block(mir::Block {
statements: vec![let_statement, while_statement], statements: vec![let_statement, while_statement],
return_expression: None, return_expression: None,
meta: counter_range.as_meta(module_id) + end.1.as_meta(module_id), meta: statement_range.as_meta(module_id),
}), }),
counter_range.as_meta(module_id) + end.1.as_meta(module_id), statement_range.as_meta(module_id),
)); ));
(inner_scope, self.2) (inner_scope, statement_range)
} }
ast::BlockLevelStatement::WhileLoop(expression, block) => ( ast::BlockLevelStatement::WhileLoop(expression, block) => (
StmtKind::While(WhileStatement { StmtKind::While(WhileStatement {
@ -291,7 +299,7 @@ impl ast::Block {
block: block.into_mir(module_id), block: block.into_mir(module_id),
meta: self.2.as_meta(module_id), meta: self.2.as_meta(module_id),
}), }),
self.2, expression.1 + block.2,
), ),
}; };
@ -375,13 +383,14 @@ impl ast::Expression {
struct_init struct_init
.fields .fields
.iter() .iter()
.map(|(n, e)| (n.clone(), e.process(module_id))) .map(|(n, e, r)| (n.clone(), e.process(module_id), r.as_meta(module_id)))
.collect(), .collect(),
), ),
ast::ExpressionKind::Accessed(expression, name) => mir::ExprKind::Accessed( ast::ExpressionKind::Accessed(expression, name, name_range) => mir::ExprKind::Accessed(
Box::new(expression.process(module_id)), Box::new(expression.process(module_id)),
mir::TypeKind::Vague(mir::VagueType::Unknown), mir::TypeKind::Vague(mir::VagueType::Unknown),
name.clone(), name.clone(),
name_range.as_meta(module_id),
), ),
ast::ExpressionKind::Borrow(expr, mutable) => { ast::ExpressionKind::Borrow(expr, mutable) => {
mir::ExprKind::Borrow(Box::new(expr.process(module_id)), *mutable) mir::ExprKind::Borrow(Box::new(expr.process(module_id)), *mutable)

View File

@ -1,6 +1,8 @@
//! Contains relevant code for parsing tokens received from //! Contains relevant code for parsing tokens received from
//! Lexing/Tokenizing-stage. //! Lexing/Tokenizing-stage.
use std::{cell::RefCell, rc::Rc};
use crate::{ use crate::{
ast::parse::Parse, ast::parse::Parse,
lexer::{FullToken, Token}, lexer::{FullToken, Token},
@ -12,6 +14,7 @@ use crate::{
pub struct TokenStream<'a, 'b> { pub struct TokenStream<'a, 'b> {
ref_position: Option<&'b mut usize>, ref_position: Option<&'b mut usize>,
tokens: &'a [FullToken], tokens: &'a [FullToken],
errors: Rc<RefCell<Vec<Error>>>,
pub position: usize, pub position: usize,
} }
@ -20,6 +23,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
TokenStream { TokenStream {
ref_position: None, ref_position: None,
tokens, tokens,
errors: Rc::new(RefCell::new(Vec::new())),
position: 0, position: 0,
} }
} }
@ -38,24 +42,42 @@ impl<'a, 'b> TokenStream<'a, 'b> {
)) ))
} }
/// Returns expected-error for the next token in-line. Useful in conjunction
/// with [`TokenStream::peek`]
pub fn expected_err_nonfatal<T: Into<String>>(&mut self, expected: T) {
let err = match self.expected_err(expected) {
Ok(e) => e,
Err(e) => e,
};
self.errors.borrow_mut().push(err);
}
/// Returns expected-error for the previous token that was already consumed. /// Returns expected-error for the previous token that was already consumed.
/// Useful in conjunction with [`TokenStream::next`] /// Useful in conjunction with [`TokenStream::next`]
pub fn expecting_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> { pub fn expecting_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> {
let next_token = self.peek().unwrap_or(Token::Eof); let next_token = self.peek().unwrap_or(Token::Eof);
let pos = self.next_token(self.position).0;
Ok(Error::Expected( Ok(Error::Expected(
expected.into(), expected.into(),
next_token, next_token,
TokenRange { TokenRange { start: pos, end: pos },
start: self.position,
end: self.position,
},
)) ))
} }
/// Returns expected-error for the previous token that was already consumed.
/// Useful in conjunction with [`TokenStream::next`]
pub fn expecting_err_nonfatal<T: Into<String>>(&mut self, expected: T) {
let err = match self.expecting_err(expected) {
Ok(e) => e,
Err(e) => e,
};
self.errors.borrow_mut().push(err);
}
pub fn expect(&mut self, token: Token) -> Result<(), Error> { pub fn expect(&mut self, token: Token) -> Result<(), Error> {
if let Some(peeked) = self.peek() { if let (pos, Some(peeked)) = self.next_token(self.position) {
if token == peeked { if token == peeked.token {
self.position += 1; self.position = pos + 1;
Ok(()) Ok(())
} else { } else {
Err(self.expecting_err(token)?) Err(self.expecting_err(token)?)
@ -65,38 +87,41 @@ impl<'a, 'b> TokenStream<'a, 'b> {
} }
} }
pub fn next(&mut self) -> Option<Token> { pub fn expect_nonfatal(&mut self, token: Token) -> Result<(), ()> {
let value = if self.tokens.len() < self.position { if let (pos, Some(peeked)) = self.next_token(self.position) {
None if token == peeked.token {
self.position = pos + 1;
Ok(())
} else {
self.expecting_err_nonfatal(token);
Err(())
}
} else { } else {
Some(self.tokens[self.position].token.clone()) self.expecting_err_nonfatal(token);
}; Err(())
self.position += 1; }
value }
pub fn next(&mut self) -> Option<Token> {
let (position, token) = self.next_token(self.position);
self.position = position + 1;
token.map(|t| t.token.clone())
} }
pub fn previous(&mut self) -> Option<Token> { pub fn previous(&mut self) -> Option<Token> {
if (self.position as i32 - 1) < 0 { let (_, token) = self.previous_token(self.position);
None token.map(|t| t.token.clone())
} else {
Some(self.tokens[self.position - 1].token.clone())
}
} }
pub fn peek(&mut self) -> Option<Token> { pub fn peek(&mut self) -> Option<Token> {
if self.tokens.len() < self.position { let (_, token) = self.next_token(self.position);
None token.map(|t| t.token.clone())
} else {
Some(self.tokens[self.position].token.clone())
}
} }
pub fn peek2(&mut self) -> Option<Token> { pub fn peek2(&mut self) -> Option<Token> {
if self.tokens.len() < (self.position + 1) { let (pos2, _) = self.next_token(self.position);
None let (_, token) = self.next_token(pos2 + 1);
} else { token.map(|t| t.token.clone())
Some(self.tokens[self.position + 1].token.clone())
}
} }
/// Parse the next value of trait Parse. If the parse succeeded, the related /// Parse the next value of trait Parse. If the parse succeeded, the related
@ -161,6 +186,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
let clone = TokenStream { let clone = TokenStream {
ref_position: Some(&mut ref_pos), ref_position: Some(&mut ref_pos),
tokens: self.tokens, tokens: self.tokens,
errors: self.errors.clone(),
position, position,
}; };
@ -185,9 +211,56 @@ impl<'a, 'b> TokenStream<'a, 'b> {
pub fn get_range_prev(&self) -> Option<TokenRange> { pub fn get_range_prev(&self) -> Option<TokenRange> {
self.ref_position.as_ref().map(|ref_pos| TokenRange { self.ref_position.as_ref().map(|ref_pos| TokenRange {
start: **ref_pos, start: **ref_pos,
end: self.position - 1, end: self.previous_token(self.position).0,
}) })
} }
/// Gets range of the previous token only.
pub fn get_range_prev_curr(&self) -> Option<TokenRange> {
Some(TokenRange {
start: self.previous_token(self.position).0,
end: self.previous_token(self.position).0,
})
}
fn previous_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
from -= 1;
while let Some(token) = self.tokens.get(from) {
if matches!(token.token, Token::Whitespace(_) | Token::Comment(_)) {
from -= 1;
} else {
break;
}
}
(from, self.tokens.get(from))
}
fn next_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
while let Some(token) = self.tokens.get(from) {
if matches!(token.token, Token::Whitespace(_) | Token::Comment(_)) {
from += 1;
} else {
break;
}
}
(from, self.tokens.get(from))
}
pub fn errors(&self) -> Vec<Error> {
self.errors.borrow().clone().clone()
}
pub fn next_is_whitespace(&self) -> bool {
if let Some(token) = self.tokens.get(self.position) {
if let Token::Whitespace(_) = token.token {
true
} else {
false
}
} else {
true
}
}
} }
impl Drop for TokenStream<'_, '_> { impl Drop for TokenStream<'_, '_> {
@ -217,8 +290,8 @@ impl std::ops::Add for TokenRange {
fn add(self, rhs: Self) -> Self::Output { fn add(self, rhs: Self) -> Self::Output {
TokenRange { TokenRange {
start: self.start.min(rhs.start), start: self.start.min(rhs.start).min(rhs.end),
end: self.end.min(rhs.end), end: self.end.max(rhs.end).max(rhs.start),
} }
} }
} }

View File

@ -133,7 +133,7 @@ impl mir::Expression {
allocated.extend(expr.allocate(scope)); allocated.extend(expr.allocate(scope));
allocated.extend(idx.allocate(scope)); allocated.extend(idx.allocate(scope));
} }
mir::ExprKind::Accessed(expression, _, _) => { mir::ExprKind::Accessed(expression, ..) => {
allocated.extend(expression.allocate(scope)); allocated.extend(expression.allocate(scope));
} }
mir::ExprKind::Array(expressions) => { mir::ExprKind::Array(expressions) => {
@ -159,7 +159,7 @@ impl mir::Expression {
.unwrap(); .unwrap();
allocated.push(Allocation(self.1, ty, allocation)); allocated.push(Allocation(self.1, ty, allocation));
for (field_name, expression) in items { for (field_name, expression, _) in items {
allocated.extend(expression.allocate(scope)); allocated.extend(expression.allocate(scope));
let (_, ty) = expression.return_type(&Default::default(), scope.mod_id).unwrap(); let (_, ty) = expression.return_type(&Default::default(), scope.mod_id).unwrap();

View File

@ -1,3 +1,5 @@
use std::{collections::HashMap, hash::Hash};
use reid_lib::{builder::InstructionValue, CmpPredicate, ConstValueKind, Instr, Type}; use reid_lib::{builder::InstructionValue, CmpPredicate, ConstValueKind, Instr, Type};
use crate::{ use crate::{
@ -57,6 +59,15 @@ pub fn form_intrinsics() -> Vec<FunctionDefinition> {
intrinsics intrinsics
} }
pub fn get_intrinsic_assoc_functions(ty: &TypeKind) -> HashMap<String, Option<FunctionDefinition>> {
let mut map = HashMap::new();
map.insert("length".to_owned(), get_intrinsic_assoc_func(ty, "length"));
map.insert("sizeof".to_owned(), get_intrinsic_assoc_func(ty, "sizeof"));
map.insert("malloc".to_owned(), get_intrinsic_assoc_func(ty, "malloc"));
map.insert("null".to_owned(), get_intrinsic_assoc_func(ty, "null"));
map
}
pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDefinition> { pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDefinition> {
if let TypeKind::Array(_, len) = ty { if let TypeKind::Array(_, len) = ty {
match name { match name {
@ -74,7 +85,7 @@ pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDef
}], }],
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicConst(*len))), kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicConst(*len))),
source: None, source: None,
}) });
} }
_ => {} _ => {}
} }
@ -247,26 +258,17 @@ pub fn form_intrinsic_binops() -> Vec<BinopDefinition> {
scope.block.build(Instr::XOr(lhs, rhs)).unwrap() scope.block.build(Instr::XOr(lhs, rhs)).unwrap()
})); }));
if ty.signed() { if ty.signed() {
intrinsics.push(complex_binop_def( intrinsics.push(complex_binop_def(BitshiftRight, &ty, &ty, |scope, lhs, rhs| {
BitshiftRight, scope.block.build(Instr::ShiftRightArithmetic(lhs, rhs)).unwrap()
&ty, }));
&TypeKind::U64,
|scope, lhs, rhs| scope.block.build(Instr::ShiftRightArithmetic(lhs, rhs)).unwrap(),
));
} else { } else {
intrinsics.push(complex_binop_def( intrinsics.push(complex_binop_def(BitshiftRight, &ty, &ty, |scope, lhs, rhs| {
BitshiftRight, scope.block.build(Instr::ShiftRightLogical(lhs, rhs)).unwrap()
&ty, }));
&TypeKind::U64,
|scope, lhs, rhs| scope.block.build(Instr::ShiftRightLogical(lhs, rhs)).unwrap(),
));
} }
intrinsics.push(complex_binop_def( intrinsics.push(complex_binop_def(BitshiftLeft, &ty, &ty, |scope, lhs, rhs| {
BitshiftLeft, scope.block.build(Instr::ShiftLeft(lhs, rhs)).unwrap()
&ty, }));
&TypeKind::U64,
|scope, lhs, rhs| scope.block.build(Instr::ShiftLeft(lhs, rhs)).unwrap(),
));
} }
for ty in INTEGERS.iter().chain(&[TypeKind::Bool, TypeKind::Char]) { for ty in INTEGERS.iter().chain(&[TypeKind::Bool, TypeKind::Char]) {
intrinsics.push(boolean_binop_def(Cmp(CmpOperator::EQ), &ty, |scope, lhs, rhs| { intrinsics.push(boolean_binop_def(Cmp(CmpOperator::EQ), &ty, |scope, lhs, rhs| {
@ -386,7 +388,9 @@ impl IntrinsicFunction for IntrinsicSizeOf {
fn codegen<'ctx, 'a>(&self, scope: &mut Scope<'ctx, 'a>, _: &[StackValue]) -> Result<StackValue, ErrorKind> { fn codegen<'ctx, 'a>(&self, scope: &mut Scope<'ctx, 'a>, _: &[StackValue]) -> Result<StackValue, ErrorKind> {
let instr = scope let instr = scope
.block .block
.build(Instr::Constant(reid_lib::ConstValueKind::U64(self.0.size_of() / 8))) .build(Instr::Constant(reid_lib::ConstValueKind::U64(
self.0.size_of(&scope.type_map) / 8,
)))
.unwrap(); .unwrap();
Ok(StackValue(StackValueKind::Literal(instr), self.0.clone())) Ok(StackValue(StackValueKind::Literal(instr), self.0.clone()))
} }
@ -404,7 +408,9 @@ impl IntrinsicFunction for IntrinsicMalloc {
let sizeof = scope let sizeof = scope
.block .block
.build(Instr::Constant(ConstValueKind::U64(self.0.size_of() / 8))) .build(Instr::Constant(ConstValueKind::U64(
self.0.size_of(&scope.type_map) / 8,
)))
.unwrap(); .unwrap();
let bytes = scope.block.build(Instr::Mul(sizeof, amount.instr())).unwrap(); let bytes = scope.block.build(Instr::Mul(sizeof, amount.instr())).unwrap();
let instr = scope.block.build(Instr::FunctionCall(function, vec![bytes])).unwrap(); let instr = scope.block.build(Instr::FunctionCall(function, vec![bytes])).unwrap();

View File

@ -144,6 +144,7 @@ impl mir::Module {
let mut types = HashMap::new(); let mut types = HashMap::new();
let mut type_values = HashMap::new(); let mut type_values = HashMap::new();
let mut debug_types = HashMap::new(); let mut debug_types = HashMap::new();
let mut type_map = HashMap::new();
macro_rules! insert_debug { macro_rules! insert_debug {
($kind:expr) => { ($kind:expr) => {
@ -153,8 +154,7 @@ impl mir::Module {
&compile_unit, &compile_unit,
&debug, &debug,
&debug_types, &debug_types,
&type_values, &type_map,
&types,
self.module_id, self.module_id,
&self.tokens, &self.tokens,
&modules, &modules,
@ -182,6 +182,8 @@ impl mir::Module {
for typedef in typedefs { for typedef in typedefs {
let type_key = CustomTypeKey(typedef.name.clone(), typedef.source_module); let type_key = CustomTypeKey(typedef.name.clone(), typedef.source_module);
type_map.insert(type_key.clone(), typedef.clone());
let type_value = match &typedef.kind { let type_value = match &typedef.kind {
TypeDefinitionKind::Struct(StructType(fields)) => { TypeDefinitionKind::Struct(StructType(fields)) => {
module.custom_type(CustomTypeKind::NamedStruct(NamedStruct( module.custom_type(CustomTypeKind::NamedStruct(NamedStruct(
@ -198,6 +200,7 @@ impl mir::Module {
}; };
types.insert(type_value, typedef.clone()); types.insert(type_value, typedef.clone());
type_values.insert(type_key.clone(), type_value); type_values.insert(type_key.clone(), type_value);
insert_debug!(&TypeKind::CustomType(type_key.clone())); insert_debug!(&TypeKind::CustomType(type_key.clone()));
} }
@ -380,6 +383,7 @@ impl mir::Module {
functions: &functions, functions: &functions,
types: &types, types: &types,
type_values: &type_values, type_values: &type_values,
type_map: &type_map,
globals: &globals, globals: &globals,
stack_values: HashMap::new(), stack_values: HashMap::new(),
debug: Some(Debug { debug: Some(Debug {
@ -457,6 +461,7 @@ impl mir::Module {
functions: &functions, functions: &functions,
types: &types, types: &types,
type_values: &type_values, type_values: &type_values,
type_map: &type_map,
stack_values: HashMap::new(), stack_values: HashMap::new(),
debug: Some(Debug { debug: Some(Debug {
info: &debug, info: &debug,
@ -518,6 +523,7 @@ impl mir::Module {
functions: &functions, functions: &functions,
types: &types, types: &types,
type_values: &type_values, type_values: &type_values,
type_map: &type_map,
stack_values: HashMap::new(), stack_values: HashMap::new(),
debug: Some(Debug { debug: Some(Debug {
info: &debug, info: &debug,
@ -728,7 +734,6 @@ impl mir::Statement {
mir::StmtKind::Let(NamedVariableRef(ty, name, meta), mutable, expression) => { mir::StmtKind::Let(NamedVariableRef(ty, name, meta), mutable, expression) => {
let value = expression.codegen(scope, &state)?.unwrap(); let value = expression.codegen(scope, &state)?.unwrap();
dbg!(&scope.allocator, &meta, &value.1);
let alloca = scope let alloca = scope
.allocate(meta, &value.1) .allocate(meta, &value.1)
.unwrap() .unwrap()
@ -1161,7 +1166,7 @@ impl mir::Expression {
TypeKind::Array(Box::new(elem_ty_kind), instr_list.len() as u64), TypeKind::Array(Box::new(elem_ty_kind), instr_list.len() as u64),
)) ))
} }
mir::ExprKind::Accessed(expression, type_kind, field) => { mir::ExprKind::Accessed(expression, type_kind, field, _) => {
let struct_val = expression.codegen(scope, &state.load(false))?.unwrap(); let struct_val = expression.codegen(scope, &state.load(false))?.unwrap();
let TypeKind::CodegenPtr(inner) = &struct_val.1 else { let TypeKind::CodegenPtr(inner) = &struct_val.1 else {
@ -1223,7 +1228,7 @@ impl mir::Expression {
.unwrap() .unwrap()
.maybe_location(&mut scope.block, location.clone()); .maybe_location(&mut scope.block, location.clone());
for (field_n, exp) in items { for (field_n, exp, _) in items {
let gep_n = format!("{}.{}.gep", name, field_n); let gep_n = format!("{}.{}.gep", name, field_n);
let store_n = format!("{}.{}.store", name, field_n); let store_n = format!("{}.{}.store", name, field_n);
let i = indices.clone().find(|(_, f)| f.0 == *field_n).unwrap().0; let i = indices.clone().find(|(_, f)| f.0 == *field_n).unwrap().0;
@ -1312,45 +1317,45 @@ impl mir::Expression {
if val.1 == *type_kind { if val.1 == *type_kind {
Some(val) Some(val)
} else { } else {
match (&val.1, type_kind) { let (ty, other) = if !state.should_load {
(TypeKind::CodegenPtr(inner), TypeKind::UserPtr(ty2)) => match *inner.clone() { let TypeKind::CodegenPtr(inner) = &val.1 else {
TypeKind::UserPtr(_) => Some(StackValue( panic!();
val.0.derive( };
scope (*inner.clone(), TypeKind::CodegenPtr(Box::new(type_kind.clone())))
.block } else {
.build(Instr::BitCast( (val.1.clone(), type_kind.clone())
val.instr(), };
Type::Ptr(Box::new(type_kind.get_type(scope.type_values))),
)) match (&ty, type_kind) {
.unwrap(), (TypeKind::UserPtr(_), TypeKind::UserPtr(_)) => Some(StackValue(
), val.0.derive(
TypeKind::CodegenPtr(Box::new(type_kind.clone())), scope
)), .block
TypeKind::Borrow(ty1, _) => match *ty1.clone() { .build(Instr::BitCast(val.instr(), other.get_type(scope.type_values)))
TypeKind::Array(ty1, _) => { .unwrap(),
if ty1 == *ty2 { ),
Some(StackValue( other.clone(),
val.0.derive( )),
scope (TypeKind::Borrow(ty1, _), TypeKind::UserPtr(ty2)) => {
.block if let TypeKind::Array(ty1, _) = ty1.as_ref() {
.build(Instr::BitCast( if ty1 == ty2 {
val.instr(), Some(StackValue(
Type::Ptr(Box::new(type_kind.get_type(scope.type_values))), val.0.derive(
)) scope
.unwrap(), .block
), .build(Instr::BitCast(val.instr(), other.get_type(scope.type_values)))
TypeKind::CodegenPtr(Box::new(type_kind.clone())), .unwrap(),
)) ),
} else { other,
return Err(ErrorKind::Null); ))
} } else {
return Err(ErrorKind::Null).unwrap();
} }
_ => return Err(ErrorKind::Null), } else {
}, return Err(ErrorKind::Null).unwrap();
_ => panic!(), }
}, }
(TypeKind::UserPtr(_), TypeKind::UserPtr(_)) (TypeKind::Char, TypeKind::U8)
| (TypeKind::Char, TypeKind::U8)
| (TypeKind::U8, TypeKind::Char) | (TypeKind::U8, TypeKind::Char)
| (TypeKind::U8, TypeKind::I8) => Some(StackValue( | (TypeKind::U8, TypeKind::I8) => Some(StackValue(
val.0.derive( val.0.derive(
@ -1362,8 +1367,7 @@ impl mir::Expression {
type_kind.clone(), type_kind.clone(),
)), )),
_ => { _ => {
let cast_instr = val let cast_instr = ty
.1
.get_type(scope.type_values) .get_type(scope.type_values)
.cast_instruction(val.instr(), &type_kind.get_type(scope.type_values)) .cast_instruction(val.instr(), &type_kind.get_type(scope.type_values))
.unwrap(); .unwrap();
@ -1379,11 +1383,30 @@ impl mir::Expression {
mir::ExprKind::AssociatedFunctionCall(ty, call) => codegen_function_call(Some(ty), call, scope, state)?, mir::ExprKind::AssociatedFunctionCall(ty, call) => codegen_function_call(Some(ty), call, scope, state)?,
mir::ExprKind::GlobalRef(global_name, ty) => { mir::ExprKind::GlobalRef(global_name, ty) => {
let global_value = scope.globals.get(global_name).unwrap(); let global_value = scope.globals.get(global_name).unwrap();
let a = Some(StackValue(
StackValueKind::Literal(scope.block.build(Instr::GetGlobal(global_value.clone())).unwrap()), let value = scope.block.build(Instr::GetGlobal(global_value.clone())).unwrap();
ty.clone(),
)); if !state.should_load {
a let allocated = scope
.block
.build(Instr::Alloca(ty.get_type(scope.type_values)))
.unwrap();
scope
.block
.build(Instr::Store(allocated, value))
.unwrap()
.maybe_location(&mut scope.block, location.clone());
let a = Some(StackValue(
StackValueKind::Literal(allocated),
TypeKind::CodegenPtr(Box::new(ty.clone())),
));
a
} else {
let a = Some(StackValue(StackValueKind::Literal(value), ty.clone()));
a
}
} }
}; };
if let Some(value) = &value { if let Some(value) = &value {

View File

@ -26,6 +26,7 @@ pub struct Scope<'ctx, 'scope> {
pub(super) block: Block<'ctx>, pub(super) block: Block<'ctx>,
pub(super) types: &'scope HashMap<TypeValue, TypeDefinition>, pub(super) types: &'scope HashMap<TypeValue, TypeDefinition>,
pub(super) type_values: &'scope HashMap<CustomTypeKey, TypeValue>, pub(super) type_values: &'scope HashMap<CustomTypeKey, TypeValue>,
pub(super) type_map: &'scope HashMap<CustomTypeKey, TypeDefinition>,
pub(super) assoc_functions: &'scope HashMap<AssociatedFunctionKey, ScopeFunctionKind<'ctx>>, pub(super) assoc_functions: &'scope HashMap<AssociatedFunctionKey, ScopeFunctionKind<'ctx>>,
pub(super) functions: &'scope HashMap<String, ScopeFunctionKind<'ctx>>, pub(super) functions: &'scope HashMap<String, ScopeFunctionKind<'ctx>>,
pub(super) binops: &'scope HashMap<BinopKey, StackBinopDefinition<'ctx>>, pub(super) binops: &'scope HashMap<BinopKey, StackBinopDefinition<'ctx>>,
@ -49,6 +50,7 @@ impl<'ctx, 'a> Scope<'ctx, 'a> {
functions: self.functions, functions: self.functions,
types: self.types, types: self.types,
type_values: self.type_values, type_values: self.type_values,
type_map: self.type_map,
stack_values: self.stack_values.clone(), stack_values: self.stack_values.clone(),
debug: self.debug.clone(), debug: self.debug.clone(),
allocator: self.allocator.clone(), allocator: self.allocator.clone(),

View File

@ -109,8 +109,7 @@ impl TypeKind {
&debug.scope, &debug.scope,
debug.info, debug.info,
debug.types, debug.types,
scope.type_values, scope.type_map,
scope.types,
scope.module_id, scope.module_id,
scope.tokens, scope.tokens,
scope.modules, scope.modules,
@ -122,8 +121,7 @@ impl TypeKind {
scope: &DebugScopeValue, scope: &DebugScopeValue,
debug_info: &DebugInformation, debug_info: &DebugInformation,
debug_types: &HashMap<TypeKind, DebugTypeValue>, debug_types: &HashMap<TypeKind, DebugTypeValue>,
type_values: &HashMap<CustomTypeKey, TypeValue>, type_map: &HashMap<CustomTypeKey, TypeDefinition>,
types: &HashMap<TypeValue, TypeDefinition>,
local_mod: SourceModuleId, local_mod: SourceModuleId,
tokens: &Vec<FullToken>, tokens: &Vec<FullToken>,
modules: &HashMap<SourceModuleId, ModuleCodegen>, modules: &HashMap<SourceModuleId, ModuleCodegen>,
@ -142,13 +140,12 @@ impl TypeKind {
scope, scope,
debug_info, debug_info,
debug_types, debug_types,
type_values, type_map,
types,
local_mod, local_mod,
tokens, tokens,
modules, modules,
), ),
size_bits: self.size_of(), size_bits: self.size_of(type_map),
}) })
} }
TypeKind::Array(elem_ty, len) => { TypeKind::Array(elem_ty, len) => {
@ -156,21 +153,20 @@ impl TypeKind {
scope, scope,
debug_info, debug_info,
debug_types, debug_types,
type_values, type_map,
types,
local_mod, local_mod,
tokens, tokens,
modules, modules,
); );
DebugTypeData::Array(DebugArrayType { DebugTypeData::Array(DebugArrayType {
size_bits: self.size_of(), size_bits: self.size_of(type_map),
align_bits: self.alignment(), align_bits: self.alignment(),
element_type: elem_ty, element_type: elem_ty,
length: *len, length: *len,
}) })
} }
TypeKind::CustomType(key) => { TypeKind::CustomType(key) => {
let typedef = types.get(type_values.get(key).unwrap()).unwrap(); let typedef = type_map.get(key).unwrap();
match &typedef.kind { match &typedef.kind {
TypeDefinitionKind::Struct(struct_type) => { TypeDefinitionKind::Struct(struct_type) => {
let mut fields = Vec::new(); let mut fields = Vec::new();
@ -186,21 +182,20 @@ impl TypeKind {
name: field.0.clone(), name: field.0.clone(),
scope: scope.clone(), scope: scope.clone(),
pos: location.map(|l| l.pos), pos: location.map(|l| l.pos),
size_bits: field.1.size_of(), size_bits: field.1.size_of(type_map),
offset: size_bits, offset: size_bits,
flags: DwarfFlags, flags: DwarfFlags,
ty: field.1.get_debug_type_hard( ty: field.1.get_debug_type_hard(
scope, scope,
debug_info, debug_info,
debug_types, debug_types,
type_values, type_map,
types,
local_mod, local_mod,
tokens, tokens,
modules, modules,
), ),
}); });
size_bits += field.1.size_of(); size_bits += field.1.size_of(type_map);
} }
{ {
let location = if typedef.source_module != local_mod { let location = if typedef.source_module != local_mod {
@ -222,7 +217,7 @@ impl TypeKind {
} }
_ => DebugTypeData::Basic(DebugBasicType { _ => DebugTypeData::Basic(DebugBasicType {
name, name,
size_bits: self.size_of(), size_bits: self.size_of(type_map),
encoding: match self { encoding: match self {
TypeKind::Bool => DwarfEncoding::Boolean, TypeKind::Bool => DwarfEncoding::Boolean,
TypeKind::I8 => DwarfEncoding::SignedChar, TypeKind::I8 => DwarfEncoding::SignedChar,

View File

@ -50,7 +50,7 @@ impl ErrorKind {
} }
impl ErrorKind { impl ErrorKind {
fn get_meta(&self) -> Metadata { pub fn get_meta(&self) -> Metadata {
match &self { match &self {
ErrorKind::LexerError(error) => error.metadata, ErrorKind::LexerError(error) => error.metadata,
ErrorKind::ParserError(error) => error.metadata, ErrorKind::ParserError(error) => error.metadata,
@ -63,6 +63,18 @@ impl ErrorKind {
ErrorKind::MacroError(error) => error.metadata, ErrorKind::MacroError(error) => error.metadata,
} }
} }
pub fn get_type_str(&self) -> &str {
match self {
ErrorKind::LexerError(_) => "lexer",
ErrorKind::ParserError(_) => "parser",
ErrorKind::TypeCheckError(_) => "typechecker",
ErrorKind::TypeInferenceError(_) => "type-inferrer",
ErrorKind::LinkerError(_) => "linker",
ErrorKind::MacroError(_) => "macro-pass",
ErrorKind::CodegenError(_) => "codegen",
}
}
} }
impl PartialOrd for ErrorKind { impl PartialOrd for ErrorKind {
@ -120,7 +132,7 @@ impl ErrorModules {
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub struct ReidError { pub struct ReidError {
map: ErrorModules, map: ErrorModules,
errors: Vec<ErrorKind>, pub errors: Vec<ErrorKind>,
} }
impl ReidError { impl ReidError {
@ -169,6 +181,10 @@ impl ReidError {
pub fn from_kind(errors: Vec<ErrorKind>, map: ErrorModules) -> ReidError { pub fn from_kind(errors: Vec<ErrorKind>, map: ErrorModules) -> ReidError {
ReidError { map, errors } ReidError { map, errors }
} }
pub fn extend(&mut self, other: ReidError) {
self.errors.extend(other.errors);
}
} }
impl std::error::Error for ReidError {} impl std::error::Error for ReidError {}
@ -185,9 +201,7 @@ impl std::fmt::Display for ReidError {
let module = self.map.module(&meta.source_module_id); let module = self.map.module(&meta.source_module_id);
let position = if let Some(module) = module { let position = if let Some(module) = module {
if let Some(tokens) = &module.tokens { if let Some(tokens) = &module.tokens {
let range_tokens = meta.range.into_tokens(&tokens); meta.range.into_position(tokens).or(meta.position.map(|p| (p, p)))
get_position(&range_tokens).or(meta.position.map(|p| (p, p)))
} else if let Some(position) = meta.position { } else if let Some(position) = meta.position {
Some((position, position)) Some((position, position))
} else { } else {
@ -237,6 +251,11 @@ impl TokenRange {
.take(self.end + 1 - self.start) .take(self.end + 1 - self.start)
.collect::<Vec<_>>() .collect::<Vec<_>>()
} }
pub fn into_position<'v>(&self, tokens: &'v Vec<FullToken>) -> Option<(Position, Position)> {
let tokens = self.into_tokens(tokens);
get_position(&tokens)
}
} }
fn get_position(tokens: &Vec<&FullToken>) -> Option<(Position, Position)> { fn get_position(tokens: &Vec<&FullToken>) -> Option<(Position, Position)> {

View File

@ -26,12 +26,11 @@ impl LDRunner {
let dyn_linker_path = find_objectfile(&self.dynamic_linker); let dyn_linker_path = find_objectfile(&self.dynamic_linker);
let crt1_path = find_objectfile("crt1.o"); let crt1_path = find_objectfile("crt1.o");
#[cfg(feature = "log_output")]
println!("LDRunner: Using dynamic linker at: {:?}", dyn_linker_path); println!("LDRunner: Using dynamic linker at: {:?}", dyn_linker_path);
let mut ld = Command::new(&self.command); let mut ld = Command::new(&self.command);
ld.arg("-dynamic-linker") ld.arg("-dynamic-linker").arg(dyn_linker_path).arg(crt1_path);
.arg(dyn_linker_path)
.arg(crt1_path);
for library in &self.libraries { for library in &self.libraries {
ld.arg(format!("-l{}", library)); ld.arg(format!("-l{}", library));
@ -41,22 +40,21 @@ impl LDRunner {
.arg("-o") .arg("-o")
.arg(out_path.to_str().unwrap()); .arg(out_path.to_str().unwrap());
#[cfg(feature = "log_output")]
println!( println!(
"LDRunner: Executing linker to objfile at {:?} => {:?}", "LDRunner: Executing linker to objfile at {:?} => {:?}",
input_path, out_path input_path, out_path
); );
#[cfg(feature = "log_output")]
dbg!(&ld); dbg!(&ld);
ld.spawn().expect("Unable to execute ld!"); ld.spawn().expect("Unable to execute ld!");
thread::sleep(Duration::from_millis(100)); thread::sleep(Duration::from_millis(100));
#[cfg(feature = "log_output")]
println!("Setting executable bit to {:?}..", out_path); println!("Setting executable bit to {:?}..", out_path);
Command::new("chmod") Command::new("chmod").arg("+x").arg(out_path).spawn().unwrap();
.arg("+x")
.arg(out_path)
.spawn()
.unwrap();
thread::sleep(Duration::from_millis(100)); thread::sleep(Duration::from_millis(100));
} }
} }

View File

@ -8,22 +8,36 @@
//! Much of the syntax in Reid is directly inspired by rust, but mostly it is //! Much of the syntax in Reid is directly inspired by rust, but mostly it is
//! driven by simplicity. //! driven by simplicity.
//! //!
//! Specifications and a bunch of [documentation for the language can be found
//! here](./documentation/).
//!
//! An example of a real whole program (a CPU pathtracer) can be found [in
//! examples/cpu_raytracer.reid](./examples/cpu_raytracer.reid), go have a look!
//!
//! Reid is currently able to (non-exhaustively): //! Reid is currently able to (non-exhaustively):
//! - Do basic algebra (e.g. Add, Sub, Mult) //! - Do basic algebra binary and unary-operations (e.g. Add, Sub, Div, Mult,
//! And, Not)
//! - Resolve complex one-liners correctly using PEDMAS (e.g. `5 + 2 * 5 - 5 * //! - Resolve complex one-liners correctly using PEDMAS (e.g. `5 + 2 * 5 - 5 *
//! 5` is calculated correctly) //! 5` is calculated correctly)
//! - Handle borrows/derefs, pointers.
//! - Declare and call functions with varying parameters and return types //! - Declare and call functions with varying parameters and return types
//! - Perform type-checking and type-inference such that return-types and //! - Perform type-checking and type-inference such that return-types and
//! parameter types must always match. //! parameter types must always match.
//! - Do simple logic-operations (e.g. If/And/Or) //! - Do simple logic-operations (e.g. If/And/Or)
//! - Handle, access, define and initialize structs and arrays.
//! - Define and execute For/While loops
//! - Output detailed debug information
//! - Define extern functions that can be linked to outside modules such as
//! `libc`.
//! - Define custom binary operations for any two types that hasn't been defined
//! previously (such as `u16 + u32`).
//! //!
//! An example program of Reid, that calculates the 5th fibonacci number (and //!
//! uses Rust for highlighting) is: //! An example program of Reid, that calculates the 5th fibonacci number:
//! ```reid //! ```reid
//! fn main() -> u16 { //! fn main() -> u16 {
//! return fibonacci(5); //! return fibonacci(5);
//! } //! }
//!
//! fn fibonacci(n: u16) -> u16 { //! fn fibonacci(n: u16) -> u16 {
//! if n <= 2 { //! if n <= 2 {
//! return 1; //! return 1;
@ -32,16 +46,13 @@
//! } //! }
//! ``` //! ```
//! //!
//! Currently missing relevant features (TODOs) are: //! TODOs still (see README.md for more)
//! - ~~Arrays~~ (DONE) //! - Error handling
//! - Structs (and custom types as such) //! - Lexing & parsing of whitespace and comments as well
//! - ~~Extern functions~~ (DONE) //! - LSP implementation
//! - ~~Strings~~ (DONE)
//! - Loops
//! - Debug Symbols
//! ``` //! ```
use std::{collections::HashMap, path::PathBuf, thread, time::Duration}; use std::{collections::HashMap, path::PathBuf};
use ast::{ use ast::{
lexer::{self, FullToken, Token}, lexer::{self, FullToken, Token},
@ -61,8 +72,8 @@ use crate::{
mir::macros::{form_macros, MacroModule, MacroPass}, mir::macros::{form_macros, MacroModule, MacroPass},
}; };
mod ast; pub mod ast;
mod codegen; pub mod codegen;
pub mod error_raporting; pub mod error_raporting;
pub mod ld; pub mod ld;
pub mod mir; pub mod mir;
@ -82,6 +93,7 @@ pub fn parse_module<'map, T: Into<String>>(
map.set_tokens(id, tokens.clone()); map.set_tokens(id, tokens.clone());
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#?}", &tokens); println!("{:#?}", &tokens);
Ok((id, tokens)) Ok((id, tokens))
@ -93,7 +105,7 @@ pub fn compile_module<'map>(
map: &'map mut ErrorModules, map: &'map mut ErrorModules,
path: Option<PathBuf>, path: Option<PathBuf>,
is_main: bool, is_main: bool,
) -> Result<mir::Module, ReidError> { ) -> Result<Result<mir::Module, (ast::Module, ReidError)>, ReidError> {
let module = map.module(&module_id).cloned().unwrap(); let module = map.module(&module_id).cloned().unwrap();
let mut token_stream = TokenStream::from(&tokens); let mut token_stream = TokenStream::from(&tokens);
@ -105,6 +117,8 @@ pub fn compile_module<'map>(
statements.push(statement); statements.push(statement);
} }
let errors = token_stream.errors();
drop(token_stream); drop(token_stream);
let ast_module = ast::Module { let ast_module = ast::Module {
@ -115,10 +129,34 @@ pub fn compile_module<'map>(
is_main, is_main,
}; };
if errors.len() > 0 {
// dbg!(&ast_module);
return Ok(Err((
ast_module,
ReidError::from_kind(
errors
.into_iter()
.map(|e| {
error_raporting::ErrorKind::from(mir::pass::Error {
metadata: mir::Metadata {
source_module_id: module_id,
range: *e.get_range().unwrap_or(&Default::default()),
position: None,
},
kind: e,
})
})
.collect(),
map.clone(),
),
)));
}
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
dbg!(&ast_module); dbg!(&ast_module);
Ok(ast_module.process(module_id)) Ok(Ok(ast_module.process(module_id)))
} }
pub fn perform_all_passes<'map>( pub fn perform_all_passes<'map>(
@ -126,9 +164,11 @@ pub fn perform_all_passes<'map>(
module_map: &'map mut ErrorModules, module_map: &'map mut ErrorModules,
) -> Result<(), ReidError> { ) -> Result<(), ReidError> {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
dbg!(&context); dbg!(&context);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#}", &context); println!("{:#}", &context);
let state = context.pass(&mut LinkerPass { let state = context.pass(&mut LinkerPass {
@ -143,10 +183,13 @@ pub fn perform_all_passes<'map>(
} }
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:-^100}", "LINKER OUTPUT"); println!("{:-^100}", "LINKER OUTPUT");
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#}", &context); println!("{:#}", &context);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
dbg!(&state); dbg!(&state);
if !state.errors.is_empty() { if !state.errors.is_empty() {
@ -168,10 +211,13 @@ pub fn perform_all_passes<'map>(
let state = context.pass(&mut macro_pass)?; let state = context.pass(&mut macro_pass)?;
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:-^100}", "MACRO OUTPUT"); println!("{:-^100}", "MACRO OUTPUT");
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#}", &context); println!("{:#}", &context);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
dbg!(&state); dbg!(&state);
if !state.errors.is_empty() { if !state.errors.is_empty() {
@ -206,12 +252,16 @@ pub fn perform_all_passes<'map>(
let state = context.pass(&mut TypeInference { refs: &mut refs })?; let state = context.pass(&mut TypeInference { refs: &mut refs })?;
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:-^100}", "TYPE INFERRER OUTPUT"); println!("{:-^100}", "TYPE INFERRER OUTPUT");
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{}", &refs); println!("{}", &refs);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#}", &context); println!("{:#}", &context);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
dbg!(&state); dbg!(&state);
if !state.errors.is_empty() { if !state.errors.is_empty() {
@ -228,10 +278,13 @@ pub fn perform_all_passes<'map>(
let state = context.pass(&mut TypeCheck { refs: &refs })?; let state = context.pass(&mut TypeCheck { refs: &refs })?;
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:-^100}", "TYPECHECKER OUTPUT"); println!("{:-^100}", "TYPECHECKER OUTPUT");
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#}", &context); println!("{:#}", &context);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
dbg!(&state); dbg!(&state);
if !state.errors.is_empty() { if !state.errors.is_empty() {
@ -245,6 +298,9 @@ pub fn perform_all_passes<'map>(
)); ));
} }
#[cfg(feature = "context_debug")]
dbg!(&context);
Ok(()) Ok(())
} }
@ -262,15 +318,17 @@ pub fn compile_and_pass<'map>(
let name = path.file_name().unwrap().to_str().unwrap().to_owned(); let name = path.file_name().unwrap().to_str().unwrap().to_owned();
let (id, tokens) = parse_module(source, name, module_map)?; let (id, tokens) = parse_module(source, name, module_map)?;
let module = compile_module(id, tokens, module_map, Some(path.clone()), true)?; let module = compile_module(id, tokens, module_map, Some(path.clone()), true)?.map_err(|(_, e)| e)?;
let mut mir_context = mir::Context::from(vec![module], path.parent().unwrap().to_owned()); let mut mir_context = mir::Context::from(vec![module], path.parent().unwrap().to_owned());
perform_all_passes(&mut mir_context, module_map)?; perform_all_passes(&mut mir_context, module_map)?;
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:-^100}", "FINAL OUTPUT"); println!("{:-^100}", "FINAL OUTPUT");
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#}", &mir_context); println!("{:#}", &mir_context);
let mut context = Context::new(format!("Reid ({})", env!("CARGO_PKG_VERSION"))); let mut context = Context::new(format!("Reid ({})", env!("CARGO_PKG_VERSION")));
@ -280,6 +338,7 @@ pub fn compile_and_pass<'map>(
}; };
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{}", &codegen_modules.context); println!("{}", &codegen_modules.context);
let compiled = codegen_modules.compile(cpu, features); let compiled = codegen_modules.compile(cpu, features);

View File

@ -84,7 +84,11 @@ impl Display for GlobalKind {
impl Display for Import { impl Display for Import {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "import {}", self.0.join("::")) write!(
f,
"import {}",
self.0.iter().map(|(s, _)| s.clone()).collect::<Vec<_>>().join("::")
)
} }
} }
@ -279,9 +283,9 @@ impl Display for ExprKind {
let mut state = Default::default(); let mut state = Default::default();
let mut inner_f = PadAdapter::wrap(f, &mut state); let mut inner_f = PadAdapter::wrap(f, &mut state);
let mut iter = items.iter(); let mut iter = items.iter();
if let Some((name, expr)) = iter.next() { if let Some((name, expr, _)) = iter.next() {
write!(inner_f, "\n{}: {}", name, expr)?; write!(inner_f, "\n{}: {}", name, expr)?;
while let Some((name, expr)) = iter.next() { while let Some((name, expr, _)) = iter.next() {
writeln!(inner_f, ",")?; writeln!(inner_f, ",")?;
write!(inner_f, "{}: {}", name, expr)?; write!(inner_f, "{}: {}", name, expr)?;
} }
@ -289,7 +293,7 @@ impl Display for ExprKind {
} }
f.write_char('}') f.write_char('}')
} }
ExprKind::Accessed(expression, type_kind, name) => { ExprKind::Accessed(expression, type_kind, name, _) => {
Display::fmt(&expression, f)?; Display::fmt(&expression, f)?;
write_access(f, name)?; write_access(f, name)?;
write!(f, "<{}>", type_kind) write!(f, "<{}>", type_kind)

View File

@ -1,3 +1,5 @@
use reid_lib::builder::TypeValue;
use crate::util::maybe; use crate::util::maybe;
use super::{typecheck::typerefs::TypeRefs, *}; use super::{typecheck::typerefs::TypeRefs, *};
@ -57,7 +59,7 @@ impl TypeKind {
} }
} }
pub fn size_of(&self) -> u64 { pub fn size_of(&self, map: &HashMap<CustomTypeKey, TypeDefinition>) -> u64 {
match self { match self {
TypeKind::Bool => 1, TypeKind::Bool => 1,
TypeKind::I8 => 8, TypeKind::I8 => 8,
@ -72,8 +74,16 @@ impl TypeKind {
TypeKind::U128 => 128, TypeKind::U128 => 128,
TypeKind::Void => 0, TypeKind::Void => 0,
TypeKind::Char => 8, TypeKind::Char => 8,
TypeKind::Array(type_kind, len) => type_kind.size_of() * (*len as u64), TypeKind::Array(type_kind, len) => type_kind.size_of(map) * (*len as u64),
TypeKind::CustomType(..) => 32, TypeKind::CustomType(key) => match &map.get(key).unwrap().kind {
TypeDefinitionKind::Struct(struct_type) => {
let mut size = 0;
for field in &struct_type.0 {
size += field.1.size_of(map)
}
size
}
},
TypeKind::CodegenPtr(_) => 64, TypeKind::CodegenPtr(_) => 64,
TypeKind::Vague(_) => panic!("Tried to sizeof a vague type!"), TypeKind::Vague(_) => panic!("Tried to sizeof a vague type!"),
TypeKind::Borrow(..) => 64, TypeKind::Borrow(..) => 64,
@ -405,7 +415,7 @@ impl Expression {
TypeKind::Array(Box::new(first.1), expressions.len() as u64), TypeKind::Array(Box::new(first.1), expressions.len() as u64),
)) ))
} }
Accessed(_, type_kind, _) => Ok((ReturnKind::Soft, type_kind.clone())), Accessed(_, type_kind, ..) => Ok((ReturnKind::Soft, type_kind.clone())),
Struct(name, _) => Ok(( Struct(name, _) => Ok((
ReturnKind::Soft, ReturnKind::Soft,
TypeKind::CustomType(CustomTypeKey(name.clone(), mod_id)), TypeKind::CustomType(CustomTypeKey(name.clone(), mod_id)),
@ -437,7 +447,7 @@ impl Expression {
match &self.0 { match &self.0 {
ExprKind::Variable(var_ref) => Some(var_ref), ExprKind::Variable(var_ref) => Some(var_ref),
ExprKind::Indexed(lhs, _, _) => lhs.backing_var(), ExprKind::Indexed(lhs, _, _) => lhs.backing_var(),
ExprKind::Accessed(lhs, _, _) => lhs.backing_var(), ExprKind::Accessed(lhs, ..) => lhs.backing_var(),
ExprKind::Borrow(expr, _) => expr.backing_var(), ExprKind::Borrow(expr, _) => expr.backing_var(),
ExprKind::Deref(expr) => expr.backing_var(), ExprKind::Deref(expr) => expr.backing_var(),
ExprKind::Block(block) => block.backing_var(), ExprKind::Block(block) => block.backing_var(),

View File

@ -52,7 +52,7 @@ pub enum ErrorKind {
pub fn compile_std(module_map: &mut ErrorModules) -> Result<Module, ReidError> { pub fn compile_std(module_map: &mut ErrorModules) -> Result<Module, ReidError> {
let (id, tokens) = parse_module(STD_SOURCE, STD_NAME, module_map)?; let (id, tokens) = parse_module(STD_SOURCE, STD_NAME, module_map)?;
let module = compile_module(id, tokens, module_map, None, false)?; let module = compile_module(id, tokens, module_map, None, false)?.map_err(|(_, e)| e)?;
let module_id = module.module_id; let module_id = module.module_id;
let mut mir_context = super::Context::from(vec![module], Default::default()); let mut mir_context = super::Context::from(vec![module], Default::default());
@ -124,7 +124,9 @@ impl<'map> Pass for LinkerPass<'map> {
state.ok::<_, Infallible>(Err(ErrorKind::InnerModulesNotYetSupported(import.clone())), import.1); state.ok::<_, Infallible>(Err(ErrorKind::InnerModulesNotYetSupported(import.clone())), import.1);
} }
let module_name = unsafe { path.get_unchecked(0) }; let Some((module_name, _)) = path.get(0) else {
continue;
};
let mut imported = if let Some(mod_id) = module_ids.get(module_name) { let mut imported = if let Some(mod_id) = module_ids.get(module_name) {
modules.get(mod_id).unwrap() modules.get(mod_id).unwrap()
@ -156,21 +158,33 @@ impl<'map> Pass for LinkerPass<'map> {
}; };
match compile_module(id, tokens, &mut self.module_map, Some(file_path), false) { match compile_module(id, tokens, &mut self.module_map, Some(file_path), false) {
Ok(imported_module) => { Ok(res) => match res {
if imported_module.is_main { Ok(imported_module) => {
if imported_module.is_main {
state.ok::<_, Infallible>(
Err(ErrorKind::TriedLinkingMain(module_name.clone())),
import.1,
);
continue;
}
let module_id = imported_module.module_id;
module_ids.insert(imported_module.name.clone(), imported_module.module_id);
modules.insert(module_id, Rc::new(RefCell::new(imported_module)));
let imported = modules.get_mut(&module_id).unwrap();
modules_to_process.push(imported.clone());
imported
}
Err((_, err)) => {
state.ok::<_, Infallible>( state.ok::<_, Infallible>(
Err(ErrorKind::TriedLinkingMain(module_name.clone())), Err(ErrorKind::ModuleCompilationError(
module_name.clone(),
format!("{}", err),
)),
import.1, import.1,
); );
continue; continue;
} }
let module_id = imported_module.module_id; },
module_ids.insert(imported_module.name.clone(), imported_module.module_id);
modules.insert(module_id, Rc::new(RefCell::new(imported_module)));
let imported = modules.get_mut(&module_id).unwrap();
modules_to_process.push(imported.clone());
imported
}
Err(err) => { Err(err) => {
state.ok::<_, Infallible>( state.ok::<_, Infallible>(
Err(ErrorKind::ModuleCompilationError( Err(ErrorKind::ModuleCompilationError(
@ -185,7 +199,9 @@ impl<'map> Pass for LinkerPass<'map> {
} }
.borrow_mut(); .borrow_mut();
let import_name = unsafe { path.get_unchecked(1) }; let Some((import_name, _)) = path.get(1) else {
continue;
};
let import_id = imported.module_id; let import_id = imported.module_id;
let mut imported_types = Vec::new(); let mut imported_types = Vec::new();
@ -459,7 +475,7 @@ impl<'map> Pass for LinkerPass<'map> {
super::ExprKind::Indexed(.., type_kind, _) => { super::ExprKind::Indexed(.., type_kind, _) => {
*type_kind = type_kind.update_imported(extern_types, mod_id) *type_kind = type_kind.update_imported(extern_types, mod_id)
} }
super::ExprKind::Accessed(.., type_kind, _) => { super::ExprKind::Accessed(.., type_kind, _, _) => {
*type_kind = type_kind.update_imported(extern_types, mod_id) *type_kind = type_kind.update_imported(extern_types, mod_id)
} }
super::ExprKind::BinOp(.., type_kind) => *type_kind = type_kind.update_imported(extern_types, mod_id), super::ExprKind::BinOp(.., type_kind) => *type_kind = type_kind.update_imported(extern_types, mod_id),

View File

@ -118,10 +118,13 @@ impl mir::Expression {
let mut globals = Vec::new(); let mut globals = Vec::new();
match &mut self.0 { match &mut self.0 {
mir::ExprKind::FunctionCall(function_call) => { mir::ExprKind::FunctionCall(function_call) => {
for param in &mut function_call.parameters {
globals.extend(param.gen_macros(data, state, map));
}
if function_call.is_macro { if function_call.is_macro {
if let Some(existing_macro) = data.macros.get(&function_call.name) { if let Some(existing_macro) = data.macros.get(&function_call.name) {
let mut literals = Vec::new(); let mut literals = Vec::new();
for param in &function_call.parameters { for param in &mut function_call.parameters {
match &param.0 { match &param.0 {
super::ExprKind::Literal(literal) => literals.push(literal.clone()), super::ExprKind::Literal(literal) => literals.push(literal.clone()),
_ => state.note_errors(&vec![ErrorKind::InvalidMacroArgs], param.1), _ => state.note_errors(&vec![ErrorKind::InvalidMacroArgs], param.1),

View File

@ -41,16 +41,19 @@ impl Metadata {
} }
pub fn into_positions(&self, tokens: &Vec<FullToken>) -> Option<(Position, Position)> { pub fn into_positions(&self, tokens: &Vec<FullToken>) -> Option<(Position, Position)> {
let mut iter = tokens self.range.into_position(tokens)
.iter() }
.skip(self.range.start)
.take(self.range.end - self.range.start); pub fn is_after(&self, token_idx: usize) -> bool {
if let Some(first) = iter.next() { return token_idx < self.range.start;
let last = iter.last().unwrap_or(first); }
Some((first.position, last.position.add(last.token.len() as u32)))
} else { pub fn is_before(&self, token_idx: usize) -> bool {
None return token_idx > self.range.end;
} }
pub fn contains(&self, token_idx: usize) -> bool {
return token_idx >= self.range.start && token_idx <= self.range.end;
} }
} }
@ -253,15 +256,15 @@ pub enum ReturnKind {
pub struct NamedVariableRef(pub TypeKind, pub String, pub Metadata); pub struct NamedVariableRef(pub TypeKind, pub String, pub Metadata);
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct Import(pub Vec<String>, pub Metadata); pub struct Import(pub Vec<(String, Metadata)>, pub Metadata);
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum ExprKind { pub enum ExprKind {
Variable(NamedVariableRef), Variable(NamedVariableRef),
Indexed(Box<Expression>, TypeKind, Box<Expression>), Indexed(Box<Expression>, TypeKind, Box<Expression>),
Accessed(Box<Expression>, TypeKind, String), Accessed(Box<Expression>, TypeKind, String, Metadata),
Array(Vec<Expression>), Array(Vec<Expression>),
Struct(String, Vec<(String, Expression)>), Struct(String, Vec<(String, Expression, Metadata)>),
Literal(Literal), Literal(Literal),
BinOp(BinaryOperator, Box<Expression>, Box<Expression>, TypeKind), BinOp(BinaryOperator, Box<Expression>, Box<Expression>, TypeKind),
FunctionCall(FunctionCall), FunctionCall(FunctionCall),

View File

@ -585,7 +585,7 @@ impl Expression {
} }
} }
ExprKind::Struct(_, items) => { ExprKind::Struct(_, items) => {
for (_, expr) in items { for (_, expr, _) in items {
expr.pass(pass, state, scope, mod_id)?; expr.pass(pass, state, scope, mod_id)?;
} }
} }

View File

@ -97,9 +97,10 @@ fn check_typedefs_for_recursion<'a, 'b>(
typedef.meta, typedef.meta,
); );
} else { } else {
seen.insert(name.clone());
if let Some(inner_typedef) = defmap.get(name) { if let Some(inner_typedef) = defmap.get(name) {
check_typedefs_for_recursion(defmap, inner_typedef, seen.clone(), state) let mut inner_seen = seen.clone();
inner_seen.insert(name.clone());
check_typedefs_for_recursion(defmap, inner_typedef, inner_seen.clone(), state)
} }
} }
} }
@ -596,7 +597,7 @@ impl Expression {
} }
} }
} }
ExprKind::Accessed(expression, type_kind, field_name) => { ExprKind::Accessed(expression, type_kind, field_name, _) => {
// Resolve expected type // Resolve expected type
let expected_ty = type_kind.resolve_ref(typerefs); let expected_ty = type_kind.resolve_ref(typerefs);
@ -620,7 +621,7 @@ impl Expression {
// Update possibly resolved type // Update possibly resolved type
Ok(true_ty) Ok(true_ty)
} else { } else {
Err(ErrorKind::NoSuchField(field_name.clone())) Err(ErrorKind::NoSuchField(key.0.clone()))
} }
} else { } else {
Err(ErrorKind::TriedAccessingNonStruct(expr_ty)) Err(ErrorKind::TriedAccessingNonStruct(expr_ty))
@ -640,7 +641,7 @@ impl Expression {
HashSet::new() HashSet::new()
}; };
for (field_name, field_expr) in items { for (field_name, field_expr, _) in items {
// Get expected type, or error if field does not exist // Get expected type, or error if field does not exist
let expected_ty = state.or_else( let expected_ty = state.or_else(
struct_def struct_def
@ -720,15 +721,19 @@ impl Expression {
expr.resolve_ref(typerefs).cast_into(type_kind) expr.resolve_ref(typerefs).cast_into(type_kind)
} }
ExprKind::AssociatedFunctionCall(type_kind, function_call) => { ExprKind::AssociatedFunctionCall(type_kind, function_call) => {
*type_kind = type_kind.or_default().unwrap();
let true_function = state let true_function = state
.scope .scope
.get_associated_function(&pass::AssociatedFunctionKey( .get_associated_function(&pass::AssociatedFunctionKey(
type_kind.clone(), type_kind.clone(),
function_call.name.clone(), function_call.name.clone(),
)) ))
.ok_or(ErrorKind::FunctionNotDefined(function_call.name.clone())); .ok_or(ErrorKind::AssocFunctionNotDefined(
function_call.name.clone(),
type_kind.clone(),
));
if let Some(f) = state.ok(true_function, self.1) { if let Some(f) = state.ok(true_function, function_call.meta) {
let param_len_given = function_call.parameters.len(); let param_len_given = function_call.parameters.len();
let param_len_expected = f.params.len(); let param_len_expected = f.params.len();

View File

@ -14,7 +14,7 @@ use crate::{
mir::{ mir::{
pass::{AssociatedFunctionKey, ScopeVariable}, pass::{AssociatedFunctionKey, ScopeVariable},
BinopDefinition, Block, CustomTypeKey, ExprKind, Expression, FunctionDefinition, FunctionDefinitionKind, BinopDefinition, Block, CustomTypeKey, ExprKind, Expression, FunctionDefinition, FunctionDefinitionKind,
IfExpression, Module, ReturnKind, StmtKind, TypeKind, WhileStatement, IfExpression, Module, ReturnKind, StmtKind, TypeKind, VagueType, WhileStatement,
}, },
util::try_all, util::try_all,
}; };
@ -171,8 +171,9 @@ impl FunctionDefinition {
let scope_refs = ScopeTypeRefs::from(type_refs); let scope_refs = ScopeTypeRefs::from(type_refs);
for param in &self.parameters { for param in &self.parameters {
let param_t = state.or_else(param.ty.assert_unvague(), Vague(Unknown), self.signature()); let param_t = state.or_else(param.ty.assert_unvague(), Vague(Unknown), self.signature());
let mutable = matches!(param_t, TypeKind::Borrow(_, true));
let res = scope_refs let res = scope_refs
.new_var(param.name.clone(), false, &param_t) .new_var(param.name.clone(), mutable, &param_t)
.or(Err(ErrorKind::VariableAlreadyDefined(param.name.clone()))); .or(Err(ErrorKind::VariableAlreadyDefined(param.name.clone())));
state.ok(res, self.signature()); state.ok(res, self.signature());
} }
@ -526,7 +527,7 @@ impl Expression {
} }
} }
} }
ExprKind::Accessed(expression, type_kind, field_name) => { ExprKind::Accessed(expression, type_kind, field_name, _) => {
let expr_ty = expression.infer_types(state, type_refs)?; let expr_ty = expression.infer_types(state, type_refs)?;
// Check that the resolved type is at least a struct, no // Check that the resolved type is at least a struct, no
@ -545,10 +546,10 @@ impl Expression {
*type_kind = elem_ty.as_type().clone(); *type_kind = elem_ty.as_type().clone();
Ok(elem_ty) Ok(elem_ty)
} }
None => Err(ErrorKind::NoSuchField(field_name.clone())), None => Ok(type_refs.from_type(&TypeKind::Vague(VagueType::Unknown)).unwrap()),
} }
} }
_ => Err(ErrorKind::TriedAccessingNonStruct(kind)), _ => Ok(type_refs.from_type(&TypeKind::Vague(VagueType::Unknown)).unwrap()),
} }
} }
ExprKind::Struct(struct_name, fields) => { ExprKind::Struct(struct_name, fields) => {
@ -605,7 +606,7 @@ impl Expression {
.parameters .parameters
.get_mut(0) .get_mut(0)
.expect("Unknown-type associated function NEEDS to always have at least one parameter!"); .expect("Unknown-type associated function NEEDS to always have at least one parameter!");
let param_ty = first_param.infer_types(state, type_refs).unwrap().resolve_deep(); let param_ty = first_param.infer_types(state, type_refs)?.resolve_deep();
*type_kind = state *type_kind = state
.or_else( .or_else(
param_ty.ok_or(ErrorKind::CouldNotInferType(format!("{}", first_param))), param_ty.ok_or(ErrorKind::CouldNotInferType(format!("{}", first_param))),
@ -613,24 +614,38 @@ impl Expression {
first_param.1, first_param.1,
) )
.resolve_ref(type_refs.types); .resolve_ref(type_refs.types);
let backing_var = first_param.backing_var().expect("todo").1.clone(); let backing_var = first_param.backing_var();
let is_mutable = if let Some(backing_var) = first_param.backing_var() {
if let Some((mutable, _)) = type_refs.find_var(&backing_var.1) {
mutable
} else {
return Err(ErrorKind::VariableNotDefined(backing_var.1.clone()));
}
} else {
false
};
if let TypeKind::Borrow(inner, _) = type_kind { if backing_var.is_some() {
if let TypeKind::Borrow(..) = *inner.clone() { if let TypeKind::Borrow(inner, _) = type_kind {
*type_kind = type_kind.unroll_borrow(); if let TypeKind::Borrow(..) = *inner.clone() {
let ExprKind::Borrow(val, _) = &first_param.0 else { *type_kind = type_kind.unroll_borrow();
panic!() let ExprKind::Borrow(val, _) = &first_param.0 else {
}; panic!()
};
*first_param = *val.clone();
}
}
} else {
if let ExprKind::Borrow(val, _) = &first_param.0 {
*first_param = *val.clone(); *first_param = *val.clone();
} }
if let TypeKind::Borrow(inner_ty, _) = type_kind {
*type_kind = *inner_ty.clone();
}
} }
if let Some((mutable, _)) = type_refs.find_var(&backing_var) { if !is_mutable {
if !mutable { first_param.remove_borrow_mutability();
first_param.remove_borrow_mutability();
}
} else {
return Err(ErrorKind::VariableNotDefined(backing_var));
} }
} }
} }
@ -642,9 +657,13 @@ impl Expression {
.ok_or(ErrorKind::AssocFunctionNotDefined( .ok_or(ErrorKind::AssocFunctionNotDefined(
function_call.name.clone(), function_call.name.clone(),
type_kind.clone(), type_kind.clone(),
))? ))
.clone(); .clone();
let Ok(fn_call) = fn_call else {
return Ok(type_refs.from_type(&Vague(Unknown)).unwrap());
};
// Infer param expression types and narrow them to the // Infer param expression types and narrow them to the
// expected function parameters (or Unknown types if too // expected function parameters (or Unknown types if too
// many were provided) // many were provided)

View File

@ -97,6 +97,9 @@ pub struct TypeRefs {
/// Indirect ID-references, referring to hints-vec /// Indirect ID-references, referring to hints-vec
pub(super) type_refs: RefCell<Vec<TypeIdRef>>, pub(super) type_refs: RefCell<Vec<TypeIdRef>>,
pub(super) binop_types: BinopMap, pub(super) binop_types: BinopMap,
/// Used when the real typerefs are not available, and any TypeRefs need to
/// be resolved as Unknown.
pub unknown_typerefs: bool,
} }
impl std::fmt::Display for TypeRefs { impl std::fmt::Display for TypeRefs {
@ -122,6 +125,14 @@ impl TypeRefs {
hints: Default::default(), hints: Default::default(),
type_refs: Default::default(), type_refs: Default::default(),
binop_types: binops, binop_types: binops,
unknown_typerefs: false,
}
}
pub fn unknown() -> TypeRefs {
TypeRefs {
unknown_typerefs: true,
..Default::default()
} }
} }
@ -177,8 +188,12 @@ impl TypeRefs {
} }
pub fn retrieve_typeref(&self, idx: usize) -> Option<TypeRefKind> { pub fn retrieve_typeref(&self, idx: usize) -> Option<TypeRefKind> {
let inner_idx = unsafe { *self.recurse_type_ref(idx).borrow() }; if !self.unknown_typerefs {
self.hints.borrow().get(inner_idx).cloned() let inner_idx = unsafe { *self.recurse_type_ref(idx).borrow() };
self.hints.borrow().get(inner_idx).cloned()
} else {
Some(TypeRefKind::Direct(TypeKind::Vague(VagueType::Unknown)))
}
} }
pub fn retrieve_wide_type(&self, idx: usize, seen: &mut HashSet<usize>) -> Option<TypeKind> { pub fn retrieve_wide_type(&self, idx: usize, seen: &mut HashSet<usize>) -> Option<TypeKind> {

View File

@ -6,17 +6,17 @@ use reid::{
mir::{self}, mir::{self},
parse_module, perform_all_passes, parse_module, perform_all_passes,
}; };
use reid_lib::Context; use reid_lib::{compile::CompileOutput, Context};
use util::assert_err; use util::assert_err;
mod util; mod util;
fn test(source: &str, name: &str, expected_exit_code: Option<i32>) { fn test_compile(source: &str, name: &str) -> CompileOutput {
assert_err(assert_err(std::panic::catch_unwind(|| { assert_err(assert_err(std::panic::catch_unwind(|| {
let mut map = Default::default(); let mut map = Default::default();
let (id, tokens) = assert_err(parse_module(source, name, &mut map)); let (id, tokens) = assert_err(parse_module(source, name, &mut map));
let module = assert_err(compile_module(id, tokens, &mut map, None, true)); let module = assert_err(assert_err(compile_module(id, tokens, &mut map, None, true)).map_err(|(_, e)| e));
let mut mir_context = mir::Context::from(vec![module], Default::default()); let mut mir_context = mir::Context::from(vec![module], Default::default());
assert_err(perform_all_passes(&mut mir_context, &mut map)); assert_err(perform_all_passes(&mut mir_context, &mut map));
@ -24,7 +24,14 @@ fn test(source: &str, name: &str, expected_exit_code: Option<i32>) {
let codegen = assert_err(mir_context.codegen(&context)); let codegen = assert_err(mir_context.codegen(&context));
let output = codegen.compile(None, Vec::new()).output(); Ok::<_, ()>(codegen.compile(None, Vec::new()).output())
})))
}
fn test(source: &str, name: &str, expected_exit_code: Option<i32>) {
assert_err(assert_err(std::panic::catch_unwind(|| {
let output = test_compile(source, name);
let time = SystemTime::now(); let time = SystemTime::now();
let in_path = PathBuf::from(format!( let in_path = PathBuf::from(format!(
"/tmp/temp-{}.o", "/tmp/temp-{}.o",
@ -152,3 +159,13 @@ fn associated_functions() {
Some(4), Some(4),
); );
} }
#[test]
fn mutable_inner_functions() {
test(include_str!("../../examples/mutable_inner.reid"), "test", Some(0));
}
#[test]
fn cpu_raytracer_compiles() {
test_compile(include_str!("../../examples/cpu_raytracer.reid"), "test");
}