Compare commits

...

85 Commits
macros ... main

Author SHA1 Message Date
1c3386bc9a Fix triple-importing 2025-08-04 23:46:46 +03:00
8a178387ca Allow initializing foreign structs as well 2025-08-04 22:33:06 +03:00
b93b7aa52b Fix struct fields not being linked correctly for foreign types 2025-08-04 22:24:03 +03:00
1275063dc2 Pass associated functions as well as functions 2025-08-04 22:09:26 +03:00
02522dd36d Fix typechecker crashing unexpectedly with vague type 2025-08-04 21:18:14 +03:00
5b7c3d5b3a Cleanup 2025-08-04 21:14:32 +03:00
70a968d7a0 Add sqrt 2025-08-04 19:48:14 +03:00
a9d5a4d03b Fix token ranges for for-loops 2025-08-04 18:37:19 +03:00
11d93e4adf Fix length-intrinsic 2025-08-04 18:12:01 +03:00
c2f6cfb8e6 Update intrinsics documentation 2025-08-04 18:02:28 +03:00
34612e98ce Add documentation about memcpy-intrinsic 2025-08-04 17:52:03 +03:00
3b4835cff8 Fix and add memcpy-intrinsic 2025-08-04 17:51:32 +03:00
627d1bcfa5 Add a large amount of new intrinsics 2025-08-04 17:51:32 +03:00
ae6796acfc Fix calling convention for integers/real-numbers 2025-08-04 17:51:32 +03:00
5d19d38682 Add intrinsic min/max to integers and floats 2025-08-04 17:51:32 +03:00
4a33e7d123 Fix binop parsing from last commit 2025-08-04 17:51:32 +03:00
79ecb3b9ba Fix token-ranges for derefs and binop rhs 2025-08-04 14:30:36 +03:00
28157ae4b8 Bump version numbers 2025-08-04 00:43:13 +03:00
8d27a6e7bd Fix reference-linking for local functions 2025-08-04 00:41:44 +03:00
136e9f9d1f Update readme 2025-08-04 00:21:10 +03:00
8e41deb653 Add compile-on-save 2025-08-04 00:13:10 +03:00
766a853b48 Add refactoring and go-to-definition for multifile 2025-08-04 00:07:31 +03:00
109fedb624 Make get-references work for multifiles 2025-08-03 23:54:49 +03:00
d27ec2bb70 Add StateMap 2025-08-03 22:44:00 +03:00
5706fd99e3 Move a bunch of fields to AnalysisState instead 2025-08-03 22:37:28 +03:00
3b3b21d4dc Add possibility to pre-define module-ids 2025-08-03 22:32:31 +03:00
7809aeb2b5 Add source module id for symbols 2025-08-03 21:51:04 +03:00
cd2ebb5224 Add refactoring 2025-08-03 21:45:48 +03:00
8b0d09c08d Fix find-all-references 2025-08-03 21:34:44 +03:00
a215a2116a Add reference-finding 2025-08-03 21:29:40 +03:00
0abeb0c4cd Flatten a pyramid with a helper function 2025-08-03 21:15:47 +03:00
ac0d79f816 add trivial semantic tokens 2025-08-03 21:11:04 +03:00
909728a564 Fix type selection for properties 2025-08-03 21:02:05 +03:00
82537224e7 Add go-to-definition 2025-08-03 20:57:56 +03:00
79b3c6b3ef Fix custom struct field definition semantic tokens 2025-08-03 20:27:31 +03:00
1ae164b1d6 Fix assoc function symbols 2025-08-03 20:24:34 +03:00
9b9baabc81 Add associated function symbols 2025-08-03 20:03:31 +03:00
13c462cb9b Add Type and Struct semantic tokens someplaces 2025-08-03 19:49:56 +03:00
8739fe16d1 Add property semantic tokens 2025-08-03 19:46:04 +03:00
1438ba7bd1 Add semantic highlighting for binop params 2025-08-03 19:39:40 +03:00
d9911a8ff5 Fix TokenRange for assoc function self 2025-08-03 19:27:54 +03:00
48dd17b320 Fix parameter symbols for extern functions 2025-08-03 19:15:48 +03:00
dcc53498e7 Add definition and reference modifiers 2025-08-03 19:01:51 +03:00
018f3e2561 Fix function signature meta for extern functions 2025-08-03 18:45:27 +03:00
6a9133baff Add semantic highlighting to variable references 2025-08-03 18:32:35 +03:00
3f3de9e2c0 Clean up some duplicate .max'es 2025-08-03 18:19:50 +03:00
b965ca11b9 Add semantic highlighting for let-statements 2025-08-03 18:17:21 +03:00
3537318466 Refactor analysis a little bit 2025-08-03 16:21:10 +03:00
dbc43f51ee Add language configuration 2025-08-03 15:52:12 +03:00
c7f1b81c9d Improve associated functions so you can now call them on numbers too 2025-08-03 01:33:52 +03:00
a51a2c8f56 Remove useless prints 2025-08-03 01:00:30 +03:00
101ee2d8e5 Account for intrinsic associated functions with autocomplete 2025-08-03 01:00:02 +03:00
a6844b919b Fix array_structs.reid 2025-08-03 00:16:47 +03:00
4ea0913842 Add autocomplete for associated functions and struct fields 2025-08-03 00:13:53 +03:00
bb9f69ee53 Add autocomplete for imports 2025-08-02 23:03:11 +03:00
97a5c3a65e Optimize LSP analysis a Lot 2025-08-02 21:47:20 +03:00
8595da0c30 Make LSP use a more general analysis structure 2025-08-02 21:11:33 +03:00
dae39bc9d2 Fix fibonacci.reid 2025-08-02 20:21:57 +03:00
658450993a Fix hover types for for-loops 2025-08-02 20:10:48 +03:00
3f6d26679d Update README.md, all TODOs done 2025-08-02 19:24:31 +03:00
16082752e2 Update language server client and configs 2025-08-02 19:19:29 +03:00
8a71ce3629 Update LSP client 2025-08-02 15:02:39 +03:00
81d418c6d8 Update version number 2025-08-02 14:36:56 +03:00
8d0e3d03d5 Improve syntax highlighting 2025-08-02 03:41:08 +03:00
34e31549b3 add some syntax highlighting 2025-08-02 03:09:21 +03:00
0ba25db4c8 Start adding syntax highlighting 2025-08-02 00:14:20 +03:00
314f44304a Update README.md 2025-08-01 23:59:05 +03:00
08f7725ce7 Compile cpu_raytracer example in e2e tests, but don't run it 2025-08-01 22:46:46 +03:00
f89b26bf74 Improve LSP hover typing 2025-08-01 22:41:46 +03:00
4fada0036c Fix debug info for structs 2025-07-31 23:25:46 +03:00
4f0ee72c83 Edit example a bit, fix macro generation in function parameters 2025-07-31 22:48:16 +03:00
deed96bbfd Fix bitwise operators requiring U64 for rhs 2025-07-31 22:17:58 +03:00
1e094eeea0 Allow wider expressions for when self is not taken as borrow 2025-07-29 23:55:31 +03:00
3adb745576 Fix struct recursion testing 2025-07-29 23:38:26 +03:00
8f7b785664 Fix two small bugs, add new example to test 2025-07-29 23:16:56 +03:00
c7aacfe756 Refactor code a little bit 2025-07-29 21:56:50 +03:00
b71c253942 Add types to hovers in LSP, fix around and add metas 2025-07-29 21:39:14 +03:00
7d3aaa143a Start adding type-information to tooltips 2025-07-29 20:44:15 +03:00
6619f1f0a9 Add simple error diagnostic from parser 2025-07-29 19:53:12 +03:00
bc59b6f575 Start adding LSP implementation 2025-07-29 17:48:45 +03:00
c262418f88 Add comments and whitespace to lexer 2025-07-29 16:41:07 +03:00
2dd3a5904b Add whitespace to lexer 2025-07-29 16:37:58 +03:00
ff1da716e9 Update README.md 2025-07-29 16:08:54 +03:00
7c6d634287 Update README.md 2025-07-29 16:04:26 +03:00
b0442e5685 Add documentation for include_bytes! 2025-07-29 16:02:54 +03:00
73 changed files with 5513 additions and 660 deletions

Cargo.lock (generated): 963 lines changed
File diff suppressed because it is too large

@ -1,5 +1,6 @@
[workspace]
members = [
"reid",
-"reid-llvm-lib"
+"reid-llvm-lib",
+"reid-lsp"
]


@ -71,17 +71,22 @@ Currently missing big features (TODOs) are:
Big features that I want later but are not necessary:
- ~~User-defined binary operations~~ (DONE)
- ~~Asymmetric binary operations (e.g. string + u32)~~ (DONE)
-- Error handling
+- ~~Error handling~~ (Not Doing It)
-- Lexing & parsing of whitespace and comments as well
+- ~~Lexing & parsing of whitespace and comments as well~~ (DONE)
-- LSP implementation
+- ~~LSP implementation~~ (DONE)
+- ~~Syntax Highlighting~~ (DONE)
+- ~~Semantic Highlighting~~ (DONE)
+- ~~Go-To-Definition~~ (DONE)
+- ~~Find-All-References~~ (DONE)
+- ~~Refactoring~~ (DONE)

Smaller features:
-- ~~Hex-numbers~~
+- ~~Hex-numbers~~ (DONE)
-- ~~Bitwise operations~~
+- ~~Bitwise operations~~ (DONE)
-- ~~Easier way to initialize arrays with a single value~~
+- ~~Easier way to initialize arrays with a single value~~ (DONE)
-- ~~Void-returns (`return;` for void-returning functions)~~
+- ~~Void-returns (`return;` for void-returning functions)~~ (DONE)
-- ~~Only include standard library at all if it is imported~~
+- ~~Only include standard library at all if it is imported~~ (DONE)
-- Lexical scopes for Debug Information
+- ~~Lexical scopes for Debug Information~~ (DONE)

### Why "Reid"
@ -157,10 +162,6 @@ cmake llvm -B build -DCMAKE_BUILD_TYPE=MinSizeRel -DLLVM_ENABLE_ASSERTIONS=ON -D
ninja -j23
```

-*Also Note:* Building LLVM with `Ninja` was not successful for me, but this
-method was. Ninja may be successful with you, to try it, add `-G Ninja` to the
-`cmake`-command, and instead of `make` run `ninja install`.

### Building this crate itself

Assuming `llvm-project` from the previous step was at
@ -170,6 +171,5 @@ Assuming `llvm-project` from the previous step was at
LLVM_SYS_201_PREFIX=/path/llvm-project/build cargo build
```

-## In conclusion
-Good luck! It took me a good 10 hours to figure this out for myself, I sure hope
-these instructions help both myself and someone else in the future!
+Alternatively, assuming you have LLVM 20.1 or newer installed, you may omit
+the environment variable entirely and use dynamic linking instead.


@ -261,6 +261,9 @@ calls, literals, or if-expressions. Types of supported expressions include:
  *associated type* with given parameters.
- **Accessing function calls**, a shorthand to call associated function calls
  which have `&self` or `&mut self` as their first parameter.
+- **Macro invocations** for invoking **macros** which are evaluated at
+  compile-time rather than runtime. Currently it is not possible to define
+  your own macros, but there are some pre-defined in the intrinsics.
- **Block-expressions**, which can return a value to the higher-level expression
  if they have a statement with a soft-return. Otherwise they return void.
- **If-expressions**, which can execute one of two expressions depending on the
@ -278,7 +281,7 @@ In formal grammar:
<indexing> | <accessing> |
<binary-exp> | <unary-exp> |
<function-call> | <accessing-function-call> | <assoc-function-call>
-<block> | <if-expr> | <cast> |
+<macro-invocation> | <block> | <if-expr> | <cast> |
( "(" <expression> ")" )

<variable> :: <ident>
@ -294,6 +297,7 @@ In formal grammar:
<function-call> :: <expression> "(" [ <expression> ( "," <expression> )* ] ")"
<accessing-function-call> :: <accessing> "(" [ <expression> ( "," <expression> )* ] ")"
<assoc-function-call> :: <type> "::" <function-call>
+<macro-invocation> :: <expression> "!(" [ <expression> ( "," <expression> )* ] ")"
<if-expr> :: "if" <expression> <expression> [ "else" <expression> ]
<cast> :: <expression> "as" <type>
```
@ -312,6 +316,7 @@ test.first // Accessing
func(value, 14) // Function call
Test::get_field(&test); // Associated function call
test.get_field(); // Same, but using the dot-form shorthand
+include_bytes!("./test"); // Macro invocation
if varname {} else {} // If-expression
value as u32 // cast
(value + 2) // Binop within parenthesis


@ -7,15 +7,13 @@ pre-existing binary-operators, but also some regular functions and associated
functions (that every type has by-default). This document lists them all (except
for the binary operators, because there are hundreds of those).

-### Global Intrinsics
+### Macro Intrinsics

-#### `malloc(size: u64) -> *u8`
+#### `include_bytes!(path: *char) -> &[u8; _]`

-Allocates `size` bytes and returns a pointer of `u8` of length `size`.
-
-```rust
-i32::malloc(40); // Reserves 40 bytes
-```
+Attempts to load file from `path` (relative to module) into memory and includes
+it into the compiled binary directly. Returns a borrow to an array containing
+bytes from the file. Array length varies depending on the file contents.

### Associated Intrinsics

@ -39,11 +37,143 @@ i32::null(); // Returns *i32 (null-ptr)
Allocates `T::sizeof() * size` bytes and returns a pointer to `T`.

-**Note:** This does not seem to work correctly currently.

```rust
i32::malloc(30); // Returns *i32
// Equivalent to
malloc(i32::sizeof() * 30) as *i32
```
#### `<T>::memcpy(destination: *T, source: *T, size: u64)`
Copies `T::sizeof() * size` bytes from pointer `source` to pointer
`destination`.
```rust
let a = i32::malloc(30);
let b = i32::malloc(30);
// Copies the contents from b to a
i32::memcpy(a, b, 30);
```
#### `<T>::min(a: T, b: T) -> T`
*Note: (only on integer- and floating-point values)*
Returns the smaller of `a` and `b`.
#### `<T>::max(a: T, b: T) -> T`
*Note: (only on integer- and floating-point values)*
Returns the larger of `a` and `b`.
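For example (an illustrative snippet in the style of the other examples in this document; the values are arbitrary):

```rust
i32::min(3, 7);   // Returns 3
u64::max(10, 20); // Returns 20
```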
#### `<T>::abs(value: T) -> T`
*Note: (only on signed integer and floating-point values)*
Returns the absolute value of `value`.
#### `<T>::sqrt(value: T) -> T`
*Note: (only on floating-point numbers)*
Calculates the square-root of `value`
#### `<T>::pow(value: T, exponent: T) -> T`
*Note: (only on floating-point numbers)*
Returns `value` raised to the exponent of `exponent`.
#### `<T>::powi(value: T, exponent: u64) -> T`
*Note: (only on floating-point numbers)*
Returns `value` raised to the exponent of `exponent`.
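For example (illustrative; `f64` stands in for any supported floating-point type):

```rust
f64::sqrt(9.0);      // 3.0
f64::pow(2.0, 10.0); // 1024.0
f64::powi(2.0, 10);  // 1024.0
```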
#### `<T>::sin(value: T) -> T`
*Note: (only on floating-point numbers)*
Calculates sine of `value`
#### `<T>::cos(value: T) -> T`
*Note: (only on floating-point numbers)*
Calculates cosine of `value`
#### `<T>::tan(value: T) -> T`
*Note: (only on floating-point numbers)*
Calculates tangent of `value`
#### `<T>::asin(value: T) -> T`
*Note: (only on floating-point numbers)*
Calculates arcsine of `value`
#### `<T>::acos(value: T) -> T`
*Note: (only on floating-point numbers)*
Calculates arccosine of `value`
#### `<T>::atan(value: T) -> T`
*Note: (only on floating-point numbers)*
Calculates arctangent of `value`
#### `<T>::atan2(value: T) -> T`
*Note: (only on floating-point numbers)*
Calculates 2-argument arctangent of `value`
#### `<T>::sinh(value: T) -> T`
*Note: (only on floating-point numbers)*
Calculates hyperbolic sine of `value`
#### `<T>::cosh(value: T) -> T`
*Note: (only on floating-point numbers)*
Calculates hyperbolic cosine of `value`
#### `<T>::tanh(value: T) -> T`
*Note: (only on floating-point numbers)*
Calculates hyperbolic tangent of `value`
#### `<T>::log(value: T) -> T`
*Note: (only on floating-point numbers)*
Returns logₑ of `value`
#### `<T>::log2(value: T) -> T`
*Note: (only on floating-point numbers)*
Returns log₂ of `value`
#### `<T>::log10(value: T) -> T`
*Note: (only on floating-point numbers)*
Returns log₁₀ of `value`
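For example (illustrative values):

```rust
f64::log2(8.0);     // 3.0
f64::log10(1000.0); // 3.0
```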
#### `<T>::round(value: T) -> T`
*Note: (only on floating-point numbers)*
Rounds `value` to the nearest integer
#### `<T>::trunc(value: T) -> T`
*Note: (only on floating-point numbers)*
Truncates `value` to the integer nearest to `0`.
#### `<T>::ceil(value: T) -> T`
*Note: (only on floating-point numbers)*
Rounds `value` towards positive infinity.
#### `<T>::floor(value: T) -> T`
*Note: (only on floating-point numbers)*
Rounds `value` towards negative infinity.
#### `<T>::even(value: T) -> T`
*Note: (only on floating-point numbers)*
Rounds `value` to the closest even integer.
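The rounding intrinsics differ only in the direction they round; for example (illustrative values):

```rust
f64::floor(2.7);  // 2.0
f64::ceil(2.1);   // 3.0
f64::trunc(-2.7); // -2.0
f64::round(2.4);  // 2.0
```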


@ -28,6 +28,7 @@ fn main() -> u32 {
let mut list = u64::malloc(15);
list[4] = 17;
print(from_str("value: ") + list[4]);
return i32::sizeof() as u32;


@ -22,6 +22,8 @@ fn main() -> u32 {
let otus = Otus { field: 17 };
print(from_str("otus: ") + otus.test() as u64);
+otus.field;
return otus.test();
}


@ -0,0 +1,6 @@
import foreign_struct::Vec2;
fn main() -> u32 {
let a = Vec2 {x: 16, y: 32};
return a.x;
}


@ -0,0 +1 @@
struct Vec2 { x: u32, y: u32 }


@ -4,5 +4,6 @@ import std::print;
fn main() -> u8 {
let bytes = include_bytes!("./macro_easy_file.txt");
print(String::new() + bytes.length());
-return (bytes as *u8)[0];
+print(String::new() + (include_bytes!("./macro_easy_file.txt") as *u8)[1] as u64);
+return (include_bytes!("./macro_easy_file.txt") as *u8)[0];
}


@ -0,0 +1,15 @@
extern fn printf(message: *char, num: f64);
fn main() -> i32 {
let b = 5;
let mut otus = i32::malloc(1);
otus[0] = 10500300;
let potus = i32::malloc(1);
i32::memcpy(potus, otus, 1);
printf("log10 %f\n", f64::round(123.3) as f64);
printf("sqrt %f\n", f64::sqrt(2) as f64);
printf("log10 %f\n", potus[0] as f64);
return potus[0];
}


@ -0,0 +1,8 @@
struct Otus {
field: u32,
}
pub fn test() -> Otus {
Otus {field: 4}
}


@ -0,0 +1,11 @@
// Arithmetic, function calls and imports!
import module_importee::Otus;
import module_importee::test;
fn main() -> u32 {
let value = 0b110;
let other = 0o17;
return value * other + test().field * -value;
}


@ -0,0 +1,25 @@
struct Game {}
impl Game {
pub fn run_frame(&mut self) {}
}
struct Platform {
game: Game,
}
impl Platform {
pub fn new() -> Platform {
return Platform { game: Game {} };
}
pub fn run_frame(&mut self) {
*self.game.run_frame();
}
}
fn main() -> i32 {
let mut platform = Platform::new();
platform.run_frame();
return 0;
}


@ -0,0 +1,5 @@
import triple_import_vec2::Vec2;
import triple_import_ship::Ship;
fn main() -> i32 { return 0; }


@ -0,0 +1,3 @@
import triple_import_vec2::Vec2;
struct Ship { position: Vec2 }


@ -0,0 +1,2 @@
struct Vec2 { x: f32, y: f32 }


@ -16,7 +16,7 @@ BINARY="$(echo $1 | cut -d'.' -f1)"".out"
echo $1
-cargo run --example cli $@ && \
+cargo run --features log_output --example cli $@ && \
./$BINARY ; echo "Return value: ""$?"

## Command from: clang -v hello.o -o test


@ -1,6 +1,6 @@
[package]
name = "reid-lib"
-version = "1.0.0-beta.2"
+version = "1.0.0-beta.4"
edition = "2024"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html


@ -52,7 +52,5 @@ fn main() {
else_b.terminate(TerminatorKind::Ret(add)).unwrap();
-dbg!(&context);
context.compile(None, Vec::new());
}


@ -223,7 +223,6 @@ impl Builder {
unsafe {
let mut modules = self.modules.borrow_mut();
let module = modules.get_unchecked_mut(module.0);
-dbg!(module.functions.iter().map(|f| f.data.name.clone()).collect::<Vec<_>>());
module.functions.iter().find(|f| f.data.name == *name).map(|f| f.value)
}
}
@ -611,30 +610,9 @@ impl Builder {
Instr::PtrToInt(instr, ty) => instr.cast_to(self, &ty).map(|_| ()),
Instr::IntToPtr(instr, ty) => instr.cast_to(self, &ty).map(|_| ()),
Instr::BitCast(..) => Ok(()),
-Instr::ShiftRightLogical(_, rhs) => {
-let rhs_ty = rhs.get_type(&self)?;
-if rhs_ty.category() == TypeCategory::UnsignedInteger {
-Ok(())
-} else {
-Err(ErrorKind::Null)
-}
-}
+Instr::ShiftRightLogical(lhs, rhs) => match_types(&lhs, &rhs, &self).map(|_| ()),
-Instr::ShiftRightArithmetic(_, rhs) => {
-let rhs_ty = rhs.get_type(&self)?;
-if rhs_ty.category() == TypeCategory::UnsignedInteger {
-Ok(())
-} else {
-Err(ErrorKind::Null)
-}
-}
+Instr::ShiftRightArithmetic(lhs, rhs) => match_types(&lhs, &rhs, &self).map(|_| ()),
-Instr::ShiftLeft(_, rhs) => {
-let rhs_ty = rhs.get_type(&self)?;
-if rhs_ty.category() == TypeCategory::UnsignedInteger {
-Ok(())
-} else {
-Err(ErrorKind::Null)
-}
-}
+Instr::ShiftLeft(lhs, rhs) => match_types(&lhs, &rhs, &self).map(|_| ()),
Instr::GetGlobal(_) => Ok(()),
}
}


@ -123,8 +123,6 @@ impl CompiledModule {
let llvm_ir =
from_cstring(LLVMPrintModuleToString(self.module_ref)).expect("Unable to print LLVM IR to string");
-println!("{}", llvm_ir);
let mut err = ErrorMessageHolder::null();
LLVMVerifyModule(
self.module_ref,
@ -542,7 +540,7 @@ impl DebugTypeHolder {
field.pos.map(|p| p.line).unwrap_or(1),
field.size_bits,
0,
-1,
+field.offset,
field.flags.as_llvm(),
*debug.types.get(&field.ty).unwrap(),
)


@ -6,7 +6,7 @@ use std::{
};

use crate::{
-CmpPredicate, Context, Instr, InstructionData, TerminatorKind,
+CmpPredicate, Context, Instr, InstructionData, TerminatorKind, Type,
builder::*,
debug_information::{
DebugArrayType, DebugBasicType, DebugFieldType, DebugInformation, DebugLocalVariable, DebugLocation,


@ -0,0 +1,263 @@
use crate::{CompileResult, Type, TypeCategory, builder::Builder};
#[derive(Clone, Debug)]
pub enum LLVMIntrinsic {
Abs(Type),
Max(Type),
Min(Type),
Memcpy(Type),
Sqrt(Type),
PowI(Type, Type),
Pow(Type),
Sin(Type),
Cos(Type),
Tan(Type),
ASin(Type),
ACos(Type),
ATan(Type),
ATan2(Type),
SinH(Type),
CosH(Type),
TanH(Type),
Log(Type),
Log2(Type),
Log10(Type),
Copysign(Type),
Floor(Type),
Ceil(Type),
Trunc(Type),
RoundEven(Type),
Round(Type),
}
impl LLVMIntrinsic {
pub(crate) fn signature(&self, builder: &Builder) -> CompileResult<(String, Vec<Type>, Type)> {
match self {
LLVMIntrinsic::Max(ty) => {
let name = match ty.category() {
TypeCategory::SignedInteger => format!("llvm.smax.{}", ty.llvm_ty_str(builder)),
TypeCategory::UnsignedInteger => format!("llvm.umax.{}", ty.llvm_ty_str(builder)),
TypeCategory::Real => format!("llvm.maximum.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone(), ty.clone()], ty.clone()))
}
LLVMIntrinsic::Min(ty) => {
let name = match ty.category() {
TypeCategory::SignedInteger => format!("llvm.smin.{}", ty.llvm_ty_str(builder)),
TypeCategory::UnsignedInteger => format!("llvm.umin.{}", ty.llvm_ty_str(builder)),
TypeCategory::Real => format!("llvm.minimum.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone(), ty.clone()], ty.clone()))
}
LLVMIntrinsic::Abs(ty) => {
let name = match ty.category() {
TypeCategory::SignedInteger => format!("llvm.abs.{}", ty.llvm_ty_str(builder)),
TypeCategory::UnsignedInteger => format!("llvm.abs.{}", ty.llvm_ty_str(builder)),
TypeCategory::Real => format!("llvm.fabs.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone(), Type::Bool], ty.clone()))
}
LLVMIntrinsic::Memcpy(ty) => {
let name = match ty.category() {
TypeCategory::Ptr => String::from("llvm.memcpy"),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone(), ty.clone(), Type::U64, Type::Bool], Type::Void))
}
LLVMIntrinsic::Sqrt(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.sqrt.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::PowI(ty1, ty2) => {
let name = match (ty1.category(), ty2.category()) {
(TypeCategory::Real, TypeCategory::SignedInteger) => {
format!("llvm.powi.{}.{}", ty1.llvm_ty_str(builder), ty2.llvm_ty_str(builder))
}
(TypeCategory::Real, TypeCategory::UnsignedInteger) => {
format!("llvm.powi.{}.{}", ty1.llvm_ty_str(builder), ty2.llvm_ty_str(builder))
}
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty1.clone(), ty2.clone()], ty1.clone()))
}
LLVMIntrinsic::Pow(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.pow.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone(), ty.clone()], ty.clone()))
}
LLVMIntrinsic::Sin(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.sin.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::Cos(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.cos.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::Tan(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.tan.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::ASin(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.asin.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::ACos(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.acos.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::ATan(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.atan.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::ATan2(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.atan2.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone(), ty.clone()], ty.clone()))
}
LLVMIntrinsic::SinH(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.sinh.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::CosH(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.cosh.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::TanH(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.tanh.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::Log(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.log.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::Log2(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.log2.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::Log10(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.log10.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::Copysign(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.copysign.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::Floor(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.floor.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::Ceil(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.ceil.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::Trunc(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.trunc.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::RoundEven(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.roundeven.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
LLVMIntrinsic::Round(ty) => {
let name = match ty.category() {
TypeCategory::Real => format!("llvm.rint.{}", ty.llvm_ty_str(builder)),
_ => return Err(crate::ErrorKind::Null),
};
Ok((name, vec![ty.clone()], ty.clone()))
}
}
}
}
impl Type {
fn llvm_ty_str(&self, builder: &Builder) -> String {
match self {
Type::I8 => String::from("i8"),
Type::I16 => String::from("i16"),
Type::I32 => String::from("i32"),
Type::I64 => String::from("i64"),
Type::I128 => String::from("i128"),
Type::U8 => String::from("i8"),
Type::U16 => String::from("i16"),
Type::U32 => String::from("i32"),
Type::U64 => String::from("i64"),
Type::U128 => String::from("i128"),
Type::F16 => String::from("f16"),
Type::F32B => String::from("f32b"),
Type::F32 => String::from("f32"),
Type::F64 => String::from("f64"),
Type::F80 => String::from("x86_fp80"),
Type::F128 => String::from("fp128"),
Type::F128PPC => String::from("ppc_fp128"),
Type::Bool => String::from("i1"),
Type::Void => String::from("void"),
Type::CustomType(type_value) => {
let ty = unsafe { builder.type_data(type_value) };
ty.name.clone()
}
Type::Array(ty, len) => format!("[{} x {}]", len, ty.llvm_ty_str(builder)),
Type::Ptr(_) => String::from("ptr"),
}
}
}


@ -11,12 +11,14 @@ use fmt::PrintableModule;
use crate::{
builder::{ConstantValue, GlobalValue},
debug_information::DebugScopeValue,
+intrinsics::LLVMIntrinsic,
};

pub mod builder;
pub mod compile;
pub mod debug_information;
mod fmt;
+pub mod intrinsics;
mod pad_adapter;
mod util;
@ -95,6 +97,25 @@ impl<'ctx> Module<'ctx> {
}
}
pub fn intrinsic(&self, intrinsic: LLVMIntrinsic) -> CompileResult<FunctionValue> {
unsafe {
let (name, params, ret) = intrinsic.signature(&self.builder)?;
Ok(self.builder.add_function(
&self.value,
FunctionData {
name: name.to_owned(),
linkage_name: Some(name.to_owned()),
ret,
params,
flags: FunctionFlags {
is_extern: true,
..Default::default()
},
},
))
}
}
pub fn custom_type(&self, ty: CustomTypeKind) -> TypeValue {
unsafe {
let (name, kind) = match &ty {
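The `intrinsic` helper added above resolves an LLVM intrinsic's mangled name through `LLVMIntrinsic::signature` and declares it as an extern function on the module. A minimal usage sketch, assuming an existing `module: Module` and a caller that returns `CompileResult` (the variable name is illustrative):

```rust
// Declare llvm.sqrt.f64 through the new helper and keep its FunctionValue around.
let sqrt_fn = module.intrinsic(LLVMIntrinsic::Sqrt(Type::F64))?;
// sqrt_fn can then be called like any other declared extern function.
```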

reid-lsp/.gitignore (vendored, new file, 7 lines)

@ -0,0 +1,7 @@
.vscode
node_modules
dist
package-lock.json
pnpm-lock.yaml
tsconfig.tsbuildinfo
*.vsix

reid-lsp/.npmrc (new file, 1 line)

@ -0,0 +1 @@
enable-pre-post-scripts = true


@ -0,0 +1,5 @@
import { defineConfig } from '@vscode/test-cli';
export default defineConfig({
files: 'out/test/**/*.test.js',
});

reid-lsp/.vscodeignore (new file, 15 lines)

@ -0,0 +1,15 @@
.vscode/**
.vscode-test/**
out/**
node_modules/**
src/**
client/**
.gitignore
.yarnrc
webpack.config.js
vsc-extension-quickstart.md
**/tsconfig.json
**/eslint.config.mjs
**/*.map
**/*.ts
**/.vscode-test.*

reid-lsp/CHANGELOG.md (new file, 9 lines)

@ -0,0 +1,9 @@
# Change Log
All notable changes to the "reid-lsp" extension will be documented in this file.
Check [Keep a Changelog](http://keepachangelog.com/) for recommendations on how to structure this file.
## [Unreleased]
- Initial release

reid-lsp/Cargo.toml (new file, 11 lines)

@ -0,0 +1,11 @@
[package]
name = "reid-language-server"
version = "1.0.0-beta.1"
edition = "2024"
[dependencies]
socket = "0.0.7"
tokio = { version = "1.47.0", features = ["full"] }
tower-lsp = "0.20.0"
reid = { path = "../reid", version = "1.0.0-beta.4", registry="gitea-teascade", features=[] }
dashmap = "6.1.0"

reid-lsp/README.md (new file, 1 line)

@ -0,0 +1 @@
# Reid Language Server


@ -0,0 +1,27 @@
{
"name": "reid-lsp",
"displayName": "Reid Language Server",
"description": "Language Server Extension for Reid",
"version": "0.0.1",
"engines": {
"vscode": "^1.102.0"
},
"main": "../out/extension.js",
"devDependencies": {
"@types/mocha": "^10.0.10",
"@types/node": "20.x",
"@types/vscode": "^1.102.0",
"@typescript-eslint/eslint-plugin": "^8.31.1",
"@typescript-eslint/parser": "^8.31.1",
"@vscode/test-cli": "^0.0.11",
"@vscode/test-electron": "^2.5.2",
"eslint": "^9.25.1",
"ts-loader": "^9.5.2",
"typescript": "^5.8.3",
"webpack": "^5.99.7",
"webpack-cli": "^6.0.1"
},
"dependencies": {
"vscode-languageclient": "^9.0.1"
}
}


@ -0,0 +1,96 @@
/* --------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
* ------------------------------------------------------------------------------------------ */
import * as path from 'path';
import { workspace, ExtensionContext, window, languages, SemanticTokensBuilder } from 'vscode';
import {
Executable,
LanguageClient,
LanguageClientOptions,
ServerOptions,
TransportKind
} from 'vscode-languageclient/node';
let client: LanguageClient;
export function activate(context: ExtensionContext) {
const configuration = workspace.getConfiguration('reid-language-server');
let server_path: string = process.env.SERVER_PATH ?? configuration.get("language-server-path") ?? 'reid-language-server';
const regex = /\$(\w+)/;
while (regex.test(server_path)) {
let envVar = regex.exec(server_path)?.[1];
const envVal = envVar ? process.env[envVar] : undefined;
if (envVar === undefined || envVal === undefined) {
console.error(`No such environment variables as ${envVar}`);
}
server_path = server_path.replaceAll(`$${envVar}`, envVal ?? '');
}
const run: Executable = {
command: server_path,
options: {
env: {
...process.env,
RUST_LOG: "debug",
RUST_BACKTRACE: 1,
}
}
};
const serverOptions: ServerOptions = {
run,
debug: run,
};
// Options to control the language client
const clientOptions: LanguageClientOptions = {
// Register the server for plain text documents
documentSelector: [{ scheme: 'file', language: 'reid' }],
synchronize: {
// Notify the server about file changes to '.clientrc files contained in the workspace
fileEvents: workspace.createFileSystemWatcher('**/.clientrc')
},
};
// Create the language client and start the client.
client = new LanguageClient(
'reid-language-server',
'Reid Language Server',
serverOptions,
clientOptions
);
client.info(JSON.stringify(server_path));
client.info(`Loaded Reid Language Server from ${server_path}`);
workspace.onDidOpenTextDocument((e) => { });
client.info("Registering semantic tokens provide");
context.subscriptions.push(languages.registerDocumentSemanticTokensProvider({
language: 'reid',
scheme: 'file'
}, {
provideDocumentSemanticTokens: () => {
const builder = new SemanticTokensBuilder();
return builder.build();
}
}, {
tokenTypes: [],
tokenModifiers: [],
}));
// Start the client. This will also launch the server
client.start();
}
export function deactivate(): Thenable<void> | undefined {
if (!client) {
return undefined;
}
return client.stop();
}


@ -0,0 +1,15 @@
import * as assert from 'assert';
// You can import and use all API from the 'vscode' module
// as well as import your extension to test it
import * as vscode from 'vscode';
// import * as myExtension from '../../extension';
suite('Extension Test Suite', () => {
vscode.window.showInformationMessage('Start all tests.');
test('Sample test', () => {
assert.strictEqual(-1, [1, 2, 3].indexOf(5));
assert.strictEqual(-1, [1, 2, 3].indexOf(0));
});
});


@ -0,0 +1,24 @@
{
"compilerOptions": {
"module": "Node16",
"target": "ES2022",
"lib": [
"ES2022"
],
"sourceMap": true,
"rootDir": "src",
"outDir": "../dist",
"strict": true /* enable all strict type-checking options */
/* Additional Checks */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
},
"include": [
"src"
],
"exclude": [
"node_modules",
".vscode-test"
]
}


@ -0,0 +1,28 @@
import typescriptEslint from "@typescript-eslint/eslint-plugin";
import tsParser from "@typescript-eslint/parser";
export default [{
files: ["**/*.ts"],
}, {
plugins: {
"@typescript-eslint": typescriptEslint,
},
languageOptions: {
parser: tsParser,
ecmaVersion: 2022,
sourceType: "module",
},
rules: {
"@typescript-eslint/naming-convention": ["warn", {
selector: "import",
format: ["camelCase", "PascalCase"],
}],
curly: "warn",
eqeqeq: "warn",
"no-throw-literal": "warn",
semi: "warn",
},
}];


@ -0,0 +1,78 @@
{
"comments": {
"lineComment": "//",
},
"brackets": [
[
"{",
"}"
],
[
"[",
"]"
],
[
"(",
")"
]
],
"autoClosingPairs": [
{
"open": "{",
"close": "}"
},
{
"open": "[",
"close": "]"
},
{
"open": "(",
"close": ")"
},
{
"open": "'",
"close": "'",
"notIn": [
"string",
"comment"
]
},
{
"open": "\"",
"close": "\"",
"notIn": [
"string"
]
},
],
"autoCloseBefore": ";:.,=}])>` \n\t",
"surroundingPairs": [
[
"{",
"}"
],
[
"[",
"]"
],
[
"(",
")"
],
[
"\"",
"\""
],
],
"folding": {
"markers": {
"start": "^\\s*//\\s*#?region\\b",
"end": "^\\s*//\\s*#?endregion\\b"
}
},
"wordPattern": "[a-Z](\\w*)",
"indentationRules": {
"increaseIndentPattern": "^((?!\\/\\/).)*(\\{[^}\"'`]*|\\([^)\"'`]*|\\[[^\\]\"'`]*)$",
"decreaseIndentPattern": "^((?!.*?\\/\\*).*\\*/)?\\s*[\\)\\}\\]].*$"
}
}

reid-lsp/package.json (new file, 78 lines)

@ -0,0 +1,78 @@
{
"name": "reid-language-server",
"displayName": "Reid Language Server",
"description": "Language Server Extension for Reid",
"version": "1.0.0-beta.1",
"repository": {
"url": "https://git.teascade.net/teascade"
},
"engines": {
"vscode": "^1.102.0"
},
"categories": [
"Other"
],
"main": "./dist/extension.js",
"contributes": {
"languages": [
{
"id": "reid",
"extensions": [
".reid"
],
"aliases": [
"Reid"
],
"configuration": "./language-configuration.json"
}
],
"configuration": {
"type": "object",
"title": "reid-language-server",
"properties": {
"reid-language-server.language-server-path": {
"type": "string",
"scope": "window",
"default": "$HOME/.cargo/bin/reid-lsp",
"description": "Path to the Reid Language Server executable"
}
}
},
"grammars": [
{
"language": "reid",
"scopeName": "source.reid",
"path": "./syntaxes/grammar.json"
}
]
},
"scripts": {
"vscode:prepublish": "pnpm run package",
"compile": "npx js-yaml syntaxes/grammar.yaml > syntaxes/grammar.json && webpack",
"watch": "webpack --watch",
"package": "webpack --mode production --devtool hidden-source-map",
"compile-tests": "tsc -p . --outDir out",
"watch-tests": "tsc -p . -w --outDir out",
"pretest": "pnpm run compile-tests && pnpm run compile && pnpm run lint",
"lint": "eslint src",
"test": "vscode-test"
},
"devDependencies": {
"@types/mocha": "^10.0.10",
"@types/node": "20.x",
"@types/vscode": "^1.102.0",
"@typescript-eslint/eslint-plugin": "^8.31.1",
"@typescript-eslint/parser": "^8.31.1",
"@vscode/test-cli": "^0.0.11",
"@vscode/test-electron": "^2.5.2",
"eslint": "^9.25.1",
"js-yaml": "^4.1.0",
"ts-loader": "^9.5.2",
"typescript": "^5.8.3",
"webpack": "^5.99.7",
"webpack-cli": "^6.0.1"
},
"dependencies": {
"vscode-languageclient": "^9.0.1"
}
}

reid-lsp/src/analysis.rs (new file, 1009 lines)
File diff suppressed because it is too large

reid-lsp/src/main.rs (new file, 543 lines)

@ -0,0 +1,543 @@
use std::collections::HashMap;
use std::path::PathBuf;
use dashmap::DashMap;
use reid::ast::lexer::{FullToken, Position};
use reid::error_raporting::{self, ErrorModules, ReidError};
use reid::mir::SourceModuleId;
use reid::parse_module;
use tokio::sync::Mutex;
use tower_lsp::lsp_types::{
self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity,
DidChangeTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentFilter,
GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverContents, HoverParams, HoverProviderCapability,
InitializeParams, InitializeResult, InitializedParams, Location, MarkupContent, MarkupKind, MessageType, OneOf,
Range, ReferenceParams, RenameParams, SemanticToken, SemanticTokensLegend, SemanticTokensOptions,
SemanticTokensParams, SemanticTokensResult, SemanticTokensServerCapabilities, ServerCapabilities, TextDocumentItem,
TextDocumentRegistrationOptions, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
TextDocumentSyncSaveOptions, TextEdit, Url, WorkspaceEdit, WorkspaceFoldersServerCapabilities,
WorkspaceServerCapabilities,
};
use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc};
use crate::analysis::{MODIFIER_LEGEND, StateMap, StaticAnalysis, TOKEN_LEGEND, analyze};
mod analysis;
#[derive(Debug)]
struct Backend {
client: Client,
analysis: DashMap<PathBuf, StaticAnalysis>,
module_to_path: DashMap<SourceModuleId, PathBuf>,
path_to_module: DashMap<PathBuf, SourceModuleId>,
module_id_counter: Mutex<SourceModuleId>,
}
#[tower_lsp::async_trait]
impl LanguageServer for Backend {
async fn initialize(&self, _: InitializeParams) -> jsonrpc::Result<InitializeResult> {
self.client
.log_message(MessageType::INFO, "Initializing Reid Language Server")
.await;
let sync = TextDocumentSyncOptions {
open_close: Some(true),
change: Some(TextDocumentSyncKind::FULL),
will_save: None,
will_save_wait_until: None,
save: Some(TextDocumentSyncSaveOptions::SaveOptions(lsp_types::SaveOptions {
include_text: Some(true),
})),
};
let capabilities = ServerCapabilities {
hover_provider: Some(HoverProviderCapability::Simple(true)),
completion_provider: Some(CompletionOptions { ..Default::default() }),
text_document_sync: Some(TextDocumentSyncCapability::Options(sync)),
workspace: Some(WorkspaceServerCapabilities {
workspace_folders: Some(WorkspaceFoldersServerCapabilities {
supported: Some(true),
change_notifications: Some(OneOf::Left(true)),
}),
file_operations: None,
}),
semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
lsp_types::SemanticTokensRegistrationOptions {
text_document_registration_options: TextDocumentRegistrationOptions {
document_selector: Some(vec![DocumentFilter {
language: Some("reid".to_owned()),
scheme: Some("file".to_owned()),
pattern: None,
}]),
},
semantic_tokens_options: SemanticTokensOptions {
work_done_progress_options: Default::default(),
legend: SemanticTokensLegend {
token_types: TOKEN_LEGEND.into(),
token_modifiers: MODIFIER_LEGEND.into(),
},
range: None,
full: Some(lsp_types::SemanticTokensFullOptions::Bool(true)),
},
static_registration_options: Default::default(),
},
)),
references_provider: Some(OneOf::Left(true)),
definition_provider: Some(OneOf::Left(true)),
rename_provider: Some(OneOf::Left(true)),
..Default::default()
};
Ok(InitializeResult {
capabilities,
..Default::default()
})
}
async fn initialized(&self, _: InitializedParams) {
self.client
.log_message(MessageType::INFO, "Reid Language Server initialized!")
.await;
}
async fn shutdown(&self) -> jsonrpc::Result<()> {
Ok(())
}
async fn completion(&self, params: CompletionParams) -> jsonrpc::Result<Option<CompletionResponse>> {
let path = PathBuf::from(params.text_document_position.text_document.uri.path());
let analysis = self.analysis.get(&path);
let position = params.text_document_position.position;
let token = if let Some(analysis) = &analysis {
analysis.tokens.iter().enumerate().find(|(_, tok)| {
tok.position.1 == position.line + 1
&& (tok.position.0 <= position.character
&& (tok.position.0 + tok.token.len() as u32) > position.character)
})
} else {
None
};
// dbg!(position, token);
let list = if let Some((idx, _)) = token {
if let Some(analysis) = self.analysis.get(&path).unwrap().state.map.get(&idx) {
analysis
.autocomplete
.iter()
.map(|s| CompletionItem::new_simple(s.text.to_string(), s.kind.to_string()))
.collect()
} else {
Vec::new()
}
} else {
Vec::new()
};
// dbg!(&list);
Ok(Some(CompletionResponse::Array(list)))
}
async fn hover(&self, params: HoverParams) -> jsonrpc::Result<Option<Hover>> {
let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
let analysis = self.analysis.get(&path);
let position = params.text_document_position_params.position;
let token = if let Some(analysis) = &analysis {
analysis.tokens.iter().enumerate().find(|(_, tok)| {
tok.position.1 == position.line + 1
&& (tok.position.0 <= position.character + 1
&& (tok.position.0 + tok.token.len() as u32) > position.character + 1)
})
} else {
None
};
let (range, ty) = if let Some((idx, token)) = token {
if let Some(analysis) = self.analysis.get(&path).unwrap().state.map.get(&idx) {
let start = token.position;
let end = token.position.add(token.token.len() as u32);
let range = Range {
start: lsp_types::Position {
line: (start.1 as i32 - 1).max(0) as u32,
character: (start.0 as i32 - 1).max(0) as u32,
},
end: lsp_types::Position {
line: (end.1 as i32 - 1).max(0) as u32,
character: (end.0 as i32 - 1).max(0) as u32,
},
};
if let Some(ty) = analysis.ty.clone() {
(Some(range), format!("{}", ty))
} else {
(Some(range), String::from("None type"))
}
} else {
(None, String::from("no type"))
}
} else {
(None, String::from("no token"))
};
let contents = HoverContents::Markup(MarkupContent {
kind: MarkupKind::Markdown,
value: format!("`{ty}`"),
});
Ok(Some(Hover { contents, range }))
}
async fn did_open(&self, params: DidOpenTextDocumentParams) {
self.recompile(TextDocumentItem {
uri: params.text_document.uri,
language_id: params.text_document.language_id,
version: params.text_document.version,
text: params.text_document.text,
})
.await
}
async fn did_change(&self, params: DidChangeTextDocumentParams) {
self.recompile(TextDocumentItem {
text: params.content_changes[0].text.clone(),
uri: params.text_document.uri,
version: params.text_document.version,
language_id: String::new(),
})
.await
}
async fn did_save(&self, params: DidSaveTextDocumentParams) {
self.recompile(TextDocumentItem {
text: params.text.unwrap(),
uri: params.text_document.uri,
version: 0,
language_id: String::new(),
})
.await
}
async fn semantic_tokens_full(
&self,
params: SemanticTokensParams,
) -> jsonrpc::Result<Option<SemanticTokensResult>> {
let path = PathBuf::from(params.text_document.uri.path());
let analysis = self.analysis.get(&path);
let mut semantic_tokens = Vec::new();
if let Some(analysis) = analysis {
let mut prev_line = 0;
let mut prev_start = 0;
for (i, token) in analysis.tokens.iter().enumerate() {
let vscode_line = token.position.1.max(1) - 1;
let vscode_col = token.position.0.max(1) - 1;
let delta_line = vscode_line - prev_line;
let delta_start = if delta_line == 0 {
vscode_col - prev_start
} else {
vscode_col
};
if let Some(token_analysis) = analysis.state.map.get(&i) {
if let Some(symbol_id) = token_analysis.symbol {
let symbol = analysis.state.get_local_symbol(symbol_id);
if let Some(idx) = symbol.kind.into_token_idx(&self.state_map()) {
let semantic_token = SemanticToken {
delta_line,
delta_start,
length: token.token.len() as u32,
token_type: idx,
token_modifiers_bitset: symbol.kind.get_modifier().unwrap_or(0),
};
semantic_tokens.push(semantic_token);
prev_line = vscode_line;
prev_start = vscode_col;
}
}
}
}
}
Ok(Some(SemanticTokensResult::Tokens(lsp_types::SemanticTokens {
result_id: None,
data: semantic_tokens,
})))
}
async fn goto_definition(&self, params: GotoDefinitionParams) -> jsonrpc::Result<Option<GotoDefinitionResponse>> {
let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
let analysis = self.analysis.get(&path);
let position = params.text_document_position_params.position;
if let Some(analysis) = &analysis {
let token = analysis.tokens.iter().enumerate().find(|(_, tok)| {
tok.position.1 == position.line + 1
&& (tok.position.0 <= position.character + 1
&& (tok.position.0 + tok.token.len() as u32) > position.character + 1)
});
if let Some(token) = token {
if let Some((module_id, def_token)) = analysis.find_definition(token.0, &self.state_map()) {
return if let Some(path) = self.module_to_path.get(&module_id) {
Ok(Some(GotoDefinitionResponse::Scalar(lsp_types::Location {
uri: Url::from_file_path(path.value()).unwrap(),
range: token_to_range(def_token),
})))
} else {
Ok(None)
};
}
}
};
Ok(None)
}
async fn references(&self, params: ReferenceParams) -> jsonrpc::Result<Option<Vec<Location>>> {
let path = PathBuf::from(params.text_document_position.text_document.uri.path());
let analysis = self.analysis.get(&path);
let position = params.text_document_position.position;
if let Some(analysis) = &analysis {
let token = analysis.tokens.iter().enumerate().find(|(_, tok)| {
tok.position.1 == position.line + 1
&& (tok.position.0 <= position.character + 1
&& (tok.position.0 + tok.token.len() as u32) > position.character + 1)
});
if let Some(token) = token {
let reference_tokens = analysis.find_references(token.0, &self.state_map());
let mut locations = Vec::new();
if let Some(reference_tokens) = reference_tokens {
for (module_id, symbol_idx) in reference_tokens {
if let Some(path) = self.module_to_path.get(&module_id) {
let url = Url::from_file_path(path.value()).unwrap();
if let Some(inner_analysis) = self.analysis.get(path.value()) {
if let Some(token_idx) = inner_analysis.state.symbol_to_token.get(&symbol_idx) {
let token = inner_analysis.tokens.get(*token_idx).unwrap();
locations.push(lsp_types::Location {
uri: url,
range: token_to_range(token),
});
}
}
}
}
}
Ok(Some(locations))
} else {
Ok(None)
}
} else {
Ok(None)
}
}
async fn rename(&self, params: RenameParams) -> jsonrpc::Result<Option<WorkspaceEdit>> {
let path = PathBuf::from(params.text_document_position.text_document.uri.path());
let analysis = self.analysis.get(&path);
let position = params.text_document_position.position;
if let Some(analysis) = &analysis {
let token = analysis.tokens.iter().enumerate().find(|(_, tok)| {
tok.position.1 == position.line + 1
&& (tok.position.0 <= position.character + 1
&& (tok.position.0 + tok.token.len() as u32) > position.character + 1)
});
if let Some(token) = token {
let symbols = analysis.find_references(token.0, &self.state_map());
let mut changes: HashMap<Url, Vec<TextEdit>> = HashMap::new();
if let Some(symbols) = symbols {
for (module_id, symbol_id) in symbols {
let path = self.module_to_path.get(&module_id);
if let Some(path) = path {
let url = Url::from_file_path(path.value()).unwrap();
let analysis = self.analysis.get(&path.clone());
if let Some(analysis) = analysis {
if let Some(token_idx) = analysis.state.symbol_to_token.get(&symbol_id) {
let token = analysis.tokens.get(*token_idx).unwrap();
// edits = changes.get(k)
let edit = TextEdit {
range: token_to_range(token),
new_text: params.new_name.clone(),
};
if let Some(edits) = changes.get_mut(&url) {
edits.push(edit);
} else {
changes.insert(url, vec![edit]);
}
}
}
}
}
}
Ok(Some(WorkspaceEdit {
changes: Some(changes),
document_changes: None,
change_annotations: None,
}))
} else {
Ok(None)
}
} else {
Ok(None)
}
}
}
fn token_to_range(token: &FullToken) -> lsp_types::Range {
Range {
start: lsp_types::Position {
line: token.position.1.max(1) - 1,
character: token.position.0.max(1) - 1,
},
end: lsp_types::Position {
line: token.position.1.max(1) - 1,
character: token.position.0.max(1) - 1 + token.token.len() as u32,
},
}
}
impl Backend {
fn state_map(&self) -> StateMap {
let mut state_map = HashMap::new();
for path_state in self.analysis.iter() {
let (path, state) = path_state.pair();
if let Some(module_id) = self.path_to_module.get(path) {
state_map.insert(*module_id, state.state.clone());
}
}
state_map
}
async fn recompile(&self, params: TextDocumentItem) {
let file_path = PathBuf::from(params.uri.clone().path());
let mut map: ErrorModules = Default::default();
for url_module in self.path_to_module.iter() {
let (url, module) = url_module.pair();
map.add_module(
url.file_name().unwrap().to_str().unwrap().to_owned(),
Some(url.clone()),
Some(*module),
);
}
let module_id = if let Some(module_id) = self.path_to_module.get(&file_path) {
*module_id
} else {
let mut lock = self.module_id_counter.lock().await;
let module_id = lock.increment();
drop(lock);
self.path_to_module.insert(file_path.clone(), module_id);
self.module_to_path.insert(module_id, file_path.clone());
module_id
};
let parse_res = parse(&params.text, file_path.clone(), &mut map, module_id);
let (tokens, result) = match parse_res {
Ok((module_id, tokens)) => (
tokens.clone(),
analyze(module_id, tokens, file_path.clone(), &mut map, &self.state_map()),
),
Err(e) => (Vec::new(), Err(e)),
};
let mut diagnostics = Vec::new();
match result {
Ok(Some(mut analysis)) => {
if let Some(reid_error) = &mut analysis.error {
self.client
.log_message(
MessageType::INFO,
format!("Successfully compiled despite parsing errors!"),
)
.await;
reid_error.errors.dedup();
for error in &reid_error.errors {
diagnostics.push(reid_error_into_diagnostic(error, &tokens));
self.client.log_message(MessageType::INFO, format!("{}", error)).await;
}
}
self.analysis.insert(file_path, analysis);
}
Ok(_) => {}
Err(mut reid_error) => {
reid_error.errors.dedup();
for error in &reid_error.errors {
diagnostics.push(reid_error_into_diagnostic(error, &tokens));
self.client.log_message(MessageType::INFO, format!("{}", error)).await;
}
}
}
self.client
.publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
.await;
}
}
fn reid_error_into_diagnostic(error: &error_raporting::ErrorKind, tokens: &Vec<FullToken>) -> Diagnostic {
let meta = error.get_meta();
let positions = meta
.range
.into_position(&tokens)
.unwrap_or((Position(0, 0), Position(0, 0)));
Diagnostic {
range: Range {
start: lsp_types::Position {
line: ((positions.0.1 as i32) - 1).max(0) as u32,
character: ((positions.0.0 as i32) - 1).max(0) as u32,
},
end: lsp_types::Position {
line: ((positions.1.1 as i32) - 1).max(0) as u32,
character: ((positions.1.0 as i32) - 1).max(0) as u32,
},
},
severity: Some(DiagnosticSeverity::ERROR),
code: None,
code_description: None,
source: Some(error.get_type_str().to_owned()),
message: format!("{}", error),
related_information: None,
tags: None,
data: None,
}
}
fn parse(
source: &str,
path: PathBuf,
map: &mut ErrorModules,
module_id: SourceModuleId,
) -> Result<(SourceModuleId, Vec<FullToken>), ReidError> {
let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
Ok(parse_module(
source,
file_name.clone(),
Some(path),
map,
Some(module_id),
)?)
}
#[tokio::main]
async fn main() {
let stdin = tokio::io::stdin();
let stdout = tokio::io::stdout();
let (service, socket) = LspService::new(|client| Backend {
client,
analysis: DashMap::new(),
module_to_path: DashMap::new(),
path_to_module: DashMap::new(),
module_id_counter: Mutex::new(SourceModuleId(0)),
});
Server::new(stdin, stdout, socket).serve(service).await;
}


@ -0,0 +1,395 @@
{
"scopeName": "source.reid",
"patterns": [
{
"include": "#import"
},
{
"include": "#expression"
}
],
"repository": {
"import": {
"begin": "(import)\\s*",
"end": ";",
"beginCaptures": {
"1": {
"name": "keyword"
}
},
"endCaptures": {
"0": {
"name": "punctuation.semi.reid"
}
},
"patterns": [
{
"include": "#identifier"
},
{
"include": "#punctiation"
}
]
},
"punctuation": {
"patterns": [
{
"match": "::",
"name": "keyword.operator.namespace.reid"
},
{
"match": ";",
"name": "punctuation.semi.reid"
},
{
"match": ".",
"name": "punctuation.dot.reid"
},
{
"match": ",",
"name": "punctuation.comma.reid"
}
]
},
"expression": {
"patterns": [
{
"include": "#comment"
},
{
"include": "#fn-signature"
},
{
"include": "#common-type"
},
{
"include": "#binop-impl"
},
{
"include": "#type-impl"
},
{
"include": "#struct-definition"
},
{
"include": "#block"
},
{
"include": "#binop"
},
{
"include": "#namespace"
},
{
"include": "#cast"
},
{
"include": "#function-call"
},
{
"include": "#parenthesis"
},
{
"include": "#array"
},
{
"include": "#keywords"
},
{
"include": "#struct-expression"
},
{
"include": "#number-literal"
},
{
"include": "#string-literal"
},
{
"include": "#identifier"
},
{
"include": "#punctuation"
}
]
},
"comment": {
"match": "\\/\\/(.|\\/)*",
"name": "comment.line.double-slash.reid"
},
"fn-signature": {
"begin": "(fn)\\s*(\\w+)\\(",
"beginCaptures": {
"1": {
"name": "keyword.fn.reid"
},
"2": {
"name": "entity.name.function.reid"
}
},
"end": "\\)",
"patterns": [
{
"include": "#annotated-identifier"
},
{
"include": "#keywords"
},
{
"include": "#binop"
}
],
"endCaptures": {
"2": {
"name": "entity.name.type.reid"
}
}
},
"type-impl": {
"begin": "(impl)\\s* (\\w+)\\s* \\{\n",
"end": "\\}",
"captures": {
"1": {
"name": "keyword.impl.reid"
},
"2": {
"name": "entity.name.type"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"binop-impl": {
"begin": "(impl)\\s+(binop)\\s+\\(((.*)\\s*:\\s*(.*))\\)(.*)\\(((.*)\\s*:\\s*(.*))\\)\\s*->\\s*(\\w+)\\s*\\{",
"end": "\\}",
"beginCaptures": {
"1": {
"name": "keyword.impl.reid"
},
"2": {
"name": "keyword.impl.reid"
},
"4": {
"name": "variable.parameter.binop.reid"
},
"5": {
"name": "entity.name.type.parameter.binop.reid"
},
"6": {
"name": "keyword.operator.math.reid"
},
"8": {
"name": "variable.parameter.binop.reid"
},
"9": {
"name": "entity.name.type.parameter.binop.reid"
},
"10": {
"name": "entity.name.type.return.binop.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"struct-definition": {
"begin": "(struct)\\s*(\\w+)\\s*\\{",
"end": "\\}",
"captures": {
"1": {
"name": "keyword.struct.reid"
},
"2": {
"name": "entity.name.type"
}
},
"patterns": [
{
"include": "#annotated-identifier"
}
]
},
"struct-expression": {
"begin": "([A-Z]\\w*)\\s*\\{",
"end": "\\}",
"captures": {
"1": {
"name": "entity.name.type.struct.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"number-literal": {
"patterns": [
{
"match": "0x[0-9a-fA-F]+(\\.[0-9a-fA-F]+)?",
"name": "constant.hexadecimal"
},
{
"match": "0o[0-7]+(\\.[0-7]+)?",
"name": "constant.octal"
},
{
"match": "0b[01]+(\\.[01]+)?",
"name": "constant.binary"
},
{
"match": "[0-9]+(\\.[0-9]+)?",
"name": "constant.numeric"
}
]
},
"string-literal": {
"begin": "\"",
"end": "\"",
"name": "string.quoted.double",
"patterns": [
{
"match": "\\.",
"name": "constant.character.escape"
}
]
},
"block": {
"begin": "\\{",
"end": "\\}",
"patterns": [
{
"include": "#expression"
}
]
},
"namespace": {
"match": "(\\w+)(\\:\\:)",
"captures": {
"1": {
"name": "entity.name.function.reid"
},
"2": {
"name": "keyword.operator.namespace.reid"
}
}
},
"cast": {
"match": "(as)\\s+(\\w+)",
"captures": {
"1": {
"name": "keyword.cast.reid"
},
"2": {
"name": "entity.name.type.reid"
}
}
},
"function-call": {
"begin": "(\\w+)?\\(",
"end": "\\)",
"beginCaptures": {
"1": {
"name": "entity.name.function.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"parenthesis": {
"begin": "\\(",
"end": "\\)",
"beginCaptures": {
"0": {
"name": "keyword.operator.parenthesis.reid"
}
},
"endCaptures": {
"0": {
"name": "keyword.operator.parenthesis.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"annotated-identifier": {
"begin": "(\\w+)\\:",
"end": ",",
"beginCaptures": {
"1": {
"name": "variable.language.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"identifier": {
"patterns": [
{
"match": "[A-Z]\\w*",
"name": "entity.name.type.reid"
},
{
"match": "\\w+",
"name": "variable.language.reid"
}
]
},
"keywords": {
"patterns": [
{
"match": "let|mut|pub|extern",
"name": "storage.type.reid"
},
{
"match": "if|return",
"name": "keyword.control"
},
{
"match": "self",
"name": "variable.language.self.reid"
}
]
},
"binop": {
"match": "\\<\\=|\\>\\=|\\=\\=|\\<|\\>|\\*|\\+|\\-|\\^|\\&\\&|\\&",
"name": "keyword.operator.math.reid"
},
"array": {
"begin": "\\[",
"end": "\\]",
"beginCaptures": {
"0": {
"name": "entity.name.type.array.reid"
}
},
"endCaptures": {
"0": {
"name": "entity.name.type.array.reid"
}
},
"patterns": [
{
"include": "#expression"
}
]
},
"common-type": {
"match": "u8|u16|u32|u64|u128|i8|i16|i32|i64|i128|bool",
"name": "entity.name.type.common.reid"
}
}
}

View File

@ -0,0 +1,232 @@
scopeName: source.reid
patterns:
- include: "#import"
- include: "#expression"
repository:
# function-definition:
# begin: "(fn)\\s*(\\w+)\\(((\\w+)\\s*\\:\\s*(\\w+),?)*\\)\\s*->\\s*(\\w+)\\s*\\{"
# end: "\\}"
# beginCaptures:
# 1:
# name: "keyword.other"
# 2:
# name: "entity.name.function"
# 4:
# name: "entity.name.parameter"
# 5:
# name: "entity.name.type"
# 6:
# name: "entity.name.type"
# patterns:
# - include: "#type"
# - include: "#expression"
import:
begin: "(import)\\s*"
end: ";"
beginCaptures:
1:
name: keyword
endCaptures:
0:
name: punctuation.semi.reid
patterns:
- include: "#identifier"
- include: "#punctiation"
punctuation:
patterns:
- match: "::"
name: keyword.operator.namespace.reid
- match: ";"
name: punctuation.semi.reid
- match: "."
name: punctuation.dot.reid
- match: ","
name: punctuation.comma.reid
expression:
patterns:
- include: "#comment"
- include: "#fn-signature"
- include: "#common-type"
- include: "#binop-impl"
- include: "#type-impl"
- include: "#struct-definition"
- include: "#block"
- include: "#binop"
- include: "#namespace"
- include: "#cast"
- include: "#function-call"
- include: "#parenthesis"
- include: "#array"
- include: "#keywords"
- include: "#struct-expression"
- include: "#number-literal"
- include: "#string-literal"
- include: "#identifier"
- include: "#punctuation"
comment:
match: "\\/\\/(.|\\/)*"
name: comment.line.double-slash.reid
fn-signature:
begin: "(fn)\\s*(\\w+)\\("
beginCaptures:
1:
name: keyword.fn.reid
2:
name: entity.name.function.reid
end: "\\)"
patterns:
- include: "#annotated-identifier"
- include: "#keywords"
- include: "#binop"
endCaptures:
2:
name: entity.name.type.reid
type-impl:
begin: "(impl)\\s*(\\w+)\\s*\\{"
end: "\\}"
captures:
1:
name: keyword.impl.reid
2:
name: entity.name.type
patterns:
- include: "#expression"
binop-impl:
begin: "(impl)\\s+(binop)\\s+\\(((.*)\\s*:\\s*(.*))\\)(.*)\\(((.*)\\s*:\\s*(.*))\\)\\s*->\\s*(\\w+)\\s*\\{"
end: "\\}"
beginCaptures:
1:
name: keyword.impl.reid
2:
name: keyword.impl.reid
4:
name: variable.parameter.binop.reid
5:
name: entity.name.type.parameter.binop.reid
6:
name: keyword.operator.math.reid
8:
name: variable.parameter.binop.reid
9:
name: entity.name.type.parameter.binop.reid
10:
name: entity.name.type.return.binop.reid
patterns:
- include: "#expression"
struct-definition:
begin: "(struct)\\s*(\\w+)\\s*\\{"
end: "\\}"
captures:
1:
name: keyword.struct.reid
2:
name: entity.name.type
patterns:
- include: "#annotated-identifier"
struct-expression:
begin: "([A-Z]\\w*)\\s*\\{"
end: "\\}"
captures:
1:
name: entity.name.type.struct.reid
patterns:
- include: "#expression"
number-literal:
patterns:
- match: "0x[0-9a-fA-F]+(\\.[0-9a-fA-F]+)?"
name: "constant.hexadecimal"
- match: "0o[0-7]+(\\.[0-7]+)?"
name: "constant.octal"
- match: "0b[01]+(\\.[01]+)?"
name: "constant.binary"
- match: "[0-9]+(\\.[0-9]+)?"
name: "constant.numeric"
string-literal:
begin: '"'
end: '"'
name: string.quoted.double
patterns:
- match: "\\."
name: constant.character.escape
block:
begin: "\\{"
end: "\\}"
patterns:
- include: "#expression"
namespace:
match: "(\\w+)(\\:\\:)"
captures:
1:
name: entity.name.function.reid
2:
name: keyword.operator.namespace.reid
cast:
match: "(as)\\s+(\\w+)"
captures:
1:
name: keyword.cast.reid
2:
name: entity.name.type.reid
function-call:
begin: "(\\w+)?\\("
end: "\\)"
beginCaptures:
1:
name: entity.name.function.reid
patterns:
- include: "#expression"
parenthesis:
begin: "\\("
end: "\\)"
beginCaptures:
0:
name: keyword.operator.parenthesis.reid
endCaptures:
0:
name: keyword.operator.parenthesis.reid
patterns:
- include: "#expression"
annotated-identifier:
begin: "(\\w+)\\:"
end: ","
beginCaptures:
1:
name: variable.language.reid
patterns:
- include: "#expression"
identifier:
patterns:
- match: "[A-Z]\\w*"
name: entity.name.type.reid
- match: "\\w+"
name: variable.language.reid
keywords:
patterns:
- match: "let|mut|pub|extern"
name: "storage.type.reid"
- match: "if|return"
name: "keyword.control"
- match: "self"
name: "variable.language.self.reid"
binop:
match: "\\<\\=|\\>\\=|\\=\\=|\\<|\\>|\\*|\\+|\\-|\\^|\\&\\&|\\&"
name: keyword.operator.math.reid
array:
begin: "\\["
end: "\\]"
beginCaptures:
0:
name: entity.name.type.array.reid
endCaptures:
0:
name: entity.name.type.array.reid
patterns:
- include: "#expression"
common-type:
match: "u8|u16|u32|u64|u128|i8|i16|i32|i64|i128|bool"
name: entity.name.type.common.reid

29
reid-lsp/tsconfig.json Normal file
View File

@ -0,0 +1,29 @@
{
"compilerOptions": {
"module": "Node16",
"target": "ES2022",
"lib": [
"ES2022"
],
"sourceMap": true,
"rootDir": "src",
"outDir": "out",
"strict": true /* enable all strict type-checking options */
/* Additional Checks */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
},
"include": [
"src"
],
"exclude": [
"node_modules",
".vscode-test"
],
"references": [
{
"path": "./client/"
},
]
}

View File

@ -0,0 +1,48 @@
# Welcome to your VS Code Extension
## What's in the folder
* This folder contains all of the files necessary for your extension.
* `package.json` - this is the manifest file in which you declare your extension and command.
* The sample plugin registers a command and defines its title and command name. With this information VS Code can show the command in the command palette. It doesn't yet need to load the plugin.
* `src/extension.ts` - this is the main file where you will provide the implementation of your command.
* The file exports one function, `activate`, which is called the very first time your extension is activated (in this case by executing the command). Inside the `activate` function we call `registerCommand`.
* We pass the function containing the implementation of the command as the second parameter to `registerCommand`; a minimal sketch of this pattern follows below.
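A minimal sketch of this pattern (the command identifier `reid-lsp.helloWorld` and the message are illustrative placeholders, not necessarily what this extension registers):
```ts
import * as vscode from 'vscode';

// Called the first time the extension is activated, e.g. when the declared command is executed.
export function activate(context: vscode.ExtensionContext) {
    // Register the command implementation under the identifier declared in package.json.
    const disposable = vscode.commands.registerCommand('reid-lsp.helloWorld', () => {
        vscode.window.showInformationMessage('Hello World from the Reid extension!');
    });
    // Dispose of the command registration when the extension is deactivated.
    context.subscriptions.push(disposable);
}
```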
## Setup
* install the recommended extensions (amodio.tsl-problem-matcher, ms-vscode.extension-test-runner, and dbaeumer.vscode-eslint)
## Get up and running straight away
* Press `F5` to open a new window with your extension loaded.
* Run your command from the command palette by pressing (`Ctrl+Shift+P` or `Cmd+Shift+P` on Mac) and typing `Hello World`.
* Set breakpoints in your code inside `src/extension.ts` to debug your extension.
* Find output from your extension in the debug console.
## Make changes
* You can relaunch the extension from the debug toolbar after changing code in `src/extension.ts`.
* You can also reload (`Ctrl+R` or `Cmd+R` on Mac) the VS Code window with your extension to load your changes.
## Explore the API
* You can open the full set of our API when you open the file `node_modules/@types/vscode/index.d.ts`.
## Run tests
* Install the [Extension Test Runner](https://marketplace.visualstudio.com/items?itemName=ms-vscode.extension-test-runner)
* Run the "watch" task via the **Tasks: Run Task** command. Make sure this is running, or tests might not be discovered.
* Open the Testing view from the activity bar and click the "Run Test" button, or use the hotkey `Ctrl/Cmd + ; A`
* See the output of the test result in the Test Results view.
* Make changes to `src/test/extension.test.ts` or create new test files inside the `test` folder.
* The provided test runner will only consider files matching the name pattern `**.test.ts`; a small example is sketched after this list.
* You can create folders inside the `test` folder to structure your tests any way you want.
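A minimal, illustrative example of such a test file (file name and suite title are placeholders):
```ts
// src/test/extension.test.ts — discovered because its name matches **.test.ts
import * as assert from 'assert';

// `suite` and `test` come from the Mocha TDD interface set up by the test runner.
suite('Reid extension test suite', () => {
    test('sample assertion', () => {
        // Purely illustrative; real tests would exercise the extension's behaviour.
        assert.strictEqual([1, 2, 3].indexOf(5), -1);
    });
});
```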
## Go further
* Reduce the extension size and improve the startup time by [bundling your extension](https://code.visualstudio.com/api/working-with-extensions/bundling-extension).
* [Publish your extension](https://code.visualstudio.com/api/working-with-extensions/publishing-extension) on the VS Code extension marketplace.
* Automate builds by setting up [Continuous Integration](https://code.visualstudio.com/api/working-with-extensions/continuous-integration).

View File

@ -0,0 +1,48 @@
//@ts-check
'use strict';
const path = require('path');
//@ts-check
/** @typedef {import('webpack').Configuration} WebpackConfig **/
/** @type WebpackConfig */
const extensionConfig = {
target: 'node', // VS Code extensions run in a Node.js-context 📖 -> https://webpack.js.org/configuration/node/
mode: 'none', // this leaves the source code as close as possible to the original (when packaging we set this to 'production')
entry: './client/src/extension.ts', // the entry point of this extension, 📖 -> https://webpack.js.org/configuration/entry-context/
output: {
// the bundle is stored in the 'dist' folder (check package.json), 📖 -> https://webpack.js.org/configuration/output/
path: path.resolve(__dirname, 'dist'),
filename: 'extension.js',
libraryTarget: 'commonjs2'
},
externals: {
vscode: 'commonjs vscode' // the vscode-module is created on-the-fly and must be excluded. Add other modules that cannot be webpack'ed, 📖 -> https://webpack.js.org/configuration/externals/
// modules added here also need to be added in the .vscodeignore file
},
resolve: {
// support reading TypeScript and JavaScript files, 📖 -> https://github.com/TypeStrong/ts-loader
extensions: ['.ts', '.js']
},
module: {
rules: [
{
test: /\.ts$/,
exclude: /node_modules/,
use: [
{
loader: 'ts-loader'
}
]
}
]
},
devtool: 'nosources-source-map',
infrastructureLogging: {
level: "log", // enables logging required for problem matchers
},
};
module.exports = [extensionConfig];

View File

@ -1,6 +1,6 @@
[package] [package]
name = "reid" name = "reid"
version = "1.0.0-beta.2" version = "1.0.0-beta.4"
edition = "2021" edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -10,10 +10,12 @@ edition = "2021"
default = ["color"] default = ["color"]
color = ["colored"] color = ["colored"]
log_output = []
context_debug = []
[dependencies] [dependencies]
## Make it easier to generate errors ## Make it easier to generate errors
thiserror = "1.0.44" thiserror = "1.0.44"
reid-lib = { path = "../reid-llvm-lib", version = "1.0.0-beta.1", registry="gitea-teascade" } reid-lib = { path = "../reid-llvm-lib", version = "1.0.0-beta.4", registry="gitea-teascade" }
colored = {version = "3.0.0", optional = true} colored = {version = "3.0.0", optional = true}

View File

@ -12,7 +12,6 @@ fn main() -> Result<(), std::io::Error> {
libraries.push(libname); libraries.push(libname);
} }
dbg!(&filename);
let path = PathBuf::from(filename).canonicalize().unwrap(); let path = PathBuf::from(filename).canonicalize().unwrap();
let parent = path.with_extension(""); let parent = path.with_extension("");
let llvm_ir_path = parent.with_extension("ll"); let llvm_ir_path = parent.with_extension("ll");
@ -31,27 +30,30 @@ fn main() -> Result<(), std::io::Error> {
match compile_simple(&text, PathBuf::from(&path), Some(cpu), vec![features]) { match compile_simple(&text, PathBuf::from(&path), Some(cpu), vec![features]) {
Ok(( Ok((
CompileOutput { CompileOutput {
triple, triple: _triple,
assembly, assembly,
obj_buffer, obj_buffer,
llvm_ir, llvm_ir: _llvm_ir,
}, },
CustomIRs { llir, mir }, CustomIRs { llir, mir },
)) => { )) => {
println!("{}", llvm_ir); #[cfg(feature = "log_output")]
{
let after = std::time::SystemTime::now(); println!("{}", _llvm_ir);
println!("Compiled with triple: {}\n", &triple); println!("Compiled with triple: {}\n", &_triple);
fs::write(&llvm_ir_path, &llvm_ir).expect("Could not write LLVM IR -file!");
println!("Output LLVM IR to {:?}", llvm_ir_path); println!("Output LLVM IR to {:?}", llvm_ir_path);
fs::write(&asm_path, &assembly).expect("Could not write Assembly-file!");
println!("Output Assembly to {:?}", asm_path); println!("Output Assembly to {:?}", asm_path);
fs::write(&object_path, &obj_buffer).expect("Could not write Object-file!");
println!("Output Object-file to {:?}\n", object_path); println!("Output Object-file to {:?}\n", object_path);
fs::write(&llir_path, &llir).expect("Could not write LLIR-file!");
println!("Output LLIR-file to {:?}\n", llir_path); println!("Output LLIR-file to {:?}\n", llir_path);
fs::write(&mir_path, &mir).expect("Could not write MIR-file!");
println!("Output MIR-file to {:?}\n", mir_path); println!("Output MIR-file to {:?}\n", mir_path);
}
fs::write(&llvm_ir_path, &_llvm_ir).expect("Could not write LLVM IR -file!");
fs::write(&asm_path, &assembly).expect("Could not write Assembly-file!");
fs::write(&object_path, &obj_buffer).expect("Could not write Object-file!");
fs::write(&llir_path, &llir).expect("Could not write LLIR-file!");
fs::write(&mir_path, &mir).expect("Could not write MIR-file!");
let after = std::time::SystemTime::now();
println!( println!(
"Compilation took: {:.2}ms\n", "Compilation took: {:.2}ms\n",
(after.duration_since(before).unwrap().as_micros() as f32) / 1000. (after.duration_since(before).unwrap().as_micros() as f32) / 1000.
@ -60,7 +62,7 @@ fn main() -> Result<(), std::io::Error> {
println!("Linking {:?}", &object_path); println!("Linking {:?}", &object_path);
let linker = std::env::var("LD").unwrap_or("ld".to_owned()); let linker = std::env::var("LD").unwrap_or("ld".to_owned());
let mut linker = LDRunner::from_command(&linker).with_library("c"); let mut linker = LDRunner::from_command(&linker).with_library("c").with_library("m");
for library in libraries { for library in libraries {
linker = linker.with_library(&library); linker = linker.with_library(&library);
} }
@ -69,6 +71,7 @@ fn main() -> Result<(), std::io::Error> {
Err(e) => panic!("{}", e), Err(e) => panic!("{}", e),
}; };
} else { } else {
#[cfg(feature = "log_output")]
println!("Please input compiled file path!") println!("Please input compiled file path!")
} }
Ok(()) Ok(())

View File

@ -7,7 +7,7 @@ static HEXADECIMAL_NUMERICS: &[char] = &[
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f',
]; ];
#[derive(Eq, PartialEq, Clone, PartialOrd, Ord)] #[derive(Eq, PartialEq, Clone, PartialOrd, Ord, Hash)]
pub enum Token { pub enum Token {
/// Values /// Values
Identifier(String), Identifier(String),
@ -114,6 +114,8 @@ pub enum Token {
Unknown(char), Unknown(char),
Whitespace(String),
Comment(String),
Eof, Eof,
} }
@ -192,6 +194,8 @@ impl ToString for Token {
Token::Eof => String::new(), Token::Eof => String::new(),
Token::Slash => String::from('/'), Token::Slash => String::from('/'),
Token::Percent => String::from('%'), Token::Percent => String::from('%'),
Token::Whitespace(val) => val.clone(),
Token::Comment(val) => format!("//{}", val.clone()),
Token::Unknown(val) => val.to_string(), Token::Unknown(val) => val.to_string(),
} }
} }
@ -207,7 +211,7 @@ impl std::fmt::Debug for Token {
} }
/// A token with a position /// A token with a position
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FullToken { pub struct FullToken {
pub token: Token, pub token: Token,
pub position: Position, pub position: Position,
@ -293,13 +297,25 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Error
let variant = match character { let variant = match character {
// Whitespace // Whitespace
w if w.is_whitespace() => continue, w if w.is_whitespace() => {
let mut whitespace = String::from(*w);
while let Some(w) = cursor.first() {
if !w.is_whitespace() {
break;
}
whitespace.push(cursor.next().unwrap());
}
Token::Whitespace(whitespace)
}
// Comments // Comments
'/' if cursor.first() == Some('/') => { '/' if cursor.first() == Some('/') => {
let mut comment = String::new();
while !matches!(cursor.first(), Some('\n') | None) { while !matches!(cursor.first(), Some('\n') | None) {
cursor.next(); if let Some(c) = cursor.next() {
comment.push(c);
} }
continue; }
Token::Comment(comment)
} }
'\"' | '\'' => { '\"' | '\'' => {
let mut value = String::new(); let mut value = String::new();

View File

@ -1,7 +1,7 @@
//! This is the module that contains relevant code to parsing Reid, that is to //! This is the module that contains relevant code to parsing Reid, that is to
//! say transforming a Vec of FullTokens into a loose parsed AST that can be //! say transforming a Vec of FullTokens into a loose parsed AST that can be
//! used for unwrapping syntax sugar, and then be transformed into Reid MIR. //! used for unwrapping syntax sugar, and then be transformed into Reid MIR.
use std::path::PathBuf; use std::{fs::Metadata, path::PathBuf};
use token_stream::TokenRange; use token_stream::TokenRange;
@ -88,7 +88,7 @@ pub enum ExpressionKind {
/// Array-indexed, e.g. <expr>[<expr>] /// Array-indexed, e.g. <expr>[<expr>]
Indexed(Box<Expression>, Box<Expression>), Indexed(Box<Expression>, Box<Expression>),
/// Struct-accessed, e.g. <expr>.<expr> /// Struct-accessed, e.g. <expr>.<expr>
Accessed(Box<Expression>, String), Accessed(Box<Expression>, String, TokenRange),
/// Associated function call, but with a shorthand /// Associated function call, but with a shorthand
AccessCall(Box<Expression>, Box<FunctionCallExpression>), AccessCall(Box<Expression>, Box<FunctionCallExpression>),
Binop(BinaryOperator, Box<Expression>, Box<Expression>), Binop(BinaryOperator, Box<Expression>, Box<Expression>),
@ -184,7 +184,7 @@ pub struct LetStatement {
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ImportStatement(pub Vec<String>, pub TokenRange); pub struct ImportStatement(pub Vec<(String, TokenRange)>, pub TokenRange);
#[derive(Debug)] #[derive(Debug)]
pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub TokenRange); pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub TokenRange);
@ -193,7 +193,7 @@ pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub To
pub struct FunctionSignature { pub struct FunctionSignature {
pub name: String, pub name: String,
pub self_kind: SelfKind, pub self_kind: SelfKind,
pub params: Vec<(String, Type)>, pub params: Vec<(String, Type, TokenRange)>,
pub return_type: Option<Type>, pub return_type: Option<Type>,
#[allow(dead_code)] #[allow(dead_code)]
pub range: TokenRange, pub range: TokenRange,
@ -216,7 +216,7 @@ pub enum ReturnType {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct StructExpression { pub struct StructExpression {
name: String, name: String,
fields: Vec<(String, Expression)>, fields: Vec<(String, Expression, TokenRange)>,
range: TokenRange, range: TokenRange,
} }
@ -272,9 +272,9 @@ pub enum TopLevelStatement {
#[derive(Debug)] #[derive(Debug)]
pub struct BinopDefinition { pub struct BinopDefinition {
pub lhs: (String, Type), pub lhs: (String, Type, TokenRange),
pub op: BinaryOperator, pub op: BinaryOperator,
pub rhs: (String, Type), pub rhs: (String, Type, TokenRange),
pub return_ty: Type, pub return_ty: Type,
pub block: Block, pub block: Block,
pub signature_range: TokenRange, pub signature_range: TokenRange,

View File

@ -175,7 +175,50 @@ impl Parse for AssociatedFunctionCall {
let ty = stream.parse()?; let ty = stream.parse()?;
stream.expect(Token::Colon)?; stream.expect(Token::Colon)?;
stream.expect(Token::Colon)?; stream.expect(Token::Colon)?;
Ok(AssociatedFunctionCall(ty, stream.parse()?))
if stream.next_is_whitespace() {
stream.expecting_err_nonfatal("associated function name");
return Ok(AssociatedFunctionCall(
ty,
FunctionCallExpression {
name: String::new(),
params: Vec::new(),
range: stream.get_range_prev_curr().unwrap(),
is_macro: false,
},
));
}
match stream.parse() {
Ok(fn_call) => Ok(AssociatedFunctionCall(ty, fn_call)),
_ => {
if let Some(Token::Identifier(fn_name)) = stream.peek() {
stream.next();
stream.expected_err_nonfatal("associated function call");
Ok(AssociatedFunctionCall(
ty,
FunctionCallExpression {
name: fn_name,
params: Vec::new(),
range: stream.get_range_prev_curr().unwrap(),
is_macro: false,
},
))
} else {
stream.expected_err_nonfatal("associated function name");
Ok(AssociatedFunctionCall(
ty,
FunctionCallExpression {
name: String::new(),
params: Vec::new(),
range: stream.get_range_prev_curr().unwrap(),
is_macro: false,
},
))
}
}
}
} }
} }
@ -191,10 +234,11 @@ where
), ),
expr.0 .1, expr.0 .1,
), ),
ExpressionKind::Accessed(value_expr, index_name) => Expression( ExpressionKind::Accessed(value_expr, index_name, range) => Expression(
ExpressionKind::Accessed( ExpressionKind::Accessed(
Box::new(apply_inner(PrimaryExpression(*value_expr.clone()), fun)), Box::new(apply_inner(PrimaryExpression(*value_expr.clone()), fun)),
index_name.clone(), index_name.clone(),
*range,
), ),
expr.0 .1, expr.0 .1,
), ),
@ -243,9 +287,9 @@ impl Parse for PrimaryExpression {
stream.get_range().unwrap(), stream.get_range().unwrap(),
) )
} else if let Some(Token::Star) = stream.peek() { } else if let Some(Token::Star) = stream.peek() {
stream.next(); // Consume Et stream.next(); // Consume Star
apply_inner(stream.parse()?, |e| { apply_inner(stream.parse()?, |e| {
Expression(Kind::Deref(Box::new(e.0)), stream.get_range().unwrap()) Expression(Kind::Deref(Box::new(e.0.clone())), e.0 .1)
}) })
} else if let Ok(unary) = stream.parse() { } else if let Ok(unary) = stream.parse() {
Expression( Expression(
@ -399,9 +443,9 @@ impl Parse for PrimaryExpression {
); );
} }
ValueIndex::Dot(val) => match val { ValueIndex::Dot(val) => match val {
DotIndexKind::StructValueIndex(name) => { DotIndexKind::StructValueIndex(name, range) => {
expr = Expression( expr = Expression(
ExpressionKind::Accessed(Box::new(expr), name), ExpressionKind::Accessed(Box::new(expr), name, range),
stream.get_range().unwrap(), stream.get_range().unwrap(),
); );
} }
@ -466,7 +510,7 @@ fn parse_binop_rhs(
if curr_token_prec < next_prec { if curr_token_prec < next_prec {
// Operator on the right of rhs has more precedence, turn // Operator on the right of rhs has more precedence, turn
// rhs into lhs for new binop // rhs into lhs for new binop
rhs = parse_binop_rhs(stream, rhs, Some(op))?; rhs = stream.parse_with(|mut st| parse_binop_rhs(&mut st, rhs, Some(op)))?;
} else { } else {
let _ = prev_operator.insert(next_op); let _ = prev_operator.insert(next_op);
} }
@ -609,7 +653,7 @@ impl Parse for LetStatement {
stream.expect(Token::Equals)?; stream.expect(Token::Equals)?;
let expression = stream.parse()?; let expression = stream.parse()?;
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
Ok(LetStatement { Ok(LetStatement {
name: variable, name: variable,
ty, ty,
@ -630,19 +674,21 @@ impl Parse for ImportStatement {
let mut import_list = Vec::new(); let mut import_list = Vec::new();
if let Some(Token::Identifier(name)) = stream.next() { if let Some(Token::Identifier(name)) = stream.next() {
import_list.push(name); import_list.push((name, stream.get_range_prev_curr().unwrap()));
while stream.expect(Token::Colon).is_ok() && stream.expect(Token::Colon).is_ok() { while stream.expect(Token::Colon).is_ok() && stream.expect(Token::Colon).is_ok() {
if let Some(Token::Identifier(name)) = stream.next() { if let Some(Token::Identifier(name)) = stream.peek() {
import_list.push(name); stream.next(); // Consume identifier
import_list.push((name, stream.get_range_prev_curr().unwrap()));
} else { } else {
Err(stream.expected_err("identifier")?)? stream.expected_err_nonfatal("identifier");
break;
} }
} }
} else { } else {
Err(stream.expected_err("identifier")?)? Err(stream.expected_err("identifier")?)?
} }
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
Ok(ImportStatement(import_list, stream.get_range().unwrap())) Ok(ImportStatement(import_list, stream.get_range().unwrap()))
} }
@ -668,7 +714,7 @@ impl Parse for FunctionDefinition {
} }
#[derive(Debug)] #[derive(Debug)]
struct FunctionParam(String, Type); struct FunctionParam(String, Type, TokenRange);
impl Parse for FunctionParam { impl Parse for FunctionParam {
fn parse(mut stream: TokenStream) -> Result<Self, Error> { fn parse(mut stream: TokenStream) -> Result<Self, Error> {
@ -676,7 +722,7 @@ impl Parse for FunctionParam {
return Err(stream.expected_err("parameter name")?); return Err(stream.expected_err("parameter name")?);
}; };
stream.expect(Token::Colon)?; stream.expect(Token::Colon)?;
Ok(FunctionParam(arg_name, stream.parse()?)) Ok(FunctionParam(arg_name, stream.parse()?, stream.get_range().unwrap()))
} }
} }
@ -738,11 +784,11 @@ impl Parse for FunctionSignature {
match &self_kind { match &self_kind {
SelfKind::None => { SelfKind::None => {
if let Ok(param) = stream.parse::<FunctionParam>() { if let Ok(param) = stream.parse::<FunctionParam>() {
params.push((param.0, param.1)); params.push((param.0, param.1, param.2));
while let Some(Token::Comma) = stream.peek() { while let Some(Token::Comma) = stream.peek() {
stream.next(); stream.next();
let param = stream.parse::<FunctionParam>()?; let param = stream.parse::<FunctionParam>()?;
params.push((param.0, param.1)); params.push((param.0, param.1, param.2));
} }
} }
} }
@ -750,7 +796,7 @@ impl Parse for FunctionSignature {
while let Some(Token::Comma) = stream.peek() { while let Some(Token::Comma) = stream.peek() {
stream.next(); stream.next();
let param = stream.parse::<FunctionParam>()?; let param = stream.parse::<FunctionParam>()?;
params.push((param.0, param.1)); params.push((param.0, param.1, param.2));
} }
} }
} }
@ -787,7 +833,7 @@ impl Parse for Block {
// if semicolon is missing. // if semicolon is missing.
if !matches!(e, Expression(ExpressionKind::IfExpr(_), _)) { if !matches!(e, Expression(ExpressionKind::IfExpr(_), _)) {
// In theory could ignore the missing semicolon.. // In theory could ignore the missing semicolon..
return Err(stream.expected_err("semicolon to complete statement")?); stream.expected_err_nonfatal("semicolon to complete statement");
} }
statements.push(BlockLevelStatement::Expression(e)); statements.push(BlockLevelStatement::Expression(e));
@ -818,9 +864,10 @@ impl Parse for StructExpression {
let Some(Token::Identifier(name)) = stream.next() else { let Some(Token::Identifier(name)) = stream.next() else {
return Err(stream.expected_err("struct identifier")?); return Err(stream.expected_err("struct identifier")?);
}; };
stream.expect(Token::BraceOpen)?; stream.expect(Token::BraceOpen)?;
let named_list = stream.parse::<NamedFieldList<Expression>>()?; let named_list = stream.parse::<NamedFieldList<Expression>>()?;
let fields = named_list.0.into_iter().map(|f| (f.0, f.1)).collect(); let fields = named_list.0.into_iter().map(|f| (f.0, f.1, f.2)).collect();
stream.expect(Token::BraceClose)?; stream.expect(Token::BraceClose)?;
@ -897,14 +944,15 @@ impl Parse for ArrayValueIndex {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum DotIndexKind { pub enum DotIndexKind {
StructValueIndex(String), StructValueIndex(String, TokenRange),
FunctionCall(FunctionCallExpression), FunctionCall(FunctionCallExpression),
} }
impl Parse for DotIndexKind { impl Parse for DotIndexKind {
fn parse(mut stream: TokenStream) -> Result<Self, Error> { fn parse(mut stream: TokenStream) -> Result<Self, Error> {
stream.expect(Token::Dot)?; stream.expect(Token::Dot)?;
if let Some(Token::Identifier(name)) = stream.next() { if let Some(Token::Identifier(name)) = stream.peek() {
stream.next(); // Consume identifier
if let Ok(args) = stream.parse::<FunctionArgs>() { if let Ok(args) = stream.parse::<FunctionArgs>() {
Ok(Self::FunctionCall(FunctionCallExpression { Ok(Self::FunctionCall(FunctionCallExpression {
name, name,
@ -913,10 +961,18 @@ impl Parse for DotIndexKind {
is_macro: false, is_macro: false,
})) }))
} else { } else {
Ok(Self::StructValueIndex(name)) Ok(Self::StructValueIndex(name, stream.get_range_prev().unwrap()))
} }
} else { } else {
return Err(stream.expected_err("struct index (number)")?); if stream.next_is_whitespace() {
stream.expecting_err_nonfatal("struct index");
Ok(Self::StructValueIndex(
String::new(),
stream.get_range_prev_curr().unwrap(),
))
} else {
Err(stream.expecting_err("struct index")?)
}
} }
} }
} }
@ -930,7 +986,7 @@ impl Parse for BlockLevelStatement {
Some(Token::ReturnKeyword) => { Some(Token::ReturnKeyword) => {
stream.next(); stream.next();
let exp = stream.parse().ok(); let exp = stream.parse().ok();
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
Stmt::Return(ReturnType::Hard, exp) Stmt::Return(ReturnType::Hard, exp)
} }
Some(Token::For) => { Some(Token::For) => {
@ -995,7 +1051,7 @@ impl Parse for SetStatement {
let var_ref = stream.parse()?; let var_ref = stream.parse()?;
stream.expect(Token::Equals)?; stream.expect(Token::Equals)?;
let expr = stream.parse()?; let expr = stream.parse()?;
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
Ok(SetStatement(var_ref, expr, stream.get_range().unwrap())) Ok(SetStatement(var_ref, expr, stream.get_range().unwrap()))
} }
} }
@ -1036,7 +1092,7 @@ impl Parse for TopLevelStatement {
stream.next(); // Consume Extern stream.next(); // Consume Extern
stream.expect(Token::FnKeyword)?; stream.expect(Token::FnKeyword)?;
let extern_fn = Stmt::ExternFunction(stream.parse()?); let extern_fn = Stmt::ExternFunction(stream.parse()?);
stream.expect(Token::Semi)?; stream.expect_nonfatal(Token::Semi).ok();
extern_fn extern_fn
} }
Some(Token::FnKeyword) | Some(Token::PubKeyword) => Stmt::FunctionDefinition(stream.parse()?), Some(Token::FnKeyword) | Some(Token::PubKeyword) => Stmt::FunctionDefinition(stream.parse()?),
@ -1070,6 +1126,7 @@ impl Parse for BinopDefinition {
let Some(Token::Identifier(lhs_name)) = stream.next() else { let Some(Token::Identifier(lhs_name)) = stream.next() else {
return Err(stream.expected_err("lhs name")?); return Err(stream.expected_err("lhs name")?);
}; };
let lhs_range = stream.get_range_prev_curr().unwrap();
stream.expect(Token::Colon)?; stream.expect(Token::Colon)?;
let lhs_type = stream.parse()?; let lhs_type = stream.parse()?;
stream.expect(Token::ParenClose)?; stream.expect(Token::ParenClose)?;
@ -1080,6 +1137,7 @@ impl Parse for BinopDefinition {
let Some(Token::Identifier(rhs_name)) = stream.next() else { let Some(Token::Identifier(rhs_name)) = stream.next() else {
return Err(stream.expected_err("rhs name")?); return Err(stream.expected_err("rhs name")?);
}; };
let rhs_range = stream.get_range_prev_curr().unwrap();
stream.expect(Token::Colon)?; stream.expect(Token::Colon)?;
let rhs_type = stream.parse()?; let rhs_type = stream.parse()?;
stream.expect(Token::ParenClose)?; stream.expect(Token::ParenClose)?;
@ -1089,9 +1147,9 @@ impl Parse for BinopDefinition {
stream.expect(Token::Arrow)?; stream.expect(Token::Arrow)?;
Ok(BinopDefinition { Ok(BinopDefinition {
lhs: (lhs_name, lhs_type), lhs: (lhs_name, lhs_type, lhs_range),
op: operator, op: operator,
rhs: (rhs_name, rhs_type), rhs: (rhs_name, rhs_type, rhs_range),
return_ty: stream.parse()?, return_ty: stream.parse()?,
block: stream.parse()?, block: stream.parse()?,
signature_range, signature_range,
@ -1112,11 +1170,11 @@ impl Parse for AssociatedFunctionBlock {
match stream.peek() { match stream.peek() {
Some(Token::FnKeyword) | Some(Token::PubKeyword) => { Some(Token::FnKeyword) | Some(Token::PubKeyword) => {
let mut fun: FunctionDefinition = stream.parse()?; let mut fun: FunctionDefinition = stream.parse()?;
fun.0.self_kind = match fun.0.self_kind { match &mut fun.0.self_kind {
SelfKind::Owned(_) => SelfKind::Owned(ty.clone()), SelfKind::Owned(inner_ty) => inner_ty.0 = ty.0.clone(),
SelfKind::Borrow(_) => SelfKind::Borrow(ty.clone()), SelfKind::Borrow(inner_ty) => inner_ty.0 = ty.0.clone(),
SelfKind::MutBorrow(_) => SelfKind::MutBorrow(ty.clone()), SelfKind::MutBorrow(inner_ty) => inner_ty.0 = ty.0.clone(),
SelfKind::None => SelfKind::None, SelfKind::None => {}
}; };
functions.push(fun); functions.push(fun);
} }

View File

@ -30,7 +30,14 @@ impl ast::Module {
for stmt in &self.top_level_statements { for stmt in &self.top_level_statements {
match stmt { match stmt {
Import(import) => { Import(import) => {
imports.push(mir::Import(import.0.clone(), import.1.as_meta(module_id))); imports.push(mir::Import(
import
.0
.iter()
.map(|(s, range)| (s.clone(), range.as_meta(module_id)))
.collect(),
import.1.as_meta(module_id),
));
} }
FunctionDefinition(function_def) => functions.push(function_def.into_mir(module_id)), FunctionDefinition(function_def) => functions.push(function_def.into_mir(module_id)),
ExternFunction(signature) => { ExternFunction(signature) => {
@ -51,11 +58,12 @@ impl ast::Module {
.map(|p| mir::FunctionParam { .map(|p| mir::FunctionParam {
name: p.0, name: p.0,
ty: p.1 .0.into_mir(module_id), ty: p.1 .0.into_mir(module_id),
meta: p.1 .1.as_meta(module_id), meta: p.2.as_meta(module_id),
}) })
.collect(), .collect(),
kind: mir::FunctionDefinitionKind::Extern(false), kind: mir::FunctionDefinitionKind::Extern(false),
source: Some(module_id), source: Some(module_id),
signature_meta: signature.range.as_meta(module_id),
}; };
functions.push(def); functions.push(def);
} }
@ -96,13 +104,13 @@ impl ast::Module {
lhs: mir::FunctionParam { lhs: mir::FunctionParam {
name: lhs.0.clone(), name: lhs.0.clone(),
ty: lhs.1 .0.into_mir(module_id), ty: lhs.1 .0.into_mir(module_id),
meta: lhs.1 .1.as_meta(module_id), meta: lhs.2.as_meta(module_id),
}, },
op: op.mir(), op: op.mir(),
rhs: mir::FunctionParam { rhs: mir::FunctionParam {
name: rhs.0.clone(), name: rhs.0.clone(),
ty: rhs.1 .0.into_mir(module_id), ty: rhs.1 .0.into_mir(module_id),
meta: rhs.1 .1.as_meta(module_id), meta: rhs.2.as_meta(module_id),
}, },
return_type: return_ty.0.into_mir(module_id), return_type: return_ty.0.into_mir(module_id),
fn_kind: mir::FunctionDefinitionKind::Local( fn_kind: mir::FunctionDefinitionKind::Local(
@ -164,7 +172,7 @@ impl ast::FunctionDefinition {
params.extend(signature.params.iter().cloned().map(|p| FunctionParam { params.extend(signature.params.iter().cloned().map(|p| FunctionParam {
name: p.0, name: p.0,
ty: p.1 .0.into_mir(module_id), ty: p.1 .0.into_mir(module_id),
meta: p.1 .1.as_meta(module_id), meta: p.2.as_meta(module_id),
})); }));
mir::FunctionDefinition { mir::FunctionDefinition {
name: signature.name.clone(), name: signature.name.clone(),
@ -179,6 +187,7 @@ impl ast::FunctionDefinition {
parameters: params, parameters: params,
kind: mir::FunctionDefinitionKind::Local(block.into_mir(module_id), (range).as_meta(module_id)), kind: mir::FunctionDefinitionKind::Local(block.into_mir(module_id), (range).as_meta(module_id)),
source: Some(module_id), source: Some(module_id),
signature_meta: signature.range.as_meta(module_id),
} }
} }
} }
@ -226,8 +235,9 @@ impl ast::Block {
); );
let let_statement = mir::Statement( let let_statement = mir::Statement(
StmtKind::Let(counter_var.clone(), true, start.process(module_id)), StmtKind::Let(counter_var.clone(), true, start.process(module_id)),
counter_range.as_meta(module_id), start.1.as_meta(module_id),
); );
let statement_range = counter_range.clone() + start.1 + end.1 + block.2;
let set_new = mir::Statement( let set_new = mir::Statement(
StmtKind::Set( StmtKind::Set(
@ -253,8 +263,8 @@ impl ast::Block {
), ),
counter_range.as_meta(module_id), counter_range.as_meta(module_id),
); );
let mut block = block.into_mir(module_id); let mut mir_block = block.into_mir(module_id);
block.statements.push(set_new); mir_block.statements.push(set_new);
let while_statement = mir::Statement( let while_statement = mir::Statement(
StmtKind::While(WhileStatement { StmtKind::While(WhileStatement {
condition: mir::Expression( condition: mir::Expression(
@ -267,23 +277,23 @@ impl ast::Block {
Box::new(end.process(module_id)), Box::new(end.process(module_id)),
mir::TypeKind::Vague(mir::VagueType::Unknown), mir::TypeKind::Vague(mir::VagueType::Unknown),
), ),
counter_range.as_meta(module_id), end.1.as_meta(module_id),
), ),
block, block: mir_block.clone(),
meta: self.2.as_meta(module_id), meta: (*counter_range + end.1 + block.2).as_meta(module_id),
}), }),
self.2.as_meta(module_id), (*counter_range + end.1 + block.2).as_meta(module_id),
); );
let inner_scope = StmtKind::Expression(mir::Expression( let inner_scope = StmtKind::Expression(mir::Expression(
mir::ExprKind::Block(mir::Block { mir::ExprKind::Block(mir::Block {
statements: vec![let_statement, while_statement], statements: vec![let_statement, while_statement],
return_expression: None, return_expression: None,
meta: counter_range.as_meta(module_id) + end.1.as_meta(module_id), meta: statement_range.as_meta(module_id),
}), }),
counter_range.as_meta(module_id) + end.1.as_meta(module_id), statement_range.as_meta(module_id),
)); ));
(inner_scope, self.2) (inner_scope, statement_range)
} }
ast::BlockLevelStatement::WhileLoop(expression, block) => ( ast::BlockLevelStatement::WhileLoop(expression, block) => (
StmtKind::While(WhileStatement { StmtKind::While(WhileStatement {
@ -291,7 +301,7 @@ impl ast::Block {
block: block.into_mir(module_id), block: block.into_mir(module_id),
meta: self.2.as_meta(module_id), meta: self.2.as_meta(module_id),
}), }),
self.2, expression.1 + block.2,
), ),
}; };
@ -371,17 +381,18 @@ impl ast::Expression {
Box::new(idx_expr.process(module_id)), Box::new(idx_expr.process(module_id)),
), ),
ast::ExpressionKind::StructExpression(struct_init) => mir::ExprKind::Struct( ast::ExpressionKind::StructExpression(struct_init) => mir::ExprKind::Struct(
struct_init.name.clone(), CustomTypeKey(struct_init.name.clone(), module_id),
struct_init struct_init
.fields .fields
.iter() .iter()
.map(|(n, e)| (n.clone(), e.process(module_id))) .map(|(n, e, r)| (n.clone(), e.process(module_id), r.as_meta(module_id)))
.collect(), .collect(),
), ),
ast::ExpressionKind::Accessed(expression, name) => mir::ExprKind::Accessed( ast::ExpressionKind::Accessed(expression, name, name_range) => mir::ExprKind::Accessed(
Box::new(expression.process(module_id)), Box::new(expression.process(module_id)),
mir::TypeKind::Vague(mir::VagueType::Unknown), mir::TypeKind::Vague(mir::VagueType::Unknown),
name.clone(), name.clone(),
name_range.as_meta(module_id),
), ),
ast::ExpressionKind::Borrow(expr, mutable) => { ast::ExpressionKind::Borrow(expr, mutable) => {
mir::ExprKind::Borrow(Box::new(expr.process(module_id)), *mutable) mir::ExprKind::Borrow(Box::new(expr.process(module_id)), *mutable)

View File

@ -1,6 +1,8 @@
//! Contains relevant code for parsing tokens received from //! Contains relevant code for parsing tokens received from
//! Lexing/Tokenizing-stage. //! Lexing/Tokenizing-stage.
use std::{cell::RefCell, rc::Rc};
use crate::{ use crate::{
ast::parse::Parse, ast::parse::Parse,
lexer::{FullToken, Token}, lexer::{FullToken, Token},
@ -12,6 +14,7 @@ use crate::{
pub struct TokenStream<'a, 'b> { pub struct TokenStream<'a, 'b> {
ref_position: Option<&'b mut usize>, ref_position: Option<&'b mut usize>,
tokens: &'a [FullToken], tokens: &'a [FullToken],
errors: Rc<RefCell<Vec<Error>>>,
pub position: usize, pub position: usize,
} }
@ -20,6 +23,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
TokenStream { TokenStream {
ref_position: None, ref_position: None,
tokens, tokens,
errors: Rc::new(RefCell::new(Vec::new())),
position: 0, position: 0,
} }
} }
@ -38,24 +42,42 @@ impl<'a, 'b> TokenStream<'a, 'b> {
)) ))
} }
/// Records a non-fatal expected-error for the next token in-line. Useful in conjunction
/// with [`TokenStream::peek`]
pub fn expected_err_nonfatal<T: Into<String>>(&mut self, expected: T) {
let err = match self.expected_err(expected) {
Ok(e) => e,
Err(e) => e,
};
self.errors.borrow_mut().push(err);
}
/// Returns expected-error for the previous token that was already consumed. /// Returns expected-error for the previous token that was already consumed.
/// Useful in conjunction with [`TokenStream::next`] /// Useful in conjunction with [`TokenStream::next`]
pub fn expecting_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> { pub fn expecting_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> {
let next_token = self.peek().unwrap_or(Token::Eof); let next_token = self.peek().unwrap_or(Token::Eof);
let pos = self.next_token(self.position).0;
Ok(Error::Expected( Ok(Error::Expected(
expected.into(), expected.into(),
next_token, next_token,
TokenRange { TokenRange { start: pos, end: pos },
start: self.position,
end: self.position,
},
)) ))
} }
/// Records a non-fatal expected-error for the previous token that was already consumed.
/// Useful in conjunction with [`TokenStream::next`]
pub fn expecting_err_nonfatal<T: Into<String>>(&mut self, expected: T) {
let err = match self.expecting_err(expected) {
Ok(e) => e,
Err(e) => e,
};
self.errors.borrow_mut().push(err);
}
pub fn expect(&mut self, token: Token) -> Result<(), Error> { pub fn expect(&mut self, token: Token) -> Result<(), Error> {
if let Some(peeked) = self.peek() { if let (pos, Some(peeked)) = self.next_token(self.position) {
if token == peeked { if token == peeked.token {
self.position += 1; self.position = pos + 1;
Ok(()) Ok(())
} else { } else {
Err(self.expecting_err(token)?) Err(self.expecting_err(token)?)
@ -65,38 +87,41 @@ impl<'a, 'b> TokenStream<'a, 'b> {
} }
} }
pub fn next(&mut self) -> Option<Token> { pub fn expect_nonfatal(&mut self, token: Token) -> Result<(), ()> {
let value = if self.tokens.len() < self.position { if let (pos, Some(peeked)) = self.next_token(self.position) {
None if token == peeked.token {
self.position = pos + 1;
Ok(())
} else { } else {
Some(self.tokens[self.position].token.clone()) self.expecting_err_nonfatal(token);
}; Err(())
self.position += 1; }
value } else {
self.expecting_err_nonfatal(token);
Err(())
}
}
pub fn next(&mut self) -> Option<Token> {
let (position, token) = self.next_token(self.position);
self.position = position + 1;
token.map(|t| t.token.clone())
} }
pub fn previous(&mut self) -> Option<Token> { pub fn previous(&mut self) -> Option<Token> {
if (self.position as i32 - 1) < 0 { let (_, token) = self.previous_token(self.position);
None token.map(|t| t.token.clone())
} else {
Some(self.tokens[self.position - 1].token.clone())
}
} }
pub fn peek(&mut self) -> Option<Token> { pub fn peek(&mut self) -> Option<Token> {
if self.tokens.len() < self.position { let (_, token) = self.next_token(self.position);
None token.map(|t| t.token.clone())
} else {
Some(self.tokens[self.position].token.clone())
}
} }
pub fn peek2(&mut self) -> Option<Token> { pub fn peek2(&mut self) -> Option<Token> {
if self.tokens.len() < (self.position + 1) { let (pos2, _) = self.next_token(self.position);
None let (_, token) = self.next_token(pos2 + 1);
} else { token.map(|t| t.token.clone())
Some(self.tokens[self.position + 1].token.clone())
}
} }
/// Parse the next value of trait Parse. If the parse succeeded, the related /// Parse the next value of trait Parse. If the parse succeeded, the related
@ -161,6 +186,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
let clone = TokenStream { let clone = TokenStream {
ref_position: Some(&mut ref_pos), ref_position: Some(&mut ref_pos),
tokens: self.tokens, tokens: self.tokens,
errors: self.errors.clone(),
position, position,
}; };
@ -173,6 +199,29 @@ impl<'a, 'b> TokenStream<'a, 'b> {
} }
} }
pub fn parse_with<T, U>(&mut self, fun: T) -> Result<U, Error>
where
T: FnOnce(TokenStream) -> Result<U, Error>,
{
let mut ref_pos = self.position;
let position = self.position;
let clone = TokenStream {
ref_position: Some(&mut ref_pos),
tokens: self.tokens,
errors: self.errors.clone(),
position,
};
match fun(clone) {
Ok(res) => {
self.position = ref_pos.max(self.position);
Ok(res)
}
Err(e) => Err(e),
}
}
pub fn get_range(&self) -> Option<TokenRange> { pub fn get_range(&self) -> Option<TokenRange> {
self.ref_position.as_ref().map(|ref_pos| TokenRange { self.ref_position.as_ref().map(|ref_pos| TokenRange {
start: **ref_pos, start: **ref_pos,
@ -185,9 +234,56 @@ impl<'a, 'b> TokenStream<'a, 'b> {
pub fn get_range_prev(&self) -> Option<TokenRange> { pub fn get_range_prev(&self) -> Option<TokenRange> {
self.ref_position.as_ref().map(|ref_pos| TokenRange { self.ref_position.as_ref().map(|ref_pos| TokenRange {
start: **ref_pos, start: **ref_pos,
end: self.position - 1, end: self.previous_token(self.position).0,
}) })
} }
/// Gets range of the previous token only.
pub fn get_range_prev_curr(&self) -> Option<TokenRange> {
Some(TokenRange {
start: self.previous_token(self.position).0,
end: self.previous_token(self.position).0,
})
}
fn previous_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
from -= 1;
while let Some(token) = self.tokens.get(from) {
if matches!(token.token, Token::Whitespace(_) | Token::Comment(_)) {
from -= 1;
} else {
break;
}
}
(from, self.tokens.get(from))
}
fn next_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
while let Some(token) = self.tokens.get(from) {
if matches!(token.token, Token::Whitespace(_) | Token::Comment(_)) {
from += 1;
} else {
break;
}
}
(from, self.tokens.get(from))
}
pub fn errors(&self) -> Vec<Error> {
self.errors.borrow().clone()
}
pub fn next_is_whitespace(&self) -> bool {
if let Some(token) = self.tokens.get(self.position) {
if let Token::Whitespace(_) = token.token {
true
} else {
false
}
} else {
true
}
}
} }
impl Drop for TokenStream<'_, '_> { impl Drop for TokenStream<'_, '_> {
@ -217,8 +313,8 @@ impl std::ops::Add for TokenRange {
fn add(self, rhs: Self) -> Self::Output { fn add(self, rhs: Self) -> Self::Output {
TokenRange { TokenRange {
start: self.start.min(rhs.start), start: self.start.min(rhs.start).min(rhs.end),
end: self.end.min(rhs.end), end: self.end.max(rhs.end).max(rhs.start),
} }
} }
} }

View File

@ -133,7 +133,7 @@ impl mir::Expression {
allocated.extend(expr.allocate(scope)); allocated.extend(expr.allocate(scope));
allocated.extend(idx.allocate(scope)); allocated.extend(idx.allocate(scope));
} }
mir::ExprKind::Accessed(expression, _, _) => { mir::ExprKind::Accessed(expression, ..) => {
allocated.extend(expression.allocate(scope)); allocated.extend(expression.allocate(scope));
} }
mir::ExprKind::Array(expressions) => { mir::ExprKind::Array(expressions) => {
@ -151,15 +151,15 @@ impl mir::Expression {
allocated.extend(expression.allocate(scope)); allocated.extend(expression.allocate(scope));
} }
} }
mir::ExprKind::Struct(name, items) => { mir::ExprKind::Struct(key, items) => {
let (_, ty) = self.return_type(&Default::default(), scope.mod_id).unwrap(); let (_, ty) = self.return_type(&Default::default(), scope.mod_id).unwrap();
let allocation = scope let allocation = scope
.block .block
.build_named(name, Instr::Alloca(ty.get_type(scope.type_values))) .build_named(key.0.clone(), Instr::Alloca(ty.get_type(scope.type_values)))
.unwrap(); .unwrap();
allocated.push(Allocation(self.1, ty, allocation)); allocated.push(Allocation(self.1, ty, allocation));
for (field_name, expression) in items { for (field_name, expression, _) in items {
allocated.extend(expression.allocate(scope)); allocated.extend(expression.allocate(scope));
let (_, ty) = expression.return_type(&Default::default(), scope.mod_id).unwrap(); let (_, ty) = expression.return_type(&Default::default(), scope.mod_id).unwrap();

View File

@ -3,8 +3,8 @@ use reid_lib::{builder::InstructionValue, CmpPredicate, ConstValueKind, Instr, T
use crate::{ use crate::{
codegen::{ErrorKind, StackValueKind}, codegen::{ErrorKind, StackValueKind},
mir::{ mir::{
BinaryOperator, BinopDefinition, CmpOperator, FunctionDefinition, FunctionDefinitionKind, FunctionParam, implement::TypeCategory, BinaryOperator, BinopDefinition, CmpOperator, FunctionDefinition,
TypeKind, FunctionDefinitionKind, FunctionParam, TypeKind,
}, },
}; };
@ -33,6 +33,36 @@ const FLOATS: [TypeKind; 7] = [
TypeKind::F128PPC, TypeKind::F128PPC,
]; ];
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
pub enum LLVMIntrinsicKind {
Max(TypeKind),
Min(TypeKind),
Abs(TypeKind),
Memcpy(TypeKind),
Sqrt(TypeKind),
PowI(TypeKind, TypeKind),
Pow(TypeKind),
Sin(TypeKind),
Cos(TypeKind),
Tan(TypeKind),
ASin(TypeKind),
ACos(TypeKind),
ATan(TypeKind),
ATan2(TypeKind),
SinH(TypeKind),
CosH(TypeKind),
TanH(TypeKind),
Log(TypeKind),
Log2(TypeKind),
Log10(TypeKind),
Copysign(TypeKind),
Floor(TypeKind),
Ceil(TypeKind),
Trunc(TypeKind),
RoundEven(TypeKind),
Round(TypeKind),
}
const INTRINSIC_IDENT: &str = "reid.intrinsic"; const INTRINSIC_IDENT: &str = "reid.intrinsic";
const MALLOC_IDENT: &str = "malloc"; const MALLOC_IDENT: &str = "malloc";
@ -52,16 +82,38 @@ pub fn form_intrinsics() -> Vec<FunctionDefinition> {
}], }],
kind: FunctionDefinitionKind::Extern(false), kind: FunctionDefinitionKind::Extern(false),
source: None, source: None,
signature_meta: Default::default(),
}); });
intrinsics intrinsics
} }
pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDefinition> { pub fn simple_intrinsic<T: Into<String> + Clone>(
name: T,
params: Vec<T>,
ret: TypeKind,
intrinsic: LLVMIntrinsicKind,
) -> FunctionDefinition {
FunctionDefinition {
name: name.into(),
linkage_name: None,
is_pub: true,
is_imported: false,
return_type: ret.clone(),
parameters: params
.iter()
.map(|p| FunctionParam::from(p.clone(), ret.clone()))
.collect(),
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicLLVM(intrinsic, ret.clone()))),
source: None,
signature_meta: Default::default(),
}
}
pub fn get_intrinsic_assoc_functions(ty: &TypeKind) -> Vec<FunctionDefinition> {
let mut intrinsics = Vec::new();
if let TypeKind::Array(_, len) = ty { if let TypeKind::Array(_, len) = ty {
match name { intrinsics.push(FunctionDefinition {
"length" => {
return Some(FunctionDefinition {
name: "length".to_owned(), name: "length".to_owned(),
linkage_name: None, linkage_name: None,
is_pub: true, is_pub: true,
@ -74,14 +126,202 @@ pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDef
}], }],
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicConst(*len))), kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicConst(*len))),
source: None, source: None,
}) signature_meta: Default::default(),
});
}
if ty.category() == TypeCategory::Real {
intrinsics.push(simple_intrinsic(
"sqrt",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::Sqrt(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"sin",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::Sin(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"cos",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::Cos(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"tan",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::Tan(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"sinh",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::SinH(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"cosh",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::CosH(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"tanh",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::TanH(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"asin",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::ASin(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"acos",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::ACos(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"atan",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::ATan(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"atan2",
vec!["self", "other"],
ty.clone(),
LLVMIntrinsicKind::ATan2(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"log",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::Log(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"log2",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::Log2(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"log10",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::Log10(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"floor",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::Floor(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"ceil",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::Ceil(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"trunc",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::Trunc(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"round",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::Round(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"even",
vec!["self"],
ty.clone(),
LLVMIntrinsicKind::RoundEven(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"pow",
vec!["self", "exponent"],
ty.clone(),
LLVMIntrinsicKind::Pow(ty.clone()),
));
intrinsics.push(FunctionDefinition {
name: "powi".to_owned(),
linkage_name: None,
is_pub: true,
is_imported: false,
return_type: ty.clone(),
parameters: vec![
FunctionParam {
name: String::from("self"),
ty: ty.clone(),
meta: Default::default(),
},
FunctionParam {
name: String::from("exponent"),
ty: TypeKind::U32,
meta: Default::default(),
},
],
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicLLVM(
LLVMIntrinsicKind::PowI(ty.clone(), TypeKind::U32),
ty.clone(),
))),
source: None,
signature_meta: Default::default(),
});
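// Note: unlike `pow` above, `powi` takes an integer exponent (declared here as
// `u32`), which is why it is wired to LLVMIntrinsicKind::PowI with a separate
// exponent type instead of the floating-point `Pow` intrinsic.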
}
match ty.category() {
TypeCategory::Integer | TypeCategory::Real | TypeCategory::Bool => {
intrinsics.push(simple_intrinsic(
"max",
vec!["self", "other"],
ty.clone(),
LLVMIntrinsicKind::Max(ty.clone()),
));
intrinsics.push(simple_intrinsic(
"min",
vec!["self", "other"],
ty.clone(),
LLVMIntrinsicKind::Min(ty.clone()),
));
if ty.signed() {
intrinsics.push(FunctionDefinition {
name: "abs".to_owned(),
linkage_name: None,
is_pub: true,
is_imported: false,
return_type: ty.clone(),
parameters: vec![FunctionParam {
name: String::from("self"),
ty: ty.clone(),
meta: Default::default(),
}],
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicSimpleUnaryInstr({
let ty = ty.clone();
|scope, param| {
let intrinsic = scope.get_intrinsic(LLVMIntrinsicKind::Abs(ty));
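// The boolean constant below is the intrinsic's flag argument; for LLVM's
// `llvm.abs` this appears to be the `is_int_min_poison` flag, kept as a
// constant `false` here.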
let constant = scope.block.build(Instr::Constant(ConstValueKind::Bool(false))).unwrap();
let value = scope
.block
.build(Instr::FunctionCall(intrinsic, vec![param, constant]))
.unwrap();
value
}
}))),
source: None,
signature_meta: Default::default(),
});
}
}
_ => {}
}
intrinsics.push(FunctionDefinition {
name: "sizeof".to_owned(),
linkage_name: None,
is_pub: true,
@ -90,8 +330,9 @@ pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDef
parameters: Vec::new(),
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicSizeOf(ty.clone()))),
source: None,
signature_meta: Default::default(),
});
intrinsics.push(FunctionDefinition {
name: "malloc".to_owned(),
linkage_name: None,
is_pub: true,
@ -104,8 +345,38 @@ pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDef
}],
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicMalloc(ty.clone()))),
source: None,
signature_meta: Default::default(),
});
intrinsics.push(FunctionDefinition {
name: "memcpy".to_owned(),
linkage_name: None,
is_pub: true,
is_imported: false,
return_type: TypeKind::Void,
parameters: vec![
FunctionParam {
name: String::from("destination"),
ty: TypeKind::UserPtr(Box::new(ty.clone())),
meta: Default::default(),
},
FunctionParam {
name: String::from("source"),
ty: TypeKind::UserPtr(Box::new(ty.clone())),
meta: Default::default(),
},
FunctionParam {
name: String::from("length"),
ty: TypeKind::U64,
meta: Default::default(),
},
],
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicMemcpy(ty.clone()))),
source: None,
signature_meta: Default::default(),
});
intrinsics.push(FunctionDefinition {
name: "null".to_owned(), name: "null".to_owned(),
linkage_name: None, linkage_name: None,
is_pub: true, is_pub: true,
@ -114,9 +385,14 @@ pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDef
parameters: Vec::new(), parameters: Vec::new(),
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicNullPtr(ty.clone()))), kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicNullPtr(ty.clone()))),
source: None, source: None,
}), signature_meta: Default::default(),
_ => None, });
}
intrinsics
}
pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDefinition> {
get_intrinsic_assoc_functions(ty).into_iter().find(|f| f.name == name)
} }
fn simple_binop_def<T: Clone + 'static>(op: BinaryOperator, ty: &TypeKind, fun: T) -> BinopDefinition
@ -136,7 +412,7 @@ where
meta: Default::default(),
},
return_type: ty.clone(),
fn_kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicSimpleBinaryInstr(fun))),
meta: Default::default(),
exported: false,
}
@ -159,7 +435,7 @@ where
meta: Default::default(),
},
return_type: lhs.clone(),
fn_kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicSimpleBinaryInstr(fun))),
meta: Default::default(),
exported: false,
}
@ -247,26 +523,17 @@ pub fn form_intrinsic_binops() -> Vec<BinopDefinition> {
scope.block.build(Instr::XOr(lhs, rhs)).unwrap()
}));
if ty.signed() {
intrinsics.push(complex_binop_def(BitshiftRight, &ty, &ty, |scope, lhs, rhs| {
scope.block.build(Instr::ShiftRightArithmetic(lhs, rhs)).unwrap()
}));
} else {
intrinsics.push(complex_binop_def(BitshiftRight, &ty, &ty, |scope, lhs, rhs| {
scope.block.build(Instr::ShiftRightLogical(lhs, rhs)).unwrap()
}));
}
intrinsics.push(complex_binop_def(BitshiftLeft, &ty, &ty, |scope, lhs, rhs| {
scope.block.build(Instr::ShiftLeft(lhs, rhs)).unwrap()
}));
}
for ty in INTEGERS.iter().chain(&[TypeKind::Bool, TypeKind::Char]) {
intrinsics.push(boolean_binop_def(Cmp(CmpOperator::EQ), &ty, |scope, lhs, rhs| {
@ -345,12 +612,37 @@ macro_rules! intrinsic_debug {
}

#[derive(Clone)]
pub struct IntrinsicSimpleUnaryInstr<T>(T)
where
T: FnOnce(&mut Scope, InstructionValue) -> InstructionValue;
impl<T> std::fmt::Debug for IntrinsicSimpleUnaryInstr<T>
where
T: FnOnce(&mut Scope, InstructionValue) -> InstructionValue,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("IntrinsicSimpleUnaryInstr").finish()
}
}
impl<T: Clone> IntrinsicFunction for IntrinsicSimpleUnaryInstr<T>
where
T: FnOnce(&mut Scope, InstructionValue) -> InstructionValue,
{
fn codegen<'b, 'c>(&self, scope: &mut Scope<'b, 'c>, params: &[StackValue]) -> Result<StackValue, ErrorKind> {
let param = params.get(0).unwrap();
let instr = self.clone().0(scope, param.instr());
Ok(StackValue(StackValueKind::Literal(instr), param.1.clone()))
}
}
#[derive(Clone)]
pub struct IntrinsicSimpleBinaryInstr<T>(T)
where
T: FnOnce(&mut Scope, InstructionValue, InstructionValue) -> InstructionValue;
intrinsic_debug!(IntrinsicSimpleBinaryInstr<T>, "IntrinsicSimpleBinaryInstr");
impl<T: Clone> IntrinsicFunction for IntrinsicSimpleBinaryInstr<T>
where
T: FnOnce(&mut Scope, InstructionValue, InstructionValue) -> InstructionValue,
{
@ -386,12 +678,42 @@ impl IntrinsicFunction for IntrinsicSizeOf {
fn codegen<'ctx, 'a>(&self, scope: &mut Scope<'ctx, 'a>, _: &[StackValue]) -> Result<StackValue, ErrorKind> {
let instr = scope
.block
.build(Instr::Constant(reid_lib::ConstValueKind::U64(
self.0.size_of(&scope.type_map) / 8,
)))
.unwrap();
Ok(StackValue(StackValueKind::Literal(instr), self.0.clone()))
}
}
#[derive(Clone, Debug)]
pub struct IntrinsicMemcpy(TypeKind);
impl IntrinsicFunction for IntrinsicMemcpy {
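// Lowers `memcpy(destination, source, length)`: the element count in `length`
// is multiplied by the element size in bytes before calling the LLVM memcpy
// intrinsic, with the trailing constant `false` passed as its volatile flag.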
fn codegen<'ctx, 'a>(&self, scope: &mut Scope<'ctx, 'a>, params: &[StackValue]) -> Result<StackValue, ErrorKind> {
let dest = params.get(0).unwrap();
let src = params.get(1).unwrap();
let length = params.get(2).unwrap();
let intrinsic = scope.get_intrinsic(LLVMIntrinsicKind::Memcpy(TypeKind::UserPtr(Box::new(self.0.clone()))));
let sizeof = scope
.block
.build(Instr::Constant(ConstValueKind::U64(
self.0.size_of(&scope.type_map) / 8,
)))
.unwrap();
let bytes = scope.block.build(Instr::Mul(sizeof, length.instr())).unwrap();
let params = vec![
dest.instr(),
src.instr(),
bytes,
scope.block.build(Instr::Constant(ConstValueKind::Bool(false))).unwrap(),
];
let value = scope.block.build(Instr::FunctionCall(intrinsic, params)).unwrap();
Ok(StackValue(StackValueKind::Literal(value), TypeKind::Void))
}
}
#[derive(Clone, Debug)]
pub struct IntrinsicMalloc(TypeKind);
impl IntrinsicFunction for IntrinsicMalloc {
@ -404,7 +726,9 @@ impl IntrinsicFunction for IntrinsicMalloc {
let sizeof = scope
.block
.build(Instr::Constant(ConstValueKind::U64(
self.0.size_of(&scope.type_map) / 8,
)))
.unwrap();
let bytes = scope.block.build(Instr::Mul(sizeof, amount.instr())).unwrap();
let instr = scope.block.build(Instr::FunctionCall(function, vec![bytes])).unwrap();
@ -430,6 +754,7 @@ impl IntrinsicFunction for IntrinsicNullPtr {
))
}
}

#[derive(Clone, Debug)]
pub struct IntrinsicConst(u64);
impl IntrinsicFunction for IntrinsicConst {
@ -439,6 +764,23 @@ impl IntrinsicFunction for IntrinsicConst {
}
}
#[derive(Clone, Debug)]
pub struct IntrinsicLLVM(LLVMIntrinsicKind, TypeKind);
impl IntrinsicFunction for IntrinsicLLVM {
fn codegen<'ctx, 'a>(&self, scope: &mut Scope<'ctx, 'a>, params: &[StackValue]) -> Result<StackValue, ErrorKind> {
let intrinsic = scope.get_intrinsic(self.0.clone());
let value = scope
.block
.build(Instr::FunctionCall(
intrinsic,
params.iter().map(|p| p.instr()).collect(),
))
.unwrap();
Ok(StackValue(StackValueKind::Literal(value), self.1.clone()))
}
}
// impl IntrinsicFunction for IntrinsicIAdd {
//     fn codegen<'ctx, 'a>(
//         &self,


@ -1,4 +1,8 @@
use std::{
cell::RefCell,
collections::{HashMap, HashSet},
rc::Rc,
};
use allocator::{Allocator, AllocatorScope};
use intrinsics::*;
@ -144,6 +148,7 @@ impl mir::Module {
let mut types = HashMap::new();
let mut type_values = HashMap::new();
let mut debug_types = HashMap::new();
let mut type_map = HashMap::new();
macro_rules! insert_debug {
($kind:expr) => {
@ -153,8 +158,7 @@ impl mir::Module {
&compile_unit,
&debug,
&debug_types,
&type_map,
self.module_id,
&self.tokens,
&modules,
@ -180,17 +184,49 @@ impl mir::Module {
let mut typedefs = self.typedefs.clone();
typedefs.sort_by(|a, b| b.source_module.cmp(&a.source_module));
// Since we know by this point that no types are recursive, we can
// somewhat easily sort the type-definitions such that we can process
// the ones with no dependencies first, and later the ones that depend
// on the earlier ones.
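// Illustrative example (not from this changeset): given
//   struct Inner { x: u32 }
//   struct Outer { inner: Inner }
// `Inner` has no custom-type fields and is emitted immediately, while
// `Outer` is pushed back until `Inner`'s type-key has been seen.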
let mut typekeys_seen = HashSet::new();
let mut typedefs_sorted = Vec::new();
let mut typedefs_left = typedefs.clone();
typedefs_left.reverse();
while let Some(typedef) = typedefs_left.pop() {
match &typedef.kind {
TypeDefinitionKind::Struct(StructType(fields)) => {
let mut is_ok = true;
for field in fields {
match &field.1 {
TypeKind::CustomType(type_key) => {
if !typekeys_seen.contains(type_key) {
is_ok = false;
break;
}
}
_ => {}
}
}
if is_ok {
typekeys_seen.insert(CustomTypeKey(typedef.name.clone(), typedef.source_module));
typedefs_sorted.push(typedef);
} else {
typedefs_left.insert(0, typedef.clone());
}
}
};
}
for typedef in typedefs_sorted {
let type_key = CustomTypeKey(typedef.name.clone(), typedef.source_module);
type_map.insert(type_key.clone(), typedef.clone());
let type_value = match &typedef.kind {
TypeDefinitionKind::Struct(StructType(fields)) => {
module.custom_type(CustomTypeKind::NamedStruct(NamedStruct(
typedef.name.clone(),
fields
.iter()
.map(|StructField(_, t, _)| t.get_type(&type_values))
.collect(),
)))
@ -198,6 +234,7 @@ impl mir::Module {
};
types.insert(type_value, typedef.clone());
type_values.insert(type_key.clone(), type_value);
insert_debug!(&TypeKind::CustomType(type_key.clone()));
}
@ -380,6 +417,7 @@ impl mir::Module {
functions: &functions, functions: &functions,
types: &types, types: &types,
type_values: &type_values, type_values: &type_values,
type_map: &type_map,
globals: &globals, globals: &globals,
stack_values: HashMap::new(), stack_values: HashMap::new(),
debug: Some(Debug { debug: Some(Debug {
@ -389,6 +427,7 @@ impl mir::Module {
}), }),
binops: &binops, binops: &binops,
allocator: Rc::new(RefCell::new(allocator)), allocator: Rc::new(RefCell::new(allocator)),
llvm_intrinsics: Rc::new(RefCell::new(HashMap::new())),
}; };
binop binop
@ -457,6 +496,7 @@ impl mir::Module {
functions: &functions, functions: &functions,
types: &types, types: &types,
type_values: &type_values, type_values: &type_values,
type_map: &type_map,
stack_values: HashMap::new(), stack_values: HashMap::new(),
debug: Some(Debug { debug: Some(Debug {
info: &debug, info: &debug,
@ -466,6 +506,7 @@ impl mir::Module {
globals: &globals, globals: &globals,
binops: &binops, binops: &binops,
allocator: Rc::new(RefCell::new(allocator)), allocator: Rc::new(RefCell::new(allocator)),
llvm_intrinsics: Rc::new(RefCell::new(HashMap::new())),
}; };
mir_function mir_function
@ -518,6 +559,7 @@ impl mir::Module {
functions: &functions, functions: &functions,
types: &types, types: &types,
type_values: &type_values, type_values: &type_values,
type_map: &type_map,
stack_values: HashMap::new(), stack_values: HashMap::new(),
debug: Some(Debug { debug: Some(Debug {
info: &debug, info: &debug,
@ -527,6 +569,7 @@ impl mir::Module {
globals: &globals, globals: &globals,
binops: &binops, binops: &binops,
allocator: Rc::new(RefCell::new(allocator)), allocator: Rc::new(RefCell::new(allocator)),
llvm_intrinsics: Rc::new(RefCell::new(HashMap::new())),
}; };
mir_function mir_function
@ -728,7 +771,6 @@ impl mir::Statement {
mir::StmtKind::Let(NamedVariableRef(ty, name, meta), mutable, expression) => {
let value = expression.codegen(scope, &state)?.unwrap();
let alloca = scope
.allocate(meta, &value.1)
.unwrap()
@ -1161,7 +1203,7 @@ impl mir::Expression {
TypeKind::Array(Box::new(elem_ty_kind), instr_list.len() as u64), TypeKind::Array(Box::new(elem_ty_kind), instr_list.len() as u64),
)) ))
} }
mir::ExprKind::Accessed(expression, type_kind, field, _) => {
let struct_val = expression.codegen(scope, &state.load(false))?.unwrap();
let TypeKind::CodegenPtr(inner) = &struct_val.1 else { let TypeKind::CodegenPtr(inner) = &struct_val.1 else {
@ -1200,10 +1242,9 @@ impl mir::Expression {
)) ))
} }
} }
mir::ExprKind::Struct(key, items) => {
let ty = Type::CustomType({
let Some(a) = scope.type_values.get(&key) else {
return Ok(None);
};
*a
@ -1212,20 +1253,20 @@ impl mir::Expression {
let TypeDefinition {
kind: TypeDefinitionKind::Struct(struct_ty),
..
} = scope.types.get(scope.type_values.get(&key).unwrap()).unwrap();
let indices = struct_ty.0.iter().enumerate();
let load_n = format!("{:?}.load", key);
let struct_ptr = scope
.allocate(&self.1, &TypeKind::CustomType(key.clone()))
.unwrap()
.maybe_location(&mut scope.block, location.clone());
for (field_n, exp, _) in items {
let gep_n = format!("{:?}.{}.gep", key, field_n);
let store_n = format!("{:?}.{}.store", key, field_n);
let i = indices.clone().find(|(_, f)| f.0 == *field_n).unwrap().0;
let elem_ptr = scope
@ -1246,7 +1287,7 @@ impl mir::Expression {
Some(StackValue(
StackValueKind::Literal(struct_val),
TypeKind::CustomType(key.clone()),
))
}
mir::ExprKind::Borrow(expr, mutable) => {
@ -1312,45 +1353,45 @@ impl mir::Expression {
if val.1 == *type_kind {
Some(val)
} else {
let (ty, other) = if !state.should_load {
let TypeKind::CodegenPtr(inner) = &val.1 else {
panic!();
};
(*inner.clone(), TypeKind::CodegenPtr(Box::new(type_kind.clone())))
} else {
(val.1.clone(), type_kind.clone())
};
match (&ty, type_kind) {
(TypeKind::UserPtr(_), TypeKind::UserPtr(_)) => Some(StackValue(
val.0.derive(
scope
.block
.build(Instr::BitCast(val.instr(), other.get_type(scope.type_values)))
.unwrap(),
),
other.clone(),
)),
(TypeKind::Borrow(ty1, _), TypeKind::UserPtr(ty2)) => {
if let TypeKind::Array(ty1, _) = ty1.as_ref() {
if ty1 == ty2 {
Some(StackValue(
val.0.derive(
scope
.block
.build(Instr::BitCast(val.instr(), other.get_type(scope.type_values)))
.unwrap(),
),
other,
))
} else {
return Err(ErrorKind::Null).unwrap();
}
} else {
return Err(ErrorKind::Null).unwrap();
}
}
(TypeKind::Char, TypeKind::U8)
| (TypeKind::U8, TypeKind::Char)
| (TypeKind::U8, TypeKind::I8) => Some(StackValue(
val.0.derive(
@ -1362,8 +1403,7 @@ impl mir::Expression {
type_kind.clone(),
)),
_ => {
let cast_instr = ty
.get_type(scope.type_values)
.cast_instruction(val.instr(), &type_kind.get_type(scope.type_values))
.unwrap();
@ -1379,11 +1419,30 @@ impl mir::Expression {
mir::ExprKind::AssociatedFunctionCall(ty, call) => codegen_function_call(Some(ty), call, scope, state)?, mir::ExprKind::AssociatedFunctionCall(ty, call) => codegen_function_call(Some(ty), call, scope, state)?,
mir::ExprKind::GlobalRef(global_name, ty) => { mir::ExprKind::GlobalRef(global_name, ty) => {
let global_value = scope.globals.get(global_name).unwrap(); let global_value = scope.globals.get(global_name).unwrap();
let value = scope.block.build(Instr::GetGlobal(global_value.clone())).unwrap();
if !state.should_load {
let allocated = scope
.block
.build(Instr::Alloca(ty.get_type(scope.type_values)))
.unwrap();
scope
.block
.build(Instr::Store(allocated, value))
.unwrap()
.maybe_location(&mut scope.block, location.clone());
let a = Some(StackValue(
StackValueKind::Literal(allocated),
TypeKind::CodegenPtr(Box::new(ty.clone())),
));
a
} else {
let a = Some(StackValue(StackValueKind::Literal(value), ty.clone()));
a
}
} }
}; };
if let Some(value) = &value { if let Some(value) = &value {


@ -1,12 +1,14 @@
use std::{cell::RefCell, collections::HashMap, mem, rc::Rc};
use reid_lib::{
builder::{FunctionValue, GlobalValue, InstructionValue, TypeValue},
debug_information::{DebugInformation, DebugLocation, DebugScopeValue, DebugTypeValue},
intrinsics::LLVMIntrinsic,
Block, Context, Function, Instr, Module,
};
use crate::{
codegen::intrinsics::LLVMIntrinsicKind,
lexer::FullToken,
mir::{
pass::{AssociatedFunctionKey, BinopKey}, pass::{AssociatedFunctionKey, BinopKey},
@ -26,6 +28,7 @@ pub struct Scope<'ctx, 'scope> {
pub(super) block: Block<'ctx>, pub(super) block: Block<'ctx>,
pub(super) types: &'scope HashMap<TypeValue, TypeDefinition>, pub(super) types: &'scope HashMap<TypeValue, TypeDefinition>,
pub(super) type_values: &'scope HashMap<CustomTypeKey, TypeValue>, pub(super) type_values: &'scope HashMap<CustomTypeKey, TypeValue>,
pub(super) type_map: &'scope HashMap<CustomTypeKey, TypeDefinition>,
pub(super) assoc_functions: &'scope HashMap<AssociatedFunctionKey, ScopeFunctionKind<'ctx>>, pub(super) assoc_functions: &'scope HashMap<AssociatedFunctionKey, ScopeFunctionKind<'ctx>>,
pub(super) functions: &'scope HashMap<String, ScopeFunctionKind<'ctx>>, pub(super) functions: &'scope HashMap<String, ScopeFunctionKind<'ctx>>,
pub(super) binops: &'scope HashMap<BinopKey, StackBinopDefinition<'ctx>>, pub(super) binops: &'scope HashMap<BinopKey, StackBinopDefinition<'ctx>>,
@ -33,6 +36,7 @@ pub struct Scope<'ctx, 'scope> {
pub(super) globals: &'scope HashMap<String, GlobalValue>, pub(super) globals: &'scope HashMap<String, GlobalValue>,
pub(super) debug: Option<Debug<'ctx>>, pub(super) debug: Option<Debug<'ctx>>,
pub(super) allocator: Rc<RefCell<Allocator>>, pub(super) allocator: Rc<RefCell<Allocator>>,
pub(super) llvm_intrinsics: Rc<RefCell<HashMap<LLVMIntrinsicKind, FunctionValue>>>,
} }
impl<'ctx, 'a> Scope<'ctx, 'a> { impl<'ctx, 'a> Scope<'ctx, 'a> {
@ -49,11 +53,13 @@ impl<'ctx, 'a> Scope<'ctx, 'a> {
functions: self.functions, functions: self.functions,
types: self.types, types: self.types,
type_values: self.type_values, type_values: self.type_values,
type_map: self.type_map,
stack_values: self.stack_values.clone(), stack_values: self.stack_values.clone(),
debug: self.debug.clone(), debug: self.debug.clone(),
allocator: self.allocator.clone(), allocator: self.allocator.clone(),
globals: self.globals, globals: self.globals,
binops: self.binops, binops: self.binops,
llvm_intrinsics: self.llvm_intrinsics.clone(),
} }
} }
@ -72,6 +78,49 @@ impl<'ctx, 'a> Scope<'ctx, 'a> {
pub fn allocate(&self, meta: &Metadata, ty: &TypeKind) -> Option<InstructionValue> {
self.allocator.borrow_mut().allocate(meta, ty)
}
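/// Returns the `FunctionValue` for the given LLVM intrinsic kind, declaring
/// it in the module on first use and caching it in `llvm_intrinsics` so that
/// each intrinsic is only declared once per module.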
pub fn get_intrinsic(&self, kind: LLVMIntrinsicKind) -> FunctionValue {
let mut intrinsics = self.llvm_intrinsics.borrow_mut();
if let Some(fun) = intrinsics.get(&kind) {
*fun
} else {
let intrinsic = self
.module
.intrinsic(match &kind {
LLVMIntrinsicKind::Max(ty) => LLVMIntrinsic::Max(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Min(ty) => LLVMIntrinsic::Min(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Abs(ty) => LLVMIntrinsic::Abs(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Memcpy(ty) => LLVMIntrinsic::Memcpy(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Sqrt(ty) => LLVMIntrinsic::Sqrt(ty.get_type(self.type_values)),
LLVMIntrinsicKind::PowI(lhs, rhs) => {
LLVMIntrinsic::PowI(lhs.get_type(self.type_values), rhs.get_type(self.type_values))
}
LLVMIntrinsicKind::Pow(ty) => LLVMIntrinsic::Pow(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Sin(ty) => LLVMIntrinsic::Sin(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Cos(ty) => LLVMIntrinsic::Cos(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Tan(ty) => LLVMIntrinsic::Tan(ty.get_type(self.type_values)),
LLVMIntrinsicKind::ASin(ty) => LLVMIntrinsic::ASin(ty.get_type(self.type_values)),
LLVMIntrinsicKind::ACos(ty) => LLVMIntrinsic::ACos(ty.get_type(self.type_values)),
LLVMIntrinsicKind::ATan(ty) => LLVMIntrinsic::ATan(ty.get_type(self.type_values)),
LLVMIntrinsicKind::ATan2(ty) => LLVMIntrinsic::ATan2(ty.get_type(self.type_values)),
LLVMIntrinsicKind::SinH(ty) => LLVMIntrinsic::SinH(ty.get_type(self.type_values)),
LLVMIntrinsicKind::CosH(ty) => LLVMIntrinsic::CosH(ty.get_type(self.type_values)),
LLVMIntrinsicKind::TanH(ty) => LLVMIntrinsic::TanH(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Log(ty) => LLVMIntrinsic::Log(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Log2(ty) => LLVMIntrinsic::Log2(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Log10(ty) => LLVMIntrinsic::Log10(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Copysign(ty) => LLVMIntrinsic::Copysign(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Floor(ty) => LLVMIntrinsic::Floor(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Ceil(ty) => LLVMIntrinsic::Ceil(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Trunc(ty) => LLVMIntrinsic::Trunc(ty.get_type(self.type_values)),
LLVMIntrinsicKind::RoundEven(ty) => LLVMIntrinsic::RoundEven(ty.get_type(self.type_values)),
LLVMIntrinsicKind::Round(ty) => LLVMIntrinsic::Round(ty.get_type(self.type_values)),
})
.unwrap();
intrinsics.insert(kind, intrinsic.clone());
intrinsic
}
}
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]


@ -109,8 +109,7 @@ impl TypeKind {
&debug.scope, &debug.scope,
debug.info, debug.info,
debug.types, debug.types,
scope.type_values, scope.type_map,
scope.types,
scope.module_id, scope.module_id,
scope.tokens, scope.tokens,
scope.modules, scope.modules,
@ -122,8 +121,7 @@ impl TypeKind {
scope: &DebugScopeValue, scope: &DebugScopeValue,
debug_info: &DebugInformation, debug_info: &DebugInformation,
debug_types: &HashMap<TypeKind, DebugTypeValue>, debug_types: &HashMap<TypeKind, DebugTypeValue>,
type_values: &HashMap<CustomTypeKey, TypeValue>, type_map: &HashMap<CustomTypeKey, TypeDefinition>,
types: &HashMap<TypeValue, TypeDefinition>,
local_mod: SourceModuleId, local_mod: SourceModuleId,
tokens: &Vec<FullToken>, tokens: &Vec<FullToken>,
modules: &HashMap<SourceModuleId, ModuleCodegen>, modules: &HashMap<SourceModuleId, ModuleCodegen>,
@ -142,13 +140,12 @@ impl TypeKind {
scope, scope,
debug_info, debug_info,
debug_types, debug_types,
type_values, type_map,
types,
local_mod, local_mod,
tokens, tokens,
modules, modules,
), ),
size_bits: self.size_of(), size_bits: self.size_of(type_map),
}) })
} }
TypeKind::Array(elem_ty, len) => { TypeKind::Array(elem_ty, len) => {
@ -156,21 +153,20 @@ impl TypeKind {
scope, scope,
debug_info, debug_info,
debug_types, debug_types,
type_values, type_map,
types,
local_mod, local_mod,
tokens, tokens,
modules, modules,
); );
DebugTypeData::Array(DebugArrayType { DebugTypeData::Array(DebugArrayType {
size_bits: self.size_of(), size_bits: self.size_of(type_map),
align_bits: self.alignment(), align_bits: self.alignment(),
element_type: elem_ty, element_type: elem_ty,
length: *len, length: *len,
}) })
} }
TypeKind::CustomType(key) => { TypeKind::CustomType(key) => {
let typedef = types.get(type_values.get(key).unwrap()).unwrap(); let typedef = type_map.get(key).unwrap();
match &typedef.kind { match &typedef.kind {
TypeDefinitionKind::Struct(struct_type) => { TypeDefinitionKind::Struct(struct_type) => {
let mut fields = Vec::new(); let mut fields = Vec::new();
@ -186,21 +182,20 @@ impl TypeKind {
name: field.0.clone(), name: field.0.clone(),
scope: scope.clone(), scope: scope.clone(),
pos: location.map(|l| l.pos), pos: location.map(|l| l.pos),
size_bits: field.1.size_of(), size_bits: field.1.size_of(type_map),
offset: size_bits, offset: size_bits,
flags: DwarfFlags, flags: DwarfFlags,
ty: field.1.get_debug_type_hard( ty: field.1.get_debug_type_hard(
scope, scope,
debug_info, debug_info,
debug_types, debug_types,
type_values, type_map,
types,
local_mod, local_mod,
tokens, tokens,
modules, modules,
), ),
}); });
size_bits += field.1.size_of(); size_bits += field.1.size_of(type_map);
} }
{ {
let location = if typedef.source_module != local_mod { let location = if typedef.source_module != local_mod {
@ -222,7 +217,7 @@ impl TypeKind {
} }
_ => DebugTypeData::Basic(DebugBasicType { _ => DebugTypeData::Basic(DebugBasicType {
name, name,
size_bits: self.size_of(), size_bits: self.size_of(type_map),
encoding: match self { encoding: match self {
TypeKind::Bool => DwarfEncoding::Boolean, TypeKind::Bool => DwarfEncoding::Boolean,
TypeKind::I8 => DwarfEncoding::SignedChar, TypeKind::I8 => DwarfEncoding::SignedChar,


@ -1,6 +1,7 @@
use std::{ use std::{
collections::HashMap, collections::HashMap,
fmt::{Debug, Write}, fmt::{Debug, Write},
path::PathBuf,
}; };
use crate::{ use crate::{
@ -50,7 +51,7 @@ impl ErrorKind {
} }
impl ErrorKind {
pub fn get_meta(&self) -> Metadata {
match &self {
ErrorKind::LexerError(error) => error.metadata, ErrorKind::LexerError(error) => error.metadata,
ErrorKind::ParserError(error) => error.metadata, ErrorKind::ParserError(error) => error.metadata,
@ -63,6 +64,18 @@ impl ErrorKind {
ErrorKind::MacroError(error) => error.metadata, ErrorKind::MacroError(error) => error.metadata,
} }
} }
pub fn get_type_str(&self) -> &str {
match self {
ErrorKind::LexerError(_) => "lexer",
ErrorKind::ParserError(_) => "parser",
ErrorKind::TypeCheckError(_) => "typechecker",
ErrorKind::TypeInferenceError(_) => "type-inferrer",
ErrorKind::LinkerError(_) => "linker",
ErrorKind::MacroError(_) => "macro-pass",
ErrorKind::CodegenError(_) => "codegen",
}
}
} }
impl PartialOrd for ErrorKind { impl PartialOrd for ErrorKind {
@ -83,12 +96,36 @@ pub struct ErrorModule {
#[derive(Debug, Clone, PartialEq, Eq, Default)] #[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct ErrorModules { pub struct ErrorModules {
pub(super) module_map: HashMap<mir::SourceModuleId, ErrorModule>, pub(super) module_map: HashMap<mir::SourceModuleId, ErrorModule>,
pub(super) source_id_map: HashMap<PathBuf, mir::SourceModuleId>,
module_counter: mir::SourceModuleId, module_counter: mir::SourceModuleId,
} }
impl ErrorModules {
pub fn add_module<T: Into<String>>(
&mut self,
name: T,
path: Option<PathBuf>,
external_module_id: Option<SourceModuleId>,
) -> Option<mir::SourceModuleId> {
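// If this path has been registered before, its existing module id is reused;
// otherwise either the externally supplied id is adopted (bumping the counter
// past it) or a fresh id is allocated from the counter.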
let module_id = path.as_ref().and_then(|p| self.source_id_map.get(p));
if let Some(module_id) = module_id {
Some(*module_id)
} else {
let id = if let Some(module_id) = external_module_id {
self.module_counter = SourceModuleId(module_id.0.max(self.module_counter.0));
if let Some(_) = self.module_map.get(&module_id) {
panic!("Can not use external module id: Module already exists!")
}
module_id
} else {
self.module_counter.increment()
};
if let Some(path) = path {
self.source_id_map.insert(path, id);
}
self.module_map.insert( self.module_map.insert(
id, id,
ErrorModule { ErrorModule {
@ -99,6 +136,7 @@ impl ErrorModules {
); );
Some(id) Some(id)
} }
}
pub fn set_tokens(&mut self, id: mir::SourceModuleId, tokens: Vec<FullToken>) { pub fn set_tokens(&mut self, id: mir::SourceModuleId, tokens: Vec<FullToken>) {
if let Some(module) = self.module_map.get_mut(&id) { if let Some(module) = self.module_map.get_mut(&id) {
@ -120,7 +158,7 @@ impl ErrorModules {
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub struct ReidError {
map: ErrorModules,
pub errors: Vec<ErrorKind>,
} }
impl ReidError { impl ReidError {
@ -169,6 +207,10 @@ impl ReidError {
pub fn from_kind(errors: Vec<ErrorKind>, map: ErrorModules) -> ReidError { pub fn from_kind(errors: Vec<ErrorKind>, map: ErrorModules) -> ReidError {
ReidError { map, errors } ReidError { map, errors }
} }
pub fn extend(&mut self, other: ReidError) {
self.errors.extend(other.errors);
}
} }
impl std::error::Error for ReidError {} impl std::error::Error for ReidError {}
@ -185,9 +227,7 @@ impl std::fmt::Display for ReidError {
let module = self.map.module(&meta.source_module_id); let module = self.map.module(&meta.source_module_id);
let position = if let Some(module) = module { let position = if let Some(module) = module {
if let Some(tokens) = &module.tokens { if let Some(tokens) = &module.tokens {
meta.range.into_position(tokens).or(meta.position.map(|p| (p, p)))
} else if let Some(position) = meta.position { } else if let Some(position) = meta.position {
Some((position, position)) Some((position, position))
} else { } else {
@ -237,6 +277,11 @@ impl TokenRange {
.take(self.end + 1 - self.start) .take(self.end + 1 - self.start)
.collect::<Vec<_>>() .collect::<Vec<_>>()
} }
pub fn into_position<'v>(&self, tokens: &'v Vec<FullToken>) -> Option<(Position, Position)> {
let tokens = self.into_tokens(tokens);
get_position(&tokens)
}
} }
fn get_position(tokens: &Vec<&FullToken>) -> Option<(Position, Position)> { fn get_position(tokens: &Vec<&FullToken>) -> Option<(Position, Position)> {


@ -26,12 +26,11 @@ impl LDRunner {
let dyn_linker_path = find_objectfile(&self.dynamic_linker);
let crt1_path = find_objectfile("crt1.o");
#[cfg(feature = "log_output")]
println!("LDRunner: Using dynamic linker at: {:?}", dyn_linker_path);
let mut ld = Command::new(&self.command);
ld.arg("-dynamic-linker").arg(dyn_linker_path).arg(crt1_path);
for library in &self.libraries {
ld.arg(format!("-l{}", library));
@ -41,22 +40,21 @@
.arg("-o")
.arg(out_path.to_str().unwrap());
#[cfg(feature = "log_output")]
println!(
"LDRunner: Executing linker to objfile at {:?} => {:?}",
input_path, out_path
);
#[cfg(feature = "log_output")]
dbg!(&ld);
ld.spawn().expect("Unable to execute ld!");
thread::sleep(Duration::from_millis(100));
#[cfg(feature = "log_output")]
println!("Setting executable bit to {:?}..", out_path);
Command::new("chmod").arg("+x").arg(out_path).spawn().unwrap();
thread::sleep(Duration::from_millis(100));
}
}


@ -8,22 +8,36 @@
//! Much of the syntax in Reid is directly inspired by rust, but mostly it is //! Much of the syntax in Reid is directly inspired by rust, but mostly it is
//! driven by simplicity. //! driven by simplicity.
//! //!
//! Specifications and a bunch of [documentation for the language can be found
//! here](./documentation/).
//!
//! An example of a real whole program (a CPU pathtracer) can be found [in
//! examples/cpu_raytracer.reid](./examples/cpu_raytracer.reid), go have a look!
//!
//! Reid is currently able to (non-exhaustively):
//! - Do basic algebra binary and unary-operations (e.g. Add, Sub, Div, Mult,
//!   And, Not)
//! - Resolve complex one-liners correctly using PEDMAS (e.g. `5 + 2 * 5 - 5 *
//!   5` is calculated correctly)
//! - Handle borrows/derefs, pointers.
//! - Declare and call functions with varying parameters and return types
//! - Perform type-checking and type-inference such that return-types and
//!   parameter types must always match.
//! - Do simple logic-operations (e.g. If/And/Or)
//! - Handle, access, define and initialize structs and arrays.
//! - Define and execute For/While loops
//! - Output detailed debug information
//! - Define extern functions that can be linked to outside modules such as
//!   `libc`.
//! - Define custom binary operations for any two types that haven't been
//!   defined previously (such as `u16 + u32`).
//!
//! An example program of Reid, that calculates the 5th fibonacci number:
//! ```reid
//! fn main() -> u16 {
//!     return fibonacci(5);
//! }
//!
//! fn fibonacci(n: u16) -> u16 {
//!     if n <= 2 {
//!         return 1;
@ -32,16 +46,13 @@
//!     }
//! ```
//!
//! Remaining TODOs (see README.md for more):
//! - Error handling
//! - Lexing & parsing of whitespace and comments as well
//! - LSP implementation
//! ```
use std::{collections::HashMap, path::PathBuf};
use ast::{ use ast::{
lexer::{self, FullToken, Token}, lexer::{self, FullToken, Token},
@ -58,11 +69,14 @@ use reid_lib::{compile::CompileOutput, Context};
use crate::{ use crate::{
ast::TopLevelStatement, ast::TopLevelStatement,
mir::{
macros::{form_macros, MacroModule, MacroPass},
SourceModuleId,
},
};

pub mod ast;
pub mod codegen;
pub mod error_raporting;
pub mod ld;
pub mod mir;
@ -72,9 +86,11 @@ mod util;
pub fn parse_module<'map, T: Into<String>>(
source: &str,
name: T,
path: Option<PathBuf>,
map: &'map mut ErrorModules,
module_id: Option<SourceModuleId>,
) -> Result<(mir::SourceModuleId, Vec<FullToken>), ReidError> {
let id = map.add_module(name.into(), path, module_id).unwrap();
map.set_source(id, source.to_owned());
let tokens = ReidError::from_lexer(lexer::tokenize(source), map.clone(), id)?; let tokens = ReidError::from_lexer(lexer::tokenize(source), map.clone(), id)?;
@ -82,6 +98,7 @@ pub fn parse_module<'map, T: Into<String>>(
map.set_tokens(id, tokens.clone()); map.set_tokens(id, tokens.clone());
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#?}", &tokens); println!("{:#?}", &tokens);
Ok((id, tokens)) Ok((id, tokens))
@ -93,7 +110,7 @@ pub fn compile_module<'map>(
map: &'map mut ErrorModules, map: &'map mut ErrorModules,
path: Option<PathBuf>, path: Option<PathBuf>,
is_main: bool, is_main: bool,
) -> Result<Result<mir::Module, (ast::Module, ReidError)>, ReidError> {
let module = map.module(&module_id).cloned().unwrap(); let module = map.module(&module_id).cloned().unwrap();
let mut token_stream = TokenStream::from(&tokens); let mut token_stream = TokenStream::from(&tokens);
@ -105,6 +122,8 @@ pub fn compile_module<'map>(
statements.push(statement); statements.push(statement);
} }
let errors = token_stream.errors();
drop(token_stream); drop(token_stream);
let ast_module = ast::Module { let ast_module = ast::Module {
@ -115,10 +134,34 @@ pub fn compile_module<'map>(
is_main, is_main,
}; };
if errors.len() > 0 {
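// Parser errors are returned together with the partially-built AST (as the
// inner `Err` variant) instead of aborting outright, so callers can still
// inspect whatever was successfully parsed.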
// dbg!(&ast_module);
return Ok(Err((
ast_module,
ReidError::from_kind(
errors
.into_iter()
.map(|e| {
error_raporting::ErrorKind::from(mir::pass::Error {
metadata: mir::Metadata {
source_module_id: module_id,
range: *e.get_range().unwrap_or(&Default::default()),
position: None,
},
kind: e,
})
})
.collect(),
map.clone(),
),
)));
}
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
dbg!(&ast_module); dbg!(&ast_module);
Ok(ast_module.process(module_id)) Ok(Ok(ast_module.process(module_id)))
} }
pub fn perform_all_passes<'map>( pub fn perform_all_passes<'map>(
@ -126,9 +169,11 @@ pub fn perform_all_passes<'map>(
module_map: &'map mut ErrorModules, module_map: &'map mut ErrorModules,
) -> Result<(), ReidError> { ) -> Result<(), ReidError> {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
dbg!(&context); dbg!(&context);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#}", &context); println!("{:#}", &context);
let state = context.pass(&mut LinkerPass { let state = context.pass(&mut LinkerPass {
@ -143,10 +188,13 @@ pub fn perform_all_passes<'map>(
} }
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:-^100}", "LINKER OUTPUT"); println!("{:-^100}", "LINKER OUTPUT");
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#}", &context); println!("{:#}", &context);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
dbg!(&state); dbg!(&state);
if !state.errors.is_empty() { if !state.errors.is_empty() {
@ -168,10 +216,13 @@ pub fn perform_all_passes<'map>(
let state = context.pass(&mut macro_pass)?; let state = context.pass(&mut macro_pass)?;
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:-^100}", "MACRO OUTPUT"); println!("{:-^100}", "MACRO OUTPUT");
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#}", &context); println!("{:#}", &context);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
dbg!(&state); dbg!(&state);
if !state.errors.is_empty() { if !state.errors.is_empty() {
@ -206,12 +257,16 @@ pub fn perform_all_passes<'map>(
let state = context.pass(&mut TypeInference { refs: &mut refs })?; let state = context.pass(&mut TypeInference { refs: &mut refs })?;
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:-^100}", "TYPE INFERRER OUTPUT"); println!("{:-^100}", "TYPE INFERRER OUTPUT");
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{}", &refs); println!("{}", &refs);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#}", &context); println!("{:#}", &context);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
dbg!(&state); dbg!(&state);
if !state.errors.is_empty() { if !state.errors.is_empty() {
@ -228,10 +283,13 @@ pub fn perform_all_passes<'map>(
let state = context.pass(&mut TypeCheck { refs: &refs })?; let state = context.pass(&mut TypeCheck { refs: &refs })?;
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:-^100}", "TYPECHECKER OUTPUT"); println!("{:-^100}", "TYPECHECKER OUTPUT");
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#}", &context); println!("{:#}", &context);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
dbg!(&state); dbg!(&state);
if !state.errors.is_empty() { if !state.errors.is_empty() {
@ -245,6 +303,9 @@ pub fn perform_all_passes<'map>(
)); ));
} }
#[cfg(feature = "context_debug")]
dbg!(&context);
Ok(()) Ok(())
} }
@ -261,16 +322,18 @@ pub fn compile_and_pass<'map>(
let path = path.canonicalize().unwrap(); let path = path.canonicalize().unwrap();
let name = path.file_name().unwrap().to_str().unwrap().to_owned(); let name = path.file_name().unwrap().to_str().unwrap().to_owned();
let (id, tokens) = parse_module(source, name, module_map)?; let (id, tokens) = parse_module(source, name, Some(path.clone()), module_map, None)?;
let module = compile_module(id, tokens, module_map, Some(path.clone()), true)?; let module = compile_module(id, tokens, module_map, Some(path.clone()), true)?.map_err(|(_, e)| e)?;
let mut mir_context = mir::Context::from(vec![module], path.parent().unwrap().to_owned()); let mut mir_context = mir::Context::from(vec![module], path.parent().unwrap().to_owned());
perform_all_passes(&mut mir_context, module_map)?; perform_all_passes(&mut mir_context, module_map)?;
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:-^100}", "FINAL OUTPUT"); println!("{:-^100}", "FINAL OUTPUT");
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{:#}", &mir_context); println!("{:#}", &mir_context);
let mut context = Context::new(format!("Reid ({})", env!("CARGO_PKG_VERSION"))); let mut context = Context::new(format!("Reid ({})", env!("CARGO_PKG_VERSION")));
@ -280,6 +343,7 @@ pub fn compile_and_pass<'map>(
}; };
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
#[cfg(feature = "log_output")]
println!("{}", &codegen_modules.context); println!("{}", &codegen_modules.context);
let compiled = codegen_modules.compile(cpu, features); let compiled = codegen_modules.compile(cpu, features);


@ -84,7 +84,11 @@ impl Display for GlobalKind {
impl Display for Import { impl Display for Import {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "import {}", self.0.join("::")) write!(
f,
"import {}",
self.0.iter().map(|(s, _)| s.clone()).collect::<Vec<_>>().join("::")
)
} }
} }
@ -272,16 +276,16 @@ impl Display for ExprKind {
} }
f.write_char(']') f.write_char(']')
} }
ExprKind::Struct(name, items) => { ExprKind::Struct(key, items) => {
write!(f, "{} ", name)?; write!(f, "{:?} ", key)?;
f.write_char('{')?; f.write_char('{')?;
let mut state = Default::default(); let mut state = Default::default();
let mut inner_f = PadAdapter::wrap(f, &mut state); let mut inner_f = PadAdapter::wrap(f, &mut state);
let mut iter = items.iter(); let mut iter = items.iter();
if let Some((name, expr)) = iter.next() { if let Some((name, expr, _)) = iter.next() {
write!(inner_f, "\n{}: {}", name, expr)?; write!(inner_f, "\n{}: {}", name, expr)?;
while let Some((name, expr)) = iter.next() { while let Some((name, expr, _)) = iter.next() {
writeln!(inner_f, ",")?; writeln!(inner_f, ",")?;
write!(inner_f, "{}: {}", name, expr)?; write!(inner_f, "{}: {}", name, expr)?;
} }
@ -289,7 +293,7 @@ impl Display for ExprKind {
} }
f.write_char('}') f.write_char('}')
} }
ExprKind::Accessed(expression, type_kind, name) => { ExprKind::Accessed(expression, type_kind, name, _) => {
Display::fmt(&expression, f)?; Display::fmt(&expression, f)?;
write_access(f, name)?; write_access(f, name)?;
write!(f, "<{}>", type_kind) write!(f, "<{}>", type_kind)


@ -1,3 +1,5 @@
use reid_lib::builder::TypeValue;
use crate::util::maybe;
use super::{typecheck::typerefs::TypeRefs, *};
@ -57,7 +59,7 @@ impl TypeKind {
}
}
pub fn size_of(&self, map: &HashMap<CustomTypeKey, TypeDefinition>) -> u64 {
match self {
TypeKind::Bool => 1,
TypeKind::I8 => 8,
@ -72,8 +74,16 @@ impl TypeKind {
TypeKind::U128 => 128,
TypeKind::Void => 0,
TypeKind::Char => 8,
TypeKind::Array(type_kind, len) => type_kind.size_of(map) * (*len as u64),
TypeKind::CustomType(key) => match &map.get(key).unwrap().kind {
TypeDefinitionKind::Struct(struct_type) => {
let mut size = 0;
for field in &struct_type.0 {
size += field.1.size_of(map)
}
size
}
},
TypeKind::CodegenPtr(_) => 64,
TypeKind::Vague(_) => panic!("Tried to sizeof a vague type!"),
TypeKind::Borrow(..) => 64,
@ -405,11 +415,8 @@ impl Expression {
TypeKind::Array(Box::new(first.1), expressions.len() as u64),
))
}
Accessed(_, type_kind, ..) => Ok((ReturnKind::Soft, type_kind.clone())),
Struct(key, _) => Ok((ReturnKind::Soft, TypeKind::CustomType(key.clone()))),
Borrow(expr, mutable) => {
let ret_type = expr.return_type(refs, mod_id)?;
Ok((ret_type.0, TypeKind::Borrow(Box::new(ret_type.1), *mutable)))
@ -437,7 +444,7 @@ impl Expression {
match &self.0 {
ExprKind::Variable(var_ref) => Some(var_ref),
ExprKind::Indexed(lhs, _, _) => lhs.backing_var(),
ExprKind::Accessed(lhs, ..) => lhs.backing_var(),
ExprKind::Borrow(expr, _) => expr.backing_var(),
ExprKind::Deref(expr) => expr.backing_var(),
ExprKind::Block(block) => block.backing_var(),


@ -12,7 +12,7 @@ use crate::{
error_raporting::{ErrorModules, ReidError}, error_raporting::{ErrorModules, ReidError},
mir::{ mir::{
pass::BinopKey, BinopDefinition, CustomTypeKey, FunctionDefinitionKind, FunctionParam, SourceModuleId, pass::BinopKey, BinopDefinition, CustomTypeKey, FunctionDefinitionKind, FunctionParam, SourceModuleId,
TypeDefinition, TypeKind, StructType, TypeDefinition, TypeDefinitionKind, TypeKind,
}, },
parse_module, parse_module,
}; };
@ -51,8 +51,8 @@ pub enum ErrorKind {
} }
pub fn compile_std(module_map: &mut ErrorModules) -> Result<Module, ReidError> { pub fn compile_std(module_map: &mut ErrorModules) -> Result<Module, ReidError> {
let (id, tokens) = parse_module(STD_SOURCE, STD_NAME, module_map)?; let (id, tokens) = parse_module(STD_SOURCE, STD_NAME, None, module_map, None)?;
let module = compile_module(id, tokens, module_map, None, false)?; let module = compile_module(id, tokens, module_map, None, false)?.map_err(|(_, e)| e)?;
let module_id = module.module_id; let module_id = module.module_id;
let mut mir_context = super::Context::from(vec![module], Default::default()); let mut mir_context = super::Context::from(vec![module], Default::default());
@@ -111,11 +111,12 @@ impl<'map> Pass for LinkerPass<'map> {
         let mut modules_to_process: Vec<Rc<RefCell<_>>> = modules.values().cloned().collect();
-        let mut already_imported_types = HashSet::<CustomTypeKey>::new();
-        let mut already_imported_binops = HashSet::<BinopKey>::new();
+        let mut still_required_types = HashSet::<CustomTypeKey>::new();
         while let Some(module) = modules_to_process.pop() {
             let mut extern_types = HashMap::new();
+            let mut already_imported_binops = HashSet::<BinopKey>::new();
+            let mut already_imported_types = HashSet::<CustomTypeKey>::new();
             let mut importer_module = module.borrow_mut();
             for import in importer_module.imports.clone() {
@@ -124,7 +125,9 @@ impl<'map> Pass for LinkerPass<'map> {
                     state.ok::<_, Infallible>(Err(ErrorKind::InnerModulesNotYetSupported(import.clone())), import.1);
                 }
-                let module_name = unsafe { path.get_unchecked(0) };
+                let Some((module_name, _)) = path.get(0) else {
+                    continue;
+                };
                 let mut imported = if let Some(mod_id) = module_ids.get(module_name) {
                     modules.get(mod_id).unwrap()
@@ -141,7 +144,13 @@ impl<'map> Pass for LinkerPass<'map> {
                         continue;
                     };
-                    let (id, tokens) = match parse_module(&source, module_name.clone(), &mut self.module_map) {
+                    let (id, tokens) = match parse_module(
+                        &source,
+                        module_name.clone(),
+                        Some(file_path.clone()),
+                        &mut self.module_map,
+                        None,
+                    ) {
                         Ok(val) => val,
                         Err(err) => {
                             state.ok::<_, Infallible>(
@@ -156,6 +165,7 @@ impl<'map> Pass for LinkerPass<'map> {
                     };
                     match compile_module(id, tokens, &mut self.module_map, Some(file_path), false) {
+                        Ok(res) => match res {
                             Ok(imported_module) => {
                                 if imported_module.is_main {
                                     state.ok::<_, Infallible>(
@@ -171,6 +181,17 @@ impl<'map> Pass for LinkerPass<'map> {
                                 modules_to_process.push(imported.clone());
                                 imported
                             }
+                            Err((_, err)) => {
+                                state.ok::<_, Infallible>(
+                                    Err(ErrorKind::ModuleCompilationError(
+                                        module_name.clone(),
+                                        format!("{}", err),
+                                    )),
+                                    import.1,
+                                );
+                                continue;
+                            }
+                        },
                         Err(err) => {
                             state.ok::<_, Infallible>(
                                 Err(ErrorKind::ModuleCompilationError(
@@ -185,13 +206,16 @@ impl<'map> Pass for LinkerPass<'map> {
                 }
                 .borrow_mut();
-                let import_name = unsafe { path.get_unchecked(1) };
-                let import_id = imported.module_id;
+                let Some((import_name, _)) = path.get(1) else {
+                    continue;
+                };
+                let imported_id = imported.module_id;
                 let mut imported_types = Vec::new();
                 if let Some(func) = imported.functions.iter_mut().find(|f| f.name == *import_name) {
                     let func_name = func.name.clone();
+                    let func_signature = func.signature();
                     if !func.is_pub {
                         state.ok::<_, Infallible>(
@@ -236,6 +260,7 @@ impl<'map> Pass for LinkerPass<'map> {
                         parameters: param_tys,
                         kind: super::FunctionDefinitionKind::Extern(true),
                         source: Some(imported.module_id),
+                        signature_meta: func_signature,
                     });
                 } else if let Some(ty) = imported.typedefs.iter_mut().find(|f| f.name == *import_name) {
                     let external_key = CustomTypeKey(ty.name.clone(), ty.source_module);
@@ -327,7 +352,8 @@ impl<'map> Pass for LinkerPass<'map> {
                             return_type,
                             parameters: param_tys,
                             kind: super::FunctionDefinitionKind::Extern(true),
-                            source: Some(import_id),
+                            source: Some(imported_id),
+                            signature_meta: func.signature_meta,
                         },
                     ));
                 }
@@ -342,6 +368,13 @@ impl<'map> Pass for LinkerPass<'map> {
                 let mut seen = HashSet::new();
                 let mut current_extern_types = HashSet::new();
                 seen.extend(imported_types.clone().iter().map(|t| t.0.clone()));
+                for ty in still_required_types.clone() {
+                    if ty.1 == imported_id && !seen.contains(&ty) {
+                        imported_types.push((ty, false));
+                    }
+                }
                 current_extern_types.extend(imported_types.clone().iter().filter(|t| t.1).map(|t| t.0.clone()));
                 for extern_type in &current_extern_types {
                     extern_types.insert(extern_type.0.clone(), extern_type.1);
@@ -353,10 +386,16 @@ impl<'map> Pass for LinkerPass<'map> {
                 for typekey in imported_types.clone() {
                     let typedef = imported_mod_typedefs
                         .iter()
-                        .find(|ty| CustomTypeKey(ty.name.clone(), imported_mod_id) == typekey.0)
+                        .find(|ty| CustomTypeKey(ty.name.clone(), ty.source_module) == typekey.0)
                         .unwrap();
-                    let inner = find_inner_types(typedef, seen.clone(), imported_mod_id);
-                    seen.extend(inner.iter().cloned());
+                    let inner = find_inner_types(typedef, seen.clone(), imported_mod_typedefs);
+                    for ty in inner {
+                        if ty.1 == imported_id && imported_mod_typedefs.iter().find(|t| t.name == ty.0).is_some() {
+                            seen.insert(ty);
+                        } else {
+                            still_required_types.insert(ty);
+                        }
+                    }
                 }
                 // TODO: Unable to import same-named type from multiple places..
@@ -416,6 +455,22 @@ impl<'map> Pass for LinkerPass<'map> {
         Ok(())
     }
+    fn module(&mut self, module: &mut Module, state: PassState<Self::Data, Self::TError>) -> PassResult {
+        let extern_types = &state.scope.data.extern_imported_types.get(&module.module_id);
+        if let Some(extern_types) = extern_types {
+            for ty in &mut module.typedefs {
+                match &mut ty.kind {
+                    TypeDefinitionKind::Struct(StructType(fields)) => {
+                        for field in fields {
+                            field.1 = field.1.update_imported(extern_types, module.module_id);
+                        }
+                    }
+                }
+            }
+        }
+        Ok(())
+    }
     fn function(
         &mut self,
         function: &mut FunctionDefinition,
@@ -459,7 +514,7 @@ impl<'map> Pass for LinkerPass<'map> {
                 super::ExprKind::Indexed(.., type_kind, _) => {
                     *type_kind = type_kind.update_imported(extern_types, mod_id)
                 }
-                super::ExprKind::Accessed(.., type_kind, _) => {
+                super::ExprKind::Accessed(.., type_kind, _, _) => {
                     *type_kind = type_kind.update_imported(extern_types, mod_id)
                 }
                 super::ExprKind::BinOp(.., type_kind) => *type_kind = type_kind.update_imported(extern_types, mod_id),
@@ -470,6 +525,13 @@ impl<'map> Pass for LinkerPass<'map> {
                 super::ExprKind::AssociatedFunctionCall(type_kind, _) => {
                     *type_kind = type_kind.update_imported(extern_types, mod_id)
                 }
+                super::ExprKind::Struct(key, _) => {
+                    *key = if let Some(mod_id) = extern_types.get(&key.0) {
+                        CustomTypeKey(key.0.clone(), *mod_id)
+                    } else {
+                        key.clone()
+                    }
+                }
                 _ => {}
             }
         }
@@ -525,28 +587,31 @@ fn import_type(ty: &TypeKind, usable_import: bool) -> Vec<(CustomTypeKey, bool)>
 fn find_inner_types(
     typedef: &TypeDefinition,
     mut seen: HashSet<CustomTypeKey>,
-    mod_id: SourceModuleId,
+    typedefs: &Vec<TypeDefinition>,
 ) -> Vec<CustomTypeKey> {
     match &typedef.kind {
         crate::mir::TypeDefinitionKind::Struct(struct_type) => {
-            let typenames = struct_type
+            let typekeys = struct_type
                 .0
                 .iter()
-                .filter(|t| matches!(t.1, TypeKind::CustomType(..)))
-                .map(|t| match &t.1 {
-                    TypeKind::CustomType(CustomTypeKey(t, _)) => t,
-                    _ => panic!(),
+                .filter_map(|t| match &t.1 {
+                    TypeKind::CustomType(key) => Some(key),
+                    _ => None,
                 })
                 .cloned()
                 .collect::<Vec<_>>();
-            for typename in typenames {
-                if seen.contains(&CustomTypeKey(typename.clone(), mod_id)) {
+            for typekey in typekeys {
+                if seen.contains(&typekey) {
                     continue;
                 }
-                let inner = find_inner_types(typedef, seen.clone(), mod_id);
-                seen.insert(CustomTypeKey(typename, mod_id));
-                seen.extend(inner);
+                seen.insert(typekey.clone());
+                if typekey.1 == typedef.source_module {
+                    if let Some(inner) = typedefs.iter().find(|t| t.name == typekey.0) {
+                        let ret = find_inner_types(inner, seen.clone(), typedefs);
+                        seen.extend(ret);
+                    }
+                }
             }
             seen.into_iter().collect()
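
The Struct arm added above applies the same remapping that update_imported performs on field and expression types. A hypothetical stand-alone helper (illustration only, not code from the crate) showing the rule it follows:

    use std::collections::HashMap;

    // Illustration: `extern_types` maps an imported type's name to the module it
    // was originally defined in; a key naming such a type is re-pointed at that
    // module, anything else is left untouched.
    fn remap_key(key: &CustomTypeKey, extern_types: &HashMap<String, SourceModuleId>) -> CustomTypeKey {
        if let Some(source_id) = extern_types.get(&key.0) {
            CustomTypeKey(key.0.clone(), *source_id)
        } else {
            key.clone()
        }
    }
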


@@ -118,10 +118,13 @@ impl mir::Expression {
         let mut globals = Vec::new();
         match &mut self.0 {
             mir::ExprKind::FunctionCall(function_call) => {
+                for param in &mut function_call.parameters {
+                    globals.extend(param.gen_macros(data, state, map));
+                }
                 if function_call.is_macro {
                     if let Some(existing_macro) = data.macros.get(&function_call.name) {
                         let mut literals = Vec::new();
-                        for param in &function_call.parameters {
+                        for param in &mut function_call.parameters {
                             match &param.0 {
                                 super::ExprKind::Literal(literal) => literals.push(literal.clone()),
                                 _ => state.note_errors(&vec![ErrorKind::InvalidMacroArgs], param.1),


@@ -41,16 +41,19 @@ impl Metadata {
     }
     pub fn into_positions(&self, tokens: &Vec<FullToken>) -> Option<(Position, Position)> {
-        let mut iter = tokens
-            .iter()
-            .skip(self.range.start)
-            .take(self.range.end - self.range.start);
-        if let Some(first) = iter.next() {
-            let last = iter.last().unwrap_or(first);
-            Some((first.position, last.position.add(last.token.len() as u32)))
-        } else {
-            None
-        }
+        self.range.into_position(tokens)
     }
+    pub fn is_after(&self, token_idx: usize) -> bool {
+        return token_idx < self.range.start;
+    }
+    pub fn is_before(&self, token_idx: usize) -> bool {
+        return token_idx > self.range.end;
+    }
+    pub fn contains(&self, token_idx: usize) -> bool {
+        return token_idx >= self.range.start && token_idx <= self.range.end;
+    }
 }
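
into_positions now delegates to the token range itself, and the three new predicates make cursor lookups one-liners. A small sketch of the kind of lookup they enable (the node list and cursor index are assumptions, not code from the crate):

    // Sketch: pick the first node whose token range covers the cursor, assuming
    // `nodes` is a slice of (node, Metadata) pairs and `cursor_idx` is a token
    // index already resolved from the editor position.
    fn node_at<T>(nodes: &[(T, Metadata)], cursor_idx: usize) -> Option<&(T, Metadata)> {
        nodes.iter().find(|(_, meta)| meta.contains(cursor_idx))
    }
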
@@ -253,15 +256,15 @@ pub enum ReturnKind {
 pub struct NamedVariableRef(pub TypeKind, pub String, pub Metadata);
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Import(pub Vec<String>, pub Metadata);
+pub struct Import(pub Vec<(String, Metadata)>, pub Metadata);
 #[derive(Debug, Clone)]
 pub enum ExprKind {
     Variable(NamedVariableRef),
     Indexed(Box<Expression>, TypeKind, Box<Expression>),
-    Accessed(Box<Expression>, TypeKind, String),
+    Accessed(Box<Expression>, TypeKind, String, Metadata),
     Array(Vec<Expression>),
-    Struct(String, Vec<(String, Expression)>),
+    Struct(CustomTypeKey, Vec<(String, Expression, Metadata)>),
     Literal(Literal),
     BinOp(BinaryOperator, Box<Expression>, Box<Expression>, TypeKind),
     FunctionCall(FunctionCall),
@@ -302,6 +305,7 @@ pub struct FunctionDefinition {
     pub parameters: Vec<FunctionParam>,
     pub kind: FunctionDefinitionKind,
     pub source: Option<SourceModuleId>,
+    pub signature_meta: Metadata,
 }
 #[derive(Debug, Clone, PartialEq, PartialOrd)]
@@ -311,6 +315,16 @@ pub struct FunctionParam {
     pub meta: Metadata,
 }
+impl FunctionParam {
+    pub fn from<T: Into<String>>(name: T, ty: TypeKind) -> FunctionParam {
+        FunctionParam {
+            name: name.into(),
+            ty: ty,
+            meta: Default::default(),
+        }
+    }
+}
 pub enum SelfKind {
     Borrow,
     MutBorrow,
@@ -337,11 +351,7 @@ impl FunctionDefinition {
     }
     pub fn signature(&self) -> Metadata {
-        match &self.kind {
-            FunctionDefinitionKind::Local(_, metadata) => metadata.clone(),
-            FunctionDefinitionKind::Extern(_) => Metadata::default(),
-            FunctionDefinitionKind::Intrinsic(_) => Metadata::default(),
-        }
+        self.signature_meta
     }
 }
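
With signature() now backed by the new signature_meta field, the FunctionParam::from helper is the other convenience added here. A usage sketch (the names and parameter types below are placeholders, not taken from any real definition):

    // Sketch: the helper replaces spelling out the struct literal;
    // `meta` defaults to an empty Metadata.
    let params = vec![
        FunctionParam::from("self", TypeKind::Vague(VagueType::Unknown)),
        FunctionParam::from("other", TypeKind::Vague(VagueType::Unknown)),
    ];
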


@@ -450,6 +450,10 @@ impl Module {
         for function in &mut self.functions {
             function.pass(pass, state, &mut scope.inner(), self.module_id)?;
         }
+        for (_, function) in &mut self.associated_functions {
+            function.pass(pass, state, &mut scope.inner(), self.module_id)?;
+        }
         Ok(())
     }
 }
@@ -585,7 +589,7 @@ impl Expression {
                 }
             }
             ExprKind::Struct(_, items) => {
-                for (_, expr) in items {
+                for (_, expr, _) in items {
                     expr.pass(pass, state, scope, mod_id)?;
                 }
             }


@@ -97,9 +97,10 @@ fn check_typedefs_for_recursion<'a, 'b>(
             typedef.meta,
         );
     } else {
-        seen.insert(name.clone());
         if let Some(inner_typedef) = defmap.get(name) {
-            check_typedefs_for_recursion(defmap, inner_typedef, seen.clone(), state)
+            let mut inner_seen = seen.clone();
+            inner_seen.insert(name.clone());
+            check_typedefs_for_recursion(defmap, inner_typedef, inner_seen.clone(), state)
         }
     }
 }
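
The change above clones the visited-set per branch instead of inserting into the shared one before recursing. The general shape of that pattern, sketched generically with plain strings standing in for type definitions (illustration only, not the crate's code):

    use std::collections::{HashMap, HashSet};

    // Each child gets its own clone of `seen`, so reaching the same type through
    // two sibling fields is not mistaken for recursion; only a path that loops
    // back onto itself is reported.
    fn is_recursive(defs: &HashMap<String, Vec<String>>, name: &str, seen: HashSet<String>) -> bool {
        if seen.contains(name) {
            return true;
        }
        let Some(fields) = defs.get(name) else { return false };
        fields.iter().any(|field| {
            let mut inner_seen = seen.clone();
            inner_seen.insert(name.to_string());
            is_recursive(defs, field, inner_seen)
        })
    }
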
@@ -596,7 +597,7 @@ impl Expression {
                     }
                 }
             }
-            ExprKind::Accessed(expression, type_kind, field_name) => {
+            ExprKind::Accessed(expression, type_kind, field_name, _) => {
                 // Resolve expected type
                 let expected_ty = type_kind.resolve_ref(typerefs);
@@ -620,32 +621,31 @@ impl Expression {
                         // Update possibly resolved type
                         Ok(true_ty)
                     } else {
-                        Err(ErrorKind::NoSuchField(field_name.clone()))
+                        Err(ErrorKind::NoSuchField(key.0.clone()))
                     }
                 } else {
                     Err(ErrorKind::TriedAccessingNonStruct(expr_ty))
                 }
             }
-            ExprKind::Struct(struct_name, items) => {
-                let type_key = CustomTypeKey(struct_name.clone(), state.module_id.unwrap());
+            ExprKind::Struct(struct_key, items) => {
                 let struct_def = state
                     .scope
-                    .get_struct_type(&type_key)
-                    .ok_or(ErrorKind::NoSuchType(struct_name.clone(), type_key.1))?
+                    .get_struct_type(&struct_key)
+                    .ok_or(ErrorKind::NoSuchType(struct_key.0.clone(), struct_key.1))?
                     .clone();
-                let mut expected_fields = if let Some(struct_ty) = state.scope.get_struct_type(&type_key) {
+                let mut expected_fields = if let Some(struct_ty) = state.scope.get_struct_type(&struct_key) {
                     struct_ty.0.iter().map(|f| f.0.clone()).collect()
                 } else {
                     HashSet::new()
                 };
-                for (field_name, field_expr) in items {
+                for (field_name, field_expr, _) in items {
                     // Get expected type, or error if field does not exist
                     let expected_ty = state.or_else(
                         struct_def
                             .get_field_ty(field_name)
-                            .ok_or(ErrorKind::NoSuchField(format!("{}.{}", struct_name, field_name))),
+                            .ok_or(ErrorKind::NoSuchField(format!("{:?}.{}", struct_key, field_name))),
                         &TypeKind::Vague(VagueType::Unknown),
                         field_expr.1,
                     );
@@ -667,7 +667,7 @@ impl Expression {
                     self.1,
                 );
-                Ok(TypeKind::CustomType(type_key))
+                Ok(TypeKind::CustomType(struct_key.clone()))
             }
             ExprKind::Borrow(expr, mutable) => {
                 let hint_t = if let HintKind::Coerce(hint_t) = hint_t {
@@ -720,15 +720,19 @@ impl Expression {
                 expr.resolve_ref(typerefs).cast_into(type_kind)
             }
             ExprKind::AssociatedFunctionCall(type_kind, function_call) => {
+                *type_kind = type_kind.or_default()?;
                 let true_function = state
                     .scope
                     .get_associated_function(&pass::AssociatedFunctionKey(
                         type_kind.clone(),
                         function_call.name.clone(),
                     ))
-                    .ok_or(ErrorKind::FunctionNotDefined(function_call.name.clone()));
+                    .ok_or(ErrorKind::AssocFunctionNotDefined(
+                        function_call.name.clone(),
+                        type_kind.clone(),
+                    ));
-                if let Some(f) = state.ok(true_function, self.1) {
+                if let Some(f) = state.ok(true_function, function_call.meta) {
                     let param_len_given = function_call.parameters.len();
                     let param_len_expected = f.params.len();


@@ -12,9 +12,10 @@ use std::{
 use crate::{
     mir::{
+        implement::TypeCategory,
         pass::{AssociatedFunctionKey, ScopeVariable},
         BinopDefinition, Block, CustomTypeKey, ExprKind, Expression, FunctionDefinition, FunctionDefinitionKind,
-        IfExpression, Module, ReturnKind, StmtKind, TypeKind, WhileStatement,
+        IfExpression, Module, ReturnKind, StmtKind, TypeKind, VagueType, WhileStatement,
     },
     util::try_all,
 };
@@ -171,8 +172,9 @@ impl FunctionDefinition {
         let scope_refs = ScopeTypeRefs::from(type_refs);
         for param in &self.parameters {
             let param_t = state.or_else(param.ty.assert_unvague(), Vague(Unknown), self.signature());
+            let mutable = matches!(param_t, TypeKind::Borrow(_, true));
             let res = scope_refs
-                .new_var(param.name.clone(), false, &param_t)
+                .new_var(param.name.clone(), mutable, &param_t)
                 .or(Err(ErrorKind::VariableAlreadyDefined(param.name.clone())));
             state.ok(res, self.signature());
         }
@@ -526,7 +528,7 @@ impl Expression {
                     }
                 }
             }
-            ExprKind::Accessed(expression, type_kind, field_name) => {
+            ExprKind::Accessed(expression, type_kind, field_name, _) => {
                 let expr_ty = expression.infer_types(state, type_refs)?;
                 // Check that the resolved type is at least a struct, no
@@ -545,18 +547,17 @@ impl Expression {
                                 *type_kind = elem_ty.as_type().clone();
                                 Ok(elem_ty)
                             }
-                            None => Err(ErrorKind::NoSuchField(field_name.clone())),
+                            None => Ok(type_refs.from_type(&TypeKind::Vague(VagueType::Unknown)).unwrap()),
                         }
                     }
-                    _ => Err(ErrorKind::TriedAccessingNonStruct(kind)),
+                    _ => Ok(type_refs.from_type(&TypeKind::Vague(VagueType::Unknown)).unwrap()),
                 }
             }
-            ExprKind::Struct(struct_name, fields) => {
-                let type_key = CustomTypeKey(struct_name.clone(), state.module_id.unwrap());
+            ExprKind::Struct(struct_key, fields) => {
                 let expected_struct_ty = state
                     .scope
-                    .get_struct_type(&type_key)
-                    .ok_or(ErrorKind::NoSuchType(struct_name.clone(), state.module_id.unwrap()))?
+                    .get_struct_type(&struct_key)
+                    .ok_or(ErrorKind::NoSuchType(struct_key.0.clone(), state.module_id.unwrap()))?
                     .clone();
                 for field in fields {
                     if let Some(expected_field_ty) = expected_struct_ty.get_field_ty(&field.0) {
@@ -566,12 +567,12 @@ impl Expression {
                         }
                     } else {
                         state.ok::<_, Infallible>(
-                            Err(ErrorKind::NoSuchField(format!("{}.{}", struct_name, field.0))),
+                            Err(ErrorKind::NoSuchField(format!("{:?}.{}", struct_key, field.0))),
                             field.1 .1,
                         );
                     }
                 }
-                Ok(type_refs.from_type(&TypeKind::CustomType(type_key.clone())).unwrap())
+                Ok(type_refs.from_type(&TypeKind::CustomType(struct_key.clone())).unwrap())
             }
             ExprKind::Borrow(expr, mutable) => {
                 // Find variable type
@@ -605,7 +606,7 @@ impl Expression {
                     .parameters
                     .get_mut(0)
                     .expect("Unknown-type associated function NEEDS to always have at least one parameter!");
-                let param_ty = first_param.infer_types(state, type_refs).unwrap().resolve_deep();
+                let param_ty = first_param.infer_types(state, type_refs)?.resolve_deep();
                 *type_kind = state
                     .or_else(
                         param_ty.ok_or(ErrorKind::CouldNotInferType(format!("{}", first_param))),
@@ -613,25 +614,45 @@ impl Expression {
                         first_param.1,
                     )
                     .resolve_ref(type_refs.types);
-                let backing_var = first_param.backing_var().expect("todo").1.clone();
+                let backing_var = first_param.backing_var();
+                let is_mutable = if let Some(backing_var) = first_param.backing_var() {
+                    if let Some((mutable, _)) = type_refs.find_var(&backing_var.1) {
+                        mutable
+                    } else {
+                        return Err(ErrorKind::VariableNotDefined(backing_var.1.clone()));
+                    }
+                } else {
+                    false
+                };
+                if backing_var.is_some() {
                     if let TypeKind::Borrow(inner, _) = type_kind {
+                        let ty_cat = inner.category();
                         if let TypeKind::Borrow(..) = *inner.clone() {
                             *type_kind = type_kind.unroll_borrow();
                             let ExprKind::Borrow(val, _) = &first_param.0 else {
                                 panic!()
                             };
                             *first_param = *val.clone();
+                        } else if ty_cat == TypeCategory::Integer || ty_cat == TypeCategory::Real {
+                            if let ExprKind::Borrow(val, _) = &first_param.0 {
+                                *first_param = *val.clone();
+                            }
+                            *type_kind = *inner.clone();
+                        }
+                    }
+                } else {
+                    if let ExprKind::Borrow(val, _) = &first_param.0 {
+                        *first_param = *val.clone();
+                    }
+                    if let TypeKind::Borrow(inner_ty, _) = type_kind {
+                        *type_kind = *inner_ty.clone();
                     }
                 }
-                if let Some((mutable, _)) = type_refs.find_var(&backing_var) {
-                    if !mutable {
+                if !is_mutable {
                     first_param.remove_borrow_mutability();
                 }
-                } else {
-                    return Err(ErrorKind::VariableNotDefined(backing_var));
-                }
             }
         }
@@ -642,9 +663,13 @@ impl Expression {
                     .ok_or(ErrorKind::AssocFunctionNotDefined(
                         function_call.name.clone(),
                         type_kind.clone(),
-                    ))?
+                    ))
                     .clone();
+                let Ok(fn_call) = fn_call else {
+                    return Ok(type_refs.from_type(&Vague(Unknown)).unwrap());
+                };
                 // Infer param expression types and narrow them to the
                 // expected function parameters (or Unknown types if too
                 // many were provided)


@@ -97,6 +97,9 @@ pub struct TypeRefs {
     /// Indirect ID-references, referring to hints-vec
     pub(super) type_refs: RefCell<Vec<TypeIdRef>>,
     pub(super) binop_types: BinopMap,
+    /// Used when the real typerefs are not available, and any TypeRefs need to
+    /// be resolved as Unknown.
+    pub unknown_typerefs: bool,
 }
 impl std::fmt::Display for TypeRefs {
@@ -122,6 +125,14 @@ impl TypeRefs {
             hints: Default::default(),
             type_refs: Default::default(),
             binop_types: binops,
+            unknown_typerefs: false,
+        }
+    }
+    pub fn unknown() -> TypeRefs {
+        TypeRefs {
+            unknown_typerefs: true,
+            ..Default::default()
         }
     }
@@ -177,8 +188,12 @@ impl TypeRefs {
     }
     pub fn retrieve_typeref(&self, idx: usize) -> Option<TypeRefKind> {
+        if !self.unknown_typerefs {
             let inner_idx = unsafe { *self.recurse_type_ref(idx).borrow() };
             self.hints.borrow().get(inner_idx).cloned()
+        } else {
+            Some(TypeRefKind::Direct(TypeKind::Vague(VagueType::Unknown)))
+        }
     }
     pub fn retrieve_wide_type(&self, idx: usize, seen: &mut HashSet<usize>) -> Option<TypeKind> {
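
A sketch of the new escape hatch in use; only TypeRefs::unknown() and the Unknown fallback in retrieve_typeref come from the hunks above, the surrounding assertion is illustrative:

    // Sketch: when no inference data exists (e.g. analysis runs on a module that
    // never type-checked), hand out a TypeRefs that resolves every lookup to
    // Vague(Unknown) instead of indexing into empty hint tables.
    let refs = TypeRefs::unknown();
    assert!(matches!(
        refs.retrieve_typeref(0),
        Some(TypeRefKind::Direct(TypeKind::Vague(VagueType::Unknown)))
    ));
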


@@ -1,154 +0,0 @@
use std::{path::PathBuf, process::Command, time::SystemTime};
use reid::{
compile_module,
ld::LDRunner,
mir::{self},
parse_module, perform_all_passes,
};
use reid_lib::Context;
use util::assert_err;
mod util;
fn test(source: &str, name: &str, expected_exit_code: Option<i32>) {
assert_err(assert_err(std::panic::catch_unwind(|| {
let mut map = Default::default();
let (id, tokens) = assert_err(parse_module(source, name, &mut map));
let module = assert_err(compile_module(id, tokens, &mut map, None, true));
let mut mir_context = mir::Context::from(vec![module], Default::default());
assert_err(perform_all_passes(&mut mir_context, &mut map));
let context = Context::new(format!("Reid ({})", env!("CARGO_PKG_VERSION")));
let codegen = assert_err(mir_context.codegen(&context));
let output = codegen.compile(None, Vec::new()).output();
let time = SystemTime::now();
let in_path = PathBuf::from(format!(
"/tmp/temp-{}.o",
time.duration_since(SystemTime::UNIX_EPOCH).unwrap().as_nanos()
));
std::fs::write(&in_path, &output.obj_buffer).expect("Could not write OBJ-file!");
let out_path = in_path.with_extension("out");
LDRunner::from_command("ld")
.with_library("c")
.invoke(&in_path, &out_path);
std::fs::remove_file(in_path).unwrap();
let executed = Command::new(&out_path).output();
std::fs::remove_file(out_path).unwrap();
if let Some(expected_exit_code) = expected_exit_code {
assert_eq!(expected_exit_code, executed.unwrap().status.code().unwrap());
}
Ok::<(), ()>(())
})))
}
#[test]
fn arithmetic_compiles_well() {
test(include_str!("../../examples/arithmetic.reid"), "test", Some(48));
}
#[test]
fn array_structs_compiles_well() {
test(include_str!("../../examples/array_structs.reid"), "test", Some(5));
}
#[test]
fn array_compiles_well() {
test(include_str!("../../examples/array.reid"), "test", Some(3));
}
#[test]
fn borrow_compiles_well() {
test(include_str!("../../examples/borrow.reid"), "test", Some(17));
}
#[test]
fn borrow_hard_compiles_well() {
test(include_str!("../../examples/borrow_hard.reid"), "test", Some(17));
}
#[test]
fn cast_compiles_well() {
test(include_str!("../../examples/cast.reid"), "test", Some(6));
}
#[test]
fn char_compiles_well() {
test(include_str!("../../examples/char.reid"), "test", Some(98));
}
#[test]
fn div_mod_compiles_well() {
test(include_str!("../../examples/div_mod.reid"), "test", Some(12));
}
#[test]
fn fibonacci_compiles_well() {
test(include_str!("../../examples/fibonacci.reid"), "test", Some(1));
}
#[test]
fn float_compiles_well() {
test(include_str!("../../examples/float.reid"), "test", Some(1));
}
#[test]
fn hello_world_compiles_well() {
test(include_str!("../../examples/hello_world.reid"), "test", None);
}
#[test]
fn hello_world_harder_compiles_well() {
test(include_str!("../../examples/hello_world_harder.reid"), "test", None);
}
#[test]
fn mutable_compiles_well() {
test(include_str!("../../examples/mutable.reid"), "test", Some(21));
}
#[test]
fn ptr_compiles_well() {
test(include_str!("../../examples/ptr.reid"), "test", Some(5));
}
#[test]
fn std_test_compiles_well() {
test(include_str!("../../examples/std_test.reid"), "test", Some(3));
}
#[test]
fn strings_compiles_well() {
test(include_str!("../../examples/strings.reid"), "test", Some(5));
}
#[test]
fn struct_compiles_well() {
test(include_str!("../../examples/struct.reid"), "test", Some(17));
}
#[test]
fn loops_compiles_well() {
test(include_str!("../../examples/loops.reid"), "test", Some(10));
}
#[test]
fn ptr_hard_compiles_well() {
test(include_str!("../../examples/ptr_hard.reid"), "test", Some(0));
}
#[test]
fn loop_hard_compiles_well() {
test(include_str!("../../examples/loop_hard.reid"), "test", Some(0));
}
#[test]
fn custom_binop_compiles_well() {
test(include_str!("../../examples/custom_binop.reid"), "test", Some(21));
}
#[test]
fn array_short_compiles_well() {
test(include_str!("../../examples/array_short.reid"), "test", Some(5));
}
#[test]
fn imported_type_compiles_well() {
test(include_str!("../../examples/imported_type.reid"), "test", Some(0));
}
#[test]
fn associated_functions() {
test(
include_str!("../../examples/associated_functions.reid"),
"test",
Some(4),
);
}