Compare commits: cb2604dc22 ... ce2278ce45 (9 commits)

Commits in range: ce2278ce45, 0f782dcb96, 6dfd98eba3, df4febf1ef, 9d1b18f083, ce7c4bfb52, f0e47a5d57, 64e34ecf13, e2dc1a3f85
Cargo.lock (generated) - 156 changed lines

Dependency version bumps (registry source lines and checksums updated accordingly):

    aho-corasick    1.0.2   -> 1.1.3
    cc              1.0.79  -> 1.2.29  (now pulls in shlex)
    lazy_static     1.4.0   -> 1.5.0
    libc            0.2.147 -> 0.2.174
    llvm-sys        160.1.3 -> 160.2.1
    memchr          2.5.0   -> 2.7.5
    proc-macro2     1.0.66  -> 1.0.95
    quote           1.0.32  -> 1.0.40
    regex           1.9.1   -> 1.11.1
    regex-automata  0.3.4   -> 0.4.9
    regex-syntax    0.7.4   -> 0.8.5
    semver          1.0.18  -> 1.0.26
    syn             2.0.28  -> 2.0.104
    thiserror       1.0.44  -> 1.0.69
    thiserror-impl  1.0.44  -> 1.0.69
    unicode-ident   1.0.11  -> 1.0.18

New packages in the lockfile:

    colored 3.0.0 (depends on windows-sys; added as a dependency of reid 0.1.0)
    shlex 1.3.0 (new dependency of cc)
    windows-sys 0.59.0 (depends on windows-targets)
    windows-targets 0.52.6, plus the per-target crates it pulls in, all at 0.52.6:
    windows_aarch64_gnullvm, windows_aarch64_msvc, windows_i686_gnu, windows_i686_gnullvm,
    windows_i686_msvc, windows_x86_64_gnu, windows_x86_64_gnullvm, windows_x86_64_msvc
Cargo.toml: a new default-on `color` feature gates the optional `colored` dependency.

@@ -5,9 +5,17 @@ edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

+[features]
+default = ["color"]
+
+color = ["colored"]
+
 [dependencies]
 ## LLVM Bindings
 llvm-sys = "160"
 ## Make it easier to generate errors
 thiserror = "1.0.44"
 reid-lib = { path = "../reid-llvm-lib" }
+
+colored = {version = "3.0.0", optional = true}
CLI entry point (fn main): the old compile entry point is replaced by the new compile_simple.

@@ -1,6 +1,6 @@
-use std::{env, error::Error, fs, path::PathBuf};
+use std::{env, fs, path::PathBuf};

-use reid::compile;
+use reid::compile_simple;
 use reid_lib::compile::CompileOutput;

 fn main() -> Result<(), std::io::Error> {
@@ -15,7 +15,7 @@ fn main() -> Result<(), std::io::Error> {
     let before = std::time::SystemTime::now();

     let text = fs::read_to_string(&path)?;
-    match compile(&text, PathBuf::from(&path)) {
+    match compile_simple(&text, PathBuf::from(&path)) {
        Ok(CompileOutput {
            triple,
            assembly,
An example that builds a MIR context by hand (fibonacci) now fills in the new module_id field:

@@ -161,6 +161,7 @@ fn main() {
     let mir_context = mir::Context {
         modules: vec![Module {
             name: "test module".to_owned(),
+            module_id: SourceModuleId::default(),
             imports: vec![],
             functions: vec![fibonacci, main],
             typedefs: Vec::new(),
AST parser (impl Parse blocks): unknown type names now parse as custom types, a missing semicolon becomes a hard error instead of a println, and several error sites switch from expected_err to expecting_err.

@@ -39,7 +39,7 @@ impl Parse for Type {
                 "u64" => TypeKind::U64,
                 "u128" => TypeKind::U128,
                 "string" => TypeKind::String,
-                _ => Err(stream.expected_err("known type identifier")?)?,
+                _ => TypeKind::Custom(ident),
             }
         } else {
             return Err(stream.expected_err("type identifier")?)?;
@@ -128,7 +128,7 @@ impl Parse for PrimaryExpression {
                     stream.expect(Token::BracketClose)?;
                     Expression(Kind::Array(expressions), stream.get_range().unwrap())
                 }
-                _ => Err(stream.expected_err("identifier, constant, parentheses or brackets")?)?,
+                _ => Err(stream.expected_err("expression")?)?,
             }
         } else {
             Err(stream.expected_err("expression")?)?
@@ -391,7 +391,8 @@ impl Parse for Block {
                 // Special list of expressions that are simply not warned about,
                 // if semicolon is missing.
                 if !matches!(e, Expression(ExpressionKind::IfExpr(_), _)) {
-                    println!("Oh no, does this statement lack ;");
+                    // In theory could ignore the missing semicolon..
+                    return Err(stream.expected_err("expected semicolon to complete statement")?);
                 }

                 statements.push(BlockLevelStatement::Expression(e));
@@ -445,7 +446,7 @@ impl<T: Parse + std::fmt::Debug> Parse for NamedFieldList<T> {
                     stream.next();
                 } // Consume comma
                 Some(Token::BraceClose) => break,
-                Some(_) | None => Err(stream.expected_err("another field or closing brace")?)?,
+                Some(_) | None => Err(stream.expecting_err("another field or closing brace")?)?,
             }
         }
         Ok(NamedFieldList(fields))
@@ -478,7 +479,7 @@ impl Parse for ValueIndex {
         match stream.peek() {
             Some(Token::BracketOpen) => Ok(ValueIndex::Array(stream.parse()?)),
             Some(Token::Dot) => Ok(ValueIndex::Struct(stream.parse()?)),
-            _ => Err(stream.expected_err("value or struct index")?),
+            _ => Err(stream.expecting_err("value or struct index")?),
         }
     }
 }
@@ -534,7 +535,7 @@ impl Parse for BlockLevelStatement {
                 Stmt::Return(ReturnType::Soft, e)
             }
         } else {
-            Err(stream.expected_err("expression")?)?
+            Err(stream.expecting_err("expression")?)?
         }
     }
 }
@@ -605,7 +606,7 @@ impl Parse for TopLevelStatement {
                 range,
             })
         }
-        _ => Err(stream.expected_err("import or fn")?)?,
+        _ => Err(stream.expecting_err("import or fn")?)?,
     })
 }
AST-to-MIR lowering: every process/into_mir function now receives the SourceModuleId of the module being lowered, and range-to-Metadata conversions go through as_meta(module_id) instead of into().

@@ -2,7 +2,7 @@ use std::path::PathBuf;

 use crate::{
     ast::{self},
-    mir::{self, NamedVariableRef, StmtKind, StructField, StructType},
+    mir::{self, NamedVariableRef, SourceModuleId, StmtKind, StructField, StructType},
 };
@@ -12,7 +12,7 @@ impl mir::Context {
 impl ast::Module {
-    pub fn process(&self) -> mir::Module {
+    pub fn process(&self, module_id: SourceModuleId) -> mir::Module {
         let mut imports = Vec::new();
         let mut functions = Vec::new();
         let mut typedefs = Vec::new();
@@ -21,7 +21,7 @@ impl ast::Module {
                 Import(import) => {
-                    imports.push(mir::Import(import.0.clone(), import.1.into()));
+                    imports.push(mir::Import(import.0.clone(), import.1.as_meta(module_id)));
                 }
@@ -39,7 +39,10 @@ impl ast::Module {
                         .cloned()
                         .map(|p| (p.0, p.1.into()))
                         .collect(),
-                    kind: mir::FunctionDefinitionKind::Local(block.into_mir(), (*range).into()),
+                    kind: mir::FunctionDefinitionKind::Local(
+                        block.into_mir(module_id),
+                        (*range).as_meta(module_id),
+                    ),
                 };
                 functions.push(def);
@@ -75,14 +78,14 @@ impl ast::Module {
                             StructField(
                                 s.name.clone(),
                                 s.ty.clone().into(),
-                                s.range.into(),
+                                s.range.as_meta(module_id),
                             )
                         })
                         .collect(),
                     ))
                 }
             },
-            meta: (*range).into(),
+            meta: (*range).as_meta(module_id),
         };
         typedefs.push(def);
@@ -91,6 +94,7 @@ impl ast::Module {
         mir::Module {
             name: self.name.clone(),
+            module_id: module_id,
             imports,
             functions,
             path: self.path.clone(),
@@ -101,7 +105,7 @@ impl ast::Module {
 impl ast::Block {
-    pub fn into_mir(&self) -> mir::Block {
+    pub fn into_mir(&self, module_id: SourceModuleId) -> mir::Block {
         let mut mir_statements = Vec::new();

         for statement in &self.0 {
@@ -115,27 +119,31 @@ impl ast::Block {
                             .map(|t| t.0.into())
                             .unwrap_or(mir::TypeKind::Vague(mir::VagueType::Unknown)),
                         s_let.0.clone(),
-                        s_let.4.into(),
+                        s_let.4.as_meta(module_id),
                     ),
                     s_let.2,
-                    s_let.3.process(),
+                    s_let.3.process(module_id),
                 ),
                 s_let.4,
             ),
             ast::BlockLevelStatement::Set(var_ref, expression, range) => (
-                StmtKind::Set(var_ref.process(), expression.process()),
+                StmtKind::Set(var_ref.process(module_id), expression.process(module_id)),
                 *range,
             ),
             ast::BlockLevelStatement::Import { _i } => todo!(),
-            ast::BlockLevelStatement::Expression(e) => (StmtKind::Expression(e.process()), e.1),
-            ast::BlockLevelStatement::Return(_, e) => (StmtKind::Expression(e.process()), e.1),
+            ast::BlockLevelStatement::Expression(e) => {
+                (StmtKind::Expression(e.process(module_id)), e.1)
+            }
+            ast::BlockLevelStatement::Return(_, e) => {
+                (StmtKind::Expression(e.process(module_id)), e.1)
+            }
         };

-        mir_statements.push(mir::Statement(kind, range.into()));
+        mir_statements.push(mir::Statement(kind, range.as_meta(module_id)));
     }

     let return_expression = if let Some(r) = &self.1 {
-        Some((r.0.into(), Box::new(r.1.process())))
+        Some((r.0.into(), Box::new(r.1.process(module_id))))
     } else {
         None
     };
@@ -143,7 +151,7 @@ impl ast::Block {
     mir::Block {
         statements: mir_statements,
         return_expression,
-        meta: self.2.into(),
+        meta: self.2.as_meta(module_id),
     }
 }
@@ -158,61 +166,67 @@ impl From<ast::ReturnType> for mir::ReturnKind {
 impl ast::Expression {
-    fn process(&self) -> mir::Expression {
+    fn process(&self, module_id: SourceModuleId) -> mir::Expression {
         let kind = match &self.0 {
             ast::ExpressionKind::VariableName(name) => mir::ExprKind::Variable(NamedVariableRef(
                 mir::TypeKind::Vague(mir::VagueType::Unknown),
                 name.clone(),
-                self.1.into(),
+                self.1.as_meta(module_id),
             )),
             ast::ExpressionKind::Literal(literal) => mir::ExprKind::Literal(literal.mir()),
             ast::ExpressionKind::Binop(binary_operator, lhs, rhs) => mir::ExprKind::BinOp(
                 binary_operator.mir(),
-                Box::new(lhs.process()),
-                Box::new(rhs.process()),
+                Box::new(lhs.process(module_id)),
+                Box::new(rhs.process(module_id)),
             ),
             ast::ExpressionKind::FunctionCall(fn_call_expr) => {
                 mir::ExprKind::FunctionCall(mir::FunctionCall {
                     name: fn_call_expr.0.clone(),
                     return_type: mir::TypeKind::Vague(mir::VagueType::Unknown),
-                    parameters: fn_call_expr.1.iter().map(|e| e.process()).collect(),
+                    parameters: fn_call_expr
+                        .1
+                        .iter()
+                        .map(|e| e.process(module_id))
+                        .collect(),
                })
            }
-            ast::ExpressionKind::BlockExpr(block) => mir::ExprKind::Block(block.into_mir()),
+            ast::ExpressionKind::BlockExpr(block) => {
+                mir::ExprKind::Block(block.into_mir(module_id))
+            }
             ast::ExpressionKind::IfExpr(if_expression) => {
-                let cond = if_expression.0.process();
-                let then_block = if_expression.1.into_mir();
+                let cond = if_expression.0.process(module_id);
+                let then_block = if_expression.1.into_mir(module_id);
                 let else_block = if let Some(el) = &if_expression.2 {
-                    Some(el.into_mir())
+                    Some(el.into_mir(module_id))
                 } else {
                     None
                 };
                 mir::ExprKind::If(mir::IfExpression(Box::new(cond), then_block, else_block))
             }
             ast::ExpressionKind::Array(expressions) => {
-                mir::ExprKind::Array(expressions.iter().map(|e| e.process()).collect())
+                mir::ExprKind::Array(expressions.iter().map(|e| e.process(module_id)).collect())
             }
             ast::ExpressionKind::Indexed(expression, idx_expr) => mir::ExprKind::Indexed(
-                Box::new(expression.process()),
+                Box::new(expression.process(module_id)),
                 mir::TypeKind::Vague(mir::VagueType::Unknown),
-                Box::new(idx_expr.process()),
+                Box::new(idx_expr.process(module_id)),
             ),
             ast::ExpressionKind::StructExpression(struct_init) => mir::ExprKind::Struct(
                 struct_init.name.clone(),
                 struct_init
                     .fields
                     .iter()
-                    .map(|(n, e)| (n.clone(), e.process()))
+                    .map(|(n, e)| (n.clone(), e.process(module_id)))
                     .collect(),
             ),
             ast::ExpressionKind::Accessed(expression, name) => mir::ExprKind::Accessed(
-                Box::new(expression.process()),
+                Box::new(expression.process(module_id)),
                 mir::TypeKind::Vague(mir::VagueType::Unknown),
                 name.clone(),
             ),
         };

-        mir::Expression(kind, self.1.into())
+        mir::Expression(kind, self.1.as_meta(module_id))
     }
 }
@@ -261,7 +275,7 @@ impl From<ast::TypeKind> for mir::TypeKind {
                 mir::TypeKind::Array(Box::new(mir::TypeKind::from(*type_kind.clone())), *length)
             }
             ast::TypeKind::String => mir::TypeKind::StringPtr,
-            ast::TypeKind::Custom(_) => todo!("Add processing for custom types"),
+            ast::TypeKind::Custom(name) => mir::TypeKind::CustomType(name.clone()),
         }
     }
 }
LLVM codegen scope handling: the helper methods on StackValueKind (get_instr, with_instr) are removed.

@@ -68,24 +68,6 @@ pub enum StackValueKind {
     Any(InstructionValue),
 }

-impl StackValueKind {
-    unsafe fn get_instr(&self) -> &InstructionValue {
-        match self {
-            StackValueKind::Immutable(val) => val,
-            StackValueKind::Mutable(val) => val,
-            StackValueKind::Any(val) => val,
-        }
-    }
-
-    fn with_instr(&self, instr: InstructionValue) -> StackValueKind {
-        match self {
-            StackValueKind::Immutable(_) => StackValueKind::Immutable(instr),
-            StackValueKind::Mutable(_) => StackValueKind::Mutable(instr),
-            StackValueKind::Any(_) => StackValueKind::Any(instr),
-        }
-    }
-}
-
 impl<'ctx, 'a> Scope<'ctx, 'a> {
     fn with_block(&self, block: Block<'ctx>) -> Scope<'ctx, 'a> {
         Scope {
reid/src/error_raporting.rs (new file, 335 lines added):

use std::{
    collections::HashMap,
    fmt::{Debug, Write},
};

use crate::{
    ast,
    lexer::{self, Cursor, FullToken, Position},
    mir::{self, pass, Metadata, SourceModuleId},
    token_stream::{self, TokenRange},
};

impl<T: std::error::Error + std::fmt::Display> pass::Error<T> {
    fn fmt_simple(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.kind, f)
    }
}

fn label(text: &str) -> &str {
    #[cfg(debug_assertions)]
    {
        return text;
    }
    #[cfg(not(debug_assertions))]
    ""
}

#[derive(thiserror::Error, Debug, Clone, PartialEq, Eq)]
pub enum ErrorKind {
    #[error("{}{}", label("(Lexing) "), .0.kind)]
    LexerError(#[from] mir::pass::Error<lexer::Error>),
    #[error("{}{}", label("(Parsing) "), .0.kind)]
    ParserError(#[from] mir::pass::Error<token_stream::Error>),
    #[error("{}{}", label("(TypeCheck) "), .0.kind)]
    TypeCheckError(#[source] mir::pass::Error<mir::typecheck::ErrorKind>),
    #[error("{}{}", label("(TypeInference) "), .0.kind)]
    TypeInferenceError(#[source] mir::pass::Error<mir::typecheck::ErrorKind>),
    #[error("{}{}", label("(Linker) "), .0.kind)]
    LinkerError(#[from] mir::pass::Error<mir::linker::ErrorKind>),
}

impl ErrorKind {
    pub fn from_typecheck(err: mir::pass::Error<mir::typecheck::ErrorKind>) -> ErrorKind {
        ErrorKind::TypeCheckError(err)
    }

    pub fn from_typeinference(err: mir::pass::Error<mir::typecheck::ErrorKind>) -> ErrorKind {
        ErrorKind::TypeInferenceError(err)
    }
}

impl ErrorKind {
    fn get_meta(&self) -> Metadata {
        match &self {
            ErrorKind::LexerError(error) => error.metadata,
            ErrorKind::ParserError(error) => error.metadata,
            ErrorKind::TypeCheckError(error) => error.metadata,
            ErrorKind::TypeInferenceError(error) => error.metadata,
            ErrorKind::LinkerError(error) => error.metadata,
        }
    }
}

impl PartialOrd for ErrorKind {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        self.get_meta()
            .source_module_id
            .partial_cmp(&other.get_meta().source_module_id)
    }
}

impl Ord for ErrorKind {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.get_meta().cmp(&other.get_meta())
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ErrModule {
    pub name: String,
    pub tokens: Option<Vec<FullToken>>,
    pub source: Option<String>,
}

#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct ModuleMap {
    module_map: HashMap<mir::SourceModuleId, ErrModule>,
    module_counter: mir::SourceModuleId,
}

impl ModuleMap {
    pub fn add_module<T: Into<String>>(&mut self, name: T) -> Option<mir::SourceModuleId> {
        let id = self.module_counter.increment();
        self.module_map.insert(
            id,
            ErrModule {
                name: name.into(),
                tokens: None,
                source: None,
            },
        );
        Some(id)
    }

    pub fn set_tokens(&mut self, id: mir::SourceModuleId, tokens: Vec<FullToken>) {
        if let Some(module) = self.module_map.get_mut(&id) {
            module.tokens = Some(tokens);
        }
    }

    pub fn set_source(&mut self, id: mir::SourceModuleId, source: String) {
        if let Some(module) = self.module_map.get_mut(&id) {
            module.source = Some(source);
        }
    }

    pub fn get_module(&self, id: &mir::SourceModuleId) -> Option<&ErrModule> {
        self.module_map.get(id)
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ReidError {
    map: ModuleMap,
    errors: Vec<ErrorKind>,
}

impl ReidError {
    pub fn from_lexer<U>(
        result: Result<U, lexer::Error>,
        map: ModuleMap,
        module: SourceModuleId,
    ) -> Result<U, ReidError> {
        result.map_err(|error| {
            let pass_err = pass::Error {
                metadata: Metadata {
                    source_module_id: module,
                    range: Default::default(),
                    position: Some(*error.get_position()),
                },
                kind: error,
            };
            ReidError {
                map,
                errors: vec![ErrorKind::LexerError(pass_err)],
            }
        })
    }

    pub fn from_parser<U>(
        result: Result<U, token_stream::Error>,
        map: ModuleMap,
        module: SourceModuleId,
    ) -> Result<U, ReidError> {
        result.map_err(|error| {
            let pass_err = pass::Error {
                metadata: Metadata {
                    source_module_id: module,
                    range: *error.get_range().unwrap_or(&Default::default()),
                    position: None,
                },
                kind: error,
            };
            ReidError {
                map,
                errors: vec![ErrorKind::ParserError(pass_err)],
            }
        })
    }

    pub fn from_kind<U>(errors: Vec<ErrorKind>, map: ModuleMap) -> ReidError {
        ReidError { map, errors }
    }
}

impl std::error::Error for ReidError {}

impl std::fmt::Display for ReidError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut sorted_errors = self.errors.clone();
        sorted_errors.sort_by(|a, b| a.cmp(&b));
        sorted_errors.dedup();

        let mut curr_module = None;
        for error in sorted_errors {
            let meta = error.get_meta();
            let module = self.map.get_module(&meta.source_module_id).unwrap();
            let position = if let Some(tokens) = &module.tokens {
                let range_tokens = meta.range.into_tokens(&tokens);

                dbg!(&error);
                dbg!(&meta.range, &tokens[meta.range.start]);
                get_position(&range_tokens).or(meta.position.map(|p| (p, p)))
            } else if let Some(position) = meta.position {
                Some((position, position))
            } else {
                None
            };

            if curr_module != Some(meta.source_module_id) {
                curr_module = Some(meta.source_module_id);
                writeln!(
                    f,
                    "Errors in module {}:",
                    color_err(format!(
                        "{}",
                        self.map
                            .module_map
                            .get(&meta.source_module_id)
                            .unwrap()
                            .name
                    ))?
                )?;
            }
            writeln!(f)?;
            write!(f, " Error: ")?;
            writeln!(f, "{}", color_err(format!("{}", error))?)?;
            write!(
                f,
                "{:>20}{}",
                color_warn("At: ")?,
                position
                    .map(|p| fmt_positions(p))
                    .unwrap_or(String::from("{unknown}")),
            )?;
            if let (Some(position), Some(source)) = (position, &module.source) {
                writeln!(f, "{}", fmt_lines(source, position, 6)?)?;
            }
        }
        Ok(())
    }
}

impl TokenRange {
    pub fn into_tokens<'v>(&self, tokens: &'v Vec<FullToken>) -> Vec<&'v FullToken> {
        tokens
            .iter()
            .skip(self.start)
            .by_ref()
            .take(self.end + 1 - self.start)
            .collect::<Vec<_>>()
    }
}

fn get_position(tokens: &Vec<&FullToken>) -> Option<(Position, Position)> {
    if let Some(first) = tokens.first() {
        let last = tokens.last().unwrap();
        Some((first.position, last.position.add(last.token.len() as u32)))
    } else {
        None
    }
}

fn into_full_lines<'v>((start, end): (Position, Position)) -> (Position, Position) {
    (Position(0, start.1), Position(u32::MAX, end.1))
}

fn fmt_lines(
    source: &String,
    (highlight_start, highlight_end): (Position, Position),
    ident: usize,
) -> Result<String, std::fmt::Error> {
    let (line_start, line_end) = into_full_lines((highlight_start, highlight_end));
    let mut cursor = Cursor {
        position: Position(0, 1),
        char_stream: source.chars(),
    };

    let mut text = String::new();

    while let Some(c) = cursor.next() {
        if cursor.position.1 > line_end.1 {
            break;
        }
        if cursor.position.1 >= line_start.1 {
            if c == '\n' {
                write!(text, "\n{}", " ".repeat(ident))?;
            } else {
                if cursor.position > highlight_start && cursor.position <= highlight_end {
                    write!(text, "{}", color_highlight(c)?)?;
                } else {
                    text.write_char(c)?;
                }
            }
        }
    }

    Ok(text)
}

fn fmt_positions((start, end): (Position, Position)) -> String {
    if start == end {
        format!("ln {}, col {}", start.1, start.0)
    } else if start.1 == end.1 {
        format!("ln {}, col {}-{}", start.1, start.0, end.0)
    } else {
        format!("{}:{} - {}:{}", start.1, start.0, end.1, end.0)
    }
}

fn color_err(elem: impl std::fmt::Display) -> Result<String, std::fmt::Error> {
    let mut text = format!("{}", elem);

    #[cfg(feature = "color")]
    {
        use colored::Colorize;
        text = format!("{}", text.bright_red())
    }

    Ok(text)
}

fn color_warn(elem: impl std::fmt::Display) -> Result<String, std::fmt::Error> {
    let mut text = format!("{}", elem);

    #[cfg(feature = "color")]
    {
        use colored::Colorize;
        text = format!("{}", text.bright_yellow())
    }

    Ok(text)
}

fn color_highlight(elem: impl std::fmt::Display) -> Result<String, std::fmt::Error> {
    let mut text = format!("{}", elem);

    #[cfg(feature = "color")]
    {
        use colored::Colorize;
        text = format!("{}", text.bright_yellow().underline())
    }

    Ok(text)
}
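A minimal sketch of how these pieces fit together (hypothetical driver code, not part of the diff; reid/src/lib.rs below wires them up the same way):

    // Register a module, remember its source and tokens, and wrap any lexer
    // failure into a printable ReidError. Assumes crate-internal access to
    // the lexer module and the types defined above.
    fn lex_with_reporting(source: &str) -> Result<Vec<FullToken>, ReidError> {
        let mut map = ModuleMap::default();
        let id = map.add_module("example").unwrap();
        map.set_source(id, source.to_owned());
        let tokens = ReidError::from_lexer(lexer::tokenize(source), map.clone(), id)?;
        map.set_tokens(id, tokens.clone());
        Ok(tokens)
    }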
Lexer: Position becomes a proper struct with ordering helpers, Token gains len/to_string, and the lexer error messages drop their inline positions (positions are now surfaced by the error-reporting layer instead).

@@ -1,8 +1,11 @@
-use std::{fmt::Debug, str::Chars};
+use std::{
+    fmt::{Debug, Write},
+    str::Chars,
+};

 static DECIMAL_NUMERICS: &[char] = &['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'];

-#[derive(Debug, Eq, PartialEq, Clone)]
+#[derive(Debug, Eq, PartialEq, Clone, PartialOrd, Ord)]
 pub enum Token {
     /// Values
     Identifier(String),
@@ -99,8 +102,56 @@ impl From<Token> for String {
     }
 }

+impl Token {
+    pub fn len(&self) -> usize {
+        self.to_string().len()
+    }
+}
+
+impl ToString for Token {
+    fn to_string(&self) -> String {
+        match &self {
+            Token::Identifier(ident) => ident.clone(),
+            Token::DecimalValue(val) => val.to_string(),
+            Token::StringLit(lit) => format!("\"{}\"", lit),
+            Token::LetKeyword => String::from("let"),
+            Token::MutKeyword => String::from("mut"),
+            Token::ImportKeyword => String::from("import"),
+            Token::ReturnKeyword => String::from("return"),
+            Token::FnKeyword => String::from("fn"),
+            Token::PubKeyword => String::from("pub"),
+            Token::Arrow => String::from("=>"),
+            Token::If => String::from("if"),
+            Token::Else => String::from("else"),
+            Token::True => String::from("true"),
+            Token::False => String::from("false"),
+            Token::Extern => String::from("extern"),
+            Token::Struct => String::from("struct"),
+            Token::Semi => String::from(';'),
+            Token::Equals => String::from('='),
+            Token::Colon => String::from(':'),
+            Token::Plus => String::from('+'),
+            Token::Times => String::from('*'),
+            Token::Minus => String::from('-'),
+            Token::GreaterThan => String::from('>'),
+            Token::LessThan => String::from('<'),
+            Token::Et => String::from('&'),
+            Token::Exclamation => String::from('!'),
+            Token::ParenOpen => String::from('('),
+            Token::ParenClose => String::from(')'),
+            Token::BraceOpen => String::from('{'),
+            Token::BraceClose => String::from('}'),
+            Token::BracketOpen => String::from('['),
+            Token::BracketClose => String::from(']'),
+            Token::Comma => String::from(','),
+            Token::Dot => String::from('.'),
+            Token::Eof => String::new(),
+        }
+    }
+}
+
 /// A token with a position
-#[derive(Clone)]
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
 pub struct FullToken {
     pub token: Token,
     pub position: Position,
@@ -115,15 +166,37 @@ impl Debug for FullToken {
     }
 }

-pub type Position = (u32, u32);
+/// (Column, Line)
+#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Ord)]
+pub struct Position(pub u32, pub u32);

-struct Cursor<'a> {
+impl Position {
+    pub fn add(&self, num: u32) -> Position {
+        Position(self.0 + num, self.1)
+    }
+
+    pub fn sub(&self, num: u32) -> Position {
+        Position(self.0 - num, self.1)
+    }
+}
+
+impl PartialOrd for Position {
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        match self.1.partial_cmp(&other.1) {
+            Some(core::cmp::Ordering::Equal) => {}
+            ord => return ord,
+        }
+        self.0.partial_cmp(&other.0)
+    }
+}
+
+pub struct Cursor<'a> {
     pub position: Position,
-    char_stream: Chars<'a>,
+    pub char_stream: Chars<'a>,
 }

 impl<'a> Cursor<'a> {
-    fn next(&mut self) -> Option<char> {
+    pub fn next(&mut self) -> Option<char> {
         let next = self.char_stream.next();
         if let Some('\n') = next {
             self.position.1 += 1;
@@ -153,14 +226,14 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Error
     let to_tokenize = to_tokenize.into();
     let mut cursor = Cursor {
         char_stream: to_tokenize.chars(),
-        position: (0, 1),
+        position: Position(0, 1),
     };

     let mut tokens = Vec::new();

     while let Some(character) = &cursor.next() {
         // Save "current" token first character position
-        let position = (cursor.position.0 - 1, cursor.position.1);
+        let position = cursor.position.sub(1);

         let variant = match character {
             // Whitespace
@@ -273,10 +346,19 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Error
     Ok(tokens)
 }

-#[derive(thiserror::Error, Debug, Clone)]
+#[derive(thiserror::Error, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
 pub enum Error {
-    #[error("Invalid token '{}' at Ln {}, Col {}", .0, (.1).1, (.1).0)]
+    #[error("Invalid token '{}' ", .0)]
     InvalidToken(char, Position),
-    #[error("String literal that starts at Ln {}, Col {} is never finished!", (.0).1, (.0).0)]
+    #[error("String literal is never finished!")]
     MissingQuotation(Position),
 }
+
+impl Error {
+    pub fn get_position(&self) -> &Position {
+        match self {
+            Error::InvalidToken(_, pos) => pos,
+            Error::MissingQuotation(pos) => pos,
+        }
+    }
+}
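Because Position stores (column, line) but its manual PartialOrd compares the line component first, positions sort in reading order. A small illustration (assumes the Position type above; not part of the diff):

    fn main() {
        // Line is compared first, so a later line sorts after, regardless of column...
        assert!(Position(10, 1) < Position(0, 2));
        // ...and within the same line, the column decides.
        assert!(Position(3, 2) < Position(7, 2));
    }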
reid/src/lib.rs - 115 changed lines. The old ReidError enum moves into the new error_raporting module; module compilation is split into parse_module plus compile_module working against a ModuleMap; and the old compile entry point becomes compile_and_pass, with compile_simple as the convenience wrapper the CLI now calls.

@@ -41,10 +41,13 @@
 //! - Debug Symbols
 //! ```

-use std::path::PathBuf;
+use std::{convert::Infallible, path::PathBuf};
+
+use error_raporting::{ErrorKind as ErrorRapKind, ModuleMap, ReidError};
+use lexer::FullToken;
 use mir::{
     linker::LinkerPass, typecheck::TypeCheck, typeinference::TypeInference, typerefs::TypeRefs,
+    SourceModuleId,
 };
 use reid_lib::{compile::CompileOutput, Context};

@@ -52,64 +55,74 @@ use crate::{ast::TopLevelStatement, lexer::Token, token_stream::TokenStream};

 mod ast;
 mod codegen;
+mod error_raporting;
 mod lexer;
 pub mod mir;
 mod pad_adapter;
 mod token_stream;
 mod util;

-#[derive(thiserror::Error, Debug, Clone)]
-pub enum ReidError {
-    #[error(transparent)]
-    LexerError(#[from] lexer::Error),
-    #[error(transparent)]
-    ParserError(#[from] token_stream::Error),
-    #[error("Errors during typecheck: {0:?}")]
-    TypeCheckErrors(Vec<mir::pass::Error<mir::typecheck::ErrorKind>>),
-    #[error("Errors during type inference: {0:?}")]
-    TypeInferenceErrors(Vec<mir::pass::Error<mir::typecheck::ErrorKind>>),
-    #[error("Errors during linking: {0:?}")]
-    LinkerErrors(Vec<mir::pass::Error<mir::linker::ErrorKind>>),
-}
-
-pub fn compile_module(
+pub fn parse_module<'map, T: Into<String>>(
     source: &str,
-    name: String,
-    path: Option<PathBuf>,
-    is_main: bool,
-) -> Result<mir::Module, ReidError> {
-    let tokens = lexer::tokenize(source)?;
+    name: T,
+    map: &'map mut ModuleMap,
+) -> Result<(mir::SourceModuleId, Vec<FullToken>), ReidError> {
+    let id = map.add_module(name.into()).unwrap();
+    map.set_source(id, source.to_owned());
+
+    let tokens = ReidError::from_lexer(lexer::tokenize(source), map.clone(), id)?;
+
+    map.set_tokens(id, tokens.clone());

     #[cfg(debug_assertions)]
     dbg!(&tokens);

+    Ok((id, tokens))
+}
+
+pub fn compile_module<'map>(
+    module_id: mir::SourceModuleId,
+    tokens: &Vec<FullToken>,
+    map: &'map mut ModuleMap,
+    path: Option<PathBuf>,
+    is_main: bool,
+) -> Result<mir::Module, ReidError> {
+    let module = map.get_module(&module_id).cloned().unwrap();
+
     let mut token_stream = TokenStream::from(&tokens);

     let mut statements = Vec::new();

     while !matches!(token_stream.peek().unwrap_or(Token::Eof), Token::Eof) {
-        let statement = token_stream.parse::<TopLevelStatement>()?;
+        let statement = ReidError::from_parser(
+            token_stream.parse::<TopLevelStatement>(),
+            map.clone(),
+            module_id,
+        )?;
         statements.push(statement);
     }

     let ast_module = ast::Module {
-        name,
+        name: module.name,
         top_level_statements: statements,
         path,
         is_main,
     };

-    Ok(ast_module.process())
+    Ok(ast_module.process(module_id))
 }

-pub fn perform_all_passes(context: &mut mir::Context) -> Result<(), ReidError> {
+pub fn perform_all_passes<'map>(
+    context: &mut mir::Context,
+    module_map: &'map mut ModuleMap,
+) -> Result<(), ReidError> {
     #[cfg(debug_assertions)]
     dbg!(&context);

     #[cfg(debug_assertions)]
     println!("{}", &context);

-    let state = context.pass(&mut LinkerPass);
+    let state = context.pass(&mut LinkerPass { module_map });

     #[cfg(debug_assertions)]
     println!("{}", &context);
@@ -117,7 +130,10 @@ pub fn perform_all_passes(context: &mut mir::Context) -> Result<(), ReidError> {
     dbg!(&state);

     if !state.errors.is_empty() {
-        return Err(ReidError::LinkerErrors(state.errors));
+        return Err(ReidError::from_kind::<()>(
+            state.errors.iter().map(|e| e.clone().into()).collect(),
+            module_map.clone(),
+        ));
     }

     let refs = TypeRefs::default();
@@ -132,7 +148,14 @@ pub fn perform_all_passes(context: &mut mir::Context) -> Result<(), ReidError> {
     dbg!(&state);

     if !state.errors.is_empty() {
-        return Err(ReidError::TypeInferenceErrors(state.errors));
+        return Err(ReidError::from_kind::<()>(
+            state
+                .errors
+                .iter()
+                .map(|e| ErrorRapKind::TypeInferenceError(e.clone()))
+                .collect::<Vec<_>>(),
+            module_map.clone(),
+        ));
     }

     let state = context.pass(&mut TypeCheck { refs: &refs });
@@ -143,7 +166,14 @@ pub fn perform_all_passes(context: &mut mir::Context) -> Result<(), ReidError> {
     dbg!(&state);

     if !state.errors.is_empty() {
-        return Err(ReidError::TypeCheckErrors(state.errors));
+        return Err(ReidError::from_kind::<()>(
+            state
+                .errors
+                .iter()
+                .map(|e| ErrorRapKind::TypeCheckError(e.clone()))
+                .collect::<Vec<_>>(),
+            module_map.clone(),
+        ));
     }

     Ok(())
@@ -152,20 +182,20 @@ pub fn perform_all_passes(context: &mut mir::Context) -> Result<(), ReidError> {
 /// Takes in a bit of source code, parses and compiles it and produces `hello.o`
 /// and `hello.asm` from it, which can be linked using `ld` to produce an
 /// executable file.
-pub fn compile(source: &str, path: PathBuf) -> Result<CompileOutput, ReidError> {
+pub fn compile_and_pass<'map>(
+    source: &str,
+    path: PathBuf,
+    module_map: &'map mut ModuleMap,
+) -> Result<CompileOutput, ReidError> {
     let path = path.canonicalize().unwrap();
+    let name = path.file_name().unwrap().to_str().unwrap().to_owned();

-    let mut mir_context = mir::Context::from(
-        vec![compile_module(
-            source,
-            path.file_name().unwrap().to_str().unwrap().to_owned(),
-            Some(path.clone()),
-            true,
-        )?],
-        path.parent().unwrap().to_owned(),
-    );
+    let (id, tokens) = parse_module(source, name, module_map).unwrap();
+    let module = compile_module(id, &tokens, module_map, Some(path.clone()), true)?;

-    perform_all_passes(&mut mir_context)?;
+    let mut mir_context = mir::Context::from(vec![module], path.parent().unwrap().to_owned());
+
+    perform_all_passes(&mut mir_context, module_map)?;

     let mut context = Context::new();
     let codegen_modules = mir_context.codegen(&mut context);
@@ -176,3 +206,8 @@ pub fn compile(source: &str, path: PathBuf) -> Result<CompileOutput, ReidError>
     let compiled = codegen_modules.compile();
     Ok(compiled.output())
 }
+
+pub fn compile_simple(source: &str, path: PathBuf) -> Result<CompileOutput, ReidError> {
+    let mut map = ModuleMap::default();
+    compile_and_pass(source, path, &mut map)
+}
MIR display: Metadata now also prints its source module, and SourceModuleId gets its own Display impl.

@@ -289,7 +289,13 @@ impl Display for CmpOperator {

 impl Display for Metadata {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{:?}", self.range)
+        write!(f, "{:?} ({})", self.range, self.source_module_id)
+    }
+}
+
+impl Display for SourceModuleId {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "Mod {}", self.0)
     }
 }
@@ -279,7 +279,7 @@ impl TypeKind {
    }
}

-#[derive(Debug, Clone, thiserror::Error)]
+#[derive(Debug, Clone, thiserror::Error, PartialEq, Eq, PartialOrd, Ord)]
pub enum EqualsIssue {
    #[error("Function is already defined locally at {:?}", (.0).range)]
    ExistsLocally(Metadata),
@@ -2,30 +2,29 @@ use std::{
    cell::RefCell,
    collections::HashMap,
    convert::Infallible,
-    fmt::Error,
    fs::{self},
    path::PathBuf,
    rc::Rc,
};

-use crate::{compile_module, ReidError};
+use crate::{compile_module, error_raporting::ModuleMap, lexer::FullToken, parse_module};

use super::{
    pass::{Pass, PassState},
    r#impl::EqualsIssue,
-    Context, FunctionDefinition, Import, Metadata, Module,
+    Context, FunctionDefinition, Import, Metadata, Module, SourceModuleId,
};

pub static STD_SOURCE: &str = include_str!("../../lib/std.reid");

-#[derive(thiserror::Error, Debug, Clone)]
+#[derive(thiserror::Error, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum ErrorKind {
    #[error("Unable to import inner modules, not yet supported: {0}")]
    InnerModulesNotYetSupported(Import),
    #[error("No such module: {0}")]
    ModuleNotFound(String),
    #[error("Error while compiling module {0}: {1}")]
-    ModuleCompilationError(String, ReidError),
+    ModuleCompilationError(String, String),
    #[error("No such function {0} found in module {1}")]
    NoSuchFunctionInModule(String, String),
    #[error("Importing function {0}::{1} not possible: {2}")]
@@ -42,22 +41,25 @@ pub enum ErrorKind {
    FunctionIsPrivate(String, String),
}

-pub fn compile_std() -> super::Module {
-    let module = compile_module(STD_SOURCE, "standard_library".to_owned(), None, false).unwrap();
+pub fn compile_std(module_map: &mut ModuleMap) -> (super::Module, Vec<FullToken>) {
+    let (id, tokens) = parse_module(STD_SOURCE, "standard_library", module_map).unwrap();
+    let module = compile_module(id, &tokens, module_map, None, false).unwrap();

    let mut mir_context = super::Context::from(vec![module], Default::default());

    let std_compiled = mir_context.modules.remove(0);
-    std_compiled
+    (std_compiled, tokens)
}

/// Struct used to implement a type-checking pass that can be performed on the
/// MIR.
-pub struct LinkerPass;
+pub struct LinkerPass<'map> {
+    pub module_map: &'map mut ModuleMap,
+}

type LinkerPassState<'st, 'sc> = PassState<'st, 'sc, (), ErrorKind>;

-impl Pass for LinkerPass {
+impl<'map> Pass for LinkerPass<'map> {
    type Data = ();
    type TError = ErrorKind;
    fn context(&mut self, context: &mut Context, mut state: LinkerPassState) {
@@ -80,20 +82,31 @@ impl Pass for LinkerPass {
            return;
        };

-        let mut modules = HashMap::<String, Rc<RefCell<Module>>>::new();
+        let mut modules = HashMap::<String, Rc<RefCell<_>>>::new();

        for module in context.modules.drain(..) {
-            modules.insert(module.name.clone(), Rc::new(RefCell::new(module)));
+            let tokens = self
+                .module_map
+                .get_module(&module.module_id)
+                .unwrap()
+                .tokens
+                .clone()
+                .unwrap();
+            modules.insert(module.name.clone(), Rc::new(RefCell::new((module, tokens))));
        }

-        modules.insert("std".to_owned(), Rc::new(RefCell::new(compile_std())));
+        modules.insert(
+            "std".to_owned(),
+            Rc::new(RefCell::new(compile_std(&mut self.module_map))),
+        );

-        let mut modules_to_process: Vec<Rc<RefCell<Module>>> = modules.values().cloned().collect();
+        let mut modules_to_process: Vec<Rc<RefCell<(Module, Vec<FullToken>)>>> =
+            modules.values().cloned().collect();

        while let Some(module) = modules_to_process.pop() {
            let mut importer_module = module.borrow_mut();

-            for import in importer_module.imports.clone() {
+            for import in importer_module.0.imports.clone() {
                let Import(path, _) = &import;
                if path.len() != 2 {
                    state.ok::<_, Infallible>(
@@ -118,7 +131,23 @@ impl Pass for LinkerPass {
                    continue;
                };

-                match compile_module(&source, module_name.clone(), Some(file_path), false) {
+                let (id, tokens) =
+                    match parse_module(&source, module_name.clone(), &mut self.module_map) {
+                        Ok(val) => val,
+                        Err(err) => {
+                            state.ok::<_, Infallible>(
+                                Err(ErrorKind::ModuleCompilationError(
+                                    module_name.clone(),
+                                    format!("{}", err),
+                                )),
+                                import.1,
+                            );
+                            continue;
+                        }
+                    };
+
+                match compile_module(id, &tokens, &mut self.module_map, Some(file_path), false)
+                {
                    Ok(imported_module) => {
                        if imported_module.is_main {
                            state.ok::<_, Infallible>(
@@ -130,7 +159,7 @@ impl Pass for LinkerPass {
                        let module_name = imported_module.name.clone();
                        modules.insert(
                            module_name.clone(),
-                            Rc::new(RefCell::new(imported_module)),
+                            Rc::new(RefCell::new((imported_module, tokens))),
                        );
                        let imported = modules.get_mut(&module_name).unwrap();
                        modules_to_process.push(imported.clone());
@@ -138,7 +167,10 @@ impl Pass for LinkerPass {
                    }
                    Err(err) => {
                        state.ok::<_, Infallible>(
-                            Err(ErrorKind::ModuleCompilationError(module_name.clone(), err)),
+                            Err(ErrorKind::ModuleCompilationError(
+                                module_name.clone(),
+                                format!("{}", err),
+                            )),
                            import.1,
                        );
                        continue;
@@ -149,7 +181,11 @@ impl Pass for LinkerPass {

                let func_name = unsafe { path.get_unchecked(1) };

-                let Some(func) = imported.functions.iter_mut().find(|f| f.name == *func_name)
+                let Some(func) = imported
+                    .0
+                    .functions
+                    .iter_mut()
+                    .find(|f| f.name == *func_name)
                else {
                    state.ok::<_, Infallible>(
                        Err(ErrorKind::NoSuchFunctionInModule(
@@ -175,6 +211,7 @@ impl Pass for LinkerPass {
                func.is_imported = true;

                if let Some(existing) = importer_module
+                    .0
                    .functions
                    .iter()
                    .find(|f| f.name == *func_name)
@@ -191,7 +228,7 @@ impl Pass for LinkerPass {
                    }
                }

-                importer_module.functions.push(FunctionDefinition {
+                importer_module.0.functions.push(FunctionDefinition {
                    name: func.name.clone(),
                    is_pub: false,
                    is_imported: false,
@@ -204,7 +241,7 @@ impl Pass for LinkerPass {

        context.modules = modules
            .into_values()
-            .map(|v| Rc::into_inner(v).unwrap().into_inner())
+            .map(|v| Rc::into_inner(v).unwrap().into_inner().0)
            .collect();
    }
}
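Since `LinkerPass` now borrows the `ModuleMap`, its call site has to hand the map in when the pass is constructed. The following is only a sketch of what that driver code plausibly looks like; the exact module paths and the body of `perform_all_passes` are not part of this hunk.

use reid::error_raporting::ModuleMap;
use reid::mir::{linker::LinkerPass, Context};

// Hypothetical driver: run the linker pass with a borrowed ModuleMap, mirroring
// how the other passes are driven via Context::pass elsewhere in this diff.
fn run_linker(context: &mut Context, module_map: &mut ModuleMap) {
    let state = context.pass(&mut LinkerPass { module_map });
    // Pass errors accumulate on the returned state, as with TypeCheck above.
    assert!(state.errors.is_empty(), "linker pass reported errors");
}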
@@ -4,7 +4,10 @@

use std::{collections::HashMap, path::PathBuf};

-use crate::token_stream::TokenRange;
+use crate::{
+    lexer::{FullToken, Position},
+    token_stream::TokenRange,
+};

mod display;
pub mod r#impl;
@@ -14,28 +17,54 @@ pub mod typecheck;
pub mod typeinference;
pub mod typerefs;

-#[derive(Debug, Default, Clone, Copy)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Default, Hash)]
+pub struct SourceModuleId(u32);
+
+impl SourceModuleId {
+    pub fn increment(&mut self) -> SourceModuleId {
+        self.0 += 1;
+        SourceModuleId(self.0)
+    }
+}
+
+#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct Metadata {
+    pub source_module_id: SourceModuleId,
    pub range: TokenRange,
+    pub position: Option<Position>,
+}
+
+impl Metadata {
+    pub fn complete_overlap(&self, other: &Metadata) -> bool {
+        (self.range.start >= other.range.start && self.range.end <= other.range.end)
+            || (other.range.start >= self.range.start && other.range.end <= self.range.end)
+    }
}

impl std::ops::Add for Metadata {
    type Output = Metadata;

    fn add(self, rhs: Self) -> Self::Output {
+        assert!(self.source_module_id == rhs.source_module_id);
        Metadata {
            range: self.range + rhs.range,
+            source_module_id: self.source_module_id,
+            position: None,
        }
    }
}

-impl From<TokenRange> for Metadata {
-    fn from(value: TokenRange) -> Self {
-        Metadata { range: value }
+impl TokenRange {
+    pub fn as_meta(self, module: SourceModuleId) -> Metadata {
+        Metadata {
+            range: self,
+            source_module_id: module,
+            position: None,
+        }
    }
}

-#[derive(Debug, Clone, PartialEq, Eq, thiserror::Error)]
+#[derive(Debug, Clone, PartialEq, Eq, thiserror::Error, PartialOrd, Ord)]
pub enum TypeKind {
    #[error("bool")]
    Bool,
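To make the new metadata plumbing above concrete, here is an illustrative snippet, not taken from the diff; the import paths and the literal range values are assumptions, showing how `SourceModuleId`, `TokenRange::as_meta` and `Metadata::complete_overlap` fit together.

use reid::mir::SourceModuleId;
use reid::token_stream::TokenRange;

fn metadata_demo() {
    // Module ids are handed out by incrementing a counter; Display prints them as "Mod N".
    let mut counter = SourceModuleId::default();
    let module_a = counter.increment();

    // A TokenRange only becomes Metadata once it is tagged with its source module.
    let outer = TokenRange { start: 0, end: 10 }.as_meta(module_a);
    let inner = TokenRange { start: 2, end: 5 }.as_meta(module_a);

    // complete_overlap holds when either range fully contains the other; the new
    // PartialEq for pass::Error compares kinds plus this overlap check.
    assert!(outer.complete_overlap(&inner));

    // Adding metadata asserts that both pieces come from the same module.
    let _combined = outer + TokenRange { start: 10, end: 12 }.as_meta(module_a);
}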
@@ -73,7 +102,7 @@ pub enum TypeKind {
    Vague(#[from] VagueType),
}

-#[derive(Debug, Clone, Copy, PartialEq, Eq, thiserror::Error)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, thiserror::Error, PartialOrd, Ord)]
pub enum VagueType {
    #[error("Unknown")]
    Unknown,
@@ -109,7 +138,7 @@ impl TypeKind {
    }
}

-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum Literal {
    I8(i8),
    I16(i16),
@@ -126,7 +155,7 @@ pub enum Literal {
    Vague(VagueLiteral),
}

-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum VagueLiteral {
    Number(u64),
}
@@ -188,7 +217,7 @@ pub enum ReturnKind {
#[derive(Debug)]
pub struct NamedVariableRef(pub TypeKind, pub String, pub Metadata);

-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct Import(pub Vec<String>, pub Metadata);

#[derive(Debug)]
|
|||||||
impl FunctionDefinition {
|
impl FunctionDefinition {
|
||||||
fn block_meta(&self) -> Metadata {
|
fn block_meta(&self) -> Metadata {
|
||||||
match &self.kind {
|
match &self.kind {
|
||||||
FunctionDefinitionKind::Local(block, _) => block.meta,
|
FunctionDefinitionKind::Local(block, _) => block.meta.clone(),
|
||||||
FunctionDefinitionKind::Extern(_) => Metadata::default(),
|
FunctionDefinitionKind::Extern(_) => Metadata::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn signature(&self) -> Metadata {
|
fn signature(&self) -> Metadata {
|
||||||
match &self.kind {
|
match &self.kind {
|
||||||
FunctionDefinitionKind::Local(_, metadata) => *metadata,
|
FunctionDefinitionKind::Local(_, metadata) => metadata.clone(),
|
||||||
FunctionDefinitionKind::Extern(_) => Metadata::default(),
|
FunctionDefinitionKind::Extern(_) => Metadata::default(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -288,6 +317,7 @@ pub enum TypeDefinitionKind {
|
|||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Module {
|
pub struct Module {
|
||||||
pub name: String,
|
pub name: String,
|
||||||
|
pub module_id: SourceModuleId,
|
||||||
pub imports: Vec<Import>,
|
pub imports: Vec<Import>,
|
||||||
pub functions: Vec<FunctionDefinition>,
|
pub functions: Vec<FunctionDefinition>,
|
||||||
pub typedefs: Vec<TypeDefinition>,
|
pub typedefs: Vec<TypeDefinition>,
|
||||||
|
@@ -15,10 +15,10 @@ pub enum SimplePassError {
    VariableAlreadyDefined(String),
}

-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Eq, PartialOrd, Ord)]
pub struct Error<TErr: STDError> {
-    metadata: Metadata,
-    kind: TErr,
+    pub metadata: Metadata,
+    pub kind: TErr,
}

impl<TErr: STDError> std::fmt::Display for Error<TErr> {
@@ -33,6 +33,12 @@ impl<TErr: STDError> STDError for Error<TErr> {
    }
}

+impl<TErr: STDError + PartialEq> PartialEq for Error<TErr> {
+    fn eq(&self, other: &Self) -> bool {
+        self.kind == other.kind && self.metadata.complete_overlap(&other.metadata)
+    }
+}
+
#[derive(Debug)]
pub struct State<TErr: STDError> {
    pub errors: Vec<Error<TErr>>,
@@ -6,11 +6,11 @@ use crate::{mir::*, util::try_all};
use VagueType as Vague;

use super::{
-    pass::{Pass, PassState, ScopeFunction, ScopeVariable, Storage},
+    pass::{Pass, PassState, ScopeFunction, ScopeVariable},
    typerefs::TypeRefs,
};

-#[derive(thiserror::Error, Debug, Clone)]
+#[derive(thiserror::Error, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum ErrorKind {
    #[error("NULL error, should never occur!")]
    Null,
@@ -24,11 +24,31 @@ impl<'a, 'b> TokenStream<'a, 'b> {
        }
    }

+    /// Returns expected-error for the next token in-line. Useful in conjunction
+    /// with [`TokenStream::peek`]
    pub fn expected_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> {
+        let next_token = self.previous().unwrap_or(Token::Eof);
        Ok(Error::Expected(
            expected.into(),
-            self.peek().unwrap_or(Token::Eof),
-            self.get_next_position()?,
+            next_token,
+            TokenRange {
+                start: self.position - 1,
+                end: self.position - 1,
+            },
+        ))
+    }
+
+    /// Returns expected-error for the previous token that was already consumed.
+    /// Useful in conjunction with [`TokenStream::next`]
+    pub fn expecting_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> {
+        let next_token = self.peek().unwrap_or(Token::Eof);
+        Ok(Error::Expected(
+            expected.into(),
+            next_token,
+            TokenRange {
+                start: self.position,
+                end: self.position,
+            },
        ))
    }
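The two helpers above differ only in which token and range they blame. A parsing routine would plausibly use them as sketched below; the `Token` variants and the name of the consuming `expect` helper are assumptions for illustration, not part of this diff.

// Sketch only: Token::LetKeyword / Token::Identifier and the name `expect` are assumed.
fn parse_let(stream: &mut TokenStream<'_, '_>) -> Result<(), Error> {
    // Consuming a required token: on mismatch the stream reports via expecting_err.
    stream.expect(Token::LetKeyword)?;

    // Peek-based lookahead: on mismatch, build the error with expected_err.
    match stream.peek() {
        Some(Token::Identifier(_)) => Ok(()),
        _ => Err(stream.expected_err("an identifier")?),
    }
}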
@@ -38,10 +58,10 @@ impl<'a, 'b> TokenStream<'a, 'b> {
                self.position += 1;
                Ok(())
            } else {
-                Err(self.expected_err(token)?)
+                Err(self.expecting_err(token)?)
            }
        } else {
-            Err(self.expected_err(token)?)
+            Err(self.expecting_err(token)?)
        }
    }
@@ -55,6 +75,14 @@ impl<'a, 'b> TokenStream<'a, 'b> {
        value
    }

+    pub fn previous(&mut self) -> Option<Token> {
+        if (self.position as i32 - 1) < 0 {
+            None
+        } else {
+            Some(self.tokens[self.position - 1].token.clone())
+        }
+    }
+
    pub fn peek(&mut self) -> Option<Token> {
        if self.tokens.len() < self.position {
            None
@@ -147,11 +175,11 @@ impl<'a, 'b> TokenStream<'a, 'b> {
        }
    }

-    fn get_next_position(&self) -> Result<Position, Error> {
+    fn get_position(&self, offset: usize) -> Result<Position, Error> {
        if self.tokens.is_empty() {
            Err(Error::FileEmpty)
        } else {
-            let token_idx = self.position.min(self.tokens.len() - 1);
+            let token_idx = (self.position - 1).min(self.tokens.len() - 1);
            Ok(self.tokens[token_idx].position)
        }
    }
@@ -162,13 +190,6 @@ impl<'a, 'b> TokenStream<'a, 'b> {
            end: self.position,
        })
    }
-
-    pub fn get_one_token_range(&self) -> TokenRange {
-        TokenRange {
-            start: self.position - 1,
-            end: self.position,
-        }
-    }
}

impl Drop for TokenStream<'_, '_> {
@@ -181,7 +202,7 @@ impl Drop for TokenStream<'_, '_> {

/// Index-range that can be used with the original array of [`FullToken`]s to
/// retrieve the precise location of a failure.
-#[derive(Default, Clone, Copy)]
+#[derive(Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct TokenRange {
    pub start: usize,
    pub end: usize,
@@ -214,10 +235,10 @@ impl std::iter::Sum for TokenRange {
    }
}

-#[derive(thiserror::Error, Debug, Clone)]
+#[derive(thiserror::Error, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum Error {
-    #[error("Expected {} at Ln {}, Col {}, got {:?}", .0, (.2).1, (.2).0, .1)]
-    Expected(String, Token, Position),
+    #[error("Expected {} got {:?}", .0, .1)]
+    Expected(String, Token, TokenRange),
    #[error("Source file contains no tokens")]
    FileEmpty,
    /// Only use this error in situations where the error never ends up for the end-user!
@@ -227,3 +248,14 @@ pub enum Error {
    #[error("Condition failed for parse-if. Should never be returned to end-user.")]
    IfFailed,
}
+
+impl Error {
+    pub fn get_range(&self) -> Option<&TokenRange> {
+        match self {
+            Error::Expected(_, _, pos) => Some(pos),
+            Error::FileEmpty => None,
+            Error::Undefined => None,
+            Error::IfFailed => None,
+        }
+    }
+}
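With `Error::Expected` now carrying a `TokenRange` instead of a single `Position`, an error reporter can map a failure back onto the original `FullToken` slice via `get_range`. The helper below is a sketch only: the import paths, how the token slice is obtained, and `Position` deriving `Debug` are assumptions.

use reid::lexer::FullToken;
use reid::token_stream::Error;

// Illustrative helper, not part of the diff.
fn describe(err: &Error, tokens: &[FullToken]) -> String {
    match err.get_range() {
        Some(range) if !tokens.is_empty() => {
            let first = &tokens[range.start.min(tokens.len() - 1)];
            // FullToken::position is shown in this diff; Debug on Position is assumed.
            format!("{} (near {:?})", err, first.position)
        }
        _ => format!("{}", err),
    }
}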
@@ -1,74 +1,71 @@
use reid::{
    compile_module,
    mir::{self},
-    perform_all_passes,
+    parse_module, perform_all_passes,
};
use util::assert_err;

mod util;

+fn test(source: &str, name: &str) {
+    let mut map = Default::default();
+    let (id, tokens) = assert_err(parse_module(source, name, &mut map));
+    let module = assert_err(compile_module(id, &tokens, &mut map, None, true));
+
+    assert_err(perform_all_passes(
+        &mut mir::Context {
+            modules: vec![module],
+            base: Default::default(),
+        },
+        &mut map,
+    ));
+}
+
pub static ARRAY: &str = include_str!("../../reid_src/array.reid");
pub static FIBONACCI: &str = include_str!("../../reid_src/fibonacci.reid");
pub static HELLO_WORLD: &str = include_str!("../../reid_src/hello_world.reid");
pub static MUTABLE: &str = include_str!("../../reid_src/mutable.reid");
pub static STRINGS: &str = include_str!("../../reid_src/strings.reid");
+pub static ARRAYS: &str = include_str!("../../reid_src/array.reid");
+pub static STRUCTS: &str = include_str!("../../reid_src/struct.reid");
+pub static ARRAY_STRUCTS: &str = include_str!("../../reid_src/array_structs.reid");

#[test]
fn array_compiles_well() {
-    let module = assert_err(compile_module(ARRAY, "array".to_owned(), None, true));
-
-    assert_err(perform_all_passes(&mut mir::Context {
-        modules: vec![module],
-        base: Default::default(),
-    }));
+    test(ARRAY, "array");
}

#[test]
fn fibonacci_compiles_well() {
-    let module = assert_err(compile_module(
-        FIBONACCI,
-        "fibonacci".to_owned(),
-        None,
-        true,
-    ));
-
-    assert_err(perform_all_passes(&mut mir::Context {
-        modules: vec![module],
-        base: Default::default(),
-    }));
+    test(FIBONACCI, "fibonacci");
}

#[test]
fn hello_world_compiles_well() {
-    let module = assert_err(compile_module(
-        HELLO_WORLD,
-        "hello_world".to_owned(),
-        None,
-        true,
-    ));
-
-    assert_err(perform_all_passes(&mut mir::Context {
-        modules: vec![module],
-        base: Default::default(),
-    }));
+    test(HELLO_WORLD, "hello_world");
}

#[test]
fn mutable_compiles_well() {
-    let module = assert_err(compile_module(MUTABLE, "mutable".to_owned(), None, true));
-
-    assert_err(perform_all_passes(&mut mir::Context {
-        modules: vec![module],
-        base: Default::default(),
-    }));
+    test(MUTABLE, "mutable");
}

#[test]
fn strings_compiles_well() {
-    let module = assert_err(compile_module(STRINGS, "strings".to_owned(), None, true));
-
-    assert_err(perform_all_passes(&mut mir::Context {
-        modules: vec![module],
-        base: Default::default(),
-    }));
+    test(STRINGS, "strings");
+}
+
+#[test]
+fn arrays_compiles_well() {
+    test(ARRAY, "array");
+}
+
+#[test]
+fn struct_compiles_well() {
+    test(STRUCTS, "struct");
+}
+
+#[test]
+fn array_structs_compiles_well() {
+    test(ARRAY_STRUCTS, "array_structs");
}
@@ -8,18 +8,22 @@ mod util;

#[test]
fn compiles() {
-    let _ = compile_std();
+    let _ = compile_std(&mut Default::default());
}

#[test]
fn passes_all_passes() {
-    let mut std = compile_std();
+    let mut map = Default::default();
+    let (mut std, _) = compile_std(&mut map);

    // Needed to pass linker-pass
    std.is_main = true;

-    assert_err(perform_all_passes(&mut mir::Context {
+    assert_err(perform_all_passes(
+        &mut mir::Context {
            modules: vec![std],
            base: Default::default(),
-    }));
+        },
+        &mut map,
+    ));
}
@@ -5,11 +5,16 @@ struct Test {
    second: [u32; 4]
}

-fn main() -> u32 {
-    let mut value = [Test {
+fn test() -> [Test; 1] {
+    let value = [Test {
        field: 5,
-        second: [6, 3, 17, 8],
+        second: [6, 3, 4, 8],
    }];
+    return value;
+}
+
+fn main() -> u32 {
+    let mut value = test();

    let val1 = 0;
    let val2 = 1;