diff --git a/src/ast.rs b/src/ast.rs
index 4e99dd7..64e6afb 100644
--- a/src/ast.rs
+++ b/src/ast.rs
@@ -1,10 +1,13 @@
-use crate::{lexer::Token, token_stream::TokenStream};
+use crate::{
+    lexer::{Token, TokenList},
+    token_stream::{Error, TokenStream},
+};
 
 pub trait Parse
 where
     Self: std::marker::Sized,
 {
-    fn parse(stream: TokenStream) -> Result<Self, ()>;
+    fn parse(stream: TokenStream) -> Result<Self, Error>;
 }
 
 #[derive(Debug, Clone)]
@@ -28,13 +31,13 @@ pub enum Expression {
 }
 
 impl Parse for Expression {
-    fn parse(mut stream: TokenStream) -> Result<Self, ()> {
+    fn parse(mut stream: TokenStream) -> Result<Self, Error> {
         let lhs = parse_primary_expression(&mut stream)?;
         parse_binop_rhs(&mut stream, lhs, 0)
     }
 }
 
-fn parse_primary_expression(stream: &mut TokenStream) -> Result<Expression, ()> {
+fn parse_primary_expression(stream: &mut TokenStream) -> Result<Expression, Error> {
     if let Ok(exp) = stream.parse() {
         Ok(Expression::FunctionCall(Box::new(exp)))
     } else if let Ok(block) = stream.parse() {
@@ -48,10 +51,10 @@ fn parse_primary_expression(stream: &mut TokenStream) -> Result<Expression, ()>
                 stream.expect(Token::ParenClose)?;
                 exp
             }
-            _ => Err(())?, // TODO: Add error raporting!
+            _ => Err(stream.expected_err("identifier, constant or parentheses")?)?,
         })
     } else {
-        Err(()) // TODO: Add error raporting!
+        Err(stream.expected_err("expression")?)?
     }
 }
 
@@ -64,7 +67,7 @@ fn parse_binop_rhs(
     stream: &mut TokenStream,
     mut lhs: Expression,
     expr_prec: i8,
-) -> Result<Expression, ()> {
+) -> Result<Expression, Error> {
     while let Some(token) = stream.peek() {
         let curr_token_prec = token.get_token_prec();
 
@@ -89,7 +92,7 @@ fn parse_binop_rhs(
             lhs = match &token {
                 Token::Plus => Expression::Binop(Add, Box::new(lhs), Box::new(rhs)),
                 Token::Times => Expression::Binop(Mult, Box::new(lhs), Box::new(rhs)),
-                _ => Err(())?, // TODO: Add error raporting!
+                _ => Err(stream.expected_err(TokenList(vec![Token::Plus, Token::Times]))?)?,
             };
         }
     }
@@ -101,7 +104,7 @@
 pub struct FunctionCallExpression(String, Vec<Expression>);
 
 impl Parse for FunctionCallExpression {
-    fn parse(mut stream: TokenStream) -> Result<Self, ()> {
+    fn parse(mut stream: TokenStream) -> Result<Self, Error> {
         if let Some(Token::Identifier(name)) = stream.next() {
             stream.expect(Token::ParenOpen)?;
 
@@ -119,7 +122,7 @@ impl Parse for FunctionCallExpression {
 
             Ok(FunctionCallExpression(name, args))
         } else {
-            Err(())? // TODO: Add error raporting!
+            Err(stream.expected_err("identifier")?)
         }
     }
 }
@@ -128,7 +131,7 @@
 pub struct LetStatement(pub String, pub Expression);
 
 impl Parse for LetStatement {
-    fn parse(mut stream: TokenStream) -> Result<Self, ()> {
+    fn parse(mut stream: TokenStream) -> Result<Self, Error> {
         stream.expect(Token::LetKeyword)?;
 
         if let Some(Token::Identifier(variable)) = stream.next() {
@@ -138,7 +141,7 @@ impl Parse for LetStatement {
             stream.expect(Token::Semi)?;
             Ok(LetStatement(variable, expression))
         } else {
-            Err(()) // TODO: Add error raporting!
+            Err(stream.expected_err("identifier")?)
         }
     }
 }
@@ -147,7 +150,7 @@
 pub struct ImportStatement(Vec<String>);
 
 impl Parse for ImportStatement {
-    fn parse(mut stream: TokenStream) -> Result<Self, ()> {
+    fn parse(mut stream: TokenStream) -> Result<Self, Error> {
         stream.expect(Token::ImportKeyword)?;
 
         let mut import_list = Vec::new();
@@ -158,11 +161,11 @@ impl Parse for ImportStatement {
                 if let Some(Token::Identifier(name)) = stream.next() {
                     import_list.push(name);
                 } else {
-                    Err(())? // TODO: Add error raporting!
+                    Err(stream.expected_err("identifier")?)?
                 }
             }
         } else {
-            Err(())? // TODO: Add error raporting!
+            Err(stream.expected_err("identifier")?)?
         }
 
         stream.expect(Token::Semi)?;
@@ -175,7 +178,7 @@
 pub struct FunctionDefinition(pub FunctionSignature, pub Block);
 
 impl Parse for FunctionDefinition {
-    fn parse(mut stream: TokenStream) -> Result<Self, ()> {
+    fn parse(mut stream: TokenStream) -> Result<Self, Error> {
         stream.expect(Token::FnKeyword)?;
         Ok(FunctionDefinition(stream.parse()?, stream.parse()?))
     }
@@ -187,13 +190,13 @@ pub struct FunctionSignature {
 }
 
 impl Parse for FunctionSignature {
-    fn parse(mut stream: TokenStream) -> Result<Self, ()> {
+    fn parse(mut stream: TokenStream) -> Result<Self, Error> {
         if let Some(Token::Identifier(name)) = stream.next() {
             stream.expect(Token::ParenOpen)?;
             stream.expect(Token::ParenClose)?;
             Ok(FunctionSignature { name })
         } else {
-            Err(()) // TODO: Add error raporting!
+            Err(stream.expected_err("identifier")?)?
         }
     }
 }
@@ -202,7 +205,7 @@
 pub struct Block(pub Vec<BlockLevelStatement>, pub Option<Expression>);
 
 impl Parse for Block {
-    fn parse(mut stream: TokenStream) -> Result<Self, ()> {
+    fn parse(mut stream: TokenStream) -> Result<Self, Error> {
         let mut statements = Vec::new();
         let mut return_stmt = None;
         stream.expect(Token::BraceOpen)?;
@@ -228,7 +231,7 @@ pub enum BlockLevelStatement {
 }
 
 impl Parse for BlockLevelStatement {
-    fn parse(mut stream: TokenStream) -> Result<Self, ()> {
+    fn parse(mut stream: TokenStream) -> Result<Self, Error> {
         use BlockLevelStatement as Stmt;
         Ok(match stream.peek() {
             Some(Token::LetKeyword) => Stmt::Let(stream.parse()?),
@@ -244,7 +247,7 @@ impl Parse for BlockLevelStatement {
                     stream.expect(Token::Semi)?;
                     Stmt::Expression(e)
                 } else {
-                    Err(())? // TODO: Add error raporting!
+                    Err(stream.expected_err("expression")?)?
                 }
             }
         })
@@ -258,12 +261,14 @@ pub enum TopLevelStatement {
 }
 
 impl Parse for TopLevelStatement {
-    fn parse(mut stream: TokenStream) -> Result<Self, ()> {
+    fn parse(mut stream: TokenStream) -> Result<Self, Error> {
         use TopLevelStatement as Stmt;
         Ok(match stream.peek() {
             Some(Token::ImportKeyword) => Stmt::Import(stream.parse()?),
             Some(Token::FnKeyword) => Stmt::FunctionDefinition(stream.parse()?),
-            _ => Err(())?, // TODO: Add error raporting!
+            _ => {
+                Err(stream.expected_err(TokenList(vec![Token::ImportKeyword, Token::FnKeyword]))?)?
+            }
         })
     }
 }
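Note on the `Err(stream.expected_err(...)?)?` shape used throughout ast.rs: `expected_err` returns `Result<Error, Error>` rather than a bare `Error`, because constructing the error needs a source position and that lookup can itself fail (see `get_next_position` in token_stream.rs below). The inner `?` unwraps the freshly built error, `Err(..)` wraps it, and the outer `?` propagates it, which also lets a match arm of any expected type contain the expression. A minimal standalone sketch of the idiom (names hypothetical, not part of this patch):

    // Building the error message can itself fail, mirroring expected_err.
    fn build_err() -> Result<String, String> {
        Ok("expected identifier".to_string())
    }

    fn parse_flag(input: &str) -> Result<bool, String> {
        Ok(match input {
            "true" => true,
            "false" => false,
            // Inner ? unwraps the built error, Err(..) wraps it, and the
            // outer ? early-returns it; the arm then unifies with `bool`.
            _ => Err(build_err()?)?,
        })
    }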
diff --git a/src/lexer.rs b/src/lexer.rs
index b16527c..c180c8c 100644
--- a/src/lexer.rs
+++ b/src/lexer.rs
@@ -54,6 +54,30 @@ impl Token {
     }
 }
 
+impl Into<String> for Token {
+    fn into(self) -> String {
+        format!("{:?}", self)
+    }
+}
+
+pub struct TokenList(pub Vec<Token>);
+
+impl Into<String> for TokenList {
+    fn into(self) -> String {
+        self.0
+            .iter()
+            .map(|tok| tok.clone().into())
+            .collect::<Vec<String>>()
+            .join(" or ")
+    }
+}
+
+impl Into<TokenList> for &[Token] {
+    fn into(self) -> TokenList {
+        TokenList(self.to_owned())
+    }
+}
+
 #[derive(Clone)]
 pub struct FullToken {
     pub token: Token,
@@ -79,11 +103,11 @@ pub struct Cursor<'a> {
 impl<'a> Cursor<'a> {
     fn next(&mut self) -> Option<char> {
         let next = self.char_stream.next();
-        self.position.0 += 1;
         if let Some('\n') = next {
             self.position.1 += 1;
             self.position.0 = 0;
         }
+        self.position.0 += 1;
         next
     }
 
diff --git a/src/lib.rs b/src/lib.rs
index 7acf0f8..3877db9 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -16,6 +16,8 @@ mod token_stream;
 pub enum ReidError {
     #[error(transparent)]
     LexerError(#[from] lexer::Error),
+    #[error(transparent)]
+    ParserError(#[from] token_stream::Error),
 }
 
 pub fn compile(source: &str) -> Result<String, ReidError> {
@@ -28,7 +30,7 @@ pub fn compile(source: &str) -> Result<String, ReidError> {
     let mut statements = Vec::new();
 
     while !matches!(token_stream.peek().unwrap_or(Token::Eof), Token::Eof) {
-        let statement = token_stream.parse::<TopLevelStatement>().unwrap();
+        let statement = token_stream.parse::<TopLevelStatement>()?;
         dbg!(&statement);
         statements.push(statement);
     }
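Note on the new `Into` impls in lexer.rs: implementing `Into<String>` directly works, but idiomatic Rust (and clippy's `from_over_into` lint) prefers implementing `From`, which provides the matching `Into` for free through the standard blanket impl. A sketch of the equivalent shape, assuming nothing else constrains these impls (not part of this patch):

    #[derive(Debug, Clone)]
    pub enum Token {
        Plus,
        Times,
    } // stand-in for the real Token enum

    impl From<Token> for String {
        fn from(token: Token) -> String {
            format!("{:?}", token)
        }
    }

    pub struct TokenList(pub Vec<Token>);

    impl From<TokenList> for String {
        fn from(list: TokenList) -> String {
            list.0
                .iter()
                .map(|tok| String::from(tok.clone()))
                .collect::<Vec<String>>()
                .join(" or ")
        }
    }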
diff --git a/src/token_stream.rs b/src/token_stream.rs
index e583741..b86658d 100644
--- a/src/token_stream.rs
+++ b/src/token_stream.rs
@@ -1,6 +1,6 @@
 use crate::{
     ast::Parse,
-    lexer::{FullToken, Token},
+    lexer::{FullToken, Position, Token},
 };
 
 pub struct TokenStream<'a, 'b> {
@@ -18,16 +18,24 @@ impl<'a, 'b> TokenStream<'a, 'b> {
         }
     }
 
-    pub fn expect(&mut self, token: Token) -> Result<(), ()> {
+    pub fn expected_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> {
+        Ok(Error::Expected(
+            expected.into(),
+            self.peek().unwrap_or(Token::Eof),
+            self.get_next_position()?,
+        ))
+    }
+
+    pub fn expect(&mut self, token: Token) -> Result<(), Error> {
         if let Some(peeked) = self.peek() {
             if token == peeked {
                 self.position += 1;
                 Ok(())
             } else {
-                Err(())
+                Err(self.expected_err(token)?)
             }
         } else {
-            Err(())
+            Err(self.expected_err(token)?)
         }
     }
 
@@ -49,7 +57,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
         }
     }
 
-    pub fn parse<T: Parse>(&mut self) -> Result<T, ()> {
+    pub fn parse<T: Parse>(&mut self) -> Result<T, Error> {
         let mut ref_pos = self.position;
 
         let position = self.position;
@@ -67,6 +75,15 @@ impl<'a, 'b> TokenStream<'a, 'b> {
             Err(e) => Err(e),
         }
     }
+
+    fn get_next_position(&self) -> Result<Position, Error> {
+        if self.tokens.is_empty() {
+            Err(Error::FileEmpty)
+        } else {
+            let token_idx = self.position.min(self.tokens.len() - 1);
+            Ok(self.tokens[token_idx].position)
+        }
+    }
 }
 
 impl Drop for TokenStream<'_, '_> {
@@ -76,3 +93,11 @@ impl Drop for TokenStream<'_, '_> {
         }
     }
 }
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+    #[error("Expected {} at Ln {}, Col {}, got {:?}", .0, (.2).1, (.2).0, .1)]
+    Expected(String, Token, Position),
+    #[error("Source file contains no tokens")]
+    FileEmpty,
+}
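With `ParserError` wired into `ReidError`, a parse failure now surfaces as a readable diagnostic instead of a unit error. A hedged sketch of the caller-visible behavior (assumes the crate is consumed as `reid` with `compile` as the entry point shown above; the exact line/column values depend on the lexer's position bookkeeping):

    fn main() {
        // `let` is not a valid top-level statement, so TopLevelStatement::parse
        // should hit its expected_err branch.
        match reid::compile("let x = 5;") {
            Ok(output) => println!("{}", output),
            // Expected to render roughly as:
            //   "Expected ImportKeyword or FnKeyword at Ln .., Col .., got LetKeyword"
            Err(e) => eprintln!("{}", e),
        }
    }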