Add errors for lexer and parser

Sofia 2023-08-02 19:58:10 +03:00
parent c7f11e5091
commit 9b7a31c988
4 changed files with 86 additions and 30 deletions
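In short, the parser's bare Result<_, ()> errors are replaced with a structured token_stream::Error that records what was expected, which token was found instead, and its source position; the compile entry point then wraps that type in a new ReidError::ParserError variant so it reaches callers. A hypothetical caller sketch (not part of the commit; the message wording comes from the Display format added in the token_stream module, and the exact position values are illustrative):

fn main() {
    // Missing closing parenthesis, so FunctionSignature::parse fails
    // at stream.expect(Token::ParenClose).
    match compile("fn main(") {
        Ok(ir) => println!("{}", ir),
        // Prints something like:
        //   compile error: Expected ParenClose at Ln 0, Col 8, got Eof
        Err(err) => eprintln!("compile error: {}", err),
    }
}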

View File

@@ -1,10 +1,13 @@
use crate::{lexer::Token, token_stream::TokenStream};
use crate::{
lexer::{Token, TokenList},
token_stream::{Error, TokenStream},
};
pub trait Parse
where
Self: std::marker::Sized,
{
fn parse(stream: TokenStream) -> Result<Self, ()>;
fn parse(stream: TokenStream) -> Result<Self, Error>;
}
#[derive(Debug, Clone)]
@@ -28,13 +31,13 @@ pub enum Expression {
}
impl Parse for Expression {
fn parse(mut stream: TokenStream) -> Result<Expression, ()> {
fn parse(mut stream: TokenStream) -> Result<Expression, Error> {
let lhs = parse_primary_expression(&mut stream)?;
parse_binop_rhs(&mut stream, lhs, 0)
}
}
fn parse_primary_expression(stream: &mut TokenStream) -> Result<Expression, ()> {
fn parse_primary_expression(stream: &mut TokenStream) -> Result<Expression, Error> {
if let Ok(exp) = stream.parse() {
Ok(Expression::FunctionCall(Box::new(exp)))
} else if let Ok(block) = stream.parse() {
@@ -48,10 +51,10 @@ fn parse_primary_expression(stream: &mut TokenStream) -> Result<Expression, ()>
stream.expect(Token::ParenClose)?;
exp
}
_ => Err(())?, // TODO: Add error raporting!
_ => Err(stream.expected_err("identifier, constant or parentheses")?)?,
})
} else {
Err(()) // TODO: Add error raporting!
Err(stream.expected_err("expression")?)?
}
}
@@ -64,7 +67,7 @@ fn parse_binop_rhs(
stream: &mut TokenStream,
mut lhs: Expression,
expr_prec: i8,
) -> Result<Expression, ()> {
) -> Result<Expression, Error> {
while let Some(token) = stream.peek() {
let curr_token_prec = token.get_token_prec();
@@ -89,7 +92,7 @@ fn parse_binop_rhs(
lhs = match &token {
Token::Plus => Expression::Binop(Add, Box::new(lhs), Box::new(rhs)),
Token::Times => Expression::Binop(Mult, Box::new(lhs), Box::new(rhs)),
_ => Err(())?, // TODO: Add error raporting!
_ => Err(stream.expected_err(TokenList(vec![Token::Plus, Token::Times]))?)?, // TODO: Add error raporting!
};
}
}
@@ -101,7 +104,7 @@ fn parse_binop_rhs(
pub struct FunctionCallExpression(String, Vec<Expression>);
impl Parse for FunctionCallExpression {
fn parse(mut stream: TokenStream) -> Result<Self, ()> {
fn parse(mut stream: TokenStream) -> Result<Self, Error> {
if let Some(Token::Identifier(name)) = stream.next() {
stream.expect(Token::ParenOpen)?;
@@ -119,7 +122,7 @@ impl Parse for FunctionCallExpression {
Ok(FunctionCallExpression(name, args))
} else {
Err(())? // TODO: Add error raporting!
Err(stream.expected_err("identifier")?)
}
}
}
@@ -128,7 +131,7 @@ impl Parse for FunctionCallExpression {
pub struct LetStatement(pub String, pub Expression);
impl Parse for LetStatement {
fn parse(mut stream: TokenStream) -> Result<LetStatement, ()> {
fn parse(mut stream: TokenStream) -> Result<LetStatement, Error> {
stream.expect(Token::LetKeyword)?;
if let Some(Token::Identifier(variable)) = stream.next() {
@@ -138,7 +141,7 @@ impl Parse for LetStatement {
stream.expect(Token::Semi)?;
Ok(LetStatement(variable, expression))
} else {
Err(()) // TODO: Add error raporting!
Err(stream.expected_err("identifier")?)
}
}
}
@@ -147,7 +150,7 @@ impl Parse for LetStatement {
pub struct ImportStatement(Vec<String>);
impl Parse for ImportStatement {
fn parse(mut stream: TokenStream) -> Result<Self, ()> {
fn parse(mut stream: TokenStream) -> Result<Self, Error> {
stream.expect(Token::ImportKeyword)?;
let mut import_list = Vec::new();
@@ -158,11 +161,11 @@ impl Parse for ImportStatement {
if let Some(Token::Identifier(name)) = stream.next() {
import_list.push(name);
} else {
Err(())? // TODO: Add error raporting!
Err(stream.expected_err("identifier")?)?
}
}
} else {
Err(())? // TODO: Add error raporting!
Err(stream.expected_err("identifier")?)?
}
stream.expect(Token::Semi)?;
@@ -175,7 +178,7 @@ impl Parse for ImportStatement {
pub struct FunctionDefinition(pub FunctionSignature, pub Block);
impl Parse for FunctionDefinition {
fn parse(mut stream: TokenStream) -> Result<Self, ()> {
fn parse(mut stream: TokenStream) -> Result<Self, Error> {
stream.expect(Token::FnKeyword)?;
Ok(FunctionDefinition(stream.parse()?, stream.parse()?))
}
@@ -187,13 +190,13 @@ pub struct FunctionSignature {
}
impl Parse for FunctionSignature {
fn parse(mut stream: TokenStream) -> Result<Self, ()> {
fn parse(mut stream: TokenStream) -> Result<Self, Error> {
if let Some(Token::Identifier(name)) = stream.next() {
stream.expect(Token::ParenOpen)?;
stream.expect(Token::ParenClose)?;
Ok(FunctionSignature { name })
} else {
Err(()) // TODO: Add error raporting!
Err(stream.expected_err("identifier")?)?
}
}
}
@@ -202,7 +205,7 @@ impl Parse for FunctionSignature {
pub struct Block(pub Vec<BlockLevelStatement>, pub Option<Expression>);
impl Parse for Block {
fn parse(mut stream: TokenStream) -> Result<Self, ()> {
fn parse(mut stream: TokenStream) -> Result<Self, Error> {
let mut statements = Vec::new();
let mut return_stmt = None;
stream.expect(Token::BraceOpen)?;
@@ -228,7 +231,7 @@ pub enum BlockLevelStatement {
}
impl Parse for BlockLevelStatement {
fn parse(mut stream: TokenStream) -> Result<Self, ()> {
fn parse(mut stream: TokenStream) -> Result<Self, Error> {
use BlockLevelStatement as Stmt;
Ok(match stream.peek() {
Some(Token::LetKeyword) => Stmt::Let(stream.parse()?),
@@ -244,7 +247,7 @@ impl Parse for BlockLevelStatement {
stream.expect(Token::Semi)?;
Stmt::Expression(e)
} else {
Err(())? // TODO: Add error raporting!
Err(stream.expected_err("expression")?)?
}
}
})
@@ -258,12 +261,14 @@ pub enum TopLevelStatement {
}
impl Parse for TopLevelStatement {
fn parse(mut stream: TokenStream) -> Result<Self, ()> {
fn parse(mut stream: TokenStream) -> Result<Self, Error> {
use TopLevelStatement as Stmt;
Ok(match stream.peek() {
Some(Token::ImportKeyword) => Stmt::Import(stream.parse()?),
Some(Token::FnKeyword) => Stmt::FunctionDefinition(stream.parse()?),
_ => Err(())?, // TODO: Add error raporting!
_ => {
Err(stream.expected_err(TokenList(vec![Token::ImportKeyword, Token::FnKeyword]))?)?
}
})
}
}
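A note on the recurring Err(stream.expected_err(...)?)? pattern above: expected_err returns Result<Error, Error> because constructing the error needs a source position, which does not exist when the token list is empty. The inner ? therefore unwraps the freshly built Expected error (or propagates FileEmpty), and the outer Err(...)? returns it from the parse function. A stripped-down sketch of the same idiom with stand-in types (simplified names, not the crate's real API):

#[derive(Debug)]
enum Error {
    Expected(String),
    FileEmpty,
}

struct Stream {
    has_tokens: bool,
}

impl Stream {
    // Mirrors TokenStream::expected_err: building the error can itself fail.
    fn expected_err(&self, what: &str) -> Result<Error, Error> {
        if self.has_tokens {
            Ok(Error::Expected(what.to_string()))
        } else {
            Err(Error::FileEmpty)
        }
    }
}

fn parse_identifier(stream: &Stream) -> Result<String, Error> {
    // Inner `?` unwraps the built error (or early-returns FileEmpty);
    // the outer Err(..)? then returns that error from this parser.
    Err(stream.expected_err("identifier")?)?
}

fn main() {
    let stream = Stream { has_tokens: true };
    println!("{:?}", parse_identifier(&stream)); // Err(Expected("identifier"))
}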

View File

@@ -54,6 +54,30 @@ impl Token {
}
}
impl Into<String> for Token {
fn into(self) -> String {
format!("{:?}", self)
}
}
pub struct TokenList(pub Vec<Token>);
impl Into<String> for TokenList {
fn into(self) -> String {
self.0
.iter()
.map(|tok| tok.clone().into())
.collect::<Vec<String>>()
.join(" or ")
}
}
impl Into<TokenList> for &[Token] {
fn into(self) -> TokenList {
TokenList(self.to_owned())
}
}
#[derive(Clone)]
pub struct FullToken {
pub token: Token,
@@ -79,11 +103,11 @@ pub struct Cursor<'a> {
impl<'a> Cursor<'a> {
fn next(&mut self) -> Option<char> {
let next = self.char_stream.next();
self.position.0 += 1;
if let Some('\n') = next {
self.position.1 += 1;
self.position.0 = 0;
}
self.position.0 += 1;
next
}
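The TokenList wrapper added above exists only so a set of acceptable tokens can be handed to expected_err as one human-readable string: each Token is rendered through its Debug representation and the pieces are joined with " or ". A hypothetical unit-test sketch (assuming it lives in the same module as Token and TokenList):

#[test]
fn token_list_into_string() {
    let expected: String = TokenList(vec![Token::Plus, Token::Times]).into();
    assert_eq!(expected, "Plus or Times"); // Debug names joined with " or "
}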

View File

@@ -16,6 +16,8 @@ mod token_stream;
pub enum ReidError {
#[error(transparent)]
LexerError(#[from] lexer::Error),
#[error(transparent)]
ParserError(#[from] token_stream::Error),
}
pub fn compile(source: &str) -> Result<String, ReidError> {
@@ -28,7 +30,7 @@ pub fn compile(source: &str) -> Result<String, ReidError> {
let mut statements = Vec::new();
while !matches!(token_stream.peek().unwrap_or(Token::Eof), Token::Eof) {
let statement = token_stream.parse::<TopLevelStatement>().unwrap();
let statement = token_stream.parse::<TopLevelStatement>()?;
dbg!(&statement);
statements.push(statement);
}
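Swapping .unwrap() for ? here works because the new ParserError variant is marked #[from] token_stream::Error: thiserror generates the corresponding From impl, and #[error(transparent)] forwards the inner error's Display message unchanged. Roughly what the derive expands to (a simplified hand-written sketch, not the actual generated code):

impl From<token_stream::Error> for ReidError {
    fn from(err: token_stream::Error) -> Self {
        ReidError::ParserError(err)
    }
}

// ...which is what token_stream.parse::<TopLevelStatement>()? relies on to
// convert a parser Error into a ReidError before returning it from compile.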

View File

@@ -1,6 +1,6 @@
use crate::{
ast::Parse,
lexer::{FullToken, Token},
lexer::{FullToken, Position, Token},
};
pub struct TokenStream<'a, 'b> {
@@ -18,16 +18,24 @@ impl<'a, 'b> TokenStream<'a, 'b> {
}
}
pub fn expect(&mut self, token: Token) -> Result<(), ()> {
pub fn expected_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> {
Ok(Error::Expected(
expected.into(),
self.peek().unwrap_or(Token::Eof),
self.get_next_position()?,
))
}
pub fn expect(&mut self, token: Token) -> Result<(), Error> {
if let Some(peeked) = self.peek() {
if token == peeked {
self.position += 1;
Ok(())
} else {
Err(())
Err(self.expected_err(token)?)
}
} else {
Err(())
Err(self.expected_err(token)?)
}
}
@@ -49,7 +57,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
}
}
pub fn parse<T: Parse>(&mut self) -> Result<T, ()> {
pub fn parse<T: Parse>(&mut self) -> Result<T, Error> {
let mut ref_pos = self.position;
let position = self.position;
@@ -67,6 +75,15 @@ impl<'a, 'b> TokenStream<'a, 'b> {
Err(e) => Err(e),
}
}
fn get_next_position(&self) -> Result<Position, Error> {
if self.tokens.is_empty() {
Err(Error::FileEmpty)
} else {
let token_idx = self.position.min(self.tokens.len() - 1);
Ok(self.tokens[token_idx].position)
}
}
}
impl Drop for TokenStream<'_, '_> {
@@ -76,3 +93,11 @@ impl Drop for TokenStream<'_, '_> {
}
}
}
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Expected {} at Ln {}, Col {}, got {:?}", .0, (.2).1, (.2).0, .1)]
Expected(String, Token, Position),
#[error("Source file contains no tokens")]
FileEmpty,
}
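One detail worth noting: get_next_position clamps the index with .min(self.tokens.len() - 1), so an error raised after the parser has run past the last token (peek() returning None and Token::Eof being substituted) is still reported at the final real token's position instead of indexing out of bounds; only a file with no tokens at all yields FileEmpty. A stand-in sketch of that clamping (made-up positions, assuming Position is the (column, line) pair maintained by the lexer's Cursor):

fn main() {
    let positions = vec![(1, 0), (5, 0), (9, 0)]; // one (col, line) per token
    let stream_position = 7; // parser has advanced past the end
    let token_idx = stream_position.min(positions.len() - 1);
    assert_eq!(positions[token_idx], (9, 0)); // error points at the last token
}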