diff --git a/easy.reid b/easy.reid index f38801e..268781d 100644 --- a/easy.reid +++ b/easy.reid @@ -8,4 +8,5 @@ let arithmetic = 3 + 2 * 5 + 1 * 2; let multiplier = 5 * 2; let result = arithmetic + multiplier * arithmetic; -print(result); \ No newline at end of file +print(result); +function(one, two); \ No newline at end of file diff --git a/src/lexer.rs b/src/lexer.rs index d9b2a0c..72b8b68 100644 --- a/src/lexer.rs +++ b/src/lexer.rs @@ -70,6 +70,7 @@ pub fn tokenize>(to_tokenize: T) -> Result, Strin '*' => Token::Times, '(' => Token::ParenOpen, ')' => Token::ParenClose, + ',' => Token::Comma, // Invalid token _ => Err(format!( "Unknown token '{}' at {}, {}", @@ -112,6 +113,7 @@ pub enum Token { Times, ParenOpen, // ( ParenClose, // ) + Comma, Eof, } diff --git a/src/parser.rs b/src/parser.rs index a4940c1..f115f26 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -1,6 +1,6 @@ use crate::{lexer::Token, token_stream::TokenStream}; -pub trait Parseable +pub trait Parse where Self: std::marker::Sized, { @@ -13,9 +13,10 @@ pub enum Expression { ContantI32(i32), BinopAdd(Box, Box), BinopMult(Box, Box), + FunctionCall(Box), } -impl Parseable for Expression { +impl Parse for Expression { fn parse(mut stream: TokenStream) -> Result { let lhs = parse_primary_expression(&mut stream)?; parse_binop_rhs(&mut stream, lhs, 0) @@ -23,14 +24,16 @@ impl Parseable for Expression { } fn parse_primary_expression(stream: &mut TokenStream) -> Result { - if let Some(token) = stream.next() { + if let Ok(exp) = stream.parse() { + Ok(Expression::FunctionCall(Box::new(exp))) + } else if let Some(token) = stream.next() { Ok(match &token { Token::Identifier(v) => Expression::VariableName(v.clone()), Token::DecimalValue(v) => Expression::ContantI32(v.parse().unwrap()), - _ => Err(())?, + _ => Err(())?, // TODO: Add error reporting! }) } else { - Err(()) + Err(()) // TODO: Add error reporting! 
} } @@ -66,7 +69,7 @@ fn parse_binop_rhs( lhs = match &token { Token::Plus => Expression::BinopAdd(Box::new(lhs), Box::new(rhs)), Token::Times => Expression::BinopMult(Box::new(lhs), Box::new(rhs)), - _ => Err(())?, + _ => Err(())?, // TODO: Add error reporting! }; } } @@ -74,18 +77,53 @@ fn parse_binop_rhs( Ok(lhs) } +#[derive(Debug, Clone)] +pub struct FunctionCallExpression(String, Vec); + +impl Parse for FunctionCallExpression { + fn parse(mut stream: TokenStream) -> Result { + if let Some(Token::Identifier(name)) = stream.next() { + stream.expect(Token::ParenOpen)?; + + let mut args = Vec::new(); + + if let Ok(exp) = stream.parse() { + args.push(exp); + + while stream.expect(Token::Comma).is_ok() { + args.push(stream.parse()?); + } + } + + stream.expect(Token::ParenClose)?; + + Ok(FunctionCallExpression(name, args)) + } else { + Err(())? // TODO: Add error reporting! + } + } +} + #[derive(Debug)] pub enum TopLevelStatement { Let(LetStatement), Import(ImportStatement), + TLExpression(Expression), } -impl Parseable for TopLevelStatement { +impl Parse for TopLevelStatement { fn parse(mut stream: TokenStream) -> Result { Ok(match stream.peek() { Some(Token::LetKeyword) => TopLevelStatement::Let(stream.parse()?), Some(Token::ImportKeyword) => TopLevelStatement::Import(stream.parse()?), - _ => Err(())?, + _ => { + if let Ok(e) = stream.parse() { + stream.expect(Token::Semicolon)?; + TopLevelStatement::TLExpression(e) + } else { + Err(())? // TODO: Add error reporting! + } + } }) } } @@ -93,7 +131,7 @@ impl Parseable for TopLevelStatement { #[derive(Debug)] pub struct LetStatement(String, Expression); -impl Parseable for LetStatement { +impl Parse for LetStatement { fn parse(mut stream: TokenStream) -> Result { stream.expect(Token::LetKeyword)?; @@ -104,7 +142,7 @@ impl Parseable for LetStatement { stream.expect(Token::Semicolon)?; Ok(LetStatement(variable, expression)) } else { - Err(()) + Err(()) // TODO: Add error reporting! 
} } } @@ -112,7 +150,7 @@ impl Parseable for LetStatement { #[derive(Debug)] pub struct ImportStatement(Vec); -impl Parseable for ImportStatement { +impl Parse for ImportStatement { fn parse(mut stream: TokenStream) -> Result { stream.expect(Token::ImportKeyword)?; @@ -124,11 +162,11 @@ impl Parseable for ImportStatement { if let Some(Token::Identifier(name)) = stream.next() { import_list.push(name); } else { - Err(())? + Err(())? // TODO: Add error reporting! } } } else { - Err(())? + Err(())? // TODO: Add error reporting! } stream.expect(Token::Semicolon)?; diff --git a/src/token_stream.rs b/src/token_stream.rs index ebb84af..4966b24 100644 --- a/src/token_stream.rs +++ b/src/token_stream.rs @@ -1,6 +1,6 @@ use crate::{ lexer::{FullToken, Token}, - parser::Parseable, + parser::Parse, }; pub struct TokenStream<'a, 'b> { @@ -49,7 +49,7 @@ impl<'a, 'b> TokenStream<'a, 'b> { } } - pub fn parse(&mut self) -> Result { + pub fn parse(&mut self) -> Result { let mut ref_pos = self.position; let position = self.position;