diff --git a/easy.reid b/easy.reid
index 71492ae..9f32e47 100644
--- a/easy.reid
+++ b/easy.reid
@@ -2,6 +2,7 @@
 import std::print;
 
+let test = 5;
 let arithmetic = 3 + 2 * 5 + 1 * 2;
 let multiplier = 5 * 2;
 
diff --git a/src/lexer.rs b/src/lexer.rs
index 1af2c52..39e7d05 100644
--- a/src/lexer.rs
+++ b/src/lexer.rs
@@ -1,10 +1,5 @@
 use std::{fmt::Debug, iter::Peekable, str::Chars};
 
-pub static EASIEST: &str = include_str!("../easiest.reid");
-// pub static EASY: &str = include_str!("../easy.reid");
-// pub static MEDIUM: &str = include_str!("../medium.reid");
-// pub static HARD: &str = include_str!("../hard.reid");
-
 static DECIMAL_NUMERICS: &[char] = &['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'];
 
 pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, String> {
@@ -50,6 +45,7 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Strin
                 // Check for keywords
                 let variant = match value.as_str() {
                     "let" => Token::LetKeyword,
+                    "import" => Token::ImportKeyword,
                     _ => Token::Identifier(value),
                 };
                 variant
@@ -69,6 +65,11 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Strin
             // Single character tokens
             '=' => Token::Equals,
             ';' => Token::Semicolon,
+            ':' => Token::Colon,
+            '+' => Token::Plus,
+            '*' => Token::Times,
+            '(' => Token::ParenOpen,
+            ')' => Token::ParenClose,
             // Invalid token
             _ => Err(format!(
                 "Unknown token '{}' at {}, {}",
@@ -110,12 +111,24 @@ pub type Position = (u32, u32);
 
 #[derive(Debug, Eq, PartialEq, Clone)]
 pub enum Token {
-    LetKeyword,
-    Semicolon,
-    Equals,
+    // Values
     Identifier(String),
     /// Number with at most one decimal point
     DecimalValue(String),
+
+    // Keywords
+    LetKeyword,
+    ImportKeyword,
+
+    // Symbols
+    Semicolon,
+    Equals,
+    Colon,
+    Plus,
+    Times,
+    ParenOpen,  // (
+    ParenClose, // )
+
     Eof,
 }
 
diff --git a/src/main.rs b/src/main.rs
index e122f59..8a557df 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,13 +1,24 @@
-use crate::{lexer::EASIEST, parser::LetStatement, token_stream::TokenStream};
+use crate::{lexer::Token, parser::TopLevelStatement, token_stream::TokenStream};
+
+pub static EASIEST: &str = include_str!("../easiest.reid");
+pub static EASY: &str = include_str!("../easy.reid");
+pub static MEDIUM: &str = include_str!("../medium.reid");
+pub static HARD: &str = include_str!("../hard.reid");
 
 mod lexer;
 mod parser;
 mod token_stream;
 
 fn main() {
-    let tokens = lexer::tokenize(EASIEST).unwrap();
+    let tokens = lexer::tokenize(EASY).unwrap();
+
+    dbg!(&tokens);
+
     let mut token_stream = TokenStream::from(&tokens);
 
-    dbg!(token_stream.parse::<LetStatement>().ok());
-    dbg!(token_stream.parse::<LetStatement>().ok());
+    while let Ok(statement) = token_stream.parse::<TopLevelStatement>() {
+        dbg!(&statement);
+    }
+
+    dbg!(token_stream.expect(Token::Eof).ok());
 }
diff --git a/src/parser.rs b/src/parser.rs
index a261b75..6195296 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -27,6 +27,22 @@ impl Parseable for Expression {
     }
 }
 
+#[derive(Debug)]
+pub enum TopLevelStatement {
+    Let(LetStatement),
+    Import(ImportStatement),
+}
+
+impl Parseable for TopLevelStatement {
+    fn parse(mut stream: TokenStream) -> Result<Self, ()> {
+        Ok(match stream.peek() {
+            Some(Token::LetKeyword) => TopLevelStatement::Let(stream.parse()?),
+            Some(Token::ImportKeyword) => TopLevelStatement::Import(stream.parse()?),
+            _ => Err(())?,
+        })
+    }
+}
+
 #[derive(Debug)]
 pub struct LetStatement(String, Expression);
 
@@ -45,3 +61,31 @@ impl Parseable for LetStatement {
         }
     }
 }
+
+#[derive(Debug)]
+pub struct ImportStatement(Vec<String>);
+
+impl Parseable for ImportStatement {
+    fn parse(mut stream: TokenStream) -> Result<Self, ()> {
+        stream.expect(Token::ImportKeyword)?;
+
+        let mut import_list = Vec::new();
+
+        if let Some(Token::Identifier(name)) = stream.next() {
+            import_list.push(name);
+            while stream.expect(Token::Colon).is_ok() && stream.expect(Token::Colon).is_ok() {
+                if let Some(Token::Identifier(name)) = stream.next() {
+                    import_list.push(name);
+                } else {
+                    Err(())?
+                }
+            }
+        } else {
+            Err(())?
+        }
+
+        stream.expect(Token::Semicolon)?;
+
+        Ok(ImportStatement(import_list))
+    }
+}
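
A note on the import parser added above: the lexer emits two separate Colon tokens for a "::" path separator, which is why ImportStatement::parse chains two expect(Token::Colon) calls per extra path segment. The following standalone sketch mirrors that loop over a plain token slice; the Token subset and the parse_import helper are hypothetical stand-ins for illustration, not the crate's actual TokenStream API.

    // Illustrative only: a minimal, self-contained version of the
    // double-colon loop in ImportStatement::parse.
    #[derive(Debug, PartialEq)]
    enum Token {
        ImportKeyword,
        Identifier(String),
        Colon,
        Semicolon,
    }

    // Parses `import a::b::c;`, returning the path segments.
    fn parse_import(tokens: &[Token]) -> Result<Vec<String>, ()> {
        let mut iter = tokens.iter().peekable();
        if iter.next() != Some(&Token::ImportKeyword) {
            return Err(());
        }

        let mut import_list = Vec::new();
        match iter.next() {
            Some(Token::Identifier(name)) => import_list.push(name.clone()),
            _ => return Err(()),
        }

        // A separator is two consecutive Colon tokens, mirroring the
        // two chained expect(Token::Colon) calls in the diff.
        while iter.peek() == Some(&&Token::Colon) {
            iter.next(); // first ':'
            if iter.next() != Some(&Token::Colon) {
                return Err(()); // a lone ':' is malformed
            }
            match iter.next() {
                Some(Token::Identifier(name)) => import_list.push(name.clone()),
                _ => return Err(()),
            }
        }

        if iter.next() != Some(&Token::Semicolon) {
            return Err(());
        }
        Ok(import_list)
    }

    fn main() {
        // `import std::print;` as tokens; prints ["std", "print"].
        let tokens = vec![
            Token::ImportKeyword,
            Token::Identifier("std".into()),
            Token::Colon,
            Token::Colon,
            Token::Identifier("print".into()),
            Token::Semicolon,
        ];
        dbg!(parse_import(&tokens).unwrap());
    }

One subtlety in the diff's version: because && short-circuits, a lone ':' is still consumed by the first successful expect(Token::Colon) before the second one fails. Assuming expect only advances the stream on a match, the parse still fails overall at the Semicolon check, just one token later than in the sketch above.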