Add import statement parsing
This commit is contained in:
parent
6170eb0990
commit
40f3738719
@@ -2,6 +2,7 @@
|
||||
|
||||
import std::print;
|
||||
|
||||
let test = 5;
|
||||
let arithmetic = 3 + 2 * 5 + 1 * 2;
|
||||
let multiplier = 5 * 2;
|
||||
|
||||
|
29
src/lexer.rs
29
src/lexer.rs
@@ -1,10 +1,5 @@
|
||||
use std::{fmt::Debug, iter::Peekable, str::Chars};
|
||||
|
||||
pub static EASIEST: &str = include_str!("../easiest.reid");
|
||||
// pub static EASY: &str = include_str!("../easy.reid");
|
||||
// pub static MEDIUM: &str = include_str!("../medium.reid");
|
||||
// pub static HARD: &str = include_str!("../hard.reid");
|
||||
|
||||
static DECIMAL_NUMERICS: &[char] = &['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'];
|
||||
|
||||
pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, String> {
|
||||
@@ -50,6 +45,7 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Strin
|
||||
// Check for keywords
|
||||
let variant = match value.as_str() {
|
||||
"let" => Token::LetKeyword,
|
||||
"import" => Token::ImportKeyword,
|
||||
_ => Token::Identifier(value),
|
||||
};
|
||||
variant
|
||||
@@ -69,6 +65,11 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Strin
|
||||
// Single character tokens
|
||||
'=' => Token::Equals,
|
||||
';' => Token::Semicolon,
|
||||
':' => Token::Colon,
|
||||
'+' => Token::Plus,
|
||||
'*' => Token::Times,
|
||||
'(' => Token::ParenOpen,
|
||||
')' => Token::ParenClose,
|
||||
// Invalid token
|
||||
_ => Err(format!(
|
||||
"Unknown token '{}' at {}, {}",
|
||||
@@ -110,12 +111,24 @@ pub type Position = (u32, u32);
|
||||
|
||||
/// A single lexical token produced by `tokenize`.
///
/// The diff residue in this span duplicated the pre-change variants
/// (`LetKeyword`, `Semicolon`, `Equals`) next to the regrouped
/// post-change list; this is the clean post-change enum.
#[derive(Debug, Eq, PartialEq, Clone)]
pub enum Token {
    // Values
    /// A bare identifier (variable, module, or function name).
    Identifier(String),
    /// Number with at most one decimal point.
    DecimalValue(String),

    // Keywords
    LetKeyword,
    ImportKeyword,

    // Symbols
    Semicolon,
    Equals,
    Colon,
    Plus,
    Times,
    ParenOpen,  // (
    ParenClose, // )

    /// End of the token stream.
    Eof,
}
|
||||
|
||||
|
19
src/main.rs
19
src/main.rs
@@ -1,13 +1,24 @@
|
||||
use crate::{lexer::EASIEST, parser::LetStatement, token_stream::TokenStream};
|
||||
use crate::{lexer::Token, parser::TopLevelStatement, token_stream::TokenStream};
|
||||
|
||||
pub static EASIEST: &str = include_str!("../easiest.reid");
|
||||
pub static EASY: &str = include_str!("../easy.reid");
|
||||
pub static MEDIUM: &str = include_str!("../medium.reid");
|
||||
pub static HARD: &str = include_str!("../hard.reid");
|
||||
|
||||
mod lexer;
|
||||
mod parser;
|
||||
mod token_stream;
|
||||
|
||||
fn main() {
|
||||
let tokens = lexer::tokenize(EASIEST).unwrap();
|
||||
let tokens = lexer::tokenize(EASY).unwrap();
|
||||
|
||||
dbg!(&tokens);
|
||||
|
||||
let mut token_stream = TokenStream::from(&tokens);
|
||||
|
||||
dbg!(token_stream.parse::<LetStatement>().ok());
|
||||
dbg!(token_stream.parse::<LetStatement>().ok());
|
||||
while let Ok(statement) = token_stream.parse::<TopLevelStatement>() {
|
||||
dbg!(&statement);
|
||||
}
|
||||
|
||||
dbg!(token_stream.expect(Token::Eof).ok());
|
||||
}
|
||||
|
@@ -27,6 +27,22 @@ impl Parseable for Expression {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum TopLevelStatement {
|
||||
Let(LetStatement),
|
||||
Import(ImportStatement),
|
||||
}
|
||||
|
||||
impl Parseable for TopLevelStatement {
|
||||
fn parse(mut stream: TokenStream) -> Result<Self, ()> {
|
||||
Ok(match stream.peek() {
|
||||
Some(Token::LetKeyword) => TopLevelStatement::Let(stream.parse()?),
|
||||
Some(Token::ImportKeyword) => TopLevelStatement::Import(stream.parse()?),
|
||||
_ => Err(())?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct LetStatement(String, Expression);
|
||||
|
||||
@@ -45,3 +61,31 @@ impl Parseable for LetStatement {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ImportStatement(Vec<String>);
|
||||
|
||||
impl Parseable for ImportStatement {
|
||||
fn parse(mut stream: TokenStream) -> Result<Self, ()> {
|
||||
stream.expect(Token::ImportKeyword)?;
|
||||
|
||||
let mut import_list = Vec::new();
|
||||
|
||||
if let Some(Token::Identifier(name)) = stream.next() {
|
||||
import_list.push(name);
|
||||
while stream.expect(Token::Colon).is_ok() && stream.expect(Token::Colon).is_ok() {
|
||||
if let Some(Token::Identifier(name)) = stream.next() {
|
||||
import_list.push(name);
|
||||
} else {
|
||||
Err(())?
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Err(())?
|
||||
}
|
||||
|
||||
stream.expect(Token::Semicolon)?;
|
||||
|
||||
Ok(ImportStatement(import_list))
|
||||
}
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user