Add comments and whitespace to lexer

Sofia 2025-07-29 16:41:07 +03:00
parent 2dd3a5904b
commit c262418f88
2 changed files with 9 additions and 4 deletions


@@ -115,6 +115,7 @@ pub enum Token {
     Unknown(char),
     Whitespace(String),
+    Comment(String),
     Eof,
 }
@@ -194,6 +195,7 @@ impl ToString for Token {
             Token::Slash => String::from('/'),
             Token::Percent => String::from('%'),
             Token::Whitespace(val) => val.clone(),
+            Token::Comment(val) => format!("//{}", val.clone()),
             Token::Unknown(val) => val.to_string(),
         }
     }
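Aside (not part of this commit): the new ToString arm re-attaches the leading "//", so a comment token prints back as source text. A minimal sketch, assuming Token and its ToString impl are imported from the lexer module and that the stored value is the text after the slashes:

fn main() {
    // Hypothetical usage: the Comment variant stores only the body;
    // to_string() restores the "//" prefix, per the arm added above.
    let tok = Token::Comment(String::from(" a note"));
    assert_eq!(tok.to_string(), "// a note");
}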
@@ -307,10 +309,13 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Error
             }
             // Comments
             '/' if cursor.first() == Some('/') => {
+                let mut comment = String::new();
                 while !matches!(cursor.first(), Some('\n') | None) {
-                    cursor.next();
+                    if let Some(c) = cursor.next() {
+                        comment.push(c);
+                    }
                 }
-                continue;
+                Token::Comment(comment)
             }
             '\"' | '\'' => {
                 let mut value = String::new();
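Aside (not part of this commit): with the change above, a line comment is consumed up to the newline and emitted as Token::Comment instead of being skipped via continue. A minimal test sketch, assuming tokenize, Token, and FullToken are importable, that FullToken exposes a public token field (as used in the TokenStream hunks below), and that the lexer's error type implements Debug:

#[test]
fn line_comment_becomes_a_token() {
    // Hypothetical test: the lexer should now yield exactly one Comment token here.
    let tokens = tokenize("// a trailing note\n").expect("lexer error");
    let comments = tokens
        .iter()
        .filter(|t| matches!(t.token, Token::Comment(_)))
        .count();
    assert_eq!(comments, 1);
}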


@@ -180,7 +180,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
     fn previous_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
         from -= 1;
         while let Some(token) = self.tokens.get(from) {
-            if let Token::Whitespace(_) = token.token {
+            if matches!(token.token, Token::Whitespace(_) | Token::Comment(_)) {
                 from -= 1;
             } else {
                 break;
@@ -191,7 +191,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
     fn next_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
         while let Some(token) = self.tokens.get(from) {
-            if let Token::Whitespace(_) = token.token {
+            if matches!(token.token, Token::Whitespace(_) | Token::Comment(_)) {
                 from += 1;
             } else {
                 break;
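Aside (not part of this commit): previous_token and next_token now treat comments the same as whitespace when scanning for the nearest significant token, so the token list can keep exact source text while the parsing helpers see only significant tokens. A standalone sketch of that trivia-skipping idea, using a simplified, hypothetical Token enum rather than the real one:

// Simplified, hypothetical Token enum for illustration only; the real enum lives in the lexer.
#[derive(Debug, PartialEq)]
enum Token {
    Whitespace(String),
    Comment(String),
    Ident(String),
}

// Mirrors the updated TokenStream helpers: scan forward from `from`,
// skipping whitespace and comment tokens, and return the first
// significant token together with its index.
fn next_significant(tokens: &[Token], mut from: usize) -> Option<(usize, &Token)> {
    while let Some(token) = tokens.get(from) {
        if matches!(token, Token::Whitespace(_) | Token::Comment(_)) {
            from += 1;
        } else {
            return Some((from, token));
        }
    }
    None
}

fn main() {
    let tokens = vec![
        Token::Comment(String::from(" leading note")),
        Token::Whitespace(String::from("\n")),
        Token::Ident(String::from("x")),
    ];
    // The comment and the newline are skipped; the identifier at index 2 is returned.
    assert_eq!(
        next_significant(&tokens, 0),
        Some((2, &Token::Ident(String::from("x"))))
    );
}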