diff --git a/reid/src/ast/lexer.rs b/reid/src/ast/lexer.rs
index 9d14c22..b300780 100644
--- a/reid/src/ast/lexer.rs
+++ b/reid/src/ast/lexer.rs
@@ -115,6 +115,7 @@ pub enum Token {
 
     Unknown(char),
     Whitespace(String),
+    Comment(String),
 
     Eof,
 }
@@ -194,6 +195,7 @@ impl ToString for Token {
             Token::Slash => String::from('/'),
             Token::Percent => String::from('%'),
             Token::Whitespace(val) => val.clone(),
+            Token::Comment(val) => format!("//{}", val.clone()),
             Token::Unknown(val) => val.to_string(),
         }
     }
@@ -307,10 +309,13 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Error
             }
             // Comments
            '/' if cursor.first() == Some('/') => {
+                let mut comment = String::new();
                 while !matches!(cursor.first(), Some('\n') | None) {
-                    cursor.next();
+                    if let Some(c) = cursor.next() {
+                        comment.push(c);
+                    }
                 }
-                continue;
+                Token::Comment(comment)
             }
             '\"' | '\'' => {
                 let mut value = String::new();
diff --git a/reid/src/ast/token_stream.rs b/reid/src/ast/token_stream.rs
index 8c810c8..b3c2516 100644
--- a/reid/src/ast/token_stream.rs
+++ b/reid/src/ast/token_stream.rs
@@ -180,7 +180,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
     fn previous_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
         from -= 1;
         while let Some(token) = self.tokens.get(from) {
-            if let Token::Whitespace(_) = token.token {
+            if matches!(token.token, Token::Whitespace(_) | Token::Comment(_)) {
                 from -= 1;
             } else {
                 break;
@@ -191,7 +191,7 @@
 
     fn next_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
         while let Some(token) = self.tokens.get(from) {
-            if let Token::Whitespace(_) = token.token {
+            if matches!(token.token, Token::Whitespace(_) | Token::Comment(_)) {
                 from += 1;
             } else {
                 break;