diff --git a/reid/src/lexer.rs b/reid/src/lexer.rs
index 192077c..c1dd2be 100644
--- a/reid/src/lexer.rs
+++ b/reid/src/lexer.rs
@@ -134,6 +134,9 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Error>
     let mut tokens = Vec::new();
 
     while let Some(character) = &cursor.next() {
+        // Save "current" token first character position
+        let position = (cursor.position.0 - 1, cursor.position.1);
+
         let variant = match character {
             // Whitespace
             w if w.is_whitespace() => continue,
@@ -204,7 +207,7 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Error>
 
         tokens.push(FullToken {
            token: variant,
-            position: cursor.position,
+            position,
        });
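The key detail is that `cursor.next()` has already advanced past the token's first character by the time the loop body runs, so `cursor.position.0 - 1` presumably points back at the column where the token began; the second hunk then pushes that saved `position` instead of wherever the cursor happens to sit after the whole token has been consumed. Below is a minimal, self-contained sketch of that idea. The `Cursor` type, its `peek` helper, the `(column, line)` tuple order, and the string-based token are illustrative assumptions, not the actual reid types:

```rust
// Sketch only: illustrative stand-ins for the real lexer types.
#[derive(Debug)]
struct FullToken {
    token: String,
    position: (usize, usize), // assumed to be (column, line)
}

struct Cursor<'a> {
    chars: std::str::Chars<'a>,
    position: (usize, usize),
}

impl<'a> Cursor<'a> {
    fn next(&mut self) -> Option<char> {
        let c = self.chars.next()?;
        if c == '\n' {
            // Assumed layout: .0 = column, .1 = line.
            self.position = (1, self.position.1 + 1);
        } else {
            self.position.0 += 1;
        }
        Some(c)
    }

    fn peek(&self) -> Option<char> {
        // Chars is cheap to clone, so peeking does not advance the cursor.
        self.chars.clone().next()
    }
}

fn tokenize(source: &str) -> Vec<FullToken> {
    let mut cursor = Cursor {
        chars: source.chars(),
        position: (1, 1),
    };
    let mut tokens = Vec::new();

    while let Some(character) = cursor.next() {
        // `next()` already advanced past `character`, so subtract one to get
        // the column of the token's first character.
        let position = (cursor.position.0 - 1, cursor.position.1);

        if character.is_whitespace() {
            continue;
        }

        // Consume the rest of an identifier. The cursor keeps moving, but
        // `position` still points at where the token started.
        let mut text = character.to_string();
        while let Some(c) = cursor.peek() {
            if !c.is_alphanumeric() {
                break;
            }
            cursor.next();
            text.push(c);
        }

        tokens.push(FullToken { token: text, position });
    }

    tokens
}

fn main() {
    for token in tokenize("let hello") {
        println!("{:?}", token);
    }
    // Prints position (1, 1) for "let" and (5, 1) for "hello": each token
    // reports its first character, not where the cursor ended up.
}
```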