diff --git a/examples/test.lua b/examples/test.lua index c2877c4..62667fb 100644 --- a/examples/test.lua +++ b/examples/test.lua @@ -92,4 +92,6 @@ end for k, v in (ipairs(table)) do print(k, v) -end \ No newline at end of file +end + +print(_ENV.sometable) \ No newline at end of file diff --git a/src/token_stream/lexer.rs b/src/token_stream/lexer.rs index 955958a..1e383e1 100644 --- a/src/token_stream/lexer.rs +++ b/src/token_stream/lexer.rs @@ -286,7 +286,7 @@ pub fn tokenize<T: AsRef<str>>(to_tokenize: T) -> Result<Vec<Token>, Error } } // "words" - c if c.is_alphabetic() => { + c if c.is_alphabetic() || *c == '_' => { let mut value = character.to_string(); while let Some(c) = cursor.first() { if !(c.is_ascii_alphanumeric() || c == '_') {