This commit is contained in:
2024-11-20 01:16:46 -07:00
parent 66064a21d7
commit 7cff659275
4 changed files with 236 additions and 1 deletions

View File

@@ -1,4 +1,4 @@
mod token;
pub mod token;
use std::{
fs::File,
@@ -167,6 +167,18 @@ where
}
}
/// Returns the next token without consuming it.
///
/// Saves the reader's stream position and the line/column counters,
/// reads one token via `next_token`, then restores the saved state so
/// a subsequent `next_token` call yields the same token again.
///
/// # Errors
///
/// Propagates any `TokenizerError` from seeking or tokenizing. Unlike
/// a naive implementation, the reader position and line/column
/// counters are restored even when `next_token` fails, so a failed
/// peek does not leave the tokenizer advanced past the bad input.
pub fn peek_next(&mut self) -> Result<Option<Token>, TokenizerError> {
    let saved_pos = self.reader.stream_position()?;
    let saved_column = self.column.clone();
    let saved_line = self.line.clone();
    // Capture the Result instead of using `?` here: state restoration
    // below must happen on BOTH the Ok and Err paths.
    let result = self.next_token();
    self.reader.seek(SeekFrom::Start(saved_pos))?;
    self.column = saved_column;
    self.line = saved_line;
    result
}
/// Tokenizes a symbol
fn tokenize_symbol(&mut self, first_symbol: char) -> Result<Token, TokenizerError> {
/// Helper macro to create a symbol token
@@ -668,4 +680,32 @@ This is a skippable line"#,
Ok(())
}
#[test]
fn test_peek_next() -> Result<()> {
    // Peeking must expose the upcoming token while leaving the
    // tokenizer's position state untouched.
    let mut tokenizer = Tokenizer::from(TEST_STRING.to_owned());
    let start_column = tokenizer.column.clone();
    let start_line = tokenizer.line.clone();

    // Peek: the token is visible, but column/line are unchanged.
    let peeked = tokenizer.peek_next()?;
    assert_eq!(peeked.unwrap().token_type, TokenType::Keyword(Keyword::Fn));
    assert_eq!(tokenizer.column, start_column);
    assert_eq!(tokenizer.line, start_line);

    // Consume: the same token is returned, and state advances.
    let consumed = tokenizer.next_token()?;
    assert_eq!(consumed.unwrap().token_type, TokenType::Keyword(Keyword::Fn));
    assert_ne!(tokenizer.column, start_column);
    assert_ne!(tokenizer.line, start_line);

    Ok(())
}
}