binary wip
@@ -17,6 +17,8 @@ pub enum ParseError {
     DuplicateIdentifier { token: Token },
+    #[error("Invalid Syntax\n\nLine: {0}, Column: {1}\nReason: {reason}", token.line, token.column)]
+    InvalidSyntax { token: Token, reason: String },
     #[error("This keyword is either not supported or not yet implemented\n\nLine: {0}, Column: {1}\nToken: {2}\n", token.line, token.column, token.token_type)]
     UnsupportedKeyword { token: Token },
     #[error("Unexpected EOF")]
     UnexpectedEOF,
 }
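Note: the new `InvalidSyntax` variant carries the offending token plus a human-readable reason. A minimal sketch of a call site, reusing the repo's `Token` and `ParseError` types; the helper name and the reason text here are illustrative, not from the repo:

fn require_semicolon(current_token: &Token) -> Result<(), ParseError> {
    // Hypothetical usage: reject a missing semicolon with the new variant.
    Err(ParseError::InvalidSyntax {
        token: current_token.clone(),
        reason: "expected `;` after expression".to_string(),
    })
}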
@@ -132,15 +134,31 @@ impl Parser {
     }
 
     fn expression(&mut self) -> Result<Option<tree_node::Expression>, ParseError> {
+        macro_rules! matches_keyword {
+            ($keyword:expr, $($pattern:pat),+) => {
+                matches!($keyword, $($pattern)|+)
+            };
+        }
+
         let Some(current_token) = self.current_token.as_ref() else {
             return Ok(None);
         };
 
         if token_matches!(current_token, TokenType::EOF) {
             return Ok(None);
         }
 
         let expr = Some(match current_token.token_type {
+            // match unsupported keywords
+            TokenType::Keyword(e)
+                if matches_keyword!(e, Keyword::Import, Keyword::Export, Keyword::Enum, Keyword::If, Keyword::Else) => {
+                return Err(ParseError::UnsupportedKeyword {
+                    token: current_token.clone(),
+                })
+            },
+
             // match declarations with a `let` keyword
             TokenType::Keyword(Keyword::Let) => self.declaration()?,
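Note: the `matches_keyword!` helper is just sugar over `matches!` with an or-pattern. A self-contained sketch showing the expansion, with a stand-in `Keyword` enum rather than the repo's:

macro_rules! matches_keyword {
    ($keyword:expr, $($pattern:pat),+) => {
        matches!($keyword, $($pattern)|+)
    };
}

enum Keyword { Import, Export, Let }

fn main() {
    // Expands to `matches!(k, Keyword::Import | Keyword::Export)`.
    let k = Keyword::Import;
    assert!(matches_keyword!(k, Keyword::Import, Keyword::Export));
    assert!(!matches_keyword!(Keyword::Let, Keyword::Import, Keyword::Export));
}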
@@ -161,7 +179,7 @@ impl Parser {
             TokenType::Symbol(Symbol::LBrace) => Expression::BlockExpression(self.block()?),
 
             // match literal expressions with a semi-colon afterwards
-            TokenType::Number(_) | TokenType::String(_) if !self_matches_peek!(self, TokenType::Symbol(s) if s.is_operator() || s.is_comparison() || s.is_logical()) => {
+            TokenType::Number(_) | TokenType::String(_) => {
                 Expression::Literal(self.literal()?)
             }
 
@@ -234,7 +252,7 @@ impl Parser {
             }
         };
 
-        todo!()
+        Ok(expr)
     }
 
     fn priority(&mut self) -> Result<Box<Expression>, ParseError> {
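Note: `priority()` appears only as a signature in this diff, so its strategy isn't visible here. If it resolves operator precedence for the new binary expressions, a precedence-climbing loop is the usual shape; a generic, hypothetical sketch with stand-in types, not the repo's implementation:

#[derive(Clone, Copy, Debug)]
enum Tok { Num(i64), Plus, Star }

#[derive(Debug)]
enum Expr { Num(i64), Bin(Box<Expr>, Tok, Box<Expr>) }

// Binding power: higher binds tighter, `None` means "not an operator".
fn bp(t: Tok) -> Option<u8> {
    match t { Tok::Plus => Some(1), Tok::Star => Some(2), _ => None }
}

// Precedence climbing: keep folding operators whose power is at least `min_bp`.
fn parse(tokens: &[Tok], pos: &mut usize, min_bp: u8) -> Expr {
    let mut lhs = match tokens[*pos] {
        Tok::Num(n) => { *pos += 1; Expr::Num(n) }
        _ => panic!("expected a number"),
    };
    while *pos < tokens.len() {
        let op = tokens[*pos];
        let Some(power) = bp(op) else { break };
        if power < min_bp { break; }
        *pos += 1;
        // `power + 1` makes operators of equal power left-associative.
        let rhs = parse(tokens, pos, power + 1);
        lhs = Expr::Bin(Box::new(lhs), op, Box::new(rhs));
    }
    lhs
}

fn main() {
    // 1 + 2 * 3 parses as Bin(1, +, Bin(2, *, 3)).
    let tokens = [Tok::Num(1), Tok::Plus, Tok::Num(2), Tok::Star, Tok::Num(3)];
    println!("{:?}", parse(&tokens, &mut 0, 0));
}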
@@ -562,4 +580,20 @@ mod tests {
 
         Ok(())
     }
 
+    #[test]
+    fn test_binary() -> Result<()> {
+        let input = r#"
+            let x = 1 + 2;
+        "#;
+
+        let tokenizer = Tokenizer::from(input.to_owned());
+        let mut parser = Parser::new(tokenizer);
+
+        let expression = parser.parse()?.unwrap();
+
+        assert_eq!("(let x = (1 + 2))", expression.to_string());
+
+        Ok(())
+    }
 }
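Note: the expected `(let x = (1 + 2))` output implies the Display impl fully parenthesizes each node. A hedged sketch of that convention with stand-in types, not the repo's actual `Expression`:

use std::fmt;

enum Expr {
    Num(i64),
    Bin(Box<Expr>, char, Box<Expr>),
}

impl fmt::Display for Expr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Expr::Num(n) => write!(f, "{n}"),
            // Every binary node prints fully parenthesized, so the tree
            // shape is unambiguous in test assertions.
            Expr::Bin(l, op, r) => write!(f, "({l} {op} {r})"),
        }
    }
}

fn main() {
    let e = Expr::Bin(Box::new(Expr::Num(1)), '+', Box::new(Expr::Num(2)));
    assert_eq!("(1 + 2)", e.to_string());
}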
@@ -469,7 +469,7 @@ impl TokenizerBuffer {
             }
             self.history.extend(tokens);
         } else if seek_to < 0 {
-            let seek_to = seek_to.abs() as usize;
+            let seek_to = seek_to.unsigned_abs() as usize;
             let mut tokens = Vec::with_capacity(seek_to);
             for _ in 0..seek_to {
                 if let Some(token) = self.history.pop_back() {
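Note: swapping `abs()` for `unsigned_abs()` also fixes an edge case: `i64::MIN.abs()` overflows (panicking in debug builds), while `unsigned_abs()` maps every value losslessly into `u64`:

fn main() {
    let seek_to: i64 = -3;
    assert_eq!(seek_to.unsigned_abs(), 3u64);
    // `abs()` cannot represent this result in i64; `unsigned_abs()` can in u64.
    assert_eq!(i64::MIN.unsigned_abs(), 9_223_372_036_854_775_808_u64);
}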
@@ -489,11 +489,13 @@ impl TokenizerBuffer {
 
     /// Adds to or removes from the History stack, allowing the user to move back and forth in the stream
     pub fn seek(&mut self, from: SeekFrom) -> Result<(), TokenizerError> {
-        Ok(match from {
+        match from {
             SeekFrom::Current(seek_to) => self.seek_from_current(seek_to)?,
             SeekFrom::End(_) => unimplemented!("SeekFrom::End will not be implemented"),
             SeekFrom::Start(_) => unimplemented!("SeekFrom::Start will not be implemented"),
-        })
+        }
+
+        Ok(())
     }
 }
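Note: restructuring `Ok(match ...)` into a plain `match` plus a trailing `Ok(())` keeps the arms side-effect-only. A stand-alone stand-in mirroring the shape, hypothetical rather than the repo's buffer:

use std::io::SeekFrom;

// Only relative seeks are supported, mirroring the seek() above.
fn seek(pos: &mut i64, from: SeekFrom) -> Result<(), String> {
    match from {
        SeekFrom::Current(delta) => *pos += delta,
        SeekFrom::End(_) => return Err("SeekFrom::End will not be implemented".into()),
        SeekFrom::Start(_) => return Err("SeekFrom::Start will not be implemented".into()),
    }

    Ok(())
}

fn main() {
    let mut pos = 5;
    seek(&mut pos, SeekFrom::Current(-2)).unwrap();
    assert_eq!(pos, 3);
}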
@@ -511,6 +513,23 @@ mod tests {
         }
     "#;
 
+    #[test]
+    fn test_seek_from_current() -> Result<()> {
+        let tokenizer = Tokenizer::from(TEST_STRING.to_owned());
+        let mut buffer = TokenizerBuffer::new(tokenizer);
+
+        let token = buffer.next()?.unwrap();
+        assert_eq!(token.token_type, TokenType::Keyword(Keyword::Fn));
+
+        buffer.seek(SeekFrom::Current(1))?;
+
+        let token = buffer.next()?.unwrap();
+
+        assert_eq!(token.token_type, TokenType::Symbol(Symbol::LParen));
+
+        Ok(())
+    }
+
     #[test]
     fn test_tokenizer_from_path_ok() {
         let tokenizer = Tokenizer::from_path(TEST_FILE);