binary expressions are working fully now
81  Cargo.lock  generated
@@ -2,6 +2,21 @@
 # It is not intended for manual editing.
 version = 3
 
+[[package]]
+name = "addr2line"
+version = "0.24.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler2"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
+
 [[package]]
 name = "anstream"
 version = "0.6.18"
@@ -56,6 +71,30 @@ name = "anyhow"
|
|||||||
version = "1.0.93"
|
version = "1.0.93"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"
|
checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"
|
||||||
|
dependencies = [
|
||||||
|
"backtrace",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "backtrace"
|
||||||
|
version = "0.3.74"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a"
|
||||||
|
dependencies = [
|
||||||
|
"addr2line",
|
||||||
|
"cfg-if",
|
||||||
|
"libc",
|
||||||
|
"miniz_oxide",
|
||||||
|
"object",
|
||||||
|
"rustc-demangle",
|
||||||
|
"windows-targets",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cfg-if"
|
||||||
|
version = "1.0.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap"
|
name = "clap"
|
||||||
@@ -103,6 +142,12 @@ version = "1.0.3"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
|
checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "gimli"
|
||||||
|
version = "0.31.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "heck"
|
name = "heck"
|
||||||
version = "0.5.0"
|
version = "0.5.0"
|
||||||
@@ -115,6 +160,36 @@ version = "1.70.1"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
|
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "libc"
|
||||||
|
version = "0.2.164"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "memchr"
|
||||||
|
version = "2.7.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "miniz_oxide"
|
||||||
|
version = "0.8.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1"
|
||||||
|
dependencies = [
|
||||||
|
"adler2",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "object"
|
||||||
|
version = "0.36.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e"
|
||||||
|
dependencies = [
|
||||||
|
"memchr",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "proc-macro2"
|
name = "proc-macro2"
|
||||||
version = "1.0.89"
|
version = "1.0.89"
|
||||||
@@ -133,6 +208,12 @@ dependencies = [
 "proc-macro2",
 ]
 
+[[package]]
+name = "rustc-demangle"
+version = "0.1.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
+
 [[package]]
 name = "stationlang"
 version = "0.1.0"
@@ -11,4 +11,4 @@ clap = { version = "^4.5", features = ["derive"] }
|
|||||||
thiserror = { version = "^2.0" }
|
thiserror = { version = "^2.0" }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
anyhow = { version = "^1.0" }
|
anyhow = { version = "^1.0", features = ["backtrace"] }
|
||||||
|
|||||||
3  rust-toolchain.toml  Normal file
@@ -0,0 +1,3 @@
+[toolchain]
+channel = "nightly"
+components = ["rustfmt", "clippy"]
@@ -1,3 +1,5 @@
+#![feature(error_generic_member_access)]
+
 mod parser;
 mod tokenizer;
 
@@ -19,13 +21,13 @@ enum StationlangError {
 #[derive(Parser, Debug)]
 #[command(version, about, long_about = None)]
 struct Args {
-    /// What file should be compiled. If not set, input will be read from stdin
+    /// What file should be compiled. If not set, input will be read from stdin.
     #[arg(short, long)]
     input_file: Option<String>,
-    /// The default stack size for the program
+    /// The stack size for the compiled program. Compilation will fail if the compiler detects that the program will exceed this stack size.
     #[arg(short, long, default_value_t = 512)]
     stack_size: usize,
-    /// The output file for the compiled program. If not set, output will go to stdout
+    /// The output file for the compiled program. If not set, output will go to stdout.
     #[arg(short, long)]
     output_file: Option<String>,
 }
@@ -4,7 +4,10 @@ use crate::tokenizer::{
     token::{Keyword, Symbol, Token, TokenType},
     Tokenizer, TokenizerBuffer, TokenizerError,
 };
-use std::io::SeekFrom;
+use std::{
+    backtrace::{self, Backtrace},
+    io::SeekFrom,
+};
 use thiserror::Error;
 use tree_node::*;
 
@@ -13,7 +16,11 @@ pub enum ParseError {
     #[error(transparent)]
     TokenizerError(#[from] TokenizerError),
     #[error("Unexpected token\n\nLine: {0}, Column: {1}\nToken: {2}\n", token.line, token.column, token.token_type)]
-    UnexpectedToken { token: Token },
+    UnexpectedToken {
+        token: Token,
+        #[backtrace]
+        backtrace: std::backtrace::Backtrace,
+    },
     #[error("Duplicated Identifer\n\nLine: {0}, Column: {1}\nToken: {2}\n", token.line, token.column, token.token_type)]
     DuplicateIdentifier { token: Token },
     #[error("Invalid Syntax\n\nLine: {0}, Column: {1}\nReason: {reason}", token.line, token.column)]
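
Note: the hunk above attaches a captured Backtrace to ParseError::UnexpectedToken. A minimal standalone sketch of the same pattern (not part of this commit; DemoError and its main() are invented for illustration), assuming a nightly toolchain with error_generic_member_access and thiserror's #[backtrace] field attribute:

    #![feature(error_generic_member_access)]

    use std::backtrace::Backtrace;
    use thiserror::Error;

    #[derive(Debug, Error)]
    #[error("something went wrong")]
    struct DemoError {
        // thiserror exposes this field through the Error::provide machinery
        #[backtrace]
        backtrace: Backtrace,
    }

    fn main() {
        // Backtrace::capture() only records frames when RUST_BACKTRACE=1 (or "full") is set
        let err = DemoError { backtrace: Backtrace::capture() };
        if let Some(bt) = std::error::request_ref::<Backtrace>(&err) {
            println!("{bt}");
        }
    }
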
@@ -49,6 +56,7 @@ macro_rules! extract_token_data {
             _ => {
                 return Err(ParseError::UnexpectedToken {
                     token: $token.clone(),
+                    backtrace: std::backtrace::Backtrace::capture(),
                 })
             }
         }
@@ -59,6 +67,7 @@ macro_rules! extract_token_data {
             _ => {
                 return Err(ParseError::UnexpectedToken {
                     token: $token.clone(),
+                    backtrace: std::backtrace::Backtrace::capture(),
                 })
             }
         }
@@ -194,6 +203,7 @@ impl Parser {
             _ => {
                 return Err(ParseError::UnexpectedToken {
                     token: current_token.clone(),
+                    backtrace: std::backtrace::Backtrace::capture(),
                 })
             }
         });
@@ -202,11 +212,16 @@ impl Parser {
             return Ok(None);
         };
 
+        // check if the next or current token is an operator
         if self_matches_peek!(self, TokenType::Symbol(s) if s.is_operator()) {
             return Ok(Some(Expression::BinaryExpression(self.binary(expr)?)));
         }
-        // step 2: check if the next token is an operator and if we should parse a binary expression with the previous expression
+        // This is an edge case. We need to move back one token if the current token is an operator
+        // so the binary expression can pick up the operator
+        else if self_matches_current!(self, TokenType::Symbol(s) if s.is_operator()) {
+            self.tokenizer.seek(SeekFrom::Current(-1))?;
+            return Ok(Some(Expression::BinaryExpression(self.binary(expr)?)));
+        }
 
         Ok(Some(expr))
     }
@@ -235,22 +250,13 @@ impl Parser {
             }
             _ => Err(ParseError::UnexpectedToken {
                 token: current_token.clone(),
+                backtrace: std::backtrace::Backtrace::capture(),
             }),
         }
     }
 
     /// Handles mathmatical expressions in the explicit order of PEMDAS
     fn binary(&mut self, previous: Expression) -> Result<BinaryExpression, ParseError> {
-        macro_rules! min {
-            ($a:expr, $b:expr) => {
-                if $a < $b {
-                    $a
-                } else {
-                    $b
-                }
-            };
-        }
-
         // We cannot use recursion here, as we need to handle the precedence of the operators
         // We need to use a loop to parse the binary expressions.
 
@@ -285,6 +291,7 @@ impl Parser {
             operators.push(operator);
             self.assign_next()?;
             expressions.push(self.get_binary_child_node()?);
+
             current_token = token_from_option!(self.get_next()?).clone();
         }
 
@@ -296,40 +303,49 @@ impl Parser {
             });
         }
 
+        // Every time we find a valid operator, we pop 2 off the expressions and add one back.
+        // This means that we need to keep track of the current iteration to ensure we are
+        // removing the correct expressions from the vector
+        let mut current_iteration = 0;
+
         // Loop through operators, and build the binary expressions for exponential operators only
         for (i, operator) in operators.iter().enumerate() {
-            if operator == &Symbol::Caret {
-                let left = expressions.remove(min!(i, expressions.len() - 1));
-                let right = expressions.remove(min!(i, expressions.len() - 1));
+            if operator == &Symbol::Exp {
+                let index = i - current_iteration;
+                let left = expressions.remove(index);
+                let right = expressions.remove(index);
                 expressions.insert(
-                    min!(i, expressions.len()),
+                    index,
                     Expression::BinaryExpression(BinaryExpression::Exponent(
                         Box::new(left),
                         Box::new(right),
                     )),
                 );
+                current_iteration += 1;
             }
         }
 
         // remove all the exponential operators from the operators vector
-        operators.retain(|symbol| symbol != &Symbol::Caret);
+        operators.retain(|symbol| symbol != &Symbol::Exp);
+        current_iteration = 0;
 
         // Loop through operators, and build the binary expressions for multiplication and division operators
         for (i, operator) in operators.iter().enumerate() {
             if operator == &Symbol::Asterisk || operator == &Symbol::Slash {
-                let left = expressions.remove(min!(i, expressions.len() - 1));
-                let right = expressions.remove(min!(i, expressions.len() - 1));
+                let index = i - current_iteration;
+                let left = expressions.remove(index);
+                let right = expressions.remove(index);
 
                 match operator {
                     Symbol::Asterisk => expressions.insert(
-                        min!(i, expressions.len()),
+                        index,
                         Expression::BinaryExpression(BinaryExpression::Multiply(
                             Box::new(left),
                             Box::new(right),
                         )),
                     ),
                     Symbol::Slash => expressions.insert(
-                        min!(i, expressions.len()),
+                        index,
                         Expression::BinaryExpression(BinaryExpression::Divide(
                             Box::new(left),
                             Box::new(right),
@@ -338,28 +354,31 @@ impl Parser {
                     // safety: we have already checked for the operator
                     _ => unreachable!(),
                 }
+                current_iteration += 1;
             }
         }
 
         // remove all the multiplication and division operators from the operators vector
         operators.retain(|symbol| symbol != &Symbol::Asterisk && symbol != &Symbol::Slash);
+        current_iteration = 0;
 
         // Loop through operators, and build the binary expressions for addition and subtraction operators
         for (i, operator) in operators.iter().enumerate() {
             if operator == &Symbol::Plus || operator == &Symbol::Minus {
-                let left = expressions.remove(min!(i, expressions.len() - 1));
-                let right = expressions.remove(min!(i, expressions.len() - 1));
+                let index = i - current_iteration;
+                let left = expressions.remove(index);
+                let right = expressions.remove(index);
 
                 match operator {
                     Symbol::Plus => expressions.insert(
-                        min!(i, expressions.len()),
+                        index,
                         Expression::BinaryExpression(BinaryExpression::Add(
                             Box::new(left),
                             Box::new(right),
                         )),
                     ),
                     Symbol::Minus => expressions.insert(
-                        min!(i, expressions.len()),
+                        index,
                         Expression::BinaryExpression(BinaryExpression::Subtract(
                             Box::new(left),
                             Box::new(right),
@@ -368,6 +387,7 @@ impl Parser {
                     // safety: we have already checked for the operator
                     _ => unreachable!(),
                 }
+                current_iteration += 1;
             }
         }
 
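
Note: the precedence loops above fold expressions in place, one tier at a time; every fold removes two operands and inserts one, which is why the new current_iteration counter is subtracted from the operator index. A self-contained sketch of the same bookkeeping on plain numbers (illustration only, not project code; fold_tier is an invented name):

    fn fold_tier(operands: &mut Vec<f64>, operators: &mut Vec<char>, tier: &[char]) {
        let mut folds = 0;
        for (i, op) in operators.iter().enumerate() {
            if tier.contains(op) {
                // operator i sits between operands index and index + 1,
                // shifted left once for every fold already done in this tier
                let index = i - folds;
                let left = operands.remove(index);
                let right = operands.remove(index); // the right operand slid into `index`
                let value = match *op {
                    '*' => left * right,
                    '/' => left / right,
                    '+' => left + right,
                    '-' => left - right,
                    _ => unreachable!(),
                };
                operands.insert(index, value);
                folds += 1;
            }
        }
        // this tier is done; drop its operators so the next tier's indices line up again
        operators.retain(|op| !tier.contains(op));
    }

    fn main() {
        // 45 * 2 - 15 / 5  ==>  ((45 * 2) - (15 / 5)) = 87
        let mut operands = vec![45.0, 2.0, 15.0, 5.0];
        let mut operators = vec!['*', '-', '/'];
        fold_tier(&mut operands, &mut operators, &['*', '/']);
        fold_tier(&mut operands, &mut operators, &['+', '-']);
        assert_eq!(operands, vec![87.0]);
    }
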
@@ -382,8 +402,11 @@ impl Parser {
             });
         }
 
-        // Edge case. If the current token is a semi-colon, we need to set current token to the previous token
-        if token_matches!(current_token, TokenType::Symbol(Symbol::Semicolon)) {
+        // Edge case. If the current token is a semi-colon, RParen, we need to set current token to the previous token
+        if token_matches!(
+            current_token,
+            TokenType::Symbol(Symbol::Semicolon) | TokenType::Symbol(Symbol::RParen)
+        ) {
             self.tokenizer.seek(SeekFrom::Current(-1))?;
         }
 
@@ -399,16 +422,17 @@ impl Parser {
         if !token_matches!(current_token, TokenType::Symbol(Symbol::LParen)) {
             return Err(ParseError::UnexpectedToken {
                 token: current_token.clone(),
+                backtrace: std::backtrace::Backtrace::capture(),
             });
         }
 
         let expression = self.parse()?.ok_or(ParseError::UnexpectedEOF)?;
 
-        // make sure the next token is a right parenthesis
         let current_token = token_from_option!(self.get_next()?);
         if !token_matches!(current_token, TokenType::Symbol(Symbol::RParen)) {
             return Err(ParseError::UnexpectedToken {
                 token: current_token.clone(),
+                backtrace: std::backtrace::Backtrace::capture(),
             });
         }
 
@@ -427,6 +451,7 @@ impl Parser {
         if !token_matches!(current_token, TokenType::Symbol(Symbol::LParen)) {
             return Err(ParseError::UnexpectedToken {
                 token: current_token.clone(),
+                backtrace: std::backtrace::Backtrace::capture(),
             });
         }
 
@@ -455,6 +480,7 @@ impl Parser {
         {
             return Err(ParseError::UnexpectedToken {
                 token: token_from_option!(self.get_next()?).clone(),
+                backtrace: backtrace::Backtrace::capture(),
             });
         }
 
@@ -481,6 +507,7 @@ impl Parser {
         if !token_matches!(current_token, TokenType::Symbol(Symbol::LBrace)) {
             return Err(ParseError::UnexpectedToken {
                 token: current_token.clone(),
+                backtrace: backtrace::Backtrace::capture(),
             });
         }
 
@@ -500,6 +527,7 @@ impl Parser {
         if !self_matches_current!(self, TokenType::Keyword(Keyword::Let)) {
             return Err(ParseError::UnexpectedToken {
                 token: current_token.clone(),
+                backtrace: backtrace::Backtrace::capture(),
             });
         }
         let identifier = extract_token_data!(
@@ -513,6 +541,7 @@ impl Parser {
         if !token_matches!(current_token, TokenType::Symbol(Symbol::Assign)) {
             return Err(ParseError::UnexpectedToken {
                 token: current_token,
+                backtrace: backtrace::Backtrace::capture(),
             });
         }
 
@@ -523,6 +552,7 @@ impl Parser {
         if !token_matches!(current_token, TokenType::Symbol(Symbol::Semicolon)) {
             return Err(ParseError::UnexpectedToken {
                 token: current_token.clone(),
+                backtrace: backtrace::Backtrace::capture(),
             });
         }
 
@@ -540,6 +570,7 @@ impl Parser {
             _ => {
                 return Err(ParseError::UnexpectedToken {
                     token: current_token.clone(),
+                    backtrace: backtrace::Backtrace::capture(),
                 })
             }
         };
@@ -553,6 +584,7 @@ impl Parser {
         if !self_matches_current!(self, TokenType::Keyword(Keyword::Fn)) {
             return Err(ParseError::UnexpectedToken {
                 token: current_token.clone(),
+                backtrace: Backtrace::capture(),
             });
         }
 
@@ -567,6 +599,7 @@ impl Parser {
         if !token_matches!(current_token, TokenType::Symbol(Symbol::LParen)) {
             return Err(ParseError::UnexpectedToken {
                 token: current_token.clone(),
+                backtrace: Backtrace::capture(),
             });
         }
 
@@ -596,6 +629,7 @@ impl Parser {
         {
             return Err(ParseError::UnexpectedToken {
                 token: token_from_option!(self.get_next()?).clone(),
+                backtrace: Backtrace::capture(),
             });
         }
 
@@ -613,6 +647,7 @@ impl Parser {
         if !token_matches!(current_token, TokenType::Symbol(Symbol::LBrace)) {
             return Err(ParseError::UnexpectedToken {
                 token: current_token.clone(),
+                backtrace: Backtrace::capture(),
             });
         };
 
@@ -747,18 +782,15 @@ mod tests {
     }
 
     #[test]
-    fn test_binary() -> Result<()> {
-        let expr = parser!("1 + 3 ^ 5").parse()?.unwrap();
-        assert_eq!("(1 + (3 ^ 5))", expr.to_string());
+    fn test_binary_expression() -> Result<()> {
+        let expr = parser!("4 ** 2 + 5 ** 2").parse()?.unwrap();
+        assert_eq!("((4 ** 2) + (5 ** 2))", expr.to_string());
 
-        let input = "4 ^ 2 + 3 ^ 2";
+        let expr = parser!("45 * 2 - 15 / 5 + 5 ** 2").parse()?.unwrap();
+        assert_eq!("(((45 * 2) - (15 / 5)) + (5 ** 2))", expr.to_string());
 
-        let expr = parser!(input).parse()?.unwrap();
-        println!("Original: {}\nTranscribed: {}", input, expr.to_string());
-
-        let expr = parser!("12 - 1 + 3 * 5").parse()?.unwrap();
-        assert_eq!("((12 - 1) + (3 * 5))", expr.to_string());
+        let expr = parser!("(5 - 2) * 10").parse()?.unwrap();
+        assert_eq!("(((5 - 2)) * 10)", expr.to_string());
 
         Ok(())
     }
@@ -31,7 +31,7 @@ impl std::fmt::Display for BinaryExpression {
             BinaryExpression::Multiply(l, r) => write!(f, "({} * {})", l, r),
             BinaryExpression::Divide(l, r) => write!(f, "({} / {})", l, r),
             BinaryExpression::Subtract(l, r) => write!(f, "({} - {})", l, r),
-            BinaryExpression::Exponent(l, r) => write!(f, "({} ^ {})", l, r),
+            BinaryExpression::Exponent(l, r) => write!(f, "({} ** {})", l, r),
         }
     }
 }
@@ -138,6 +138,18 @@ impl std::fmt::Display for InvocationExpression {
     }
 }
 
+#[derive(Debug, PartialEq, Eq)]
+pub struct PropertyAccessorExpression {
+    pub object: Box<Expression>,
+    pub property: String,
+}
+
+impl std::fmt::Display for PropertyAccessorExpression {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}.{}", self.object, self.property)
+    }
+}
+
 #[derive(Debug, PartialEq, Eq)]
 pub enum Expression {
     Literal(Literal),
@@ -151,6 +163,7 @@ pub enum Expression {
     BlockExpression(BlockExpression),
     InvocationExpression(InvocationExpression),
     PriorityExpression(Box<Expression>),
+    PropertyAccessorExpression(PropertyAccessorExpression)
 }
 
 impl std::fmt::Display for Expression {
@@ -167,6 +180,7 @@ impl std::fmt::Display for Expression {
             Expression::InvocationExpression(e) => write!(f, "{}", e),
             Expression::Variable(id) => write!(f, "{}", id),
             Expression::PriorityExpression(e) => write!(f, "({})", e),
+            Expression::PropertyAccessorExpression(e) => write!(f, "{}", e),
         }
     }
 }
@@ -209,7 +209,7 @@ impl Tokenizer {
             '+' => symbol!(Plus),
             '-' => symbol!(Minus),
             '/' => symbol!(Slash),
-            '*' => symbol!(Asterisk),
             '.' => symbol!(Dot),
             '^' => symbol!(Caret),
 
@@ -238,6 +238,12 @@ impl Tokenizer {
             }
             '!' => symbol!(LogicalNot),
 
+            '*' if self.peek_next_char()? == Some('*') => {
+                self.next_char()?;
+                symbol!(Exp)
+            }
+            '*' => symbol!(Asterisk),
+
             '&' if self.peek_next_char()? == Some('&') => {
                 self.next_char()?;
                 symbol!(LogicalAnd)
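
Note: the new `**` arm above works by peeking one character ahead before committing to the single-character `*` token. A tiny standalone sketch of that lookahead shape using a std Peekable iterator (illustration only; next_op and the string token names are invented, and this is not the project's TokenizerBuffer API):

    use std::iter::Peekable;
    use std::str::Chars;

    fn next_op(chars: &mut Peekable<Chars<'_>>) -> Option<&'static str> {
        match chars.next()? {
            '*' if chars.peek() == Some(&'*') => {
                chars.next(); // consume the second '*'
                Some("Exp")
            }
            '*' => Some("Asterisk"),
            '+' => Some("Plus"),
            _ => None,
        }
    }

    fn main() {
        let mut chars = "**+*".chars().peekable();
        assert_eq!(next_op(&mut chars), Some("Exp"));
        assert_eq!(next_op(&mut chars), Some("Plus"));
        assert_eq!(next_op(&mut chars), Some("Asterisk"));
    }
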
@@ -246,6 +252,7 @@ impl Tokenizer {
                 self.next_char()?;
                 symbol!(LogicalOr)
             }
+
             _ => Err(TokenizerError::UnknownSymbolError(
                 first_symbol,
                 self.line,
@@ -674,7 +681,7 @@ This is a skippable line"#,
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_symbol_parse() -> Result<()> {
|
fn test_symbol_parse() -> Result<()> {
|
||||||
let mut tokenizer = Tokenizer::from(String::from(
|
let mut tokenizer = Tokenizer::from(String::from(
|
||||||
"^ ! () [] {} , . ; : + - * / < > = != && || >= <=",
|
"^ ! () [] {} , . ; : + - * / < > = != && || >= <=**",
|
||||||
));
|
));
|
||||||
|
|
||||||
let expected_tokens = vec![
|
let expected_tokens = vec![
|
||||||
@@ -702,6 +709,7 @@ This is a skippable line"#,
|
|||||||
TokenType::Symbol(Symbol::LogicalOr),
|
TokenType::Symbol(Symbol::LogicalOr),
|
||||||
TokenType::Symbol(Symbol::GreaterThanOrEqual),
|
TokenType::Symbol(Symbol::GreaterThanOrEqual),
|
||||||
TokenType::Symbol(Symbol::LessThanOrEqual),
|
TokenType::Symbol(Symbol::LessThanOrEqual),
|
||||||
|
TokenType::Symbol(Symbol::Exp),
|
||||||
];
|
];
|
||||||
|
|
||||||
for expected_token in expected_tokens {
|
for expected_token in expected_tokens {
|
||||||
|
|||||||
@@ -122,12 +122,14 @@ pub enum Symbol {
     LessThanOrEqual,
     /// Represents the `>=` symbol
    GreaterThanOrEqual,
+    /// Represents the `**` symbol
+    Exp,
 }
 
 impl Symbol {
     pub fn is_operator(&self) -> bool {
         match self {
-            Symbol::Plus | Symbol::Minus | Symbol::Asterisk | Symbol::Slash | Symbol::Caret => true,
+            Symbol::Plus | Symbol::Minus | Symbol::Asterisk | Symbol::Slash | Symbol::Exp => true,
             _ => false,
         }
     }
@@ -1,3 +1,14 @@
-export fn doThings() {
-    power.myPowerItem(12.45 + 5);
+fn myPowerItem(arg) {
+
+}
+
+fn doThings(x, y, z) {
+    let item = myPowerItem((12.45 + 5 * 123) ** 2);
+
+    let item2 = 5 + 2 - 5 + 123.24323 / 234 ** 21 - (15 / 2);
+}
+
+
+fn main() {
+    doThings(1, 2, 3)
 }