diff --git a/Cargo.lock b/Cargo.lock index 76ca660..62124d4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -404,6 +404,12 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "quick-error" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" + [[package]] name = "quote" version = "1.0.37" @@ -565,8 +571,8 @@ version = "0.1.0" dependencies = [ "anyhow", "clap", + "quick-error", "rust_decimal", - "thiserror", ] [[package]] @@ -603,26 +609,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" -[[package]] -name = "thiserror" -version = "2.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c006c85c7651b3cf2ada4584faa36773bd07bac24acfb39f3c431b36d7e667aa" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.87", -] - [[package]] name = "tinyvec" version = "1.8.0" diff --git a/Cargo.toml b/Cargo.toml index 6befb2f..a46402b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,13 +3,10 @@ name = "stationlang" version = "0.1.0" edition = "2021" -[profile.dev] -panic = "unwind" - [dependencies] clap = { version = "^4.5", features = ["derive"] } +quick-error = "2.0.1" rust_decimal = "1.36.0" -thiserror = { version = "^2.0" } [dev-dependencies] anyhow = { version = "^1.0", features = ["backtrace"] } diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 8e275b7..73cb934 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly" +channel = "stable" components = ["rustfmt", "clippy"] diff --git a/src/compiler/mod.rs 
b/src/compiler/mod.rs index ddb50cc..150161b 100644 --- a/src/compiler/mod.rs +++ b/src/compiler/mod.rs @@ -1,5 +1,3 @@ -use thiserror::Error; - use crate::parser::tree_node::*; use crate::parser::Parser as ASTParser; use std::collections::HashMap; @@ -9,14 +7,21 @@ use std::io::Write; /// Represents the return keyword. Used as a variable name for the register. const RETURN: &'static str = "ret"; -#[derive(Error, Debug)] -pub enum CompileError { - #[error(transparent)] - ParseError(#[from] crate::parser::ParseError), - #[error("A fatal error has occurred with the compiler. Scope could not be found.")] - ScopeError, - #[error(transparent)] - WriteError(#[from] std::io::Error), +quick_error! { + #[derive(Debug)] + pub enum CompileError { + ParseError(err: crate::parser::ParseError) { + from() + display("Parse error: {}", err) + } + ScopeError { + display("A fatal error has occurred with the compiler. Scope could not be found.") + } + WriteError(err: std::io::Error) { + from() + display("Write error: {}", err) + } + } } pub struct Compiler<'a> { diff --git a/src/main.rs b/src/main.rs index 8a640d0..a9fd23f 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,4 +1,5 @@ -#![feature(error_generic_member_access)] +#[macro_use] +extern crate quick_error; mod compiler; mod parser; @@ -21,16 +22,26 @@ macro_rules! boxed { }; } -#[derive(Debug, thiserror::Error)] -enum StationlangError { - #[error(transparent)] - TokenizerError(#[from] TokenizerError), - #[error(transparent)] - ParserError(#[from] parser::ParseError), - #[error(transparent)] - CompileError(#[from] compiler::CompileError), - #[error(transparent)] - IoError(#[from] std::io::Error), +quick_error! 
{ + #[derive(Debug)] + enum StationlangError { + TokenizerError(err: TokenizerError) { + from() + display("Tokenizer error: {}", err) + } + ParserError(err: parser::ParseError) { + from() + display("Parser error: {}", err) + } + CompileError(err: compiler::CompileError) { + from() + display("Compile error: {}", err) + } + IoError(err: std::io::Error) { + from() + display("IO error: {}", err) + } + } } #[derive(Parser, Debug)] diff --git a/src/parser/mod.rs b/src/parser/mod.rs index 10f5f23..0ecc701 100644 --- a/src/parser/mod.rs +++ b/src/parser/mod.rs @@ -8,31 +8,33 @@ use crate::{ Tokenizer, TokenizerBuffer, TokenizerError, }, }; -use std::{ - backtrace::{self, Backtrace}, - io::SeekFrom, -}; -use thiserror::Error; +use std::io::SeekFrom; use tree_node::*; -#[derive(Debug, Error)] -pub enum ParseError { - #[error(transparent)] - TokenizerError(#[from] TokenizerError), - #[error("Unexpected token\n\nLine: {0}, Column: {1}\nToken: {2}\n", token.line, token.column, token.token_type)] - UnexpectedToken { - token: Token, - #[backtrace] - backtrace: std::backtrace::Backtrace, - }, - #[error("Duplicated Identifer\n\nLine: {0}, Column: {1}\nToken: {2}\n", token.line, token.column, token.token_type)] - DuplicateIdentifier { token: Token }, - #[error("Invalid Syntax\n\nLine: {0}, Column: {1}\nReason: {reason}", token.line, token.column)] - InvalidSyntax { token: Token, reason: String }, - #[error("This keyword is not yet implemented\n\nLine: {0}, Column: {1}\nToken: {2}\n", token.line, token.column, token.token_type)] - UnsupportedKeyword { token: Token }, - #[error("Unexpected EOF")] - UnexpectedEOF, +quick_error! 
{ + #[derive(Debug)] + pub enum ParseError { + TokenizerError(err: TokenizerError) { + from() + display("Tokenizer Error: {}", err) + source(err) + } + UnexpectedToken(token: Token) { + display("Unexpected token: {:?}", token) + } + DuplicateIdentifier(token: Token) { + display("Duplicate identifier: {:?}", token) + } + InvalidSyntax(token: Token, reason: String) { + display("Invalid syntax: {:?}, Reason: {}", token, reason) + } + UnsupportedKeyword(token: Token) { + display("Unsupported keyword: {:?}", token) + } + UnexpectedEOF { + display("Unexpected EOF") + } + } } macro_rules! self_matches_peek { @@ -57,22 +59,14 @@ macro_rules! extract_token_data { ($token:ident, $pattern:pat, $extraction:expr) => { match $token.token_type { $pattern => $extraction, - _ => { - return Err(ParseError::UnexpectedToken { - token: $token.clone(), - backtrace: std::backtrace::Backtrace::capture(), - }) - } + _ => return Err(ParseError::UnexpectedToken($token.clone())), } }; ($token:expr, $pattern:pat, $extraction:expr) => { match $token.token_type { $pattern => $extraction, _ => { - return Err(ParseError::UnexpectedToken { - token: $token.clone(), - backtrace: std::backtrace::Backtrace::capture(), - }) + return Err(ParseError::UnexpectedToken($token.clone())); } } }; @@ -180,9 +174,7 @@ impl Parser { Keyword::Else ) => { - return Err(ParseError::UnsupportedKeyword { - token: current_token.clone(), - }) + return Err(ParseError::UnsupportedKeyword(current_token.clone())) } // match declarations with a `let` keyword @@ -218,10 +210,7 @@ impl Parser { TokenType::Symbol(Symbol::LParen) => Expression::PriorityExpression(self.priority()?), _ => { - return Err(ParseError::UnexpectedToken { - token: current_token.clone(), - backtrace: std::backtrace::Backtrace::capture(), - }) + return Err(ParseError::UnexpectedToken(current_token.clone())); } }); @@ -265,10 +254,7 @@ impl Parser { { self.invocation().map(Expression::InvocationExpression) } - _ => Err(ParseError::UnexpectedToken { - token: 
current_token.clone(), - backtrace: std::backtrace::Backtrace::capture(), - }), + _ => Err(ParseError::UnexpectedToken(current_token.clone())), } } @@ -281,10 +267,7 @@ impl Parser { let current_token = token_from_option!(self.get_next()?).clone(); if !token_matches!(current_token, TokenType::Symbol(Symbol::Assign)) { - return Err(ParseError::UnexpectedToken { - token: current_token.clone(), - backtrace: Backtrace::capture(), - }); + return Err(ParseError::UnexpectedToken(current_token)); } self.assign_next()?; @@ -313,10 +296,7 @@ impl Parser { | Expression::Negation(_) // -1 + 2 => {} _ => { - return Err(ParseError::InvalidSyntax { - token: current_token.clone(), - reason: "Invalid expression for binary operation".to_owned(), - }) + return Err(ParseError::InvalidSyntax(current_token.clone(), String::from("Invalid expression for binary operation"))) } } @@ -338,10 +318,10 @@ impl Parser { // validate the vectors and make sure operators.len() == expressions.len() - 1 if operators.len() != expressions.len() - 1 { - return Err(ParseError::InvalidSyntax { - token: current_token.clone(), - reason: "Invalid number of operators".to_owned(), - }); + return Err(ParseError::InvalidSyntax( + current_token.clone(), + String::from("Invalid number of operators"), + )); } // Every time we find a valid operator, we pop 2 off the expressions and add one back. @@ -437,10 +417,10 @@ impl Parser { // Ensure there is only one expression left in the expressions vector, and no operators left if expressions.len() != 1 || !operators.is_empty() { - return Err(ParseError::InvalidSyntax { - token: current_token.clone(), - reason: "Invalid number of operators".to_owned(), - }); + return Err(ParseError::InvalidSyntax( + current_token.clone(), + String::from("Invalid number of operators"), + )); } // Edge case. 
If the current token is a semi-colon, RParen, we need to set current token to the previous token @@ -461,10 +441,7 @@ impl Parser { fn priority(&mut self) -> Result<Box<Expression>, ParseError> { let current_token = token_from_option!(self.current_token); if !token_matches!(current_token, TokenType::Symbol(Symbol::LParen)) { - return Err(ParseError::UnexpectedToken { - token: current_token.clone(), - backtrace: std::backtrace::Backtrace::capture(), - }); + return Err(ParseError::UnexpectedToken(current_token.clone())); } self.assign_next()?; @@ -472,10 +449,7 @@ let current_token = token_from_option!(self.get_next()?); if !token_matches!(current_token, TokenType::Symbol(Symbol::RParen)) { - return Err(ParseError::UnexpectedToken { - token: current_token.clone(), - backtrace: std::backtrace::Backtrace::capture(), - }); + return Err(ParseError::UnexpectedToken(current_token.clone())); } Ok(boxed!(expression)) @@ -491,10 +465,7 @@ // Ensure the next token is a left parenthesis let current_token = token_from_option!(self.get_next()?); if !token_matches!(current_token, TokenType::Symbol(Symbol::LParen)) { - return Err(ParseError::UnexpectedToken { - token: current_token.clone(), - backtrace: std::backtrace::Backtrace::capture(), - }); + return Err(ParseError::UnexpectedToken(current_token.clone())); } let mut arguments = Vec::<Expression>::new(); @@ -508,10 +479,10 @@ let expression = self.expression()?.ok_or(ParseError::UnexpectedEOF)?; if let Expression::BlockExpression(_) = expression { - return Err(ParseError::InvalidSyntax { - token: current_token, - reason: "Block expressions are not allowed in function invocations".to_owned(), - }); + return Err(ParseError::InvalidSyntax( + current_token, + String::from("Block expressions are not allowed in function invocations"), + )); } arguments.push(expression); @@ -520,10 +491,9 @@ if !self_matches_peek!(self, TokenType::Symbol(Symbol::Comma)) && !self_matches_peek!(self, 
TokenType::Symbol(Symbol::RParen)) { - return Err(ParseError::UnexpectedToken { - token: token_from_option!(self.get_next()?).clone(), - backtrace: backtrace::Backtrace::capture(), - }); + return Err(ParseError::UnexpectedToken( + token_from_option!(self.get_next()?).clone(), + )); } // edge case: if the next token is not a right parenthesis, increment the current token @@ -547,10 +517,7 @@ // sanity check: make sure the current token is a left brace if !token_matches!(current_token, TokenType::Symbol(Symbol::LBrace)) { - return Err(ParseError::UnexpectedToken { - token: current_token.clone(), - backtrace: backtrace::Backtrace::capture(), - }); + return Err(ParseError::UnexpectedToken(current_token.clone())); } while !self_matches_peek!( @@ -580,10 +547,7 @@ fn declaration(&mut self) -> Result<Expression, ParseError> { let current_token = token_from_option!(self.current_token); if !self_matches_current!(self, TokenType::Keyword(Keyword::Let)) { - return Err(ParseError::UnexpectedToken { - token: current_token.clone(), - backtrace: backtrace::Backtrace::capture(), - }); + return Err(ParseError::UnexpectedToken(current_token.clone())); } let identifier = extract_token_data!( token_from_option!(self.get_next()?), @@ -594,10 +558,7 @@ let current_token = token_from_option!(self.get_next()?).clone(); if !token_matches!(current_token, TokenType::Symbol(Symbol::Assign)) { - return Err(ParseError::UnexpectedToken { - token: current_token, - backtrace: backtrace::Backtrace::capture(), - }); + return Err(ParseError::UnexpectedToken(current_token.clone())); } self.assign_next()?; @@ -606,10 +567,7 @@ // make sure the next token is a semi-colon let current_token = token_from_option!(self.get_next()?); if !token_matches!(current_token, TokenType::Symbol(Symbol::Semicolon)) { - return 
Err(ParseError::UnexpectedToken(current_token.clone())); } Ok(Expression::DeclarationExpression( @@ -623,12 +581,7 @@ let literal = match current_token.token_type { TokenType::Number(ref num) => Literal::Number(num.clone()), TokenType::String(ref string) => Literal::String(string.clone()), - _ => { - return Err(ParseError::UnexpectedToken { - token: current_token.clone(), - backtrace: backtrace::Backtrace::capture(), - }) - } + _ => return Err(ParseError::UnexpectedToken(current_token.clone())), }; Ok(literal) @@ -638,10 +591,7 @@ let current_token = token_from_option!(self.current_token); // Sanify check that the current token is a `fn` keyword if !self_matches_current!(self, TokenType::Keyword(Keyword::Fn)) { - return Err(ParseError::UnexpectedToken { - token: current_token.clone(), - backtrace: Backtrace::capture(), - }); + return Err(ParseError::UnexpectedToken(current_token.clone())); } let fn_ident = extract_token_data!( @@ -653,10 +603,7 @@ // make sure next token is a left parenthesis let current_token = token_from_option!(self.get_next()?); if !token_matches!(current_token, TokenType::Symbol(Symbol::LParen)) { - return Err(ParseError::UnexpectedToken { - token: current_token.clone(), - backtrace: Backtrace::capture(), - }); + return Err(ParseError::UnexpectedToken(current_token.clone())); } let mut arguments = Vec::<String>::new(); @@ -672,9 +619,7 @@ extract_token_data!(current_token, TokenType::Identifier(ref id), id.clone()); if arguments.contains(&argument) { - return Err(ParseError::DuplicateIdentifier { - token: current_token.clone(), - }); + return Err(ParseError::DuplicateIdentifier(current_token.clone())); } arguments.push(argument); @@ -683,10 +628,9 @@ if !self_matches_peek!(self, TokenType::Symbol(Symbol::Comma)) && !self_matches_peek!(self, TokenType::Symbol(Symbol::RParen)) { - return Err(ParseError::UnexpectedToken { - token: token_from_option!(self.get_next()?).clone(), - 
backtrace: Backtrace::capture(), - }); + return Err(ParseError::UnexpectedToken( + token_from_option!(self.get_next()?).clone(), + )); } // edge case: if the next token is not a right parenthesis, increment the current token @@ -701,10 +645,7 @@ impl Parser { // make sure the next token is a left brace let current_token = token_from_option!(self.get_next()?); if !token_matches!(current_token, TokenType::Symbol(Symbol::LBrace)) { - return Err(ParseError::UnexpectedToken { - token: current_token.clone(), - backtrace: Backtrace::capture(), - }); + return Err(ParseError::UnexpectedToken(current_token.clone())); }; Ok(FunctionExpression { diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs index 85ab118..64c10d3 100644 --- a/src/tokenizer/mod.rs +++ b/src/tokenizer/mod.rs @@ -1,5 +1,6 @@ pub mod token; +use crate::boxed; use rust_decimal::Decimal; use std::{ cmp::Ordering, @@ -7,23 +8,31 @@ use std::{ io::{BufReader, Cursor, Read, Seek, SeekFrom}, path::PathBuf, }; -use thiserror::Error; use token::{Keyword, Number, Symbol, Token, TokenType}; -use crate::boxed; - -#[derive(Error, Debug)] -pub enum TokenizerError { - #[error("IO Error: {0}")] - IOError(#[from] std::io::Error), - #[error("Number Parse Error \"{0}\"\nLine: {1}, Column: {2}")] - NumberParseError(std::num::ParseIntError, usize, usize), - #[error("Decimal Parse Error \"{0}\"\nLine: {1}, Column: {2}")] - DecimalParseError(rust_decimal::Error, usize, usize), - #[error("Unknown Symbol \"{0}\"\nLine: {1}, Column: {2}")] - UnknownSymbolError(char, usize, usize), - #[error("Unknown Keyword or Identifier \"{0}\"\nLine: {1}, Column: {2}")] - UnknownKeywordOrIdentifierError(String, usize, usize), +quick_error! 
{ + #[derive(Debug)] + pub enum TokenizerError { + IOError(err: std::io::Error) { + from() + display("IO Error: {}", err) + source(err) + } + NumberParseError(err: std::num::ParseIntError, line: usize, column: usize) { + display("Number Parse Error: {}\nLine: {}, Column: {}", err, line, column) + source(err) + } + DecimalParseError(err: rust_decimal::Error, line: usize, column: usize) { + display("Decimal Parse Error: {}\nLine: {}, Column: {}", err, line, column) + source(err) + } + UnknownSymbolError(char: char, line: usize, column: usize) { + display("Unknown Symbol: {}\nLine: {}, Column: {}", char, line, column) + } + UnknownKeywordOrIdentifierError(val: String, line: usize, column: usize) { + display("Unknown Keyword or Identifier: {}\nLine: {}, Column: {}", val, line, column) + } + } } pub trait Tokenize: Read + Seek {}