From c531f673a5959a85a3d0ecd42909f7371375126e Mon Sep 17 00:00:00 2001 From: Devin Bidwell Date: Tue, 9 Dec 2025 11:32:14 -0700 Subject: [PATCH] Remove quickerror in favor of thiserror --- rust_compiler/Cargo.lock | 12 +-- rust_compiler/Cargo.toml | 4 +- rust_compiler/libs/compiler/Cargo.toml | 2 +- rust_compiler/libs/compiler/src/v1.rs | 81 +++++++++---------- .../libs/compiler/src/variable_manager.rs | 25 +++--- rust_compiler/libs/parser/Cargo.toml | 3 +- rust_compiler/libs/parser/src/lib.rs | 11 ++- rust_compiler/libs/parser/src/tree_node.rs | 6 +- rust_compiler/libs/tokenizer/Cargo.toml | 3 +- rust_compiler/libs/tokenizer/src/lib.rs | 20 ++--- rust_compiler/libs/tokenizer/src/token.rs | 8 +- rust_compiler/src/ffi/mod.rs | 21 ++--- rust_compiler/src/main.rs | 37 ++++----- 13 files changed, 97 insertions(+), 136 deletions(-) diff --git a/rust_compiler/Cargo.lock b/rust_compiler/Cargo.lock index c255c15..fc4a7d8 100644 --- a/rust_compiler/Cargo.lock +++ b/rust_compiler/Cargo.lock @@ -272,8 +272,8 @@ dependencies = [ "lsp-types", "parser", "pretty_assertions", - "quick-error", "rust_decimal", + "thiserror", "tokenizer", ] @@ -571,7 +571,6 @@ dependencies = [ "helpers", "lsp-types", "pretty_assertions", - "quick-error", "thiserror", "tokenizer", ] @@ -649,12 +648,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "quick-error" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" - [[package]] name = "quote" version = "1.0.42" @@ -924,9 +917,9 @@ dependencies = [ "helpers", "lsp-types", "parser", - "quick-error", "rust_decimal", "safer-ffi", + "thiserror", "tokenizer", ] @@ -1042,7 +1035,6 @@ dependencies = [ "helpers", "logos", "lsp-types", - "quick-error", "rust_decimal", "thiserror", ] diff --git a/rust_compiler/Cargo.toml b/rust_compiler/Cargo.toml index bac327e..4b0e4c3 100644 --- a/rust_compiler/Cargo.toml +++ b/rust_compiler/Cargo.toml @@ 
-7,7 +7,7 @@ edition = "2021" members = ["libs/*"] [workspace.dependencies] -quick-error = "2" +thiserror = "2" rust_decimal = "1" safer-ffi = { version = "0.1" } # Safely share structs in memory between C# and Rust lsp-types = { version = "0.97" } # Allows for LSP style reporting to the frontend @@ -36,7 +36,7 @@ crate-type = ["cdylib", "rlib"] [dependencies] clap = { version = "^4.5", features = ["derive"] } lsp-types = { workspace = true } -quick-error = { workspace = true } +thiserror = { workspace = true } rust_decimal = { workspace = true } tokenizer = { path = "libs/tokenizer" } parser = { path = "libs/parser" } diff --git a/rust_compiler/libs/compiler/Cargo.toml b/rust_compiler/libs/compiler/Cargo.toml index a21718c..829e4f9 100644 --- a/rust_compiler/libs/compiler/Cargo.toml +++ b/rust_compiler/libs/compiler/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2024" [dependencies] -quick-error = { workspace = true } +thiserror = { workspace = true } parser = { path = "../parser" } tokenizer = { path = "../tokenizer" } helpers = { path = "../helpers" } diff --git a/rust_compiler/libs/compiler/src/v1.rs b/rust_compiler/libs/compiler/src/v1.rs index 005b95f..3822a9c 100644 --- a/rust_compiler/libs/compiler/src/v1.rs +++ b/rust_compiler/libs/compiler/src/v1.rs @@ -11,11 +11,11 @@ use parser::{ LoopExpression, MemberAccessExpression, Span, Spanned, WhileExpression, }, }; -use quick_error::quick_error; use std::{ collections::HashMap, io::{BufWriter, Write}, }; +use thiserror::Error; use tokenizer::token::Number; macro_rules! 
debug { @@ -50,40 +50,37 @@ fn extract_literal(literal: Literal, allow_strings: bool) -> Result) { - display("{reason}") - } - } +#[derive(Error, Debug)] +pub enum Error { + #[error(transparent)] + Parse(#[from] parser::Error), + + #[error(transparent)] + Scope(#[from] variable_manager::Error), + + #[error("IO Error: {0}")] + IO(String), + + #[error("`{0}` has already been defined.")] + DuplicateIdentifier(String, Span), + + #[error("`{0}` is not found in the current scope.")] + UnknownIdentifier(String, Span), + + #[error("`{0}` is not valid.")] + InvalidDevice(String, Span), + + #[error("Incorrect number of arguments passed into `{0}`")] + AgrumentMismatch(String, Span), + + #[error("Attempted to re-assign a value to const variable `{0}`")] + ConstAssignment(String, Span), + + #[error("Attempted to re-assign a value to a device const `{0}`")] + DeviceAssignment(String, Span), + + #[error("{0}")] + Unknown(String, Option), } impl From for lsp_types::Diagnostic { @@ -91,13 +88,13 @@ impl From for lsp_types::Diagnostic { use Error::*; use lsp_types::*; match value { - ParseError(e) => e.into(), - IoError(e) => Diagnostic { + Parse(e) => e.into(), + IO(e) => Diagnostic { message: e.to_string(), severity: Some(DiagnosticSeverity::ERROR), ..Default::default() }, - ScopeError(e) => e.into(), + Scope(e) => e.into(), DuplicateIdentifier(_, span) | UnknownIdentifier(_, span) | InvalidDevice(_, span) @@ -122,7 +119,7 @@ impl From for lsp_types::Diagnostic { // Map io::Error to Error manually since we can't clone io::Error impl From for Error { fn from(err: std::io::Error) -> Self { - Error::IoError(err.to_string()) + Error::IO(err.to_string()) } } @@ -181,7 +178,7 @@ impl<'a, W: std::io::Write> Compiler<'a, W> { // Copy errors from parser for e in std::mem::take(&mut self.parser.errors) { - self.errors.push(Error::ParseError(e)); + self.errors.push(Error::Parse(e)); } // We treat parse_all result as potentially partial @@ -190,7 +187,7 @@ impl<'a, W: std::io::Write>
Compiler<'a, W> { Ok(None) => return self.errors, Err(e) => { // Should be covered by parser.errors, but just in case - self.errors.push(Error::ParseError(e)); + self.errors.push(Error::Parse(e)); return self.errors; } }; @@ -979,7 +976,7 @@ impl<'a, W: std::io::Write> Compiler<'a, W> { for register in active_registers { let VariableLocation::Stack(stack_offset) = stack .get_location_of(format!("temp_{register}"), None) - .map_err(Error::ScopeError)? + .map_err(Error::Scope)? else { // This shouldn't happen if we just added it return Err(Error::Unknown( diff --git a/rust_compiler/libs/compiler/src/variable_manager.rs b/rust_compiler/libs/compiler/src/variable_manager.rs index f5f7e72..123ac09 100644 --- a/rust_compiler/libs/compiler/src/variable_manager.rs +++ b/rust_compiler/libs/compiler/src/variable_manager.rs @@ -5,25 +5,22 @@ use lsp_types::{Diagnostic, DiagnosticSeverity}; use parser::tree_node::{Literal, Span}; -use quick_error::quick_error; use std::collections::{HashMap, VecDeque}; +use thiserror::Error; const TEMP: [u8; 7] = [1, 2, 3, 4, 5, 6, 7]; const PERSIST: [u8; 7] = [8, 9, 10, 11, 12, 13, 14]; -quick_error! 
{ - #[derive(Debug)] - pub enum Error { - DuplicateVariable(var: String, span: Option) { - display("{var} already exists.") - } - UnknownVariable(var: String, span: Option) { - display("{var} does not exist.") - } - Unknown(reason: String, span: Option) { - display("{reason}") - } - } +#[derive(Error, Debug)] +pub enum Error { + #[error("{0} already exists.")] + DuplicateVariable(String, Option), + + #[error("{0} does not exist.")] + UnknownVariable(String, Option), + + #[error("{0}")] + Unknown(String, Option), } impl From for lsp_types::Diagnostic { diff --git a/rust_compiler/libs/parser/Cargo.toml b/rust_compiler/libs/parser/Cargo.toml index e3c304b..1c5d935 100644 --- a/rust_compiler/libs/parser/Cargo.toml +++ b/rust_compiler/libs/parser/Cargo.toml @@ -4,11 +4,10 @@ version = "0.1.0" edition = "2024" [dependencies] -quick-error = { workspace = true } tokenizer = { path = "../tokenizer" } helpers = { path = "../helpers" } lsp-types = { workspace = true } -thiserror = "2" +thiserror = { workspace = true } [dev-dependencies] diff --git a/rust_compiler/libs/parser/src/lib.rs b/rust_compiler/libs/parser/src/lib.rs index e866c37..0a4b49e 100644 --- a/rust_compiler/libs/parser/src/lib.rs +++ b/rust_compiler/libs/parser/src/lib.rs @@ -1,7 +1,6 @@ +pub mod sys_call; #[cfg(test)] mod test; - -pub mod sys_call; pub mod tree_node; use crate::sys_call::{Math, System}; @@ -28,8 +27,8 @@ macro_rules! 
boxed { #[derive(Error, Debug)] pub enum Error { - #[error("Tokenizer Error: {0}")] - TokenizerError(#[from] tokenizer::Error), + #[error(transparent)] + Tokenizer(#[from] tokenizer::Error), #[error("Unexpected token: {1}")] UnexpectedToken(Span, Token), @@ -52,7 +51,7 @@ impl From for lsp_types::Diagnostic { use Error::*; use lsp_types::*; match value { - TokenizerError(e) => e.into(), + Tokenizer(e) => e.into(), UnexpectedToken(span, _) | DuplicateIdentifier(span, _) | InvalidSyntax(span, _) @@ -216,7 +215,7 @@ impl<'a> Parser<'a> { match self.tokenizer.peek() { Ok(None) => break, Err(e) => { - self.errors.push(Error::TokenizerError(e)); + self.errors.push(Error::Tokenizer(e)); break; } _ => {} diff --git a/rust_compiler/libs/parser/src/tree_node.rs b/rust_compiler/libs/parser/src/tree_node.rs index 350e4e6..b44759d 100644 --- a/rust_compiler/libs/parser/src/tree_node.rs +++ b/rust_compiler/libs/parser/src/tree_node.rs @@ -1,8 +1,6 @@ -use std::ops::Deref; - -use crate::sys_call; - use super::sys_call::SysCall; +use crate::sys_call; +use std::ops::Deref; use tokenizer::token::Number; #[derive(Debug, Eq, PartialEq, Clone)] diff --git a/rust_compiler/libs/tokenizer/Cargo.toml b/rust_compiler/libs/tokenizer/Cargo.toml index 9d50066..a08373c 100644 --- a/rust_compiler/libs/tokenizer/Cargo.toml +++ b/rust_compiler/libs/tokenizer/Cargo.toml @@ -5,11 +5,10 @@ edition = "2024" [dependencies] rust_decimal = { workspace = true } -quick-error = { workspace = true } lsp-types = { workspace = true } +thiserror = { workspace = true } helpers = { path = "../helpers" } logos = "0.16" -thiserror = "2" [dev-dependencies] anyhow = { version = "^1" } diff --git a/rust_compiler/libs/tokenizer/src/lib.rs b/rust_compiler/libs/tokenizer/src/lib.rs index 1e76e36..c70bda6 100644 --- a/rust_compiler/libs/tokenizer/src/lib.rs +++ b/rust_compiler/libs/tokenizer/src/lib.rs @@ -1,26 +1,20 @@ pub mod token; use logos::{Lexer, Logos}; -use quick_error::quick_error; use std::{ cmp::Ordering, 
collections::VecDeque, io::{Read, Seek, SeekFrom}, }; +use thiserror::Error; use token::{Token, TokenType}; -quick_error! { - #[derive(Debug)] - pub enum Error { - IOError(err: std::io::Error) { - from() - display("IO Error: {}", err) - source(err) - } - LexError(err: token::LexError) { - from() - } - } +#[derive(Error, Debug)] +pub enum Error { + #[error("IO Error: {0}")] + IOError(#[from] std::io::Error), + #[error(transparent)] + LexError(#[from] token::LexError), } impl From for lsp_types::Diagnostic { diff --git a/rust_compiler/libs/tokenizer/src/token.rs b/rust_compiler/libs/tokenizer/src/token.rs index 74f00b1..f4a5c99 100644 --- a/rust_compiler/libs/tokenizer/src/token.rs +++ b/rust_compiler/libs/tokenizer/src/token.rs @@ -7,7 +7,7 @@ use thiserror::Error; #[derive(Debug, Error, Default, Clone, PartialEq)] pub enum LexError { #[error("Attempted to parse an invalid number: {2}")] - NumberParseError(usize, Span, String), + NumberParse(usize, Span, String), #[error("An invalid character was found in token stream: {2}")] InvalidInput(usize, Span, String), @@ -20,7 +20,7 @@ pub enum LexError { impl From for Diagnostic { fn from(value: LexError) -> Self { match value { - LexError::NumberParseError(line, col, str) | LexError::InvalidInput(line, col, str) => { + LexError::NumberParse(line, col, str) | LexError::InvalidInput(line, col, str) => { Diagnostic { range: Range { start: Position { @@ -281,13 +281,13 @@ fn parse_number<'a>(lexer: &mut Lexer<'a, TokenType>) -> Result() - .map_err(|_| LexError::NumberParseError(line, span, slice.to_string()))?, + .map_err(|_| LexError::NumberParse(line, span, slice.to_string()))?, ) } else { Number::Integer( clean_str .parse::() - .map_err(|_| LexError::NumberParseError(line, span, slice.to_string()))?, + .map_err(|_| LexError::NumberParse(line, span, slice.to_string()))?, ) }; diff --git a/rust_compiler/src/ffi/mod.rs b/rust_compiler/src/ffi/mod.rs index c09a539..9dc8763 100644 --- a/rust_compiler/src/ffi/mod.rs +++ 
b/rust_compiler/src/ffi/mod.rs @@ -4,7 +4,7 @@ use parser::{sys_call::SysCall, Parser}; use safer_ffi::prelude::*; use std::io::BufWriter; use tokenizer::{ - token::{LexError, Token, TokenType}, + token::{Token, TokenType}, Tokenizer, }; @@ -140,14 +140,12 @@ pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec { use tokenizer::token::LexError; use tokenizer::Error::*; - let (err_str, line, span) = match e { - LexError(e) => match e { - LexError::NumberParseError(line, span, err) - | LexError::InvalidInput(line, span, err) => { - (err.to_string(), line, span) - } - _ => continue, - }, + let (err_str, _, span) = match e { + LexError(LexError::NumberParse(line, span, err)) + | LexError(LexError::InvalidInput(line, span, err)) => { + (err.to_string(), line, span) + } + _ => continue, }; @@ -160,10 +158,7 @@ pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec tokens.push(FfiToken { column: span.start as i32, error: "".into(), diff --git a/rust_compiler/src/main.rs b/rust_compiler/src/main.rs index 2323398..bff25c9 100644 --- a/rust_compiler/src/main.rs +++ b/rust_compiler/src/main.rs @@ -1,8 +1,5 @@ #![allow(clippy::result_large_err)] -#[macro_use] -extern crate quick_error; - use clap::Parser; use compiler::Compiler; use parser::Parser as ASTParser; @@ -11,28 +8,22 @@ use std::{ io::{stderr, BufWriter, Read, Write}, path::PathBuf, }; +use thiserror::Error; use tokenizer::{self, Tokenizer}; -quick_error! 
{ - #[derive(Debug)] - enum StationlangError { - TokenizerError(err: tokenizer::Error) { - from() - display("Tokenizer error: {}", err) - } - ParserError(err: parser::Error) { - from() - display("Parser error: {}", err) - } - CompileError(err: compiler::Error) { - from() - display("Compile error: {}", err) - } - IoError(err: std::io::Error) { - from() - display("IO error: {}", err) - } - } +#[derive(Error, Debug)] +enum StationlangError { + #[error(transparent)] + Tokenizer(#[from] tokenizer::Error), + + #[error(transparent)] + Parser(#[from] parser::Error), + + #[error(transparent)] + Compile(#[from] compiler::Error), + + #[error(transparent)] + IO(#[from] std::io::Error), } #[derive(Parser, Debug)]