Remove quick-error in favor of thiserror

This commit is contained in:
2025-12-09 11:32:14 -07:00
parent 23c2ba4134
commit c531f673a5
13 changed files with 97 additions and 136 deletions

View File

@@ -272,8 +272,8 @@ dependencies = [
"lsp-types", "lsp-types",
"parser", "parser",
"pretty_assertions", "pretty_assertions",
"quick-error",
"rust_decimal", "rust_decimal",
"thiserror",
"tokenizer", "tokenizer",
] ]
@@ -571,7 +571,6 @@ dependencies = [
"helpers", "helpers",
"lsp-types", "lsp-types",
"pretty_assertions", "pretty_assertions",
"quick-error",
"thiserror", "thiserror",
"tokenizer", "tokenizer",
] ]
@@ -649,12 +648,6 @@ dependencies = [
"syn 1.0.109", "syn 1.0.109",
] ]
[[package]]
name = "quick-error"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
[[package]] [[package]]
name = "quote" name = "quote"
version = "1.0.42" version = "1.0.42"
@@ -924,9 +917,9 @@ dependencies = [
"helpers", "helpers",
"lsp-types", "lsp-types",
"parser", "parser",
"quick-error",
"rust_decimal", "rust_decimal",
"safer-ffi", "safer-ffi",
"thiserror",
"tokenizer", "tokenizer",
] ]
@@ -1042,7 +1035,6 @@ dependencies = [
"helpers", "helpers",
"logos", "logos",
"lsp-types", "lsp-types",
"quick-error",
"rust_decimal", "rust_decimal",
"thiserror", "thiserror",
] ]

View File

@@ -7,7 +7,7 @@ edition = "2021"
members = ["libs/*"] members = ["libs/*"]
[workspace.dependencies] [workspace.dependencies]
quick-error = "2" thiserror = "2"
rust_decimal = "1" rust_decimal = "1"
safer-ffi = { version = "0.1" } # Safely share structs in memory between C# and Rust safer-ffi = { version = "0.1" } # Safely share structs in memory between C# and Rust
lsp-types = { version = "0.97" } # Allows for LSP style reporting to the frontend lsp-types = { version = "0.97" } # Allows for LSP style reporting to the frontend
@@ -36,7 +36,7 @@ crate-type = ["cdylib", "rlib"]
[dependencies] [dependencies]
clap = { version = "^4.5", features = ["derive"] } clap = { version = "^4.5", features = ["derive"] }
lsp-types = { workspace = true } lsp-types = { workspace = true }
quick-error = { workspace = true } thiserror = { workspace = true }
rust_decimal = { workspace = true } rust_decimal = { workspace = true }
tokenizer = { path = "libs/tokenizer" } tokenizer = { path = "libs/tokenizer" }
parser = { path = "libs/parser" } parser = { path = "libs/parser" }

View File

@@ -4,7 +4,7 @@ version = "0.1.0"
edition = "2024" edition = "2024"
[dependencies] [dependencies]
quick-error = { workspace = true } thiserror = { workspace = true }
parser = { path = "../parser" } parser = { path = "../parser" }
tokenizer = { path = "../tokenizer" } tokenizer = { path = "../tokenizer" }
helpers = { path = "../helpers" } helpers = { path = "../helpers" }

View File

@@ -11,11 +11,11 @@ use parser::{
LoopExpression, MemberAccessExpression, Span, Spanned, WhileExpression, LoopExpression, MemberAccessExpression, Span, Spanned, WhileExpression,
}, },
}; };
use quick_error::quick_error;
use std::{ use std::{
collections::HashMap, collections::HashMap,
io::{BufWriter, Write}, io::{BufWriter, Write},
}; };
use thiserror::Error;
use tokenizer::token::Number; use tokenizer::token::Number;
macro_rules! debug { macro_rules! debug {
@@ -50,40 +50,37 @@ fn extract_literal(literal: Literal, allow_strings: bool) -> Result<String, Erro
}) })
} }
quick_error! { #[derive(Error, Debug)]
#[derive(Debug)] pub enum Error {
pub enum Error { #[error(transparent)]
ParseError(error: parser::Error) { Parse(#[from] parser::Error),
from()
} #[error(transparent)]
IoError(error: String) { Scope(#[from] variable_manager::Error),
display("IO Error: {}", error)
} #[error("IO Error: {0}")]
ScopeError(error: variable_manager::Error) { IO(String),
from()
} #[error("`{0}` has already been defined.")]
DuplicateIdentifier(func_name: String, span: Span) { DuplicateIdentifier(String, Span),
display("`{func_name}` has already been defined")
} #[error("`{0}` is not found in the current scope.")]
UnknownIdentifier(ident: String, span: Span) { UnknownIdentifier(String, Span),
display("`{ident}` is not found in the current scope.")
} #[error("`{0}` is not valid.")]
InvalidDevice(device: String, span: Span) { InvalidDevice(String, Span),
display("`{device}` is not valid")
} #[error("Incorrect number of arguments passed into `{0}`")]
AgrumentMismatch(func_name: String, span: Span) { AgrumentMismatch(String, Span),
display("Incorrect number of arguments passed into `{func_name}`")
} #[error("Attempted to re-assign a value to const variable `{0}`")]
ConstAssignment(ident: String, span: Span) { ConstAssignment(String, Span),
display("Attempted to re-assign a value to const variable `{ident}`")
} #[error("Attempted to re-assign a value to a device const `{0}`")]
DeviceAssignment(ident: String, span: Span) { DeviceAssignment(String, Span),
display("Attempted to re-assign a value to a device const `{ident}`")
} #[error("{0}")]
Unknown(reason: String, span: Option<Span>) { Unknown(String, Option<Span>),
display("{reason}")
}
}
} }
impl From<Error> for lsp_types::Diagnostic { impl From<Error> for lsp_types::Diagnostic {
@@ -91,13 +88,13 @@ impl From<Error> for lsp_types::Diagnostic {
use Error::*; use Error::*;
use lsp_types::*; use lsp_types::*;
match value { match value {
ParseError(e) => e.into(), Parse(e) => e.into(),
IoError(e) => Diagnostic { IO(e) => Diagnostic {
message: e.to_string(), message: e.to_string(),
severity: Some(DiagnosticSeverity::ERROR), severity: Some(DiagnosticSeverity::ERROR),
..Default::default() ..Default::default()
}, },
ScopeError(e) => e.into(), Scope(e) => e.into(),
DuplicateIdentifier(_, span) DuplicateIdentifier(_, span)
| UnknownIdentifier(_, span) | UnknownIdentifier(_, span)
| InvalidDevice(_, span) | InvalidDevice(_, span)
@@ -122,7 +119,7 @@ impl From<Error> for lsp_types::Diagnostic {
// Map io::Error to Error manually since we can't clone io::Error // Map io::Error to Error manually since we can't clone io::Error
impl From<std::io::Error> for Error { impl From<std::io::Error> for Error {
fn from(err: std::io::Error) -> Self { fn from(err: std::io::Error) -> Self {
Error::IoError(err.to_string()) Error::IO(err.to_string())
} }
} }
@@ -181,7 +178,7 @@ impl<'a, W: std::io::Write> Compiler<'a, W> {
// Copy errors from parser // Copy errors from parser
for e in std::mem::take(&mut self.parser.errors) { for e in std::mem::take(&mut self.parser.errors) {
self.errors.push(Error::ParseError(e)); self.errors.push(Error::Parse(e));
} }
// We treat parse_all result as potentially partial // We treat parse_all result as potentially partial
@@ -190,7 +187,7 @@ impl<'a, W: std::io::Write> Compiler<'a, W> {
Ok(None) => return self.errors, Ok(None) => return self.errors,
Err(e) => { Err(e) => {
// Should be covered by parser.errors, but just in case // Should be covered by parser.errors, but just in case
self.errors.push(Error::ParseError(e)); self.errors.push(Error::Parse(e));
return self.errors; return self.errors;
} }
}; };
@@ -979,7 +976,7 @@ impl<'a, W: std::io::Write> Compiler<'a, W> {
for register in active_registers { for register in active_registers {
let VariableLocation::Stack(stack_offset) = stack let VariableLocation::Stack(stack_offset) = stack
.get_location_of(format!("temp_{register}"), None) .get_location_of(format!("temp_{register}"), None)
.map_err(Error::ScopeError)? .map_err(Error::Scope)?
else { else {
// This shouldn't happen if we just added it // This shouldn't happen if we just added it
return Err(Error::Unknown( return Err(Error::Unknown(

View File

@@ -5,25 +5,22 @@
use lsp_types::{Diagnostic, DiagnosticSeverity}; use lsp_types::{Diagnostic, DiagnosticSeverity};
use parser::tree_node::{Literal, Span}; use parser::tree_node::{Literal, Span};
use quick_error::quick_error;
use std::collections::{HashMap, VecDeque}; use std::collections::{HashMap, VecDeque};
use thiserror::Error;
const TEMP: [u8; 7] = [1, 2, 3, 4, 5, 6, 7]; const TEMP: [u8; 7] = [1, 2, 3, 4, 5, 6, 7];
const PERSIST: [u8; 7] = [8, 9, 10, 11, 12, 13, 14]; const PERSIST: [u8; 7] = [8, 9, 10, 11, 12, 13, 14];
quick_error! { #[derive(Error, Debug)]
#[derive(Debug)] pub enum Error {
pub enum Error { #[error("{0} already exists.")]
DuplicateVariable(var: String, span: Option<Span>) { DuplicateVariable(String, Option<Span>),
display("{var} already exists.")
} #[error("{0} does not exist.")]
UnknownVariable(var: String, span: Option<Span>) { UnknownVariable(String, Option<Span>),
display("{var} does not exist.")
} #[error("{0}")]
Unknown(reason: String, span: Option<Span>) { Unknown(String, Option<Span>),
display("{reason}")
}
}
} }
impl From<Error> for lsp_types::Diagnostic { impl From<Error> for lsp_types::Diagnostic {

View File

@@ -4,11 +4,10 @@ version = "0.1.0"
edition = "2024" edition = "2024"
[dependencies] [dependencies]
quick-error = { workspace = true }
tokenizer = { path = "../tokenizer" } tokenizer = { path = "../tokenizer" }
helpers = { path = "../helpers" } helpers = { path = "../helpers" }
lsp-types = { workspace = true } lsp-types = { workspace = true }
thiserror = "2" thiserror = { workspace = true }
[dev-dependencies] [dev-dependencies]

View File

@@ -1,7 +1,6 @@
pub mod sys_call;
#[cfg(test)] #[cfg(test)]
mod test; mod test;
pub mod sys_call;
pub mod tree_node; pub mod tree_node;
use crate::sys_call::{Math, System}; use crate::sys_call::{Math, System};
@@ -28,8 +27,8 @@ macro_rules! boxed {
#[derive(Error, Debug)] #[derive(Error, Debug)]
pub enum Error { pub enum Error {
#[error("Tokenizer Error: {0}")] #[error(transparent)]
TokenizerError(#[from] tokenizer::Error), Tokenizer(#[from] tokenizer::Error),
#[error("Unexpected token: {1}")] #[error("Unexpected token: {1}")]
UnexpectedToken(Span, Token), UnexpectedToken(Span, Token),
@@ -52,7 +51,7 @@ impl From<Error> for lsp_types::Diagnostic {
use Error::*; use Error::*;
use lsp_types::*; use lsp_types::*;
match value { match value {
TokenizerError(e) => e.into(), Tokenizer(e) => e.into(),
UnexpectedToken(span, _) UnexpectedToken(span, _)
| DuplicateIdentifier(span, _) | DuplicateIdentifier(span, _)
| InvalidSyntax(span, _) | InvalidSyntax(span, _)
@@ -216,7 +215,7 @@ impl<'a> Parser<'a> {
match self.tokenizer.peek() { match self.tokenizer.peek() {
Ok(None) => break, Ok(None) => break,
Err(e) => { Err(e) => {
self.errors.push(Error::TokenizerError(e)); self.errors.push(Error::Tokenizer(e));
break; break;
} }
_ => {} _ => {}

View File

@@ -1,8 +1,6 @@
use std::ops::Deref;
use crate::sys_call;
use super::sys_call::SysCall; use super::sys_call::SysCall;
use crate::sys_call;
use std::ops::Deref;
use tokenizer::token::Number; use tokenizer::token::Number;
#[derive(Debug, Eq, PartialEq, Clone)] #[derive(Debug, Eq, PartialEq, Clone)]

View File

@@ -5,11 +5,10 @@ edition = "2024"
[dependencies] [dependencies]
rust_decimal = { workspace = true } rust_decimal = { workspace = true }
quick-error = { workspace = true }
lsp-types = { workspace = true } lsp-types = { workspace = true }
thiserror = { workspace = true }
helpers = { path = "../helpers" } helpers = { path = "../helpers" }
logos = "0.16" logos = "0.16"
thiserror = "2"
[dev-dependencies] [dev-dependencies]
anyhow = { version = "^1" } anyhow = { version = "^1" }

View File

@@ -1,26 +1,20 @@
pub mod token; pub mod token;
use logos::{Lexer, Logos}; use logos::{Lexer, Logos};
use quick_error::quick_error;
use std::{ use std::{
cmp::Ordering, cmp::Ordering,
collections::VecDeque, collections::VecDeque,
io::{Read, Seek, SeekFrom}, io::{Read, Seek, SeekFrom},
}; };
use thiserror::Error;
use token::{Token, TokenType}; use token::{Token, TokenType};
quick_error! { #[derive(Error, Debug)]
#[derive(Debug)] pub enum Error {
pub enum Error { #[error("IO Error: {0}")]
IOError(err: std::io::Error) { IOError(#[from] std::io::Error),
from() #[error(transparent)]
display("IO Error: {}", err) LexError(#[from] token::LexError),
source(err)
}
LexError(err: token::LexError) {
from()
}
}
} }
impl From<Error> for lsp_types::Diagnostic { impl From<Error> for lsp_types::Diagnostic {

View File

@@ -7,7 +7,7 @@ use thiserror::Error;
#[derive(Debug, Error, Default, Clone, PartialEq)] #[derive(Debug, Error, Default, Clone, PartialEq)]
pub enum LexError { pub enum LexError {
#[error("Attempted to parse an invalid number: {2}")] #[error("Attempted to parse an invalid number: {2}")]
NumberParseError(usize, Span, String), NumberParse(usize, Span, String),
#[error("An invalid character was found in token stream: {2}")] #[error("An invalid character was found in token stream: {2}")]
InvalidInput(usize, Span, String), InvalidInput(usize, Span, String),
@@ -20,7 +20,7 @@ pub enum LexError {
impl From<LexError> for Diagnostic { impl From<LexError> for Diagnostic {
fn from(value: LexError) -> Self { fn from(value: LexError) -> Self {
match value { match value {
LexError::NumberParseError(line, col, str) | LexError::InvalidInput(line, col, str) => { LexError::NumberParse(line, col, str) | LexError::InvalidInput(line, col, str) => {
Diagnostic { Diagnostic {
range: Range { range: Range {
start: Position { start: Position {
@@ -281,13 +281,13 @@ fn parse_number<'a>(lexer: &mut Lexer<'a, TokenType>) -> Result<Number, LexError
Number::Decimal( Number::Decimal(
clean_str clean_str
.parse::<Decimal>() .parse::<Decimal>()
.map_err(|_| LexError::NumberParseError(line, span, slice.to_string()))?, .map_err(|_| LexError::NumberParse(line, span, slice.to_string()))?,
) )
} else { } else {
Number::Integer( Number::Integer(
clean_str clean_str
.parse::<i128>() .parse::<i128>()
.map_err(|_| LexError::NumberParseError(line, span, slice.to_string()))?, .map_err(|_| LexError::NumberParse(line, span, slice.to_string()))?,
) )
}; };

View File

@@ -4,7 +4,7 @@ use parser::{sys_call::SysCall, Parser};
use safer_ffi::prelude::*; use safer_ffi::prelude::*;
use std::io::BufWriter; use std::io::BufWriter;
use tokenizer::{ use tokenizer::{
token::{LexError, Token, TokenType}, token::{Token, TokenType},
Tokenizer, Tokenizer,
}; };
@@ -140,14 +140,12 @@ pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<Ff
Err(ref e) => { Err(ref e) => {
use tokenizer::token::LexError; use tokenizer::token::LexError;
use tokenizer::Error::*; use tokenizer::Error::*;
let (err_str, line, span) = match e { let (err_str, _, span) = match e {
LexError(e) => match e { LexError(LexError::NumberParse(line, span, err))
LexError::NumberParseError(line, span, err) | LexError(LexError::InvalidInput(line, span, err)) => {
| LexError::InvalidInput(line, span, err) => {
(err.to_string(), line, span) (err.to_string(), line, span)
} }
_ => continue,
},
_ => continue, _ => continue,
}; };
@@ -160,10 +158,7 @@ pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<Ff
}) })
} }
Ok(Token { Ok(Token {
line, span, token_type, ..
span,
token_type,
..
}) => tokens.push(FfiToken { }) => tokens.push(FfiToken {
column: span.start as i32, column: span.start as i32,
error: "".into(), error: "".into(),

View File

@@ -1,8 +1,5 @@
#![allow(clippy::result_large_err)] #![allow(clippy::result_large_err)]
#[macro_use]
extern crate quick_error;
use clap::Parser; use clap::Parser;
use compiler::Compiler; use compiler::Compiler;
use parser::Parser as ASTParser; use parser::Parser as ASTParser;
@@ -11,28 +8,22 @@ use std::{
io::{stderr, BufWriter, Read, Write}, io::{stderr, BufWriter, Read, Write},
path::PathBuf, path::PathBuf,
}; };
use thiserror::Error;
use tokenizer::{self, Tokenizer}; use tokenizer::{self, Tokenizer};
quick_error! { #[derive(Error, Debug)]
#[derive(Debug)] enum StationlangError {
enum StationlangError { #[error(transparent)]
TokenizerError(err: tokenizer::Error) { Tokenizer(#[from] tokenizer::Error),
from()
display("Tokenizer error: {}", err) #[error(transparent)]
} Parser(#[from] parser::Error),
ParserError(err: parser::Error) {
from() #[error(transparent)]
display("Parser error: {}", err) Compile(#[from] compiler::Error),
}
CompileError(err: compiler::Error) { #[error(transparent)]
from() IO(#[from] std::io::Error),
display("Compile error: {}", err)
}
IoError(err: std::io::Error) {
from()
display("IO error: {}", err)
}
}
} }
#[derive(Parser, Debug)] #[derive(Parser, Debug)]