CLI error handling
All checks were successful
CI/CD Pipeline / test (pull_request) Successful in 37s
CI/CD Pipeline / build (pull_request) Has been skipped
CI/CD Pipeline / release (pull_request) Has been skipped

This commit is contained in:
2026-01-02 16:44:38 -07:00
parent 4c704b8960
commit 2070c2e4ca
4 changed files with 35 additions and 5 deletions

View File

@@ -1589,7 +1589,7 @@ impl<'a> Parser<'a> {
} else {
// we need to rewind our tokenizer to our previous location
self.tokenizer.seek(SeekFrom::Current(
self.tokenizer.loc() - current_token_index,
current_token_index - self.tokenizer.loc(),
))?;
let syscall = self.spanned(|p| p.syscall())?;

View File

@@ -149,6 +149,23 @@ fn test_const_hash_expression() -> Result<()> {
Ok(())
}
#[test]
fn test_const_hash() -> Result<()> {
    // Regression test for the tokenizer rewind offset used when a const
    // assignment's right-hand side turns out to be a syscall.
    //
    // Parsing `const h = hash(...)` proceeds as follows:
    //   1. "const", the identifier, and "=" are consumed.
    //   2. "hash(...)" is first attempted as a literal, which fails.
    //   3. The tokenizer must rewind to just before "hash".
    //   4. The expression is then re-parsed as a syscall.
    // If the rewind offset has the wrong sign (positive instead of
    // negative), the tokenizer ends up at the wrong position and the
    // parse below fails.
    let parsed = parser!(r#"const h = hash("ComponentComputer")"#).parse()?;
    let expr = parsed.unwrap();
    let rendered = expr.to_string();
    assert_eq!(r#"(const h = hash("ComponentComputer"))"#, rendered);
    Ok(())
}
#[test]
fn test_negative_literal_const() -> Result<()> {
let expr = parser!(r#"const i = -123"#).parse()?.unwrap();

View File

@@ -145,8 +145,20 @@ impl<'a> TokenizerBuffer<'a> {
use Ordering::*;
match seek_to_int.cmp(&0) {
Greater => {
let mut tokens = Vec::with_capacity(seek_to_int as usize);
for _ in 0..seek_to_int {
let mut seek_remaining = seek_to_int as usize;
// First, consume tokens from the buffer (peeked but not yet consumed)
while seek_remaining > 0 && !self.buffer.is_empty() {
if let Some(token) = self.buffer.pop_front() {
self.history.push_back(token);
seek_remaining -= 1;
self.index += 1;
}
}
// Then get tokens from tokenizer if needed
let mut tokens = Vec::with_capacity(seek_remaining);
for _ in 0..seek_remaining {
if let Some(token) = self.tokenizer.next_token()? {
tokens.push(token);
} else {
@@ -157,6 +169,7 @@ impl<'a> TokenizerBuffer<'a> {
}
}
self.history.extend(tokens);
self.index += seek_remaining as i64;
}
Less => {
let seek_to = seek_to_int.unsigned_abs() as usize;

View File

@@ -65,8 +65,8 @@ fn run_logic<'a>() -> Result<(), Error<'a>> {
let input_string = match input_file {
Some(input_path) => {
let mut buf = String::new();
let mut file = std::fs::File::open(input_path).unwrap();
file.read_to_string(&mut buf).unwrap();
let mut file = std::fs::File::open(input_path)?;
file.read_to_string(&mut buf)?;
buf
}
None => {