CLI error handling
@@ -1589,7 +1589,7 @@ impl<'a> Parser<'a> {
         } else {
             // we need to rewind our tokenizer to our previous location
             self.tokenizer.seek(SeekFrom::Current(
-                self.tokenizer.loc() - current_token_index,
+                current_token_index - self.tokenizer.loc(),
             ))?;
             let syscall = self.spanned(|p| p.syscall())?;

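This hunk flips the operand order of the seek offset. `SeekFrom::Current` is relative, so rewinding from the current location back to `current_token_index` needs `target - current`, which is negative when moving backwards; the old order produced a positive offset that seeked forward instead. A minimal standalone sketch of the arithmetic (the helper and `main` are illustrative, not project code):

// Hypothetical helper illustrating the relative-seek arithmetic; not part
// of the project's Parser.
fn rewind_offset(target: i64, current: i64) -> i64 {
    // SeekFrom::Current applies a relative offset: new_pos = current + offset.
    // Landing back on `target` therefore needs target - current, which is
    // negative when rewinding.
    target - current
}

fn main() {
    // Tokenizer is at token 7, the expression started at token 4:
    assert_eq!(rewind_offset(4, 7), -3); // correct: move back three tokens
    // The old operand order would have produced +3, skipping ahead instead.
    assert_eq!(7 - 4, 3);
}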
@@ -149,6 +149,23 @@ fn test_const_hash_expression() -> Result<()> {
     Ok(())
 }
 
+#[test]
+fn test_const_hash() -> Result<()> {
+    // This test explicitly validates the tokenizer rewind logic.
+    // When parsing "const h = hash(...)", the parser:
+    // 1. Consumes "const", identifier, "="
+    // 2. Attempts to parse "hash(...)" as a literal - this fails
+    // 3. Must rewind the tokenizer to before "hash"
+    // 4. Then parse it as a syscall
+    // If the rewind offset is wrong (e.g., positive instead of negative),
+    // the tokenizer will be at the wrong position and parsing will fail.
+    let expr = parser!(r#"const h = hash("ComponentComputer")"#)
+        .parse()?
+        .unwrap();
+    assert_eq!(r#"(const h = hash("ComponentComputer"))"#, expr.to_string());
+    Ok(())
+}
+
 #[test]
 fn test_negative_literal_const() -> Result<()> {
     let expr = parser!(r#"const i = -123"#).parse()?.unwrap();
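The comment in the new test describes a standard backtracking step: try one production, and on failure restore the token position and try the alternative. A self-contained sketch of that shape, with hypothetical names standing in for the project's parser:

// Toks and both parse_* functions are stand-ins, not the project's API.
struct Toks {
    tokens: Vec<&'static str>,
    pos: usize,
}

impl Toks {
    fn next(&mut self) -> Option<&'static str> {
        let tok = self.tokens.get(self.pos).copied();
        if tok.is_some() {
            self.pos += 1;
        }
        tok
    }
}

fn parse_literal(t: &mut Toks) -> Result<i64, ()> {
    // Consumes a token even when it fails -- exactly why a rewind is needed.
    t.next().ok_or(())?.parse().map_err(|_| ())
}

fn parse_syscall(t: &mut Toks) -> Result<String, ()> {
    Ok(format!("syscall({})", t.next().ok_or(())?))
}

fn parse_value(t: &mut Toks) -> Result<String, ()> {
    let checkpoint = t.pos; // remember the position before the attempt
    match parse_literal(t) {
        Ok(n) => Ok(n.to_string()),
        Err(()) => {
            t.pos = checkpoint; // rewind past whatever the failed attempt consumed
            parse_syscall(t)
        }
    }
}

fn main() {
    let mut t = Toks { tokens: vec!["hash"], pos: 0 };
    assert_eq!(parse_value(&mut t), Ok("syscall(hash)".to_string()));
}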
@@ -145,8 +145,20 @@ impl<'a> TokenizerBuffer<'a> {
         use Ordering::*;
         match seek_to_int.cmp(&0) {
             Greater => {
-                let mut tokens = Vec::with_capacity(seek_to_int as usize);
-                for _ in 0..seek_to_int {
+                let mut seek_remaining = seek_to_int as usize;
+
+                // First, consume tokens from the buffer (peeked but not yet consumed)
+                while seek_remaining > 0 && !self.buffer.is_empty() {
+                    if let Some(token) = self.buffer.pop_front() {
+                        self.history.push_back(token);
+                        seek_remaining -= 1;
+                        self.index += 1;
+                    }
+                }
+
+                // Then get tokens from tokenizer if needed
+                let mut tokens = Vec::with_capacity(seek_remaining);
+                for _ in 0..seek_remaining {
                     if let Some(token) = self.tokenizer.next_token()? {
                         tokens.push(token);
                     } else {
@@ -157,6 +169,7 @@ impl<'a> TokenizerBuffer<'a> {
                     }
                 }
                 self.history.extend(tokens);
+                self.index += seek_remaining as i64;
             }
             Less => {
                 let seek_to = seek_to_int.unsigned_abs() as usize;
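The `Greater` arm previously pulled every token straight from the tokenizer, ignoring tokens that had already been peeked into the buffer; the fix drains the buffer first and only then reads fresh tokens, keeping `index` in step with what was actually consumed. A minimal sketch of that ordering, with a `VecDeque` and an iterator standing in for the buffer and the underlying tokenizer (assumed shapes, not the real fields):

use std::collections::VecDeque;

// `buffer`, `source`, and `history` stand in for TokenizerBuffer's peek
// buffer, underlying tokenizer, and consumed-token history.
fn seek_forward(
    buffer: &mut VecDeque<u32>,
    source: &mut impl Iterator<Item = u32>,
    mut remaining: usize,
    history: &mut Vec<u32>,
) {
    // 1. Drain tokens that were already peeked into the buffer...
    while remaining > 0 {
        match buffer.pop_front() {
            Some(tok) => {
                history.push(tok);
                remaining -= 1;
            }
            None => break,
        }
    }
    // 2. ...and only then pull fresh tokens from the source, so nothing is
    // produced twice and the position stays consistent.
    for tok in source.take(remaining) {
        history.push(tok);
    }
}

fn main() {
    let mut buffer = VecDeque::from([1u32, 2]); // two peeked-but-unconsumed tokens
    let mut source = 3u32..;
    let mut history = Vec::new();
    seek_forward(&mut buffer, &mut source, 3, &mut history);
    // Peeked tokens come out first, then exactly one fresh token.
    assert_eq!(history, vec![1, 2, 3]);
}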
@@ -65,8 +65,8 @@ fn run_logic<'a>() -> Result<(), Error<'a>> {
     let input_string = match input_file {
         Some(input_path) => {
             let mut buf = String::new();
-            let mut file = std::fs::File::open(input_path).unwrap();
-            file.read_to_string(&mut buf).unwrap();
+            let mut file = std::fs::File::open(input_path)?;
+            file.read_to_string(&mut buf)?;
             buf
         }
         None => {
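Replacing `.unwrap()` with `?` turns an I/O failure into an `Err` returned from `run_logic` rather than a panic, which is the commit's titular CLI error handling. For `?` to compile, `Error<'a>` must be convertible from `std::io::Error`; the diff doesn't show that impl, so the enum and its `Io` variant below are assumptions about the conventional shape:

use std::io::Read;

// Assumed error enum -- the real Error<'a> and its variants are not shown
// in the diff; this is only the usual shape the `?` conversion relies on.
#[derive(Debug)]
enum Error {
    Io(std::io::Error),
}

impl From<std::io::Error> for Error {
    fn from(e: std::io::Error) -> Self {
        Error::Io(e)
    }
}

fn read_input(path: &str) -> Result<String, Error> {
    let mut buf = String::new();
    let mut file = std::fs::File::open(path)?; // io::Error -> Error via From
    file.read_to_string(&mut buf)?;
    Ok(buf)
}

fn main() {
    // A missing file now surfaces as a reportable error instead of a panic.
    if let Err(e) = read_input("no_such_file.txt") {
        eprintln!("error: {e:?}");
    }
}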