Rust catch_unwind as a safeguard
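
The pattern this commit applies, as a rough sketch: each `#[ffi_export]` entry point runs its real work inside `std::panic::catch_unwind`, so a panic anywhere in the tokenizer, parser, or compiler is caught and turned into an empty fallback value instead of unwinding across the FFI boundary into the C# host. The names below (`compile_guarded`, `run_compiler`) are illustrative stand-ins, not functions from this repository.

use std::panic;

// Hypothetical FFI-style entry point showing the safeguard pattern.
// `run_compiler` stands in for the real tokenize/parse/compile pipeline.
fn compile_guarded(source: &str) -> String {
    let res = panic::catch_unwind(|| {
        run_compiler(source) // may panic somewhere deep inside
    });
    // On panic, hand the host an empty string instead of aborting the process.
    res.unwrap_or_else(|_| String::new())
}

fn run_compiler(source: &str) -> String {
    // Placeholder body; panics on empty input to exercise the guard.
    assert!(!source.is_empty(), "empty source");
    source.to_uppercase()
}

fn main() {
    assert_eq!(compile_guarded("abc"), "ABC");
    assert_eq!(compile_guarded(""), ""); // panic caught, fallback returned
}
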
@@ -797,7 +797,11 @@ impl<'a, W: std::io::Write> Compiler<'a, W> {
         self.write_output(format!("j {end_label}"))?;
         self.write_output(format!("{else_label}:"))?;
 
-        match expr.else_branch.unwrap().node {
+        match expr
+            .else_branch
+            .ok_or(Error::Unknown("Missing else branch. This should not happen and indicates a Compiler Error. Please report to the author.".into(), None))?
+            .node
+        {
             Expression::Block(block) => self.expression_block(block.node, scope)?,
             Expression::If(if_expr) => self.expression_if(if_expr.node, scope)?,
             _ => unreachable!("Parser ensures else branch is Block or If"),
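
The first hunk above swaps an `unwrap()` on the parser-provided else branch for `Option::ok_or`, so the "impossible" `None` case becomes an `Error` value that propagates through `?` like every other compiler error instead of panicking. A minimal sketch of that standard-library pattern, with an illustrative `Error` enum rather than the project's real one:

#[derive(Debug)]
enum Error {
    Unknown(String),
}

// `ok_or` converts Option<T> into Result<T, Error>, so `?` can forward the error.
fn compile_else(else_branch: Option<&str>) -> Result<String, Error> {
    let branch = else_branch.ok_or(Error::Unknown("missing else branch".into()))?;
    Ok(format!("compiling else branch: {branch}"))
}

fn main() {
    assert!(compile_else(Some("block")).is_ok());
    assert!(compile_else(None).is_err()); // error value instead of a panic
}
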
@@ -82,6 +82,7 @@ pub fn free_string(s: safer_ffi::String) {
 /// from the GC from a `GetBytes()` call on a string in C#.
 #[ffi_export]
 pub fn compile_from_string(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::String {
+    let res = std::panic::catch_unwind(|| {
     let mut writer = BufWriter::new(Vec::new());
 
     let tokenizer = Tokenizer::from(String::from_utf16_lossy(input.as_slice()));
@@ -98,10 +99,14 @@ pub fn compile_from_string(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::
 
     // Safety: I know the compiler only outputs valid utf8
     safer_ffi::String::from(unsafe { String::from_utf8_unchecked(compiled_vec) })
+    });
+
+    res.unwrap_or("".into())
 }
 
 #[ffi_export]
 pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<FfiToken> {
+    let res = std::panic::catch_unwind(|| {
     let tokenizer = Tokenizer::from(String::from_utf16_lossy(input.as_slice()));
 
     let mut tokens = Vec::new();
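
`catch_unwind` returns `Result<T, Box<dyn Any + Send + 'static>>`, where the `Err` payload is whatever the panic carried; the exports here only need to know that a panic happened, so `res.unwrap_or(...)` with an empty value is all the handling required. A small sketch of inspecting both sides of that result, independent of this codebase:

use std::panic;

fn guarded_len(input: &str) -> usize {
    let res = panic::catch_unwind(|| {
        assert!(!input.is_empty(), "empty input"); // panics on empty input
        input.len()
    });
    match res {
        Ok(len) => len,
        // The payload is typically a &str or String produced by panic!/assert!.
        Err(_payload) => 0, // fallback, mirroring `res.unwrap_or(...)` in the diff
    }
}

fn main() {
    assert_eq!(guarded_len("abc"), 3);
    assert_eq!(guarded_len(""), 0);
}
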
@@ -123,7 +128,9 @@ pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<Ff
             NumberParseError(_, _, col, og)
             | DecimalParseError(_, _, col, og)
             | UnknownSymbolError(_, _, col, og)
-            | UnknownKeywordOrIdentifierError(_, _, col, og) => (e.to_string(), col, og),
+            | UnknownKeywordOrIdentifierError(_, _, col, og) => {
+                (e.to_string(), col, og)
+            }
             _ => continue,
         };
 
@@ -151,10 +158,14 @@ pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<Ff
     }
 
     tokens.into()
+    });
+
+    res.unwrap_or(vec![].into())
 }
 
 #[ffi_export]
 pub fn diagnose_source(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<FfiDiagnostic> {
+    let res = std::panic::catch_unwind(|| {
     let mut writer = BufWriter::new(Vec::new());
     let tokenizer = Tokenizer::from(String::from_utf16_lossy(input.as_slice()));
     let compiler = Compiler::new(Parser::new(tokenizer), &mut writer, None);
@@ -168,4 +179,7 @@ pub fn diagnose_source(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<
     }
 
     result_vec.into()
+    });
+
+    res.unwrap_or(vec![].into())
 }
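
A caveat that goes with this pattern: `std::panic::catch_unwind` requires its closure to be `UnwindSafe`. The exports here build their writers and tokenizers inside the closure, which helps keep that bound satisfied; if a later change needs to capture a mutable borrow from outside, `std::panic::AssertUnwindSafe` is the usual opt-in wrapper. A hedged sketch, not code from this repository:

use std::panic::{self, AssertUnwindSafe};

fn append_guarded(buf: &mut Vec<u8>, chunk: &[u8]) -> bool {
    // A captured &mut borrow is not UnwindSafe by default;
    // AssertUnwindSafe asserts that `buf` stays usable even if the closure panics.
    let res = panic::catch_unwind(AssertUnwindSafe(|| {
        buf.extend_from_slice(chunk);
    }));
    res.is_ok()
}

fn main() {
    let mut out = Vec::new();
    assert!(append_guarded(&mut out, b"abc"));
    assert_eq!(out.as_slice(), b"abc");
}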