wip
@@ -4,7 +4,7 @@ use parser::{sys_call::SysCall, Parser};
 use safer_ffi::prelude::*;
 use std::io::BufWriter;
 use tokenizer::{
-    token::{Token, TokenType},
+    token::{LexError, Token, TokenType},
     Tokenizer,
 };

@@ -96,9 +96,10 @@ pub fn free_docs_vec(v: safer_ffi::Vec<FfiDocumentedItem>) {
 #[ffi_export]
 pub fn compile_from_string(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::String {
     let res = std::panic::catch_unwind(|| {
+        let input = String::from_utf16_lossy(input.as_slice());
         let mut writer = BufWriter::new(Vec::new());

-        let tokenizer = Tokenizer::from(String::from_utf16_lossy(input.as_slice()));
+        let tokenizer = Tokenizer::from(input.as_str());
         let parser = Parser::new(tokenizer);
         let compiler = Compiler::new(parser, &mut writer, None);

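The pattern this hunk (and the two FFI hunks after it) adopts is decode-once: convert the UTF-16 slice arriving over the FFI boundary into an owned String a single time, then hand every consumer a &str borrow instead of re-running the lossy decode. A standalone sketch of that shape, with a stub Tokenizer standing in for the real crate's type (the diff only implies a From<&str> impl; the lifetime detail here is an assumption):

    // Stub Tokenizer standing in for the real one; it borrows the
    // already-decoded source rather than owning a fresh String.
    struct Tokenizer<'a> {
        src: &'a str,
    }

    impl<'a> From<&'a str> for Tokenizer<'a> {
        fn from(src: &'a str) -> Self {
            Tokenizer { src }
        }
    }

    fn main() {
        // Stand-in for the safer_ffi slice: "hi" as UTF-16 code units.
        let raw: Vec<u16> = "hi".encode_utf16().collect();

        let input = String::from_utf16_lossy(&raw); // decode exactly once
        let tokenizer = Tokenizer::from(input.as_str()); // cheap borrow
        assert_eq!(tokenizer.src, "hi");
    }

In the FFI functions above, the owned String lives for the whole catch_unwind closure, so the borrow stays valid for the full compile.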
@@ -120,7 +121,8 @@ pub fn compile_from_string(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::String {
 #[ffi_export]
 pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<FfiToken> {
     let res = std::panic::catch_unwind(|| {
-        let tokenizer = Tokenizer::from(String::from_utf16_lossy(input.as_slice()));
+        let input = String::from_utf16_lossy(input.as_slice());
+        let tokenizer = Tokenizer::from(input.as_str());

         let mut tokens = Vec::new();

@@ -136,34 +138,36 @@ pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<FfiToken> {
             }
             match token {
                 Err(ref e) => {
+                    use tokenizer::token::LexError;
                     use tokenizer::Error::*;
-                    let (err_str, col, og) = match e {
-                        NumberParseError(_, _, col, og)
-                        | DecimalParseError(_, _, col, og)
-                        | UnknownSymbolError(_, _, col, og)
-                        | UnknownKeywordOrIdentifierError(_, _, col, og) => {
-                            (e.to_string(), col, og)
-                        }
+                    let (err_str, line, span) = match e {
+                        LexError(e) => match e {
+                            LexError::NumberParseError(line, span, err)
+                            | LexError::InvalidInput(line, span, err) => {
+                                (err.to_string(), line, span)
+                            }
+                            _ => continue,
+                        },
                         _ => continue,
                     };

                     tokens.push(FfiToken {
-                        column: *col as i32,
+                        column: span.start as i32,
                         error: err_str.into(),
                         tooltip: "".into(),
-                        length: og.len() as i32,
+                        length: (span.end - span.start) as i32,
                         token_kind: 0,
                     })
                 }
                 Ok(Token {
-                    column,
-                    original_string,
+                    line,
+                    span,
                     token_type,
                     ..
                 }) => tokens.push(FfiToken {
-                    column: column as i32,
+                    column: span.start as i32,
                     error: "".into(),
-                    length: (original_string.unwrap_or_default().len()) as i32,
+                    length: (span.end - span.start) as i32,
                     tooltip: token_type.docs().into(),
                     token_kind: token_type.into(),
                 }),
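The hunk above replaces column/original-string bookkeeping with a half-open span, so both the reported column and the on-screen length fall out of span.start and span.end. A minimal sketch, assuming span is a Range<usize> (the actual field type is not shown in this diff):

    use std::ops::Range;

    // Hypothetical token carrying only a half-open span [start, end).
    struct Tok {
        span: Range<usize>,
    }

    // Mirrors the diff: column is where the span starts, and length is
    // end - start, well-defined even when original_string was never kept.
    fn column_and_length(t: &Tok) -> (i32, i32) {
        (t.span.start as i32, (t.span.end - t.span.start) as i32)
    }

    fn main() {
        let t = Tok { span: 4..9 };
        assert_eq!(column_and_length(&t), (4, 5));
    }

One span then drives both the error branch and the Ok branch, which is why the two FfiToken constructions above collapse onto identical column/length expressions.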
@@ -179,8 +183,10 @@ pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<FfiToken> {
 #[ffi_export]
 pub fn diagnose_source(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<FfiDiagnostic> {
     let res = std::panic::catch_unwind(|| {
+        let input = String::from_utf16_lossy(input.as_slice());
+
         let mut writer = BufWriter::new(Vec::new());
-        let tokenizer = Tokenizer::from(String::from_utf16_lossy(input.as_slice()));
+        let tokenizer = Tokenizer::from(input.as_str());
         let compiler = Compiler::new(Parser::new(tokenizer), &mut writer, None);

         let diagnosis = compiler.compile();
@@ -50,8 +50,13 @@ fn run_logic() -> Result<(), StationlangError> {
     let args = Args::parse();
     let input_file = args.input_file;

-    let tokenizer: Tokenizer = match input_file {
-        Some(input_file) => Tokenizer::from_path(&input_file)?,
+    let input_string = match input_file {
+        Some(input_path) => {
+            let mut buf = String::new();
+            let mut file = std::fs::File::open(input_path).unwrap();
+            file.read_to_string(&mut buf).unwrap();
+            buf
+        }
         None => {
             let mut buf = String::new();
             let stdin = std::io::stdin();
@@ -62,10 +67,12 @@ fn run_logic() -> Result<(), StationlangError> {
                 return Ok(());
             }

-            Tokenizer::from(buf)
+            buf
         }
     };

+    let tokenizer = Tokenizer::from(input_string.as_str());
+
     let parser = ASTParser::new(tokenizer);

     let mut writer: BufWriter<Box<dyn Write>> = match args.output_file {
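The CLI hunks make the same move as the FFI side: materialize the whole program as one owned String (from a file if a path was given, else stdin), then build the tokenizer from a borrow. A sketch of that read step using ? instead of the diff's unwrap(); read_source is a hypothetical helper, not a function in this repo:

    use std::io::Read;
    use std::path::Path;

    // Hypothetical helper: read the entire source into one String from a
    // file (when a path is supplied) or from stdin (when it is not).
    fn read_source(path: Option<&Path>) -> std::io::Result<String> {
        let mut buf = String::new();
        match path {
            Some(p) => {
                std::fs::File::open(p)?.read_to_string(&mut buf)?;
            }
            None => {
                std::io::stdin().read_to_string(&mut buf)?;
            }
        }
        Ok(buf)
    }

    fn main() -> std::io::Result<()> {
        // No path given: read the program from stdin, as the CLI now does.
        let source = read_source(None)?;
        println!("read {} bytes of source", source.len());
        Ok(())
    }

From there, Tokenizer::from(source.as_str()) works the same whether the bytes came from disk or a pipe.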
||||