Before attempt to convert buffer to dyn
src/main.rs: 37 changed lines
@@ -1,21 +1,28 @@
 mod parser;
 mod tokenizer;
+
+use std::io::{Read, Seek};
+
 use clap::Parser;
 use parser::Parser as ASTParser;
 use tokenizer::{Tokenizer, TokenizerError};
+
 #[derive(Debug, thiserror::Error)]
 enum StationlangError {
-    #[error("{0}")]
+    #[error(transparent)]
     TokenizerError(#[from] TokenizerError),
     #[error(transparent)]
     ParserError(#[from] parser::ParseError),
+    #[error(transparent)]
+    IoError(#[from] std::io::Error),
 }
+
 #[derive(Parser, Debug)]
 #[command(version, about, long_about = None)]
 struct Args {
-    /// What file should be compiled
+    /// What file should be compiled. If not set, input will be read from stdin
     #[arg(short, long)]
-    input_file: String,
+    input_file: Option<String>,
     /// The default stack size for the program
     #[arg(short, long, default_value_t = 512)]
     stack_size: usize,
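A note on the attribute swap above: with thiserror, #[error("{0}")] renders the variant by formatting the inner error's Display, while #[error(transparent)] forwards both Display and source() straight through to the wrapped error, adding no message of its own. A minimal standalone illustration (every name below is invented for the demo; only the attribute usage mirrors the diff):

    use thiserror::Error;

    #[derive(Debug, Error)]
    #[error("tokenizer failed at {line}:{col}")]
    struct DemoTokenizerError {
        line: usize,
        col: usize,
    }

    #[derive(Debug, Error)]
    enum DemoError {
        // Forwards Display and source() to the wrapped error, no extra text:
        #[error(transparent)]
        Tokenizer(#[from] DemoTokenizerError),
    }

    fn main() {
        let err: DemoError = DemoTokenizerError { line: 3, col: 7 }.into();
        // Display is the inner error's, unchanged:
        assert_eq!(err.to_string(), "tokenizer failed at 3:7");
    }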
@@ -28,10 +35,28 @@ fn run_logic() -> Result<(), StationlangError> {
     let args = Args::parse();
     let input_file = args.input_file;
 
-    let mut tokenizer = Tokenizer::from_path(&input_file)?;
+    let tokenizer: Tokenizer<_> = match input_file {
+        Some(input_file) => Tokenizer::from_path(&input_file)?,
+        None => {
+            let mut buf = String::new();
+            let stdin = std::io::stdin();
 
-    while let Some(token) = tokenizer.next_token()? {
-        println!("{:?}", token);
+            let read_result = stdin.lock().read_to_string(&mut buf)?;
+
+            if read_result == 0 {
+                return Ok(());
+            }
+
+            Tokenizer::from(buf)
+        }
+    };
+
+    let mut parser = ASTParser::new(tokenizer);
+
+    let ast = parser.parse()?;
+
+    if let Some(ast) = ast {
+        println!("{}", ast);
     }
 
     Ok(())
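The None branch above buffers all of stdin into a String and hands it to Tokenizer::from(buf). That From<String> impl is not part of this diff; given the Read + Seek bound on Tokenizer<T>, it presumably wraps the buffer in std::io::Cursor, which provides both traits over in-memory bytes (the wrapping strategy is an assumption). A standalone sketch of that property:

    use std::io::{Cursor, Read, Seek, SeekFrom};

    fn main() -> std::io::Result<()> {
        // A String buffered from stdin, as in the None arm above:
        let buf = String::from("let x = 5;");

        // Cursor<Vec<u8>> implements Read + Seek over the in-memory buffer,
        // the same bounds that Tokenizer::from_path's file handle satisfies:
        let mut cursor = Cursor::new(buf.into_bytes());

        let mut first = [0u8; 3];
        cursor.read_exact(&mut first)?;
        assert_eq!(&first, b"let");

        // Seeking back is what a tokenizer needs for lookahead and rewind:
        cursor.seek(SeekFrom::Start(0))?;
        Ok(())
    }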
src/parser.rs

@@ -135,6 +135,8 @@ where
             // match functions with a `fn` keyword
             TokenType::Keyword(Keyword::Fn) => Expression::FunctionExpression(self.function()?),
 
+            TokenType::Identifier(ref id) => Expression::Variable(id.clone()),
+
             // match block expressions with a `{` symbol
             TokenType::Symbol(Symbol::LBrace) => Expression::BlockExpression(self.block()?),
 
@@ -353,7 +355,7 @@ mod tests {
         let input = r#"
            // This is a function. The parser is starting to get more complex
            fn add(x, y) {
-               let z = 5;
+               let z = x;
            }
        "#;
 
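The new Identifier arm is what the updated test exercises: `let z = x;` now parses because a bare name on the right-hand side becomes Expression::Variable instead of falling through to an error. A standalone illustration of the arm's pattern, with simplified stand-ins for the real parser types:

    // Simplified stand-ins for the types in src/parser.rs:
    #[derive(Debug, PartialEq)]
    enum Expression {
        Variable(String),
        Unsupported,
    }

    enum TokenType {
        Identifier(String),
        Other,
    }

    fn to_expression(token_type: TokenType) -> Expression {
        match token_type {
            // `ref id` borrows the name out of the token and clone() copies
            // it into the AST node, the same shape as the arm in the diff:
            TokenType::Identifier(ref id) => Expression::Variable(id.clone()),
            TokenType::Other => Expression::Unsupported,
        }
    }

    fn main() {
        let expr = to_expression(TokenType::Identifier("x".to_string()));
        assert_eq!(expr, Expression::Variable("x".to_string()));
    }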
src/tokenizer.rs

@@ -21,6 +21,8 @@ pub enum TokenizerError {
     UnknownKeywordOrIdentifierError(String, usize, usize),
 }
 
+pub trait Tokenize: Read + Seek {}
+
 pub(crate) struct Tokenizer<T>
 where
     T: Read + Seek,
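The empty Tokenize trait reads like groundwork for the conversion named in the commit message: replacing the generic T: Read + Seek with a type-erased Box<dyn Tokenize>, so the file and stdin paths share one concrete tokenizer type. A hypothetical sketch of where that could go (the blanket impl and DynTokenizer are assumptions, not part of this commit):

    use std::io::{Cursor, Read, Seek};

    pub trait Tokenize: Read + Seek {}

    // Blanket impl so any Read + Seek source (File, Cursor, ...) is
    // Tokenize; the diff only defines the trait, so this is an assumption:
    impl<T: Read + Seek> Tokenize for T {}

    // A tokenizer over a boxed trait object instead of a generic parameter:
    struct DynTokenizer {
        input: Box<dyn Tokenize>,
    }

    fn main() {
        // The in-memory (stdin) path and the file path would box into the
        // same DynTokenizer type:
        let _from_memory = DynTokenizer {
            input: Box::new(Cursor::new(b"let x = 5;".to_vec())),
        };
        // let _from_file = DynTokenizer {
        //     input: Box::new(std::fs::File::open("main.sl").unwrap()),
        // };
    }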