dyn trait for tokenizer buffer

2024-11-21 19:28:20 -07:00
parent abaf58374f
commit ae45a3c848
3 changed files with 30 additions and 44 deletions


@@ -35,7 +35,7 @@ fn run_logic() -> Result<(), StationlangError> {
     let args = Args::parse();
     let input_file = args.input_file;
-    let tokenizer: Tokenizer<_> = match input_file {
+    let tokenizer: Tokenizer = match input_file {
         Some(input_file) => Tokenizer::from_path(&input_file)?,
         None => {
             let mut buf = String::new();


@@ -90,16 +90,13 @@ macro_rules! token_matches {
     };
 }
 
-pub struct Parser<R: Read + Seek> {
-    tokenizer: TokenizerBuffer<R>,
+pub struct Parser {
+    tokenizer: TokenizerBuffer,
     current_token: Option<Token>,
 }
 
-impl<R> Parser<R>
-where
-    R: Read + Seek,
-{
-    pub fn new(tokenizer: Tokenizer<R>) -> Self {
+impl Parser {
+    pub fn new(tokenizer: Tokenizer) -> Self {
         Parser {
             tokenizer: TokenizerBuffer::new(tokenizer),
             current_token: None,
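
A note on why the parser can drop its type parameter: once the tokenizer hides its reader behind a trait object, downstream types that merely store a tokenizer no longer have to repeat the `R: Read + Seek` bound. A minimal, self-contained sketch of that effect, using illustrative names (`Source`, `GenericHolder`, `ErasedHolder`) rather than this crate's types:

use std::io::{Cursor, Read, Seek};

// Before: the reader's type parameter leaks into every struct that stores it,
// so each one carries its own `R: Read + Seek` bound.
#[allow(dead_code)]
struct GenericHolder<R: Read + Seek> {
    reader: R,
}

// After: a supertrait plus a blanket impl lets the reader be boxed once, and
// everything downstream stores a plain, non-generic field.
trait Source: Read + Seek {}
impl<T: Read + Seek> Source for T {}

#[allow(dead_code)]
struct ErasedHolder {
    reader: Box<dyn Source>,
}

fn main() {
    let _generic = GenericHolder { reader: Cursor::new(b"let x = 1;".to_vec()) };

    let boxed: Box<dyn Source> = Box::new(Cursor::new(b"let x = 1;".to_vec()));
    let _erased = ErasedHolder { reader: boxed };
}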


@@ -23,36 +23,20 @@ pub enum TokenizerError {
 pub trait Tokenize: Read + Seek {}
+impl<T> Tokenize for T where T: Read + Seek {}
 
-pub(crate) struct Tokenizer<T>
-where
-    T: Read + Seek,
-{
-    reader: BufReader<T>,
+pub(crate) struct Tokenizer {
+    reader: BufReader<Box<dyn Tokenize>>,
     char_buffer: [u8; 1],
     line: usize,
     column: usize,
     returned_eof: bool,
 }
 
-impl From<String> for Tokenizer<Cursor<Vec<u8>>> {
-    fn from(input: String) -> Self {
-        let cursor = Cursor::new(input.into_bytes());
-        let reader = BufReader::new(cursor);
-        Self {
-            reader,
-            line: 1,
-            column: 1,
-            char_buffer: [0],
-            returned_eof: false,
-        }
-    }
-}
-
-impl Tokenizer<File> {
+impl Tokenizer {
     pub fn from_path(input_file: impl Into<PathBuf>) -> Result<Self, TokenizerError> {
         let file = std::fs::File::open(input_file.into())?;
-        let reader = BufReader::new(file);
+        let reader = BufReader::new(Box::new(file) as Box<dyn Tokenize>);
         Ok(Self {
             reader,
@@ -64,10 +48,21 @@ impl Tokenizer<File> {
     }
 }
 
-impl<T> Tokenizer<T>
-where
-    T: Read + Seek,
-{
+impl From<String> for Tokenizer {
+    fn from(input: String) -> Self {
+        let reader = BufReader::new(Box::new(Cursor::new(input)) as Box<dyn Tokenize>);
+        Self {
+            reader,
+            line: 1,
+            column: 1,
+            char_buffer: [0],
+            returned_eof: false,
+        }
+    }
+}
+
+impl Tokenizer {
     /// Consumes the tokenizer and returns the next token in the stream
     /// If there are no more tokens in the stream, this function returns None
     /// If there is an error reading the stream, this function returns an error
@@ -415,20 +410,14 @@ where
     }
 }
 
-pub struct TokenizerBuffer<T>
-where
-    T: Read + Seek,
-{
-    tokenizer: Tokenizer<T>,
+pub struct TokenizerBuffer {
+    tokenizer: Tokenizer,
     buffer: VecDeque<Token>,
     history: VecDeque<Token>,
 }
 
-impl<T> TokenizerBuffer<T>
-where
-    T: Seek + Read,
-{
-    pub fn new(tokenizer: Tokenizer<T>) -> Self {
+impl TokenizerBuffer {
+    pub fn new(tokenizer: Tokenizer) -> Self {
         Self {
             tokenizer,
             buffer: VecDeque::new(),
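
To make the change above concrete: `pub trait Tokenize: Read + Seek {}` together with the blanket impl means any `Read + Seek` value can be boxed as `Box<dyn Tokenize>`, and because boxed trait objects forward `Read` and `Seek`, `BufReader` wraps the box exactly as it wrapped the concrete `File` or `Cursor` before. Below is a standalone sketch of just that mechanism; it uses only std types and an in-memory source, not this crate's actual `Tokenizer`:

use std::io::{BufReader, Cursor, Read, Seek, SeekFrom};

// Same shape as the trait in this commit: an empty supertrait bundle plus a
// blanket impl, so every Read + Seek type is automatically Tokenize.
trait Tokenize: Read + Seek {}
impl<T> Tokenize for T where T: Read + Seek {}

fn open_source(from_string: bool) -> Box<dyn Tokenize> {
    if from_string {
        // In-memory source, as in `impl From<String> for Tokenizer`.
        Box::new(Cursor::new(String::from("let x = 1;")))
    } else {
        // A file-backed source would be boxed the same way, e.g.
        // Box::new(std::fs::File::open(path)?); omitted here so the sketch
        // runs without touching the filesystem.
        Box::new(Cursor::new(String::new()))
    }
}

fn main() -> std::io::Result<()> {
    // Both construction paths yield the same non-generic reader type.
    let mut reader = BufReader::new(open_source(true));

    // Read and Seek still work through the trait object.
    let mut byte = [0u8; 1];
    reader.read_exact(&mut byte)?;
    assert_eq!(&byte, b"l");

    reader.seek(SeekFrom::Start(4))?;
    reader.read_exact(&mut byte)?;
    assert_eq!(&byte, b"x");

    Ok(())
}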