Added support for compile-time constant hash expressions
@@ -8,6 +8,7 @@ quick-error = { workspace = true }
 parser = { path = "../parser" }
 tokenizer = { path = "../tokenizer" }
 lsp-types = { workspace = true }
+crc32fast = { workspace = true }

 [dev-dependencies]
 anyhow = { version = "1.0" }

@@ -146,3 +146,25 @@ fn test_boolean_return() -> anyhow::Result<()> {

     Ok(())
 }
+
+#[test]
+fn test_const_hash_expr() -> anyhow::Result<()> {
+    let compiled = compile!(debug r#"
+        const nameHash = hash("AccessCard");
+        device self = "db";
+
+        self.Setting = nameHash;
+    "#);
+
+    assert_eq!(
+        compiled,
+        indoc! {
+            "
+            j main
+            main:
+            s db Setting -732925934
+            "
+        }
+    );
+    Ok(())
+}

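
The `-732925934` operand in the expected output is the CRC-32 of `"AccessCard"` reinterpreted as a signed 32-bit value, folded into the instruction at compile time (the compiler hunk further down does exactly this). A minimal standalone sketch of that computation, assuming the standard CRC-32 that `crc32fast::hash` implements; the helper name is illustrative, not part of the compiler:

```rust
// Illustrative only: recomputes the operand the compiler test above expects.
// crc32fast::hash returns a u32; the compiler stores its bit pattern as a signed i32.
fn const_hash(name: &str) -> i32 {
    let unsigned = crc32fast::hash(name.as_bytes());
    i32::from_le_bytes(unsigned.to_le_bytes())
}

fn main() {
    // Should print -732925934, matching the `s db Setting -732925934` line above,
    // assuming the standard CRC-32 algorithm.
    println!("{}", const_hash("AccessCard"));
}
```
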
@@ -1,13 +1,14 @@
 #![allow(clippy::result_large_err)]
 use crate::variable_manager::{self, LocationRequest, VariableLocation, VariableScope};
+use crc32fast::hash as crc32_hash;
 use parser::{
     Parser as ASTParser,
     sys_call::{SysCall, System},
     tree_node::{
         AssignmentExpression, BinaryExpression, BlockExpression, ConstDeclarationExpression,
         DeviceDeclarationExpression, Expression, FunctionExpression, IfExpression,
-        InvocationExpression, Literal, LiteralOrVariable, LogicalExpression, LoopExpression,
-        MemberAccessExpression, Span, Spanned, WhileExpression,
+        InvocationExpression, Literal, LiteralOr, LiteralOrVariable, LogicalExpression,
+        LoopExpression, MemberAccessExpression, Span, Spanned, WhileExpression,
     },
 };
 use quick_error::quick_error;
@@ -15,6 +16,7 @@ use std::{
     collections::HashMap,
     io::{BufWriter, Write},
 };
+use tokenizer::token::Number;

 macro_rules! debug {
     ($self: expr, $debug_value: expr) => {
@@ -679,8 +681,30 @@ impl<'a, W: std::io::Write> Compiler<'a, W> {
             value: const_value,
         } = expr;

+        // check for a hash expression or a literal
+        let value = match const_value {
+            LiteralOr::Or(Spanned {
+                node: SysCall::System(System::Hash(Literal::String(str_to_hash))),
+                ..
+            }) => {
+                let hash = crc32_hash(str_to_hash.as_bytes());
+
+                // in stationeers, crc32 is a SIGNED int.
+                let hash_value_i32 = i32::from_le_bytes(hash.to_le_bytes());
+
+                Literal::Number(Number::Integer(hash_value_i32 as i128))
+            }
+            LiteralOr::Or(Spanned { span, .. }) => {
+                return Err(Error::Unknown(
+                    "hash only supports string literals in this context.".into(),
+                    Some(span),
+                ));
+            }
+            LiteralOr::Literal(Spanned { node, .. }) => node,
+        };
+
         Ok(CompilationResult {
-            location: scope.define_const(const_name.node, const_value.node)?,
+            location: scope.define_const(const_name.node, value)?,
             temp_name: None,
         })
     }

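
A note on the conversion in the hunk above: `i32::from_le_bytes(hash.to_le_bytes())` round-trips the CRC through its little-endian bytes, which is a pure bit-pattern reinterpretation, so it yields the same value as a plain `hash as i32` cast (same-width integer casts in Rust preserve the bit pattern). A small self-contained check:

```rust
fn main() {
    let hash: u32 = 0xDEAD_BEEF;

    // Round-tripping through little-endian bytes, as the hunk above does...
    let via_bytes = i32::from_le_bytes(hash.to_le_bytes());
    // ...reinterprets the bit pattern, exactly like a plain `as` cast.
    let via_cast = hash as i32;

    assert_eq!(via_bytes, via_cast);
    println!("{via_bytes}"); // -559038737
}
```
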
@@ -1256,17 +1256,47 @@ impl<'a> Parser<'a> {
             ));
         }

-        // literal value
+        // literal or syscall, making sure the syscall is supported in hash
         self.assign_next()?;
-        let lit = self.spanned(|p| p.literal())?;
+        // cache the current token location
+        let current_token_index = self.tokenizer.loc();

-        Ok(ConstDeclarationExpression {
-            name: Spanned {
-                span: ident_span,
-                node: ident,
-            },
-            value: lit,
-        })
+        if let Ok(lit) = self.spanned(|p| p.literal()) {
+            Ok(ConstDeclarationExpression {
+                name: Spanned {
+                    span: ident_span,
+                    node: ident,
+                },
+                value: LiteralOr::Literal(lit),
+            })
+        } else {
+            // we need to rewind our tokenizer to our previous location
+            self.tokenizer.seek(SeekFrom::Current(
+                self.tokenizer.loc() - current_token_index,
+            ))?;
+            let syscall = self.spanned(|p| p.syscall())?;
+
+            if !matches!(
+                syscall,
+                Spanned {
+                    node: SysCall::System(sys_call::System::Hash(_)),
+                    ..
+                }
+            ) {
+                return Err(Error::UnexpectedToken(
+                    syscall.span,
+                    self.current_token.clone().ok_or(Error::UnexpectedEOF)?,
+                ));
+            }
+
+            Ok(ConstDeclarationExpression {
+                name: Spanned {
+                    span: ident_span,
+                    node: ident,
+                },
+                value: LiteralOr::Or(syscall),
+            })
+        }
     }

     fn declaration(&mut self) -> Result<Expression, Error> {

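
The parser change above caches `tokenizer.loc()`, tries to parse a literal, and on failure seeks back and parses a `hash(...)` syscall instead, rejecting any other syscall. A simplified, self-contained sketch of that try-then-rewind control flow; the types and helpers here are illustrative stand-ins, not the crate's real `spanned`/`literal`/`syscall` API:

```rust
// Simplified model: try one parse, rewind to a cached position, try another.
#[derive(Debug)]
enum ConstValue {
    Literal(i64),
    Hash(String),
}

struct Toks {
    items: Vec<String>,
    pos: usize,
}

impl Toks {
    fn loc(&self) -> usize {
        self.pos
    }
    fn rewind_to(&mut self, loc: usize) {
        self.pos = loc;
    }
    fn next(&mut self) -> Option<String> {
        let t = self.items.get(self.pos).cloned();
        if t.is_some() {
            self.pos += 1;
        }
        t
    }
}

// Consumes a token even when it fails, which is exactly why the rewind is needed.
fn parse_literal(t: &mut Toks) -> Option<ConstValue> {
    t.next()?.parse::<i64>().ok().map(ConstValue::Literal)
}

// Expects exactly: hash ( <name> )
fn parse_hash_call(t: &mut Toks) -> Option<ConstValue> {
    (t.next()? == "hash" && t.next()? == "(").then_some(())?;
    let arg = t.next()?;
    (t.next()? == ")").then_some(ConstValue::Hash(arg))
}

fn parse_const_value(t: &mut Toks) -> Option<ConstValue> {
    let checkpoint = t.loc(); // cache the position, like tokenizer.loc() above
    parse_literal(t).or_else(|| {
        t.rewind_to(checkpoint); // rewind before retrying, like the seek() call above
        parse_hash_call(t)
    })
}

fn main() {
    let mut toks = Toks {
        items: vec!["hash", "(", "AccessCard", ")"]
            .into_iter()
            .map(String::from)
            .collect(),
        pos: 0,
    };
    println!("{:?}", parse_const_value(&mut toks)); // Some(Hash("AccessCard"))
}
```
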
@@ -144,3 +144,10 @@ fn test_binary_expression() -> Result<()> {

     Ok(())
 }
+
+#[test]
+fn test_const_hash_expression() -> Result<()> {
+    let expr = parser!(r#"const i = hash("item")"#).parse()?.unwrap();
+    assert_eq!("(const i = hash(\"item\"))", expr.to_string());
+    Ok(())
+}

@@ -1,5 +1,7 @@
 use std::ops::Deref;

+use crate::sys_call;
+
 use super::sys_call::SysCall;
 use tokenizer::token::Number;

@@ -10,6 +12,21 @@ pub enum Literal {
     Boolean(bool),
 }

+#[derive(Debug, Eq, PartialEq, Clone)]
+pub enum LiteralOr<T> {
+    Literal(Spanned<Literal>),
+    Or(Spanned<T>),
+}
+
+impl<T: std::fmt::Display> std::fmt::Display for LiteralOr<T> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::Literal(l) => write!(f, "{l}"),
+            Self::Or(o) => write!(f, "{o}"),
+        }
+    }
+}
+
 impl std::fmt::Display for Literal {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
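
`LiteralOr<T>` is an either-type whose `Display` simply forwards to whichever side it holds, so a const declaration prints the same way regardless of whether it carries a literal or a `hash(...)` call; the parser test above pins the expected form `(const i = hash("item"))`. A simplified standalone model (the real type wraps both sides in `Spanned<_>`, and the real `Display` for `ConstDeclarationExpression` is not shown in this diff):

```rust
use std::fmt;

// Simplified model: the declaration's Display does not care which side it holds,
// because the either-type forwards formatting to the inner value.
enum LiteralOr<T> {
    Literal(i64),
    Or(T),
}

impl<T: fmt::Display> fmt::Display for LiteralOr<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Literal(l) => write!(f, "{l}"),
            Self::Or(o) => write!(f, "{o}"),
        }
    }
}

struct ConstDecl<T> {
    name: String,
    value: LiteralOr<T>,
}

impl<T: fmt::Display> fmt::Display for ConstDecl<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "(const {} = {})", self.name, self.value)
    }
}

fn main() {
    let with_call = ConstDecl {
        name: "i".to_string(),
        value: LiteralOr::Or(r#"hash("item")"#.to_string()),
    };
    let with_literal: ConstDecl<String> = ConstDecl {
        name: "n".to_string(),
        value: LiteralOr::Literal(10),
    };
    println!("{with_call}");    // (const i = hash("item"))
    println!("{with_literal}"); // (const n = 10)
}
```
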
@@ -198,7 +215,14 @@ impl std::fmt::Display for LiteralOrVariable {
 #[derive(Debug, PartialEq, Eq)]
 pub struct ConstDeclarationExpression {
     pub name: Spanned<String>,
-    pub value: Spanned<Literal>,
+    pub value: LiteralOr<SysCall>,
 }

+impl ConstDeclarationExpression {
+    pub fn is_syscall_supported(call: &SysCall) -> bool {
+        use sys_call::System;
+        matches!(call, SysCall::System(sys) if matches!(sys, System::Hash(_)))
+    }
+}
+
 impl std::fmt::Display for ConstDeclarationExpression {

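
A small aside on `is_syscall_supported`: because enum patterns nest, the guarded double `matches!` accepts exactly the same values as matching the nested pattern in one step. A self-contained illustration with mock enums (not the crate's types):

```rust
// Mock enums, only to show the equivalence of the two matches! forms.
enum System {
    Hash(String),
    Other,
}

enum SysCall {
    System(System),
}

fn supported_nested(call: &SysCall) -> bool {
    matches!(call, SysCall::System(sys) if matches!(sys, System::Hash(_)))
}

fn supported_flat(call: &SysCall) -> bool {
    matches!(call, SysCall::System(System::Hash(_)))
}

fn main() {
    let a = SysCall::System(System::Hash("AccessCard".into()));
    let b = SysCall::System(System::Other);
    assert_eq!(supported_nested(&a), supported_flat(&a)); // both true
    assert_eq!(supported_nested(&b), supported_flat(&b)); // both false
    println!("equivalent");
}
```
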
@@ -519,6 +519,7 @@ pub struct TokenizerBuffer<'a> {
     tokenizer: Tokenizer<'a>,
     buffer: VecDeque<Token>,
     history: VecDeque<Token>,
+    index: i64,
 }

 impl<'a> TokenizerBuffer<'a> {
@@ -527,17 +528,22 @@ impl<'a> TokenizerBuffer<'a> {
             tokenizer,
             buffer: VecDeque::new(),
             history: VecDeque::with_capacity(128),
+            index: 0,
         }
     }
     pub fn next_token(&mut self) -> Result<Option<Token>, Error> {
         if let Some(token) = self.buffer.pop_front() {
             self.history.push_back(token.clone());
+            self.index += 1;
             return Ok(Some(token));
         }
         let token = self.tokenizer.next_token()?;

         if let Some(ref token) = token {
             self.history.push_back(token.clone());
         }

+        self.index += 1;
         Ok(token)
     }
     pub fn peek(&mut self) -> Result<Option<Token>, Error> {
@@ -547,12 +553,15 @@ impl<'a> TokenizerBuffer<'a> {
         let token = self.tokenizer.peek_next()?;
         Ok(token)
     }
-    fn seek_from_current(&mut self, seek_to: i64) -> Result<(), Error> {
+    pub fn loc(&self) -> i64 {
+        self.index
+    }
+    fn seek_from_current(&mut self, seek_to_int: i64) -> Result<(), Error> {
         use Ordering::*;
-        match seek_to.cmp(&0) {
+        match seek_to_int.cmp(&0) {
             Greater => {
-                let mut tokens = Vec::with_capacity(seek_to as usize);
-                for _ in 0..seek_to {
+                let mut tokens = Vec::with_capacity(seek_to_int as usize);
+                for _ in 0..seek_to_int {
                     if let Some(token) = self.tokenizer.next_token()? {
                         tokens.push(token);
                     } else {
@@ -565,7 +574,7 @@ impl<'a> TokenizerBuffer<'a> {
                 self.history.extend(tokens);
             }
             Less => {
-                let seek_to = seek_to.unsigned_abs() as usize;
+                let seek_to = seek_to_int.unsigned_abs() as usize;
                 let mut tokens = Vec::with_capacity(seek_to);
                 for _ in 0..seek_to {
                     if let Some(token) = self.history.pop_back() {
@@ -577,6 +586,7 @@ impl<'a> TokenizerBuffer<'a> {
                        )));
                    }
                }
+                self.index -= seek_to_int;
                self.buffer.extend(tokens.into_iter().rev());
            }
            _ => {}

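
Taken together, the `TokenizerBuffer` changes add an absolute position counter: `index` advances on every token handed out, `loc()` exposes it, and a backward seek moves tokens from `history` back into `buffer` so they get replayed. A simplified standalone model of that bookkeeping, with plain strings instead of the crate's `Token`/`Tokenizer` types and only the rewind direction sketched:

```rust
use std::collections::VecDeque;

// Simplified model of the history/index bookkeeping; not the crate's real type.
struct Buffer {
    source: VecDeque<String>,  // stands in for the underlying tokenizer
    buffer: VecDeque<String>,  // tokens pushed back by a rewind, served first
    history: VecDeque<String>, // everything already handed out
    index: i64,                // absolute position, what loc() reports
}

impl Buffer {
    fn next_token(&mut self) -> Option<String> {
        let tok = self.buffer.pop_front().or_else(|| self.source.pop_front())?;
        self.history.push_back(tok.clone());
        self.index += 1;
        Some(tok)
    }

    fn loc(&self) -> i64 {
        self.index
    }

    /// Negative offsets rewind by moving tokens from history back into the buffer.
    fn seek_from_current(&mut self, offset: i64) {
        if offset < 0 {
            for _ in 0..offset.unsigned_abs() {
                if let Some(tok) = self.history.pop_back() {
                    self.buffer.push_front(tok);
                    self.index -= 1;
                }
            }
        }
        // (forward seeking omitted in this sketch)
    }
}

fn main() {
    let mut buf = Buffer {
        source: ["const", "i", "=", "hash"].map(String::from).into(),
        buffer: VecDeque::new(),
        history: VecDeque::new(),
        index: 0,
    };

    let checkpoint = buf.loc();
    buf.next_token();
    buf.next_token();

    // Rewind to the checkpoint, as the parser does when a literal fails to parse.
    buf.seek_from_current(checkpoint - buf.loc()); // negative offset
    assert_eq!(buf.loc(), checkpoint);
    assert_eq!(buf.next_token().as_deref(), Some("const"));
    println!("rewind ok");
}
```
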
@@ -168,7 +168,7 @@ impl std::fmt::Display for TokenType {
 #[derive(Debug, PartialEq, Hash, Eq, Clone, Copy)]
 pub enum Number {
     /// Represents an integer number
-    Integer(u128),
+    Integer(i128),
     /// Represents a decimal type number with a precision of 64 bits
     Decimal(Decimal),
 }
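
Switching the `Number::Integer` payload from `u128` to `i128` is what lets the AST carry the negative constants the hash folding produces. A tiny illustration: `-732925934` from the compiler test has the unsigned 32-bit bit pattern `3562041362`, which a `u128` payload could only hold in its unsigned form:

```rust
fn main() {
    let bit_pattern: u32 = 3_562_041_362;
    let signed = bit_pattern as i32;        // -732925934 once reinterpreted
    let payload: i128 = i128::from(signed); // what Number::Integer can now store

    assert_eq!(signed, -732925934);
    println!("{payload}");
}
```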