Added support for compile-time constant hash expressions
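The change folds hash("...") calls on string literals in const declarations into their CRC32 value at compile time, emitted as a signed 32-bit integer the way Stationeers expects, so no runtime work is generated. A minimal standalone sketch of that fold, assuming only the crc32fast crate (the asserted value below is the one the new compiler test expects for "AccessCard"):

// Sketch: compile-time constant folding of hash("...") into a signed CRC32.
fn const_hash(s: &str) -> i32 {
    let unsigned: u32 = crc32fast::hash(s.as_bytes());
    // Reinterpret the bits as signed; same result as `unsigned as i32`.
    i32::from_le_bytes(unsigned.to_le_bytes())
}

fn main() {
    // Matches the `s db Setting -732925934` instruction asserted in the new test.
    assert_eq!(const_hash("AccessCard"), -732925934);
}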
Changed files include rust_compiler/Cargo.lock (generated, 10 lines).
@@ -252,6 +252,7 @@ name = "compiler"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "crc32fast",
  "indoc",
  "lsp-types",
  "parser",
@@ -260,6 +261,15 @@ dependencies = [
  "tokenizer",
 ]
 
+[[package]]
+name = "crc32fast"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511"
+dependencies = [
+ "cfg-if",
+]
+
 [[package]]
 name = "diff"
 version = "0.1.13"
@@ -9,8 +9,9 @@ members = ["libs/*"]
 [workspace.dependencies]
 quick-error = "2"
 rust_decimal = "1"
-safer-ffi = { version = "0.1" }
-lsp-types = { version = "0.97" }
+safer-ffi = { version = "0.1" } # Safely share structs in memory between C# and Rust
+lsp-types = { version = "0.97" } # Allows for LSP style reporting to the frontend
+crc32fast = "1.5" # This is for `HASH(..)` calls to be optimized away
 
 [features]
 headers = ["safer-ffi/headers"]
@@ -8,6 +8,7 @@ quick-error = { workspace = true }
 parser = { path = "../parser" }
 tokenizer = { path = "../tokenizer" }
 lsp-types = { workspace = true }
+crc32fast = { workspace = true }
 
 [dev-dependencies]
 anyhow = { version = "1.0" }
@@ -146,3 +146,25 @@ fn test_boolean_return() -> anyhow::Result<()> {
 
     Ok(())
 }
+
+#[test]
+fn test_const_hash_expr() -> anyhow::Result<()> {
+    let compiled = compile!(debug r#"
+        const nameHash = hash("AccessCard");
+        device self = "db";
+
+        self.Setting = nameHash;
+    "#);
+
+    assert_eq!(
+        compiled,
+        indoc! {
+            "
+            j main
+            main:
+            s db Setting -732925934
+            "
+        }
+    );
+
+    Ok(())
+}
@@ -1,13 +1,14 @@
 #![allow(clippy::result_large_err)]
 use crate::variable_manager::{self, LocationRequest, VariableLocation, VariableScope};
+use crc32fast::hash as crc32_hash;
 use parser::{
     Parser as ASTParser,
     sys_call::{SysCall, System},
     tree_node::{
         AssignmentExpression, BinaryExpression, BlockExpression, ConstDeclarationExpression,
         DeviceDeclarationExpression, Expression, FunctionExpression, IfExpression,
-        InvocationExpression, Literal, LiteralOrVariable, LogicalExpression, LoopExpression,
-        MemberAccessExpression, Span, Spanned, WhileExpression,
+        InvocationExpression, Literal, LiteralOr, LiteralOrVariable, LogicalExpression,
+        LoopExpression, MemberAccessExpression, Span, Spanned, WhileExpression,
     },
 };
 use quick_error::quick_error;
@@ -15,6 +16,7 @@ use std::{
     collections::HashMap,
     io::{BufWriter, Write},
 };
+use tokenizer::token::Number;
 
 macro_rules! debug {
     ($self: expr, $debug_value: expr) => {
@@ -679,8 +681,30 @@ impl<'a, W: std::io::Write> Compiler<'a, W> {
             value: const_value,
         } = expr;
+
+        // check for a hash expression or a literal
+        let value = match const_value {
+            LiteralOr::Or(Spanned {
+                node: SysCall::System(System::Hash(Literal::String(str_to_hash))),
+                ..
+            }) => {
+                let hash = crc32_hash(str_to_hash.as_bytes());
+
+                // in stationeers, crc32 is a SIGNED int.
+                let hash_value_i32 = i32::from_le_bytes(hash.to_le_bytes());
+
+                Literal::Number(Number::Integer(hash_value_i32 as i128))
+            }
+            LiteralOr::Or(Spanned { span, .. }) => {
+                return Err(Error::Unknown(
+                    "hash only supports string literals in this context.".into(),
+                    Some(span),
+                ));
+            }
+            LiteralOr::Literal(Spanned { node, .. }) => node,
+        };
+
         Ok(CompilationResult {
-            location: scope.define_const(const_name.node, const_value.node)?,
+            location: scope.define_const(const_name.node, value)?,
             temp_name: None,
         })
     }
@@ -1256,18 +1256,48 @@ impl<'a> Parser<'a> {
             ));
         }
 
-        // literal value
+        // literal or syscall, making sure the syscall is supported in hash
         self.assign_next()?;
-        let lit = self.spanned(|p| p.literal())?;
+        // cache the current token location
+        let current_token_index = self.tokenizer.loc();
+
+        if let Ok(lit) = self.spanned(|p| p.literal()) {
+            Ok(ConstDeclarationExpression {
+                name: Spanned {
+                    span: ident_span,
+                    node: ident,
+                },
+                value: LiteralOr::Literal(lit),
+            })
+        } else {
+            // we need to rewind our tokenizer to our previous location,
+            // so the delta must be negative: target minus current position
+            self.tokenizer.seek(SeekFrom::Current(
+                current_token_index - self.tokenizer.loc(),
+            ))?;
+            let syscall = self.spanned(|p| p.syscall())?;
+
+            if !matches!(
+                syscall,
+                Spanned {
+                    node: SysCall::System(sys_call::System::Hash(_)),
+                    ..
+                }
+            ) {
+                return Err(Error::UnexpectedToken(
+                    syscall.span,
+                    self.current_token.clone().ok_or(Error::UnexpectedEOF)?,
+                ));
+            }
+
             Ok(ConstDeclarationExpression {
                 name: Spanned {
                     span: ident_span,
                     node: ident,
                 },
-            value: lit,
+                value: LiteralOr::Or(syscall),
             })
         }
+    }
 
     fn declaration(&mut self) -> Result<Expression, Error> {
         let current_token = self.current_token.as_ref().ok_or(Error::UnexpectedEOF)?;
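The else branch above leans on the new loc()/seek pair on TokenizerBuffer to backtrack after a failed literal parse. A self-contained sketch of that try-then-rewind pattern, with a hypothetical Buf type standing in for the real TokenizerBuffer (which seeks through its history/buffer deques):

use std::io::SeekFrom; // reused here purely as a convenient offset enum

struct Buf {
    tokens: Vec<&'static str>,
    index: i64,
}

impl Buf {
    fn loc(&self) -> i64 {
        self.index
    }
    fn next(&mut self) -> Option<&'static str> {
        let t = self.tokens.get(self.index as usize).copied();
        if t.is_some() {
            self.index += 1;
        }
        t
    }
    fn seek(&mut self, from: SeekFrom) {
        if let SeekFrom::Current(delta) = from {
            self.index += delta;
        }
    }
}

fn main() {
    let mut buf = Buf { tokens: vec!["hash", "(", "\"item\"", ")"], index: 0 };
    let start = buf.loc();

    // First attempt: parse a literal. "hash" is not a literal, so this
    // consumes a token and comes back empty.
    let literal = buf.next().filter(|t| t.starts_with('"'));
    if literal.is_none() {
        // Rewind by a negative delta: target minus current position.
        buf.seek(SeekFrom::Current(start - buf.loc()));
        // Second attempt: treat it as a syscall instead.
        assert_eq!(buf.next(), Some("hash"));
    }
}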
@@ -144,3 +144,10 @@ fn test_binary_expression() -> Result<()> {
 
     Ok(())
 }
+
+#[test]
+fn test_const_hash_expression() -> Result<()> {
+    let expr = parser!(r#"const i = hash("item")"#).parse()?.unwrap();
+    assert_eq!("(const i = hash(\"item\"))", expr.to_string());
+    Ok(())
+}
@@ -1,5 +1,7 @@
 use std::ops::Deref;
 
+use crate::sys_call;
+
 use super::sys_call::SysCall;
 use tokenizer::token::Number;
 
@@ -10,6 +12,21 @@ pub enum Literal {
     Boolean(bool),
 }
+
+#[derive(Debug, Eq, PartialEq, Clone)]
+pub enum LiteralOr<T> {
+    Literal(Spanned<Literal>),
+    Or(Spanned<T>),
+}
+
+impl<T: std::fmt::Display> std::fmt::Display for LiteralOr<T> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::Literal(l) => write!(f, "{l}"),
+            Self::Or(o) => write!(f, "{o}"),
+        }
+    }
+}
 
 impl std::fmt::Display for Literal {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
@@ -198,7 +215,14 @@ impl std::fmt::Display for LiteralOrVariable {
 #[derive(Debug, PartialEq, Eq)]
 pub struct ConstDeclarationExpression {
     pub name: Spanned<String>,
-    pub value: Spanned<Literal>,
+    pub value: LiteralOr<SysCall>,
+}
+
+impl ConstDeclarationExpression {
+    pub fn is_syscall_supported(call: &SysCall) -> bool {
+        use sys_call::System;
+        matches!(call, SysCall::System(sys) if matches!(sys, System::Hash(_)))
+    }
 }
 
 impl std::fmt::Display for ConstDeclarationExpression {
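For context, LiteralOr<T> lets a const's value stay either a plain literal or a deferred syscall until the compiler folds it. A simplified standalone sketch (no Span wrapper; HashCall is a hypothetical stand-in for SysCall) of how the Display fall-through produces the text the new parser test asserts:

use std::fmt;

enum LiteralOr<T> {
    Literal(String), // stands in for Spanned<Literal>
    Or(T),           // stands in for Spanned<T>, e.g. a SysCall
}

impl<T: fmt::Display> fmt::Display for LiteralOr<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Whichever side is present formats itself.
            Self::Literal(l) => write!(f, "{l}"),
            Self::Or(o) => write!(f, "{o}"),
        }
    }
}

struct HashCall(&'static str);
impl fmt::Display for HashCall {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "hash(\"{}\")", self.0)
    }
}

fn main() {
    let v: LiteralOr<HashCall> = LiteralOr::Or(HashCall("item"));
    // The shape asserted by test_const_hash_expression.
    assert_eq!(v.to_string(), "hash(\"item\")");
}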
@@ -519,6 +519,7 @@ pub struct TokenizerBuffer<'a> {
     tokenizer: Tokenizer<'a>,
     buffer: VecDeque<Token>,
     history: VecDeque<Token>,
+    index: i64,
 }
 
 impl<'a> TokenizerBuffer<'a> {
@@ -527,17 +528,22 @@ impl<'a> TokenizerBuffer<'a> {
             tokenizer,
             buffer: VecDeque::new(),
             history: VecDeque::with_capacity(128),
+            index: 0,
         }
     }
 
     pub fn next_token(&mut self) -> Result<Option<Token>, Error> {
         if let Some(token) = self.buffer.pop_front() {
             self.history.push_back(token.clone());
+            self.index += 1;
             return Ok(Some(token));
         }
 
         let token = self.tokenizer.next_token()?;
 
         if let Some(ref token) = token {
             self.history.push_back(token.clone());
         }
 
+        self.index += 1;
         Ok(token)
     }
 
     pub fn peek(&mut self) -> Result<Option<Token>, Error> {
@@ -547,12 +553,15 @@ impl<'a> TokenizerBuffer<'a> {
         let token = self.tokenizer.peek_next()?;
         Ok(token)
     }
 
-    fn seek_from_current(&mut self, seek_to: i64) -> Result<(), Error> {
+    pub fn loc(&self) -> i64 {
+        self.index
+    }
+
+    fn seek_from_current(&mut self, seek_to_int: i64) -> Result<(), Error> {
         use Ordering::*;
-        match seek_to.cmp(&0) {
+        match seek_to_int.cmp(&0) {
             Greater => {
-                let mut tokens = Vec::with_capacity(seek_to as usize);
-                for _ in 0..seek_to {
+                let mut tokens = Vec::with_capacity(seek_to_int as usize);
+                for _ in 0..seek_to_int {
                     if let Some(token) = self.tokenizer.next_token()? {
                         tokens.push(token);
                     } else {
@@ -565,7 +574,7 @@ impl<'a> TokenizerBuffer<'a> {
                 self.history.extend(tokens);
             }
             Less => {
-                let seek_to = seek_to.unsigned_abs() as usize;
+                let seek_to = seek_to_int.unsigned_abs() as usize;
                 let mut tokens = Vec::with_capacity(seek_to);
                 for _ in 0..seek_to {
                     if let Some(token) = self.history.pop_back() {
@@ -577,6 +586,7 @@ impl<'a> TokenizerBuffer<'a> {
                     )));
                 }
             }
+            // seek_to_int is negative here, so adding it moves the index backwards
+            self.index += seek_to_int;
             self.buffer.extend(tokens.into_iter().rev());
         }
         _ => {}
@@ -168,7 +168,7 @@ impl std::fmt::Display for TokenType {
 #[derive(Debug, PartialEq, Hash, Eq, Clone, Copy)]
 pub enum Number {
     /// Represents an integer number
-    Integer(u128),
+    Integer(i128),
     /// Represents a decimal type number with a precision of 64 bits
     Decimal(Decimal),
 }
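Worth noting: the Number::Integer switch from u128 to i128 is what lets the folded hash land in the AST at all, since CRC32 values reinterpreted as signed 32-bit integers can be negative. A trivial check:

fn main() {
    let folded: i32 = -732925934;          // the hash the new test expects for "AccessCard"
    let as_ast_int: i128 = folded as i128; // fits losslessly; u128 could not hold it
    assert_eq!(as_ast_int.to_string(), "-732925934");
}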