diff --git a/csharp_mod/SlangGlue.cs b/csharp_mod/SlangGlue.cs
new file mode 100644
index 0000000..8e99ae1
--- /dev/null
+++ b/csharp_mod/SlangGlue.cs
@@ -0,0 +1,80 @@
+/*! \file */
+/*******************************************
+ * *
+ * File auto-generated by `::safer_ffi`. *
+ * *
+ * Do not manually edit this file. *
+ * *
+ *******************************************/
+
+#pragma warning disable IDE0044, IDE0049, IDE0055, IDE1006,
+#pragma warning disable SA1004, SA1008, SA1023, SA1028,
+#pragma warning disable SA1121, SA1134,
+#pragma warning disable SA1201,
+#pragma warning disable SA1300, SA1306, SA1307, SA1310, SA1313,
+#pragma warning disable SA1500, SA1505, SA1507,
+#pragma warning disable SA1600, SA1601, SA1604, SA1605, SA1611, SA1615, SA1649,
+
+namespace Slang {
+using System;
+using System.Runtime.InteropServices;
+
+public unsafe partial class Ffi {
+#if IOS
+ private const string RustLib = "slang.framework/slang";
+#else
+ private const string RustLib = "slang";
+#endif
+}
+
+/// <summary>
+/// Same as [Vec][rust::Vec], but with guaranteed #[repr(C)] layout
+/// </summary>
+[StructLayout(LayoutKind.Sequential, Size = 24)]
+public unsafe struct Vec_uint8_t {
+ public byte * ptr;
+
+ public UIntPtr len;
+
+ public UIntPtr cap;
+}
+
+public unsafe partial class Ffi {
+ [DllImport(RustLib, ExactSpelling = true)] public static unsafe extern
+ Vec_uint8_t compile_from_string (
+ byte /*const*/ * input);
+}
+
+[StructLayout(LayoutKind.Sequential, Size = 104)]
+public unsafe struct FfiToken_t {
+ public Vec_uint8_t text;
+
+ public Vec_uint8_t tooltip;
+
+ public Vec_uint8_t error;
+
+ public Vec_uint8_t status;
+
+ public Int32 column;
+}
+
+/// <summary>
+/// Same as [Vec][rust::Vec], but with guaranteed #[repr(C)] layout
+/// </summary>
+[StructLayout(LayoutKind.Sequential, Size = 24)]
+public unsafe struct Vec_FfiToken_t {
+ public FfiToken_t * ptr;
+
+ public UIntPtr len;
+
+ public UIntPtr cap;
+}
+
+public unsafe partial class Ffi {
+ [DllImport(RustLib, ExactSpelling = true)] public static unsafe extern
+ Vec_FfiToken_t tokenize_line (
+ byte /*const*/ * input);
+}
+
+
+} /* Slang */
diff --git a/rust_compiler/Cargo.lock b/rust_compiler/Cargo.lock
index 7d0923d..3b6f10b 100644
--- a/rust_compiler/Cargo.lock
+++ b/rust_compiler/Cargo.lock
@@ -779,6 +779,20 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e"
+[[package]]
+name = "slang"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "clap",
+ "compiler",
+ "parser",
+ "quick-error",
+ "rust_decimal",
+ "safer-ffi",
+ "tokenizer",
+]
+
[[package]]
name = "stabby"
version = "36.2.2"
@@ -814,20 +828,6 @@ dependencies = [
"syn 1.0.109",
]
-[[package]]
-name = "stationlang"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "clap",
- "compiler",
- "parser",
- "quick-error",
- "rust_decimal",
- "safer-ffi",
- "tokenizer",
-]
-
[[package]]
name = "strsim"
version = "0.11.1"
diff --git a/rust_compiler/Cargo.toml b/rust_compiler/Cargo.toml
index 4f5a6db..ae125f6 100644
--- a/rust_compiler/Cargo.toml
+++ b/rust_compiler/Cargo.toml
@@ -1,5 +1,5 @@
[package]
-name = "stationlang"
+name = "slang"
version = "0.1.0"
edition = "2021"
@@ -27,9 +27,9 @@ path = "src/bin/generate_headers.rs"
required-features = ["headers"]
[lib]
-name = "stationlang"
+name = "slang"
path = "src/lib.rs"
-crate-type = ["cdylib"]
+crate-type = ["cdylib", "rlib"]
[dependencies]
clap = { version = "^4.5", features = ["derive"] }
@@ -40,9 +40,5 @@ parser = { path = "libs/parser" }
compiler = { path = "libs/compiler" }
safer-ffi = { workspace = true }
-
[dev-dependencies]
anyhow = { version = "^1.0", features = ["backtrace"] }
-
-[build-dependencies]
-safer-ffi = { version = "0.1", features = ["headers"] }
diff --git a/rust_compiler/build.rs b/rust_compiler/build.rs
deleted file mode 100644
index 567d489..0000000
--- a/rust_compiler/build.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-fn main() -> ::std::io::Result<()> {
- safer_ffi::headers::builder()
- .with_language(safer_ffi::headers::Language::CSharp)
- .to_file("../csharp_mod/SlangStubs.cs")?
- .generate()
- .unwrap();
-
- Ok(())
-}
diff --git a/rust_compiler/libs/compiler/src/v1.rs b/rust_compiler/libs/compiler/src/v1.rs
index 2b1709d..427ef57 100644
--- a/rust_compiler/libs/compiler/src/v1.rs
+++ b/rust_compiler/libs/compiler/src/v1.rs
@@ -68,7 +68,7 @@ struct CompilationResult {
}
pub struct Compiler<'a, W: std::io::Write> {
- parser: ASTParser,
+ parser: ASTParser<'a>,
function_locations: HashMap,
function_metadata: HashMap>,
devices: HashMap,
@@ -83,7 +83,7 @@ pub struct Compiler<'a, W: std::io::Write> {
impl<'a, W: std::io::Write> Compiler<'a, W> {
pub fn new(
- parser: ASTParser,
+ parser: ASTParser<'a>,
writer: &'a mut BufWriter<W>,
config: Option,
) -> Self {
diff --git a/rust_compiler/libs/parser/src/lib.rs b/rust_compiler/libs/parser/src/lib.rs
index 481524b..9367959 100644
--- a/rust_compiler/libs/parser/src/lib.rs
+++ b/rust_compiler/libs/parser/src/lib.rs
@@ -113,13 +113,13 @@ macro_rules! token_matches {
};
}
-pub struct Parser {
- tokenizer: TokenizerBuffer,
+pub struct Parser<'a> {
+ tokenizer: TokenizerBuffer<'a>,
current_token: Option<Token>,
}
-impl Parser {
- pub fn new(tokenizer: Tokenizer) -> Self {
+impl<'a> Parser<'a> {
+ pub fn new(tokenizer: Tokenizer<'a>) -> Self {
Parser {
tokenizer: TokenizerBuffer::new(tokenizer),
current_token: None,
diff --git a/rust_compiler/libs/tokenizer/src/lib.rs b/rust_compiler/libs/tokenizer/src/lib.rs
index 9494cc3..43670c9 100644
--- a/rust_compiler/libs/tokenizer/src/lib.rs
+++ b/rust_compiler/libs/tokenizer/src/lib.rs
@@ -39,15 +39,15 @@ pub trait Tokenize: Read + Seek {}
impl<T> Tokenize for T where T: Read + Seek {}
-pub struct Tokenizer {
- reader: BufReader<Box<dyn Tokenize>>,
+pub struct Tokenizer<'a> {
+ reader: BufReader<Box<dyn Tokenize + 'a>>,
char_buffer: [u8; 1],
line: usize,
column: usize,
returned_eof: bool,
}
-impl Tokenizer {
+impl<'a> Tokenizer<'a> {
pub fn from_path(input_file: impl Into) -> Result {
let file = std::fs::File::open(input_file.into())?;
let reader = BufReader::new(Box::new(file) as Box<dyn Tokenize>);
@@ -62,7 +62,7 @@ impl Tokenizer {
}
}
-impl From<String> for Tokenizer {
+impl<'a> From<String> for Tokenizer<'a> {
fn from(input: String) -> Self {
let reader = BufReader::new(Box::new(Cursor::new(input)) as Box<dyn Tokenize>);
@@ -76,13 +76,19 @@ impl From<String> for Tokenizer {
}
}
-impl From<&str> for Tokenizer {
- fn from(value: &str) -> Self {
- Self::from(value.to_string())
+impl<'a> From<&'a str> for Tokenizer<'a> {
+ fn from(value: &'a str) -> Self {
+ Self {
+ reader: BufReader::new(Box::new(Cursor::new(value)) as Box<dyn Tokenize + 'a>),
+ char_buffer: [0],
+ column: 1,
+ line: 1,
+ returned_eof: false,
+ }
}
}
-impl Tokenizer {
+impl<'a> Tokenizer<'a> {
/// Consumes the tokenizer and returns the next token in the stream
/// If there are no more tokens in the stream, this function returns None
/// If there is an error reading the stream, this function returns an error
@@ -447,7 +453,7 @@ impl Tokenizer {
}
}
-impl Iterator for Tokenizer {
+impl<'a> Iterator for Tokenizer<'a> {
type Item = Result;
fn next(&mut self) -> Option<Self::Item> {
@@ -455,14 +461,14 @@ impl Iterator for Tokenizer {
}
}
-pub struct TokenizerBuffer {
- tokenizer: Tokenizer,
+pub struct TokenizerBuffer<'a> {
+ tokenizer: Tokenizer<'a>,
buffer: VecDeque<Token>,
history: VecDeque<Token>,
}
-impl TokenizerBuffer {
- pub fn new(tokenizer: Tokenizer) -> Self {
+impl<'a> TokenizerBuffer<'a> {
+ pub fn new(tokenizer: Tokenizer<'a>) -> Self {
Self {
tokenizer,
buffer: VecDeque::new(),
diff --git a/rust_compiler/src/bin/generate_headers.rs b/rust_compiler/src/bin/generate_headers.rs
new file mode 100644
index 0000000..54cdb6a
--- /dev/null
+++ b/rust_compiler/src/bin/generate_headers.rs
@@ -0,0 +1,3 @@
+fn main() -> std::io::Result<()> {
+ ::slang::generate_headers()
+}
diff --git a/rust_compiler/src/lib.rs b/rust_compiler/src/lib.rs
index e5879a5..f2b5ead 100644
--- a/rust_compiler/src/lib.rs
+++ b/rust_compiler/src/lib.rs
@@ -15,10 +15,10 @@ pub struct FfiToken {
}
#[ffi_export]
-pub fn compile_from_string(input: safer_ffi::String) -> safer_ffi::String {
+pub fn compile_from_string(input: safer_ffi::char_p::char_p_ref<'_>) -> safer_ffi::String {
let mut writer = BufWriter::new(Vec::new());
- let tokenizer = Tokenizer::from(String::from(input));
+ let tokenizer = Tokenizer::from(input.to_str());
let parser = Parser::new(tokenizer);
let compiler = Compiler::new(parser, &mut writer, None);
@@ -35,8 +35,8 @@ pub fn compile_from_string(input: safer_ffi::String) -> safer_ffi::String {
}
#[ffi_export]
-pub fn tokenize_line(input: safer_ffi::String) -> safer_ffi::Vec<FfiToken> {
- let tokenizer = Tokenizer::from(String::from(input));
+pub fn tokenize_line(input: safer_ffi::char_p::char_p_ref<'_>) -> safer_ffi::Vec<FfiToken> {
+ let tokenizer = Tokenizer::from(input.to_str());
let mut tokens = Vec::<FfiToken>::new();
@@ -73,6 +73,6 @@ pub fn tokenize_line(input: safer_ffi::String) -> safer_ffi::Vec<FfiToken> {
pub fn generate_headers() -> std::io::Result<()> {
::safer_ffi::headers::builder()
.with_language(safer_ffi::headers::Language::CSharp)
- .to_file("SlangGlue.cs")?
+ .to_file("../csharp_mod/SlangGlue.cs")?
.generate()
}