automated C# FFI glue code

This commit is contained in:
2025-11-28 03:51:05 -07:00
parent 6b69cc1459
commit c97c5763ae
9 changed files with 130 additions and 54 deletions

80
csharp_mod/SlangGlue.cs Normal file
View File

@@ -0,0 +1,80 @@
/*! \file */
/*******************************************
* *
* File auto-generated by `::safer_ffi`. *
* *
* Do not manually edit this file. *
* *
*******************************************/
// NOTE(review): the trailing commas on the #pragma lines below may be an
// artifact of how the diff viewer wrapped one long generated line — verify
// against the file on disk before assuming the generator emits them.
#pragma warning disable IDE0044, IDE0049, IDE0055, IDE1006,
#pragma warning disable SA1004, SA1008, SA1023, SA1028,
#pragma warning disable SA1121, SA1134,
#pragma warning disable SA1201,
#pragma warning disable SA1300, SA1306, SA1307, SA1310, SA1313,
#pragma warning disable SA1500, SA1505, SA1507,
#pragma warning disable SA1600, SA1601, SA1604, SA1605, SA1611, SA1615, SA1649,
namespace Slang {
using System;
using System.Runtime.InteropServices;
// P/Invoke host class; `partial` lets the generator emit each binding in
// its own class fragment while they all merge into one `Ffi` type.
public unsafe partial class Ffi {
// Name of the native Rust dynamic library. On iOS the library lives
// inside a framework bundle, hence the path-qualified form.
#if IOS
private const string RustLib = "slang.framework/slang";
#else
private const string RustLib = "slang";
#endif
}
/// <summary>
/// Same as [<c>Vec<T></c>][<c>rust::Vec</c>], but with guaranteed <c>#[repr(C)]</c> layout
/// </summary>
// NOTE(review): the raw `<T>` inside the <summary> above is not XML-escaped;
// confirm it does not trip XML-doc tooling (the generator emits it as-is).
// Layout mirrors Rust's Vec triple: data pointer, length, capacity.
// Size = 24 matches 3 × 8 bytes on a 64-bit target.
[StructLayout(LayoutKind.Sequential, Size = 24)]
public unsafe struct Vec_uint8_t {
public byte * ptr;
public UIntPtr len;
public UIntPtr cap;
}
public unsafe partial class Ffi {
// Binding for the Rust `#[ffi_export] compile_from_string`.
// `input` is presumably a NUL-terminated C string (the Rust side takes a
// `char_p_ref`) — callers must keep the buffer alive for the call. The
// returned Vec_uint8_t is Rust-allocated; TODO confirm how it is freed.
[DllImport(RustLib, ExactSpelling = true)] public static unsafe extern
Vec_uint8_t compile_from_string (
byte /*const*/ * input);
}
// One token produced by the tokenizer, flattened for C interop.
// Size = 104 is consistent with 4 × 24-byte vecs + 4-byte Int32 + padding.
[StructLayout(LayoutKind.Sequential, Size = 104)]
public unsafe struct FfiToken_t {
public Vec_uint8_t text;
public Vec_uint8_t tooltip;
public Vec_uint8_t error;
public Vec_uint8_t status;
public Int32 column;
}
/// <summary>
/// Same as [<c>Vec<T></c>][<c>rust::Vec</c>], but with guaranteed <c>#[repr(C)]</c> layout
/// </summary>
[StructLayout(LayoutKind.Sequential, Size = 24)]
public unsafe struct Vec_FfiToken_t {
public FfiToken_t * ptr;
public UIntPtr len;
public UIntPtr cap;
}
public unsafe partial class Ffi {
// Binding for the Rust `#[ffi_export] tokenize_line`; same NUL-terminated
// input convention as compile_from_string above.
[DllImport(RustLib, ExactSpelling = true)] public static unsafe extern
Vec_FfiToken_t tokenize_line (
byte /*const*/ * input);
}
} /* Slang */

View File

@@ -779,6 +779,20 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e"
[[package]]
name = "slang"
version = "0.1.0"
dependencies = [
"anyhow",
"clap",
"compiler",
"parser",
"quick-error",
"rust_decimal",
"safer-ffi",
"tokenizer",
]
[[package]] [[package]]
name = "stabby" name = "stabby"
version = "36.2.2" version = "36.2.2"
@@ -814,20 +828,6 @@ dependencies = [
"syn 1.0.109", "syn 1.0.109",
] ]
[[package]]
name = "stationlang"
version = "0.1.0"
dependencies = [
"anyhow",
"clap",
"compiler",
"parser",
"quick-error",
"rust_decimal",
"safer-ffi",
"tokenizer",
]
[[package]] [[package]]
name = "strsim" name = "strsim"
version = "0.11.1" version = "0.11.1"

View File

@@ -1,5 +1,5 @@
[package] [package]
name = "stationlang" name = "slang"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2021"
@@ -27,9 +27,9 @@ path = "src/bin/generate_headers.rs"
required-features = ["headers"] required-features = ["headers"]
[lib] [lib]
name = "stationlang" name = "slang"
path = "src/lib.rs" path = "src/lib.rs"
crate-type = ["cdylib"] crate-type = ["cdylib", "rlib"]
[dependencies] [dependencies]
clap = { version = "^4.5", features = ["derive"] } clap = { version = "^4.5", features = ["derive"] }
@@ -40,9 +40,5 @@ parser = { path = "libs/parser" }
compiler = { path = "libs/compiler" } compiler = { path = "libs/compiler" }
safer-ffi = { workspace = true } safer-ffi = { workspace = true }
[dev-dependencies] [dev-dependencies]
anyhow = { version = "^1.0", features = ["backtrace"] } anyhow = { version = "^1.0", features = ["backtrace"] }
[build-dependencies]
safer-ffi = { version = "0.1", features = ["headers"] }

View File

@@ -1,9 +0,0 @@
/// Build-script entry point: writes the C# FFI bindings generated by
/// `safer_ffi` for this crate into the C# mod directory.
///
/// # Errors
/// Returns any I/O error from creating or writing the output file.
fn main() -> ::std::io::Result<()> {
    // `to_file` opens the destination and `generate` writes the bindings;
    // propagate both with `?`/tail-expression instead of the previous
    // `.unwrap()` + `Ok(())`, so a write failure exits with `Err` rather
    // than panicking. This also matches the style of the library's
    // `generate_headers` elsewhere in this commit.
    safer_ffi::headers::builder()
        .with_language(safer_ffi::headers::Language::CSharp)
        .to_file("../csharp_mod/SlangStubs.cs")?
        .generate()
}

View File

@@ -68,7 +68,7 @@ struct CompilationResult {
} }
pub struct Compiler<'a, W: std::io::Write> { pub struct Compiler<'a, W: std::io::Write> {
parser: ASTParser, parser: ASTParser<'a>,
function_locations: HashMap<String, usize>, function_locations: HashMap<String, usize>,
function_metadata: HashMap<String, Vec<String>>, function_metadata: HashMap<String, Vec<String>>,
devices: HashMap<String, String>, devices: HashMap<String, String>,
@@ -83,7 +83,7 @@ pub struct Compiler<'a, W: std::io::Write> {
impl<'a, W: std::io::Write> Compiler<'a, W> { impl<'a, W: std::io::Write> Compiler<'a, W> {
pub fn new( pub fn new(
parser: ASTParser, parser: ASTParser<'a>,
writer: &'a mut BufWriter<W>, writer: &'a mut BufWriter<W>,
config: Option<CompilerConfig>, config: Option<CompilerConfig>,
) -> Self { ) -> Self {

View File

@@ -113,13 +113,13 @@ macro_rules! token_matches {
}; };
} }
pub struct Parser { pub struct Parser<'a> {
tokenizer: TokenizerBuffer, tokenizer: TokenizerBuffer<'a>,
current_token: Option<Token>, current_token: Option<Token>,
} }
impl Parser { impl<'a> Parser<'a> {
pub fn new(tokenizer: Tokenizer) -> Self { pub fn new(tokenizer: Tokenizer<'a>) -> Self {
Parser { Parser {
tokenizer: TokenizerBuffer::new(tokenizer), tokenizer: TokenizerBuffer::new(tokenizer),
current_token: None, current_token: None,

View File

@@ -39,15 +39,15 @@ pub trait Tokenize: Read + Seek {}
impl<T> Tokenize for T where T: Read + Seek {} impl<T> Tokenize for T where T: Read + Seek {}
pub struct Tokenizer { pub struct Tokenizer<'a> {
reader: BufReader<Box<dyn Tokenize>>, reader: BufReader<Box<dyn Tokenize + 'a>>,
char_buffer: [u8; 1], char_buffer: [u8; 1],
line: usize, line: usize,
column: usize, column: usize,
returned_eof: bool, returned_eof: bool,
} }
impl Tokenizer { impl<'a> Tokenizer<'a> {
pub fn from_path(input_file: impl Into<PathBuf>) -> Result<Self, Error> { pub fn from_path(input_file: impl Into<PathBuf>) -> Result<Self, Error> {
let file = std::fs::File::open(input_file.into())?; let file = std::fs::File::open(input_file.into())?;
let reader = BufReader::new(Box::new(file) as Box<dyn Tokenize>); let reader = BufReader::new(Box::new(file) as Box<dyn Tokenize>);
@@ -62,7 +62,7 @@ impl Tokenizer {
} }
} }
impl From<String> for Tokenizer { impl<'a> From<String> for Tokenizer<'a> {
fn from(input: String) -> Self { fn from(input: String) -> Self {
let reader = BufReader::new(Box::new(Cursor::new(input)) as Box<dyn Tokenize>); let reader = BufReader::new(Box::new(Cursor::new(input)) as Box<dyn Tokenize>);
@@ -76,13 +76,19 @@ impl From<String> for Tokenizer {
} }
} }
impl From<&str> for Tokenizer { impl<'a> From<&'a str> for Tokenizer<'a> {
fn from(value: &str) -> Self { fn from(value: &'a str) -> Self {
Self::from(value.to_string()) Self {
reader: BufReader::new(Box::new(Cursor::new(value)) as Box<dyn Tokenize>),
char_buffer: [0],
column: 1,
line: 1,
returned_eof: false,
}
} }
} }
impl Tokenizer { impl<'a> Tokenizer<'a> {
/// Consumes the tokenizer and returns the next token in the stream /// Consumes the tokenizer and returns the next token in the stream
/// If there are no more tokens in the stream, this function returns None /// If there are no more tokens in the stream, this function returns None
/// If there is an error reading the stream, this function returns an error /// If there is an error reading the stream, this function returns an error
@@ -447,7 +453,7 @@ impl Tokenizer {
} }
} }
impl Iterator for Tokenizer { impl<'a> Iterator for Tokenizer<'a> {
type Item = Result<Token, Error>; type Item = Result<Token, Error>;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
@@ -455,14 +461,14 @@ impl Iterator for Tokenizer {
} }
} }
pub struct TokenizerBuffer { pub struct TokenizerBuffer<'a> {
tokenizer: Tokenizer, tokenizer: Tokenizer<'a>,
buffer: VecDeque<Token>, buffer: VecDeque<Token>,
history: VecDeque<Token>, history: VecDeque<Token>,
} }
impl TokenizerBuffer { impl<'a> TokenizerBuffer<'a> {
pub fn new(tokenizer: Tokenizer) -> Self { pub fn new(tokenizer: Tokenizer<'a>) -> Self {
Self { Self {
tokenizer, tokenizer,
buffer: VecDeque::new(), buffer: VecDeque::new(),

View File

@@ -0,0 +1,3 @@
/// Binary entry point: delegates to the `slang` library's
/// `generate_headers`, which emits the `safer_ffi` C# bindings
/// (written to `../csharp_mod/SlangGlue.cs` per the library code).
fn main() -> std::io::Result<()> {
::slang::generate_headers()
}

View File

@@ -15,10 +15,10 @@ pub struct FfiToken {
} }
#[ffi_export] #[ffi_export]
pub fn compile_from_string(input: safer_ffi::String) -> safer_ffi::String { pub fn compile_from_string(input: safer_ffi::char_p::char_p_ref<'_>) -> safer_ffi::String {
let mut writer = BufWriter::new(Vec::new()); let mut writer = BufWriter::new(Vec::new());
let tokenizer = Tokenizer::from(String::from(input)); let tokenizer = Tokenizer::from(input.to_str());
let parser = Parser::new(tokenizer); let parser = Parser::new(tokenizer);
let compiler = Compiler::new(parser, &mut writer, None); let compiler = Compiler::new(parser, &mut writer, None);
@@ -35,8 +35,8 @@ pub fn compile_from_string(input: safer_ffi::String) -> safer_ffi::String {
} }
#[ffi_export] #[ffi_export]
pub fn tokenize_line(input: safer_ffi::String) -> safer_ffi::Vec<FfiToken> { pub fn tokenize_line(input: safer_ffi::char_p::char_p_ref<'_>) -> safer_ffi::Vec<FfiToken> {
let tokenizer = Tokenizer::from(String::from(input)); let tokenizer = Tokenizer::from(input.to_str());
let mut tokens = Vec::<FfiToken>::new(); let mut tokens = Vec::<FfiToken>::new();
@@ -73,6 +73,6 @@ pub fn tokenize_line(input: safer_ffi::String) -> safer_ffi::Vec<FfiToken> {
pub fn generate_headers() -> std::io::Result<()> { pub fn generate_headers() -> std::io::Result<()> {
::safer_ffi::headers::builder() ::safer_ffi::headers::builder()
.with_language(safer_ffi::headers::Language::CSharp) .with_language(safer_ffi::headers::Language::CSharp)
.to_file("SlangGlue.cs")? .to_file("../csharp_mod/SlangGlue.cs")?
.generate() .generate()
} }