automated C# FFI glue code
csharp_mod/SlangGlue.cs (new file, 80 lines)
@@ -0,0 +1,80 @@
+/*! \file */
+/*******************************************
+ *                                         *
+ *  File auto-generated by `::safer_ffi`.  *
+ *                                         *
+ *  Do not manually edit this file.        *
+ *                                         *
+ *******************************************/
+
+#pragma warning disable IDE0044, IDE0049, IDE0055, IDE1006,
+#pragma warning disable SA1004, SA1008, SA1023, SA1028,
+#pragma warning disable SA1121, SA1134,
+#pragma warning disable SA1201,
+#pragma warning disable SA1300, SA1306, SA1307, SA1310, SA1313,
+#pragma warning disable SA1500, SA1505, SA1507,
+#pragma warning disable SA1600, SA1601, SA1604, SA1605, SA1611, SA1615, SA1649,
+
+namespace Slang {
+    using System;
+    using System.Runtime.InteropServices;
+
+    public unsafe partial class Ffi {
+#if IOS
+        private const string RustLib = "slang.framework/slang";
+#else
+        private const string RustLib = "slang";
+#endif
+    }
+
+    /// <summary>
+    /// Same as [<c>Vec<T></c>][<c>rust::Vec</c>], but with guaranteed <c>#[repr(C)]</c> layout
+    /// </summary>
+    [StructLayout(LayoutKind.Sequential, Size = 24)]
+    public unsafe struct Vec_uint8_t {
+        public byte * ptr;
+
+        public UIntPtr len;
+
+        public UIntPtr cap;
+    }
+
+    public unsafe partial class Ffi {
+        [DllImport(RustLib, ExactSpelling = true)] public static unsafe extern
+        Vec_uint8_t compile_from_string (
+            byte /*const*/ * input);
+    }
+
+    [StructLayout(LayoutKind.Sequential, Size = 104)]
+    public unsafe struct FfiToken_t {
+        public Vec_uint8_t text;
+
+        public Vec_uint8_t tooltip;
+
+        public Vec_uint8_t error;
+
+        public Vec_uint8_t status;
+
+        public Int32 column;
+    }
+
+    /// <summary>
+    /// Same as [<c>Vec<T></c>][<c>rust::Vec</c>], but with guaranteed <c>#[repr(C)]</c> layout
+    /// </summary>
+    [StructLayout(LayoutKind.Sequential, Size = 24)]
+    public unsafe struct Vec_FfiToken_t {
+        public FfiToken_t * ptr;
+
+        public UIntPtr len;
+
+        public UIntPtr cap;
+    }
+
+    public unsafe partial class Ffi {
+        [DllImport(RustLib, ExactSpelling = true)] public static unsafe extern
+        Vec_FfiToken_t tokenize_line (
+            byte /*const*/ * input);
+    }
+
+
+} /* Slang */
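Usage sketch (not part of this commit): one way a C# caller might drive the generated `compile_from_string` binding above. The wrapper class and method names are hypothetical; the trailing NUL byte is an assumption that matches the `char_p_ref` parameter the Rust side switches to later in this diff, and nothing frees the returned buffer because this glue exports no deallocator.

using System;
using System.Text;

namespace Slang {
    // Hypothetical helper, not part of the generated glue.
    public static unsafe class FfiExample {
        public static string CompileFromString(string source) {
            // The Rust side borrows a NUL-terminated C string, so append '\0'.
            byte[] utf8 = Encoding.UTF8.GetBytes(source + "\0");
            fixed (byte* input = utf8) {
                Vec_uint8_t result = Ffi.compile_from_string(input);
                // `len` is the byte count; `cap` only matters to the Rust allocator.
                string output = Encoding.UTF8.GetString(result.ptr, (int)result.len.ToUInt64());
                // NOTE: the Rust-allocated Vec is leaked here; a matching free
                // function would have to be exported separately (assumption).
                return output;
            }
        }
    }
}
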
rust_compiler/Cargo.lock (generated, 28 lines changed)
@@ -779,6 +779,20 @@ version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e"
 
+[[package]]
+name = "slang"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "clap",
+ "compiler",
+ "parser",
+ "quick-error",
+ "rust_decimal",
+ "safer-ffi",
+ "tokenizer",
+]
+
 [[package]]
 name = "stabby"
 version = "36.2.2"
@@ -814,20 +828,6 @@ dependencies = [
  "syn 1.0.109",
 ]
 
-[[package]]
-name = "stationlang"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "clap",
- "compiler",
- "parser",
- "quick-error",
- "rust_decimal",
- "safer-ffi",
- "tokenizer",
-]
-
 [[package]]
 name = "strsim"
 version = "0.11.1"
@@ -1,5 +1,5 @@
 [package]
-name = "stationlang"
+name = "slang"
 version = "0.1.0"
 edition = "2021"
 
@@ -27,9 +27,9 @@ path = "src/bin/generate_headers.rs"
 required-features = ["headers"]
 
 [lib]
-name = "stationlang"
+name = "slang"
 path = "src/lib.rs"
-crate-type = ["cdylib"]
+crate-type = ["cdylib", "rlib"]
 
 [dependencies]
 clap = { version = "^4.5", features = ["derive"] }
@@ -40,9 +40,5 @@ parser = { path = "libs/parser" }
 compiler = { path = "libs/compiler" }
 safer-ffi = { workspace = true }
 
-
 [dev-dependencies]
 anyhow = { version = "^1.0", features = ["backtrace"] }
-
-[build-dependencies]
-safer-ffi = { version = "0.1", features = ["headers"] }
@@ -1,9 +0,0 @@
-fn main() -> ::std::io::Result<()> {
-    safer_ffi::headers::builder()
-        .with_language(safer_ffi::headers::Language::CSharp)
-        .to_file("../csharp_mod/SlangStubs.cs")?
-        .generate()
-        .unwrap();
-
-    Ok(())
-}
@@ -68,7 +68,7 @@ struct CompilationResult {
 }
 
 pub struct Compiler<'a, W: std::io::Write> {
-    parser: ASTParser,
+    parser: ASTParser<'a>,
     function_locations: HashMap<String, usize>,
     function_metadata: HashMap<String, Vec<String>>,
     devices: HashMap<String, String>,
@@ -83,7 +83,7 @@ pub struct Compiler<'a, W: std::io::Write> {
 
 impl<'a, W: std::io::Write> Compiler<'a, W> {
     pub fn new(
-        parser: ASTParser,
+        parser: ASTParser<'a>,
         writer: &'a mut BufWriter<W>,
        config: Option<CompilerConfig>,
     ) -> Self {
@@ -113,13 +113,13 @@ macro_rules! token_matches {
     };
 }
 
-pub struct Parser {
-    tokenizer: TokenizerBuffer,
+pub struct Parser<'a> {
+    tokenizer: TokenizerBuffer<'a>,
     current_token: Option<Token>,
 }
 
-impl Parser {
-    pub fn new(tokenizer: Tokenizer) -> Self {
+impl<'a> Parser<'a> {
+    pub fn new(tokenizer: Tokenizer<'a>) -> Self {
         Parser {
             tokenizer: TokenizerBuffer::new(tokenizer),
             current_token: None,
@@ -39,15 +39,15 @@ pub trait Tokenize: Read + Seek {}
 
 impl<T> Tokenize for T where T: Read + Seek {}
 
-pub struct Tokenizer {
-    reader: BufReader<Box<dyn Tokenize>>,
+pub struct Tokenizer<'a> {
+    reader: BufReader<Box<dyn Tokenize + 'a>>,
     char_buffer: [u8; 1],
     line: usize,
     column: usize,
     returned_eof: bool,
 }
 
-impl Tokenizer {
+impl<'a> Tokenizer<'a> {
     pub fn from_path(input_file: impl Into<PathBuf>) -> Result<Self, Error> {
         let file = std::fs::File::open(input_file.into())?;
         let reader = BufReader::new(Box::new(file) as Box<dyn Tokenize>);
@@ -62,7 +62,7 @@ impl Tokenizer {
     }
 }
 
-impl From<String> for Tokenizer {
+impl<'a> From<String> for Tokenizer<'a> {
     fn from(input: String) -> Self {
         let reader = BufReader::new(Box::new(Cursor::new(input)) as Box<dyn Tokenize>);
 
@@ -76,13 +76,19 @@ impl From<String> for Tokenizer {
     }
 }
 
-impl From<&str> for Tokenizer {
-    fn from(value: &str) -> Self {
-        Self::from(value.to_string())
+impl<'a> From<&'a str> for Tokenizer<'a> {
+    fn from(value: &'a str) -> Self {
+        Self {
+            reader: BufReader::new(Box::new(Cursor::new(value)) as Box<dyn Tokenize>),
+            char_buffer: [0],
+            column: 1,
+            line: 1,
+            returned_eof: false,
+        }
     }
 }
 
-impl Tokenizer {
+impl<'a> Tokenizer<'a> {
     /// Consumes the tokenizer and returns the next token in the stream
     /// If there are no more tokens in the stream, this function returns None
     /// If there is an error reading the stream, this function returns an error
@@ -447,7 +453,7 @@ impl Tokenizer {
     }
 }
 
-impl Iterator for Tokenizer {
+impl<'a> Iterator for Tokenizer<'a> {
     type Item = Result<Token, Error>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -455,14 +461,14 @@ impl Iterator for Tokenizer {
     }
 }
 
-pub struct TokenizerBuffer {
-    tokenizer: Tokenizer,
+pub struct TokenizerBuffer<'a> {
+    tokenizer: Tokenizer<'a>,
     buffer: VecDeque<Token>,
     history: VecDeque<Token>,
 }
 
-impl TokenizerBuffer {
-    pub fn new(tokenizer: Tokenizer) -> Self {
+impl<'a> TokenizerBuffer<'a> {
+    pub fn new(tokenizer: Tokenizer<'a>) -> Self {
         Self {
             tokenizer,
             buffer: VecDeque::new(),
rust_compiler/src/bin/generate_headers.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
+fn main() -> std::io::Result<()> {
+    ::slang::generate_headers()
+}
@@ -15,10 +15,10 @@ pub struct FfiToken {
 }
 
 #[ffi_export]
-pub fn compile_from_string(input: safer_ffi::String) -> safer_ffi::String {
+pub fn compile_from_string(input: safer_ffi::char_p::char_p_ref<'_>) -> safer_ffi::String {
     let mut writer = BufWriter::new(Vec::new());
 
-    let tokenizer = Tokenizer::from(String::from(input));
+    let tokenizer = Tokenizer::from(input.to_str());
     let parser = Parser::new(tokenizer);
     let compiler = Compiler::new(parser, &mut writer, None);
 
@@ -35,8 +35,8 @@ pub fn compile_from_string(input: safer_ffi::String) -> safer_ffi::String {
 }
 
 #[ffi_export]
-pub fn tokenize_line(input: safer_ffi::String) -> safer_ffi::Vec<FfiToken> {
-    let tokenizer = Tokenizer::from(String::from(input));
+pub fn tokenize_line(input: safer_ffi::char_p::char_p_ref<'_>) -> safer_ffi::Vec<FfiToken> {
+    let tokenizer = Tokenizer::from(input.to_str());
 
     let mut tokens = Vec::<FfiToken>::new();
 
@@ -73,6 +73,6 @@ pub fn tokenize_line(input: safer_ffi::String) -> safer_ffi::Vec<FfiToken> {
 pub fn generate_headers() -> std::io::Result<()> {
     ::safer_ffi::headers::builder()
         .with_language(safer_ffi::headers::Language::CSharp)
-        .to_file("SlangGlue.cs")?
+        .to_file("../csharp_mod/SlangGlue.cs")?
         .generate()
 }
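
Usage sketch (not part of this commit): consuming `tokenize_line` from C# through the glue generated above. Helper names and the NUL-terminated input are assumptions based only on the generated signatures; as with `compile_from_string`, no deallocator is exported, so the returned vectors are leaked in this sketch.

using System;
using System.Text;

namespace Slang {
    // Hypothetical helper, not part of the generated glue.
    public static unsafe class TokenizeExample {
        static string Utf8(Vec_uint8_t v) =>
            v.len == UIntPtr.Zero ? "" : Encoding.UTF8.GetString(v.ptr, (int)v.len.ToUInt64());

        public static void PrintTokens(string line) {
            // `tokenize_line` takes a `char_p_ref` on the Rust side: a NUL-terminated C string.
            byte[] utf8 = Encoding.UTF8.GetBytes(line + "\0");
            fixed (byte* input = utf8) {
                Vec_FfiToken_t tokens = Ffi.tokenize_line(input);
                for (ulong i = 0; i < tokens.len.ToUInt64(); i++) {
                    FfiToken_t token = tokens.ptr[i];
                    Console.WriteLine($"{token.column}: {Utf8(token.text)}");
                }
                // NOTE: `tokens` and the Vec_uint8_t fields inside each token are
                // Rust-allocated and are not freed here.
            }
        }
    }
}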