@@ -1,6 +1,7 @@
namespace Slang;

using System;
using System.Collections.Generic;
using System.Text;
using StationeersIC10Editor;

@@ -53,7 +54,7 @@ public static unsafe class SlangExtensions
var color = GetColorForKind(token.token_kind);

int colIndex = token.column;
int colIndex = token.column - 1;
if (colIndex < 0)
    colIndex = 0;

@@ -80,20 +81,50 @@ public static unsafe class SlangExtensions
    return list;
}

public static unsafe List<Diagnostic> ToList(this Vec_FfiDiagnostic_t vec)
{
    var toReturn = new List<Diagnostic>((int)vec.len);

    var currentPtr = vec.ptr;

    for (int i = 0; i < (int)vec.len; i++)
    {
        var item = currentPtr[i];

        toReturn.Add(
            new Slang.Diagnostic
            {
                Message = item.message.AsString(),
                Severity = item.severity,
                Range = new Slang.Range
                {
                    EndCol = Math.Max(item.range.end_col - 2, 0),
                    EndLine = item.range.end_line - 1,
                    StartCol = Math.Max(item.range.start_col - 2, 0),
                    StartLine = item.range.start_line - 1,
                },
            }
        );
    }

    Ffi.free_ffi_diagnostic_vec(vec);
    return toReturn;
}

private static uint GetColorForKind(uint kind)
{
    switch (kind)
    {
        case 1:
            return SlangFormatter.ColorInstruction; // Keyword
        case 2:
            return SlangFormatter.ColorDefault; // Identifier
        case 3:
            return SlangFormatter.ColorNumber; // Number
        case 4:
            return SlangFormatter.ColorString; // String
        case 5:
        case 2:
            return SlangFormatter.ColorString; // Number
        case 3:
            return SlangFormatter.ColorInstruction; // Boolean
        case 4:
            return SlangFormatter.ColorSelection; // Keyword
        case 5:
            return SlangFormatter.ColorLineNumber; // Identifier
        case 6:
            return SlangFormatter.ColorDefault; // Symbol
        default:
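A note on the token_kind values consumed by GetColorForKind above: they have to stay in sync with the Rust `impl From<TokenType> for u32` added later in this diff (0 = error/EOF, 1 = String, 2 = Number, 3 = Boolean, 4 = Keyword, 5 = Identifier, 6 = Symbol). A minimal C# sketch of that mapping; the enum is an assumption for illustration and is not part of this PR:

// Hypothetical helper, not part of this PR: mirrors the Rust TokenType -> u32
// mapping so the switch above could use names instead of magic numbers.
public enum SlangTokenKind : uint
{
    Error = 0,      // tokenizer errors and EOF
    String = 1,
    Number = 2,
    Boolean = 3,
    Keyword = 4,
    Identifier = 5,
    Symbol = 6,
}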
@@ -83,6 +83,50 @@ public unsafe partial class Ffi {
    slice_ref_uint16_t input);
}

[StructLayout(LayoutKind.Sequential, Size = 16)]
public unsafe struct FfiRange_t {
    public UInt32 start_col;

    public UInt32 end_col;

    public UInt32 start_line;

    public UInt32 end_line;
}

[StructLayout(LayoutKind.Sequential, Size = 48)]
public unsafe struct FfiDiagnostic_t {
    public Vec_uint8_t message;

    public Int32 severity;

    public FfiRange_t range;
}

/// <summary>
/// Same as [<c>Vec<T></c>][<c>rust::Vec</c>], but with guaranteed <c>#[repr(C)]</c> layout
/// </summary>
[StructLayout(LayoutKind.Sequential, Size = 24)]
public unsafe struct Vec_FfiDiagnostic_t {
    public FfiDiagnostic_t * ptr;

    public UIntPtr len;

    public UIntPtr cap;
}

public unsafe partial class Ffi {
    [DllImport(RustLib, ExactSpelling = true)] public static unsafe extern
    Vec_FfiDiagnostic_t diagnose_source (
        slice_ref_uint16_t input);
}

public unsafe partial class Ffi {
    [DllImport(RustLib, ExactSpelling = true)] public static unsafe extern
    void free_ffi_diagnostic_vec (
        Vec_FfiDiagnostic_t v);
}

[StructLayout(LayoutKind.Sequential, Size = 64)]
public unsafe struct FfiToken_t {
    public Vec_uint8_t tooltip;

@@ -121,12 +165,6 @@ public unsafe partial class Ffi {
}

public unsafe partial class Ffi {
    /// <summary>
    /// C# handles strings as UTF16. We do NOT want to allocate that memory in C# because
    /// we want to avoid GC. So we pass it to Rust to handle all the memory allocations.
    /// This should result in the ability to tokenize many times without triggering frame drops
    /// from the GC from a <c>GetBytes()</c> call on a string in C#.
    /// </summary>
    [DllImport(RustLib, ExactSpelling = true)] public static unsafe extern
    Vec_FfiToken_t tokenize_line (
        slice_ref_uint16_t input);
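The vectors returned by diagnose_source and tokenize_line are allocated on the Rust side, so each call must be paired with the matching free_* export exactly once (the ToList extension above already does this). A minimal sketch of that ownership contract; the wrapper method name is an assumption, not part of this PR:

// Hypothetical wrapper, not part of this PR: shows the call/free pairing only.
public static unsafe int CountDiagnostics(slice_ref_uint16_t input)
{
    var vec = Ffi.diagnose_source(input);
    try
    {
        return (int)vec.len;
    }
    finally
    {
        // Hand the Rust-allocated vector back so it is dropped on the Rust side.
        Ffi.free_ffi_diagnostic_vec(vec);
    }
}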
@@ -1,20 +1,140 @@
namespace Slang;

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Timers;
using StationeersIC10Editor;

public class SlangFormatter : ICodeFormatter
{
    private CancellationTokenSource? _lspCancellationToken;
    private readonly SynchronizationContext? _mainThreadContext;
    private volatile bool IsDiagnosing = false;

    public static readonly uint ColorInstruction = ColorFromHTML("#ffff00");
    public static readonly uint ColorString = ColorFromHTML("#ce9178");

    public override Line ParseLine(string line)
    private HashSet<uint> _linesWithErrors = new();

    public SlangFormatter()
    {
        return Marshal.TokenizeLine(line);
        // 1. Capture the Main Thread context.
        // This works because the Editor instantiates this class on the main thread.
        _mainThreadContext = SynchronizationContext.Current;
    }

    public override string Compile()
    {
        L.Info("ICodeFormatter attempted to compile source code.");
        return this.Lines.RawText;
    }

    public override Line ParseLine(string line)
    {
        HandleCodeChanged();
        return Marshal.TokenizeLine(line);
    }

    private void HandleCodeChanged()
    {
        if (IsDiagnosing)
            return;

        _lspCancellationToken?.Cancel();
        _lspCancellationToken?.Dispose();

        _lspCancellationToken = new CancellationTokenSource();

        _ = Task.Run(() => HandleLsp(_lspCancellationToken.Token), _lspCancellationToken.Token);
    }

    private void OnTimerElapsed(object sender, ElapsedEventArgs e) { }

    private async Task HandleLsp(CancellationToken cancellationToken)
    {
        try
        {
            await Task.Delay(200, cancellationToken);

            if (cancellationToken.IsCancellationRequested)
            {
                return;
            }

            // 3. Dispatch the UI update to the Main Thread
            if (_mainThreadContext != null)
            {
                // Post ensures ApplyDiagnostics runs on the captured thread (Main Thread)
                _mainThreadContext.Post(_ => ApplyDiagnostics(), null);
            }
            else
            {
                // Fallback: If context is null (rare in Unity), try running directly
                // but warn, as this might crash if not thread-safe.
                L.Warning("SynchronizationContext was null. Attempting direct update (risky).");
                ApplyDiagnostics();
            }
        }
        finally { }
    }

    // This runs on the Main Thread
    private void ApplyDiagnostics()
    {
        List<Diagnostic> diagnosis = Marshal.DiagnoseSource(this.RawText);

        var dict = diagnosis.GroupBy(d => d.Range.StartLine).ToDictionary(g => g.Key);

        var linesToRefresh = new HashSet<uint>(dict.Keys);
        linesToRefresh.UnionWith(_linesWithErrors);

        IsDiagnosing = true;

        foreach (var lineIndex in linesToRefresh)
        {
            // safety check for out of bounds (in case lines were deleted)
            if (lineIndex >= this.Lines.Count)
                continue;

            var line = this.Lines[(int)lineIndex];

            if (line is null)
                continue;

            line.ClearTokens();

            Dictionary<int, SemanticToken> lineDict = Marshal
                .TokenizeLine(line.Text)
                .Tokens.ToDictionary((t) => t.Column);

            if (dict.ContainsKey(lineIndex))
            {
                foreach (var lineDiagnostic in dict[lineIndex])
                {
                    lineDict[(int)lineDiagnostic.Range.StartCol] = new SemanticToken
                    {
                        Column = Math.Abs((int)lineDiagnostic.Range.StartCol),
                        Length = Math.Abs(
                            (int)(lineDiagnostic.Range.EndCol - lineDiagnostic.Range.StartCol)
                        ),
                        Line = (int)lineIndex,
                        IsError = true,
                        Data = lineDiagnostic.Message,
                        Color = SlangFormatter.ColorError,
                    };
                }
            }

            foreach (var token in lineDict.Values)
            {
                line.AddToken(token);
            }
        }

        _linesWithErrors = new HashSet<uint>(dict.Keys);

        IsDiagnosing = false;
    }
}
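The HandleCodeChanged/HandleLsp pair above acts as a debounce: every edit cancels the previous pending diagnostics pass, and only the last edit inside the 200 ms window reaches ApplyDiagnostics on the main thread. A stripped-down sketch of that pattern, reusing the field names above; it is illustrative, not the PR's exact code:

// Minimal debounce sketch (assumed method name, not part of this PR).
private void OnEdit()
{
    _lspCancellationToken?.Cancel();
    _lspCancellationToken?.Dispose();
    _lspCancellationToken = new CancellationTokenSource();
    var token = _lspCancellationToken.Token;

    _ = Task.Run(async () =>
    {
        // Throws TaskCanceledException if another edit arrives within 200 ms.
        await Task.Delay(200, token);
        // Marshal the UI update back onto the thread that created the formatter.
        _mainThreadContext?.Post(_ => ApplyDiagnostics(), null);
    }, token);
}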
@@ -1,11 +1,27 @@
namespace Slang;

using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using StationeersIC10Editor;

public struct Range
{
    public uint StartCol;
    public uint EndCol;
    public uint StartLine;
    public uint EndLine;
}

public struct Diagnostic
{
    public string Message;
    public int Severity;
    public Range Range;
}

public static class Marshal
{
    private static IntPtr _libraryHandle = IntPtr.Zero;
@@ -61,38 +77,9 @@ public static class Marshal
        }
    }

    public static unsafe Line TokenizeLine(string source)
    {
        if (String.IsNullOrEmpty(source))
        {
            return new Line(source);
        }

        if (!EnsureLibLoaded())
        {
            return new Line(source);
        }

        fixed (char* ptrString = source)
        {
            var input = new slice_ref_uint16_t
            {
                ptr = (ushort*)ptrString,
                len = (UIntPtr)source.Length,
            };
            return Ffi.tokenize_line(input).ToLine(source);
        }
    }

    public static unsafe bool CompileFromString(string inputString, out string compiledString)
    {
        if (String.IsNullOrEmpty(inputString))
        {
            compiledString = String.Empty;
            return false;
        }

        if (!EnsureLibLoaded())
        if (String.IsNullOrEmpty(inputString) || !EnsureLibLoaded())
        {
            compiledString = String.Empty;
            return false;
@@ -124,6 +111,46 @@ public static class Marshal
        }
    }

    public static unsafe List<Diagnostic> DiagnoseSource(string inputString)
    {
        if (string.IsNullOrEmpty(inputString) || !EnsureLibLoaded())
        {
            return new();
        }

        fixed (char* ptrInput = inputString)
        {
            var input = new slice_ref_uint16_t
            {
                ptr = (ushort*)ptrInput,
                len = (UIntPtr)inputString.Length,
            };

            return Ffi.diagnose_source(input).ToList();
        }
    }

    public static unsafe Line TokenizeLine(string inputString)
    {
        if (string.IsNullOrEmpty(inputString) || !EnsureLibLoaded())
        {
            return new Line(inputString);
        }

        fixed (char* ptrInputStr = inputString)
        {
            var strRef = new slice_ref_uint16_t
            {
                len = (UIntPtr)inputString.Length,
                ptr = (ushort*)ptrInputStr,
            };

            var tokens = Ffi.tokenize_line(strRef);

            return tokens.ToLine(inputString);
        }
    }

    private static string ExtractNativeLibrary(string libName)
    {
        string destinationPath = Path.Combine(Path.GetTempPath(), libName);
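A hedged usage sketch of the new Marshal.DiagnoseSource API; the source snippet and the console output are illustrative and not taken from this PR:

// Illustrative only: severities follow the LSP convention used by the Rust side
// (1 = error, 2 = warning, 3 = information, 4 = hint).
string source = "let x = 4\nlet y = x +";
foreach (var diagnostic in Marshal.DiagnoseSource(source))
{
    Console.WriteLine(
        $"[{diagnostic.Severity}] line {diagnostic.Range.StartLine}, col {diagnostic.Range.StartCol}: {diagnostic.Message}");
}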
@@ -1,11 +1,9 @@
namespace Slang;

using System;
using Assets.Scripts;
using Assets.Scripts.Objects;
using Assets.Scripts.Objects.Electrical;
using Assets.Scripts.Objects.Motherboards;
using Assets.Scripts.UI;
using HarmonyLib;

[HarmonyPatch]
@@ -1,7 +1,3 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Text;
using System.Text.RegularExpressions;
using BepInEx;
using HarmonyLib;
@@ -65,28 +61,6 @@ namespace Slang
    }
}

/// <summary>
/// Compresses the original slang source code with gzip and encodes the result as base64, returning the resulting string.
/// </summary>
public static string EncodeSource(string source)
{
    if (string.IsNullOrEmpty(source))
    {
        return "";
    }

    byte[] bytes = Encoding.UTF8.GetBytes(source);

    using (var memoryStream = new MemoryStream())
    {
        using (var gzipStream = new GZipStream(memoryStream, CompressionMode.Compress))
        {
            gzipStream.Write(bytes, 0, bytes.Length);
        }
        return Convert.ToBase64String(memoryStream.ToArray());
    }
}

public static bool IsSlangSource(ref string input)
{
    return SlangSourceCheck.IsMatch(input);
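For reference, the inverse of EncodeSource is base64-decoding followed by gunzipping back to UTF-8. A sketch of that counterpart, assuming a DecodeSource method that is not part of this PR:

// Hypothetical counterpart to EncodeSource, not part of this PR.
public static string DecodeSource(string encoded)
{
    if (string.IsNullOrEmpty(encoded))
    {
        return "";
    }

    byte[] compressed = Convert.FromBase64String(encoded);

    using (var memoryStream = new MemoryStream(compressed))
    using (var gzipStream = new GZipStream(memoryStream, CompressionMode.Decompress))
    using (var output = new MemoryStream())
    {
        gzipStream.CopyTo(output);
        return Encoding.UTF8.GetString(output.ToArray());
    }
}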
52 rust_compiler/Cargo.lock generated
@@ -114,6 +114,12 @@ dependencies = [
 "windows-link",
]

[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"

[[package]]
name = "bitvec"
version = "1.0.1"
@@ -247,6 +253,7 @@ version = "0.1.0"
dependencies = [
 "anyhow",
 "indoc",
 "lsp-types",
 "parser",
 "pretty_assertions",
 "quick-error",
@@ -300,6 +307,15 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320bea982e85d42441eb25c49b41218e7eaa2657e8f90bc4eca7437376751e23"

[[package]]
name = "fluent-uri"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17c704e9dbe1ddd863da1e6ff3567795087b1eb201ce80d8fa81162e1516500d"
dependencies = [
 "bitflags",
]

[[package]]
name = "funty"
version = "2.0.0"
@@ -400,6 +416,19 @@ version = "0.2.177"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"

[[package]]
name = "lsp-types"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53353550a17c04ac46c585feb189c2db82154fc84b79c7a66c96c2c644f66071"
dependencies = [
 "bitflags",
 "fluent-uri",
 "serde",
 "serde_json",
 "serde_repr",
]

[[package]]
name = "macro_rules_attribute"
version = "0.1.3"
@@ -466,6 +495,7 @@ name = "parser"
version = "0.1.0"
dependencies = [
 "anyhow",
 "lsp-types",
 "quick-error",
 "tokenizer",
]
@@ -732,6 +762,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
dependencies = [
 "serde_core",
 "serde_derive",
]

[[package]]
@@ -767,6 +798,17 @@ dependencies = [
 "serde_core",
]

[[package]]
name = "serde_repr"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
dependencies = [
 "proc-macro2",
 "quote",
 "syn 2.0.111",
]

[[package]]
name = "sha2-const-stable"
version = "0.1.0"
@@ -786,6 +828,7 @@ dependencies = [
 "anyhow",
 "clap",
 "compiler",
 "lsp-types",
 "parser",
 "quick-error",
 "rust_decimal",
@@ -882,6 +925,7 @@ name = "tokenizer"
version = "0.1.0"
dependencies = [
 "anyhow",
 "lsp-types",
 "quick-error",
 "rust_decimal",
]
@@ -1071,18 +1115,18 @@ checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"

[[package]]
name = "zerocopy"
version = "0.8.30"
version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ea879c944afe8a2b25fef16bb4ba234f47c694565e97383b36f3a878219065c"
checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3"
dependencies = [
 "zerocopy-derive",
]

[[package]]
name = "zerocopy-derive"
version = "0.8.30"
version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf955aa904d6040f70dc8e9384444cb1030aed272ba3cb09bbc4ab9e7c1f34f5"
checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a"
dependencies = [
 "proc-macro2",
 "quote",
@@ -10,6 +10,7 @@ members = ["libs/*"]
quick-error = "2"
rust_decimal = "1"
safer-ffi = { version = "0.1" }
lsp-types = { version = "0.97" }

[features]
headers = ["safer-ffi/headers"]
@@ -33,6 +34,7 @@ crate-type = ["cdylib", "rlib"]

[dependencies]
clap = { version = "^4.5", features = ["derive"] }
lsp-types = { workspace = true }
quick-error = { workspace = true }
rust_decimal = { workspace = true }
tokenizer = { path = "libs/tokenizer" }
@@ -7,6 +7,7 @@ edition = "2024"
quick-error = { workspace = true }
parser = { path = "../parser" }
tokenizer = { path = "../tokenizer" }
lsp-types = { workspace = true }

[dev-dependencies]
anyhow = { version = "1.0" }
@@ -82,8 +82,8 @@ fn incorrect_args_count() -> anyhow::Result<()> {
    };

    assert!(matches!(
        compiled,
        Err(super::super::Error::AgrumentMismatch(_))
        compiled[0],
        super::super::Error::AgrumentMismatch(_, _)
    ));

    Ok(())
@@ -15,7 +15,7 @@ macro_rules! compile {
        &mut writer,
        None,
    );
    compiler.compile()?;
    compiler.compile();
    output!(writer)
}};

@@ -36,7 +36,7 @@ macro_rules! compile {
        &mut writer,
        Some(crate::CompilerConfig { debug: true }),
    );
    compiler.compile()?;
    compiler.compile();
    output!(writer)
}};
}
File diff suppressed because it is too large
@@ -6,6 +6,7 @@ edition = "2024"
[dependencies]
quick-error = { workspace = true }
tokenizer = { path = "../tokenizer" }
lsp-types = { workspace = true }

[dev-dependencies]
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
use crate::tree_node::{Expression, Literal};
use crate::tree_node::{Expression, Literal, Spanned};

use super::LiteralOrVariable;

@@ -102,7 +102,7 @@ pub enum System {
    /// Represents a function that can be called to sleep for a certain amount of time.
    /// ## In Game
    /// `sleep a(r?|num)`
    Sleep(Box<Expression>),
    Sleep(Box<Spanned<Expression>>),
    /// Gets the in-game hash for a specific prefab name.
    /// ## In Game
    /// `HASH("prefabName")`
@@ -120,7 +120,12 @@ pub enum System {
    /// lbn r? deviceHash nameHash logicType batchMode
    /// ## Examples
    /// lbn r0 HASH("StructureWallLight") HASH("wallLight") On Minimum
    LoadBatchNamed(LiteralOrVariable, Box<Expression>, Literal, Literal),
    LoadBatchNamed(
        LiteralOrVariable,
        Box<Spanned<Expression>>,
        Literal,
        Literal,
    ),
    /// Loads a LogicType from all connected network devices, aggregating them via a
    /// batchMode
    /// ## In Game
@@ -133,14 +138,14 @@ pub enum System {
    /// `s d? logicType r?`
    /// ## Example
    /// `s d0 Setting r0`
    SetOnDevice(LiteralOrVariable, Literal, Box<Expression>),
    SetOnDevice(LiteralOrVariable, Literal, Box<Spanned<Expression>>),
    /// Represents a function which stores a setting to all devices that match
    /// the given deviceHash
    /// ## In Game
    /// `sb deviceHash logicType r?`
    /// ## Example
    /// `sb HASH("Doors") Lock 1`
    SetOnDeviceBatched(LiteralOrVariable, Literal, Box<Expression>),
    SetOnDeviceBatched(LiteralOrVariable, Literal, Box<Spanned<Expression>>),
    /// Represents a function which stores a setting to all devices that match
    /// both the given deviceHash AND the given nameHash
    /// ## In Game
@@ -151,7 +156,7 @@ pub enum System {
        LiteralOrVariable,
        LiteralOrVariable,
        Literal,
        Box<Expression>,
        Box<Spanned<Expression>>,
    ),
}

@@ -224,3 +229,4 @@ impl SysCall {
        )
    }
}

@@ -92,7 +92,7 @@ fn test_priority_expression() -> Result<()> {

    let expression = parser.parse()?.unwrap();

    assert_eq!("(let x = (4))", expression.to_string());
    assert_eq!("(let x = 4)", expression.to_string());

    Ok(())
}
@@ -109,7 +109,7 @@ fn test_binary_expression() -> Result<()> {
    assert_eq!("(((45 * 2) - (15 / 5)) + (5 ** 2))", expr.to_string());

    let expr = parser!("(5 - 2) * 10").parse()?.unwrap();
    assert_eq!("(((5 - 2)) * 10)", expr.to_string());
    assert_eq!("((5 - 2) * 10)", expr.to_string());

    Ok(())
}
@@ -1,3 +1,5 @@
use std::ops::Deref;

use super::sys_call::SysCall;
use tokenizer::token::Number;

@@ -20,12 +22,12 @@ impl std::fmt::Display for Literal {

#[derive(Debug, PartialEq, Eq)]
pub enum BinaryExpression {
    Add(Box<Expression>, Box<Expression>),
    Multiply(Box<Expression>, Box<Expression>),
    Divide(Box<Expression>, Box<Expression>),
    Subtract(Box<Expression>, Box<Expression>),
    Exponent(Box<Expression>, Box<Expression>),
    Modulo(Box<Expression>, Box<Expression>),
    Add(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
    Multiply(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
    Divide(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
    Subtract(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
    Exponent(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
    Modulo(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
}

impl std::fmt::Display for BinaryExpression {
@@ -43,15 +45,15 @@ impl std::fmt::Display for BinaryExpression {

#[derive(Debug, PartialEq, Eq)]
pub enum LogicalExpression {
    And(Box<Expression>, Box<Expression>),
    Or(Box<Expression>, Box<Expression>),
    Not(Box<Expression>),
    Equal(Box<Expression>, Box<Expression>),
    NotEqual(Box<Expression>, Box<Expression>),
    GreaterThan(Box<Expression>, Box<Expression>),
    GreaterThanOrEqual(Box<Expression>, Box<Expression>),
    LessThan(Box<Expression>, Box<Expression>),
    LessThanOrEqual(Box<Expression>, Box<Expression>),
    And(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
    Or(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
    Not(Box<Spanned<Expression>>),
    Equal(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
    NotEqual(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
    GreaterThan(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
    GreaterThanOrEqual(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
    LessThan(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
    LessThanOrEqual(Box<Spanned<Expression>>, Box<Spanned<Expression>>),
}

impl std::fmt::Display for LogicalExpression {
@@ -72,8 +74,8 @@ impl std::fmt::Display for LogicalExpression {

#[derive(Debug, PartialEq, Eq)]
pub struct AssignmentExpression {
    pub identifier: String,
    pub expression: Box<Expression>,
    pub identifier: Spanned<String>,
    pub expression: Box<Spanned<Expression>>,
}

impl std::fmt::Display for AssignmentExpression {
@@ -84,8 +86,8 @@ impl std::fmt::Display for AssignmentExpression {

#[derive(Debug, PartialEq, Eq)]
pub struct FunctionExpression {
    pub name: String,
    pub arguments: Vec<String>,
    pub name: Spanned<String>,
    pub arguments: Vec<Spanned<String>>,
    pub body: BlockExpression,
}

@@ -95,14 +97,18 @@ impl std::fmt::Display for FunctionExpression {
            f,
            "(fn {}({}) {{ {} }})",
            self.name,
            self.arguments.to_vec().join(", "),
            self.arguments
                .iter()
                .map(|e| e.to_string())
                .collect::<Vec<String>>()
                .join(", "),
            self.body
        )
    }
}

#[derive(Debug, PartialEq, Eq)]
pub struct BlockExpression(pub Vec<Expression>);
pub struct BlockExpression(pub Vec<Spanned<Expression>>);

impl std::fmt::Display for BlockExpression {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -120,8 +126,8 @@ impl std::fmt::Display for BlockExpression {

#[derive(Debug, PartialEq, Eq)]
pub struct InvocationExpression {
    pub name: String,
    pub arguments: Vec<Expression>,
    pub name: Spanned<String>,
    pub arguments: Vec<Spanned<Expression>>,
}

impl std::fmt::Display for InvocationExpression {
@@ -142,7 +148,7 @@ impl std::fmt::Display for InvocationExpression {
#[derive(Debug, PartialEq, Eq)]
pub enum LiteralOrVariable {
    Literal(Literal),
    Variable(String),
    Variable(Spanned<String>),
}

impl std::fmt::Display for LiteralOrVariable {
@@ -157,7 +163,7 @@ impl std::fmt::Display for LiteralOrVariable {
#[derive(Debug, PartialEq, Eq)]
pub struct DeviceDeclarationExpression {
    /// any variable-like name
    pub name: String,
    pub name: Spanned<String>,
    /// The device port, ex. (db, d0, d1, d2, d3, d4, d5)
    pub device: String,
}
@@ -170,9 +176,9 @@ impl std::fmt::Display for DeviceDeclarationExpression {

#[derive(Debug, PartialEq, Eq)]
pub struct IfExpression {
    pub condition: Box<Expression>,
    pub body: BlockExpression,
    pub else_branch: Option<Box<Expression>>,
    pub condition: Box<Spanned<Expression>>,
    pub body: Spanned<BlockExpression>,
    pub else_branch: Option<Box<Spanned<Expression>>>,
}

impl std::fmt::Display for IfExpression {
@@ -187,7 +193,7 @@ impl std::fmt::Display for IfExpression {

#[derive(Debug, PartialEq, Eq)]
pub struct LoopExpression {
    pub body: BlockExpression,
    pub body: Spanned<BlockExpression>,
}

impl std::fmt::Display for LoopExpression {
@@ -198,7 +204,7 @@ impl std::fmt::Display for LoopExpression {

#[derive(Debug, PartialEq, Eq)]
pub struct WhileExpression {
    pub condition: Box<Expression>,
    pub condition: Box<Spanned<Expression>>,
    pub body: BlockExpression,
}

@@ -208,27 +214,88 @@ impl std::fmt::Display for WhileExpression {
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Span {
    pub start_line: usize,
    pub end_line: usize,
    pub start_col: usize,
    pub end_col: usize,
}

impl From<Span> for lsp_types::Range {
    fn from(value: Span) -> Self {
        Self {
            start: lsp_types::Position {
                line: value.start_line as u32,
                character: value.start_col as u32,
            },
            end: lsp_types::Position {
                line: value.end_line as u32,
                character: value.end_col as u32,
            },
        }
    }
}

impl From<&Span> for lsp_types::Range {
    fn from(value: &Span) -> Self {
        Self {
            start: lsp_types::Position {
                line: value.start_line as u32,
                character: value.start_col as u32,
            },
            end: lsp_types::Position {
                line: value.end_line as u32,
                character: value.end_col as u32,
            },
        }
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Spanned<T> {
    pub span: Span,
    pub node: T,
}

impl<T> std::fmt::Display for Spanned<T>
where
    T: std::fmt::Display,
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.node)
    }
}

impl<T> Deref for Spanned<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.node
    }
}

#[derive(Debug, PartialEq, Eq)]
pub enum Expression {
    Assignment(AssignmentExpression),
    Binary(BinaryExpression),
    Block(BlockExpression),
    Break,
    Continue,
    Declaration(String, Box<Expression>),
    DeviceDeclaration(DeviceDeclarationExpression),
    Function(FunctionExpression),
    If(IfExpression),
    Invocation(InvocationExpression),
    Literal(Literal),
    Logical(LogicalExpression),
    Loop(LoopExpression),
    Negation(Box<Expression>),
    Priority(Box<Expression>),
    Return(Box<Expression>),
    Syscall(SysCall),
    Variable(String),
    While(WhileExpression),
    Assignment(Spanned<AssignmentExpression>),
    Binary(Spanned<BinaryExpression>),
    Block(Spanned<BlockExpression>),
    Break(Span),
    Continue(Span),
    Declaration(Spanned<String>, Box<Spanned<Expression>>),
    DeviceDeclaration(Spanned<DeviceDeclarationExpression>),
    Function(Spanned<FunctionExpression>),
    If(Spanned<IfExpression>),
    Invocation(Spanned<InvocationExpression>),
    Literal(Spanned<Literal>),
    Logical(Spanned<LogicalExpression>),
    Loop(Spanned<LoopExpression>),
    Negation(Box<Spanned<Expression>>),
    Priority(Box<Spanned<Expression>>),
    Return(Box<Spanned<Expression>>),
    Syscall(Spanned<SysCall>),
    Variable(Spanned<String>),
    While(Spanned<WhileExpression>),
}

impl std::fmt::Display for Expression {
@@ -237,8 +304,8 @@ impl std::fmt::Display for Expression {
            Expression::Assignment(e) => write!(f, "{}", e),
            Expression::Binary(e) => write!(f, "{}", e),
            Expression::Block(e) => write!(f, "{}", e),
            Expression::Break => write!(f, "break"),
            Expression::Continue => write!(f, "continue"),
            Expression::Break(_) => write!(f, "break"),
            Expression::Continue(_) => write!(f, "continue"),
            Expression::Declaration(id, e) => write!(f, "(let {} = {})", id, e),
            Expression::DeviceDeclaration(e) => write!(f, "{}", e),
            Expression::Function(e) => write!(f, "{}", e),
@@ -6,6 +6,7 @@ edition = "2024"
[dependencies]
rust_decimal = { workspace = true }
quick-error = { workspace = true }
lsp-types = { workspace = true }

[dev-dependencies]
anyhow = { version = "^1" }
@@ -19,18 +19,51 @@ quick_error! {
            source(err)
        }
        NumberParseError(err: std::num::ParseIntError, line: usize, column: usize, original: String) {
            display("Number Parse Error: {}\nLine: {}, Column: {}", err, line, column)
            display("Number Parse Error: {}", err)
            source(err)
        }
        DecimalParseError(err: rust_decimal::Error, line: usize, column: usize, original: String) {
            display("Decimal Parse Error: {}\nLine: {}, Column: {}", err, line, column)
            display("Decimal Parse Error: {}", err)
            source(err)
        }
        UnknownSymbolError(char: char, line: usize, column: usize, original: String) {
            display("Unknown Symbol: {}\nLine: {}, Column: {}", char, line, column)
            display("Unknown Symbol: {}", char)
        }
        UnknownKeywordOrIdentifierError(val: String, line: usize, column: usize, original: String) {
            display("Unknown Keyword or Identifier: {}\nLine: {}, Column: {}", val, line, column)
            display("Unknown Keyword or Identifier: {}", val)
        }
    }
}

impl From<Error> for lsp_types::Diagnostic {
    fn from(value: Error) -> Self {
        use Error::*;
        use lsp_types::*;

        match value {
            IOError(e) => Diagnostic {
                message: e.to_string(),
                severity: Some(DiagnosticSeverity::ERROR),
                ..Default::default()
            },
            NumberParseError(_, l, c, ref og)
            | DecimalParseError(_, l, c, ref og)
            | UnknownSymbolError(_, l, c, ref og)
            | UnknownKeywordOrIdentifierError(_, l, c, ref og) => Diagnostic {
                range: Range {
                    start: Position {
                        line: l as u32,
                        character: c as u32,
                    },
                    end: Position {
                        line: l as u32,
                        character: (c + og.len()) as u32,
                    },
                },
                message: value.to_string(),
                severity: Some(DiagnosticSeverity::ERROR),
                ..Default::default()
            },
        }
    }
}
@@ -87,6 +87,21 @@ pub enum TokenType {
    EOF,
}

impl From<TokenType> for u32 {
    fn from(value: TokenType) -> Self {
        use TokenType::*;
        match value {
            String(_) => 1,
            Number(_) => 2,
            Boolean(_) => 3,
            Keyword(_) => 4,
            Identifier(_) => 5,
            Symbol(_) => 6,
            EOF => 0,
        }
    }
}

impl std::fmt::Display for TokenType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
185 rust_compiler/src/ffi/mod.rs Normal file
@@ -0,0 +1,185 @@
use compiler::Compiler;
use parser::Parser;
use safer_ffi::prelude::*;
use std::io::BufWriter;
use tokenizer::{
    token::{Token, TokenType},
    Tokenizer,
};

#[derive_ReprC]
#[repr(C)]
pub struct FfiToken {
    pub tooltip: safer_ffi::String,
    pub error: safer_ffi::String,
    pub column: i32,
    pub length: i32,
    pub token_kind: u32,
}

#[derive_ReprC]
#[repr(C)]
pub struct FfiRange {
    start_col: u32,
    end_col: u32,
    start_line: u32,
    end_line: u32,
}

impl From<lsp_types::Range> for FfiRange {
    fn from(value: lsp_types::Range) -> Self {
        Self {
            start_col: value.start.character,
            end_col: value.end.character,
            start_line: value.start.line,
            end_line: value.end.line,
        }
    }
}

#[derive_ReprC]
#[repr(C)]
pub struct FfiDiagnostic {
    message: safer_ffi::String,
    severity: i32,
    range: FfiRange,
}

impl From<lsp_types::Diagnostic> for FfiDiagnostic {
    fn from(value: lsp_types::Diagnostic) -> Self {
        use lsp_types::*;
        Self {
            message: value.message.into(),
            severity: match value.severity.unwrap_or(DiagnosticSeverity::ERROR) {
                DiagnosticSeverity::WARNING => 2,
                DiagnosticSeverity::INFORMATION => 3,
                DiagnosticSeverity::HINT => 4,
                _ => 1,
            },
            range: value.range.into(),
        }
    }
}

#[ffi_export]
pub fn free_ffi_token_vec(v: safer_ffi::Vec<FfiToken>) {
    drop(v)
}

#[ffi_export]
pub fn free_ffi_diagnostic_vec(v: safer_ffi::Vec<FfiDiagnostic>) {
    drop(v)
}

#[ffi_export]
pub fn free_string(s: safer_ffi::String) {
    drop(s)
}

/// C# handles strings as UTF16. We do NOT want to allocate that memory in C# because
/// we want to avoid GC. So we pass it to Rust to handle all the memory allocations.
/// This should result in the ability to compile many times without triggering frame drops
/// from the GC from a `GetBytes()` call on a string in C#.
#[ffi_export]
pub fn compile_from_string(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::String {
    let res = std::panic::catch_unwind(|| {
        let mut writer = BufWriter::new(Vec::new());

        let tokenizer = Tokenizer::from(String::from_utf16_lossy(input.as_slice()));
        let parser = Parser::new(tokenizer);
        let compiler = Compiler::new(parser, &mut writer, None);

        if !compiler.compile().is_empty() {
            return safer_ffi::String::EMPTY;
        }

        let Ok(compiled_vec) = writer.into_inner() else {
            return safer_ffi::String::EMPTY;
        };

        // Safety: I know the compiler only outputs valid utf8
        safer_ffi::String::from(unsafe { String::from_utf8_unchecked(compiled_vec) })
    });

    res.unwrap_or("".into())
}

#[ffi_export]
pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<FfiToken> {
    let res = std::panic::catch_unwind(|| {
        let tokenizer = Tokenizer::from(String::from_utf16_lossy(input.as_slice()));

        let mut tokens = Vec::new();

        for token in tokenizer {
            if matches!(
                token,
                Ok(Token {
                    token_type: TokenType::EOF,
                    ..
                })
            ) {
                continue;
            }
            match token {
                Err(ref e) => {
                    use tokenizer::Error::*;
                    let (err_str, col, og) = match e {
                        NumberParseError(_, _, col, og)
                        | DecimalParseError(_, _, col, og)
                        | UnknownSymbolError(_, _, col, og)
                        | UnknownKeywordOrIdentifierError(_, _, col, og) => {
                            (e.to_string(), col, og)
                        }
                        _ => continue,
                    };

                    tokens.push(FfiToken {
                        column: *col as i32,
                        error: err_str.into(),
                        tooltip: "".into(),
                        length: og.len() as i32,
                        token_kind: 0,
                    })
                }
                Ok(Token {
                    column,
                    original_string,
                    token_type,
                    ..
                }) => tokens.push(FfiToken {
                    column: column as i32,
                    error: "".into(),
                    length: (original_string.unwrap_or_default().len()) as i32,
                    token_kind: token_type.into(),
                    tooltip: "".into(),
                }),
            }
        }

        tokens.into()
    });

    res.unwrap_or(vec![].into())
}

#[ffi_export]
pub fn diagnose_source(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<FfiDiagnostic> {
    let res = std::panic::catch_unwind(|| {
        let mut writer = BufWriter::new(Vec::new());
        let tokenizer = Tokenizer::from(String::from_utf16_lossy(input.as_slice()));
        let compiler = Compiler::new(Parser::new(tokenizer), &mut writer, None);

        let diagnosis = compiler.compile();

        let mut result_vec: Vec<FfiDiagnostic> = Vec::with_capacity(diagnosis.len());

        for err in diagnosis {
            result_vec.push(lsp_types::Diagnostic::from(err).into());
        }

        result_vec.into()
    });

    res.unwrap_or(vec![].into())
}
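Because compile_from_string returns an empty string both when compilation fails and when a panic is caught, the managed side can only tell that compilation failed; diagnose_source is the call that carries the actual messages. A hedged C# sketch of how a caller might combine the two (variable names assumed, not part of this PR):

// Illustrative only: pair the boolean compile result with DiagnoseSource for detail.
if (!Marshal.CompileFromString(sourceText, out string ic10))
{
    var problems = Marshal.DiagnoseSource(sourceText);
    L.Info($"Compilation failed with {problems.Count} diagnostic(s).");
}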
@@ -1,107 +1,5 @@
use compiler::Compiler;
use parser::Parser;
use safer_ffi::prelude::*;
use std::io::BufWriter;
use tokenizer::{token::TokenType, Error as TokenizerError, Tokenizer};

#[derive_ReprC]
#[repr(C)]
pub struct FfiToken {
    pub tooltip: safer_ffi::String,
    pub error: safer_ffi::String,
    pub column: i32,
    pub length: i32,
    pub token_kind: u32,
}

fn map_token_kind(t: &TokenType) -> u32 {
    use TokenType::*;
    match t {
        Keyword(_) => 1,
        Identifier(_) => 2,
        Number(_) => 3,
        String(_) => 4,
        Boolean(_) => 5,
        Symbol(_) => 6,
        _ => 0,
    }
}

/// C# handles strings as UTF16. We do NOT want to allocate that memory in C# because
/// we want to avoid GC. So we pass it to Rust to handle all the memory allocations.
/// This should result in the ability to compile many times without triggering frame drops
/// from the GC from a `GetBytes()` call on a string in C#.
#[ffi_export]
pub fn compile_from_string(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::String {
    let mut writer = BufWriter::new(Vec::new());

    let tokenizer = Tokenizer::from(String::from_utf16_lossy(input.as_slice()));
    let parser = Parser::new(tokenizer);
    let compiler = Compiler::new(parser, &mut writer, None);

    if compiler.compile().is_err() {
        return safer_ffi::String::EMPTY;
    }

    let Ok(compiled_vec) = writer.into_inner() else {
        return safer_ffi::String::EMPTY;
    };

    // Safety: I know the compiler only outputs valid utf8
    safer_ffi::String::from(unsafe { String::from_utf8_unchecked(compiled_vec) })
}
/// C# handles strings as UTF16. We do NOT want to allocate that memory in C# because
/// we want to avoid GC. So we pass it to Rust to handle all the memory allocations.
/// This should result in the ability to tokenize many times without triggering frame drops
/// from the GC from a `GetBytes()` call on a string in C#.
#[ffi_export]
pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<FfiToken> {
    let tokenizer = Tokenizer::from(String::from_utf16_lossy(input.as_slice()));

    let mut tokens = Vec::<FfiToken>::new();

    for token in tokenizer {
        match token {
            Err(TokenizerError::NumberParseError(_, _, col, ref str))
            | Err(TokenizerError::UnknownSymbolError(_, _, col, ref str))
            | Err(TokenizerError::DecimalParseError(_, _, col, ref str))
            | Err(TokenizerError::UnknownKeywordOrIdentifierError(_, _, col, ref str)) => {
                tokens.push(FfiToken {
                    column: col as i32 - 1,
                    tooltip: "".into(),
                    length: str.len() as i32,
                    token_kind: 0,
                    // Safety: it's okay to unwrap the err here because we are matching on the `Err` variant
                    error: token.unwrap_err().to_string().into(),
                });
            }
            Err(_) => return safer_ffi::Vec::EMPTY,
            Ok(token) if !matches!(token.token_type, TokenType::EOF) => tokens.push(FfiToken {
                tooltip: "".into(),
                error: "".into(),
                length: token
                    .original_string
                    .map(|s| s.len() as i32)
                    .unwrap_or_default(),
                token_kind: map_token_kind(&token.token_type),
                column: token.column as i32 - 1,
            }),
            _ => {}
        }
    }

    tokens.into()
}

#[ffi_export]
pub fn free_ffi_token_vec(v: safer_ffi::Vec<FfiToken>) {
    drop(v)
}

#[ffi_export]
pub fn free_string(s: safer_ffi::String) {
    drop(s)
}
mod ffi;
pub(crate) mod lsp;

#[cfg(feature = "headers")]
pub fn generate_headers() -> std::io::Result<()> {
0 rust_compiler/src/lsp/mod.rs Normal file
@@ -1,3 +1,5 @@
#![allow(clippy::result_large_err)]

#[macro_use]
extern crate quick_error;

@@ -6,7 +8,7 @@ use compiler::Compiler;
use parser::Parser as ASTParser;
use std::{
    fs::File,
    io::{BufWriter, Read, Write},
    io::{stderr, BufWriter, Read, Write},
    path::PathBuf,
};
use tokenizer::{self, Tokenizer};
@@ -73,7 +75,22 @@ fn run_logic() -> Result<(), StationlangError> {

    let compiler = Compiler::new(parser, &mut writer, None);

    compiler.compile()?;
    let mut errors = compiler.compile();

    if !errors.is_empty() {
        let mut std_error = stderr();
        let last = errors.pop();
        let errors = errors.into_iter().map(StationlangError::from);

        std_error.write_all(b"Compilation error:\n")?;

        for err in errors {
            std_error.write_all(format!("{}\n", err).as_bytes())?;
        }

        return Err(StationlangError::from(last.unwrap()));
    }

    writer.flush()?;

    Ok(())