Compare commits

3 Commits: 2070c2e4ca ... b8521917b8

| SHA1 |
|---|
| b8521917b8 |
| 4ff0ff1b66 |
| 6dc4342ac3 |
@@ -207,4 +207,34 @@ public static unsafe class SlangExtensions
         Ffi.free_docs_vec(vec);
         return toReturn;
     }
+
+    public static unsafe List<Symbol> ToList(this Vec_FfiSymbolInfo_t vec)
+    {
+        var toReturn = new List<Symbol>((int)vec.len);
+
+        var currentPtr = vec.ptr;
+
+        for (int i = 0; i < (int)vec.len; i++)
+        {
+            var item = currentPtr[i];
+
+            toReturn.Add(
+                new Slang.Symbol
+                {
+                    Name = item.name.AsString(),
+                    Kind = (SymbolKind)item.kind_data.kind,
+                    Span = new Slang.Range
+                    {
+                        StartLine = item.span.start_line,
+                        StartCol = item.span.start_col,
+                        EndLine = item.span.end_line,
+                        EndCol = item.span.end_col,
+                    },
+                    Description = item.description.AsString(),
+                }
+            );
+        }
+
+        return toReturn;
+    }
 }
@@ -147,6 +147,51 @@ public unsafe partial class Ffi {
         slice_ref_uint16_t input);
 }
+
+[StructLayout(LayoutKind.Sequential, Size = 12)]
+public unsafe struct FfiSymbolKindData_t {
+    public UInt32 kind;
+
+    public UInt32 arg_count;
+
+    public UInt32 syscall_type;
+}
+
+[StructLayout(LayoutKind.Sequential, Size = 80)]
+public unsafe struct FfiSymbolInfo_t {
+    public Vec_uint8_t name;
+
+    public FfiSymbolKindData_t kind_data;
+
+    public FfiRange_t span;
+
+    public Vec_uint8_t description;
+}
+
+/// <summary>
+/// Same as [<c>Vec<T></c>][<c>rust::Vec</c>], but with guaranteed <c>#[repr(C)]</c> layout
+/// </summary>
+[StructLayout(LayoutKind.Sequential, Size = 24)]
+public unsafe struct Vec_FfiSymbolInfo_t {
+    public FfiSymbolInfo_t * ptr;
+
+    public UIntPtr len;
+
+    public UIntPtr cap;
+}
+
+[StructLayout(LayoutKind.Sequential, Size = 48)]
+public unsafe struct FfiDiagnosticsAndSymbols_t {
+    public Vec_FfiDiagnostic_t diagnostics;
+
+    public Vec_FfiSymbolInfo_t symbols;
+}
+
+public unsafe partial class Ffi {
+    [DllImport(RustLib, ExactSpelling = true)] public static unsafe extern
+    FfiDiagnosticsAndSymbols_t diagnose_source_with_symbols (
+        slice_ref_uint16_t input);
+}

 [StructLayout(LayoutKind.Sequential, Size = 48)]
 public unsafe struct FfiDocumentedItem_t {
     public Vec_uint8_t item_name;

@@ -184,6 +229,12 @@ public unsafe partial class Ffi {
         Vec_FfiDiagnostic_t v);
 }
+
+public unsafe partial class Ffi {
+    [DllImport(RustLib, ExactSpelling = true)] public static unsafe extern
+    void free_ffi_diagnostics_and_symbols (
+        FfiDiagnosticsAndSymbols_t v);
+}

 [StructLayout(LayoutKind.Sequential, Size = 64)]
 public unsafe struct FfiToken_t {
     public Vec_uint8_t tooltip;
@@ -171,18 +171,17 @@ public class SlangFormatter : ICodeFormatter
             return;

         // Running this potentially CPU intensive work on a background thread.
-        var dict = await Task.Run(
+        var (diagnostics, symbols) = await Task.Run(
             () =>
             {
-                return Marshal
-                    .DiagnoseSource(inputSrc)
-                    .GroupBy(d => d.Range.StartLine)
-                    .ToDictionary(g => g.Key);
+                return Marshal.DiagnoseSourceWithSymbols(inputSrc);
             },
             cancellationToken
         );

-        ApplyDiagnostics(dict);
+        var dict = diagnostics.GroupBy(d => d.Range.StartLine).ToDictionary(g => g.Key);
+
+        ApplyDiagnosticsAndSymbols(dict, symbols);

         // If we have valid code, update the IC10 output
         if (dict.Count > 0)

@@ -266,11 +265,11 @@ public class SlangFormatter : ICodeFormatter
     }

     /// <summary>
-    /// Takes diagnostics from the Rust FFI compiler and applies it as semantic tokens to the
+    /// Takes diagnostics and symbols from the Rust FFI compiler and applies them as semantic tokens to the
     /// source in this editor.
     /// This runs on the Main Thread
     /// </summary>
-    private void ApplyDiagnostics(Dictionary<uint, IGrouping<uint, Diagnostic>> dict)
+    private void ApplyDiagnosticsAndSymbols(Dictionary<uint, IGrouping<uint, Diagnostic>> dict, List<Symbol> symbols)
     {
         HashSet<uint> linesToRefresh;

@@ -289,6 +288,12 @@ public class SlangFormatter : ICodeFormatter
         {
             linesToRefresh = new HashSet<uint>(dict.Keys);
             linesToRefresh.UnionWith(_linesWithErrors);
+
+            // Also add lines with symbols that may have been modified
+            foreach (var symbol in symbols)
+            {
+                linesToRefresh.Add(symbol.Span.StartLine);
+            }
         }

         _lastLineCount = this.Lines.Count;

@@ -328,9 +333,49 @@ public class SlangFormatter : ICodeFormatter
                 }
             }
+
+            // 3. Add symbol tooltips for symbols on this line
+            foreach (var symbol in symbols)
+            {
+                if (symbol.Span.StartLine == lineIndex)
+                {
+                    var column = (int)symbol.Span.StartCol;
+                    var length = Math.Max(1, (int)(symbol.Span.EndCol - symbol.Span.StartCol));
+
+                    // If there's already a token at this position (from syntax highlighting), use it
+                    // Otherwise, create a new token for the symbol
+                    if (allTokensDict.ContainsKey(column))
+                    {
+                        // Update existing token with symbol tooltip
+                        var existingToken = allTokensDict[column];
+                        allTokensDict[column] = new SemanticToken(
+                            line: existingToken.Line,
+                            column: existingToken.Column,
+                            length: existingToken.Length,
+                            type: existingToken.Type,
+                            style: existingToken.Style,
+                            data: symbol.Description, // Use symbol description as tooltip
+                            isError: existingToken.IsError
+                        );
+                    }
+                    else
+                    {
+                        // Create new token for symbol
+                        allTokensDict[column] = new SemanticToken(
+                            line: (int)lineIndex,
+                            column,
+                            length,
+                            type: 0,
+                            style: ColorIdentifier,
+                            data: symbol.Description,
+                            isError: false
+                        );
+                    }
+                }
+            }

             var allTokens = allTokensDict.Values.ToList();

-            // 3. Update the line (this clears existing tokens and uses the list we just built)
+            // 4. Update the line (this clears existing tokens and uses the list we just built)
             line.Update(allTokens);

             ReattachMetadata(line, allTokens);

@@ -339,6 +384,16 @@ public class SlangFormatter : ICodeFormatter
         _linesWithErrors = new HashSet<uint>(dict.Keys);
     }

+    /// <summary>
+    /// Takes diagnostics from the Rust FFI compiler and applies it as semantic tokens to the
+    /// source in this editor.
+    /// This runs on the Main Thread
+    /// </summary>
+    private void ApplyDiagnostics(Dictionary<uint, IGrouping<uint, Diagnostic>> dict)
+    {
+        ApplyDiagnosticsAndSymbols(dict, new List<Symbol>());
+    }
+
     // Helper to map SemanticToken data (tooltips/errors) back to the tokens in the line
     private void ReattachMetadata(StyledLine line, List<SemanticToken> semanticTokens)
     {
@@ -47,6 +47,33 @@ public struct SourceMapEntry
     }
 }

+public struct Symbol
+{
+    public string Name;
+    public Range Span;
+    public SymbolKind Kind;
+    public string Description;
+
+    public override string ToString()
+    {
+        return $"{Kind}: {Name} at {Span}";
+    }
+}
+
+public enum SymbolKind
+{
+    Function = 0,
+    Syscall = 1,
+    Variable = 2,
+}
+
+public struct SymbolData
+{
+    public uint Kind;
+    public uint ArgCount;
+    public uint SyscallType; // 0=System, 1=Math
+}
+
 public static class Marshal
 {
     private static IntPtr _libraryHandle = IntPtr.Zero;

@@ -164,6 +191,59 @@ public static class Marshal
         }
     }

+    public static unsafe (List<Diagnostic>, List<Symbol>) DiagnoseSourceWithSymbols(string inputString)
+    {
+        if (string.IsNullOrEmpty(inputString) || !EnsureLibLoaded())
+        {
+            return (new(), new());
+        }
+
+        fixed (char* ptrInput = inputString)
+        {
+            var input = new slice_ref_uint16_t
+            {
+                ptr = (ushort*)ptrInput,
+                len = (UIntPtr)inputString.Length,
+            };
+
+            var result = Ffi.diagnose_source_with_symbols(input);
+
+            // Convert diagnostics
+            var diagnostics = result.diagnostics.ToList();
+
+            // Convert symbols
+            var symbols = new List<Symbol>();
+            var symbolPtr = result.symbols.ptr;
+            var symbolCount = (int)result.symbols.len;
+
+            for (int i = 0; i < symbolCount; i++)
+            {
+                var ffiSymbol = symbolPtr[i];
+                var kind = (SymbolKind)ffiSymbol.kind_data.kind;
+
+                // Use the actual description from the FFI (includes doc comments and syscall docs)
+                var description = ffiSymbol.description.AsString();
+
+                symbols.Add(new Symbol
+                {
+                    Name = ffiSymbol.name.AsString(),
+                    Kind = kind,
+                    Span = new Range(
+                        ffiSymbol.span.start_line,
+                        ffiSymbol.span.start_col,
+                        ffiSymbol.span.end_line,
+                        ffiSymbol.span.end_col
+                    ),
+                    Description = description,
+                });
+            }
+
+            Ffi.free_ffi_diagnostics_and_symbols(result);
+
+            return (diagnostics, symbols);
+        }
+    }
+
     public static unsafe List<SemanticToken> TokenizeLine(string inputString)
     {
         if (string.IsNullOrEmpty(inputString) || !EnsureLibLoaded())
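The call pattern above — copy the diagnostics and symbols out of the returned `FfiDiagnosticsAndSymbols_t`, then hand the whole value back to `Ffi.free_ffi_diagnostics_and_symbols` — implies that the Rust side owns those vectors and must be the one to drop them. The diff only shows the C# binding; as a hedged sketch of what the matching Rust export could look like with safer-ffi (the struct fields here are placeholders, not the crate's real definitions):

```rust
use safer_ffi::prelude::*;

// Stand-in struct: the real FfiDiagnosticsAndSymbols lives in the crate's FFI
// module (only its C# mirror appears in this diff), so the element types here
// are placeholders chosen just to make the sketch self-contained.
#[derive_ReprC]
#[repr(C)]
pub struct FfiDiagnosticsAndSymbols {
    pub diagnostics: repr_c::Vec<u8>,
    pub symbols: repr_c::Vec<u8>,
}

// Hypothetical free export matching the C# extern binding above: taking the
// struct by value moves ownership back into Rust, and dropping it releases
// both vectors that were allocated for the call.
#[ffi_export]
fn free_ffi_diagnostics_and_symbols(v: FfiDiagnosticsAndSymbols) {
    drop(v);
}
```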
rust_compiler/Cargo.lock (generated, 62 lines changed)

Dependency updates in the generated lockfile:

- syn: 2.0.111 → 2.0.112 (checksum 390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87 → 21f182278bf2d2bcb3c88b1b08a37df029d71ce3d3ae26168e3c653b213b99d4); every "syn 2.0.111" dependency entry in the lockfile is updated to match.
- clap: 4.5.53 → 4.5.54 (checksum c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8 → c6e6ff9dcd79cff5cd969a17a545d79e84ab086e444102a591e288a8aa3ce394).
- clap_builder: 4.5.53 → 4.5.54 (checksum d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00 → fa42cf4d2b7a41bc8f663a7cab4031ebafa1bf3875705bfaf8466dc60ab52c00).
- itoa: 1.0.15 → 1.0.17 (checksum 4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c → 92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2).
- proc-macro2: 1.0.103 → 1.0.104 (checksum 5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8 → 9695f8df41bb4f3d222c95a67532365f569318332d03d5f3f67f37b20e6ebdf0).
- serde_json: 1.0.145 → 1.0.148 (checksum 402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c → 3084b546a1dd6289475996f182a22aba973866ea8e8b02c51d9f46b1336a22da); its "ryu" dependency is replaced by "zmij".
- ryu 1.0.20 is removed from the lockfile (checksum 28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f).
- zmij 1.0.8 is added (checksum 317f17ff091ac4515f17cc7a190d2769a8c9a96d227de5d64b500b01cda8f2cd).
- toml_datetime: 0.7.4+spec-1.0.0 → 0.7.5+spec-1.1.0 (checksum fe3cea6b2aa3b910092f6abd4053ea464fab5f9c170ba5e9a6aead16ec4af2b6 → 92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347).
- toml_parser: 1.0.5+spec-1.0.0 → 1.0.6+spec-1.1.0 (checksum 4c03bee5ce3696f31250db0bbaff18bc43301ce0e8db2ed1f07cbb2acf89984c → a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44).
@@ -1,6 +1,8 @@
+pub mod symbols;
 #[cfg(test)]
 mod test;
 mod v1;
 mod variable_manager;

+pub use symbols::{CompilationMetadata, SymbolInfo, SymbolKind, SyscallType};
 pub use v1::{CompilationResult, Compiler, CompilerConfig, Error};
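With `CompilationMetadata` and friends re-exported, callers outside the compiler crate can read the symbol table straight off a compile. A rough consumer sketch, assuming the library crate is named `compiler` and is constructed the same way as the `compile!(metadata ...)` test macro added later in this diff (both assumptions, not something this hunk shows):

```rust
// Hedged sketch: walk the symbols gathered during compilation.
// The `compiler`, `parser`, and `tokenizer` crate names are assumptions based
// on the construction path used by the test macro elsewhere in this change.
use compiler::{Compiler, SymbolKind};

fn dump_symbols(source: &str) {
    let compiler = Compiler::new(
        parser::Parser::new(tokenizer::Tokenizer::from(source)),
        None,
    );
    let result = compiler.compile();

    for sym in &result.metadata.symbols {
        let kind = match &sym.kind {
            SymbolKind::Function { .. } => "fn",
            SymbolKind::Syscall { .. } => "syscall",
            SymbolKind::Variable { .. } => "var",
        };
        // `description` carries the doc comment (or built-in syscall docs) when present.
        println!(
            "{kind} {} @ {:?}: {}",
            sym.name,
            sym.span,
            sym.description.as_deref().unwrap_or("<no docs>")
        );
    }
}
```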
rust_compiler/libs/compiler/src/symbols.rs (new file, 343 lines)

@@ -0,0 +1,343 @@
use helpers::Span;
use std::borrow::Cow;

/// Represents a symbol (function, syscall, variable, etc.) that can be referenced in code.
/// Designed to be LSP-compatible for easy integration with language servers.
#[derive(Debug, Clone)]
pub struct SymbolInfo<'a> {
    /// The name of the symbol
    pub name: Cow<'a, str>,
    /// The kind of symbol and associated metadata
    pub kind: SymbolKind<'a>,
    /// The source location of this symbol (for IDE features)
    pub span: Option<Span>,
    /// Optional description for tooltips and documentation
    pub description: Option<Cow<'a, str>>,
}

impl<'a> SymbolInfo<'a> {
    /// Converts to an LSP SymbolInformation for protocol compatibility.
    pub fn to_lsp_symbol_information(&self, uri: lsp_types::Uri) -> lsp_types::SymbolInformation {
        lsp_types::SymbolInformation {
            name: self.name.to_string(),
            kind: self.kind.to_lsp_symbol_kind(),
            #[allow(deprecated)]
            deprecated: None,
            location: lsp_types::Location {
                uri,
                range: self.span.as_ref().map(|s| (*s).into()).unwrap_or_default(),
            },
            container_name: None,
            tags: None,
        }
    }

    /// Converts to an LSP CompletionItem for autocomplete.
    pub fn to_lsp_completion_item(&self) -> lsp_types::CompletionItem {
        lsp_types::CompletionItem {
            label: self.name.to_string(),
            kind: Some(self.kind.to_lsp_completion_kind()),
            documentation: self
                .description
                .as_ref()
                .map(|d| lsp_types::Documentation::String(d.to_string())),
            detail: Some(self.kind.detail_string()),
            ..Default::default()
        }
    }
}

/// Discriminates between different kinds of symbols.
#[derive(Debug, Clone)]
pub enum SymbolKind<'a> {
    /// A user-defined function
    Function {
        /// Names of parameters in order
        parameters: Vec<Cow<'a, str>>,
        /// Type hint for the return type (if applicable)
        return_type: Option<Cow<'a, str>>,
    },
    /// A system or math syscall
    Syscall {
        /// Whether it's a System or Math syscall
        syscall_type: SyscallType,
        /// Number of expected arguments
        argument_count: usize,
    },
    /// A variable declaration
    Variable {
        /// Type hint for the variable (if applicable)
        type_hint: Option<Cow<'a, str>>,
    },
}

impl<'a> SymbolKind<'a> {
    /// Converts to LSP SymbolKind for protocol compatibility.
    fn to_lsp_symbol_kind(&self) -> lsp_types::SymbolKind {
        match self {
            SymbolKind::Function { .. } => lsp_types::SymbolKind::FUNCTION,
            SymbolKind::Syscall { .. } => lsp_types::SymbolKind::FUNCTION, // Syscalls are function-like
            SymbolKind::Variable { .. } => lsp_types::SymbolKind::VARIABLE,
        }
    }

    /// Converts to LSP CompletionItemKind for autocomplete filtering.
    fn to_lsp_completion_kind(&self) -> lsp_types::CompletionItemKind {
        match self {
            SymbolKind::Function { .. } => lsp_types::CompletionItemKind::FUNCTION,
            SymbolKind::Syscall { .. } => lsp_types::CompletionItemKind::FUNCTION,
            SymbolKind::Variable { .. } => lsp_types::CompletionItemKind::VARIABLE,
        }
    }

    /// Returns a human-readable detail string for display in IDEs.
    fn detail_string(&self) -> String {
        match self {
            SymbolKind::Function {
                parameters,
                return_type,
            } => {
                let params = parameters
                    .iter()
                    .map(|p| p.to_string())
                    .collect::<Vec<_>>()
                    .join(", ");
                let ret = return_type
                    .as_ref()
                    .map(|t| format!(" -> {}", t))
                    .unwrap_or_default();
                format!("fn({}){}", params, ret)
            }
            SymbolKind::Syscall {
                syscall_type,
                argument_count,
            } => {
                format!(
                    "{}(... {} args)",
                    match syscall_type {
                        SyscallType::System => "syscall",
                        SyscallType::Math => "math",
                    },
                    argument_count
                )
            }
            SymbolKind::Variable { type_hint } => type_hint
                .as_ref()
                .map(|t| t.to_string())
                .unwrap_or_else(|| "var".to_string()),
        }
    }
}

/// Distinguishes between System and Math syscalls.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SyscallType {
    System,
    Math,
}

/// Metadata collected during compilation, including all referenced symbols.
#[derive(Debug, Default)]
pub struct CompilationMetadata<'a> {
    /// All symbols encountered during compilation (functions, syscalls, variables)
    pub symbols: Vec<SymbolInfo<'a>>,
}

impl<'a> CompilationMetadata<'a> {
    /// Creates a new empty compilation metadata.
    pub fn new() -> Self {
        Self {
            symbols: Vec::new(),
        }
    }

    /// Adds a symbol to the metadata.
    pub fn add_symbol(&mut self, symbol: SymbolInfo<'a>) {
        self.symbols.push(symbol);
    }

    /// Adds a function symbol.
    pub fn add_function(
        &mut self,
        name: Cow<'a, str>,
        parameters: Vec<Cow<'a, str>>,
        span: Option<Span>,
    ) {
        self.add_function_with_doc(name, parameters, span, None);
    }

    /// Adds a function symbol with optional doc comment.
    pub fn add_function_with_doc(
        &mut self,
        name: Cow<'a, str>,
        parameters: Vec<Cow<'a, str>>,
        span: Option<Span>,
        description: Option<Cow<'a, str>>,
    ) {
        self.add_symbol(SymbolInfo {
            name,
            kind: SymbolKind::Function {
                parameters,
                return_type: None,
            },
            span,
            description,
        });
    }

    /// Adds a syscall symbol.
    pub fn add_syscall(
        &mut self,
        name: Cow<'a, str>,
        syscall_type: SyscallType,
        argument_count: usize,
        span: Option<Span>,
    ) {
        self.add_syscall_with_doc(name, syscall_type, argument_count, span, None);
    }

    /// Adds a syscall symbol with optional doc comment.
    pub fn add_syscall_with_doc(
        &mut self,
        name: Cow<'a, str>,
        syscall_type: SyscallType,
        argument_count: usize,
        span: Option<Span>,
        description: Option<Cow<'a, str>>,
    ) {
        self.add_symbol(SymbolInfo {
            name,
            kind: SymbolKind::Syscall {
                syscall_type,
                argument_count,
            },
            span,
            description,
        });
    }

    /// Adds a variable symbol.
    pub fn add_variable(&mut self, name: Cow<'a, str>, span: Option<Span>) {
        self.add_variable_with_doc(name, span, None);
    }

    /// Adds a variable symbol with optional doc comment.
    pub fn add_variable_with_doc(
        &mut self,
        name: Cow<'a, str>,
        span: Option<Span>,
        description: Option<Cow<'a, str>>,
    ) {
        self.add_symbol(SymbolInfo {
            name,
            kind: SymbolKind::Variable { type_hint: None },
            span,
            description,
        });
    }

    /// Returns all symbols of a specific kind.
    pub fn symbols_of_kind(&self, kind: &str) -> Vec<&SymbolInfo<'a>> {
        self.symbols
            .iter()
            .filter(|sym| match (&sym.kind, kind) {
                (SymbolKind::Function { .. }, "function") => true,
                (SymbolKind::Syscall { .. }, "syscall") => true,
                (SymbolKind::Variable { .. }, "variable") => true,
                _ => false,
            })
            .collect()
    }

    /// Converts all symbols to LSP SymbolInformation for protocol compatibility.
    pub fn to_lsp_symbols(&self, uri: lsp_types::Uri) -> Vec<lsp_types::SymbolInformation> {
        self.symbols
            .iter()
            .map(|sym| sym.to_lsp_symbol_information(uri.clone()))
            .collect()
    }

    /// Converts all symbols to LSP CompletionItems for autocomplete.
    pub fn to_lsp_completion_items(&self) -> Vec<lsp_types::CompletionItem> {
        self.symbols
            .iter()
            .map(|sym| sym.to_lsp_completion_item())
            .collect()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_metadata_creation() {
        let metadata = CompilationMetadata::new();
        assert!(metadata.symbols.is_empty());
    }

    #[test]
    fn test_add_function_symbol() {
        let mut metadata = CompilationMetadata::new();
        metadata.add_function("test_func".into(), vec!["x".into(), "y".into()], None);
        assert_eq!(metadata.symbols.len(), 1);
        assert_eq!(metadata.symbols[0].name, "test_func");
    }

    #[test]
    fn test_add_syscall_symbol() {
        let mut metadata = CompilationMetadata::new();
        metadata.add_syscall("hash".into(), SyscallType::System, 1, None);
        assert_eq!(metadata.symbols.len(), 1);
        assert_eq!(metadata.symbols[0].name, "hash");
    }

    #[test]
    fn test_symbols_of_kind() {
        let mut metadata = CompilationMetadata::new();
        metadata.add_function("func1".into(), vec![], None);
        metadata.add_syscall("hash".into(), SyscallType::System, 1, None);
        metadata.add_variable("x".into(), None);

        let functions = metadata.symbols_of_kind("function");
        assert_eq!(functions.len(), 1);

        let syscalls = metadata.symbols_of_kind("syscall");
        assert_eq!(syscalls.len(), 1);

        let variables = metadata.symbols_of_kind("variable");
        assert_eq!(variables.len(), 1);
    }

    #[test]
    fn test_lsp_completion_items() {
        let mut metadata = CompilationMetadata::new();
        metadata.add_function("test_func".into(), vec![], None);
        metadata.add_syscall("hash".into(), SyscallType::System, 1, None);
        metadata.add_variable("x".into(), None);

        let completions = metadata.to_lsp_completion_items();
        assert_eq!(completions.len(), 3);

        // Verify function
        assert_eq!(completions[0].label, "test_func");
        assert_eq!(
            completions[0].kind,
            Some(lsp_types::CompletionItemKind::FUNCTION)
        );

        // Verify syscall
        assert_eq!(completions[1].label, "hash");
        assert_eq!(
            completions[1].kind,
            Some(lsp_types::CompletionItemKind::FUNCTION)
        );

        // Verify variable
        assert_eq!(completions[2].label, "x");
        assert_eq!(
            completions[2].kind,
            Some(lsp_types::CompletionItemKind::VARIABLE)
        );
    }
}
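The C# side of this change expects plain integers for a symbol's kind (0 = Function, 1 = Syscall, 2 = Variable) and for the syscall flavour (0 = System, 1 = Math), so somewhere in the FFI layer this enum is flattened into that encoding. A minimal sketch of that mapping, written against the types above — the helper name is hypothetical, and the real conversion code is not part of this diff:

```rust
// Hypothetical helper: flatten a SymbolKind into the (kind, arg_count, syscall_type)
// triple that the repr(C) FfiSymbolKindData struct carries across the boundary.
fn kind_to_ffi(kind: &SymbolKind<'_>) -> (u32, u32, u32) {
    match kind {
        // 0 = Function; arg_count is the number of declared parameters.
        SymbolKind::Function { parameters, .. } => (0, parameters.len() as u32, 0),
        // 1 = Syscall; syscall_type: 0 = System, 1 = Math.
        SymbolKind::Syscall {
            syscall_type,
            argument_count,
        } => (
            1,
            *argument_count as u32,
            match syscall_type {
                SyscallType::System => 0,
                SyscallType::Math => 1,
            },
        ),
        // 2 = Variable; the remaining two fields are unused.
        SymbolKind::Variable { .. } => (2, 0, 0),
    }
}
```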
@@ -47,6 +47,15 @@ macro_rules! compile {
         output,
         }
     }};
+
+    (metadata $source:expr) => {{
+        let compiler = crate::Compiler::new(
+            parser::Parser::new(tokenizer::Tokenizer::from($source)),
+            None,
+        );
+        let res = compiler.compile();
+        res.metadata
+    }};
 }
 mod binary_expression;
 mod branching;

@@ -61,5 +70,6 @@ mod loops;
 mod math_syscall;
 mod negation_priority;
 mod scoping;
+mod symbol_documentation;
 mod syscall;
 mod tuple_literals;
rust_compiler/libs/compiler/src/test/symbol_documentation.rs (new file, 120 lines)

@@ -0,0 +1,120 @@
#[cfg(test)]
mod test {
    use anyhow::Result;

    #[test]
    fn test_variable_doc_comment() -> Result<()> {
        let metadata = compile!(metadata "/// this is a documented variable\nlet myVar = 42;");

        let var_symbol = metadata
            .symbols
            .iter()
            .find(|s| s.name == "myVar")
            .expect("myVar symbol not found");

        assert_eq!(
            var_symbol.description.as_ref().map(|d| d.as_ref()),
            Some("this is a documented variable")
        );
        Ok(())
    }

    #[test]
    fn test_const_doc_comment() -> Result<()> {
        let metadata = compile!(metadata "/// const documentation\nconst myConst = 100;");

        let const_symbol = metadata
            .symbols
            .iter()
            .find(|s| s.name == "myConst")
            .expect("myConst symbol not found");

        assert_eq!(
            const_symbol.description.as_ref().map(|d| d.as_ref()),
            Some("const documentation")
        );
        Ok(())
    }

    #[test]
    fn test_device_doc_comment() -> Result<()> {
        let metadata = compile!(metadata "/// device documentation\ndevice myDevice = \"d0\";");

        let device_symbol = metadata
            .symbols
            .iter()
            .find(|s| s.name == "myDevice")
            .expect("myDevice symbol not found");

        assert_eq!(
            device_symbol.description.as_ref().map(|d| d.as_ref()),
            Some("device documentation")
        );
        Ok(())
    }

    #[test]
    fn test_function_doc_comment() -> Result<()> {
        let metadata = compile!(metadata "/// function documentation\nfn test() { }");

        let fn_symbol = metadata
            .symbols
            .iter()
            .find(|s| s.name == "test")
            .expect("test symbol not found");

        assert_eq!(
            fn_symbol.description.as_ref().map(|d| d.as_ref()),
            Some("function documentation")
        );
        Ok(())
    }

    #[test]
    fn test_syscall_documentation() -> Result<()> {
        let metadata = compile!(metadata "fn test() { clr(d0); }");

        let clr_symbol = metadata
            .symbols
            .iter()
            .find(|s| s.name == "clr")
            .expect("clr syscall not found");

        // clr should have its built-in documentation
        assert!(clr_symbol.description.is_some());
        assert!(!clr_symbol.description.as_ref().unwrap().is_empty());
        Ok(())
    }

    #[test]
    fn test_variable_references_have_tooltips() -> Result<()> {
        let metadata = compile!(metadata "/// documented variable\nlet myVar = 5;\nlet x = myVar + 2;\nmyVar = 10;");

        // Count how many times 'myVar' appears in symbols
        let myvar_symbols: Vec<_> = metadata
            .symbols
            .iter()
            .filter(|s| s.name == "myVar")
            .collect();

        // We should have at least 2: declaration + 1 reference (in myVar + 2)
        // The assignment `myVar = 10` is a write, not a read, so doesn't create a reference
        assert!(
            myvar_symbols.len() >= 2,
            "Expected at least 2 'myVar' symbols (declaration + reference), got {}",
            myvar_symbols.len()
        );

        // All should have the same description
        let expected_desc = "documented variable";
        for sym in &myvar_symbols {
            assert_eq!(
                sym.description.as_ref().map(|d| d.as_ref()),
                Some(expected_desc),
                "Symbol description mismatch at {:?}",
                sym.span
            );
        }
        Ok(())
    }
}
@@ -145,6 +145,7 @@ struct CompileLocation<'a>
 pub struct CompilationResult<'a> {
     pub errors: Vec<Error<'a>>,
     pub instructions: Instructions<'a>,
+    pub metadata: crate::CompilationMetadata<'a>,
 }

 /// Metadata for the currently compiling function

@@ -202,6 +203,8 @@ pub struct Compiler<'a> {
     pub source_map: HashMap<usize, Vec<Span>>,
     /// Accumulative errors from the compilation process
     pub errors: Vec<Error<'a>>,
+    /// Metadata about symbols encountered during compilation
+    pub metadata: crate::CompilationMetadata<'a>,
 }

 impl<'a> Compiler<'a> {

@@ -219,6 +222,7 @@ impl<'a> Compiler<'a> {
             loop_stack: Vec::new(),
             source_map: HashMap::new(),
             errors: Vec::new(),
+            metadata: crate::CompilationMetadata::new(),
         }
     }

@@ -237,6 +241,7 @@ impl<'a> Compiler<'a> {
                 return CompilationResult {
                     errors: self.errors,
                     instructions: self.instructions,
+                    metadata: self.metadata,
                 };
             }
             Err(e) => {

@@ -245,6 +250,7 @@ impl<'a> Compiler<'a> {
                 return CompilationResult {
                     errors: self.errors,
                     instructions: self.instructions,
+                    metadata: self.metadata,
                 };
             }
         };

@@ -270,6 +276,7 @@ impl<'a> Compiler<'a> {
             return CompilationResult {
                 errors: self.errors,
                 instructions: self.instructions,
+                metadata: self.metadata,
             };
         }

@@ -283,6 +290,7 @@ impl<'a> Compiler<'a> {
         CompilationResult {
             errors: self.errors,
             instructions: self.instructions,
+            metadata: self.metadata,
         }
     }

@@ -457,10 +465,23 @@ impl<'a> Compiler<'a> {
             },
             Expression::Variable(name) => {
                 match scope.get_location_of(&name.node, Some(name.span)) {
-                    Ok(loc) => Ok(Some(CompileLocation {
-                        location: loc,
-                        temp_name: None, // User variable, do not free
-                    })),
+                    Ok(loc) => {
+                        // Track this variable reference in metadata (for tooltips on all usages, not just declaration)
+                        let doc_comment: Option<Cow<'a, str>> = self
+                            .parser
+                            .get_declaration_doc(name.node.as_ref())
+                            .map(|s| Cow::Owned(s) as Cow<'a, str>);
+                        self.metadata.add_variable_with_doc(
+                            name.node.clone(),
+                            Some(name.span),
+                            doc_comment,
+                        );
+
+                        Ok(Some(CompileLocation {
+                            location: loc,
+                            temp_name: None, // User variable, do not free
+                        }))
+                    }
                     Err(_) => {
                         // fallback, check devices
                         if let Some(device) = self.devices.get(&name.node) {

@@ -644,6 +665,14 @@ impl<'a> Compiler<'a> {
         if let Expression::Variable(ref name) = expr.node
             && let Some(device_id) = self.devices.get(&name.node)
         {
+            // Track this device reference in metadata (for tooltips on all usages, not just declaration)
+            let doc_comment = self
+                .parser
+                .get_declaration_doc(name.node.as_ref())
+                .map(Cow::Owned);
+            self.metadata
+                .add_variable_with_doc(name.node.clone(), Some(expr.span), doc_comment);
+
             return Ok((Operand::Device(device_id.clone()), None));
         }

@@ -696,6 +725,14 @@ impl<'a> Compiler<'a> {
         let name_str = var_name.node;
         let name_span = var_name.span;

+        // Track the variable in metadata
+        let doc_comment = self
+            .parser
+            .get_declaration_doc(name_str.as_ref())
+            .map(Cow::Owned);
+        self.metadata
+            .add_variable_with_doc(name_str.clone(), Some(name_span), doc_comment);
+
         // optimization. Check for a negated numeric literal (including nested negations)
         // e.g., -5, -(-5), -(-(5)), etc.
         if let Some(num) = self.try_fold_negation(&expr.node) {

@@ -1055,6 +1092,17 @@ impl<'a> Compiler<'a> {
             value: const_value,
         } = expr;

+        // Track the const variable in metadata
+        let doc_comment = self
+            .parser
+            .get_declaration_doc(const_name.node.as_ref())
+            .map(Cow::Owned);
+        self.metadata.add_variable_with_doc(
+            const_name.node.clone(),
+            Some(const_name.span),
+            doc_comment,
+        );
+
         // check for a hash expression or a literal
         let value = match const_value {
             LiteralOr::Or(Spanned {

@@ -1478,6 +1526,29 @@ impl<'a> Compiler<'a> {
     ) -> Result<(), Error<'a>> {
         let TupleDeclarationExpression { names, value } = tuple_decl;

+        // Track each variable in the tuple declaration
+        // Get doc for the first variable
+        let first_var_name = names
+            .iter()
+            .find(|n| n.node.as_ref() != "_")
+            .map(|n| n.node.to_string());
+        let doc_comment = first_var_name
+            .as_ref()
+            .and_then(|name| self.parser.get_declaration_doc(name))
+            .map(Cow::Owned);
+
+        for (i, name_spanned) in names.iter().enumerate() {
+            if name_spanned.node.as_ref() != "_" {
+                // Only attach doc comment to the first variable
+                let comment = if i == 0 { doc_comment.clone() } else { None };
+                self.metadata.add_variable_with_doc(
+                    name_spanned.node.clone(),
+                    Some(name_spanned.span),
+                    comment,
+                );
+            }
+        }
+
         match value.node {
             Expression::Invocation(invoke_expr) => {
                 // Execute the function call - tuple values will be on the stack

@@ -1916,6 +1987,17 @@ impl<'a> Compiler<'a> {
         &mut self,
         expr: DeviceDeclarationExpression<'a>,
     ) -> Result<(), Error<'a>> {
+        // Track the device declaration in metadata
+        let doc_comment = self
+            .parser
+            .get_declaration_doc(expr.name.node.as_ref())
+            .map(Cow::Owned);
+        self.metadata.add_variable_with_doc(
+            expr.name.node.clone(),
+            Some(expr.name.span),
+            doc_comment,
+        );
+
         if self.devices.contains_key(&expr.name.node) {
             self.errors.push(Error::DuplicateIdentifier(
                 expr.name.node.clone(),

@@ -2920,6 +3002,17 @@ impl<'a> Compiler<'a> {
         span: Span,
         scope: &mut VariableScope<'a, '_>,
     ) -> Result<Option<CompileLocation<'a>>, Error<'a>> {
+        // Track the syscall in metadata
+        let syscall_name = expr.name();
+        let doc = expr.docs().into();
+        self.metadata.add_syscall_with_doc(
+            Cow::Borrowed(syscall_name),
+            crate::SyscallType::System,
+            expr.arg_count(),
+            Some(span),
+            Some(doc),
+        );
+
         macro_rules! cleanup {
             ($($to_clean:expr),*) => {
                 $(

@@ -3317,6 +3410,17 @@ impl<'a> Compiler<'a> {
         span: Span,
         scope: &mut VariableScope<'a, '_>,
     ) -> Result<Option<CompileLocation<'a>>, Error<'a>> {
+        // Track the syscall in metadata
+        let syscall_name = expr.name();
+        let doc = expr.docs().into();
+        self.metadata.add_syscall_with_doc(
+            Cow::Borrowed(syscall_name),
+            crate::SyscallType::Math,
+            expr.arg_count(),
+            Some(span),
+            Some(doc),
+        );
+
         macro_rules! cleanup {
             ($($to_clean:expr),*) => {
                 $(

@@ -3577,6 +3681,19 @@ impl<'a> Compiler<'a> {

         let span = expr.span;

+        // Track the function definition in metadata
+        let param_names: Vec<Cow<'a, str>> = arguments.iter().map(|a| a.node.clone()).collect();
+        let doc_comment = self
+            .parser
+            .get_declaration_doc(name.node.as_ref())
+            .map(Cow::Owned);
+        self.metadata.add_function_with_doc(
+            name.node.clone(),
+            param_names,
+            Some(name.span),
+            doc_comment,
+        );
+
         if self.function_meta.locations.contains_key(&name.node) {
             self.errors
                 .push(Error::DuplicateIdentifier(name.node.clone(), name.span));
@@ -117,6 +117,10 @@ pub struct Parser<'a> {
     current_token: Option<Token<'a>>,
     last_token_span: Option<Span>,
     pub errors: Vec<Error<'a>>,
+    /// Caches the most recent doc comment for attaching to the next declaration
+    cached_doc_comment: Option<String>,
+    /// Maps variable/declaration names to their doc comments
+    pub declaration_docs: std::collections::HashMap<String, String>,
 }

 impl<'a> Parser<'a> {

@@ -126,6 +130,8 @@ impl<'a> Parser<'a> {
             current_token: None,
             last_token_span: None,
             errors: Vec::new(),
+            cached_doc_comment: None,
+            declaration_docs: std::collections::HashMap::new(),
         }
     }

@@ -151,6 +157,26 @@ impl<'a> Parser<'a> {
         })
     }

+    /// Pops and returns the cached doc comment, if any
+    pub fn pop_doc_comment(&mut self) -> Option<String> {
+        self.cached_doc_comment.take()
+    }
+
+    /// Caches a doc comment for attachment to the next declaration
+    pub fn cache_doc_comment(&mut self, comment: String) {
+        self.cached_doc_comment = Some(comment);
+    }
+
+    /// Stores a doc comment for a declaration (by name)
+    pub fn store_declaration_doc(&mut self, name: String, doc: String) {
+        self.declaration_docs.insert(name, doc);
+    }
+
+    /// Retrieves and removes a doc comment for a declaration
+    pub fn get_declaration_doc(&mut self, name: &str) -> Option<String> {
+        self.declaration_docs.get(name).cloned()
+    }
+
     fn unexpected_eof(&self) -> Error<'a> {
         Error::UnexpectedEOF(self.last_token_span)
     }

@@ -288,7 +314,36 @@ impl<'a> Parser<'a> {
         if let Some(token) = &self.current_token {
             self.last_token_span = Some(Self::token_to_span(token));
         }
-        self.current_token = self.tokenizer.next_token()?;
+
+        // Keep reading tokens, caching doc comments and skipping them
+        loop {
+            self.current_token = self.tokenizer.next_token_with_comments()?;
+
+            match &self.current_token {
+                Some(token) => {
+                    if let TokenType::Comment(comment) = &token.token_type {
+                        // Cache doc comments for attachment to the next declaration
+                        if let tokenizer::token::Comment::Doc(doc_text) = comment {
+                            self.cache_doc_comment(doc_text.to_string());
+                        }
+                        // Skip all comments (both doc and regular)
+                        continue;
+                    }
+
+                    // If we have a cached doc comment and encounter an identifier, associate them
+                    if let TokenType::Identifier(ref id) = token.token_type {
+                        if let Some(doc) = self.cached_doc_comment.take() {
+                            self.store_declaration_doc(id.to_string(), doc);
+                        }
+                    }
+
+                    // Non-comment token, use it as current
+                    break;
+                }
+                None => break, // EOF
+            }
+        }

         Ok(())
     }

@@ -511,7 +566,6 @@ impl<'a> Parser<'a> {

             TokenType::Keyword(Keyword::Const) => {
                 let spanned_const = self.spanned(|p| p.const_declaration())?;
-
                 Some(Spanned {
                     span: spanned_const.span,
                     node: Expression::ConstDeclaration(spanned_const),
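The token-reading loop added above couples two small pieces of state: a `///` doc comment is cached as it streams past, then bound to the next identifier that follows. Stripped of the tokenizer plumbing, the association logic is roughly the following self-contained sketch (it scans text lines rather than tokens, purely for illustration):

```rust
use std::collections::HashMap;

// Standalone model of the doc-comment association: cache a `///` comment,
// then attach it to the first identifier-looking word on the next code line.
fn collect_docs(lines: &[&str]) -> HashMap<String, String> {
    let mut cached: Option<String> = None;
    let mut docs = HashMap::new();

    for line in lines {
        if let Some(text) = line.trim_start().strip_prefix("///") {
            // Doc comment: cache it until a declaration shows up.
            cached = Some(text.trim().to_string());
        } else if let Some(doc) = cached.take() {
            // Treat the second word ("let myVar = ..." -> "myVar") as the name.
            if let Some(name) = line.split_whitespace().nth(1) {
                docs.insert(name.to_string(), doc);
            }
        }
    }
    docs
}

// collect_docs(&["/// counts widgets", "let myVar = 42;"]) yields
// {"myVar": "counts widgets"}, mirroring the declaration_docs map above.
```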
@@ -127,6 +127,52 @@ impl<'a> std::fmt::Display for Math<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<'a> Math<'a> {
|
||||||
|
/// Returns the name of this math function (e.g., "acos", "sin", "sqrt", etc.)
|
||||||
|
pub fn name(&self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
Math::Acos(_) => "acos",
|
||||||
|
Math::Asin(_) => "asin",
|
||||||
|
Math::Atan(_) => "atan",
|
||||||
|
Math::Atan2(_, _) => "atan2",
|
||||||
|
Math::Abs(_) => "abs",
|
||||||
|
Math::Ceil(_) => "ceil",
|
||||||
|
Math::Cos(_) => "cos",
|
||||||
|
Math::Floor(_) => "floor",
|
||||||
|
Math::Log(_) => "log",
|
||||||
|
Math::Max(_, _) => "max",
|
||||||
|
Math::Min(_, _) => "min",
|
||||||
|
Math::Rand => "rand",
|
||||||
|
Math::Sin(_) => "sin",
|
||||||
|
Math::Sqrt(_) => "sqrt",
|
||||||
|
Math::Tan(_) => "tan",
|
||||||
|
Math::Trunc(_) => "trunc",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the number of arguments this math function expects
|
||||||
|
pub fn arg_count(&self) -> usize {
|
||||||
|
match self {
|
||||||
|
Math::Acos(_) => 1,
|
||||||
|
Math::Asin(_) => 1,
|
||||||
|
Math::Atan(_) => 1,
|
||||||
|
Math::Atan2(_, _) => 2,
|
||||||
|
Math::Abs(_) => 1,
|
||||||
|
Math::Ceil(_) => 1,
|
||||||
|
Math::Cos(_) => 1,
|
||||||
|
Math::Floor(_) => 1,
|
||||||
|
Math::Log(_) => 1,
|
||||||
|
Math::Max(_, _) => 2,
|
||||||
|
Math::Min(_, _) => 2,
|
||||||
|
Math::Rand => 0,
|
||||||
|
Math::Sin(_) => 1,
|
||||||
|
Math::Sqrt(_) => 1,
|
||||||
|
Math::Tan(_) => 1,
|
||||||
|
Math::Trunc(_) => 1,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
documented! {
|
documented! {
|
||||||
#[derive(Debug, PartialEq, Eq)]
|
#[derive(Debug, PartialEq, Eq)]
|
||||||
pub enum System<'a> {
|
pub enum System<'a> {
|
||||||
@@ -297,6 +343,48 @@ impl<'a> std::fmt::Display for System<'a> {
     }
 }

+impl<'a> System<'a> {
+    /// Returns the name of this syscall (e.g., "yield", "sleep", "hash", etc.)
+    pub fn name(&self) -> &'static str {
+        match self {
+            System::Yield => "yield",
+            System::Sleep(_) => "sleep",
+            System::Clr(_) => "clr",
+            System::Hash(_) => "hash",
+            System::LoadFromDevice(_, _) => "loadFromDevice",
+            System::LoadBatch(_, _, _) => "loadBatch",
+            System::LoadBatchNamed(_, _, _, _) => "loadBatchNamed",
+            System::SetOnDevice(_, _, _) => "setOnDevice",
+            System::SetOnDeviceBatched(_, _, _) => "setOnDeviceBatched",
+            System::SetOnDeviceBatchedNamed(_, _, _, _) => "setOnDeviceBatchedNamed",
+            System::LoadSlot(_, _, _) => "loadSlot",
+            System::SetSlot(_, _, _, _) => "setSlot",
+            System::LoadReagent(_, _, _) => "loadReagent",
+            System::Rmap(_, _) => "rmap",
+        }
+    }
+
+    /// Returns the number of arguments this syscall expects
+    pub fn arg_count(&self) -> usize {
+        match self {
+            System::Yield => 0,
+            System::Sleep(_) => 1,
+            System::Clr(_) => 1,
+            System::Hash(_) => 1,
+            System::LoadFromDevice(_, _) => 2,
+            System::LoadBatch(_, _, _) => 3,
+            System::LoadBatchNamed(_, _, _, _) => 4,
+            System::SetOnDevice(_, _, _) => 3,
+            System::SetOnDeviceBatched(_, _, _) => 3,
+            System::SetOnDeviceBatchedNamed(_, _, _, _) => 4,
+            System::LoadSlot(_, _, _) => 3,
+            System::SetSlot(_, _, _, _) => 4,
+            System::LoadReagent(_, _, _) => 3,
+            System::Rmap(_, _) => 2,
+        }
+    }
+}
+
 #[allow(clippy::large_enum_variant)]
 #[derive(Debug, PartialEq, Eq)]
 /// This represents built in functions that cannot be overwritten, but can be invoked by the user as functions.
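
Since System now exposes the same name() / arg_count() pair as Math, callers can treat the two builtin families uniformly. A minimal sketch, assuming a hypothetical Builtin wrapper that is not part of this diff:

    // Hypothetical wrapper (assumed): a common (name, arity) view over both
    // builtin families extended above.
    enum Builtin<'a> {
        Math(Math<'a>),
        System(System<'a>),
    }

    impl<'a> Builtin<'a> {
        fn signature(&self) -> (&'static str, usize) {
            match self {
                Builtin::Math(m) => (m.name(), m.arg_count()),
                Builtin::System(s) => (s.name(), s.arg_count()),
            }
        }
    }
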
@@ -68,6 +68,12 @@ impl<'a> Tokenizer<'a> {
         Ok(current.map(|t| t.map(|t| self.get_token(t)))?)
     }
+
+    /// Returns the next token, including comments. Used to preserve doc comments.
+    pub fn next_token_with_comments(&mut self) -> Result<Option<Token<'a>>, Error> {
+        let current = self.lexer.next().transpose();
+        Ok(current.map(|t| t.map(|t| self.get_token(t)))?)
+    }
 }

 // ... Iterator and TokenizerBuffer implementations remain unchanged ...
@@ -127,12 +133,28 @@ impl<'a> TokenizerBuffer<'a> {
         self.index += 1;
         Ok(token)
     }
+
+    pub fn next_token_with_comments(&mut self) -> Result<Option<Token<'a>>, Error> {
+        if let Some(token) = self.buffer.pop_front() {
+            self.history.push_back(token.clone());
+            self.index += 1;
+            return Ok(Some(token));
+        }
+        let token = self.tokenizer.next_token_with_comments()?;
+
+        if let Some(ref token) = token {
+            self.history.push_back(token.clone());
+        }
+
+        self.index += 1;
+        Ok(token)
+    }
     pub fn peek(&mut self) -> Result<Option<Token<'a>>, Error> {
         if let Some(token) = self.buffer.front() {
             return Ok(Some(token.clone()));
         }

-        let Some(new_token) = self.tokenizer.next_token()? else {
+        let Some(new_token) = self.tokenizer.next_token_with_comments()? else {
             return Ok(None);
         };
         self.buffer.push_front(new_token.clone());
@@ -94,6 +94,30 @@ impl From<lsp_types::Diagnostic> for FfiDiagnostic {
     }
 }

+#[derive_ReprC]
+#[repr(C)]
+pub struct FfiSymbolKindData {
+    pub kind: u32, // 0=Function, 1=Syscall, 2=Variable
+    pub arg_count: u32,
+    pub syscall_type: u32, // 0=System, 1=Math (only for Syscall kind)
+}
+
+#[derive_ReprC]
+#[repr(C)]
+pub struct FfiSymbolInfo {
+    pub name: safer_ffi::String,
+    pub kind_data: FfiSymbolKindData,
+    pub span: FfiRange,
+    pub description: safer_ffi::String,
+}
+
+#[derive_ReprC]
+#[repr(C)]
+pub struct FfiDiagnosticsAndSymbols {
+    pub diagnostics: safer_ffi::Vec<FfiDiagnostic>,
+    pub symbols: safer_ffi::Vec<FfiSymbolInfo>,
+}
+
 #[ffi_export]
 pub fn free_ffi_compilation_result(input: FfiCompilationResult) {
     drop(input)
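
The kind and syscall_type fields cross the FFI boundary as plain integers, with the encoding given in the comments above (kind: 0=Function, 1=Syscall, 2=Variable; syscall_type: 0=System, 1=Math). A minimal consumer-side decode sketch, assuming a hypothetical DecodedKind type that is not part of this change:

    // Hypothetical decoding of the integer-coded symbol kind (assumed helper).
    enum DecodedKind {
        Function { arg_count: u32 },
        Syscall { arg_count: u32, is_math: bool },
        Variable,
    }

    fn decode_kind(data: &FfiSymbolKindData) -> Option<DecodedKind> {
        match data.kind {
            0 => Some(DecodedKind::Function { arg_count: data.arg_count }),
            1 => Some(DecodedKind::Syscall {
                arg_count: data.arg_count,
                is_math: data.syscall_type == 1,
            }),
            2 => Some(DecodedKind::Variable),
            _ => None,
        }
    }
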
@@ -109,6 +133,11 @@ pub fn free_ffi_diagnostic_vec(v: safer_ffi::Vec<FfiDiagnostic>) {
     drop(v)
 }

+#[ffi_export]
+pub fn free_ffi_diagnostics_and_symbols(v: FfiDiagnosticsAndSymbols) {
+    drop(v)
+}
+
 #[ffi_export]
 pub fn free_string(s: safer_ffi::String) {
     drop(s)
@@ -182,6 +211,10 @@ pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<Ff
     let input = String::from_utf16_lossy(input.as_slice());
     let tokenizer = Tokenizer::from(input.as_str());

+    // Build a lookup table for syscall documentation
+    let syscall_docs: std::collections::HashMap<&'static str, String> =
+        SysCall::get_all_documentation().into_iter().collect();
+
     let mut tokens = Vec::new();

     for token in tokenizer {
@@ -217,13 +250,26 @@ pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<Ff
             }
             Ok(Token {
                 span, token_type, ..
-            }) => tokens.push(FfiToken {
-                column: span.start as i32,
-                error: "".into(),
-                length: (span.end - span.start) as i32,
-                tooltip: token_type.docs().into(),
-                token_kind: token_type.into(),
-            }),
+            }) => {
+                let mut tooltip = token_type.docs();
+
+                // If no docs from token type, check if it's a syscall
+                if tooltip.is_empty() {
+                    if let TokenType::Identifier(id) = &token_type {
+                        if let Some(doc) = syscall_docs.get(id.as_ref()) {
+                            tooltip = doc.clone();
+                        }
+                    }
+                }
+
+                tokens.push(FfiToken {
+                    column: span.start as i32,
+                    error: "".into(),
+                    length: (span.end - span.start) as i32,
+                    tooltip: tooltip.into(),
+                    token_kind: token_type.into(),
+                })
+            }
         }
     }

@@ -257,6 +303,88 @@ pub fn diagnose_source(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<
     res.unwrap_or(vec![].into())
 }

+#[ffi_export]
+pub fn diagnose_source_with_symbols(
+    input: safer_ffi::slice::Ref<'_, u16>,
+) -> FfiDiagnosticsAndSymbols {
+    let res = std::panic::catch_unwind(|| {
+        let input = String::from_utf16_lossy(input.as_slice());
+
+        let tokenizer = Tokenizer::from(input.as_str());
+        let compiler = Compiler::new(Parser::new(tokenizer), None);
+
+        let CompilationResult {
+            errors: diagnosis,
+            metadata,
+            ..
+        } = compiler.compile();
+
+        // Convert diagnostics
+        let mut diagnostics_vec: Vec<FfiDiagnostic> = Vec::with_capacity(diagnosis.len());
+        for err in diagnosis {
+            diagnostics_vec.push(lsp_types::Diagnostic::from(err).into());
+        }
+
+        // Convert symbols
+        let mut symbols_vec: Vec<FfiSymbolInfo> = Vec::with_capacity(metadata.symbols.len());
+        for symbol in &metadata.symbols {
+            let (kind, arg_count, syscall_type) = match &symbol.kind {
+                compiler::SymbolKind::Function { parameters, .. } => {
+                    (0, parameters.len() as u32, 0)
+                }
+                compiler::SymbolKind::Syscall {
+                    syscall_type,
+                    argument_count,
+                } => {
+                    let sc_type = match syscall_type {
+                        compiler::SyscallType::System => 0,
+                        compiler::SyscallType::Math => 1,
+                    };
+                    (1, *argument_count as u32, sc_type)
+                }
+                compiler::SymbolKind::Variable { .. } => (2, 0, 0),
+            };
+
+            let span = symbol
+                .span
+                .as_ref()
+                .map(|s| (*s).into())
+                .unwrap_or(FfiRange {
+                    start_line: 0,
+                    end_line: 0,
+                    start_col: 0,
+                    end_col: 0,
+                });
+
+            symbols_vec.push(FfiSymbolInfo {
+                name: symbol.name.to_string().into(),
+                kind_data: FfiSymbolKindData {
+                    kind,
+                    arg_count,
+                    syscall_type,
+                },
+                span,
+                description: symbol
+                    .description
+                    .as_ref()
+                    .map(|d| d.to_string())
+                    .unwrap_or_default()
+                    .into(),
+            });
+        }
+
+        FfiDiagnosticsAndSymbols {
+            diagnostics: diagnostics_vec.into(),
+            symbols: symbols_vec.into(),
+        }
+    });
+
+    res.unwrap_or(FfiDiagnosticsAndSymbols {
+        diagnostics: vec![].into(),
+        symbols: vec![].into(),
+    })
+}
+
 #[ffi_export]
 pub fn get_docs() -> safer_ffi::Vec<FfiDocumentedItem> {
     let res = std::panic::catch_unwind(|| {