Working in-game error diagnostics; a memory access violation bug is still present and needs debugging.

2025-12-01 14:50:05 -07:00
parent 25d9222bd4
commit 8ea274f3bf
5 changed files with 81 additions and 64 deletions

View File

@@ -98,9 +98,9 @@ public static unsafe class SlangExtensions
     Severity = item.severity,
     Range = new Slang.Range
     {
-        EndCol = item.range.end_col - 1,
+        EndCol = Math.Max(item.range.end_col - 2, 0),
         EndLine = item.range.end_line - 1,
-        StartCol = item.range.start_col - 1,
+        StartCol = Math.Max(item.range.start_col - 2, 0),
         StartLine = item.range.end_line - 1,
     },
 }
@@ -122,9 +122,9 @@ public static unsafe class SlangExtensions
         case 3:
             return SlangFormatter.ColorInstruction; // Boolean
         case 4:
-            return SlangFormatter.ColorInstruction; // Keyword
+            return SlangFormatter.ColorSelection; // Keyword
         case 5:
-            return SlangFormatter.ColorInstruction; // Identifier
+            return SlangFormatter.ColorLineNumber; // Identifier
         case 6:
             return SlangFormatter.ColorDefault; // Symbol
         default:

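A note on the column math in the first hunk: subtracting 2 instead of 1 and clamping with Math.Max keeps a diagnostic that starts at the very beginning of a line from producing a negative column. The sketch below is illustrative only, assuming the FFI side reports columns offset by one relative to the editor's 0-based columns; FfiRange, EditorRange, and ToEditorRange are hypothetical names, not part of the mod.

using System;

var sample = new FfiRange(StartCol: 1, EndCol: 4);
// Prints: EditorRange { StartCol = 0, EndCol = 2 }
Console.WriteLine(RangeConversion.ToEditorRange(sample));

// Hypothetical types for illustration; the real code converts Marshal's
// diagnostic ranges into Slang.Range inline, as in the hunk above.
record FfiRange(long StartCol, long EndCol);
record EditorRange(int StartCol, int EndCol);

static class RangeConversion
{
    public static EditorRange ToEditorRange(FfiRange r) =>
        new EditorRange(
            // Clamp so the -2 offset cannot yield a negative column
            // for diagnostics at the start of a line.
            (int)Math.Max(r.StartCol - 2, 0),
            (int)Math.Max(r.EndCol - 2, 0));
}
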
View File

@@ -1,5 +1,6 @@
 namespace Slang;
+using System;
 using System.Collections.Generic;
 using System.Linq;
 using System.Threading;
@@ -9,7 +10,6 @@ using StationeersIC10Editor;
 public class SlangFormatter : ICodeFormatter
 {
-    private System.Timers.Timer _timer;
     private CancellationTokenSource? _lspCancellationToken;
     private readonly SynchronizationContext? _mainThreadContext;
     private volatile bool IsDiagnosing = false;
@@ -17,16 +17,13 @@ public class SlangFormatter : ICodeFormatter
     public static readonly uint ColorInstruction = ColorFromHTML("#ffff00");
     public static readonly uint ColorString = ColorFromHTML("#ce9178");
-    private object _textLock = new();
+    private HashSet<uint> _linesWithErrors = new();
     public SlangFormatter()
     {
         // 1. Capture the Main Thread context.
         // This works because the Editor instantiates this class on the main thread.
         _mainThreadContext = SynchronizationContext.Current;
-        _timer = new System.Timers.Timer(250);
-        _timer.AutoReset = false;
     }
     public override string Compile()
@@ -50,89 +47,94 @@ public class SlangFormatter : ICodeFormatter
         _lspCancellationToken = new CancellationTokenSource();
-        _ = HandleLsp(_lspCancellationToken.Token, this.RawText);
+        _ = Task.Run(() => HandleLsp(_lspCancellationToken.Token), _lspCancellationToken.Token);
     }
     private void OnTimerElapsed(object sender, ElapsedEventArgs e) { }
-    private async Task HandleLsp(CancellationToken cancellationToken, string text)
+    private async Task HandleLsp(CancellationToken cancellationToken)
     {
         try
         {
-            await Task.Delay(500, cancellationToken);
+            await Task.Delay(200, cancellationToken);
             if (cancellationToken.IsCancellationRequested)
-            {
                 return;
-            }
-            List<Diagnostic> diagnosis = Marshal.DiagnoseSource(text);
-            var dict = diagnosis
-                .GroupBy(d => d.Range.StartLine)
-                .ToDictionary(g => g.Key, g => g.ToList());
             // 3. Dispatch the UI update to the Main Thread
             if (_mainThreadContext != null)
             {
                 // Post ensures ApplyDiagnostics runs on the captured thread (Main Thread)
-                _mainThreadContext.Post(_ => ApplyDiagnostics(dict), null);
+                _mainThreadContext.Post(_ => ApplyDiagnostics(), null);
             }
             else
             {
                 // Fallback: If context is null (rare in Unity), try running directly
                 // but warn, as this might crash if not thread-safe.
                 L.Warning("SynchronizationContext was null. Attempting direct update (risky).");
-                ApplyDiagnostics(dict);
+                ApplyDiagnostics();
             }
         }
         finally { }
     }
     // This runs on the Main Thread
-    private void ApplyDiagnostics(Dictionary<uint, List<Diagnostic>> dict)
+    private void ApplyDiagnostics()
     {
+        List<Diagnostic> diagnosis = Marshal.DiagnoseSource(this.RawText);
+        var dict = diagnosis.GroupBy(d => d.Range.StartLine).ToDictionary(g => g.Key);
+        var linesToRefresh = new HashSet<uint>(dict.Keys);
+        linesToRefresh.UnionWith(_linesWithErrors);
         IsDiagnosing = true;
-        // Standard LSP uses 0-based indexing.
-        for (int i = 0; i < this.Lines.Count; i++)
-        {
-            uint lineIndex = (uint)i;
-            if (dict.TryGetValue(lineIndex, out var lineDiagnostics))
-            {
-                var line = this.Lines[i];
-                if (line is null)
-                {
-                    continue;
-                }
-                var tokenMap = line.Tokens.ToDictionary((t) => t.Column);
-                foreach (var diag in lineDiagnostics)
-                {
-                    var newToken = new SemanticToken
-                    {
-                        Column = (int)diag.Range.StartCol,
-                        Length = (int)(diag.Range.EndCol - diag.Range.StartCol),
-                        Line = i,
-                        IsError = true,
-                        Data = diag.Message,
-                        Color = ICodeFormatter.ColorError,
-                    };
-                    L.Info(
-                        $"Col: {newToken.Column} -- Length: {newToken.Length} -- Msg: {newToken.Data}"
-                    );
-                    tokenMap[newToken.Column] = newToken;
-                }
-                line.ClearTokens();
-                foreach (var token in tokenMap.Values)
-                {
-                    line.AddToken(token);
-                }
-            }
-        }
+        foreach (var lineIndex in linesToRefresh)
+        {
+            // safety check for out of bounds (in case lines were deleted)
+            if (lineIndex >= this.Lines.Count)
+                continue;
+            var line = this.Lines[(int)lineIndex];
+            if (line is null)
+                continue;
+            line.ClearTokens();
+            Dictionary<int, SemanticToken> lineDict = Marshal
+                .TokenizeLine(line.Text)
+                .Tokens.ToDictionary((t) => t.Column);
+            if (dict.ContainsKey(lineIndex))
+            {
+                foreach (var lineDiagnostic in dict[lineIndex])
+                {
+                    lineDict[(int)lineDiagnostic.Range.StartCol] = new SemanticToken
+                    {
+                        Column = Math.Abs((int)lineDiagnostic.Range.StartCol),
+                        Length = Math.Abs(
+                            (int)(lineDiagnostic.Range.EndCol - lineDiagnostic.Range.StartCol)
+                        ),
+                        Line = (int)lineIndex,
+                        IsError = true,
+                        Data = lineDiagnostic.Message,
+                        Color = SlangFormatter.ColorError,
+                    };
+                }
+            }
+            foreach (var token in lineDict.Values)
+            {
+                line.AddToken(token);
+            }
+        }
+        _linesWithErrors = new HashSet<uint>(dict.Keys);
         IsDiagnosing = false;
     }
 }

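The constructor comment and the Post call above rely on a common pattern for Unity mods: capture SynchronizationContext.Current on the main thread, do the slow work on the thread pool, and Post the result back. Below is a minimal, self-contained sketch of that pattern; the names MainThreadDispatchSketch, RunDiagnostics, and ApplyResult are illustrative only, not the mod's API.

using System;
using System.Threading;
using System.Threading.Tasks;

class MainThreadDispatchSketch
{
    private readonly SynchronizationContext? _mainThreadContext;

    public MainThreadDispatchSketch()
    {
        // Assumes the constructor runs on the main thread, the same
        // assumption SlangFormatter documents above.
        _mainThreadContext = SynchronizationContext.Current;
    }

    public void RunDiagnostics(CancellationToken token)
    {
        _ = Task.Run(async () =>
        {
            try
            {
                // Debounce, mirroring the 200 ms delay in HandleLsp.
                await Task.Delay(200, token);
            }
            catch (OperationCanceledException)
            {
                return; // a newer edit superseded this run
            }

            if (_mainThreadContext != null)
                _mainThreadContext.Post(_ => ApplyResult(), null);
            else
                ApplyResult(); // risky fallback, as the original code warns
        }, token);
    }

    private void ApplyResult()
    {
        // Touch editor/UI state here; safe only on the captured thread.
    }
}

Post queues the callback without blocking the worker, which is why the original code prefers it over running the update inline.
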
View File

@@ -31,16 +31,16 @@ quick_error! {
             source(err)
         }
         UnexpectedToken(span: Span, token: Token) {
-            display("Unexpected token: {:?}", token)
+            display("Unexpected token: {}", token.token_type)
         }
         DuplicateIdentifier(span: Span, token: Token) {
-            display("Duplicate identifier: {:?}", token)
+            display("Duplicate identifier: {}", token.token_type)
         }
         InvalidSyntax(span: Span, reason: String) {
-            display("Invalid syntax: {:?}, Reason: {}", span, reason)
+            display("Invalid syntax: {}", reason)
         }
         UnsupportedKeyword(span: Span, token: Token) {
-            display("Unsupported keyword: {:?}", token)
+            display("Unsupported keyword: {}", token.token_type)
         }
         UnexpectedEOF {
             display("Unexpected EOF")

View File

@@ -19,18 +19,18 @@ quick_error! {
             source(err)
         }
         NumberParseError(err: std::num::ParseIntError, line: usize, column: usize, original: String) {
-            display("Number Parse Error: {}\nLine: {}, Column: {}", err, line, column)
+            display("Number Parse Error: {}", err)
             source(err)
         }
         DecimalParseError(err: rust_decimal::Error, line: usize, column: usize, original: String) {
-            display("Decimal Parse Error: {}\nLine: {}, Column: {}", err, line, column)
+            display("Decimal Parse Error: {}", err)
             source(err)
         }
         UnknownSymbolError(char: char, line: usize, column: usize, original: String) {
-            display("Unknown Symbol: {}\nLine: {}, Column: {}", char, line, column)
+            display("Unknown Symbol: {}", char)
         }
         UnknownKeywordOrIdentifierError(val: String, line: usize, column: usize, original: String) {
-            display("Unknown Keyword or Identifier: {}\nLine: {}, Column: {}", val, line, column)
+            display("Unknown Keyword or Identifier: {}", val)
         }
     }
 }

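Both error enums above keep their line and column fields but drop them from the Display text; the position now travels in the variant fields, where callers (such as the tokenizer FFI below) can read it directly. A small sketch of the resulting behaviour with the quick_error crate; DemoError is a stand-in variant for illustration, not the crate's real enum.

use quick_error::quick_error;

quick_error! {
    #[derive(Debug)]
    pub enum DemoError {
        UnknownSymbolError(char: char, line: usize, column: usize, original: String) {
            display("Unknown Symbol: {}", char)
        }
    }
}

fn main() {
    let err = DemoError::UnknownSymbolError('$', 3, 7, "$".to_string());
    // The user-facing message is now just the short description...
    assert_eq!(err.to_string(), "Unknown Symbol: $");
    // ...while the position still travels in the variant's fields,
    // where a caller like tokenize_line can read it directly.
    match &err {
        DemoError::UnknownSymbolError(_, line, column, _) => {
            println!("line {}, column {}", line, column);
        }
    }
}
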
View File

@@ -106,8 +106,6 @@ pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<Ff
     let mut tokens = Vec::new();
-    // Error reporting is handled in `diagnose_source`. We only care about successful tokens here
-    // for syntax highlighting
     for token in tokenizer {
         if matches!(
             token,
@@ -119,7 +117,24 @@ pub fn tokenize_line(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<Ff
             continue;
         }
         match token {
-            Err(_) => {}
+            Err(ref e) => {
+                use tokenizer::Error::*;
+                let (err_str, col, og) = match e {
+                    NumberParseError(_, _, col, og)
+                    | DecimalParseError(_, _, col, og)
+                    | UnknownSymbolError(_, _, col, og)
+                    | UnknownKeywordOrIdentifierError(_, _, col, og) => (e.to_string(), col, og),
+                    _ => continue,
+                };
+                tokens.push(FfiToken {
+                    column: *col as i32,
+                    error: err_str.into(),
+                    tooltip: "".into(),
+                    length: og.len() as i32,
+                    token_kind: 0,
+                })
+            }
             Ok(Token {
                 column,
                 original_string,