Stationpedia docs

2025-12-02 17:59:40 -07:00
parent bf1daf12cc
commit 75f1c5c44a
11 changed files with 211 additions and 24 deletions

@@ -1,6 +1,6 @@
 use compiler::Compiler;
 use helpers::Documentation;
-use parser::Parser;
+use parser::{sys_call::SysCall, Parser};
 use safer_ffi::prelude::*;
 use std::io::BufWriter;
 use tokenizer::{
@@ -27,6 +27,13 @@ pub struct FfiRange {
     end_line: u32,
 }
 
+#[derive_ReprC]
+#[repr(C)]
+pub struct FfiDocumentedItem {
+    item_name: safer_ffi::String,
+    docs: safer_ffi::String,
+}
+
 impl From<lsp_types::Range> for FfiRange {
     fn from(value: lsp_types::Range) -> Self {
         Self {
@@ -77,6 +84,11 @@ pub fn free_string(s: safer_ffi::String) {
     drop(s)
 }
 
+#[ffi_export]
+pub fn free_docs_vec(v: safer_ffi::Vec<FfiDocumentedItem>) {
+    drop(v)
+}
+
 /// C# handles strings as UTF16. We do NOT want to allocate that memory in C# because
 /// we want to avoid GC. So we pass it to Rust to handle all the memory allocations.
 /// This should result in the ability to compile many times without triggering frame drops
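
The comment above describes the input side of the FFI contract: C# only lends a borrowed UTF-16 buffer, and Rust performs (and owns) every allocation it makes. A minimal std-only sketch of that decode step, with a hypothetical function name and not taken from this commit:

// Hedged sketch, not code from this commit: the UTF-16 text that C# lends
// (the exported functions receive it as safer_ffi::slice::Ref<'_, u16>) is
// decoded into an owned Rust String, so all allocation stays in native memory.
fn decode_borrowed_utf16(input: &[u16]) -> String {
    // Lossy decoding avoids a panic path if the buffer contains unpaired
    // surrogates; the returned String is allocated and owned by Rust.
    String::from_utf16_lossy(input)
}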
@@ -184,3 +196,26 @@ pub fn diagnose_source(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<
     res.unwrap_or(vec![].into())
 }
+
+#[ffi_export]
+pub fn get_docs() -> safer_ffi::Vec<FfiDocumentedItem> {
+    let res = std::panic::catch_unwind(|| {
+        let mut docs = SysCall::get_all_documentation();
+        docs.extend(TokenType::get_all_documentation());
+        docs
+    });
+    let Ok(result) = res else {
+        return vec![].into();
+    };
+    result
+        .into_iter()
+        .map(|(key, doc)| FfiDocumentedItem {
+            item_name: key.into(),
+            docs: doc.into(),
+        })
+        .collect::<Vec<_>>()
+        .into()
+}
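
The new pair follows the same ownership rule as free_string: get_docs hands a Rust-allocated vector to the caller, and the caller must eventually pass it back to free_docs_vec so Rust drops it exactly once. A hypothetical Rust-side sketch of that round trip (it assumes safer_ffi's Vec can be read as a slice via Deref; not code from this commit):

// Hedged usage sketch, not code from this commit.
fn docs_round_trip() {
    // Ownership of the vector (and of the strings inside it) moves to the caller.
    let docs = get_docs();
    // Read-only pass over the entries; a C# caller would marshal each
    // item_name / docs pair here before releasing the vector.
    for item in docs.iter() {
        let _ = item;
    }
    // Return ownership so Rust frees the allocation; never free it on the C# side.
    free_docs_vec(docs);
}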