Stationpedia docs
@@ -1,6 +1,6 @@
 use compiler::Compiler;
 use helpers::Documentation;
-use parser::Parser;
+use parser::{sys_call::SysCall, Parser};
 use safer_ffi::prelude::*;
 use std::io::BufWriter;
 use tokenizer::{
@@ -27,6 +27,13 @@ pub struct FfiRange {
     end_line: u32,
 }
 
+#[derive_ReprC]
+#[repr(C)]
+pub struct FfiDocumentedItem {
+    item_name: safer_ffi::String,
+    docs: safer_ffi::String,
+}
+
 impl From<lsp_types::Range> for FfiRange {
     fn from(value: lsp_types::Range) -> Self {
         Self {
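The Documentation trait pulled in from helpers is not part of this diff, so its exact signature is an assumption. Judging from the get_docs hunk further down, where every documented item is a (key, doc) pair converted into safer_ffi strings, a minimal sketch of its shape would be:

// Assumed shape only -- the real trait lives in the helpers crate and is not
// shown in this commit. get_docs() below relies on each implementor (SysCall,
// TokenType) yielding (item name, documentation text) pairs.
pub trait Documentation {
    fn get_all_documentation() -> Vec<(String, String)>;
}

FfiDocumentedItem is that pair flattened into a #[repr(C)] struct so the C# side can read it without extra marshalling.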
@@ -77,6 +84,11 @@ pub fn free_string(s: safer_ffi::String) {
     drop(s)
 }
 
+#[ffi_export]
+pub fn free_docs_vec(v: safer_ffi::Vec<FfiDocumentedItem>) {
+    drop(v)
+}
+
 /// C# handles strings as UTF16. We do NOT want to allocate that memory in C# because
 /// we want to avoid GC. So we pass it to Rust to handle all the memory allocations.
 /// This should result in the ability to compile many times without triggering frame drops
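The body of diagnose_source is unchanged in this commit, but the comment above spells out the contract: C# hands its UTF-16 buffer across the boundary and Rust performs every allocation. Decoding that buffer on the Rust side amounts to something like the sketch below, using a plain &[u16] as a stand-in for the safer_ffi slice wrapper; it is not the actual implementation.

// Sketch only: the real entry point takes safer_ffi::slice::Ref<'_, u16>.
fn decode_utf16_input(input: &[u16]) -> String {
    // Lossy decoding keeps the boundary panic-free even if the caller ever
    // passes a malformed surrogate pair.
    String::from_utf16_lossy(input)
}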
@@ -184,3 +196,26 @@ pub fn diagnose_source(input: safer_ffi::slice::Ref<'_, u16>) -> safer_ffi::Vec<
 
     res.unwrap_or(vec![].into())
 }
+
+#[ffi_export]
+pub fn get_docs() -> safer_ffi::Vec<FfiDocumentedItem> {
+    let res = std::panic::catch_unwind(|| {
+        let mut docs = SysCall::get_all_documentation();
+        docs.extend(TokenType::get_all_documentation());
+
+        docs
+    });
+
+    let Ok(result) = res else {
+        return vec![].into();
+    };
+
+    result
+        .into_iter()
+        .map(|(key, doc)| FfiDocumentedItem {
+            item_name: key.into(),
+            docs: doc.into(),
+        })
+        .collect::<Vec<_>>()
+        .into()
+}
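get_docs() allocates the vector and its strings on the Rust side, and free_docs_vec() above is the only correct way to release them. A hypothetical in-crate smoke test (not part of this commit) would pair the two calls like this:

#[test]
fn docs_roundtrip() {
    // Everything returned here is Rust-owned; the consumer (the C# side in
    // practice) copies what it needs and then hands the vector straight back.
    let docs = get_docs();
    free_docs_vec(docs);
}

The catch_unwind wrapper means a panic inside either get_all_documentation source degrades to an empty vector instead of unwinding across the FFI boundary.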