Add some initial documentation support for functions

Sofia 2025-08-14 16:28:33 +03:00
parent 9e37ae7aac
commit 7b4f38406d
17 changed files with 190 additions and 39 deletions

Cargo.lock (generated, 2 lines changed)
View File

@@ -667,6 +667,8 @@ version = "1.0.0-beta.1"
dependencies = [
 "dashmap 6.1.0",
 "reid",
 "serde",
 "serde_json",
 "socket",
 "tokio",
 "tower-lsp",

examples/a3.reid (new file, 7 lines added)
View File

@@ -0,0 +1,7 @@
fn main() -> i32 {
let a = 4f32;
if (a % 2) == 0 {
return 1;
}
return 0;
}

View File

@@ -2,11 +2,15 @@ import std::print;
import std::from_str;
import std::String;
/// Asd
struct Otus {
field: u32,
}
impl Otus {
/// Some test documentation
/// Here
/// qwe
fn test(&self) -> u32 {
*self.field
}
@@ -28,7 +32,6 @@ fn main() -> u32 {
let mut list = u64::malloc(15);
list[4] = 17;
print(from_str("value: ") + list[4]);
return i32::sizeof() as u32;

View File

@@ -1,5 +1,6 @@
// Arithmetic, function calls and imports!
/// Test stuff
fn changer(param: &mut u32) {
*param = 17;
}

View File

@@ -9,3 +9,5 @@ tokio = { version = "1.47.0", features = ["full"] }
tower-lsp = "0.20.0"
reid = { path = "../reid", version = "1.0.0-beta.4", registry="gitea-teascade", features=[] }
dashmap = "6.1.0"
serde = "*"
serde_json = "*"

View File

@@ -92,6 +92,7 @@ pub struct SemanticToken {
#[derive(Debug, Clone)]
pub struct Autocomplete {
pub text: String,
pub documentation: Option<String>,
pub kind: AutocompleteKind,
}
@@ -401,6 +402,33 @@ pub fn analyze(
return Ok(None);
}
// pub fn find_documentation(meta: &Metadata, tokens: &Vec<FullToken>) -> Option<String> {
// let mut documentation = None;
// for idx in meta.range.start..=meta.range.end {
// if let Some(token) = tokens.get(idx) {
// dbg!(&token);
// if matches!(token.token, Token::Whitespace(_) | Token::Doc(_) | Token::Comment(_)) {
// if let Token::Doc(doctext) = &token.token {
// documentation = Some(
// match documentation {
// Some(t) => t + " ",
// None => String::new(),
// } + doctext.trim(),
// );
// }
// } else {
// dbg!(&token);
// break;
// }
// } else {
// dbg!(&idx);
// break;
// }
// }
// dbg!(&documentation);
// documentation
// }
pub fn analyze_context(
context: &mir::Context,
module: &mir::Module,
@@ -712,20 +740,18 @@ pub fn analyze_context(
if !function.is_pub {
continue;
}
- if function.name.starts_with(&import_name) {
autocompletes.push(Autocomplete {
text: function.name.clone(),
documentation: function.documentation.clone(),
kind: AutocompleteKind::Function(function.parameters.clone(), function.return_type.clone()),
});
- }
}
for typedef in &module.typedefs {
- if typedef.name.starts_with(&import_name) {
autocompletes.push(Autocomplete {
text: typedef.name.clone(),
documentation: None,
kind: AutocompleteKind::Type,
});
- }
}
}
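Note: with the starts_with(&import_name) checks removed, every public function and typedef of the imported module is offered and prefix filtering is left to the editor; each function entry now also carries its documentation. A standalone sketch of that list-building with stand-in tuples instead of the real mir types (the sample names and doc string below are made up):

// Hedged sketch, stand-in types only: (name, is_pub, documentation).
fn import_completions(functions: &[(&str, bool, Option<&str>)]) -> Vec<(String, Option<String>)> {
    functions
        .iter()
        .filter(|(_, is_pub, _)| *is_pub)
        .map(|(name, _, doc)| (name.to_string(), doc.map(|d| d.to_string())))
        .collect()
}

fn main() {
    // Hypothetical module contents, for illustration only.
    let funcs = [("print", true, Some("prints a string")), ("helper", false, None)];
    // Only the public function is offered; its documentation rides along.
    assert_eq!(import_completions(&funcs).len(), 1);
}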
@@ -823,7 +849,6 @@ pub fn analyze_block(
let ty_idx = scope.token_idx(&named_variable_ref.2.after(idx + 1), |t| {
matches!(t, Token::Identifier(_))
});
- dbg!(ty_idx);
if let Some(ty_idx) = ty_idx {
let ty_symbol = if let Some((source_id, symbol_id)) = scope.types.get(&named_variable_ref.0) {
scope
@@ -902,9 +927,10 @@ pub fn analyze_expr(
source_module
.associated_functions
.iter()
- .filter(|(t, fun)| *t == accessed_type && fun.name.starts_with(name))
.filter(|(t, _)| *t == accessed_type)
.map(|(_, fun)| Autocomplete {
text: fun.name.clone(),
documentation: fun.documentation.clone(),
kind: AutocompleteKind::Function(fun.parameters.clone(), fun.return_type.clone()),
}),
);
@@ -933,8 +959,9 @@ pub fn analyze_expr(
if let Some(typedef) = typedef {
autocompletes.extend(match &typedef.kind {
mir::TypeDefinitionKind::Struct(StructType(fields)) => {
- fields.iter().filter(|f| f.0.starts_with(name)).map(|f| Autocomplete {
fields.iter().map(|f| Autocomplete {
text: f.0.clone(),
documentation: None,
kind: AutocompleteKind::Field(f.1.clone()),
})
}
@@ -1067,18 +1094,19 @@ pub fn analyze_expr(
let mut function_autocomplete = source_module
.associated_functions
.iter()
- .filter(|(t, fun)| *t == invoked_ty && fun.name.starts_with(name))
.filter(|(t, _)| *t == invoked_ty)
.map(|(_, fun)| Autocomplete {
text: fun.name.clone(),
documentation: fun.documentation.clone(),
kind: AutocompleteKind::Function(fun.parameters.clone(), fun.return_type.clone()),
})
.collect::<Vec<_>>();
function_autocomplete.extend(
get_intrinsic_assoc_functions(&invoked_ty)
.iter()
- .filter(|fun| fun.name.starts_with(name))
.map(|fun| Autocomplete {
text: fun.name.clone(),
documentation: Some("intrinsic function documentation".to_string()),
kind: AutocompleteKind::Function(fun.parameters.clone(), fun.return_type.clone()),
})
.collect::<Vec<_>>(),

View File

@@ -6,17 +6,18 @@ use reid::ast::lexer::{FullToken, Position};
use reid::error_raporting::{self, ErrorModules, ReidError};
use reid::mir::SourceModuleId;
use reid::parse_module;
use serde::{Deserialize, Serialize};
use tokio::sync::Mutex;
use tower_lsp::lsp_types::{
self, CompletionItem, CompletionItemKind, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic,
DiagnosticSeverity, DidChangeTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams,
DocumentFilter, GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverContents, HoverParams,
HoverProviderCapability, InitializeParams, InitializeResult, InitializedParams, Location, MarkupContent,
MarkupKind, MessageType, OneOf, Range, ReferenceParams, RenameParams, SemanticToken, SemanticTokensLegend,
SemanticTokensOptions, SemanticTokensParams, SemanticTokensResult, SemanticTokensServerCapabilities,
ServerCapabilities, TextDocumentItem, TextDocumentRegistrationOptions, TextDocumentSyncCapability,
TextDocumentSyncKind, TextDocumentSyncOptions, TextDocumentSyncSaveOptions, TextEdit, Url, WorkspaceEdit,
WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
};
use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc};
@@ -33,6 +34,12 @@ struct Backend {
module_id_counter: Mutex<SourceModuleId>,
}
#[derive(Serialize, Deserialize, Debug)]
struct CompletionData {
token_idx: usize,
path: PathBuf,
}
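Note: CompletionData exists only to survive the round trip through a completion item's free-form data field, so that completion_resolve below can find the token and file the list was built for. A minimal sketch of that round trip using just serde and serde_json (the path and index here are made up):

use serde::{Deserialize, Serialize};
use std::path::PathBuf;

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CompletionData {
    token_idx: usize,
    path: PathBuf,
}

fn main() {
    // completion(): stash the lookup key on the item sent to the client.
    let data = CompletionData { token_idx: 42, path: PathBuf::from("/tmp/main.reid") };
    let value = serde_json::to_value(&data).unwrap();

    // completion_resolve(): recover the key from the item the client sends back.
    let back: CompletionData = serde_json::from_value(value).unwrap();
    assert_eq!(back, data);
}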
#[tower_lsp::async_trait]
impl LanguageServer for Backend {
async fn initialize(&self, _: InitializeParams) -> jsonrpc::Result<InitializeResult> {
@@ -52,7 +59,17 @@ impl LanguageServer for Backend {
let capabilities = ServerCapabilities {
hover_provider: Some(HoverProviderCapability::Simple(true)),
- completion_provider: Some(CompletionOptions { ..Default::default() }),
completion_provider: Some(CompletionOptions {
trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
all_commit_characters: None,
completion_item: Some(lsp_types::CompletionOptionsCompletionItem {
label_details_support: Some(true),
}),
resolve_provider: Some(true),
work_done_progress_options: lsp_types::WorkDoneProgressOptions {
work_done_progress: Some(true),
},
}),
text_document_sync: Some(TextDocumentSyncCapability::Options(sync)),
workspace: Some(WorkspaceServerCapabilities {
workspace_folders: Some(WorkspaceFoldersServerCapabilities {
@@ -121,11 +138,26 @@ impl LanguageServer for Backend {
// dbg!(position, token);
let list = if let Some((idx, _)) = token {
- if let Some(analysis) = self.analysis.get(&path).unwrap().state.map.get(&idx) {
if let Some(token_analysis) = self.analysis.get(&path).unwrap().state.map.get(&idx) {
- analysis
token_analysis
.autocomplete
.iter()
- .map(|s| CompletionItem::new_simple(s.text.to_string(), s.kind.to_string()))
.map(|autocomplete| {
let mut item =
CompletionItem::new_simple(autocomplete.text.to_string(), autocomplete.kind.to_string());
item.data = Some(
serde_json::to_value(CompletionData {
token_idx: idx,
path: path.clone(),
})
.unwrap(),
);
item.documentation = autocomplete
.documentation
.as_ref()
.and_then(|d| Some(lsp_types::Documentation::String(d.clone())));
item
})
.collect()
} else {
Vec::new()
@@ -138,6 +170,20 @@ impl LanguageServer for Backend {
Ok(Some(CompletionResponse::Array(list)))
}
async fn completion_resolve(&self, params: CompletionItem) -> jsonrpc::Result<CompletionItem> {
let data: Option<CompletionData> = if let Some(data) = &params.data {
serde_json::from_value(data.clone()).ok()
} else {
None
};
if let Some(data) = data {
let analysis = self.analysis.get(&data.path).unwrap();
let token = analysis.tokens.get(data.token_idx).unwrap();
if let Some(token_analysis) = analysis.state.map.get(&data.token_idx) {}
}
Ok(params)
}
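Note: the resolve handler above recovers CompletionData, but its inner if-let branch is still empty. A hedged sketch, not the commit's code, of how the stored analysis entries might be used to fill in documentation by matching the item's label (stand-in Autocomplete struct; the real lookup would go through analysis.state.map and the saved token_idx):

// Stand-in for analysis::Autocomplete, reduced to the fields this sketch needs.
struct Autocomplete {
    text: String,
    documentation: Option<String>,
}

fn resolve_documentation(entries: &[Autocomplete], label: &str) -> Option<String> {
    entries
        .iter()
        .find(|a| a.text == label)
        .and_then(|a| a.documentation.clone())
}

fn main() {
    let entries = vec![Autocomplete {
        text: "test".into(),
        documentation: Some("Some test documentation Here qwe".into()),
    }];
    // In completion_resolve this would become `params.documentation = ...`.
    assert_eq!(
        resolve_documentation(&entries, "test").as_deref(),
        Some("Some test documentation Here qwe")
    );
}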
async fn hover(&self, params: HoverParams) -> jsonrpc::Result<Option<Hover>> {
let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
let analysis = self.analysis.get(&path);

View File

@@ -116,6 +116,7 @@ pub enum Token {
Whitespace(String),
Comment(String),
Doc(String),
Eof,
}
@@ -196,6 +197,7 @@ impl ToString for Token {
Token::Percent => String::from('%'),
Token::Whitespace(val) => val.clone(),
Token::Comment(val) => format!("//{}", val.clone()),
Token::Doc(val) => format!("///{}", val.clone()),
Token::Unknown(val) => val.to_string(),
}
}
@@ -309,13 +311,25 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Error
}
// Comments
'/' if cursor.first() == Some('/') => {
cursor.next();
let doc = if cursor.first() == Some('/') {
cursor.next();
true
} else {
false
};
let mut comment = String::new();
while !matches!(cursor.first(), Some('\n') | None) {
if let Some(c) = cursor.next() {
comment.push(c);
}
}
- Token::Comment(comment)
if doc {
Token::Doc(comment)
} else {
Token::Comment(comment)
}
}
'\"' | '\'' => {
let mut value = String::new();
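Note on the comment branch above: a second leading '/' after the first switches the result from Token::Comment to Token::Doc, and the extra slash is not kept in the stored text. A standalone sketch of the same decision over a plain Peekable<Chars>, which is an assumption; the real lexer uses its own cursor type:

#[derive(Debug, PartialEq)]
enum Token {
    Comment(String),
    Doc(String),
}

fn lex_comment(chars: &mut std::iter::Peekable<std::str::Chars<'_>>) -> Token {
    // Called once the first '/' is consumed and the second '/' has been peeked.
    chars.next(); // consume the second '/'
    let doc = if chars.peek() == Some(&'/') {
        chars.next(); // a third '/' marks a doc comment
        true
    } else {
        false
    };
    // Everything up to the end of the line becomes the comment text.
    let mut text = String::new();
    while let Some(&c) = chars.peek() {
        if c == '\n' {
            break;
        }
        text.push(c);
        chars.next();
    }
    if doc { Token::Doc(text) } else { Token::Comment(text) }
}

fn main() {
    let mut plain = "// plain".chars().peekable();
    plain.next(); // the tokenizer already consumed the leading '/'
    assert_eq!(lex_comment(&mut plain), Token::Comment(" plain".into()));

    let mut documented = "/// Asd".chars().peekable();
    documented.next();
    assert_eq!(lex_comment(&mut documented), Token::Doc(" Asd".into()));
}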

View File

@@ -192,6 +192,7 @@ pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub To
#[derive(Debug, Clone)]
pub struct FunctionSignature {
pub name: String,
pub documentation: Option<String>,
pub self_kind: SelfKind,
pub params: Vec<(String, Type, TokenRange)>,
pub return_type: Option<Type>,

View File

@@ -696,6 +696,10 @@ impl Parse for ImportStatement {
impl Parse for FunctionDefinition {
fn parse(mut stream: TokenStream) -> Result<Self, Error> {
let documentation = stream.find_documentation();
dbg!(&stream.get_range());
dbg!(&documentation);
let is_pub = if let Some(Token::PubKeyword) = stream.peek() {
stream.next(); // Consume pub
true
@@ -704,8 +708,10 @@ impl Parse for FunctionDefinition {
};
stream.expect(Token::FnKeyword)?;
let mut signature: FunctionSignature = stream.parse()?;
signature.documentation = documentation;
Ok(FunctionDefinition(
- stream.parse()?,
signature,
is_pub,
stream.parse()?,
stream.get_range().unwrap(),
@@ -810,6 +816,7 @@ impl Parse for FunctionSignature {
Ok(FunctionSignature {
name,
documentation: None,
params,
self_kind,
return_type,
@@ -1089,9 +1096,12 @@ impl Parse for TopLevelStatement {
Ok(match stream.peek() {
Some(Token::ImportKeyword) => Stmt::Import(stream.parse()?),
Some(Token::Extern) => {
let documentation = stream.find_documentation();
stream.next(); // Consume Extern
stream.expect(Token::FnKeyword)?;
- let extern_fn = Stmt::ExternFunction(stream.parse()?);
let mut signature: FunctionSignature = stream.parse()?;
signature.documentation = documentation;
let extern_fn = Stmt::ExternFunction(signature);
stream.expect_nonfatal(Token::Semi).ok();
extern_fn
}

View File

@@ -43,6 +43,7 @@ impl ast::Module {
ExternFunction(signature) => {
let def = mir::FunctionDefinition {
name: signature.name.clone(),
documentation: signature.documentation.clone(),
linkage_name: None,
is_pub: false,
is_imported: false,
@@ -176,6 +177,7 @@ impl ast::FunctionDefinition {
}));
mir::FunctionDefinition {
name: signature.name.clone(),
documentation: signature.documentation.clone(),
linkage_name: None,
is_pub: *is_pub,
is_imported: false,

View File

@@ -87,6 +87,28 @@ impl<'a, 'b> TokenStream<'a, 'b> {
}
}
pub fn find_documentation(&mut self) -> Option<String> {
let mut from = self.position;
let mut documentation = None;
while let Some(token) = self.tokens.get(from) {
if matches!(token.token, Token::Whitespace(_) | Token::Comment(_) | Token::Doc(_)) {
from += 1;
if let Token::Doc(doctext) = &token.token {
documentation = Some(
match documentation {
Some(t) => t + " ",
None => String::new(),
} + doctext.trim(),
);
}
} else {
break;
}
}
dbg!(self.position, from, &documentation);
documentation
}
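Note: find_documentation joins consecutive /// lines with single spaces, skips whitespace and ordinary // comments along the way, and stops at the first other token without advancing the stream position. A runnable re-implementation of just that fold over a stand-in token list, mirroring the doc comments on Otus::test from the example module above:

// Stand-in Token with only the variants the loop above cares about.
enum Token {
    Whitespace(String),
    Comment(String),
    Doc(String),
    Identifier(String),
}

fn find_documentation(tokens: &[Token], mut from: usize) -> Option<String> {
    let mut documentation: Option<String> = None;
    while let Some(token) = tokens.get(from) {
        match token {
            // Whitespace and plain comments are skipped over.
            Token::Whitespace(_) | Token::Comment(_) => from += 1,
            // Each doc line is trimmed and appended with a single space.
            Token::Doc(text) => {
                from += 1;
                let joined = match documentation {
                    Some(t) => t + " ",
                    None => String::new(),
                } + text.trim();
                documentation = Some(joined);
            }
            // Any other token ends the search.
            _ => break,
        }
    }
    documentation
}

fn main() {
    let tokens = vec![
        Token::Doc(" Some test documentation".into()),
        Token::Whitespace("\n".into()),
        Token::Doc(" Here".into()),
        Token::Whitespace("\n".into()),
        Token::Doc(" qwe".into()),
        Token::Whitespace("\n".into()),
        Token::Comment(" a regular comment is skipped".into()),
        Token::Whitespace("\n".into()),
        Token::Identifier("fn".into()),
    ];
    assert_eq!(
        find_documentation(&tokens, 0).as_deref(),
        Some("Some test documentation Here qwe")
    );
}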
pub fn expect_nonfatal(&mut self, token: Token) -> Result<(), ()> {
if let (pos, Some(peeked)) = self.next_token(self.position) {
if token == peeked.token {
@@ -249,7 +271,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
fn previous_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
from -= 1;
while let Some(token) = self.tokens.get(from) {
- if matches!(token.token, Token::Whitespace(_) | Token::Comment(_)) {
if matches!(token.token, Token::Whitespace(_) | Token::Comment(_) | Token::Doc(_)) {
from -= 1;
} else {
break;
@@ -260,7 +282,7 @@ impl<'a, 'b> TokenStream<'a, 'b> {
fn next_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
while let Some(token) = self.tokens.get(from) {
- if matches!(token.token, Token::Whitespace(_) | Token::Comment(_)) {
if matches!(token.token, Token::Whitespace(_) | Token::Comment(_) | Token::Doc(_)) {
from += 1;
} else {
break;

View File

@@ -71,6 +71,7 @@ pub fn form_intrinsics() -> Vec<FunctionDefinition> {
intrinsics.push(FunctionDefinition {
name: MALLOC_IDENT.to_owned(),
documentation: Some("temp".to_string()),
linkage_name: Some("malloc".to_owned()),
is_pub: false,
is_imported: true,
@@ -96,6 +97,7 @@ pub fn simple_intrinsic<T: Into<String> + Clone>(
) -> FunctionDefinition {
FunctionDefinition {
name: name.into(),
documentation: Some("temp".to_string()),
linkage_name: None,
is_pub: true,
is_imported: false,
@@ -115,6 +117,7 @@ pub fn get_intrinsic_assoc_functions(ty: &TypeKind) -> Vec<FunctionDefinition> {
if let TypeKind::Array(_, len) = ty {
intrinsics.push(FunctionDefinition {
name: "length".to_owned(),
documentation: Some("temp".to_string()),
linkage_name: None,
is_pub: true,
is_imported: false,
@@ -252,6 +255,7 @@ pub fn get_intrinsic_assoc_functions(ty: &TypeKind) -> Vec<FunctionDefinition> {
));
intrinsics.push(FunctionDefinition {
name: "powi".to_owned(),
documentation: Some("temp".to_string()),
linkage_name: None,
is_pub: true,
is_imported: false,
@@ -293,6 +297,7 @@ pub fn get_intrinsic_assoc_functions(ty: &TypeKind) -> Vec<FunctionDefinition> {
if ty.signed() {
intrinsics.push(FunctionDefinition {
name: "abs".to_owned(),
documentation: Some("temp".to_string()),
linkage_name: None,
is_pub: true,
is_imported: false,
@@ -323,6 +328,7 @@ pub fn get_intrinsic_assoc_functions(ty: &TypeKind) -> Vec<FunctionDefinition> {
}
intrinsics.push(FunctionDefinition {
name: "sizeof".to_owned(),
documentation: Some("temp".to_string()),
linkage_name: None,
is_pub: true,
is_imported: false,
@@ -334,6 +340,7 @@ pub fn get_intrinsic_assoc_functions(ty: &TypeKind) -> Vec<FunctionDefinition> {
});
intrinsics.push(FunctionDefinition {
name: "malloc".to_owned(),
documentation: Some("temp".to_string()),
linkage_name: None,
is_pub: true,
is_imported: false,
@@ -350,6 +357,7 @@ pub fn get_intrinsic_assoc_functions(ty: &TypeKind) -> Vec<FunctionDefinition> {
intrinsics.push(FunctionDefinition {
name: "memcpy".to_owned(),
documentation: Some("temp".to_string()),
linkage_name: None,
is_pub: true,
is_imported: false,
@@ -378,6 +386,7 @@ pub fn get_intrinsic_assoc_functions(ty: &TypeKind) -> Vec<FunctionDefinition> {
intrinsics.push(FunctionDefinition {
name: "null".to_owned(),
documentation: Some("temp".to_string()),
linkage_name: None,
is_pub: true,
is_imported: false,
@@ -390,6 +399,7 @@ pub fn get_intrinsic_assoc_functions(ty: &TypeKind) -> Vec<FunctionDefinition> {
intrinsics.push(FunctionDefinition {
name: "is_null".to_owned(),
documentation: Some("temp".to_string()),
linkage_name: None,
is_pub: true,
is_imported: false,

View File

@@ -226,8 +226,6 @@ impl mir::Module {
}
}
- dbg!(&typedefs_sorted);
for typedef in typedefs_sorted {
let type_key = CustomTypeKey(typedef.name.clone(), typedef.source_module);
type_map.insert(type_key.clone(), typedef.clone());
@@ -336,7 +334,6 @@ impl mir::Module {
if module_id == self.module_id {
format!("reid.{}.", self.name)
} else {
- dbg!(self.module_id, module_id);
format!("reid.{}.", modules.get(&module_id).unwrap().name)
}
} else {

View File

@@ -152,6 +152,9 @@ impl Display for StructField {
impl Display for FunctionDefinition {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if let Some(documentation) = &self.documentation {
writeln!(f, "/// {}", documentation)?;
}
write!(
f,
"{}fn {}({}) -> {:#} ",

View File

@@ -311,6 +311,7 @@ impl<'map> Pass for LinkerPass<'map> {
importer_module.functions.push(FunctionDefinition {
name: function.name.clone(),
documentation: function.documentation.clone(),
linkage_name: None,
is_pub: false,
is_imported: false,
@@ -458,6 +459,7 @@ impl<'map> Pass for LinkerPass<'map> {
ty.clone(),
FunctionDefinition {
name: func_name.clone(),
documentation: func.documentation.clone(),
linkage_name: Some(format!("{}::{}", ty, func_name)),
is_pub: false,
is_imported: false,

View File

@@ -318,6 +318,7 @@ pub struct FunctionCall {
#[derive(Debug)]
pub struct FunctionDefinition {
pub name: String,
pub documentation: Option<String>,
pub linkage_name: Option<String>,
/// Whether this function is visible to outside modules
pub is_pub: bool,