Compare commits
28 commits: c7f1b81c9d ... 766a853b48

766a853b48
109fedb624
d27ec2bb70
5706fd99e3
3b3b21d4dc
7809aeb2b5
cd2ebb5224
8b0d09c08d
a215a2116a
0abeb0c4cd
ac0d79f816
909728a564
82537224e7
79b3c6b3ef
1ae164b1d6
9b9baabc81
13c462cb9b
8739fe16d1
1438ba7bd1
d9911a8ff5
48dd17b320
dcc53498e7
018f3e2561
6a9133baff
3f3de9e2c0
b965ca11b9
3537318466
dbc43f51ee
@@ -28,6 +28,7 @@ fn main() -> u32 {
     let mut list = u64::malloc(15);
     list[4] = 17;
 
+
     print(from_str("value: ") + list[4]);
 
     return i32::sizeof() as u32;
@@ -22,6 +22,8 @@ fn main() -> u32 {
     let otus = Otus { field: 17 };
     print(from_str("otus: ") + otus.test() as u64);
 
+    otus.field;
+
 
     return otus.test();
 }
examples/module_importee.reid (new file, +8)
@@ -0,0 +1,8 @@
+
+struct Otus {
+    field: u32,
+}
+
+pub fn test() -> Otus {
+    Otus {field: 4}
+}
examples/module_impoter.reid (new file, +11)
@@ -0,0 +1,11 @@
+// Arithmetic, function calls and imports!
+
+import module_importee::Otus;
+import module_importee::test;
+
+fn main() -> u32 {
+    let value = 0b110;
+    let other = 0o17;
+
+    return value * other + test().field * -value;
+}
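Note: as a quick sanity check of the arithmetic in this example, the literals evaluate to 0b110 = 6 and 0o17 = 15, and test().field is 4, so main computes 6 * 15 + 4 * (-6) = 66. A minimal Rust sketch of the same computation (signed integers assumed here for the negation; the Reid main returns u32, so the actual conversion behaviour depends on Reid's semantics):

    fn main() {
        let value: i32 = 0b110; // 6
        let other: i32 = 0o17;  // 15
        let field: i32 = 4;     // Otus { field: 4 } from module_importee
        let result = value * other + field * -value; // 90 - 24
        assert_eq!(result, 66);
    }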
@@ -4,7 +4,7 @@
  * ------------------------------------------------------------------------------------------ */
 
 import * as path from 'path';
-import { workspace, ExtensionContext, window } from 'vscode';
+import { workspace, ExtensionContext, window, languages, SemanticTokensBuilder } from 'vscode';
 
 import {
     Executable,
@@ -53,7 +53,7 @@ export function activate(context: ExtensionContext) {
         synchronize: {
             // Notify the server about file changes to '.clientrc files contained in the workspace
             fileEvents: workspace.createFileSystemWatcher('**/.clientrc')
-        }
+        },
     };
 
     // Create the language client and start the client.
@@ -68,8 +68,22 @@ export function activate(context: ExtensionContext) {
     client.info(`Loaded Reid Language Server from ${server_path}`);
 
 
-    workspace.onDidOpenTextDocument((e) => {
-    });
+    workspace.onDidOpenTextDocument((e) => { });
+
+    client.info("Registering semantic tokens provide");
+    context.subscriptions.push(languages.registerDocumentSemanticTokensProvider({
+        language: 'reid',
+        scheme: 'file'
+    }, {
+        provideDocumentSemanticTokens: () => {
+            client.info("hello!");
+            const builder = new SemanticTokensBuilder();
+            return builder.build();
+        }
+    }, {
+        tokenTypes: [],
+        tokenModifiers: [],
+    }));
 
     // Start the client. This will also launch the server
     client.start();
reid-lsp/language-configuration.json (new file, +78)
@@ -0,0 +1,78 @@
+{
+    "comments": {
+        "lineComment": "//",
+    },
+    "brackets": [
+        [
+            "{",
+            "}"
+        ],
+        [
+            "[",
+            "]"
+        ],
+        [
+            "(",
+            ")"
+        ]
+    ],
+    "autoClosingPairs": [
+        {
+            "open": "{",
+            "close": "}"
+        },
+        {
+            "open": "[",
+            "close": "]"
+        },
+        {
+            "open": "(",
+            "close": ")"
+        },
+        {
+            "open": "'",
+            "close": "'",
+            "notIn": [
+                "string",
+                "comment"
+            ]
+        },
+        {
+            "open": "\"",
+            "close": "\"",
+            "notIn": [
+                "string"
+            ]
+        },
+    ],
+    "autoCloseBefore": ";:.,=}])>` \n\t",
+    "surroundingPairs": [
+        [
+            "{",
+            "}"
+        ],
+        [
+            "[",
+            "]"
+        ],
+        [
+            "(",
+            ")"
+        ],
+        [
+            "\"",
+            "\""
+        ],
+    ],
+    "folding": {
+        "markers": {
+            "start": "^\\s*//\\s*#?region\\b",
+            "end": "^\\s*//\\s*#?endregion\\b"
+        }
+    },
+    "wordPattern": "[a-Z](\\w*)",
+    "indentationRules": {
+        "increaseIndentPattern": "^((?!\\/\\/).)*(\\{[^}\"'`]*|\\([^)\"'`]*|\\[[^\\]\"'`]*)$",
+        "decreaseIndentPattern": "^((?!.*?\\/\\*).*\\*/)?\\s*[\\)\\}\\]].*$"
+    }
+}
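Note: the "wordPattern" value "[a-Z](\\w*)" contains a reversed character-class range ('a' sorts after 'Z' in ASCII), which most regex engines reject; the intent was presumably "[a-zA-Z](\\w*)". A hedged illustration using the Rust regex crate (not a dependency of this repo, shown only to demonstrate the range rule):

    fn main() {
        // Reversed range: 'a' (0x61) > 'Z' (0x5A), so compilation fails.
        assert!(regex::Regex::new(r"[a-Z](\w*)").is_err());
        // The likely intended pattern compiles fine.
        assert!(regex::Regex::new(r"[a-zA-Z](\w*)").is_ok());
    }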
@@ -12,9 +12,6 @@
     "categories": [
         "Other"
     ],
-    "activationEvents": [
-        "onLanguage:reid"
-    ],
     "main": "./dist/extension.js",
     "contributes": {
         "languages": [
@@ -25,7 +22,8 @@
             ],
             "aliases": [
                 "Reid"
-            ]
+            ],
+            "configuration": "./language-configuration.json"
         }
     ],
     "configuration": {
File diff suppressed because it is too large
@@ -1,27 +1,35 @@
+use std::collections::HashMap;
 use std::path::PathBuf;
 
 use dashmap::DashMap;
 use reid::ast::lexer::{FullToken, Position};
 use reid::error_raporting::{self, ErrorModules, ReidError};
-use reid::mir::{SourceModuleId, TypeKind};
+use reid::mir::SourceModuleId;
 use reid::parse_module;
+use tokio::sync::Mutex;
 use tower_lsp::lsp_types::{
     self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity,
-    DidChangeTextDocumentParams, DidOpenTextDocumentParams, Hover, HoverContents, HoverParams, HoverProviderCapability,
-    InitializeParams, InitializeResult, InitializedParams, MarkupContent, MarkupKind, MessageType, OneOf, Range,
-    ServerCapabilities, TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
-    WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
+    DidChangeTextDocumentParams, DidOpenTextDocumentParams, DocumentFilter, GotoDefinitionParams,
+    GotoDefinitionResponse, Hover, HoverContents, HoverParams, HoverProviderCapability, InitializeParams,
+    InitializeResult, InitializedParams, Location, MarkupContent, MarkupKind, MessageType, OneOf, Range,
+    ReferenceParams, RenameParams, SemanticToken, SemanticTokensLegend, SemanticTokensOptions, SemanticTokensParams,
+    SemanticTokensResult, SemanticTokensServerCapabilities, ServerCapabilities, TextDocumentItem,
+    TextDocumentRegistrationOptions, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
+    TextEdit, Url, WorkspaceEdit, WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
 };
 use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc};
 
-use crate::analysis::{StaticAnalysis, analyze};
+use crate::analysis::{MODIFIER_LEGEND, StateMap, StaticAnalysis, TOKEN_LEGEND, analyze};
 
 mod analysis;
 
 #[derive(Debug)]
 struct Backend {
     client: Client,
-    analysis: DashMap<String, StaticAnalysis>,
+    analysis: DashMap<PathBuf, StaticAnalysis>,
+    module_to_path: DashMap<SourceModuleId, PathBuf>,
+    path_to_module: DashMap<PathBuf, SourceModuleId>,
+    module_id_counter: Mutex<SourceModuleId>,
 }
 
 #[tower_lsp::async_trait]
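Note: the Backend now keys analysis by full PathBuf instead of bare file name and keeps a bidirectional module-id/path mapping, so any handler can resolve in either direction across modules. A minimal sketch of how the two maps are meant to stay in sync (a stand-in u32 for SourceModuleId; field names taken from the diff, the register helper is hypothetical):

    use std::path::PathBuf;
    use dashmap::DashMap;

    struct Index {
        module_to_path: DashMap<u32, PathBuf>,
        path_to_module: DashMap<PathBuf, u32>,
    }

    impl Index {
        // Insert into both maps together so either key resolves the other.
        fn register(&self, id: u32, path: PathBuf) {
            self.path_to_module.insert(path.clone(), id);
            self.module_to_path.insert(id, path);
        }
    }

    fn main() {
        let index = Index { module_to_path: DashMap::new(), path_to_module: DashMap::new() };
        index.register(1, PathBuf::from("examples/module_importee.reid"));
        assert_eq!(index.module_to_path.get(&1).map(|r| r.clone()), Some(PathBuf::from("examples/module_importee.reid")));
    }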
@@ -38,20 +46,46 @@ impl LanguageServer for Backend {
             will_save_wait_until: None,
             save: None,
         };
-        Ok(InitializeResult {
-            capabilities: ServerCapabilities {
-                hover_provider: Some(HoverProviderCapability::Simple(true)),
-                completion_provider: Some(CompletionOptions { ..Default::default() }),
-                text_document_sync: Some(TextDocumentSyncCapability::Options(sync)),
-                workspace: Some(WorkspaceServerCapabilities {
-                    workspace_folders: Some(WorkspaceFoldersServerCapabilities {
-                        supported: Some(true),
-                        change_notifications: Some(OneOf::Left(true)),
-                    }),
-                    file_operations: None,
-                }),
-                ..Default::default()
-            },
+        let capabilities = ServerCapabilities {
+            hover_provider: Some(HoverProviderCapability::Simple(true)),
+            completion_provider: Some(CompletionOptions { ..Default::default() }),
+            text_document_sync: Some(TextDocumentSyncCapability::Options(sync)),
+            workspace: Some(WorkspaceServerCapabilities {
+                workspace_folders: Some(WorkspaceFoldersServerCapabilities {
+                    supported: Some(true),
+                    change_notifications: Some(OneOf::Left(true)),
+                }),
+                file_operations: None,
+            }),
+            semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
+                lsp_types::SemanticTokensRegistrationOptions {
+                    text_document_registration_options: TextDocumentRegistrationOptions {
+                        document_selector: Some(vec![DocumentFilter {
+                            language: Some("reid".to_owned()),
+                            scheme: Some("file".to_owned()),
+                            pattern: None,
+                        }]),
+                    },
+                    semantic_tokens_options: SemanticTokensOptions {
+                        work_done_progress_options: Default::default(),
+                        legend: SemanticTokensLegend {
+                            token_types: TOKEN_LEGEND.into(),
+                            token_modifiers: MODIFIER_LEGEND.into(),
+                        },
+                        range: None,
+                        full: Some(lsp_types::SemanticTokensFullOptions::Bool(true)),
+                    },
+                    static_registration_options: Default::default(),
+                },
+            )),
+            references_provider: Some(OneOf::Left(true)),
+            definition_provider: Some(OneOf::Left(true)),
+            rename_provider: Some(OneOf::Left(true)),
+            ..Default::default()
+        };
+
+        Ok(InitializeResult {
+            capabilities,
             ..Default::default()
         })
     }
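Note: the legend registered above is what gives meaning to the numeric fields emitted later: each SemanticToken's token_type is an index into the legend's token_types, and token_modifiers_bitset is a bitmask over token_modifiers. An illustrative sketch (the legend contents here are invented; the server's real values come from TOKEN_LEGEND and MODIFIER_LEGEND in the analysis module):

    use tower_lsp::lsp_types::SemanticTokenType;

    fn main() {
        let token_types = [SemanticTokenType::FUNCTION, SemanticTokenType::VARIABLE];
        // token_type: 1 refers to token_types[1], i.e. VARIABLE;
        // token_modifiers_bitset: 0b1 would set the first modifier of the legend.
        assert_eq!(token_types[1], SemanticTokenType::VARIABLE);
    }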
@@ -68,8 +102,7 @@ impl LanguageServer for Backend {
 
     async fn completion(&self, params: CompletionParams) -> jsonrpc::Result<Option<CompletionResponse>> {
         let path = PathBuf::from(params.text_document_position.text_document.uri.path());
-        let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
-        let analysis = self.analysis.get(&file_name);
+        let analysis = self.analysis.get(&path);
         let position = params.text_document_position.position;
 
         let token = if let Some(analysis) = &analysis {
@@ -82,11 +115,10 @@ impl LanguageServer for Backend {
             None
         };
 
-        dbg!(position, token);
+        // dbg!(position, token);
 
         let list = if let Some((idx, _)) = token {
-            if let Some(analysis) = self.analysis.get(&file_name).unwrap().token_analysis.get(&idx) {
-                dbg!(&analysis);
+            if let Some(analysis) = self.analysis.get(&path).unwrap().state.map.get(&idx) {
                 analysis
                     .autocomplete
                     .iter()
@@ -99,14 +131,13 @@ impl LanguageServer for Backend {
             Vec::new()
         };
 
-        dbg!(&list);
+        // dbg!(&list);
         Ok(Some(CompletionResponse::Array(list)))
     }
 
     async fn hover(&self, params: HoverParams) -> jsonrpc::Result<Option<Hover>> {
         let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
-        let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
-        let analysis = self.analysis.get(&file_name);
+        let analysis = self.analysis.get(&path);
         let position = params.text_document_position_params.position;
 
         let token = if let Some(analysis) = &analysis {
@@ -120,7 +151,7 @@ impl LanguageServer for Backend {
         };
 
         let (range, ty) = if let Some((idx, token)) = token {
-            if let Some(analysis) = self.analysis.get(&file_name).unwrap().token_analysis.get(&idx) {
+            if let Some(analysis) = self.analysis.get(&path).unwrap().state.map.get(&idx) {
                 let start = token.position;
                 let end = token.position.add(token.token.len() as u32);
                 let range = Range {
@@ -172,17 +203,235 @@ impl LanguageServer for Backend {
         })
         .await
     }
+
+    async fn semantic_tokens_full(
+        &self,
+        params: SemanticTokensParams,
+    ) -> jsonrpc::Result<Option<SemanticTokensResult>> {
+        let path = PathBuf::from(params.text_document.uri.path());
+        let analysis = self.analysis.get(&path);
+
+        let mut semantic_tokens = Vec::new();
+        if let Some(analysis) = analysis {
+            let mut prev_line = 0;
+            let mut prev_start = 0;
+            for (i, token) in analysis.tokens.iter().enumerate() {
+                let vscode_line = token.position.1.max(1) - 1;
+                let vscode_col = token.position.0.max(1) - 1;
+
+                let delta_line = vscode_line - prev_line;
+                let delta_start = if delta_line == 0 {
+                    vscode_col - prev_start
+                } else {
+                    vscode_col
+                };
+
+                if let Some(token_analysis) = analysis.state.map.get(&i) {
+                    if let Some(symbol_id) = token_analysis.symbol {
+                        let symbol = analysis.state.get_local_symbol(symbol_id);
+                        if let Some(idx) = symbol.kind.into_token_idx(&self.state_map()) {
+                            let semantic_token = SemanticToken {
+                                delta_line,
+                                delta_start,
+                                length: token.token.len() as u32,
+                                token_type: idx,
+                                token_modifiers_bitset: symbol.kind.get_modifier().unwrap_or(0),
+                            };
+                            semantic_tokens.push(semantic_token);
+                            prev_line = vscode_line;
+                            prev_start = vscode_col;
+                        }
+                    }
+                }
+            }
+        }
+
+        Ok(Some(SemanticTokensResult::Tokens(lsp_types::SemanticTokens {
+            result_id: None,
+            data: semantic_tokens,
+        })))
+    }
+
+    async fn goto_definition(&self, params: GotoDefinitionParams) -> jsonrpc::Result<Option<GotoDefinitionResponse>> {
+        let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
+        let analysis = self.analysis.get(&path);
+        let position = params.text_document_position_params.position;
+
+        if let Some(analysis) = &analysis {
+            let token = analysis.tokens.iter().enumerate().find(|(_, tok)| {
+                tok.position.1 == position.line + 1
+                    && (tok.position.0 <= position.character + 1
+                        && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
+            });
+
+            if let Some(token) = token {
+                if let Some((module_id, def_token)) = analysis.find_definition(token.0, &self.state_map()) {
+                    return if let Some(path) = self.module_to_path.get(&module_id) {
+                        Ok(Some(GotoDefinitionResponse::Scalar(lsp_types::Location {
+                            uri: Url::from_file_path(path.value()).unwrap(),
+                            range: token_to_range(def_token),
+                        })))
+                    } else {
+                        Ok(None)
+                    };
+                }
+            }
+        };
+
+        Ok(None)
+    }
+
+    async fn references(&self, params: ReferenceParams) -> jsonrpc::Result<Option<Vec<Location>>> {
+        let path = PathBuf::from(params.text_document_position.text_document.uri.path());
+        let analysis = self.analysis.get(&path);
+        let position = params.text_document_position.position;
+
+        if let Some(analysis) = &analysis {
+            let token = analysis.tokens.iter().enumerate().find(|(_, tok)| {
+                tok.position.1 == position.line + 1
+                    && (tok.position.0 <= position.character + 1
+                        && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
+            });
+            if let Some(token) = token {
+                let reference_tokens = analysis.find_references(token.0, &self.state_map());
+                dbg!(&reference_tokens);
+                let mut locations = Vec::new();
+                if let Some(reference_tokens) = reference_tokens {
+                    for (module_id, symbol_idx) in reference_tokens {
+                        if let Some(path) = self.module_to_path.get(&module_id) {
+                            let url = Url::from_file_path(path.value()).unwrap();
+                            if let Some(inner_analysis) = self.analysis.get(path.value()) {
+                                if let Some(token_idx) = inner_analysis.state.symbol_to_token.get(&symbol_idx) {
+                                    let token = inner_analysis.tokens.get(*token_idx).unwrap();
+
+                                    locations.push(lsp_types::Location {
+                                        uri: url,
+                                        range: token_to_range(token),
+                                    });
+                                }
+                            }
+                        }
+                    }
+                }
+                Ok(Some(locations))
+            } else {
+                Ok(None)
+            }
+        } else {
+            Ok(None)
+        }
+    }
+
+    async fn rename(&self, params: RenameParams) -> jsonrpc::Result<Option<WorkspaceEdit>> {
+        let path = PathBuf::from(params.text_document_position.text_document.uri.path());
+        let analysis = self.analysis.get(&path);
+        let position = params.text_document_position.position;
+
+        if let Some(analysis) = &analysis {
+            let token = analysis.tokens.iter().enumerate().find(|(_, tok)| {
+                tok.position.1 == position.line + 1
+                    && (tok.position.0 <= position.character + 1
+                        && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
+            });
+            if let Some(token) = token {
+                let symbols = analysis.find_references(token.0, &self.state_map());
+                let mut changes: HashMap<Url, Vec<TextEdit>> = HashMap::new();
+                if let Some(symbols) = symbols {
+                    for (module_id, symbol_id) in symbols {
+                        let path = self.module_to_path.get(&module_id);
+                        if let Some(path) = path {
+                            let url = Url::from_file_path(path.value()).unwrap();
+                            let analysis = self.analysis.get(&path.clone());
+
+                            if let Some(analysis) = analysis {
+                                if let Some(token_idx) = analysis.state.symbol_to_token.get(&symbol_id) {
+                                    let token = analysis.tokens.get(*token_idx).unwrap();
+
+                                    // edits = changes.get(k)
+                                    let edit = TextEdit {
+                                        range: token_to_range(token),
+                                        new_text: params.new_name.clone(),
+                                    };
+                                    if let Some(edits) = changes.get_mut(&url) {
+                                        edits.push(edit);
+                                    } else {
+                                        changes.insert(url, vec![edit]);
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+
+                Ok(Some(WorkspaceEdit {
+                    changes: Some(changes),
+                    document_changes: None,
+                    change_annotations: None,
+                }))
+            } else {
+                Ok(None)
+            }
+        } else {
+            Ok(None)
+        }
+    }
+}
+
+fn token_to_range(token: &FullToken) -> lsp_types::Range {
+    Range {
+        start: lsp_types::Position {
+            line: token.position.1.max(1) - 1,
+            character: token.position.0.max(1) - 1,
+        },
+        end: lsp_types::Position {
+            line: token.position.1.max(1) - 1,
+            character: token.position.0.max(1) - 1 + token.token.len() as u32,
+        },
+    }
 }
 
 impl Backend {
-    async fn recompile(&self, params: TextDocumentItem) {
-        let path = PathBuf::from(params.uri.clone().path());
-        let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
+    fn state_map(&self) -> StateMap {
+        let mut state_map = HashMap::new();
+        for path_state in self.analysis.iter() {
+            let (path, state) = path_state.pair();
+            if let Some(module_id) = self.path_to_module.get(path) {
+                state_map.insert(*module_id, state.state.clone());
+            }
+        }
+        state_map
+    }
 
-        let mut map = Default::default();
-        let parse_res = parse(&params.text, path.clone(), &mut map);
+    async fn recompile(&self, params: TextDocumentItem) {
+        let file_path = PathBuf::from(params.uri.clone().path());
+
+        let mut map: ErrorModules = Default::default();
+        for url_module in self.path_to_module.iter() {
+            let (url, module) = url_module.pair();
+            map.add_module(
+                url.file_name().unwrap().to_str().unwrap().to_owned(),
+                Some(url.clone()),
+                Some(*module),
+            );
+        }
+
+        let module_id = if let Some(module_id) = self.path_to_module.get(&file_path) {
+            *module_id
+        } else {
+            let mut lock = self.module_id_counter.lock().await;
+            let module_id = lock.increment();
+            drop(lock);
+            self.path_to_module.insert(file_path.clone(), module_id);
+            self.module_to_path.insert(module_id, file_path.clone());
+            module_id
+        };
+
+        let parse_res = parse(&params.text, file_path.clone(), &mut map, module_id);
         let (tokens, result) = match parse_res {
-            Ok((module_id, tokens)) => (tokens.clone(), analyze(module_id, tokens, path, &mut map)),
+            Ok((module_id, tokens)) => (
+                tokens.clone(),
+                analyze(module_id, tokens, file_path.clone(), &mut map, &self.state_map()),
+            ),
             Err(e) => (Vec::new(), Err(e)),
         };
 
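Note: semantic_tokens_full emits LSP's relative token encoding: delta_line is measured from the previous emitted token's line, and delta_start from its column only when both tokens share a line. A self-contained sketch of that encoding, with reid's 1-based (column, line) positions converted to 0-based (simplified: prev_* updates on every token here, while the handler above only updates them when a token is actually emitted):

    fn deltas(positions: &[(u32, u32)]) -> Vec<(u32, u32)> {
        let (mut prev_line, mut prev_start) = (0, 0);
        let mut out = Vec::new();
        for &(col1, line1) in positions {
            let line = line1.max(1) - 1; // 1-based -> 0-based
            let col = col1.max(1) - 1;
            let delta_line = line - prev_line;
            let delta_start = if delta_line == 0 { col - prev_start } else { col };
            out.push((delta_line, delta_start));
            prev_line = line;
            prev_start = col;
        }
        out
    }

    fn main() {
        // Tokens at (col 1, line 1), (col 5, line 1), (col 2, line 3):
        assert_eq!(deltas(&[(1, 1), (5, 1), (2, 3)]), vec![(0, 0), (0, 4), (2, 1)]);
    }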
@@ -202,7 +451,7 @@ impl Backend {
                     self.client.log_message(MessageType::INFO, format!("{}", error)).await;
                 }
             }
-            self.analysis.insert(file_name.clone(), analysis);
+            self.analysis.insert(file_path, analysis);
         }
         Ok(_) => {}
         Err(mut reid_error) => {
@@ -249,10 +498,21 @@ fn reid_error_into_diagnostic(error: &error_raporting::ErrorKind, tokens: &Vec<F
     }
 }
 
-fn parse(source: &str, path: PathBuf, map: &mut ErrorModules) -> Result<(SourceModuleId, Vec<FullToken>), ReidError> {
+fn parse(
+    source: &str,
+    path: PathBuf,
+    map: &mut ErrorModules,
+    module_id: SourceModuleId,
+) -> Result<(SourceModuleId, Vec<FullToken>), ReidError> {
     let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
 
-    Ok(parse_module(source, file_name.clone(), map)?)
+    Ok(parse_module(
+        source,
+        file_name.clone(),
+        Some(path),
+        map,
+        Some(module_id),
+    )?)
 }
 
 #[tokio::main]
@@ -263,6 +523,9 @@ async fn main() {
     let (service, socket) = LspService::new(|client| Backend {
         client,
         analysis: DashMap::new(),
+        module_to_path: DashMap::new(),
+        path_to_module: DashMap::new(),
+        module_id_counter: Mutex::new(SourceModuleId(0)),
     });
     Server::new(stdin, stdout, socket).serve(service).await;
 }
@@ -272,9 +272,9 @@ pub enum TopLevelStatement {
 
 #[derive(Debug)]
 pub struct BinopDefinition {
-    pub lhs: (String, Type),
+    pub lhs: (String, Type, TokenRange),
     pub op: BinaryOperator,
-    pub rhs: (String, Type),
+    pub rhs: (String, Type, TokenRange),
     pub return_ty: Type,
     pub block: Block,
     pub signature_range: TokenRange,
@@ -1126,6 +1126,7 @@ impl Parse for BinopDefinition {
         let Some(Token::Identifier(lhs_name)) = stream.next() else {
             return Err(stream.expected_err("lhs name")?);
         };
+        let lhs_range = stream.get_range_prev_curr().unwrap();
         stream.expect(Token::Colon)?;
         let lhs_type = stream.parse()?;
         stream.expect(Token::ParenClose)?;
@@ -1136,6 +1137,7 @@ impl Parse for BinopDefinition {
         let Some(Token::Identifier(rhs_name)) = stream.next() else {
            return Err(stream.expected_err("rhs name")?);
         };
+        let rhs_range = stream.get_range_prev_curr().unwrap();
         stream.expect(Token::Colon)?;
         let rhs_type = stream.parse()?;
         stream.expect(Token::ParenClose)?;
@@ -1145,9 +1147,9 @@ impl Parse for BinopDefinition {
         stream.expect(Token::Arrow)?;
 
         Ok(BinopDefinition {
-            lhs: (lhs_name, lhs_type),
+            lhs: (lhs_name, lhs_type, lhs_range),
             op: operator,
-            rhs: (rhs_name, rhs_type),
+            rhs: (rhs_name, rhs_type, rhs_range),
             return_ty: stream.parse()?,
             block: stream.parse()?,
             signature_range,
@@ -1168,11 +1170,11 @@ impl Parse for AssociatedFunctionBlock {
         match stream.peek() {
             Some(Token::FnKeyword) | Some(Token::PubKeyword) => {
                 let mut fun: FunctionDefinition = stream.parse()?;
-                fun.0.self_kind = match fun.0.self_kind {
-                    SelfKind::Owned(_) => SelfKind::Owned(ty.clone()),
-                    SelfKind::Borrow(_) => SelfKind::Borrow(ty.clone()),
-                    SelfKind::MutBorrow(_) => SelfKind::MutBorrow(ty.clone()),
-                    SelfKind::None => SelfKind::None,
+                match &mut fun.0.self_kind {
+                    SelfKind::Owned(inner_ty) => inner_ty.0 = ty.0.clone(),
+                    SelfKind::Borrow(inner_ty) => inner_ty.0 = ty.0.clone(),
+                    SelfKind::MutBorrow(inner_ty) => inner_ty.0 = ty.0.clone(),
+                    SelfKind::None => {}
                 };
                 functions.push(fun);
             }
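Note: this SelfKind change swaps a consuming match (rebuild the enum with a new payload) for an in-place match over &mut, writing through the payload binding so only the type part is replaced. A minimal sketch of the pattern, with a stand-in enum rather than the real SelfKind:

    // Stand-in enum; reid's SelfKind carries a (Type, TokenRange)-style tuple payload.
    enum Kind {
        Owned((u32, &'static str)),
        None,
    }

    fn main() {
        let mut kind = Kind::Owned((0, "range"));
        // Matching on &mut borrows the payload mutably, so only the first tuple
        // field is replaced and the rest (e.g. the range) is left untouched.
        match &mut kind {
            Kind::Owned(inner) => inner.0 = 17,
            Kind::None => {}
        };
        if let Kind::Owned((v, r)) = kind {
            assert_eq!((v, r), (17, "range"));
        }
    }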
@@ -63,6 +63,7 @@ impl ast::Module {
                     .collect(),
                 kind: mir::FunctionDefinitionKind::Extern(false),
                 source: Some(module_id),
+                signature_meta: signature.range.as_meta(module_id),
             };
             functions.push(def);
         }
@@ -103,13 +104,13 @@ impl ast::Module {
                 lhs: mir::FunctionParam {
                     name: lhs.0.clone(),
                     ty: lhs.1 .0.into_mir(module_id),
-                    meta: lhs.1 .1.as_meta(module_id),
+                    meta: lhs.2.as_meta(module_id),
                 },
                 op: op.mir(),
                 rhs: mir::FunctionParam {
                     name: rhs.0.clone(),
                     ty: rhs.1 .0.into_mir(module_id),
-                    meta: rhs.1 .1.as_meta(module_id),
+                    meta: rhs.2.as_meta(module_id),
                 },
                 return_type: return_ty.0.into_mir(module_id),
                 fn_kind: mir::FunctionDefinitionKind::Local(
|
|||||||
parameters: params,
|
parameters: params,
|
||||||
kind: mir::FunctionDefinitionKind::Local(block.into_mir(module_id), (range).as_meta(module_id)),
|
kind: mir::FunctionDefinitionKind::Local(block.into_mir(module_id), (range).as_meta(module_id)),
|
||||||
source: Some(module_id),
|
source: Some(module_id),
|
||||||
|
signature_meta: signature.range.as_meta(module_id),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -54,6 +54,7 @@ pub fn form_intrinsics() -> Vec<FunctionDefinition> {
         }],
         kind: FunctionDefinitionKind::Extern(false),
         source: None,
+        signature_meta: Default::default(),
     });
 
     intrinsics
|
|||||||
}],
|
}],
|
||||||
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicConst(*len))),
|
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicConst(*len))),
|
||||||
source: None,
|
source: None,
|
||||||
|
signature_meta: Default::default(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
@ -101,6 +103,7 @@ pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDef
|
|||||||
parameters: Vec::new(),
|
parameters: Vec::new(),
|
||||||
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicSizeOf(ty.clone()))),
|
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicSizeOf(ty.clone()))),
|
||||||
source: None,
|
source: None,
|
||||||
|
signature_meta: Default::default(),
|
||||||
}),
|
}),
|
||||||
"malloc" => Some(FunctionDefinition {
|
"malloc" => Some(FunctionDefinition {
|
||||||
name: "malloc".to_owned(),
|
name: "malloc".to_owned(),
|
||||||
@ -115,6 +118,7 @@ pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDef
|
|||||||
}],
|
}],
|
||||||
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicMalloc(ty.clone()))),
|
kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicMalloc(ty.clone()))),
|
||||||
source: None,
|
source: None,
|
||||||
|
signature_meta: Default::default(),
|
||||||
}),
|
}),
|
||||||
"null" => Some(FunctionDefinition {
|
"null" => Some(FunctionDefinition {
|
||||||
name: "null".to_owned(),
|
name: "null".to_owned(),
|
||||||
@@ -125,6 +129,7 @@ pub fn get_intrinsic_assoc_func(ty: &TypeKind, name: &str) -> Option<FunctionDef
             parameters: Vec::new(),
             kind: FunctionDefinitionKind::Intrinsic(Box::new(IntrinsicNullPtr(ty.clone()))),
             source: None,
+            signature_meta: Default::default(),
         }),
         _ => None,
     }
@@ -1,6 +1,7 @@
 use std::{
     collections::HashMap,
     fmt::{Debug, Write},
+    path::PathBuf,
 };
 
 use crate::{
@@ -95,21 +96,46 @@ pub struct ErrorModule {
 #[derive(Debug, Clone, PartialEq, Eq, Default)]
 pub struct ErrorModules {
     pub(super) module_map: HashMap<mir::SourceModuleId, ErrorModule>,
+    pub(super) source_id_map: HashMap<PathBuf, mir::SourceModuleId>,
     module_counter: mir::SourceModuleId,
 }
 
 impl ErrorModules {
-    pub fn add_module<T: Into<String>>(&mut self, name: T) -> Option<mir::SourceModuleId> {
-        let id = self.module_counter.increment();
-        self.module_map.insert(
-            id,
-            ErrorModule {
-                name: name.into(),
-                tokens: None,
-                source: None,
-            },
-        );
-        Some(id)
+    pub fn add_module<T: Into<String>>(
+        &mut self,
+        name: T,
+        path: Option<PathBuf>,
+        external_module_id: Option<SourceModuleId>,
+    ) -> Option<mir::SourceModuleId> {
+        let module_id = path.as_ref().and_then(|p| self.source_id_map.get(p));
+
+        if let Some(module_id) = module_id {
+            Some(*module_id)
+        } else {
+            let id = if let Some(module_id) = external_module_id {
+                self.module_counter = SourceModuleId(module_id.0.max(self.module_counter.0));
+                if let Some(_) = self.module_map.get(&module_id) {
+                    panic!("Can not use external module id: Module already exists!")
+                }
+                module_id
+            } else {
+                self.module_counter.increment()
+            };
+
+            if let Some(path) = path {
+                self.source_id_map.insert(path, id);
+            }
+
+            self.module_map.insert(
+                id,
+                ErrorModule {
+                    name: name.into(),
+                    tokens: None,
+                    source: None,
+                },
+            );
+            Some(id)
+        }
     }
 
     pub fn set_tokens(&mut self, id: mir::SourceModuleId, tokens: Vec<FullToken>) {
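Note: add_module is now idempotent per path: a path already present in source_id_map returns its existing id, an explicit external id is adopted (and bumps the counter so later fresh ids cannot collide, panicking if that id is already taken), and only otherwise is a new id allocated. A rough usage sketch of that behaviour, assuming the reid crate is on the path:

    use std::path::PathBuf;
    use reid::error_raporting::ErrorModules;

    fn main() {
        let mut map: ErrorModules = Default::default();
        let path = PathBuf::from("examples/main.reid");
        // First registration allocates a fresh id and records the path.
        let a = map.add_module("main.reid", Some(path.clone()), None);
        // Registering the same path again resolves to the same id.
        let b = map.add_module("main.reid", Some(path), None);
        assert_eq!(a, b);
    }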
@@ -69,7 +69,10 @@ use reid_lib::{compile::CompileOutput, Context};
 
 use crate::{
     ast::TopLevelStatement,
-    mir::macros::{form_macros, MacroModule, MacroPass},
+    mir::{
+        macros::{form_macros, MacroModule, MacroPass},
+        SourceModuleId,
+    },
 };
 
 pub mod ast;
@@ -83,9 +86,11 @@ mod util;
 pub fn parse_module<'map, T: Into<String>>(
     source: &str,
     name: T,
+    path: Option<PathBuf>,
     map: &'map mut ErrorModules,
+    module_id: Option<SourceModuleId>,
 ) -> Result<(mir::SourceModuleId, Vec<FullToken>), ReidError> {
-    let id = map.add_module(name.into()).unwrap();
+    let id = map.add_module(name.into(), path, module_id).unwrap();
     map.set_source(id, source.to_owned());
 
     let tokens = ReidError::from_lexer(lexer::tokenize(source), map.clone(), id)?;
@@ -317,7 +322,7 @@ pub fn compile_and_pass<'map>(
     let path = path.canonicalize().unwrap();
     let name = path.file_name().unwrap().to_str().unwrap().to_owned();
 
-    let (id, tokens) = parse_module(source, name, module_map)?;
+    let (id, tokens) = parse_module(source, name, Some(path.clone()), module_map, None)?;
     let module = compile_module(id, tokens, module_map, Some(path.clone()), true)?.map_err(|(_, e)| e)?;
 
     let mut mir_context = mir::Context::from(vec![module], path.parent().unwrap().to_owned());
@@ -51,7 +51,7 @@ pub enum ErrorKind {
 }
 
 pub fn compile_std(module_map: &mut ErrorModules) -> Result<Module, ReidError> {
-    let (id, tokens) = parse_module(STD_SOURCE, STD_NAME, module_map)?;
+    let (id, tokens) = parse_module(STD_SOURCE, STD_NAME, None, module_map, None)?;
     let module = compile_module(id, tokens, module_map, None, false)?.map_err(|(_, e)| e)?;
 
     let module_id = module.module_id;
@@ -143,7 +143,13 @@ impl<'map> Pass for LinkerPass<'map> {
                 continue;
             };
 
-            let (id, tokens) = match parse_module(&source, module_name.clone(), &mut self.module_map) {
+            let (id, tokens) = match parse_module(
+                &source,
+                module_name.clone(),
+                Some(file_path.clone()),
+                &mut self.module_map,
+                None,
+            ) {
                 Ok(val) => val,
                 Err(err) => {
                     state.ok::<_, Infallible>(
@@ -208,6 +214,7 @@ impl<'map> Pass for LinkerPass<'map> {
 
                 if let Some(func) = imported.functions.iter_mut().find(|f| f.name == *import_name) {
                     let func_name = func.name.clone();
+                    let func_signature = func.signature();
 
                     if !func.is_pub {
                         state.ok::<_, Infallible>(
@@ -252,6 +259,7 @@ impl<'map> Pass for LinkerPass<'map> {
                         parameters: param_tys,
                         kind: super::FunctionDefinitionKind::Extern(true),
                         source: Some(imported.module_id),
+                        signature_meta: func_signature,
                     });
                 } else if let Some(ty) = imported.typedefs.iter_mut().find(|f| f.name == *import_name) {
                     let external_key = CustomTypeKey(ty.name.clone(), ty.source_module);
@@ -344,6 +352,7 @@ impl<'map> Pass for LinkerPass<'map> {
                         parameters: param_tys,
                         kind: super::FunctionDefinitionKind::Extern(true),
                         source: Some(import_id),
+                        signature_meta: func.signature_meta,
                     },
                 ));
             }
@@ -305,6 +305,7 @@ pub struct FunctionDefinition {
     pub parameters: Vec<FunctionParam>,
     pub kind: FunctionDefinitionKind,
     pub source: Option<SourceModuleId>,
+    pub signature_meta: Metadata,
 }
 
 #[derive(Debug, Clone, PartialEq, PartialOrd)]
@@ -340,11 +341,7 @@ impl FunctionDefinition {
     }
 
     pub fn signature(&self) -> Metadata {
-        match &self.kind {
-            FunctionDefinitionKind::Local(_, metadata) => metadata.clone(),
-            FunctionDefinitionKind::Extern(_) => Metadata::default(),
-            FunctionDefinitionKind::Intrinsic(_) => Metadata::default(),
-        }
+        self.signature_meta
     }
 }
 
@@ -14,7 +14,7 @@ mod util;
 fn test_compile(source: &str, name: &str) -> CompileOutput {
     assert_err(assert_err(std::panic::catch_unwind(|| {
         let mut map = Default::default();
-        let (id, tokens) = assert_err(parse_module(source, name, &mut map));
+        let (id, tokens) = assert_err(parse_module(source, name, None, &mut map, None));
 
         let module = assert_err(assert_err(compile_module(id, tokens, &mut map, None, true)).map_err(|(_, e)| e));
         let mut mir_context = mir::Context::from(vec![module], Default::default());