Add autocomplete for imports
This commit is contained in:
parent 97a5c3a65e
commit bb9f69ee53
@@ -1,10 +1,12 @@
-use std::{collections::HashMap, path::PathBuf};
+use std::{collections::HashMap, fmt::format, path::PathBuf};
 
 use reid::{
-    ast::lexer::FullToken,
+    ast::{self, FunctionDefinition, lexer::FullToken},
     compile_module,
     error_raporting::{ErrorModules, ReidError},
-    mir::{self, Context, FunctionCall, IfExpression, SourceModuleId, StructType, TypeKind, WhileStatement},
+    mir::{
+        self, Context, FunctionCall, FunctionParam, IfExpression, SourceModuleId, StructType, TypeKind, WhileStatement,
+    },
    perform_all_passes,
 };
@@ -20,6 +22,34 @@ pub struct StaticAnalysis {
 #[derive(Debug, Clone)]
 pub struct SemanticAnalysis {
     pub ty: Option<TypeKind>,
+    pub autocomplete: Vec<Autocomplete>,
 }
+
+#[derive(Debug, Clone)]
+pub struct Autocomplete {
+    pub text: String,
+    pub kind: AutocompleteKind,
+}
+
+#[derive(Debug, Clone)]
+pub enum AutocompleteKind {
+    Type,
+    Function(Vec<FunctionParam>, TypeKind),
+}
+
+impl ToString for AutocompleteKind {
+    fn to_string(&self) -> String {
+        match self {
+            AutocompleteKind::Type => String::from("type"),
+            AutocompleteKind::Function(params, ret_ty) => {
+                let params = params
+                    .iter()
+                    .map(|p| format!("{}: {}", p.name, p.ty))
+                    .collect::<Vec<_>>();
+                format!("({}) -> {}", params.join(", "), ret_ty)
+            }
+        }
+    }
+}
 
 pub fn analyze(
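
The `ToString` impl above is what the LSP layer later uses as the completion detail text. Below is a standalone sketch of the label it produces, with plain string pairs standing in for `FunctionParam`'s name/type fields (the real types are assumed to implement `Display`):

```rust
// Standalone sketch of the label formatting in AutocompleteKind::to_string.
// (name, ty) string pairs stand in for FunctionParam fields.
fn function_label(params: &[(&str, &str)], ret_ty: &str) -> String {
    let params = params
        .iter()
        .map(|(name, ty)| format!("{}: {}", name, ty))
        .collect::<Vec<_>>();
    format!("({}) -> {}", params.join(", "), ret_ty)
}

fn main() {
    // A `pub fn add(lhs: i32, rhs: i32) -> i32` would be detailed as:
    assert_eq!(
        function_label(&[("lhs", "i32"), ("rhs", "i32")], "i32"),
        "(lhs: i32, rhs: i32) -> i32"
    );
}
```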
@@ -28,47 +58,110 @@ pub fn analyze(
     path: PathBuf,
     map: &mut ErrorModules,
 ) -> Result<Option<StaticAnalysis>, ReidError> {
-    let (module, error) = match compile_module(module_id, tokens, map, Some(path.clone()), true)? {
+    let (module, mut parse_error) = match compile_module(module_id, tokens, map, Some(path.clone()), true)? {
         Ok(module) => (module, None),
         Err((m, err)) => (m.process(module_id), Some(err)),
     };
 
     let module_id = module.module_id;
     let mut context = Context::from(vec![module], path.parent().unwrap().to_owned());
-    perform_all_passes(&mut context, map)?;
+    match perform_all_passes(&mut context, map) {
+        Ok(_) => {}
+        Err(pass_error) => {
+            if let Some(err) = &mut parse_error {
+                err.extend(pass_error);
+            } else {
+                parse_error = Some(pass_error)
+            }
+        }
+    }
 
     for module in context.modules.values() {
         if module.module_id != module_id {
             continue;
         }
-        return Ok(Some(analyze_context(&context, &module, error)));
+        return Ok(Some(analyze_context(&context, &module, parse_error)));
     }
     return Ok(None);
 }
 
-pub fn set_tokens(map: &mut TokenAnalysisMap, meta: &mir::Metadata, analysis: SemanticAnalysis) {
-    for token in meta.range.start..meta.range.end {
-        map.insert(token, analysis.clone());
+pub fn init_types(map: &mut TokenAnalysisMap, meta: &mir::Metadata, ty: Option<TypeKind>) {
+    for token in meta.range.start..=meta.range.end {
+        map.insert(
+            token,
+            SemanticAnalysis {
+                ty: ty.clone(),
+                autocomplete: Vec::new(),
+            },
+        );
     }
 }
 
+pub fn set_autocomplete(map: &mut TokenAnalysisMap, meta: &mir::Metadata, autocomplete: Vec<Autocomplete>) {
+    for token in meta.range.start..=meta.range.end {
+        if let Some(token) = map.get_mut(&token) {
+            token.autocomplete = autocomplete.clone();
+        } else {
+            map.insert(
+                token,
+                SemanticAnalysis {
+                    ty: None,
+                    autocomplete: autocomplete.clone(),
+                },
+            );
+        }
+    }
+}
+
 pub fn analyze_context(context: &mir::Context, module: &mir::Module, error: Option<ReidError>) -> StaticAnalysis {
     let mut map = HashMap::new();
     for import in &module.imports {
-        set_tokens(&mut map, &import.1, SemanticAnalysis { ty: None });
+        init_types(&mut map, &import.1, None);
+        if let Some((module_name, _)) = import.0.get(0) {
+            let (import_name, import_meta) = import.0.get(1).cloned().unwrap_or((
+                String::new(),
+                mir::Metadata {
+                    source_module_id: module.module_id,
+                    range: reid::ast::token_stream::TokenRange {
+                        start: import.1.range.end - 1,
+                        end: import.1.range.end - 1,
+                    },
+                    position: None,
+                },
+            ));
+            let mut autocompletes = Vec::new();
+
+            if let Some((_, module)) = context.modules.iter().find(|m| m.1.name == *module_name) {
+                for function in &module.functions {
+                    if !function.is_pub {
+                        continue;
+                    }
+                    if function.name.starts_with(&import_name) {
+                        autocompletes.push(Autocomplete {
+                            text: function.name.clone(),
+                            kind: AutocompleteKind::Function(function.parameters.clone(), function.return_type.clone()),
+                        });
+                    }
+                }
+                for typedef in &module.typedefs {
+                    if typedef.name.starts_with(&import_name) {
+                        autocompletes.push(Autocomplete {
+                            text: typedef.name.clone(),
+                            kind: AutocompleteKind::Type,
+                        });
+                    }
+                }
+            }
+            dbg!(import_meta, &autocompletes);
+            set_autocomplete(&mut map, &import_meta, autocompletes);
+        }
     }
 
     for typedef in &module.typedefs {
         match &typedef.kind {
             mir::TypeDefinitionKind::Struct(StructType(fields)) => {
                 for field in fields {
-                    set_tokens(
-                        &mut map,
-                        &field.2,
-                        SemanticAnalysis {
-                            ty: Some(field.1.clone()),
-                        },
-                    );
+                    init_types(&mut map, &field.2, Some(field.1.clone()));
                 }
             }
         }
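
Worth noting the split introduced here: `init_types` overwrites each token in the (now inclusive) range with a fresh `SemanticAnalysis`, while `set_autocomplete` merges into whatever entry already exists. A minimal sketch of that merge behavior over a plain `HashMap`, using simplified stand-ins for the analysis types:

```rust
use std::collections::HashMap;

#[derive(Debug, Clone, Default)]
struct Analysis {
    ty: Option<String>,        // stand-in for Option<TypeKind>
    autocomplete: Vec<String>, // stand-in for Vec<Autocomplete>
}

// Mirrors set_autocomplete: update in place if the token exists, insert otherwise.
fn set_autocomplete(map: &mut HashMap<usize, Analysis>, range: (usize, usize), items: Vec<String>) {
    for token in range.0..=range.1 {
        map.entry(token)
            .and_modify(|a| a.autocomplete = items.clone())
            .or_insert_with(|| Analysis { ty: None, autocomplete: items.clone() });
    }
}

fn main() {
    let mut map = HashMap::new();
    map.insert(3, Analysis { ty: Some("u32".into()), ..Default::default() });
    set_autocomplete(&mut map, (2, 3), vec!["print".into()]);
    assert_eq!(map[&2].ty, None);               // created fresh
    assert_eq!(map[&3].ty, Some("u32".into())); // type kept, autocomplete merged in
}
```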
@@ -84,13 +177,7 @@ pub fn analyze_context(context: &mir::Context, module: &mir::Module, error: Opti
 
     for (_, function) in &module.associated_functions {
         for param in &function.parameters {
-            set_tokens(
-                &mut map,
-                &param.meta,
-                SemanticAnalysis {
-                    ty: Some(param.ty.clone()),
-                },
-            );
+            init_types(&mut map, &param.meta, Some(param.ty.clone()));
         }
 
         match &function.kind {
@@ -102,13 +189,7 @@ pub fn analyze_context(context: &mir::Context, module: &mir::Module, error: Opti
 
     for function in &module.functions {
         for param in &function.parameters {
-            set_tokens(
-                &mut map,
-                &param.meta,
-                SemanticAnalysis {
-                    ty: Some(param.ty.clone()),
-                },
-            );
+            init_types(&mut map, &param.meta, Some(param.ty.clone()));
         }
 
         match &function.kind {
@@ -134,15 +215,13 @@ pub fn analyze_block(
     for statement in &block.statements {
         match &statement.0 {
             mir::StmtKind::Let(named_variable_ref, _, expression) => {
-                set_tokens(
+                init_types(
                     map,
                     &named_variable_ref.2,
-                    SemanticAnalysis {
-                        ty: expression
-                            .return_type(&Default::default(), source_module.module_id)
-                            .ok()
-                            .map(|(_, ty)| ty),
-                    },
+                    expression
+                        .return_type(&Default::default(), source_module.module_id)
+                        .ok()
+                        .map(|(_, ty)| ty),
                 );
                 // return analyze_in_expr(&expression, module_id, token_idx);
             }
@@ -172,15 +251,12 @@ pub fn analyze_expr(
     expr: &mir::Expression,
     map: &mut TokenAnalysisMap,
 ) {
-    set_tokens(
+    init_types(
         map,
         &expr.1,
-        SemanticAnalysis {
-            ty: expr
-                .return_type(&Default::default(), source_module.module_id)
-                .ok()
-                .map(|(_, t)| t),
-        },
+        expr.return_type(&Default::default(), source_module.module_id)
+            .ok()
+            .map(|(_, t)| t),
     );
 
     match &expr.0 {
@@ -67,10 +67,40 @@ impl LanguageServer for Backend {
     }
 
     async fn completion(&self, params: CompletionParams) -> jsonrpc::Result<Option<CompletionResponse>> {
-        Ok(Some(CompletionResponse::Array(vec![
-            CompletionItem::new_simple("Hello".to_string(), "Some detail".to_string()),
-            CompletionItem::new_simple("Bye".to_string(), "More detail".to_string()),
-        ])))
+        let path = PathBuf::from(params.text_document_position.text_document.uri.path());
+        let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
+        let analysis = self.analysis.get(&file_name);
+        let position = params.text_document_position.position;
+
+        let token = if let Some(analysis) = &analysis {
+            analysis.tokens.iter().enumerate().find(|(_, tok)| {
+                tok.position.1 == position.line + 1
+                    && (tok.position.0 <= position.character
+                        && (tok.position.0 + tok.token.len() as u32) > position.character)
+            })
+        } else {
+            None
+        };
+
+        dbg!(position, token);
+
+        let list = if let Some((idx, _)) = token {
+            if let Some(analysis) = self.analysis.get(&file_name).unwrap().token_analysis.get(&idx) {
+                dbg!(&analysis);
+                analysis
+                    .autocomplete
+                    .iter()
+                    .map(|s| CompletionItem::new_simple(s.text.to_string(), s.kind.to_string()))
+                    .collect()
+            } else {
+                Vec::new()
+            }
+        } else {
+            Vec::new()
+        };
+
+        dbg!(&list);
+        Ok(Some(CompletionResponse::Array(list)))
     }
 
     async fn hover(&self, params: HoverParams) -> jsonrpc::Result<Option<Hover>> {
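
The `find` predicate above bridges two coordinate systems: LSP `Position` is zero-based, while the lexer's token positions appear to be one-based lines with a starting column, so a token contains the cursor when the cursor's column falls in `[start, start + len)`. A hedged sketch of that containment test with simplified stand-in types:

```rust
// Simplified stand-ins: an LSP-style position (zero-based line/character)
// and a lexer-style token with a one-based line and a column span.
struct Pos { line: u32, character: u32 }
struct Tok { line_one_based: u32, col: u32, len: u32 }

// Mirrors the find() predicate in completion(): same line, and the cursor
// column falls inside [col, col + len).
fn contains(tok: &Tok, pos: &Pos) -> bool {
    tok.line_one_based == pos.line + 1
        && tok.col <= pos.character
        && tok.col + tok.len > pos.character
}

fn main() {
    let tok = Tok { line_one_based: 5, col: 8, len: 4 };
    assert!(contains(&tok, &Pos { line: 4, character: 8 }));
    assert!(!contains(&tok, &Pos { line: 4, character: 12 })); // one past the end
}
```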
@@ -184,7 +184,7 @@ pub struct LetStatement {
 }
 
 #[derive(Debug, Clone)]
-pub struct ImportStatement(pub Vec<String>, pub TokenRange);
+pub struct ImportStatement(pub Vec<(String, TokenRange)>, pub TokenRange);
 
 #[derive(Debug)]
 pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub TokenRange);
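
This is the central data-shape change: each path segment of an import now carries its own `TokenRange`, so the analyzer can tell which segment the cursor is on. A sketch of the new shape with illustrative, hypothetical token indices:

```rust
// Illustrative shape only: `import std::print` with hypothetical token indices.
struct TokenRange { start: usize, end: usize }
struct ImportStatement(Vec<(String, TokenRange)>, TokenRange);

fn main() {
    let import = ImportStatement(
        vec![
            ("std".to_string(), TokenRange { start: 1, end: 1 }),
            ("print".to_string(), TokenRange { start: 3, end: 3 }),
        ],
        TokenRange { start: 0, end: 4 }, // range of the whole statement
    );
    // Autocomplete can now be attached to segment 1 ("print") alone.
    assert_eq!(import.0[1].1.start, 3);
}
```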
@@ -631,12 +631,14 @@ impl Parse for ImportStatement {
         let mut import_list = Vec::new();
 
         if let Some(Token::Identifier(name)) = stream.next() {
-            import_list.push(name);
+            import_list.push((name, stream.get_range_prev_single().unwrap()));
             while stream.expect(Token::Colon).is_ok() && stream.expect(Token::Colon).is_ok() {
-                if let Some(Token::Identifier(name)) = stream.next() {
-                    import_list.push(name);
+                if let Some(Token::Identifier(name)) = stream.peek() {
+                    stream.next(); // Consume identifier
+                    import_list.push((name, stream.get_range_prev_single().unwrap()));
                 } else {
-                    Err(stream.expected_err("identifier")?)?
+                    stream.expected_err_nonfatal("identifier");
+                    break;
                 }
             }
         } else {
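
Two details here: the `while` consumes the `::` separator via two chained `expect(Token::Colon)` calls, and the error after a trailing `::` is now nonfatal, so `import std::` still parses as a partial import — exactly the state in which autocomplete is wanted. A standalone sketch of that recovery shape, with a hypothetical minimal token stream (colons modeled as single `":"` tokens):

```rust
// Hypothetical minimal stream to illustrate the recovery shape.
struct Stream<'a> { toks: &'a [&'a str], pos: usize }

impl<'a> Stream<'a> {
    fn expect(&mut self, t: &str) -> Result<(), ()> {
        if self.toks.get(self.pos) == Some(&t) { self.pos += 1; Ok(()) } else { Err(()) }
    }
    fn peek_ident(&self) -> Option<&'a str> {
        self.toks.get(self.pos).copied().filter(|t| *t != ":")
    }
}

// Parse `a::b::...`, tolerating a trailing `::` instead of failing the parse.
fn parse_import(s: &mut Stream) -> Vec<String> {
    let mut list = Vec::new();
    if let Some(name) = s.peek_ident() { s.pos += 1; list.push(name.to_string()); }
    while s.expect(":").is_ok() && s.expect(":").is_ok() {
        match s.peek_ident() {
            Some(name) => { s.pos += 1; list.push(name.to_string()); }
            None => break, // nonfatal: keep what we have for autocomplete
        }
    }
    list
}

fn main() {
    let mut s = Stream { toks: &["std", ":", ":"], pos: 0 };
    assert_eq!(parse_import(&mut s), vec!["std".to_string()]); // partial import survives
}
```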
@@ -30,7 +30,14 @@ impl ast::Module {
         for stmt in &self.top_level_statements {
             match stmt {
                 Import(import) => {
-                    imports.push(mir::Import(import.0.clone(), import.1.as_meta(module_id)));
+                    imports.push(mir::Import(
+                        import
+                            .0
+                            .iter()
+                            .map(|(s, range)| (s.clone(), range.as_meta(module_id)))
+                            .collect(),
+                        import.1.as_meta(module_id),
+                    ));
                 }
                 FunctionDefinition(function_def) => functions.push(function_def.into_mir(module_id)),
                 ExternFunction(signature) => {
@@ -212,6 +212,14 @@ impl<'a, 'b> TokenStream<'a, 'b> {
         })
     }
 
+    /// Gets range of the previous token only.
+    pub fn get_range_prev_single(&self) -> Option<TokenRange> {
+        self.ref_position.as_ref().map(|ref_pos| TokenRange {
+            start: self.previous_token(self.position).0,
+            end: self.previous_token(self.position).0,
+        })
+    }
+
     fn previous_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
         from -= 1;
         while let Some(token) = self.tokens.get(from) {
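
`get_range_prev_single` is meant to be called right after consuming a token (as the import parser above does after each identifier), yielding a one-token range. A simplified standalone model of that call pattern — the real `TokenStream` derives the index via `previous_token`:

```rust
// Simplified standalone model: after next() consumes a token, the
// "previous token" range is the single index just behind the cursor.
struct Stream { position: usize }

impl Stream {
    fn next(&mut self) { self.position += 1; } // consume one token
    // Mirrors the new helper: a one-token range that ends where it starts.
    fn get_range_prev_single(&self) -> (usize, usize) {
        (self.position - 1, self.position - 1)
    }
}

fn main() {
    let mut s = Stream { position: 0 };
    s.next(); // consume identifier "std"
    assert_eq!(s.get_range_prev_single(), (0, 0));
}
```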
@@ -181,6 +181,10 @@ impl ReidError {
     pub fn from_kind(errors: Vec<ErrorKind>, map: ErrorModules) -> ReidError {
         ReidError { map, errors }
     }
+
+    pub fn extend(&mut self, other: ReidError) {
+        self.errors.extend(other.errors);
+    }
 }
 
 impl std::error::Error for ReidError {}
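
`extend` is what lets `analyze` accumulate parse errors and pass errors into a single report instead of bailing on the first `?`. A minimal sketch of that accumulation pattern with a simplified stand-in error type:

```rust
// Simplified stand-in for ReidError's error accumulation.
#[derive(Debug, Default)]
struct Report { errors: Vec<String> }

impl Report {
    fn extend(&mut self, other: Report) {
        self.errors.extend(other.errors);
    }
}

fn main() {
    let mut parse_error: Option<Report> = Some(Report { errors: vec!["parse: bad token".into()] });
    let pass_error = Report { errors: vec!["pass: unknown type".into()] };

    // Same shape as the match in analyze(): merge, or take the new error wholesale.
    if let Some(err) = &mut parse_error {
        err.extend(pass_error);
    } else {
        parse_error = Some(pass_error);
    }
    assert_eq!(parse_error.unwrap().errors.len(), 2);
}
```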
@@ -130,6 +130,7 @@ pub fn compile_module<'map>(
     };
 
     if errors.len() > 0 {
+        dbg!(&ast_module);
         return Ok(Err((
             ast_module,
             ReidError::from_kind(
@@ -84,7 +84,11 @@ impl Display for GlobalKind {
 
 impl Display for Import {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "import {}", self.0.join("::"))
+        write!(
+            f,
+            "import {}",
+            self.0.iter().map(|(s, _)| s.clone()).collect::<Vec<_>>().join("::")
+        )
     }
 }
 
@@ -124,7 +124,9 @@ impl<'map> Pass for LinkerPass<'map> {
             state.ok::<_, Infallible>(Err(ErrorKind::InnerModulesNotYetSupported(import.clone())), import.1);
         }
 
-        let module_name = unsafe { path.get_unchecked(0) };
+        let Some((module_name, _)) = path.get(0) else {
+            continue;
+        };
 
         let mut imported = if let Some(mod_id) = module_ids.get(module_name) {
             modules.get(mod_id).unwrap()
@@ -197,7 +199,9 @@ impl<'map> Pass for LinkerPass<'map> {
         }
         .borrow_mut();
 
-        let import_name = unsafe { path.get_unchecked(1) };
+        let Some((import_name, _)) = path.get(1) else {
+            continue;
+        };
         let import_id = imported.module_id;
 
         let mut imported_types = Vec::new();
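
Both linker sites swap `unsafe { path.get_unchecked(..) }` for a `let .. else` that skips the import rather than risking undefined behavior on a short path — which partial imports (for example a bare `std::`) can now produce. A standalone sketch of the pattern, with plain strings standing in for the path segments:

```rust
// Sketch: a partial import path (e.g. just ["std"]) no longer reaches an
// unchecked index; the let-else skips it instead.
fn link(paths: &[Vec<&str>]) {
    for path in paths {
        let Some(module_name) = path.get(0) else { continue };
        let Some(import_name) = path.get(1) else {
            println!("skipping incomplete import of {}", module_name);
            continue;
        };
        println!("linking {}::{}", module_name, import_name);
    }
}

fn main() {
    link(&[vec!["std", "print"], vec!["std"]]);
}
```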
@@ -256,7 +256,7 @@ pub enum ReturnKind {
 pub struct NamedVariableRef(pub TypeKind, pub String, pub Metadata);
 
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Import(pub Vec<String>, pub Metadata);
+pub struct Import(pub Vec<(String, Metadata)>, pub Metadata);
 
 #[derive(Debug, Clone)]
 pub enum ExprKind {