diff --git a/uvls/Cargo.toml b/uvls/Cargo.toml
index 705b00c4..7ef075d5 100644
--- a/uvls/Cargo.toml
+++ b/uvls/Cargo.toml
@@ -49,3 +49,4 @@ open = "4.0.1"
 nom = "7.1.3"
 percent-encoding = "2.2.0"
 html-escape = "0.2.13"
+filetime = "0.2"
diff --git a/uvls/src/core/ast.rs b/uvls/src/core/ast.rs
index 9bd8c551..9b2493d3 100644
--- a/uvls/src/core/ast.rs
+++ b/uvls/src/core/ast.rs
@@ -61,7 +61,7 @@ impl TreeMap {
 }
 //Ast container each symbol kind lives in its own vector
 #[derive(Clone, Debug, Default)]
-struct Ast {
+pub struct Ast {
     keywords: Vec,
     namespace: Option,
     includes: Vec,
@@ -172,7 +172,7 @@ impl Ast {
             .flat_map(|v| v.iter().cloned())
     }
     //utility iterators over different elements of interest
-    fn all_imports(&self) -> impl Iterator<Item = Symbol> + DoubleEndedIterator {
+    pub fn all_imports(&self) -> impl Iterator<Item = Symbol> + DoubleEndedIterator {
         (0..self.import.len()).map(Symbol::Import)
     }
     fn get_import(&self, index: usize) -> Option<&Import> {
@@ -217,6 +217,9 @@ impl Ast {
     fn all_lang_lvls(&self) -> impl Iterator<Item = Symbol> {
         (0..self.includes.len()).map(Symbol::LangLvl)
     }
+    pub fn imports(&self) -> &[Import] {
+        &self.import
+    }
     //Search a symbol by byte offset in O(N)
     fn find(&self, offset: usize) -> Option<Symbol> {
         self.all_imports()
@@ -262,6 +265,7 @@ pub struct AstDocument {
     pub uri: Url,
     pub id: FileID,
 }
+
 impl AstDocument {
     pub fn parent(&self, sym: Symbol, merge_root_features: bool) -> Option<Symbol> {
         if merge_root_features && matches!(sym, Symbol::Feature(..)) {
@@ -367,6 +371,9 @@ impl AstDocument {
     pub fn get_reference(&self, index: usize) -> Option<&Reference> {
         self.ast.references.get(index)
     }
+    pub fn get_ast(&self) -> Ast {
+        self.ast.clone()
+    }
     pub fn lsp_range(&self, sym: Symbol) -> Option<Range> {
         self.ast.lsp_range(sym, &self.source)
     }
diff --git a/uvls/src/core/ast/def.rs b/uvls/src/core/ast/def.rs
index 59ad5b7e..a708f8fd 100644
--- a/uvls/src/core/ast/def.rs
+++ b/uvls/src/core/ast/def.rs
@@ -46,6 +46,19 @@ impl Path {
         path.push_str(".uvl");
         path
     }
+
+    //creates an absolute file URI from root_path and this path
+    pub fn to_file(&self, root_path: &str) -> String {
+        let slash = "/";
+        let mut dir: Vec<&str> = root_path.split(slash).collect();
+        let absolute_path = self.names.iter().map(|i| i.as_str()).join(slash);
+        if let Some(name) = dir.pop() {
+            let root_dir = root_path.replace(name, "");
+            let path = "file://".to_string() + &root_dir + &absolute_path + ".uvl";
+            return path;
+        }
+        absolute_path
+    }
 }
 
 //Type definitions for symbols
diff --git a/uvls/src/core/ast/graph.rs b/uvls/src/core/ast/graph.rs
index b48f4809..c6120a92 100644
--- a/uvls/src/core/ast/graph.rs
+++ b/uvls/src/core/ast/graph.rs
@@ -600,7 +600,7 @@ fn visit_ref(graph: &mut VisitorGraph, _: &mut GraphNode, _: Path) {
         }
     }
 }
-fn visit_group(graph: &mut VisitorGraph, mut parent: &mut GraphNode, mode: GroupMode) {
+fn visit_group(graph: &mut VisitorGraph, parent: &mut GraphNode, mode: GroupMode) {
     //let sym = Symbol::Group(graph.ast.groups.len());
     //graph.push_child(parent, sym);
     parent.group_mode = Some(mode);
diff --git a/uvls/src/core/ast/transform.rs b/uvls/src/core/ast/transform.rs
index 365f5f52..cc179a9a 100644
--- a/uvls/src/core/ast/transform.rs
+++ b/uvls/src/core/ast/transform.rs
@@ -1331,12 +1331,13 @@ pub fn visit_root(source: Rope, tree: Tree, uri: Url, timestamp: Instant) -> Ast
         state.connect();
         (state.ast, state.errors)
     };
-    let mut path = uri_to_path(&uri).unwrap();
-    if let Some(ns) = ast.namespace.as_ref() {
-        let len = path.len().saturating_sub(ns.names.len());
-        path.truncate(len);
-        path.extend_from_slice(&ns.names);
-    }
+    let path = uri_to_path(&uri).unwrap();
+    // without this code, namespaces are ignored for imports and the normal path is used
+    // if let Some(ns) = ast.namespace.as_ref() {
+    //     let len = path.len().saturating_sub(ns.names.len());
+    //     path.truncate(len);
+    //     path.extend_from_slice(&ns.names);
+    // }
     AstDocument {
         id: FileID::from_uri(&uri),
         path,
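Review note (illustration, not part of the patch): the new Path::to_file in def.rs resolves an import path against the directory of the importing file and returns a file:// URI. A minimal standalone sketch of the intended behavior, using plain &str slices instead of the real Path/Ustr types and hypothetical file names:

    // simplified stand-in for Path::to_file; `names` mirrors Path.names
    fn to_file(names: &[&str], root_path: &str) -> String {
        let mut dir: Vec<&str> = root_path.split('/').collect();
        let relative = names.join("/");
        if let Some(name) = dir.pop() {
            // beware: `replace` drops every occurrence of the file name, not only the trailing one
            let root_dir = root_path.replace(name, "");
            return "file://".to_string() + &root_dir + &relative + ".uvl";
        }
        relative
    }

    fn main() {
        // importing `sub.feature` from /home/u/models/root.uvl
        assert_eq!(
            to_file(&["sub", "feature"], "/home/u/models/root.uvl"),
            "file:///home/u/models/sub/feature.uvl"
        );
    }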
diff --git a/uvls/src/core/cache.rs b/uvls/src/core/cache.rs
index a38e5f00..56b34c44 100644
--- a/uvls/src/core/cache.rs
+++ b/uvls/src/core/cache.rs
@@ -1,11 +1,12 @@
 use crate::core::*;
 use check::ErrorsAcc;
-use compact_str::CompactStringExt;
+use compact_str::{CompactString, CompactStringExt};
 use hashbrown::{HashMap, HashSet};
 use log::info;
 use module::{ConfigModule, Module};
 use petgraph::prelude::*;
 use resolve::*;
+use std::path;
 use std::sync::Arc;
 use ustr::Ustr;
 #[derive(Debug, Clone, PartialEq)]
@@ -251,6 +252,7 @@ impl FileSystem {
             .find(|n| matches!(self.graph[*n], FSNode::Dir))
             .unwrap()
     }
+
     //all subfiles from origin under path, returns (prefix,filename,filenode)
     pub fn sub_files<'a>(
         &'a self,
@@ -264,6 +266,136 @@
             _ => true,
         })
     }
+    /**
+     * find all subfiles, subdirectories and files on the same level as the currently opened file,
+     * but only those that have not been loaded yet
+     */
+    pub fn all_sub_files<'a>(
+        &self,
+        origin_unc: FileID,
+        prefix: &[Ustr],
+        postfix: CompactString,
+    ) -> impl Iterator<Item = (CompactString, Ustr, FSNode)> + 'a {
+        let mut stack: Vec<(compact_str::CompactString, Ustr, FSNode)> = Vec::new();
+        let mut dirs: Vec<String> = Vec::new();
+        //remove "file://" from the uri, because only the path is needed
+        match origin_unc.as_str().strip_prefix("file://") {
+            Some(origin) => {
+                let mut suffix_helper: Vec<&str> = origin.split("/").collect();
+                let suffix = suffix_helper.pop().unwrap();
+                let mut root_dir = origin.strip_suffix(suffix).unwrap();
+                // Handling for Windows systems
+                if std::env::consts::OS == "windows"
+                    && !path::Path::new(root_dir).is_dir()
+                    && root_dir.starts_with("/")
+                {
+                    root_dir = root_dir.strip_prefix("/").unwrap();
+                }
+                let path = path::Path::new(root_dir);
+                //Retrieve all uvl files and subfiles from the current directory
+                for entry in walkdir::WalkDir::new(path)
+                    .into_iter()
+                    .filter_map(|e| e.ok())
+                    .filter(|e| e.path().is_file())
+                    .filter(|e| {
+                        e.path()
+                            .extension()
+                            .map(|e| e == std::ffi::OsStr::new("uvl"))
+                            .unwrap_or(false)
+                    })
+                {
+                    match entry.path().to_str() {
+                        Some(path) => {
+                            // check that the file has not been loaded yet and is not an open file
+                            if path != origin
+                                && None
+                                    == self.file2node.keys().find(|&&ele| {
+                                        match ele.as_str().strip_prefix("file://") {
+                                            Some(check_path) => check_path.eq(path),
+                                            None => false,
+                                        }
+                                    })
+                            {
+                                let name_op = path.strip_prefix(root_dir);
+                                match name_op {
+                                    Some(name) => {
+                                        // check whether the already typed text is a prefix of the path
+                                        let mut valid_path = true;
+                                        let mut check_path: Vec<&str> =
+                                            name.clone().split("/").collect();
+                                        for i in prefix.iter() {
+                                            let check_prefix = i.as_str();
+
+                                            match check_path
+                                                .iter()
+                                                .position(|&ele| ele == check_prefix)
+                                            {
+                                                Some(0) => {
+                                                    let _ = check_path.remove(0);
+                                                }
+                                                _ => {
+                                                    valid_path = false;
+                                                    break;
+                                                }
+                                            }
+                                        }
+                                        match check_path
+                                            .iter()
+                                            .position(|&ele| ele.starts_with(postfix.as_str()))
+                                        {
+                                            Some(0) => {}
+                                            _ => valid_path = false,
+                                        }
+                                        let name_up = check_path.join("/");
+                                        if valid_path {
+                                            let new_name = name_up
+                                                .replace("/", ".")
+                                                .replace("\\", ".")
+                                                .replace(".uvl", "");
+                                            // save file for auto-completion
+                                            stack.push((
+                                                new_name.as_str().into(),
+                                                Ustr::from(new_name.as_str()),
+                                                FSNode::File(FileID::new(&path)),
+                                            ));
+                                            let mut is_dir = true;
+                                            let mut dir_names: Vec<&str> =
+                                                new_name.split(".").collect();
+                                            if !dir_names.is_empty() {
+                                                let _ = dir_names.pop();
+                                            }
+                                            // add all parent directories for autocompletion,
+                                            // but only if they have not been added yet
+                                            while is_dir {
+                                                let dir_name = dir_names.join(".");
+                                                if dir_name.is_empty() || dirs.contains(&dir_name) {
+                                                    is_dir = false;
+                                                } else {
+                                                    stack.push((
+                                                        dir_name.as_str().into(),
+                                                        Ustr::from(dir_name.as_str()),
+                                                        FSNode::Dir,
+                                                    ));
+                                                    dirs.push(dir_name);
+                                                    let _ = dir_names.pop();
+                                                }
+                                            }
+                                        }
+                                    }
+                                    _ => {}
+                                }
+                            }
+                        }
+                        _ => {}
+                    }
+                }
+            }
+            _ => {
+                info!("uri has wrong form: {} ", origin_unc.as_str());
+            }
+        }
+
+        std::iter::from_fn(move || stack.pop())
+    }
 }
 
 #[derive(Debug, Clone)]
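Review note: the heart of all_sub_files is normalizing a path relative to the open file into a dotted import path and then emitting each ancestor directory once, so folders show up in completion too. A self-contained sketch of that normalization (input path hypothetical):

    fn main() {
        // e.g. the walker found "sub/dir/feature.uvl" next to the open file
        let name = "sub/dir/feature.uvl";
        let dotted = name.replace('/', ".").replace('\\', ".").replace(".uvl", "");
        assert_eq!(dotted, "sub.dir.feature");

        // ancestor directories emitted for completion: "sub.dir", then "sub"
        let mut dirs: Vec<String> = Vec::new();
        let mut parts: Vec<&str> = dotted.split('.').collect();
        parts.pop(); // drop the file itself
        while !parts.is_empty() {
            let dir = parts.join(".");
            if dirs.contains(&dir) {
                break; // already recorded via an earlier file in the same folder
            }
            dirs.push(dir);
            parts.pop();
        }
        assert_eq!(dirs, ["sub.dir".to_string(), "sub".to_string()]);
    }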
@@ -358,13 +490,33 @@ impl Cache {
         for (k, v) in configs.iter() {
             if let Some(content) = v.config.as_ref() {
                 info!("uri {}", content.file.as_str());
-                if files.contains_key(&content.file) {
-                    let dirty = trans_dirty.contains(&content.file)
+                let fileid = if files.contains_key(&content.file) {
+                    content.file
+                } else {
+                    // Windows inconvenience (we now select the files-key which equals our uri):
+                    *files
+                        .keys()
+                        .find(|key| {
+                            key.as_str().strip_prefix("file:///").unwrap_or("")
+                                == content
+                                    .file
+                                    .as_str()
+                                    .strip_prefix("file://")
+                                    .unwrap_or("")
+                                    .replace("\\", "/")
+                        })
+                        .unwrap_or(&FileID::new("X"))
+                };
+                if files.contains_key(&fileid) {
+                    let dirty = trans_dirty.contains(&fileid)
+                        || trans_dirty.contains(&FileID::from_uri(
+                            &tower_lsp::lsp_types::Url::parse(fileid.as_str()).unwrap(),
+                        ))
                         || dirty.contains(k)
                         || !old.config_modules.contains_key(k);
                     if dirty {
                         //recreate
-                        let mut module = Module::new(content.file, &fs, &linked_ast);
+                        let mut module = Module::new(fileid, &fs, &linked_ast);
                         if !module.ok {
                             config_modules.insert(
                                 *k,
@@ -394,7 +546,12 @@ impl Cache {
                     } else {
                         config_modules.insert(*k, old.config_modules[k].clone());
                     }
                 } else {
-                    errors.span(content.file_span.clone(), *k, 100, "file no found");
+                    errors.span(
+                        content.file_span.clone(),
+                        *k,
+                        100,
+                        "File not found, please open it explicitly in your editor",
+                    );
                 }
             }
         }
diff --git a/uvls/src/core/pipeline.rs b/uvls/src/core/pipeline.rs
index 1f4680a1..5e1c3cb3 100644
--- a/uvls/src/core/pipeline.rs
+++ b/uvls/src/core/pipeline.rs
@@ -1,7 +1,8 @@
-use crate::{core::*, ide::inlays::InlayHandler, smt};
+use crate::{core::*, ide::inlays::InlayHandler, load_blocking, smt};
 use check::*;
 use dashmap::DashMap;
 use document::*;
+use filetime::{set_file_mtime, FileTime};
 use hashbrown::HashMap;
 use log::info;
 use ropey::Rope;
@@ -161,6 +162,9 @@ async fn link_handler(
     mut rx: mpsc::Receiver<LinkMsg>,
     tx_cache: watch::Sender<Arc<RootGraph>>,
     tx_err: mpsc::Sender<DiagnosticUpdate>,
+    tx_ast: watch::Sender<Arc<(Ast, String)>>,
+    tx_root_imports: watch::Sender<Arc<RootGraph>>,
+    mut rx_sync: mpsc::Receiver<bool>,
 ) {
     //First we gather changes to avoid redundant recomputation
     let mut latest_configs: HashMap<FileID, Arc<ConfigDocument>> = HashMap::new();
@@ -170,7 +174,7 @@ async fn link_handler(
     let mut dirty = false;
     let mut revision = 0; //Each change is one revision
     info!("started link handler");
-    spawn(link_executor(rx_execute, tx_cache, tx_err));
+    spawn(link_executor(rx_execute, tx_cache, tx_err, tx_root_imports));
     let mut timer = tokio::time::interval(tokio::time::Duration::from_millis(100));
     loop {
         select! {
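Review note: the new tx_ast/rx_sync pair forms a simple handshake so the link handler publishes exactly one (Ast, uri) update per ready signal from the import handler; without it, a tokio watch channel would silently coalesce bursts of updates. A minimal sketch of the pattern with simplified payload types:

    use std::sync::Arc;
    use tokio::sync::{mpsc, watch};

    #[tokio::main]
    async fn main() {
        let (tx_ast, mut rx_ast) = watch::channel(Arc::new(String::new()));
        let (tx_sync, mut rx_sync) = mpsc::channel::<bool>(1);

        // producer side (link handler): wait for the ready signal, then publish
        tokio::spawn(async move {
            for doc in ["a.uvl", "b.uvl"] {
                let _ = rx_sync.recv().await; // consumer is ready
                let _ = tx_ast.send(Arc::new(doc.to_string()));
            }
        });

        // consumer side (import handler): announce readiness, then await the change
        for _ in 0..2 {
            let _ = tx_sync.send(true).await;
            if rx_ast.changed().await.is_err() {
                break;
            }
            let doc = rx_ast.borrow().clone();
            println!("processing {doc}");
        }
    }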
@@ -187,6 +191,8 @@
                     dirty=true;
                 }
                 LinkMsg::UpdateAst(ast)=>{
+                    let ast_clone = ast.get_ast().clone();
+                    let uri_str = ast.uri.clone().to_string();
                     if timestamps.get(&ast.uri).map(|old|old < &ast.timestamp).unwrap_or(true){
                         timestamps.insert(ast.uri.clone(),ast.timestamp);
                         let id = FileID::new(ast.uri.as_str());
@@ -195,6 +201,8 @@
                         revision +=1;
                         dirty=true;
+                        let _ = rx_sync.recv().await;
+                        let _ = tx_ast.send(Arc::new((ast_clone,uri_str)));
                     }
                 }
                 LinkMsg::UpdateConfig(conf)=>{
@@ -235,6 +243,7 @@
     )>,
     tx_cache: watch::Sender<Arc<RootGraph>>,
     tx_err: mpsc::Sender<DiagnosticUpdate>,
+    tx_root_imports: watch::Sender<Arc<RootGraph>>,
 ) {
     let mut timestamps: HashMap<FileID, Instant> = HashMap::new();
     info!("started link execute");
@@ -255,13 +264,48 @@
             //link files incrementally
             let root = RootGraph::new(&ast, &configs, revision, &old, &mut err, &mut timestamps);
-            let _ = tx_cache.send(Arc::new(root));
+            let _ = tx_cache.send(Arc::new(root.clone()));
             let _ = tx_err
                 .send(DiagnosticUpdate {
                     timestamp: revision,
                     error_state: err.errors,
                 })
                 .await;
+            let _ = tx_root_imports.send(Arc::new(root));
         }
     }
 }
+
+//handler which takes care that all imported files are loaded recursively
+async fn import_handler(pipeline: AsyncPipeline) {
+    let mut rx = pipeline.rx_ast.clone();
+    let rx_root = pipeline.rx_root_imports.clone();
+    loop {
+        // Notify the sender that we are ready for the last edited file URI.
+        // This synchronization is important so that the sender does not publish
+        // two or more updates while the import handler processes only one.
+        let _ = pipeline.tx_sync.send(true).await;
+        //wait until the AstDocument is updated
+        if rx.changed().await.is_err() {
+            break;
+        }
+        let arc = pipeline.rx_ast.borrow().clone();
+        let ast = arc.0.clone();
+        if let Ok(uri) = Url::parse(arc.1.as_str()) {
+            for import in ast.imports() {
+                let relative_path_string = import.path.to_file(uri.path());
+                if let Ok(url_import) = Url::parse(relative_path_string.as_str()) {
+                    let pip = pipeline.clone();
+                    //only load the import if it isn't loaded yet
+                    if !rx_root.borrow().contains(&url_import) {
+                        tokio::task::spawn_blocking(move || {
+                            load_blocking(url_import, &pip);
+                        });
+                    }
+                }
+            }
+        }
+    }
+}
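Review note: import_handler turns every import of the freshly parsed file into a candidate file:// URL and only schedules load_blocking when the root graph does not contain that URL yet; each load produces another UpdateAst, so transitive imports are resolved on later loop iterations and the process bottoms out once everything is loaded. A small sketch of the URL step (paths hypothetical, using the url crate that tower-lsp's types build on):

    use url::Url;

    fn main() {
        // document that declares `imports sub.feature`
        let doc = Url::parse("file:///home/u/models/root.uvl").unwrap();
        // what Path::to_file(doc.path()) is expected to produce for ["sub", "feature"]
        let candidate = "file:///home/u/models/sub/feature.uvl";
        let url_import = Url::parse(candidate).unwrap();
        assert_eq!(url_import.path(), "/home/u/models/sub/feature.uvl");
        assert_ne!(url_import, doc); // the importing file itself is never re-queued
    }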
@@ -276,22 +320,38 @@ pub struct AsyncPipeline {
     tx_err: mpsc::Sender<DiagnosticUpdate>,
     //latest version of the linked files
     rx_root: watch::Receiver<Arc<RootGraph>>,
+    //latest version of the linked files, but this receiver is used for loading imported files
+    rx_root_imports: watch::Receiver<Arc<RootGraph>>,
     //fires when a file changed
     tx_dirty_tree: broadcast::Sender<()>,
     revision_counter: Arc<AtomicU64>,
     client: tower_lsp::Client,
     //code inlays are managed globally
     inlay_handler: InlayHandler,
+    //fires when an AstDocument gets updated
+    rx_ast: watch::Receiver<Arc<(Ast, String)>>,
+    //fires to inform the rx_ast receiver that it can send
+    tx_sync: mpsc::Sender<bool>,
 }
 impl AsyncPipeline {
     pub fn new(client: tower_lsp::Client) -> Self {
+        let (tx_ast, rx_ast) = watch::channel(Arc::new((Ast::default(), "".to_string())));
+        let (tx_sync, rx_sync) = mpsc::channel(1);
         let (tx_link, rx_link) = mpsc::channel(1024);
         let (tx_root, rx_root) = watch::channel(Arc::new(RootGraph::default()));
+        let (tx_root_imports, rx_root_imports) = watch::channel(Arc::new(RootGraph::default()));
         let (tx_err, rx_err) = mpsc::channel(1024);
         let revision_counter = Arc::new(AtomicU64::new(0));
         let (tx_dirty, _) = broadcast::channel(1024);
         let inlay_handler = InlayHandler::new(client.clone());
-        spawn(link_handler(rx_link, tx_root, tx_err.clone()));
+        spawn(link_handler(
+            rx_link,
+            tx_root,
+            tx_err.clone(),
+            tx_ast,
+            tx_root_imports,
+            rx_sync,
+        ));
         spawn(check::diagnostic_handler(rx_err, client.clone()));
         spawn(smt::check_handler(
             rx_root.clone(),
@@ -308,6 +368,9 @@ impl AsyncPipeline {
             tx_link,
             tx_err,
             rx_root,
+            rx_root_imports,
+            rx_ast,
+            tx_sync,
         }
     }
     pub fn touch(&self, uri: &Url) {
@@ -430,6 +493,10 @@ impl AsyncPipeline {
     pub fn root(&self) -> watch::Receiver<Arc<RootGraph>> {
         self.rx_root.clone()
     }
+
+    pub fn root_imports(&self) -> watch::Receiver<Arc<RootGraph>> {
+        self.rx_root_imports.clone()
+    }
     //wait until uri newer than timestamp in the root graph
     pub async fn snapshot_root_sync(
         &self,
@@ -487,14 +554,54 @@ impl AsyncPipeline {
         let time = Instant::now();
         if let Some(draft) = self.snapshot_draft(uri).await? {
             info!("waited {:?} for draft", time.elapsed());
-            Ok(Some(if sync {
+            let result = Ok(Some(if sync {
                 let timestamp = draft.timestamp();
                 (draft, self.snapshot_root_sync(uri, timestamp).await?)
             } else {
                 (draft, self.snapshot_root(uri).await?)
-            }))
+            }));
+            self.load_uvl_after_json(uri);
+            result
         } else {
             Ok(None)
         }
     }
+    // if the file is a uvl.json, load the corresponding uvl file;
+    // this method is called after the complete red tree of the uri is created
+    pub fn load_uvl_after_json(&self, uri: &Url) {
+        let root_binding = self.rx_root.borrow();
+        let doc = root_binding.file_by_uri(&uri);
+        match doc {
+            // it is a uvl file and can be ignored
+            Some(_) => (),
+            // it is a uvl.json
+            None => {
+                let config = root_binding.config_by_uri(&uri);
+                match config {
+                    Some(config_doc) => {
+                        let url_file = config_doc.config.as_ref().unwrap().file.url();
+                        if !root_binding.contains(&url_file) {
+                            let pipeline = self.clone();
+                            let open_url = uri.clone();
+                            tokio::task::spawn_blocking(move || {
+                                load_blocking(url_file, &pipeline);
+                                //update the modification time so the uvl.json can be loaded
+                                let _ = set_file_mtime(
+                                    open_url.to_file_path().unwrap(),
+                                    FileTime::now(),
+                                );
+                                load_blocking(open_url, &pipeline);
+                            });
+                        }
+                    }
+                    None => {}
+                }
+            }
+        }
+    }
+    //start the import handler
+    pub fn import_handler(&self) {
+        let pipeline = self.clone();
+        spawn(import_handler(pipeline));
+    }
 }
diff --git a/uvls/src/core/semantic.rs b/uvls/src/core/semantic.rs
index 13861b89..94bf05f9 100644
--- a/uvls/src/core/semantic.rs
+++ b/uvls/src/core/semantic.rs
@@ -81,6 +81,7 @@ impl RootGraph {
             .or(self.file_by_uri(uri).map(|i| i.timestamp))
     }
     pub fn contains_id(&self, id: FileID) -> bool {
+        info!("file id: {:?}", id);
         self.files.contains_key(&id) || self.configs.contains_key(&id)
     }
     pub fn type_of(&self, sym: RootSymbol) -> Option<Type> {
@@ -192,8 +193,16 @@ impl RootGraph {
     {
         let mut file_paths = HashSet::new();
         for file in files.values() {
-            if !file_paths.insert(file.path.as_slice()) {
-                if let Some(ns) = file.namespace() {
+            if let Some(ns) = file.namespace() {
+                //create the path with namespace if a namespace exists and check whether it is already defined
+                let mut path: Vec<String> =
+                    file.path.clone().iter().map(|s| s.to_string()).collect();
+                let ns_path: Vec<String> = ns.names.iter().map(|s| s.to_string()).collect();
+                let len = file.path.len().saturating_sub(ns.names.len());
+                path.truncate(len);
+                path.extend_from_slice(&ns_path);
+                let path_str = path.join(".");
+                if !file_paths.insert(path_str) {
                     if err.errors.contains_key(&file.id) {
                         err.span(ns.range(), file.id, 100, "namespace already defined");
                     }
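Review note: duplicate-namespace detection now keys on the effective path, i.e. the file path with its tail replaced by the declared namespace, joined with dots; two files only collide when those keys match, instead of whenever their raw paths collide. A sketch of the key construction (names hypothetical):

    fn effective_path(file_path: &[&str], namespace: &[&str]) -> String {
        let mut path: Vec<String> = file_path.iter().map(|s| s.to_string()).collect();
        let len = file_path.len().saturating_sub(namespace.len());
        path.truncate(len);
        path.extend(namespace.iter().map(|s| s.to_string()));
        path.join(".")
    }

    fn main() {
        // the same namespace `ns` declared in two different directories: distinct keys
        assert_eq!(effective_path(&["pkg", "root"], &["ns"]), "pkg.ns");
        assert_eq!(effective_path(&["other", "root"], &["ns"]), "other.ns");
    }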
diff --git a/uvls/src/ide/completion.rs b/uvls/src/ide/completion.rs
index e5831804..2d99dbbe 100644
--- a/uvls/src/ide/completion.rs
+++ b/uvls/src/ide/completion.rs
@@ -96,7 +96,6 @@ pub fn make_path
 #[derive(PartialEq, Eq, Debug, Clone)]
 pub enum CompletionEnv {
     Numeric,
-    String,
     Constraint,
     GroupMode,
     Feature,
@@ -188,22 +187,26 @@ pub fn contains(range: Range, pos: &Position) -> bool {
 pub fn estimate_expr(node: Node, pos: &Position, source: &Rope) -> CompletionEnv {
     if node.is_error() && node.start_position().row == node.end_position().row {
         let err_raw: String = source.byte_slice(node.byte_range()).into();
+        if err_raw.contains("=>")
+            || err_raw.contains("<=>")
+            || err_raw.contains('&')
+            || err_raw.contains('|')
+        {
+            return CompletionEnv::Constraint;
+        }
         if err_raw.contains('+')
             || err_raw.contains('-')
             || err_raw.contains('*')
             || err_raw.contains('/')
             || err_raw.contains('>')
             || err_raw.contains('<')
-            || err_raw.contains("=>")
-            || err_raw.contains("<=>")
-            || err_raw.contains('&')
-            || err_raw.contains('|')
             || err_raw.contains("==")
         {
-            return CompletionEnv::Constraint;
+            return CompletionEnv::Numeric;
         }
     }
     match node.kind() {
+        "number" => CompletionEnv::Numeric,
         "function" => {
             let mut cursor = node.walk();
             cursor.goto_first_child();
@@ -222,10 +225,12 @@ pub fn estimate_expr(node: Node, pos: &Position, source: &Rope) -> CompletionEnv {
                 if cursor.field_name().map(|i| i == "arg").unwrap_or(false) {
                     args.push(parse::parse_path(cursor.node(), source));
                 }
+                info!("{:?}", cursor.node().kind());
                 if !cursor.goto_next_sibling() {
                     break;
                 }
             }
+            info!("args {:?} offset {}", &args, arg_offset);
             match node
                 .child_by_field_name("op")
                 .map(|op| source.slice_raw(op.byte_range()))
@@ -253,7 +258,7 @@ pub fn estimate_expr(node: Node, pos: &Position, source: &Rope) -> CompletionEnv {
                 .into();
             match op.as_str() {
                 "=>" | "&" | "|" | "<=>" => CompletionEnv::Constraint,
-                _ => CompletionEnv::Constraint,
+                _ => CompletionEnv::Numeric,
             }
         }
         "nested_expr" | "path" => estimate_expr(node.parent().unwrap(), pos, source),
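Review note: the reordering in estimate_expr makes the error-branch precedence explicit: boolean connectives win over arithmetic and comparison operators, so `a & b <` still completes as a constraint while purely numeric residue now yields Numeric. A compact stand-in for the two checks above (simplified, not the real function):

    #[derive(Debug, PartialEq)]
    enum Env {
        Constraint,
        Numeric,
        Unknown,
    }

    fn classify(err_raw: &str) -> Env {
        // boolean connectives are checked first, as in the patch
        if ["=>", "<=>", "&", "|"].iter().any(|op| err_raw.contains(op)) {
            return Env::Constraint;
        }
        if ["+", "-", "*", "/", ">", "<", "=="].iter().any(|op| err_raw.contains(op)) {
            return Env::Numeric;
        }
        Env::Unknown
    }

    fn main() {
        assert_eq!(classify("a & b <"), Env::Constraint); // `&` dominates `<`
        assert_eq!(classify("x + 1 >"), Env::Numeric);
    }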
@@ -501,318 +506,6 @@ fn estimate_context(pos: &Position, draft: &Draft) -> Option<CompletionQuery> {
         }
     }
 }
-
-fn compute_constraint_completion(
-    pos: &TextDocumentPositionParams,
-    draft: &Draft,
-    snapshot: &Snapshot,
-    top: &mut TopN<CompletionOpt>,
-    ctx: &CompletionQuery,
-    origin: FileID,
-) {
-    match draft {
-        Draft::JSON { .. } => {}
-        Draft::UVL { source, tree, .. } => {
-            let (_offset, edit_node) = position_to_node(source, tree, &pos.position);
-
-            let node = longest_path(edit_node, source)
-                .unwrap_or((Path::default(), edit_node))
-                .1;
-
-            // checks if node is constraint
-            if !(0..node.start_position().row)
-                .into_iter()
-                .map(|line| {
-                    source
-                        .get_line(line)
-                        .unwrap()
-                        .as_str()
-                        .unwrap()
-                        .contains("constraints")
-                })
-                .contains(&true)
-            {
-                add_keywords(&ctx.postfix, top, 2.1, ["\'$1\' ".into(), "!".into()]);
-                add_function_keywords(&ctx.postfix, top, 2.0);
-                completion_symbol(&snapshot, origin, &ctx, top, vec![]);
-                return;
-            }
-
-            // set cursor to childs.
-            let start_idx = source.line_to_byte(node.start_position().row);
-            let mut cursor = node;
-            if cursor.kind() == "ERROR" {
-                cursor = cursor.child(cursor.child_count() - 1).unwrap();
-            }
-            // Add individual Childs to list
-            let mut childs: Vec<Node> = vec![];
-            while cursor.start_byte() >= start_idx {
-                if source
-                    .get_byte_slice(cursor.byte_range())
-                    .unwrap()
-                    .as_str()
-                    .unwrap()
-                    .contains("\n")
-                {
-                    cursor = cursor.child(cursor.child_count() - 2).unwrap();
-                    continue;
-                }
-                childs.push(cursor);
-
-                match cursor.prev_sibling() {
-                    Some(node) => cursor = node,
-                    _ => break,
-                }
-            }
-
-            if childs.len() > 1 {
-                childs = childs.into_iter().filter(|c| c.kind() != "!").collect();
-            }
-
-            let mut complete_function = |node: &Node| {
-                if node.kind() != "function" {
-                    return;
-                }
-                match source.get_byte_slice(node.byte_range()).unwrap().as_str() {
-                    Some(str) => {
-                        if str.contains("len") {
-                            completion_symbol(&snapshot, origin, &ctx, top, vec![Type::String]);
-                            add_keywords(&ctx.postfix, top, 2.1, ["\'$1\' ".into()]);
-                        }
-                        if str.contains("avg")
-                            | str.contains("ceil")
-                            | str.contains("floor")
-                            | str.contains("sum")
-                        {
-                            completion_symbol(&snapshot, origin, &ctx, top, vec![Type::Real]);
-                        }
-                    }
-                    _ => (),
-                }
-            };
-
-            // Depending on the length of the list give recomendations
-            match (childs.len(), ctx.offset) {
-                (1, CompletionOffset::Continuous) => {
-                    if childs[0].kind() == "!" {
-                        completion_symbol(&snapshot, origin, &ctx, top, vec![Type::Bool]);
-                        return;
-                    }
-                    if childs[0].kind() == "function" {
-                        complete_function(&childs[0]);
-                        return;
-                    }
-                    match ctx.env {
-                        CompletionEnv::Aggregate { context: _ } => (),
-                        _ => {
-                            add_function_keywords(&ctx.postfix, top, 2.0);
-                            completion_symbol(&snapshot, origin, &ctx, top, vec![]);
-                        }
-                    }
-                }
-                (1, CompletionOffset::Dot) => {
-                    completion_symbol(&snapshot, origin, &ctx, top, vec![]);
-                }
-                (1, _) => {
-                    let child = childs[0];
-                    match child.kind() {
-                        "name" | "path" => {
-                            let found = resolve_name(
-                                &snapshot,
-                                origin,
-                                &source,
-                                source
-                                    .get_byte_slice(child.byte_range())
-                                    .unwrap()
-                                    .as_str()
-                                    .unwrap(),
-                            );
-                            if found.len() == 0 {
-                                add_logic_op(&ctx.postfix, top, 6.1);
-                                add_numeric_op(&ctx.postfix, top, 6.1);
-                                add_string_op(&ctx.postfix, top, 6.1);
-                            }
-                            for ty in found {
-                                match ty {
-                                    Type::Bool => add_logic_op(&ctx.postfix, top, 6.1),
-                                    Type::Real => add_numeric_op(&ctx.postfix, top, 6.1),
-                                    Type::String => add_string_op(&ctx.postfix, top, 6.1),
-                                    _ => {
-                                        add_logic_op(&ctx.postfix, top, 6.1);
-                                        add_numeric_op(&ctx.postfix, top, 6.1);
-                                        add_string_op(&ctx.postfix, top, 6.1);
-                                    }
-                                }
-                            }
-                        }
-                        "number" | "function" => add_numeric_op(&ctx.postfix, top, 6.1),
-                        "string" => add_string_op(&ctx.postfix, top, 6.1),
-                        _ => (),
-                    }
-                }
-                (2, _) | (3, CompletionOffset::Continuous) => {
-                    let child = if childs.len() == 3 {
-                        if childs[0].kind() == "function" {
-                            childs[0]
-                        } else {
-                            childs[2]
-                        }
-                    } else {
-                        childs[1]
-                    };
-                    match child.kind() {
-                        "name" | "path" => {
-                            let tys = resolve_name(
-                                &snapshot,
-                                origin,
-                                &source,
-                                source
-                                    .get_byte_slice(child.byte_range())
-                                    .unwrap()
-                                    .as_str()
-                                    .unwrap(),
-                            );
-                            if tys.len() == 0 {
-                                completion_symbol(&snapshot, origin, &ctx, top, vec![])
-                            }
-                            for ty in tys {
-                                match ty {
-                                    Type::Bool => {
-                                        completion_symbol(&snapshot, origin, &ctx, top, vec![ty])
-                                    }
-                                    Type::Real => {
-                                        completion_symbol(&snapshot, origin, &ctx, top, vec![ty]);
-                                        add_function_keywords(&ctx.postfix, top, 1.0);
-                                    }
-                                    Type::String => {
-                                        completion_symbol(
-                                            &snapshot,
-                                            origin,
-                                            &ctx,
-                                            top,
-                                            vec![Type::String],
-                                        );
-                                        add_keywords(&ctx.postfix, top, 2.1, ["\'$1\' ".into()]);
-                                    }
-                                    _ => completion_symbol(&snapshot, origin, &ctx, top, vec![]),
-                                }
-                            }
-                        }
-                        "number" => {
-                            completion_symbol(&snapshot, origin, &ctx, top, vec![Type::Real]);
-                            add_function_keywords(&ctx.postfix, top, 1.0);
-                        }
-                        "function" => {
-                            if ctx.offset == CompletionOffset::SameLine {
-                                completion_symbol(&snapshot, origin, &ctx, top, vec![Type::Real]);
-                                add_function_keywords(&ctx.postfix, top, 1.0);
-                            } else {
-                                complete_function(&child);
-                            }
-                        }
-                        "string" => {
-                            completion_symbol(&snapshot, origin, &ctx, top, vec![Type::String]);
-                            add_keywords(&ctx.postfix, top, 2.1, ["\'$1\' ".into()]);
-                        }
-                        "binary_expr" | "unary_expr" => {
-                            completion_symbol(&snapshot, origin, &ctx, top, vec![Type::Bool]);
-                            add_keywords(&ctx.postfix, top, 4.2, ["!".into()]);
-                        }
-                        _ => (),
-                    }
-                }
-                (3, _) => {
-                    add_logic_op(&ctx.postfix, top, 6.1);
-                    return;
-                }
-                _ => (),
-            }
-        }
-    }
-}
-
-fn resolve_name(snapshot: &Snapshot, origin: FileID, _source: &Rope, child: &str) -> Vec<Type> {
-    let mut result: Vec<Type> = vec![];
-    let file = snapshot.file(origin);
-    let child_prefix = child.split(".").map(|s| Ustr::from(s)).collect_vec();
-
-    // go through imports and check if child is imported
-    for import in file.all_imports() {
-        // check if child is imported from this import
-        let import_prefix = file.import_prefix(import);
-        if import_prefix.len() > child_prefix.len() {
-            continue;
-        }
-
-        let mut is_import = true;
-        for i in 0..import_prefix.len() {
-            if is_import && import_prefix[i] != child_prefix[i] {
-                is_import = false;
-            }
-        }
-
-        if !is_import {
-            continue;
-        }
-
-        // Child is imported from this import!
-        // generate new_child string with removed import statements (import.feature -> feature)
-
-        let mut child_prefix_copy = child_prefix.clone();
-        child_prefix_copy.drain(0..import_prefix.len());
-
-        let new_child = child_prefix_copy
-            .into_iter()
-            .map(|ustr| ustr.as_str())
-            .fold(String::new(), |a, b| {
-                if a.len() > 0 {
-                    return a + "." + b;
-                }
-                a + b
-            });
-
-        // Generate FileID from import
-        let mut new_path = origin.filepath().clone();
-        new_path.pop(); // remove own file.uvl
-        for i in 0..(import_prefix.len() - 1) {
-            new_path.push(import_prefix[i].as_str());
-        }
-        new_path.push(import_prefix.last().unwrap().to_string() + ".uvl");
-
-        // If successfull call recursively
-        if let Some(new_origin) = snapshot.file_id(&Url::from_file_path(new_path).unwrap()) {
-            result.append(resolve_name(snapshot, new_origin, _source, &new_child).as_mut());
-        }
-    }
-
-    // Get symbols in local file that match the child
-    let entries = file.get_symbols(Ustr::from(child));
-
-    // map entries to their Type
-    for entry in entries.clone() {
-        match entry {
-            Symbol::Feature(i) => result.push(file.get_feature(i).unwrap().ty),
-            Symbol::Attribute(i) => {
-                match snapshot
-                    .file(origin)
-                    .get_attribute(i)
-                    .unwrap()
-                    .value
-                    .value
-                    .clone()
-                {
-                    Value::Number(_) => result.push(Type::Real),
-                    Value::String(_) => result.push(Type::String),
-                    Value::Bool(_) => result.push(Type::Bool),
-                    _ => (),
-                }
-            }
-            _ => (),
-        }
-    }
-    return result;
-}
-
 #[derive(Clone, Copy, PartialEq, Eq, Debug)]
 enum CompletionKind {
     Feature,
@@ -972,18 +665,17 @@ fn add_logic_op(query: &str, top: &mut TopN<CompletionOpt>, w: f32) {
         query,
         top,
         w,
-        ["& ".into(), "| ".into(), "=> ".into(), "<=> ".into()],
+        [
+            "& ".into(),
+            "| ".into(),
+            "=> ".into(),
+            "<=> ".into(),
+            "> ".into(),
+            "< ".into(),
+            "== ".into(),
+        ],
     );
 }
-
-fn add_numeric_op(query: &str, top: &mut TopN<CompletionOpt>, w: f32) {
-    add_keywords(query, top, w, ["> ".into(), "< ".into(), "== ".into()]);
-}
-
-fn add_string_op(query: &str, top: &mut TopN<CompletionOpt>, w: f32) {
-    add_keywords(query, top, w, ["== ".into()]);
-}
-
 fn add_function_keywords(query: &str, top: &mut TopN<CompletionOpt>, w: f32) {
     add_keywords(
         query,
@@ -995,6 +687,7 @@
             "len($1) ".into(),
             "floor($1) ".into(),
             "ceil($1) ".into(),
+            "!".into(),
         ],
     );
 }
@@ -1082,7 +775,6 @@ fn completion_symbol_local(
     prefix: &[Ustr],
     query: &CompletionQuery,
     top: &mut TopN<CompletionOpt>,
-    filter_types: Vec<Type>,
 ) {
     let file = snapshot.file(root.file);
     info!("Module {:?} under {:?}", root, prefix);
@@ -1105,9 +797,6 @@
         {
             return true;
         }
-        if filter_types.len() > 0 && !filter_types.contains(&ty) {
-            return true;
-        }
         let text = make_path(prefix.iter().chain(sym_prefix.iter()));
         top.push(CompletionOpt::new(
             ty.into(),
@@ -1129,7 +818,6 @@ fn completion_symbol(
     origin: FileID,
     query: &CompletionQuery,
     top: &mut TopN<CompletionOpt>,
-    filter_types: Vec<Type>,
 ) {
     let mut modules: HashMap<_, Vec<_>> = HashMap::new(); //Store reachable documents under the
                                                           //search perfix under a secondary prefix
@@ -1166,9 +854,7 @@
                 _ => false,
             });
         }
-        _ => {
-            completion_symbol_local(snapshot, origin, i, &[], query, top, filter_types.clone())
-        }
+        _ => completion_symbol_local(snapshot, origin, i, &[], query, top),
     }
 }
 let root = FileID::max(); //Perform nn from all reachable documents to all other
@@ -1230,7 +916,6 @@
             &path,
             query,
             top,
-            filter_types.clone(),
         );
     }
     //info!("{:#?}", pred);
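Review note: with compute_constraint_completion and resolve_name gone, the type-specific helpers add_numeric_op/add_string_op lost their only callers; their comparison operators move into add_logic_op so a single keyword list serves every expression context. The effective operator set after this patch, for reference:

    // merged suggestion set: former logic ops plus the former numeric/string ops
    const EXPR_OPS: [&str; 7] = ["& ", "| ", "=> ", "<=> ", "> ", "< ", "== "];

    fn main() {
        // "== " was the whole of add_string_op and appears here exactly once
        assert_eq!(EXPR_OPS.iter().filter(|op| **op == "== ").count(), 1);
    }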
@@ -1269,10 +954,10 @@
                 .resolve(origin, &ctx.prefix)
                 .filter(|f| f.file == origin)
             {
-                completion_symbol_local(&snapshot, origin, i, &[], ctx, &mut top, vec![])
+                completion_symbol_local(&snapshot, origin, i, &[], ctx, &mut top)
             }
         }
-        CompletionEnv::Feature => {
+        CompletionEnv::Constraint | CompletionEnv::Numeric | CompletionEnv::Feature => {
             match (&ctx.env, &ctx.offset) {
                 //heuristic to provide nearly correct predictions, to
                 //make it more accurate we need to respect
@@ -1301,23 +986,29 @@
                             "Boolean".into(),
                         ],
                     );
-                    completion_symbol(&snapshot, origin, &ctx, &mut top, vec![]);
+                    completion_symbol(&snapshot, origin, &ctx, &mut top);
                 }
                 if matches!(offset, CompletionOffset::Dot) {
-                    completion_symbol(&snapshot, origin, &ctx, &mut top, vec![]);
+                    completion_symbol(&snapshot, origin, &ctx, &mut top);
                 }
             }
             (
-                CompletionEnv::Constraint,
-                CompletionOffset::Cut
-                | CompletionOffset::Continuous
-                | CompletionOffset::SameLine,
+                CompletionEnv::Constraint | CompletionEnv::Numeric,
+                CompletionOffset::SameLine,
+            ) => {
+                add_logic_op(&ctx.postfix, &mut top, 6.1);
+                add_function_keywords(&ctx.postfix, &mut top, 2.0);
+                completion_symbol(&snapshot, origin, &ctx, &mut top);
+            }
+            (
+                CompletionEnv::Constraint | CompletionEnv::Numeric,
+                CompletionOffset::Cut | CompletionOffset::Continuous,
             ) => {
                 add_function_keywords(&ctx.postfix, &mut top, 2.0);
-                completion_symbol(&snapshot, origin, &ctx, &mut top, vec![]);
+                completion_symbol(&snapshot, origin, &ctx, &mut top);
             }
             _ => {
-                completion_symbol(&snapshot, origin, &ctx, &mut top, vec![]);
+                completion_symbol(&snapshot, origin, &ctx, &mut top);
             }
         }
         is_incomplete = true
@@ -1328,6 +1019,7 @@
             add_keywords(&ctx.postfix, &mut top, 2.0, ["as ".into()])
         }
         _ => {
+            // complete all files and directories which are already loaded
             for (path, name, node) in snapshot.fs().sub_files(origin, &ctx.prefix) {
                 let len = path.as_str().chars().filter(|c| c == &'.').count();
                 top.push(CompletionOpt::new(
                     ...
                     &ctx,
                 ))
             }
+            // complete all files and directories which are not loaded yet
+            for (path, name, node) in
+                snapshot
+                    .fs()
+                    .all_sub_files(origin, &ctx.prefix, ctx.postfix.clone())
+            {
+                let len = path.as_str().chars().filter(|c| c == &'.').count();
+                top.push(CompletionOpt::new(
+                    match node {
+                        FSNode::Dir => CompletionKind::Folder,
+                        _ => CompletionKind::File,
+                    },
+                    name,
+                    path.clone(),
+                    len,
+                    TextOP::Put(path),
+                    &ctx,
+                ))
+            }
         }
     }
     is_incomplete = true
@@ -1359,37 +1070,35 @@
         }
     }
     CompletionEnv::Aggregate { context } => {
-        if ctx.offset != CompletionOffset::SameLine {
-            snapshot.resolve_attributes(
-                origin,
-                context.as_ref().map(|p| p.names.as_slice()).unwrap_or(&[]),
-                |attrib, prefix| {
-                    let common = prefix
-                        .iter()
-                        .zip(ctx.prefix.iter())
-                        .take_while(|(i, k)| i == k)
-                        .count();
-                    if common < ctx.prefix.len() {
-                        return;
-                    }
-                    let file = snapshot.file(attrib.file);
-                    let prefix_str = make_path(prefix[common..].iter());
-                    let kind = file.type_of(attrib.sym).unwrap().into();
-                    if kind != CompletionKind::DontCare {
-                        top.push(CompletionOpt::new(
-                            kind,
-                            *prefix.last().unwrap(),
-                            prefix_str.clone(),
-                            prefix.len(),
-                            TextOP::Put(prefix_str),
-                            &ctx,
-                        ));
-                    }
-                },
-            );
-        }
-        if context.is_none() && ctx.offset == CompletionOffset::Continuous {
-            completion_symbol(&snapshot, origin, &ctx, &mut top, vec![Type::Real]);
+        snapshot.resolve_attributes(
+            origin,
+            context.as_ref().map(|p| p.names.as_slice()).unwrap_or(&[]),
+            |attrib, prefix| {
+                let common = prefix
+                    .iter()
+                    .zip(ctx.prefix.iter())
+                    .take_while(|(i, k)| i == k)
+                    .count();
+                if common < ctx.prefix.len() {
+                    return;
+                }
+                let file = snapshot.file(attrib.file);
+                let prefix_str = make_path(prefix[common..].iter());
+                let kind = file.type_of(attrib.sym).unwrap().into();
+                if kind != CompletionKind::DontCare {
+                    top.push(CompletionOpt::new(
+                        kind,
+                        *prefix.last().unwrap(),
+                        prefix_str.clone(),
+                        prefix.len(),
+                        TextOP::Put(prefix_str),
+                        &ctx,
+                    ));
+                }
+            },
+        );
+        if context.is_none() {
+            completion_symbol(&snapshot, origin, &ctx, &mut top);
         }
     }
     CompletionEnv::ConfigRootKey => add_keywords(
@@ -1422,16 +1131,7 @@
     };
     info!("Stat completion in {:?} {:#?}", origin, ctx);
     if let (Some(ctx), Some(origin)) = (ctx, origin) {
-        let (mut top, is_incomplete) =
-            compute_completions_impl(snapshot.clone(), draft, pos.clone(), &ctx, origin);
-        if matches!(
-            ctx.env,
-            CompletionEnv::Constraint
-                | CompletionEnv::Numeric
-                | CompletionEnv::Aggregate { context: _ }
-        ) {
-            compute_constraint_completion(&pos, draft, &snapshot, &mut top, &ctx, origin);
-        }
+        let (top, is_incomplete) = compute_completions_impl(snapshot, draft, pos, &ctx, origin);
         let items = top
             .into_sorted_vec()
             .into_iter()
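Review note: compute_completions no longer runs a second constraint pass; everything is decided inside compute_completions_impl by one match over (env, offset). A schematic of the consolidated dispatch (enums abbreviated, return values are just labels):

    #[allow(dead_code)]
    enum Env {
        Constraint,
        Numeric,
        Feature,
    }
    #[allow(dead_code)]
    enum Offset {
        SameLine,
        Cut,
        Continuous,
        Dot,
    }

    // mirrors the consolidated arms in compute_completions_impl
    fn plan(env: Env, offset: Offset) -> &'static str {
        match (env, offset) {
            (Env::Constraint | Env::Numeric, Offset::SameLine) => {
                "logic ops + function keywords + symbols"
            }
            (Env::Constraint | Env::Numeric, Offset::Cut | Offset::Continuous) => {
                "function keywords + symbols"
            }
            _ => "symbols",
        }
    }

    fn main() {
        assert_eq!(plan(Env::Numeric, Offset::SameLine), "logic ops + function keywords + symbols");
        assert_eq!(plan(Env::Feature, Offset::Dot), "symbols");
    }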
diff --git a/uvls/src/main.rs b/uvls/src/main.rs
index 58de89c3..38a1ce83 100644
--- a/uvls/src/main.rs
+++ b/uvls/src/main.rs
@@ -8,7 +8,6 @@ use log::info;
 use percent_encoding::percent_decode_str;
 use serde::Serialize;
 use std::io::{Read, Write};
-use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use std::time::SystemTime;
 use tokio::{join, spawn};
@@ -30,6 +29,7 @@ impl Default for Settings {
         Settings { has_webview: false }
     }
 }
+
 //The LSP
 struct Backend {
     client: Client,
@@ -73,12 +73,13 @@ impl Backend {
         }
     }
 }
 //load a file, this is tricky because the editor can also load it at the same time
-fn load_blocking(uri: Url, pipeline: &AsyncPipeline) {
+pub fn load_blocking(uri: Url, pipeline: &AsyncPipeline) {
     if let Err(e) = std::fs::File::open(uri.to_file_path().unwrap()).and_then(|mut f| {
         let meta = f.metadata()?;
         let modified = meta.modified()?;
         if !pipeline.should_load(&uri, modified) {
+            info!("load problem");
             return Ok(());
         }
         let mut data = String::new();
@@ -89,22 +90,7 @@
         info!("Failed to load file {} : {}", uri, e);
     }
 }
-//load all files under given a path
-fn load_all_blocking(path: &Path, pipeline: AsyncPipeline) {
-    for e in walkdir::WalkDir::new(path)
-        .into_iter()
-        .filter_map(|e| e.ok())
-        .filter(|e| e.path().is_file())
-        .filter(|e| {
-            e.path()
-                .extension()
-                .map(|e| e == std::ffi::OsStr::new("uvl"))
-                .unwrap_or(false)
-        })
-    {
-        load_blocking(Url::from_file_path(e.path()).unwrap(), &pipeline)
-    }
-}
+
 fn shutdown_error() -> tower_lsp::jsonrpc::Error {
     tower_lsp::jsonrpc::Error {
         code: tower_lsp::jsonrpc::ErrorCode::InternalError,
@@ -117,22 +103,6 @@ impl LanguageServer for Backend {
     async fn initialize(&self, init_params: InitializeParams) -> Result<InitializeResult> {
         #[allow(deprecated)]
-        let root_folder = init_params
-            .root_path
-            .as_deref()
-            .or_else(|| init_params.root_uri.as_ref().map(|p| p.path()))
-            .map(PathBuf::from);
-        if let Some(root_folder) = root_folder {
-            let semantic = self.pipeline.clone();
-            //cheap fix for better intial load, we should really use priority model to prefer
-            //editor owned files
-            spawn(async move {
-                tokio::task::spawn_blocking(move || {
-                    load_all_blocking(&root_folder, semantic);
-                })
-                .await
-            });
-        }
         if init_params
             .client_info
             .map(|info| matches!(info.name.as_str(), "Visual Studio Code"))
             .unwrap_or(false)
         {
             self.settings.lock().has_webview = true;
         }
+
+        self.pipeline.import_handler();
+
         Ok(InitializeResult {
             server_info: Some(ServerInfo {
                 name: String::from("uvl lsp"),
@@ -224,11 +196,21 @@ impl LanguageServer for Backend {
     }
     async fn did_open(&self, params: DidOpenTextDocumentParams) {
         info!("received did_open {:?}", params.text_document.uri);
-        self.pipeline.open(
-            params.text_document.uri,
-            params.text_document.text,
-            DocumentState::OwnedByEditor,
-        );
+        if self
+            .pipeline
+            .root()
+            .borrow()
+            .contains(&params.text_document.uri)
+        {
+            self.pipeline.open(
+                params.text_document.uri,
+                params.text_document.text,
+                DocumentState::OwnedByEditor,
+            );
+        } else {
+            load_blocking(params.text_document.uri, &self.pipeline);
+        }
+        info!("done did_open");
     }
     async fn did_change(&self, params: DidChangeTextDocumentParams) {
@@ -365,6 +347,7 @@
             match i.typ {
                 FileChangeType::CREATED => {
                     self.load(i.uri);
+                    break;
                 }
                 FileChangeType::CHANGED => {
                     self.load(i.uri);