diff --git a/Cargo.lock b/Cargo.lock index c91236fc689..f2069c55e36 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -354,9 +354,9 @@ checksum = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674" [[package]] name = "fsevent" -version = "0.4.0" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6" +checksum = "97f347202c95c98805c216f9e1df210e8ebaec9fdb2365700a43c10797a35e63" dependencies = [ "bitflags", "fsevent-sys", @@ -364,9 +364,9 @@ dependencies = [ [[package]] name = "fsevent-sys" -version = "2.0.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0" +checksum = "77a29c77f1ca394c3e73a9a5d24cfcabb734682d9634fc398f2204a63c994120" dependencies = [ "libc", ] @@ -483,9 +483,9 @@ dependencies = [ [[package]] name = "inotify" -version = "0.7.1" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4816c66d2c8ae673df83366c18341538f234a26d65a9ecea5c348b453ac1d02f" +checksum = "46dd0a94b393c730779ccfd2a872b67b1eb67be3fc33082e733bdb38b5fde4d4" dependencies = [ "bitflags", "inotify-sys", @@ -766,11 +766,13 @@ dependencies = [ [[package]] name = "notify" -version = "4.0.15" +version = "5.0.0-pre.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80ae4a7688d1fab81c5bf19c64fc8db920be8d519ce6336ed4e7efe024724dbd" +checksum = "77d03607cf88b4b160ba0e9ed425fff3cee3b55ac813f0c685b3a3772da37d0e" dependencies = [ + "anymap", "bitflags", + "crossbeam-channel", "filetime", "fsevent", "fsevent-sys", @@ -952,7 +954,9 @@ dependencies = [ "relative-path", "rustc-hash", "salsa", + "stdx", "test_utils", + "vfs", ] [[package]] @@ -1232,22 +1236,6 @@ dependencies = [ "smol_str", ] -[[package]] -name = "ra_vfs" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf31a173fc77ec59c27cf39af6baa137b40f4dbd45a8b3eccb1b2e4cfc922c1" -dependencies = [ - "crossbeam-channel", - "jod-thread", - "log", - "notify", - "parking_lot", - "relative-path", - "rustc-hash", - "walkdir", -] - [[package]] name = "rand" version = "0.7.3" @@ -1405,7 +1393,6 @@ dependencies = [ "ra_syntax", "ra_text_edit", "ra_tt", - "ra_vfs", "rand", "rustc-hash", "serde", @@ -1414,6 +1401,8 @@ dependencies = [ "tempfile", "test_utils", "threadpool", + "vfs", + "vfs-notify", "winapi 0.3.8", ] @@ -1763,12 +1752,23 @@ dependencies = [ [[package]] name = "vfs" version = "0.1.0" +dependencies = [ + "paths", + "rustc-hash", +] + +[[package]] +name = "vfs-notify" +version = "0.1.0" dependencies = [ "crossbeam-channel", "globset", "jod-thread", + "log", + "notify", "paths", "rustc-hash", + "vfs", "walkdir", ] diff --git a/crates/paths/src/lib.rs b/crates/paths/src/lib.rs index 32267f2e6e3..45b19c45a0d 100644 --- a/crates/paths/src/lib.rs +++ b/crates/paths/src/lib.rs @@ -2,7 +2,7 @@ //! relative paths. 
use std::{ convert::{TryFrom, TryInto}, - io, ops, + ops, path::{Component, Path, PathBuf}, }; @@ -46,9 +46,6 @@ impl TryFrom<&str> for AbsPathBuf { } impl AbsPathBuf { - pub fn canonicalized(path: &Path) -> io::Result { - path.canonicalize().map(|it| AbsPathBuf::try_from(it).unwrap()) - } pub fn as_path(&self) -> &AbsPath { AbsPath::new_unchecked(self.0.as_path()) } diff --git a/crates/ra_assists/src/tests.rs b/crates/ra_assists/src/tests.rs index 62dd3547fa2..55576813fc5 100644 --- a/crates/ra_assists/src/tests.rs +++ b/crates/ra_assists/src/tests.rs @@ -1,10 +1,8 @@ mod generated; -use std::sync::Arc; - use hir::Semantics; use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt}; -use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase}; +use ra_ide_db::RootDatabase; use ra_syntax::TextRange; use test_utils::{ assert_eq_text, extract_offset, extract_range, extract_range_or_offset, RangeOrOffset, @@ -13,11 +11,7 @@ use test_utils::{ use crate::{handlers::Handler, Assist, AssistConfig, AssistContext, Assists}; pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) { - let (mut db, file_id) = RootDatabase::with_single_file(text); - // FIXME: ideally, this should be done by the above `RootDatabase::with_single_file`, - // but it looks like this might need specialization? :( - db.set_local_roots(Arc::new(vec![db.file_source_root(file_id)])); - (db, file_id) + RootDatabase::with_single_file(text) } pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_after: &str) { @@ -72,8 +66,7 @@ enum ExpectedResult<'a> { fn check(handler: Handler, before: &str, expected: ExpectedResult) { let (text_without_caret, file_with_caret_id, range_or_offset, db) = if before.contains("//-") { - let (mut db, position) = RootDatabase::with_position(before); - db.set_local_roots(Arc::new(vec![db.file_source_root(position.file_id)])); + let (db, position) = RootDatabase::with_position(before); ( db.file_text(position.file_id).as_ref().to_owned(), position.file_id, diff --git a/crates/ra_db/Cargo.toml b/crates/ra_db/Cargo.toml index 8ab409158c4..372fb242b10 100644 --- a/crates/ra_db/Cargo.toml +++ b/crates/ra_db/Cargo.toml @@ -17,3 +17,5 @@ ra_cfg = { path = "../ra_cfg" } ra_prof = { path = "../ra_prof" } ra_tt = { path = "../ra_tt" } test_utils = { path = "../test_utils" } +vfs = { path = "../vfs" } +stdx = { path = "../stdx" } diff --git a/crates/ra_db/src/fixture.rs b/crates/ra_db/src/fixture.rs index af8fe11ec62..f7d9118a9a3 100644 --- a/crates/ra_db/src/fixture.rs +++ b/crates/ra_db/src/fixture.rs @@ -57,17 +57,16 @@ //! fn insert_source_code_here() {} //! " //! 
``` - -use std::str::FromStr; -use std::sync::Arc; +use std::{str::FromStr, sync::Arc}; use ra_cfg::CfgOptions; use rustc_hash::FxHashMap; use test_utils::{extract_offset, parse_fixture, parse_single_fixture, FixtureMeta, CURSOR_MARKER}; +use vfs::{file_set::FileSet, VfsPath}; use crate::{ - input::CrateName, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, RelativePathBuf, - SourceDatabaseExt, SourceRoot, SourceRootId, + input::CrateName, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, SourceDatabaseExt, + SourceRoot, SourceRootId, }; pub const WORKSPACE: SourceRootId = SourceRootId(0); @@ -105,10 +104,10 @@ impl WithFixture for DB {} fn with_single_file(db: &mut dyn SourceDatabaseExt, ra_fixture: &str) -> FileId { let file_id = FileId(0); - let rel_path: RelativePathBuf = "/main.rs".into(); + let mut file_set = vfs::file_set::FileSet::default(); + file_set.insert(file_id, vfs::VfsPath::new_virtual_path("/main.rs".to_string())); - let mut source_root = SourceRoot::new_local(); - source_root.insert_file(rel_path.clone(), file_id); + let source_root = SourceRoot::new_local(file_set); let fixture = parse_single_fixture(ra_fixture); @@ -128,7 +127,6 @@ fn with_single_file(db: &mut dyn SourceDatabaseExt, ra_fixture: &str) -> FileId meta.cfg, meta.env, Default::default(), - Default::default(), ); crate_graph } else { @@ -140,13 +138,11 @@ fn with_single_file(db: &mut dyn SourceDatabaseExt, ra_fixture: &str) -> FileId CfgOptions::default(), Env::default(), Default::default(), - Default::default(), ); crate_graph }; db.set_file_text(file_id, Arc::new(ra_fixture.to_string())); - db.set_file_relative_path(file_id, rel_path); db.set_file_source_root(file_id, WORKSPACE); db.set_source_root(WORKSPACE, Arc::new(source_root)); db.set_crate_graph(Arc::new(crate_graph)); @@ -162,7 +158,7 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option = None; - let mut source_root = SourceRoot::new_local(); + let mut file_set = FileSet::default(); let mut source_root_id = WORKSPACE; let mut source_root_prefix = "/".to_string(); let mut file_id = FileId(0); @@ -172,8 +168,8 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option { - let source_root = std::mem::replace(&mut source_root, SourceRoot::new_local()); - db.set_source_root(source_root_id, Arc::new(source_root)); + let file_set = std::mem::replace(&mut file_set, FileSet::default()); + db.set_source_root(source_root_id, Arc::new(SourceRoot::new_local(file_set))); source_root_id.0 += 1; source_root_prefix = path; continue; @@ -190,7 +186,6 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option Option Option Option, + pub(crate) file_set: FileSet, } impl SourceRoot { - pub fn new_local() -> SourceRoot { - SourceRoot { is_library: false, files: Default::default() } + pub fn new_local(file_set: FileSet) -> SourceRoot { + SourceRoot { is_library: false, file_set } } - pub fn new_library() -> SourceRoot { - SourceRoot { is_library: true, files: Default::default() } + pub fn new_library(file_set: FileSet) -> SourceRoot { + SourceRoot { is_library: true, file_set } } - pub fn insert_file(&mut self, path: RelativePathBuf, file_id: FileId) { - self.files.insert(path, file_id); - } - pub fn remove_file(&mut self, path: &RelativePath) { - self.files.remove(path); - } - pub fn walk(&self) -> impl Iterator + '_ { - self.files.values().copied() - } - pub fn file_by_relative_path(&self, path: &RelativePath) -> Option { - self.files.get(path).copied() + pub fn iter(&self) -> impl Iterator + '_ { + 
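The fixture rework above replaces relative paths with a `FileSet` of virtual `VfsPath`s and a `SourceRoot` built from that set. A minimal sketch of the same wiring, using the re-exports this patch adds to `ra_db` (the helper name `add_fixture_file` is made up for illustration):

```rust
use std::sync::Arc;

use ra_db::{FileId, FileSet, SourceDatabaseExt, SourceRoot, SourceRootId, VfsPath};

fn add_fixture_file(db: &mut dyn SourceDatabaseExt, text: &str) -> FileId {
    let file_id = FileId(0);

    // Fixture files never touch the disk, so they get "virtual" VfsPaths.
    let mut file_set = FileSet::default();
    file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string()));

    // A SourceRoot is now just a FileSet plus an `is_library` flag.
    let source_root = SourceRoot::new_local(file_set);

    db.set_file_text(file_id, Arc::new(text.to_string()));
    db.set_file_source_root(file_id, SourceRootId(0));
    db.set_source_root(SourceRootId(0), Arc::new(source_root));
    file_id
}
```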
self.file_set.iter() } } @@ -141,7 +120,6 @@ pub struct CrateData { pub display_name: Option, pub cfg_options: CfgOptions, pub env: Env, - pub extern_source: ExternSource, pub dependencies: Vec, pub proc_macro: Vec, } @@ -152,22 +130,11 @@ pub enum Edition { Edition2015, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct ExternSourceId(pub u32); - #[derive(Default, Debug, Clone, PartialEq, Eq)] pub struct Env { entries: FxHashMap, } -// FIXME: Redesign vfs for solve the following limitation ? -// Note: Some env variables (e.g. OUT_DIR) are located outside of the -// crate. We store a map to allow remap it to ExternSourceId -#[derive(Default, Debug, Clone, PartialEq, Eq)] -pub struct ExternSource { - extern_paths: FxHashMap, -} - #[derive(Debug, Clone, PartialEq, Eq)] pub struct Dependency { pub crate_id: CrateId, @@ -182,7 +149,6 @@ impl CrateGraph { display_name: Option, cfg_options: CfgOptions, env: Env, - extern_source: ExternSource, proc_macro: Vec<(SmolStr, Arc)>, ) -> CrateId { let proc_macro = @@ -194,7 +160,6 @@ impl CrateGraph { display_name, cfg_options, env, - extern_source, proc_macro, dependencies: Vec::new(), }; @@ -334,20 +299,6 @@ impl Env { } } -impl ExternSource { - pub fn extern_path(&self, path: &Path) -> Option<(ExternSourceId, RelativePathBuf)> { - self.extern_paths.iter().find_map(|(root_path, id)| { - let rel_path = path.strip_prefix(root_path).ok()?; - let rel_path = RelativePathBuf::from_path(rel_path).ok()?; - Some((*id, rel_path)) - }) - } - - pub fn set_extern_path(&mut self, root_path: &Path, root: ExternSourceId) { - self.extern_paths.insert(root_path.to_path_buf(), root); - } -} - #[derive(Debug)] pub struct ParseEditionError { invalid_input: String, @@ -378,7 +329,6 @@ mod tests { CfgOptions::default(), Env::default(), Default::default(), - Default::default(), ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -387,7 +337,6 @@ mod tests { CfgOptions::default(), Env::default(), Default::default(), - Default::default(), ); let crate3 = graph.add_crate_root( FileId(3u32), @@ -396,7 +345,6 @@ mod tests { CfgOptions::default(), Env::default(), Default::default(), - Default::default(), ); assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); @@ -413,7 +361,6 @@ mod tests { CfgOptions::default(), Env::default(), Default::default(), - Default::default(), ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -422,7 +369,6 @@ mod tests { CfgOptions::default(), Env::default(), Default::default(), - Default::default(), ); assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); assert!(graph.add_dep(crate2, CrateName::new("crate2").unwrap(), crate2).is_err()); @@ -438,7 +384,6 @@ mod tests { CfgOptions::default(), Env::default(), Default::default(), - Default::default(), ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -447,7 +392,6 @@ mod tests { CfgOptions::default(), Env::default(), Default::default(), - Default::default(), ); let crate3 = graph.add_crate_root( FileId(3u32), @@ -456,7 +400,6 @@ mod tests { CfgOptions::default(), Env::default(), Default::default(), - Default::default(), ); assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); @@ -472,7 +415,6 @@ mod tests { CfgOptions::default(), Env::default(), Default::default(), - Default::default(), ); let crate2 = 
graph.add_crate_root( FileId(2u32), @@ -481,7 +423,6 @@ mod tests { CfgOptions::default(), Env::default(), Default::default(), - Default::default(), ); assert!(graph .add_dep(crate1, CrateName::normalize_dashes("crate-name-with-dashes"), crate2) diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs index 80ddb6058af..875290259be 100644 --- a/crates/ra_db/src/lib.rs +++ b/crates/ra_db/src/lib.rs @@ -12,12 +12,13 @@ use rustc_hash::FxHashSet; pub use crate::{ cancellation::Canceled, input::{ - CrateData, CrateGraph, CrateId, CrateName, Dependency, Edition, Env, ExternSource, - ExternSourceId, FileId, ProcMacroId, SourceRoot, SourceRootId, + CrateData, CrateGraph, CrateId, CrateName, Dependency, Edition, Env, FileId, ProcMacroId, + SourceRoot, SourceRootId, }, }; pub use relative_path::{RelativePath, RelativePathBuf}; pub use salsa; +pub use vfs::{file_set::FileSet, AbsPathBuf, VfsPath}; #[macro_export] macro_rules! impl_intern_key { @@ -125,8 +126,6 @@ pub trait SourceDatabaseExt: SourceDatabase { #[salsa::input] fn file_text(&self, file_id: FileId) -> Arc; /// Path to a file, relative to the root of its source root. - #[salsa::input] - fn file_relative_path(&self, file_id: FileId) -> RelativePathBuf; /// Source root of the file. #[salsa::input] fn file_source_root(&self, file_id: FileId) -> SourceRootId; @@ -161,24 +160,9 @@ impl FileLoader for FileLoaderDelegate<&'_ T> { } fn resolve_path(&self, anchor: FileId, path: &str) -> Option { // FIXME: this *somehow* should be platform agnostic... - if std::path::Path::new(path).is_absolute() { - let krate = *self.relevant_crates(anchor).iter().next()?; - let (extern_source_id, relative_file) = - self.0.crate_graph()[krate].extern_source.extern_path(path.as_ref())?; - - let source_root = self.0.source_root(SourceRootId(extern_source_id.0)); - source_root.file_by_relative_path(&relative_file) - } else { - let rel_path = { - let mut rel_path = self.0.file_relative_path(anchor); - assert!(rel_path.pop()); - rel_path.push(path); - rel_path.normalize() - }; - let source_root = self.0.file_source_root(anchor); - let source_root = self.0.source_root(source_root); - source_root.file_by_relative_path(&rel_path) - } + let source_root = self.0.file_source_root(anchor); + let source_root = self.0.source_root(source_root); + source_root.file_set.resolve_path(anchor, path) } fn relevant_crates(&self, file_id: FileId) -> Arc> { diff --git a/crates/ra_hir/src/has_source.rs b/crates/ra_hir/src/has_source.rs index 63b8fd3694d..76c32fc17aa 100644 --- a/crates/ra_hir/src/has_source.rs +++ b/crates/ra_hir/src/has_source.rs @@ -2,7 +2,7 @@ use either::Either; use hir_def::{ - nameres::ModuleSource, + nameres::{ModuleOrigin, ModuleSource}, src::{HasChildSource, HasSource as _}, Lookup, VariantId, }; @@ -29,6 +29,14 @@ impl Module { def_map[self.id.local_id].definition_source(db.upcast()) } + pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool { + let def_map = db.crate_def_map(self.id.krate); + match def_map[self.id.local_id].origin { + ModuleOrigin::File { is_mod_rs, .. } => is_mod_rs, + _ => false, + } + } + /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. /// `None` for the crate root. 
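With `file_relative_path` gone, the `resolve_path` query above is answered entirely by the `FileSet` of the anchor file's source root; `mod_resolution` then probes candidates such as `foo.rs` and `foo/mod.rs` through it. A small sketch of that lookup (file ids and paths are illustrative):

```rust
use ra_db::{FileId, FileSet, VfsPath};

fn demo() {
    let mut file_set = FileSet::default();
    let lib_rs = FileId(0);
    let foo_rs = FileId(1);
    file_set.insert(lib_rs, VfsPath::new_virtual_path("/src/lib.rs".to_string()));
    file_set.insert(foo_rs, VfsPath::new_virtual_path("/src/foo.rs".to_string()));

    // `mod foo;` inside lib.rs asks for "foo.rs" next to the anchor file.
    assert_eq!(file_set.resolve_path(lib_rs, "foo.rs"), Some(foo_rs));
    // Candidates that are not in the set (e.g. "foo/mod.rs") come back as None.
    assert_eq!(file_set.resolve_path(lib_rs, "foo/mod.rs"), None);
}
```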
pub fn declaration_source(self, db: &dyn HirDatabase) -> Option> { diff --git a/crates/ra_hir_def/src/nameres.rs b/crates/ra_hir_def/src/nameres.rs index b8560fdc9ee..060273db400 100644 --- a/crates/ra_hir_def/src/nameres.rs +++ b/crates/ra_hir_def/src/nameres.rs @@ -104,6 +104,7 @@ pub enum ModuleOrigin { }, /// Note that non-inline modules, by definition, live inside non-macro file. File { + is_mod_rs: bool, declaration: AstId, definition: FileId, }, diff --git a/crates/ra_hir_def/src/nameres/collector.rs b/crates/ra_hir_def/src/nameres/collector.rs index b8f6aac8f24..cbce0431597 100644 --- a/crates/ra_hir_def/src/nameres/collector.rs +++ b/crates/ra_hir_def/src/nameres/collector.rs @@ -777,11 +777,11 @@ impl ModCollector<'_, '_> { name, path_attr, ) { - Ok((file_id, mod_dir)) => { + Ok((file_id, is_mod_rs, mod_dir)) => { let module_id = self.push_child_module( name.clone(), ast_id, - Some(file_id), + Some((file_id, is_mod_rs)), &visibility, ); let raw_items = self.def_collector.db.raw_items(file_id.into()); @@ -814,7 +814,7 @@ impl ModCollector<'_, '_> { &mut self, name: Name, declaration: AstId, - definition: Option, + definition: Option<(FileId, bool)>, visibility: &crate::visibility::RawVisibility, ) -> LocalModuleId { let vis = self @@ -827,7 +827,9 @@ impl ModCollector<'_, '_> { modules[res].parent = Some(self.module_id); modules[res].origin = match definition { None => ModuleOrigin::Inline { definition: declaration }, - Some(definition) => ModuleOrigin::File { declaration, definition }, + Some((definition, is_mod_rs)) => { + ModuleOrigin::File { declaration, definition, is_mod_rs } + } }; for (name, mac) in modules[self.module_id].scope.collect_legacy_macros() { modules[res].scope.define_legacy_macro(name, mac) diff --git a/crates/ra_hir_def/src/nameres/mod_resolution.rs b/crates/ra_hir_def/src/nameres/mod_resolution.rs index 19fe0615abd..39e9a6d9778 100644 --- a/crates/ra_hir_def/src/nameres/mod_resolution.rs +++ b/crates/ra_hir_def/src/nameres/mod_resolution.rs @@ -44,7 +44,7 @@ impl ModDir { file_id: HirFileId, name: &Name, attr_path: Option<&SmolStr>, - ) -> Result<(FileId, ModDir), String> { + ) -> Result<(FileId, bool, ModDir), String> { let file_id = file_id.original_file(db.upcast()); let mut candidate_files = Vec::new(); @@ -64,11 +64,12 @@ impl ModDir { if let Some(file_id) = db.resolve_path(file_id, candidate.as_str()) { let mut root_non_dir_owner = false; let mut mod_path = RelativePathBuf::new(); - if !(candidate.ends_with("mod.rs") || attr_path.is_some()) { + let is_mod_rs = candidate.ends_with("mod.rs"); + if !(is_mod_rs || attr_path.is_some()) { root_non_dir_owner = true; mod_path.push(&name.to_string()); } - return Ok((file_id, ModDir { path: mod_path, root_non_dir_owner })); + return Ok((file_id, is_mod_rs, ModDir { path: mod_path, root_non_dir_owner })); } } Err(candidate_files.remove(0)) diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs index 47823718fd1..ecac5134e69 100644 --- a/crates/ra_ide/src/lib.rs +++ b/crates/ra_ide/src/lib.rs @@ -47,7 +47,7 @@ use std::sync::Arc; use ra_cfg::CfgOptions; use ra_db::{ salsa::{self, ParallelDatabase}, - CheckCanceled, Env, FileLoader, SourceDatabase, + CheckCanceled, Env, FileLoader, FileSet, SourceDatabase, VfsPath, }; use ra_ide_db::{ symbol_index::{self, FileSymbol}, @@ -78,7 +78,8 @@ pub use crate::{ pub use hir::Documentation; pub use ra_assists::{Assist, AssistConfig, AssistId, ResolvedAssist}; pub use ra_db::{ - Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRootId, + 
Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRoot, + SourceRootId, }; pub use ra_ide_db::{ change::AnalysisChange, @@ -212,11 +213,14 @@ impl Analysis { // `AnalysisHost` for creating a fully-featured analysis. pub fn from_single_file(text: String) -> (Analysis, FileId) { let mut host = AnalysisHost::default(); - let source_root = SourceRootId(0); - let mut change = AnalysisChange::new(); - change.add_root(source_root, true); - let mut crate_graph = CrateGraph::default(); let file_id = FileId(0); + let mut file_set = FileSet::default(); + file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string())); + let source_root = SourceRoot::new_local(file_set); + + let mut change = AnalysisChange::new(); + change.set_roots(vec![source_root]); + let mut crate_graph = CrateGraph::default(); // FIXME: cfg options // Default to enable test for single file. let mut cfg_options = CfgOptions::default(); @@ -228,9 +232,8 @@ impl Analysis { cfg_options, Env::default(), Default::default(), - Default::default(), ); - change.add_file(source_root, file_id, "main.rs".into(), Arc::new(text)); + change.change_file(file_id, Some(Arc::new(text))); change.set_crate_graph(crate_graph); host.apply_change(change); (host.analysis(), file_id) diff --git a/crates/ra_ide/src/mock_analysis.rs b/crates/ra_ide/src/mock_analysis.rs index 76910d09bda..58fafecab0c 100644 --- a/crates/ra_ide/src/mock_analysis.rs +++ b/crates/ra_ide/src/mock_analysis.rs @@ -1,15 +1,12 @@ //! FIXME: write short doc here - -use std::str::FromStr; -use std::sync::Arc; +use std::{str::FromStr, sync::Arc}; use ra_cfg::CfgOptions; -use ra_db::{CrateName, Env}; +use ra_db::{CrateName, Env, FileSet, SourceRoot, VfsPath}; use test_utils::{extract_offset, extract_range, parse_fixture, FixtureEntry, CURSOR_MARKER}; use crate::{ Analysis, AnalysisChange, AnalysisHost, CrateGraph, Edition, FileId, FilePosition, FileRange, - SourceRootId, }; #[derive(Debug)] @@ -159,9 +156,8 @@ impl MockAnalysis { } pub fn analysis_host(self) -> AnalysisHost { let mut host = AnalysisHost::default(); - let source_root = SourceRootId(0); let mut change = AnalysisChange::new(); - change.add_root(source_root, true); + let mut file_set = FileSet::default(); let mut crate_graph = CrateGraph::default(); let mut root_crate = None; for (i, data) in self.files.into_iter().enumerate() { @@ -179,7 +175,6 @@ impl MockAnalysis { cfg_options, env, Default::default(), - Default::default(), )); } else if path.ends_with("/lib.rs") { let base = &path[..path.len() - "/lib.rs".len()]; @@ -191,7 +186,6 @@ impl MockAnalysis { cfg_options, env, Default::default(), - Default::default(), ); if let Some(root_crate) = root_crate { crate_graph @@ -199,9 +193,12 @@ impl MockAnalysis { .unwrap(); } } - change.add_file(source_root, file_id, path.into(), Arc::new(data.content().to_owned())); + let path = VfsPath::new_virtual_path(path.to_string()); + file_set.insert(file_id, path); + change.change_file(file_id, Some(Arc::new(data.content().to_owned()))); } change.set_crate_graph(crate_graph); + change.set_roots(vec![SourceRoot::new_local(file_set)]); host.apply_change(change); host } diff --git a/crates/ra_ide/src/parent_module.rs b/crates/ra_ide/src/parent_module.rs index fa1535da5b0..bc7f6547004 100644 --- a/crates/ra_ide/src/parent_module.rs +++ b/crates/ra_ide/src/parent_module.rs @@ -145,7 +145,6 @@ mod tests { CfgOptions::default(), Env::default(), Default::default(), - Default::default(), ); let mut change = AnalysisChange::new(); 
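Taken together, `from_single_file` and `MockAnalysis` above show the new client-side shape of a change: roots are whole `SourceRoot` values passed via `set_roots`, file contents go through `change_file`, and `add_crate_root` has lost its `extern_source` argument. A condensed sketch (function name and values are illustrative):

```rust
use std::sync::Arc;

use ra_cfg::CfgOptions;
use ra_db::{Edition, Env, FileId, FileSet, VfsPath};
use ra_ide::{AnalysisChange, AnalysisHost, CrateGraph, SourceRoot};

fn single_file_host(text: &str) -> (AnalysisHost, FileId) {
    let mut host = AnalysisHost::default();
    let file_id = FileId(0);

    let mut file_set = FileSet::default();
    file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string()));

    let mut crate_graph = CrateGraph::default();
    // Note: no `extern_source` argument any more.
    crate_graph.add_crate_root(
        file_id,
        Edition::Edition2018,
        None,
        CfgOptions::default(),
        Env::default(),
        Default::default(),
    );

    let mut change = AnalysisChange::new();
    change.set_roots(vec![SourceRoot::new_local(file_set)]);
    change.change_file(file_id, Some(Arc::new(text.to_string())));
    change.set_crate_graph(crate_graph);
    host.apply_change(change);
    (host, file_id)
}
```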
change.set_crate_graph(crate_graph); diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs index 99c2581b7df..6edf565b536 100644 --- a/crates/ra_ide/src/references/rename.rs +++ b/crates/ra_ide/src/references/rename.rs @@ -1,7 +1,7 @@ //! FIXME: write short doc here use hir::{Module, ModuleDef, ModuleSource, Semantics}; -use ra_db::{RelativePathBuf, SourceDatabaseExt}; +use ra_db::SourceDatabaseExt; use ra_ide_db::{ defs::{classify_name, classify_name_ref, Definition, NameClass, NameRefClass}, RootDatabase, @@ -109,9 +109,8 @@ fn rename_mod( let file_id = src.file_id.original_file(db); match src.value { ModuleSource::SourceFile(..) => { - let mod_path: RelativePathBuf = db.file_relative_path(file_id); // mod is defined in path/to/dir/mod.rs - let dst = if mod_path.file_stem() == Some("mod") { + let dst = if module.is_mod_rs(db) { format!("../{}/mod.rs", new_name) } else { format!("{}.rs", new_name) diff --git a/crates/ra_ide/src/ssr.rs b/crates/ra_ide/src/ssr.rs index 03f18c617fd..6cb96608bed 100644 --- a/crates/ra_ide/src/ssr.rs +++ b/crates/ra_ide/src/ssr.rs @@ -41,7 +41,7 @@ pub fn parse_search_replace( match_finder.add_rule(rule); for &root in db.local_roots().iter() { let sr = db.source_root(root); - for file_id in sr.walk() { + for file_id in sr.iter() { if let Some(edit) = match_finder.edits_for_file(file_id) { edits.push(SourceFileEdit { file_id, edit }); } diff --git a/crates/ra_ide_db/src/change.rs b/crates/ra_ide_db/src/change.rs index 98993d571bf..a95f6c13c1f 100644 --- a/crates/ra_ide_db/src/change.rs +++ b/crates/ra_ide_db/src/change.rs @@ -9,26 +9,22 @@ use ra_db::{ SourceRootId, }; use ra_prof::{memory_usage, profile, Bytes}; -use rustc_hash::FxHashMap; +use rustc_hash::FxHashSet; use crate::{symbol_index::SymbolsDatabase, RootDatabase}; #[derive(Default)] pub struct AnalysisChange { - new_roots: Vec<(SourceRootId, bool)>, - roots_changed: FxHashMap, - files_changed: Vec<(FileId, Arc)>, + roots: Option>, + files_changed: Vec<(FileId, Option>)>, crate_graph: Option, } impl fmt::Debug for AnalysisChange { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let mut d = fmt.debug_struct("AnalysisChange"); - if !self.new_roots.is_empty() { - d.field("new_roots", &self.new_roots); - } - if !self.roots_changed.is_empty() { - d.field("roots_changed", &self.roots_changed); + if let Some(roots) = &self.roots { + d.field("roots", roots); } if !self.files_changed.is_empty() { d.field("files_changed", &self.files_changed.len()); @@ -45,30 +41,14 @@ impl AnalysisChange { AnalysisChange::default() } - pub fn add_root(&mut self, root_id: SourceRootId, is_local: bool) { - self.new_roots.push((root_id, is_local)); + pub fn set_roots(&mut self, roots: Vec) { + self.roots = Some(roots); } - pub fn add_file( - &mut self, - root_id: SourceRootId, - file_id: FileId, - path: RelativePathBuf, - text: Arc, - ) { - let file = AddFile { file_id, path, text }; - self.roots_changed.entry(root_id).or_default().added.push(file); - } - - pub fn change_file(&mut self, file_id: FileId, new_text: Arc) { + pub fn change_file(&mut self, file_id: FileId, new_text: Option>) { self.files_changed.push((file_id, new_text)) } - pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) { - let file = RemoveFile { file_id, path }; - self.roots_changed.entry(root_id).or_default().removed.push(file); - } - pub fn set_crate_graph(&mut self, graph: CrateGraph) { self.crate_graph = Some(graph); } @@ -114,31 +94,32 @@ impl RootDatabase { let _p = 
profile("RootDatabase::apply_change"); self.request_cancellation(); log::info!("apply_change {:?}", change); - if !change.new_roots.is_empty() { - let mut local_roots = Vec::clone(&self.local_roots()); - let mut libraries = Vec::clone(&self.library_roots()); - for (root_id, is_local) in change.new_roots { - let root = - if is_local { SourceRoot::new_local() } else { SourceRoot::new_library() }; + if let Some(roots) = change.roots { + let mut local_roots = FxHashSet::default(); + let mut library_roots = FxHashSet::default(); + for (idx, root) in roots.into_iter().enumerate() { + let root_id = SourceRootId(idx as u32); let durability = durability(&root); - self.set_source_root_with_durability(root_id, Arc::new(root), durability); - if is_local { - local_roots.push(root_id); + if root.is_library { + library_roots.insert(root_id); } else { - libraries.push(root_id) + local_roots.insert(root_id); } + for file_id in root.iter() { + self.set_file_source_root_with_durability(file_id, root_id, durability); + } + self.set_source_root_with_durability(root_id, Arc::new(root), durability); } self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); - self.set_library_roots_with_durability(Arc::new(libraries), Durability::HIGH); + self.set_library_roots_with_durability(Arc::new(library_roots), Durability::HIGH); } - for (root_id, root_change) in change.roots_changed { - self.apply_root_change(root_id, root_change); - } for (file_id, text) in change.files_changed { let source_root_id = self.file_source_root(file_id); let source_root = self.source_root(source_root_id); let durability = durability(&source_root); + // XXX: can't actually remove the file, just reset the text + let text = text.unwrap_or_default(); self.set_file_text_with_durability(file_id, text, durability) } if let Some(crate_graph) = change.crate_graph { @@ -146,26 +127,6 @@ impl RootDatabase { } } - fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) { - let mut source_root = SourceRoot::clone(&self.source_root(root_id)); - let durability = durability(&source_root); - for add_file in root_change.added { - self.set_file_text_with_durability(add_file.file_id, add_file.text, durability); - self.set_file_relative_path_with_durability( - add_file.file_id, - add_file.path.clone(), - durability, - ); - self.set_file_source_root_with_durability(add_file.file_id, root_id, durability); - source_root.insert_file(add_file.path, add_file.file_id); - } - for remove_file in root_change.removed { - self.set_file_text_with_durability(remove_file.file_id, Default::default(), durability); - source_root.remove_file(&remove_file.path); - } - self.set_source_root_with_durability(root_id, Arc::new(source_root), durability); - } - pub fn maybe_collect_garbage(&mut self) { if cfg!(feature = "wasm") { return; diff --git a/crates/ra_ide_db/src/search.rs b/crates/ra_ide_db/src/search.rs index 335a1ad03c7..44d5c35e684 100644 --- a/crates/ra_ide_db/src/search.rs +++ b/crates/ra_ide_db/src/search.rs @@ -157,14 +157,14 @@ impl Definition { if let Some(Visibility::Public) = vis { let source_root_id = db.file_source_root(file_id); let source_root = db.source_root(source_root_id); - let mut res = source_root.walk().map(|id| (id, None)).collect::>(); + let mut res = source_root.iter().map(|id| (id, None)).collect::>(); let krate = module.krate(); for rev_dep in krate.reverse_dependencies(db) { let root_file = rev_dep.root_file(db); let source_root_id = db.file_source_root(root_file); let source_root = 
db.source_root(source_root_id); - res.extend(source_root.walk().map(|id| (id, None))); + res.extend(source_root.iter().map(|id| (id, None))); } return SearchScope::new(res); } diff --git a/crates/ra_ide_db/src/symbol_index.rs b/crates/ra_ide_db/src/symbol_index.rs index 25c99813f26..6929055b26e 100644 --- a/crates/ra_ide_db/src/symbol_index.rs +++ b/crates/ra_ide_db/src/symbol_index.rs @@ -42,7 +42,7 @@ use ra_syntax::{ SyntaxNode, SyntaxNodePtr, TextRange, WalkEvent, }; use rayon::prelude::*; -use rustc_hash::FxHashMap; +use rustc_hash::{FxHashMap, FxHashSet}; use crate::RootDatabase; @@ -93,11 +93,11 @@ pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt + ParallelDa /// The set of "local" (that is, from the current workspace) roots. /// Files in local roots are assumed to change frequently. #[salsa::input] - fn local_roots(&self) -> Arc>; + fn local_roots(&self) -> Arc>; /// The set of roots for crates.io libraries. /// Files in libraries are assumed to never change. #[salsa::input] - fn library_roots(&self) -> Arc>; + fn library_roots(&self) -> Arc>; } fn library_symbols( @@ -111,7 +111,7 @@ fn library_symbols( .map(|&root_id| { let root = db.source_root(root_id); let files = root - .walk() + .iter() .map(|it| (it, SourceDatabaseExt::file_text(db, it))) .collect::>(); let symbol_index = SymbolIndex::for_files( @@ -175,7 +175,7 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec { let mut files = Vec::new(); for &root in db.local_roots().iter() { let sr = db.source_root(root); - files.extend(sr.walk()) + files.extend(sr.iter()) } let snap = Snap(db.snapshot()); diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs index 9541362f594..fe3e8168943 100644 --- a/crates/ra_project_model/src/lib.rs +++ b/crates/ra_project_model/src/lib.rs @@ -13,7 +13,7 @@ use std::{ use anyhow::{bail, Context, Result}; use ra_cfg::CfgOptions; -use ra_db::{CrateGraph, CrateName, Edition, Env, ExternSource, ExternSourceId, FileId}; +use ra_db::{CrateGraph, CrateName, Edition, Env, FileId}; use rustc_hash::{FxHashMap, FxHashSet}; use serde_json::from_reader; @@ -246,7 +246,6 @@ impl ProjectWorkspace { pub fn to_crate_graph( &self, target: Option<&str>, - extern_source_roots: &FxHashMap, proc_macro_client: &ProcMacroClient, load: &mut dyn FnMut(&Path) -> Option, ) -> CrateGraph { @@ -280,15 +279,11 @@ impl ProjectWorkspace { }; let mut env = Env::default(); - let mut extern_source = ExternSource::default(); if let Some(out_dir) = &krate.out_dir { // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! 
and option_env!() if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) { env.set("OUT_DIR", out_dir); } - if let Some(&extern_source_id) = extern_source_roots.get(out_dir) { - extern_source.set_extern_path(&out_dir, extern_source_id); - } } let proc_macro = krate .proc_macro_dylib_path @@ -304,7 +299,6 @@ impl ProjectWorkspace { None, cfg_options, env, - extern_source, proc_macro.unwrap_or_default(), ), )) @@ -341,7 +335,6 @@ impl ProjectWorkspace { let file_id = load(&sysroot[krate].root)?; let env = Env::default(); - let extern_source = ExternSource::default(); let proc_macro = vec![]; let crate_name = CrateName::new(&sysroot[krate].name) .expect("Sysroot crate names should not contain dashes"); @@ -352,7 +345,6 @@ impl ProjectWorkspace { Some(crate_name), cfg_options.clone(), env, - extern_source, proc_macro, ); Some((krate, crate_id)) @@ -409,15 +401,11 @@ impl ProjectWorkspace { opts }; let mut env = Env::default(); - let mut extern_source = ExternSource::default(); if let Some(out_dir) = &cargo[pkg].out_dir { // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!() if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) { env.set("OUT_DIR", out_dir); } - if let Some(&extern_source_id) = extern_source_roots.get(out_dir) { - extern_source.set_extern_path(&out_dir, extern_source_id); - } } let proc_macro = cargo[pkg] .proc_macro_dylib_path @@ -431,7 +419,6 @@ impl ProjectWorkspace { Some(CrateName::normalize_dashes(&cargo[pkg].name)), cfg_options, env, - extern_source, proc_macro.clone(), ); if cargo[tgt].kind == TargetKind::Lib { diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index f289a02f68e..68d04f3e32c 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -38,7 +38,8 @@ ra_prof = { path = "../ra_prof" } ra_project_model = { path = "../ra_project_model" } ra_syntax = { path = "../ra_syntax" } ra_text_edit = { path = "../ra_text_edit" } -ra_vfs = "0.6.0" +vfs = { path = "../vfs" } +vfs-notify = { path = "../vfs-notify" } ra_cfg = { path = "../ra_cfg"} # This should only be used in CLI diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index 5c22dce0d44..65f90c83c04 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -1,5 +1,7 @@ //! See `CargoTargetSpec` +use std::path::PathBuf; + use ra_cfg::CfgExpr; use ra_ide::{FileId, RunnableKind, TestId}; use ra_project_model::{self, TargetKind}; @@ -12,6 +14,7 @@ use crate::{global_state::GlobalStateSnapshot, Result}; /// build/test/run the target. #[derive(Clone)] pub(crate) struct CargoTargetSpec { + pub(crate) workspace_root: PathBuf, pub(crate) package: String, pub(crate) target: String, pub(crate) target_kind: TargetKind, @@ -101,6 +104,7 @@ impl CargoTargetSpec { None => return Ok(None), }; let res = CargoTargetSpec { + workspace_root: cargo_ws.workspace_root().to_path_buf(), package: cargo_ws.package_flag(&cargo_ws[cargo_ws[target].package]), target: cargo_ws[target].name.clone(), target_kind: cargo_ws[target].kind, diff --git a/crates/rust-analyzer/src/cli/analysis_bench.rs b/crates/rust-analyzer/src/cli/analysis_bench.rs index b20efe98d8c..4fe99ff6805 100644 --- a/crates/rust-analyzer/src/cli/analysis_bench.rs +++ b/crates/rust-analyzer/src/cli/analysis_bench.rs @@ -1,6 +1,7 @@ //! Benchmark operations like highlighting or goto definition. 
use std::{ + convert::TryFrom, path::{Path, PathBuf}, str::FromStr, sync::Arc, @@ -10,7 +11,7 @@ use std::{ use anyhow::{format_err, Result}; use ra_db::{ salsa::{Database, Durability}, - FileId, SourceDatabaseExt, + AbsPathBuf, FileId, }; use ra_ide::{Analysis, AnalysisChange, AnalysisHost, CompletionConfig, FilePosition, LineCol}; @@ -53,8 +54,7 @@ pub fn analysis_bench( let start = Instant::now(); eprint!("loading: "); - let (mut host, roots) = load_cargo(path, load_output_dirs, with_proc_macro)?; - let db = host.raw_database(); + let (mut host, vfs) = load_cargo(path, load_output_dirs, with_proc_macro)?; eprintln!("{:?}\n", start.elapsed()); let file_id = { @@ -62,22 +62,9 @@ pub fn analysis_bench( BenchWhat::Highlight { path } => path, BenchWhat::Complete(pos) | BenchWhat::GotoDef(pos) => &pos.path, }; - let path = std::env::current_dir()?.join(path).canonicalize()?; - roots - .iter() - .find_map(|(source_root_id, project_root)| { - if project_root.is_member() { - for file_id in db.source_root(*source_root_id).walk() { - let rel_path = db.file_relative_path(file_id); - let abs_path = rel_path.to_path(project_root.path()); - if abs_path == path { - return Some(file_id); - } - } - } - None - }) - .ok_or_else(|| format_err!("Can't find {}", path.display()))? + let path = AbsPathBuf::try_from(path.clone()).unwrap(); + let path = path.into(); + vfs.file_id(&path).ok_or_else(|| format_err!("Can't find {}", path))? }; match &what { @@ -149,7 +136,7 @@ fn do_work T, T>(host: &mut AnalysisHost, file_id: FileId, w let mut text = host.analysis().file_text(file_id).unwrap().to_string(); text.push_str("\n/* Hello world */\n"); let mut change = AnalysisChange::new(); - change.change_file(file_id, Arc::new(text)); + change.change_file(file_id, Some(Arc::new(text))); host.apply_change(change); } work(&host.analysis()); diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 72183da1516..90868760b9b 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -28,26 +28,14 @@ pub fn analysis_stats( with_proc_macro: bool, ) -> Result<()> { let db_load_time = Instant::now(); - let (mut host, roots) = load_cargo(path, load_output_dirs, with_proc_macro)?; + let (mut host, vfs) = load_cargo(path, load_output_dirs, with_proc_macro)?; let db = host.raw_database(); - println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed()); + println!("Database loaded {:?}", db_load_time.elapsed()); let analysis_time = Instant::now(); let mut num_crates = 0; let mut visited_modules = HashSet::new(); let mut visit_queue = Vec::new(); - let members = - roots - .into_iter() - .filter_map(|(source_root_id, project_root)| { - if with_deps || project_root.is_member() { - Some(source_root_id) - } else { - None - } - }) - .collect::>(); - let mut krates = Crate::all(db); if randomize { krates.shuffle(&mut thread_rng()); @@ -55,7 +43,10 @@ pub fn analysis_stats( for krate in krates { let module = krate.root_module(db).expect("crate without root module"); let file_id = module.definition_source(db).file_id; - if members.contains(&db.file_source_root(file_id.original_file(db))) { + let file_id = file_id.original_file(db); + let source_root = db.file_source_root(file_id); + let source_root = db.source_root(source_root); + if !source_root.is_library || with_deps { num_crates += 1; visit_queue.push(module); } @@ -128,7 +119,7 @@ pub fn analysis_stats( if verbosity.is_verbose() { let src = f.source(db); 
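`analysis_bench` above now resolves the requested path straight through the `Vfs` instead of walking source roots and comparing relative paths. The lookup in isolation (the helper name and error messages are made up; the calls mirror the patch):

```rust
use std::convert::TryFrom;
use std::path::PathBuf;

use anyhow::format_err;
use ra_db::{AbsPathBuf, FileId, VfsPath};

fn find_file(vfs: &vfs::Vfs, path: PathBuf) -> anyhow::Result<FileId> {
    // AbsPathBuf refuses relative paths, so join against the cwd first if needed.
    let path = AbsPathBuf::try_from(path).map_err(|_| format_err!("path is not absolute"))?;
    let path = VfsPath::from(path);
    vfs.file_id(&path).ok_or_else(|| format_err!("can't find {}", path))
}
```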
let original_file = src.file_id.original_file(db); - let path = db.file_relative_path(original_file); + let path = vfs.file_path(original_file); let syntax_range = src.value.syntax().text_range(); format_to!(msg, " ({:?} {:?})", path, syntax_range); } @@ -196,7 +187,7 @@ pub fn analysis_stats( let root = db.parse_or_expand(src.file_id).unwrap(); let node = src.map(|e| e.to_node(&root).syntax().clone()); let original_range = original_range(db, node.as_ref()); - let path = db.file_relative_path(original_range.file_id); + let path = vfs.file_path(original_range.file_id); let line_index = host.analysis().file_line_index(original_range.file_id).unwrap(); let text_range = original_range.range; diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index 60daefa3ef7..82b3a8a5326 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs @@ -2,68 +2,57 @@ //! code if any errors are found. use anyhow::anyhow; +use hir::Crate; use ra_db::SourceDatabaseExt; use ra_ide::Severity; use std::{collections::HashSet, path::Path}; use crate::cli::{load_cargo::load_cargo, Result}; -use hir::Semantics; pub fn diagnostics( path: &Path, load_output_dirs: bool, with_proc_macro: bool, - all: bool, + _all: bool, ) -> Result<()> { - let (host, roots) = load_cargo(path, load_output_dirs, with_proc_macro)?; + let (host, _vfs) = load_cargo(path, load_output_dirs, with_proc_macro)?; let db = host.raw_database(); let analysis = host.analysis(); - let semantics = Semantics::new(db); - let members = roots - .into_iter() - .filter_map(|(source_root_id, project_root)| { - // filter out dependencies - if project_root.is_member() { - Some(source_root_id) - } else { - None - } - }) - .collect::>(); let mut found_error = false; let mut visited_files = HashSet::new(); - for source_root_id in members { - for file_id in db.source_root(source_root_id).walk() { - // Filter out files which are not actually modules (unless `--all` flag is - // passed). In the rust-analyzer repository this filters out the parser test files. 
- if semantics.to_module_def(file_id).is_some() || all { - if !visited_files.contains(&file_id) { - let crate_name = if let Some(module) = semantics.to_module_def(file_id) { - if let Some(name) = module.krate().display_name(db) { - format!("{}", name) - } else { - String::from("unknown") - } - } else { - String::from("unknown") - }; - println!( - "processing crate: {}, module: {}", - crate_name, - db.file_relative_path(file_id) - ); - for diagnostic in analysis.diagnostics(file_id).unwrap() { - if matches!(diagnostic.severity, Severity::Error) { - found_error = true; - } - println!("{:?}", diagnostic); - } + let mut work = Vec::new(); + let krates = Crate::all(db); + for krate in krates { + let module = krate.root_module(db).expect("crate without root module"); + let file_id = module.definition_source(db).file_id; + let file_id = file_id.original_file(db); + let source_root = db.file_source_root(file_id); + let source_root = db.source_root(source_root); + if !source_root.is_library { + work.push(module); + } + } - visited_files.insert(file_id); + for module in work { + let file_id = module.definition_source(db).file_id.original_file(db); + if !visited_files.contains(&file_id) { + let crate_name = if let Some(name) = module.krate().display_name(db) { + format!("{}", name) + } else { + String::from("unknown") + }; + println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id)); + for diagnostic in analysis.diagnostics(file_id).unwrap() { + if matches!(diagnostic.severity, Severity::Error) { + found_error = true; } + + println!("{:?}", diagnostic); } + + visited_files.insert(file_id); } } diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index 97367d7c60e..00bbbaf4078 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs @@ -1,32 +1,21 @@ //! Loads a Cargo project into a static instance of analysis, without support //! for incorporating changes. 
- -use std::path::{Path, PathBuf}; +use std::{convert::TryFrom, path::Path, sync::Arc}; use anyhow::Result; use crossbeam_channel::{unbounded, Receiver}; -use ra_db::{ExternSourceId, FileId, SourceRootId}; +use ra_db::{AbsPathBuf, CrateGraph}; use ra_ide::{AnalysisChange, AnalysisHost}; -use ra_project_model::{ - CargoConfig, PackageRoot, ProcMacroClient, ProjectManifest, ProjectWorkspace, -}; -use ra_vfs::{RootEntry, Vfs, VfsChange, VfsTask, Watch}; -use rustc_hash::{FxHashMap, FxHashSet}; +use ra_project_model::{CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace}; +use vfs::loader::Handle; -use crate::vfs_glob::RustPackageFilterBuilder; - -fn vfs_file_to_id(f: ra_vfs::VfsFile) -> FileId { - FileId(f.0) -} -fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId { - SourceRootId(r.0) -} +use crate::global_state::{ProjectFolders, SourceRootConfig}; pub fn load_cargo( root: &Path, load_out_dirs_from_check: bool, with_proc_macro: bool, -) -> Result<(AnalysisHost, FxHashMap)> { +) -> Result<(AnalysisHost, vfs::Vfs)> { let root = std::env::current_dir()?.join(root); let root = ProjectManifest::discover_single(&root)?; let ws = ProjectWorkspace::load( @@ -35,123 +24,74 @@ pub fn load_cargo( true, )?; - let mut extern_dirs = FxHashSet::default(); - let (sender, receiver) = unbounded(); - let sender = Box::new(move |t| sender.send(t).unwrap()); + let mut vfs = vfs::Vfs::default(); + let mut loader = { + let loader = + vfs_notify::LoaderHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap())); + Box::new(loader) + }; - let mut roots = Vec::new(); - let project_roots = ws.to_roots(); - for root in &project_roots { - roots.push(RootEntry::new( - root.path().to_owned(), - RustPackageFilterBuilder::default().set_member(root.is_member()).into_vfs_filter(), - )); - - if let Some(out_dir) = root.out_dir() { - extern_dirs.insert(out_dir.to_path_buf()); - roots.push(RootEntry::new( - out_dir.to_owned(), - RustPackageFilterBuilder::default().set_member(root.is_member()).into_vfs_filter(), - )) - } - } - - let (mut vfs, roots) = Vfs::new(roots, sender, Watch(false)); - - let source_roots = roots - .into_iter() - .map(|vfs_root| { - let source_root_id = vfs_root_to_id(vfs_root); - let project_root = project_roots - .iter() - .find(|it| it.path() == vfs.root2path(vfs_root)) - .unwrap() - .clone(); - (source_root_id, project_root) - }) - .collect::>(); - - let proc_macro_client = if !with_proc_macro { - ProcMacroClient::dummy() - } else { + let proc_macro_client = if with_proc_macro { let path = std::env::current_exe()?; ProcMacroClient::extern_process(path, &["proc-macro"]).unwrap() + } else { + ProcMacroClient::dummy() }; - let host = load(&source_roots, ws, &mut vfs, receiver, extern_dirs, &proc_macro_client); - Ok((host, source_roots)) + + let crate_graph = ws.to_crate_graph(None, &proc_macro_client, &mut |path: &Path| { + let path = AbsPathBuf::try_from(path.to_path_buf()).unwrap(); + let contents = loader.load_sync(&path); + let path = vfs::VfsPath::from(path); + vfs.set_file_contents(path.clone(), contents); + vfs.file_id(&path) + }); + + let project_folders = ProjectFolders::new(&[ws]); + loader.set_config(vfs::loader::Config { load: project_folders.load, watch: vec![] }); + + log::debug!("crate graph: {:?}", crate_graph); + let host = load(crate_graph, project_folders.source_root_config, &mut vfs, &receiver); + Ok((host, vfs)) } pub(crate) fn load( - source_roots: &FxHashMap, - ws: ProjectWorkspace, - vfs: &mut Vfs, - receiver: Receiver, - extern_dirs: FxHashSet, - proc_macro_client: 
&ProcMacroClient, + crate_graph: CrateGraph, + source_root_config: SourceRootConfig, + vfs: &mut vfs::Vfs, + receiver: &Receiver, ) -> AnalysisHost { let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::().ok()); let mut host = AnalysisHost::new(lru_cap); let mut analysis_change = AnalysisChange::new(); // wait until Vfs has loaded all roots - let mut roots_loaded = FxHashSet::default(); - let mut extern_source_roots = FxHashMap::default(); for task in receiver { - vfs.handle_task(task); - let mut done = false; - for change in vfs.commit_changes() { - match change { - VfsChange::AddRoot { root, files } => { - let source_root_id = vfs_root_to_id(root); - let is_local = source_roots[&source_root_id].is_member(); - log::debug!( - "loaded source root {:?} with path {:?}", - source_root_id, - vfs.root2path(root) - ); - analysis_change.add_root(source_root_id, is_local); - - let vfs_root_path = vfs.root2path(root); - if extern_dirs.contains(&vfs_root_path) { - extern_source_roots.insert(vfs_root_path, ExternSourceId(root.0)); - } - - let mut file_map = FxHashMap::default(); - for (vfs_file, path, text) in files { - let file_id = vfs_file_to_id(vfs_file); - analysis_change.add_file(source_root_id, file_id, path.clone(), text); - file_map.insert(path, file_id); - } - roots_loaded.insert(source_root_id); - if roots_loaded.len() == vfs.n_roots() { - done = true; - } + match task { + vfs::loader::Message::Progress { n_entries_done, n_entries_total } => { + if n_entries_done == n_entries_total { + break; } - VfsChange::AddFile { root, file, path, text } => { - let source_root_id = vfs_root_to_id(root); - let file_id = vfs_file_to_id(file); - analysis_change.add_file(source_root_id, file_id, path, text); - } - VfsChange::RemoveFile { .. } | VfsChange::ChangeFile { .. } => { - // We just need the first scan, so just ignore these + } + vfs::loader::Message::Loaded { files } => { + for (path, contents) in files { + vfs.set_file_contents(path.into(), contents) } } } - if done { - break; + } + let changes = vfs.take_changes(); + for file in changes { + if file.exists() { + let contents = vfs.file_contents(file.file_id).to_vec(); + if let Ok(text) = String::from_utf8(contents) { + analysis_change.change_file(file.file_id, Some(Arc::new(text))) + } } } + let source_roots = source_root_config.partition(&vfs); + analysis_change.set_roots(source_roots); - let crate_graph = - ws.to_crate_graph(None, &extern_source_roots, proc_macro_client, &mut |path: &Path| { - // Some path from metadata will be non canonicalized, e.g. /foo/../bar/lib.rs - let path = path.canonicalize().ok()?; - let vfs_file = vfs.load(&path); - log::debug!("vfs file {:?} -> {:?}", path, vfs_file); - vfs_file.map(vfs_file_to_id) - }); - log::debug!("crate graph: {:?}", crate_graph); analysis_change.set_crate_graph(crate_graph); host.apply_change(analysis_change); @@ -167,7 +107,7 @@ mod tests { #[test] fn test_loading_rust_analyzer() { let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); - let (host, _roots) = load_cargo(path, false, false).unwrap(); + let (host, _vfs) = load_cargo(path, false, false).unwrap(); let n_crates = Crate::all(host.raw_database()).len(); // RA has quite a few crates, but the exact count doesn't matter assert!(n_crates > 20); diff --git a/crates/rust-analyzer/src/from_proto.rs b/crates/rust-analyzer/src/from_proto.rs index 40f856e6e86..40d440c67f7 100644 --- a/crates/rust-analyzer/src/from_proto.rs +++ b/crates/rust-analyzer/src/from_proto.rs @@ -1,10 +1,22 @@ //! 
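The reworked `load` function above is, in essence, a synchronous drain of the loader channel. A condensed sketch of the whole pipeline for a single local package (the entry choice and function name are illustrative; the message handling mirrors the patch):

```rust
use crossbeam_channel::unbounded;
use vfs::loader::{Config, Entry, Handle, Message};

fn load_everything(root: vfs::AbsPathBuf) -> vfs::Vfs {
    let (sender, receiver) = unbounded();
    let mut loader =
        vfs_notify::LoaderHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
    loader.set_config(Config { load: vec![Entry::local_cargo_package(root)], watch: vec![] });

    let mut vfs = vfs::Vfs::default();
    for msg in receiver {
        match msg {
            Message::Progress { n_entries_done, n_entries_total } => {
                if n_entries_done == n_entries_total {
                    break; // the initial scan is complete
                }
            }
            Message::Loaded { files } => {
                for (path, contents) in files {
                    vfs.set_file_contents(path.into(), contents);
                }
            }
        }
    }
    vfs
}
```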
Conversion lsp_types types to rust-analyzer specific ones. +use std::convert::TryFrom; + use ra_db::{FileId, FilePosition, FileRange}; use ra_ide::{LineCol, LineIndex}; use ra_syntax::{TextRange, TextSize}; +use vfs::AbsPathBuf; use crate::{global_state::GlobalStateSnapshot, Result}; +pub(crate) fn abs_path(url: &lsp_types::Url) -> Result { + let path = url.to_file_path().map_err(|()| "url is not a file")?; + Ok(AbsPathBuf::try_from(path).unwrap()) +} + +pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result { + abs_path(url).map(vfs::VfsPath::from) +} + pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> TextSize { let line_col = LineCol { line: position.line as u32, col_utf16: position.character as u32 }; line_index.offset(line_col) diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index d04ef4c6133..e2ddb79332a 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -3,30 +3,28 @@ //! //! Each tick provides an immutable snapshot of the state as `WorldSnapshot`. -use std::{ - path::{Path, PathBuf}, - sync::Arc, -}; +use std::{convert::TryFrom, path::Path, sync::Arc}; use crossbeam_channel::{unbounded, Receiver}; use lsp_types::Url; use parking_lot::RwLock; +use ra_db::{CrateId, SourceRoot, VfsPath}; use ra_flycheck::{Flycheck, FlycheckConfig}; -use ra_ide::{Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, SourceRootId}; +use ra_ide::{Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId}; use ra_project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target}; -use ra_vfs::{LineEndings, RootEntry, Vfs, VfsChange, VfsFile, VfsTask, Watch}; use stdx::format_to; +use vfs::{file_set::FileSetConfig, loader::Handle, AbsPathBuf}; use crate::{ config::{Config, FilesWatcher}, diagnostics::{CheckFixes, DiagnosticCollection}, + from_proto, + line_endings::LineEndings, main_loop::request_metrics::{LatestRequests, RequestMetrics}, to_proto::url_from_abs_path, - vfs_glob::{Glob, RustPackageFilterBuilder}, - LspError, Result, + Result, }; -use ra_db::{CrateId, ExternSourceId}; -use rustc_hash::{FxHashMap, FxHashSet}; +use rustc_hash::FxHashMap; fn create_flycheck(workspaces: &[ProjectWorkspace], config: &FlycheckConfig) -> Option { // FIXME: Figure out the multi-workspace situation @@ -50,15 +48,16 @@ fn create_flycheck(workspaces: &[ProjectWorkspace], config: &FlycheckConfig) -> #[derive(Debug)] pub struct GlobalState { pub config: Config, - pub local_roots: Vec, pub workspaces: Arc>, pub analysis_host: AnalysisHost, - pub vfs: Arc>, - pub task_receiver: Receiver, + pub loader: Box, + pub task_receiver: Receiver, pub flycheck: Option, pub diagnostics: DiagnosticCollection, pub proc_macro_client: ProcMacroClient, + pub(crate) vfs: Arc)>>, pub(crate) latest_requests: Arc>, + source_root_config: SourceRootConfig, } /// An immutable snapshot of the world's state at a point in time. 
@@ -68,62 +67,21 @@ pub struct GlobalStateSnapshot { pub analysis: Analysis, pub check_fixes: CheckFixes, pub(crate) latest_requests: Arc>, - vfs: Arc>, + vfs: Arc)>>, } impl GlobalState { pub fn new( workspaces: Vec, lru_capacity: Option, - exclude_globs: &[Glob], config: Config, ) -> GlobalState { let mut change = AnalysisChange::new(); - let mut extern_dirs: FxHashSet = FxHashSet::default(); + let project_folders = ProjectFolders::new(&workspaces); - let mut local_roots = Vec::new(); - let roots: Vec<_> = { - let create_filter = |is_member| { - RustPackageFilterBuilder::default() - .set_member(is_member) - .exclude(exclude_globs.iter().cloned()) - .into_vfs_filter() - }; - let mut roots = Vec::new(); - for root in workspaces.iter().flat_map(ProjectWorkspace::to_roots) { - let path = root.path().to_owned(); - if root.is_member() { - local_roots.push(path.clone()); - } - roots.push(RootEntry::new(path, create_filter(root.is_member()))); - if let Some(out_dir) = root.out_dir() { - extern_dirs.insert(out_dir.to_path_buf()); - roots.push(RootEntry::new( - out_dir.to_path_buf(), - create_filter(root.is_member()), - )) - } - } - roots - }; - - let (task_sender, task_receiver) = unbounded(); - let task_sender = Box::new(move |t| task_sender.send(t).unwrap()); - let watch = Watch(matches!(config.files.watcher, FilesWatcher::Notify)); - let (mut vfs, vfs_roots) = Vfs::new(roots, task_sender, watch); - - let mut extern_source_roots = FxHashMap::default(); - for r in vfs_roots { - let vfs_root_path = vfs.root2path(r); - let is_local = local_roots.iter().any(|it| vfs_root_path.starts_with(it)); - change.add_root(SourceRootId(r.0), is_local); - - // FIXME: add path2root in vfs to simpily this logic - if extern_dirs.contains(&vfs_root_path) { - extern_source_roots.insert(vfs_root_path, ExternSourceId(r.0)); - } - } + let (task_sender, task_receiver) = unbounded::(); + let mut vfs = vfs::Vfs::default(); let proc_macro_client = match &config.proc_macro_srv { None => ProcMacroClient::dummy(), @@ -140,18 +98,30 @@ impl GlobalState { }, }; + let mut loader = { + let loader = vfs_notify::LoaderHandle::spawn(Box::new(move |msg| { + task_sender.send(msg).unwrap() + })); + Box::new(loader) + }; + let watch = match config.files.watcher { + FilesWatcher::Client => vec![], + FilesWatcher::Notify => project_folders.watch, + }; + loader.set_config(vfs::loader::Config { load: project_folders.load, watch }); + // Create crate graph from all the workspaces let mut crate_graph = CrateGraph::default(); let mut load = |path: &Path| { - // Some path from metadata will be non canonicalized, e.g. 
/foo/../bar/lib.rs - let path = path.canonicalize().ok()?; - let vfs_file = vfs.load(&path); - vfs_file.map(|f| FileId(f.0)) + let path = AbsPathBuf::try_from(path.to_path_buf()).ok()?; + let contents = loader.load_sync(&path); + let path = vfs::VfsPath::from(path); + vfs.set_file_contents(path.clone(), contents); + vfs.file_id(&path) }; for ws in workspaces.iter() { crate_graph.extend(ws.to_crate_graph( config.cargo.target.as_deref(), - &extern_source_roots, &proc_macro_client, &mut load, )); @@ -162,18 +132,21 @@ impl GlobalState { let mut analysis_host = AnalysisHost::new(lru_capacity); analysis_host.apply_change(change); - GlobalState { + let mut res = GlobalState { config, - local_roots, workspaces: Arc::new(workspaces), analysis_host, - vfs: Arc::new(RwLock::new(vfs)), + loader, + vfs: Arc::new(RwLock::new((vfs, FxHashMap::default()))), task_receiver, latest_requests: Default::default(), flycheck, diagnostics: Default::default(), proc_macro_client, - } + source_root_config: project_folders.source_root_config, + }; + res.process_changes(); + res } pub fn update_configuration(&mut self, config: Config) { @@ -186,33 +159,40 @@ impl GlobalState { self.config = config; } - /// Returns a vec of libraries - /// FIXME: better API here - pub fn process_changes(&mut self, roots_scanned: &mut usize) -> bool { - let changes = self.vfs.write().commit_changes(); - if changes.is_empty() { - return false; - } - let mut change = AnalysisChange::new(); - for c in changes { - match c { - VfsChange::AddRoot { root, files } => { - *roots_scanned += 1; - for (file, path, text) in files { - change.add_file(SourceRootId(root.0), FileId(file.0), path, text); - } - } - VfsChange::AddFile { root, file, path, text } => { - change.add_file(SourceRootId(root.0), FileId(file.0), path, text); - } - VfsChange::RemoveFile { root, file, path } => { - change.remove_file(SourceRootId(root.0), FileId(file.0), path) - } - VfsChange::ChangeFile { file, text } => { - change.change_file(FileId(file.0), text); - } + pub fn process_changes(&mut self) -> bool { + let change = { + let mut change = AnalysisChange::new(); + let (vfs, line_endings_map) = &mut *self.vfs.write(); + let changed_files = vfs.take_changes(); + if changed_files.is_empty() { + return false; } - } + + let fs_op = changed_files.iter().any(|it| it.is_created_or_deleted()); + if fs_op { + let roots = self.source_root_config.partition(&vfs); + change.set_roots(roots) + } + + for file in changed_files { + let text = if file.exists() { + let bytes = vfs.file_contents(file.file_id).to_vec(); + match String::from_utf8(bytes).ok() { + Some(text) => { + let (text, line_endings) = LineEndings::normalize(text); + line_endings_map.insert(file.file_id, line_endings); + Some(Arc::new(text)) + } + None => None, + } + } else { + None + }; + change.change_file(file.file_id, text); + } + change + }; + self.analysis_host.apply_change(change); true } @@ -242,35 +222,31 @@ impl GlobalState { } impl GlobalStateSnapshot { - pub fn analysis(&self) -> &Analysis { + pub(crate) fn analysis(&self) -> &Analysis { &self.analysis } - pub fn url_to_file_id(&self, url: &Url) -> Result { - let path = url.to_file_path().map_err(|()| format!("invalid uri: {}", url))?; - let file = self.vfs.read().path2file(&path).ok_or_else(|| { - // Show warning as this file is outside current workspace - // FIXME: just handle such files, and remove `LspError::UNKNOWN_FILE`. 
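One consequence of the new `change_file(FileId, Option<Arc<String>>)` signature, visible in `process_changes` above: deletions are expressed as `None`, and for now the database just resets the text. A sketch of how a caller feeds modifications and deletions in (the helper name is made up):

```rust
use std::sync::Arc;

use ra_ide::{AnalysisChange, AnalysisHost, FileId};

fn on_file_event(host: &mut AnalysisHost, file_id: FileId, new_text: Option<String>) {
    let mut change = AnalysisChange::new();
    // Some(text): created or modified; None: deleted (currently just clears the text).
    change.change_file(file_id, new_text.map(Arc::new));
    host.apply_change(change);
}
```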
- LspError { - code: LspError::UNKNOWN_FILE, - message: "Rust file outside current workspace is not supported yet.".to_string(), - } - })?; - Ok(FileId(file.0)) + pub(crate) fn url_to_file_id(&self, url: &Url) -> Result { + let path = from_proto::abs_path(url)?; + let path = path.into(); + let res = + self.vfs.read().0.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?; + Ok(res) } - pub fn file_id_to_url(&self, id: FileId) -> Url { - file_id_to_url(&self.vfs.read(), id) + pub(crate) fn file_id_to_url(&self, id: FileId) -> Url { + file_id_to_url(&self.vfs.read().0, id) } - pub fn file_line_endings(&self, id: FileId) -> LineEndings { - self.vfs.read().file_line_endings(VfsFile(id.0)) + pub(crate) fn file_line_endings(&self, id: FileId) -> LineEndings { + self.vfs.read().1[&id] } pub fn anchored_path(&self, file_id: FileId, path: &str) -> Url { - let mut base = self.vfs.read().file2path(VfsFile(file_id.0)); + let mut base = self.vfs.read().0.file_path(file_id); base.pop(); let path = base.join(path); + let path = path.as_path().unwrap(); url_from_abs_path(&path) } @@ -279,7 +255,8 @@ impl GlobalStateSnapshot { crate_id: CrateId, ) -> Option<(&CargoWorkspace, Target)> { let file_id = self.analysis().crate_root(crate_id).ok()?; - let path = self.vfs.read().file2path(VfsFile(file_id.0)); + let path = self.vfs.read().0.file_path(file_id); + let path = path.as_path()?; self.workspaces.iter().find_map(|ws| match ws { ProjectWorkspace::Cargo { cargo, .. } => { cargo.target_by_root(&path).map(|it| (cargo, it)) @@ -307,14 +284,86 @@ impl GlobalStateSnapshot { ); buf } +} - pub fn workspace_root_for(&self, file_id: FileId) -> Option<&Path> { - let path = self.vfs.read().file2path(VfsFile(file_id.0)); - self.workspaces.iter().find_map(|ws| ws.workspace_root_for(&path)) +pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url { + let path = vfs.file_path(id); + let path = path.as_path().unwrap(); + url_from_abs_path(&path) +} + +#[derive(Default)] +pub(crate) struct ProjectFolders { + pub(crate) load: Vec, + pub(crate) watch: Vec, + pub(crate) source_root_config: SourceRootConfig, +} + +impl ProjectFolders { + pub(crate) fn new(workspaces: &[ProjectWorkspace]) -> ProjectFolders { + let mut res = ProjectFolders::default(); + let mut fsc = FileSetConfig::builder(); + let mut local_filesets = vec![]; + + for root in workspaces.iter().flat_map(|it| it.to_roots()) { + let path = root.path().to_owned(); + + let mut file_set_roots: Vec = vec![]; + + let path = AbsPathBuf::try_from(path).unwrap(); + let entry = if root.is_member() { + vfs::loader::Entry::local_cargo_package(path.clone()) + } else { + vfs::loader::Entry::cargo_package_dependency(path.clone()) + }; + res.load.push(entry); + if root.is_member() { + res.watch.push(res.load.len() - 1); + } + + if let Some(out_dir) = root.out_dir() { + let out_dir = AbsPathBuf::try_from(out_dir.to_path_buf()).unwrap(); + res.load.push(vfs::loader::Entry::rs_files_recursively(out_dir.clone())); + if root.is_member() { + res.watch.push(res.load.len() - 1); + } + file_set_roots.push(out_dir.into()); + } + file_set_roots.push(path.into()); + + if root.is_member() { + local_filesets.push(fsc.len()); + } + fsc.add_file_set(file_set_roots) + } + + let fsc = fsc.build(); + res.source_root_config = SourceRootConfig { fsc, local_filesets }; + + res } } -pub(crate) fn file_id_to_url(vfs: &Vfs, id: FileId) -> Url { - let path = vfs.file2path(VfsFile(id.0)); - url_from_abs_path(&path) +#[derive(Default, Debug)] +pub(crate) struct SourceRootConfig { + 
pub(crate) fsc: FileSetConfig, + pub(crate) local_filesets: Vec, +} + +impl SourceRootConfig { + pub fn partition(&self, vfs: &vfs::Vfs) -> Vec { + self.fsc + .partition(vfs) + .into_iter() + .enumerate() + .map(|(idx, file_set)| { + let is_local = self.local_filesets.contains(&idx); + if is_local { + SourceRoot::new_local(file_set) + } else { + SourceRoot::new_library(file_set) + } + }) + .collect() + } } diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index 64e70955f3a..b38067079fc 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs @@ -17,7 +17,6 @@ macro_rules! eprintln { ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; } -mod vfs_glob; mod caps; mod cargo_target_spec; mod to_proto; @@ -29,6 +28,7 @@ pub mod config; mod global_state; mod diagnostics; mod semantic_tokens; +mod line_endings; use serde::de::DeserializeOwned; diff --git a/crates/rust-analyzer/src/line_endings.rs b/crates/rust-analyzer/src/line_endings.rs new file mode 100644 index 00000000000..7e6db954e47 --- /dev/null +++ b/crates/rust-analyzer/src/line_endings.rs @@ -0,0 +1,64 @@ +//! We maintain invariant that all internal strings use `\n` as line separator. +//! This module does line ending conversion and detection (so that we can +//! convert back to `\r\n` on the way out). + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub(crate) enum LineEndings { + Unix, + Dos, +} + +impl LineEndings { + /// Replaces `\r\n` with `\n` in-place in `src`. + pub(crate) fn normalize(src: String) -> (String, LineEndings) { + if !src.as_bytes().contains(&b'\r') { + return (src, LineEndings::Unix); + } + + // We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding. + // While we *can* call `as_mut_vec` and do surgery on the live string + // directly, let's rather steal the contents of `src`. This makes the code + // safe even if a panic occurs. + + let mut buf = src.into_bytes(); + let mut gap_len = 0; + let mut tail = buf.as_mut_slice(); + loop { + let idx = match find_crlf(&tail[gap_len..]) { + None => tail.len(), + Some(idx) => idx + gap_len, + }; + tail.copy_within(gap_len..idx, 0); + tail = &mut tail[idx - gap_len..]; + if tail.len() == gap_len { + break; + } + gap_len += 1; + } + + // Account for removed `\r`. + // After `set_len`, `buf` is guaranteed to contain utf-8 again. + let new_len = buf.len() - gap_len; + let src = unsafe { + buf.set_len(new_len); + String::from_utf8_unchecked(buf) + }; + return (src, LineEndings::Dos); + + fn find_crlf(src: &[u8]) -> Option { + let mut search_idx = 0; + while let Some(idx) = find_cr(&src[search_idx..]) { + if src[search_idx..].get(idx + 1) != Some(&b'\n') { + search_idx += idx + 1; + continue; + } + return Some(search_idx + idx); + } + None + } + + fn find_cr(src: &[u8]) -> Option { + src.iter().enumerate().find_map(|(idx, &b)| if b == b'\r' { Some(idx) } else { None }) + } + } +} diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 674b1323bcb..b9d29685624 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -2,11 +2,9 @@ //! requests/replies and notifications back to the client. 
mod handlers; -mod subscriptions; pub(crate) mod request_metrics; use std::{ - borrow::Cow, env, error::Error, fmt, @@ -20,16 +18,12 @@ use crossbeam_channel::{never, select, unbounded, RecvError, Sender}; use lsp_server::{ Connection, ErrorCode, Message, Notification, ReqQueue, Request, RequestId, Response, }; -use lsp_types::{ - request::Request as _, DidChangeTextDocumentParams, NumberOrString, - TextDocumentContentChangeEvent, WorkDoneProgress, WorkDoneProgressBegin, - WorkDoneProgressCreateParams, WorkDoneProgressEnd, WorkDoneProgressReport, -}; -use ra_flycheck::{CheckTask, Status}; +use lsp_types::{request::Request as _, NumberOrString, TextDocumentContentChangeEvent}; +use ra_flycheck::CheckTask; use ra_ide::{Canceled, FileId, LineIndex}; use ra_prof::profile; use ra_project_model::{PackageRoot, ProjectWorkspace}; -use ra_vfs::VfsTask; +use rustc_hash::FxHashSet; use serde::{de::DeserializeOwned, Serialize}; use threadpool::ThreadPool; @@ -39,9 +33,10 @@ use crate::{ from_proto, global_state::{file_id_to_url, GlobalState, GlobalStateSnapshot}, lsp_ext, - main_loop::{request_metrics::RequestMetrics, subscriptions::Subscriptions}, + main_loop::request_metrics::RequestMetrics, Result, }; +use ra_db::VfsPath; #[derive(Debug)] pub struct LspError { @@ -128,13 +123,6 @@ pub fn main_loop(config: Config, connection: Connection) -> Result<()> { .collect::>() }; - let globs = config - .files - .exclude - .iter() - .map(|glob| crate::vfs_glob::Glob::new(glob)) - .collect::, _>>()?; - if let FilesWatcher::Client = config.files.watcher { let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions { watchers: workspaces @@ -159,11 +147,9 @@ pub fn main_loop(config: Config, connection: Connection) -> Result<()> { connection.sender.send(request.into()).unwrap(); } - GlobalState::new(workspaces, config.lru_capacity, &globs, config) + GlobalState::new(workspaces, config.lru_capacity, config) }; - loop_state.roots_total = global_state.vfs.read().n_roots(); - let pool = ThreadPool::default(); let (task_sender, task_receiver) = unbounded::(); @@ -192,7 +178,9 @@ pub fn main_loop(config: Config, connection: Connection) -> Result<()> { break; }; } + assert!(!global_state.vfs.read().0.has_changes()); loop_turn(&pool, &task_sender, &connection, &mut global_state, &mut loop_state, event)?; + assert!(!global_state.vfs.read().0.has_changes()); } } global_state.analysis_host.request_cancellation(); @@ -222,7 +210,7 @@ enum Task { enum Event { Msg(Message), Task(Task), - Vfs(VfsTask), + Vfs(vfs::loader::Message), CheckWatcher(CheckTask), } @@ -270,11 +258,20 @@ type Incoming = lsp_server::Incoming<(&'static str, Instant)>; #[derive(Default)] struct LoopState { req_queue: ReqQueue<(&'static str, Instant), ReqHandler>, - subscriptions: Subscriptions, - workspace_loaded: bool, - roots_progress_reported: Option, - roots_scanned: usize, - roots_total: usize, + mem_docs: FxHashSet, + status: Status, +} + +#[derive(Eq, PartialEq)] +enum Status { + Loading, + Ready, +} + +impl Default for Status { + fn default() -> Self { + Status::Loading + } } fn loop_turn( @@ -295,14 +292,36 @@ fn loop_turn( log::info!("queued count = {}", queue_count); } + let mut became_ready = false; match event { Event::Task(task) => { on_task(task, &connection.sender, &mut loop_state.req_queue.incoming, global_state); global_state.maybe_collect_garbage(); } - Event::Vfs(task) => { - global_state.vfs.write().handle_task(task); - } + Event::Vfs(task) => match task { + vfs::loader::Message::Loaded { files } => { + let vfs = &mut 
global_state.vfs.write().0; + for (path, contents) in files { + let path = VfsPath::from(path); + if !loop_state.mem_docs.contains(&path) { + vfs.set_file_contents(path, contents) + } + } + } + vfs::loader::Message::Progress { n_entries_total, n_entries_done } => { + if n_entries_done == n_entries_done { + loop_state.status = Status::Ready; + became_ready = true; + } + report_progress( + loop_state, + &connection.sender, + n_entries_done, + n_entries_total, + "roots scanned", + ) + } + }, Event::CheckWatcher(task) => on_check_task(task, global_state, task_sender)?, Event::Msg(msg) => match msg { Message::Request(req) => on_request( @@ -324,32 +343,29 @@ fn loop_turn( }, }; - let mut state_changed = global_state.process_changes(&mut loop_state.roots_scanned); + let state_changed = global_state.process_changes(); - let show_progress = - !loop_state.workspace_loaded && global_state.config.client_caps.work_done_progress; - - if !loop_state.workspace_loaded && loop_state.roots_scanned == loop_state.roots_total { - state_changed = true; - loop_state.workspace_loaded = true; + if became_ready { if let Some(flycheck) = &global_state.flycheck { flycheck.update(); } } - if show_progress { - send_startup_progress(&connection.sender, loop_state); - } + if loop_state.status == Status::Ready && (state_changed || became_ready) { + let subscriptions = loop_state + .mem_docs + .iter() + .map(|path| global_state.vfs.read().0.file_id(&path).unwrap()) + .collect::>(); - if state_changed && loop_state.workspace_loaded { update_file_notifications_on_threadpool( pool, global_state.snapshot(), task_sender.clone(), - loop_state.subscriptions.subscriptions(), + subscriptions.clone(), ); pool.execute({ - let subs = loop_state.subscriptions.subscriptions(); + let subs = subscriptions; let snap = global_state.snapshot(); move || snap.analysis().prime_caches(subs).unwrap_or_else(|_: Canceled| ()) }); @@ -465,7 +481,7 @@ fn on_request( fn on_notification( msg_sender: &Sender, - state: &mut GlobalState, + global_state: &mut GlobalState, loop_state: &mut LoopState, not: Notification, ) -> Result<()> { @@ -484,12 +500,15 @@ fn on_notification( }; let not = match notification_cast::(not) { Ok(params) => { - let uri = params.text_document.uri; - let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; - if let Some(file_id) = - state.vfs.write().add_file_overlay(&path, params.text_document.text) - { - loop_state.subscriptions.add_sub(FileId(file_id.0)); + if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { + if !loop_state.mem_docs.insert(path.clone()) { + log::error!("duplicate DidOpenTextDocument: {}", path) + } + global_state + .vfs + .write() + .0 + .set_file_contents(path, Some(params.text_document.text.into_bytes())); } return Ok(()); } @@ -497,23 +516,13 @@ fn on_notification( }; let not = match notification_cast::(not) { Ok(params) => { - let DidChangeTextDocumentParams { text_document, content_changes } = params; - let world = state.snapshot(); - let file_id = from_proto::file_id(&world, &text_document.uri)?; - let line_index = world.analysis().file_line_index(file_id)?; - let uri = text_document.uri; - let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; - state.vfs.write().change_file_overlay(&path, |old_text| { - apply_document_changes(old_text, Cow::Borrowed(&line_index), content_changes); - }); - return Ok(()); - } - Err(not) => not, - }; - let not = match notification_cast::(not) { - Ok(_params) => { - if let Some(flycheck) = &state.flycheck { - 
flycheck.update(); + if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { + assert!(loop_state.mem_docs.contains(&path)); + let vfs = &mut global_state.vfs.write().0; + let file_id = vfs.file_id(&path).unwrap(); + let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec()).unwrap(); + apply_document_changes(&mut text, params.content_changes); + vfs.set_file_contents(path, Some(text.into_bytes())) } return Ok(()); } @@ -521,19 +530,34 @@ fn on_notification( }; let not = match notification_cast::(not) { Ok(params) => { - let uri = params.text_document.uri; - let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; - if let Some(file_id) = state.vfs.write().remove_file_overlay(path.as_path()) { - loop_state.subscriptions.remove_sub(FileId(file_id.0)); + if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { + if !loop_state.mem_docs.remove(&path) { + log::error!("orphan DidCloseTextDocument: {}", path) + } + if let Some(path) = path.as_path() { + global_state.loader.invalidate(path.to_path_buf()); + } } - let params = - lsp_types::PublishDiagnosticsParams { uri, diagnostics: Vec::new(), version: None }; + let params = lsp_types::PublishDiagnosticsParams { + uri: params.text_document.uri, + diagnostics: Vec::new(), + version: None, + }; let not = notification_new::(params); msg_sender.send(not.into()).unwrap(); return Ok(()); } Err(not) => not, }; + let not = match notification_cast::(not) { + Ok(_params) => { + if let Some(flycheck) = &global_state.flycheck { + flycheck.update(); + } + return Ok(()); + } + Err(not) => not, + }; let not = match notification_cast::(not) { Ok(_) => { // As stated in https://github.com/microsoft/language-server-protocol/issues/676, @@ -575,11 +599,10 @@ fn on_notification( }; let not = match notification_cast::(not) { Ok(params) => { - let mut vfs = state.vfs.write(); for change in params.changes { - let uri = change.uri; - let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; - vfs.notify_changed(path) + if let Ok(path) = from_proto::abs_path(&change.uri) { + global_state.loader.invalidate(path) + } } return Ok(()); } @@ -594,9 +617,9 @@ fn on_notification( fn apply_document_changes( old_text: &mut String, - mut line_index: Cow<'_, LineIndex>, content_changes: Vec, ) { + let mut line_index = LineIndex::new(old_text); // The changes we got must be applied sequentially, but can cross lines so we // have to keep our line index updated. // Some clients (e.g. Code) sort the ranges in reverse. 
As an optimization, we @@ -621,7 +644,7 @@ fn apply_document_changes( match change.range { Some(range) => { if !index_valid.covers(range.end.line) { - line_index = Cow::Owned(LineIndex::new(&old_text)); + line_index = LineIndex::new(&old_text); } index_valid = IndexValid::UpToLineExclusive(range.start.line); let range = from_proto::text_range(&line_index, range); @@ -652,18 +675,11 @@ fn on_check_task( &workspace_root, ); for diag in diagnostics { - let path = diag - .location - .uri - .to_file_path() - .map_err(|()| format!("invalid uri: {}", diag.location.uri))?; - let file_id = match global_state.vfs.read().path2file(&path) { + let path = from_proto::vfs_path(&diag.location.uri)?; + let file_id = match global_state.vfs.read().0.file_id(&path) { Some(file) => FileId(file.0), None => { - log::error!( - "File with cargo diagnostic not found in VFS: {}", - path.display() - ); + log::error!("File with cargo diagnostic not found in VFS: {}", path); return Ok(()); } }; @@ -679,7 +695,7 @@ fn on_check_task( CheckTask::Status(status) => { if global_state.config.client_caps.work_done_progress { let progress = match status { - Status::Being => { + ra_flycheck::Status::Being => { lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin { title: "Running `cargo check`".to_string(), cancellable: Some(false), @@ -687,14 +703,14 @@ fn on_check_task( percentage: None, }) } - Status::Progress(target) => { + ra_flycheck::Status::Progress(target) => { lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport { cancellable: Some(false), message: Some(target), percentage: None, }) } - Status::End => { + ra_flycheck::Status::End => { lsp_types::WorkDoneProgress::End(lsp_types::WorkDoneProgressEnd { message: None, }) @@ -720,7 +736,7 @@ fn on_diagnostic_task(task: DiagnosticTask, msg_sender: &Sender, state: let subscriptions = state.diagnostics.handle_task(task); for file_id in subscriptions { - let url = file_id_to_url(&state.vfs.read(), file_id); + let url = file_id_to_url(&state.vfs.read().0, file_id); let diagnostics = state.diagnostics.diagnostics_for(file_id).cloned().collect(); let params = lsp_types::PublishDiagnosticsParams { uri: url, diagnostics, version: None }; let not = notification_new::(params); @@ -728,57 +744,46 @@ fn on_diagnostic_task(task: DiagnosticTask, msg_sender: &Sender, state: } } -fn send_startup_progress(sender: &Sender, loop_state: &mut LoopState) { - let total: usize = loop_state.roots_total; - let prev = loop_state.roots_progress_reported; - let progress = loop_state.roots_scanned; - loop_state.roots_progress_reported = Some(progress); +fn report_progress( + loop_state: &mut LoopState, + sender: &Sender, + done: usize, + total: usize, + message: &str, +) { + let token = lsp_types::ProgressToken::String(format!("rustAnalyzer/{}", message)); + let message = Some(format!("{}/{} {}", done, total, message)); + let percentage = Some(100.0 * done as f64 / total.max(1) as f64); + let work_done_progress = if done == 0 { + let work_done_progress_create = loop_state.req_queue.outgoing.register( + lsp_types::request::WorkDoneProgressCreate::METHOD.to_string(), + lsp_types::WorkDoneProgressCreateParams { token: token.clone() }, + DO_NOTHING, + ); + sender.send(work_done_progress_create.into()).unwrap(); - match (prev, loop_state.workspace_loaded) { - (None, false) => { - let request = loop_state.req_queue.outgoing.register( - lsp_types::request::WorkDoneProgressCreate::METHOD.to_string(), - WorkDoneProgressCreateParams { - token: 
lsp_types::ProgressToken::String("rustAnalyzer/startup".into()), - }, - DO_NOTHING, - ); - sender.send(request.into()).unwrap(); - send_startup_progress_notif( - sender, - WorkDoneProgress::Begin(WorkDoneProgressBegin { - title: "rust-analyzer".into(), - cancellable: None, - message: Some(format!("{}/{} packages", progress, total)), - percentage: Some(100.0 * progress as f64 / total as f64), - }), - ); - } - (Some(prev), false) if progress != prev => send_startup_progress_notif( - sender, - WorkDoneProgress::Report(WorkDoneProgressReport { - cancellable: None, - message: Some(format!("{}/{} packages", progress, total)), - percentage: Some(100.0 * progress as f64 / total as f64), - }), - ), - (_, true) => send_startup_progress_notif( - sender, - WorkDoneProgress::End(WorkDoneProgressEnd { - message: Some(format!("rust-analyzer loaded, {} packages", progress)), - }), - ), - _ => {} - } - - fn send_startup_progress_notif(sender: &Sender, work_done_progress: WorkDoneProgress) { - let notif = - notification_new::(lsp_types::ProgressParams { - token: lsp_types::ProgressToken::String("rustAnalyzer/startup".into()), - value: lsp_types::ProgressParamsValue::WorkDone(work_done_progress), - }); - sender.send(notif.into()).unwrap(); - } + lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin { + title: "rust-analyzer".into(), + cancellable: None, + message, + percentage, + }) + } else if done < total { + lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport { + cancellable: None, + message, + percentage, + }) + } else { + assert!(done == total); + lsp_types::WorkDoneProgress::End(lsp_types::WorkDoneProgressEnd { message }) + }; + let notification = + notification_new::(lsp_types::ProgressParams { + token, + value: lsp_types::ProgressParamsValue::WorkDone(work_done_progress), + }); + sender.send(notification.into()).unwrap(); } struct PoolDispatcher<'a> { @@ -976,18 +981,12 @@ where #[cfg(test)] mod tests { - use std::borrow::Cow; - use lsp_types::{Position, Range, TextDocumentContentChangeEvent}; - use ra_ide::LineIndex; + + use super::*; #[test] - fn apply_document_changes() { - fn run(text: &mut String, changes: Vec) { - let line_index = Cow::Owned(LineIndex::new(&text)); - super::apply_document_changes(text, line_index, changes); - } - + fn test_apply_document_changes() { macro_rules! 
c { [$($sl:expr, $sc:expr; $el:expr, $ec:expr => $text:expr),+] => { vec![$(TextDocumentContentChangeEvent { @@ -1002,9 +1001,9 @@ mod tests { } let mut text = String::new(); - run(&mut text, vec![]); + apply_document_changes(&mut text, vec![]); assert_eq!(text, ""); - run( + apply_document_changes( &mut text, vec![TextDocumentContentChangeEvent { range: None, @@ -1013,36 +1012,39 @@ mod tests { }], ); assert_eq!(text, "the"); - run(&mut text, c![0, 3; 0, 3 => " quick"]); + apply_document_changes(&mut text, c![0, 3; 0, 3 => " quick"]); assert_eq!(text, "the quick"); - run(&mut text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]); + apply_document_changes(&mut text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]); assert_eq!(text, "quick foxes"); - run(&mut text, c![0, 11; 0, 11 => "\ndream"]); + apply_document_changes(&mut text, c![0, 11; 0, 11 => "\ndream"]); assert_eq!(text, "quick foxes\ndream"); - run(&mut text, c![1, 0; 1, 0 => "have "]); + apply_document_changes(&mut text, c![1, 0; 1, 0 => "have "]); assert_eq!(text, "quick foxes\nhave dream"); - run(&mut text, c![0, 0; 0, 0 => "the ", 1, 4; 1, 4 => " quiet", 1, 16; 1, 16 => "s\n"]); + apply_document_changes( + &mut text, + c![0, 0; 0, 0 => "the ", 1, 4; 1, 4 => " quiet", 1, 16; 1, 16 => "s\n"], + ); assert_eq!(text, "the quick foxes\nhave quiet dreams\n"); - run(&mut text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]); + apply_document_changes(&mut text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]); assert_eq!(text, "the quick foxes\n\nhave quiet dreams\n\n"); - run( + apply_document_changes( &mut text, c![1, 0; 1, 0 => "DREAM", 2, 0; 2, 0 => "they ", 3, 0; 3, 0 => "DON'T THEY?"], ); assert_eq!(text, "the quick foxes\nDREAM\nthey have quiet dreams\nDON'T THEY?\n"); - run(&mut text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]); + apply_document_changes(&mut text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]); assert_eq!(text, "the quick \nthey have quiet dreams\n"); text = String::from("❤️"); - run(&mut text, c![0, 0; 0, 0 => "a"]); + apply_document_changes(&mut text, c![0, 0; 0, 0 => "a"]); assert_eq!(text, "a❤️"); text = String::from("a\nb"); - run(&mut text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]); + apply_document_changes(&mut text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]); assert_eq!(text, "adcb"); text = String::from("a\nb"); - run(&mut text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]); + apply_document_changes(&mut text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]); assert_eq!(text, "ațc\ncb"); } } diff --git a/crates/rust-analyzer/src/main_loop/handlers.rs b/crates/rust-analyzer/src/main_loop/handlers.rs index 2d7e649d2dc..a44959abe7d 100644 --- a/crates/rust-analyzer/src/main_loop/handlers.rs +++ b/crates/rust-analyzer/src/main_loop/handlers.rs @@ -396,7 +396,6 @@ pub fn handle_runnables( let line_index = snap.analysis().file_line_index(file_id)?; let offset = params.position.map(|it| from_proto::offset(&line_index, it)); let mut res = Vec::new(); - let workspace_root = snap.workspace_root_for(file_id); let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?; for runnable in snap.analysis().runnables(file_id)? 
{ if let Some(offset) = offset { @@ -420,7 +419,7 @@ pub fn handle_runnables( location: None, kind: lsp_ext::RunnableKind::Cargo, args: lsp_ext::CargoRunnable { - workspace_root: workspace_root.map(|root| root.to_owned()), + workspace_root: Some(spec.workspace_root.clone()), cargo_args: vec![ cmd.to_string(), "--package".to_string(), @@ -437,7 +436,7 @@ pub fn handle_runnables( location: None, kind: lsp_ext::RunnableKind::Cargo, args: lsp_ext::CargoRunnable { - workspace_root: workspace_root.map(|root| root.to_owned()), + workspace_root: None, cargo_args: vec!["check".to_string(), "--workspace".to_string()], executable_args: Vec::new(), }, diff --git a/crates/rust-analyzer/src/main_loop/subscriptions.rs b/crates/rust-analyzer/src/main_loop/subscriptions.rs deleted file mode 100644 index 2c76418be09..00000000000 --- a/crates/rust-analyzer/src/main_loop/subscriptions.rs +++ /dev/null @@ -1,22 +0,0 @@ -//! Keeps track of file subscriptions -- the set of currently opened files for -//! which we want to publish diagnostics, syntax highlighting, etc. - -use ra_ide::FileId; -use rustc_hash::FxHashSet; - -#[derive(Default, Debug)] -pub(crate) struct Subscriptions { - subs: FxHashSet, -} - -impl Subscriptions { - pub(crate) fn add_sub(&mut self, file_id: FileId) { - self.subs.insert(file_id); - } - pub(crate) fn remove_sub(&mut self, file_id: FileId) { - self.subs.remove(&file_id); - } - pub(crate) fn subscriptions(&self) -> Vec { - self.subs.iter().copied().collect() - } -} diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index ec153097efe..7b45b169d6d 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -10,11 +10,10 @@ use ra_ide::{ ResolvedAssist, Runnable, Severity, SourceChange, SourceFileEdit, TextEdit, }; use ra_syntax::{SyntaxKind, TextRange, TextSize}; -use ra_vfs::LineEndings; use crate::{ - cargo_target_spec::CargoTargetSpec, global_state::GlobalStateSnapshot, lsp_ext, - semantic_tokens, Result, + cargo_target_spec::CargoTargetSpec, global_state::GlobalStateSnapshot, + line_endings::LineEndings, lsp_ext, semantic_tokens, Result, }; pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position { @@ -650,6 +649,7 @@ pub(crate) fn runnable( runnable: Runnable, ) -> Result { let spec = CargoTargetSpec::for_file(snap, file_id)?; + let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone()); let target = spec.as_ref().map(|s| s.target.clone()); let (cargo_args, executable_args) = CargoTargetSpec::runnable_args(spec, &runnable.kind, &runnable.cfg_exprs)?; @@ -661,7 +661,7 @@ pub(crate) fn runnable( location: Some(location), kind: lsp_ext::RunnableKind::Cargo, args: lsp_ext::CargoRunnable { - workspace_root: snap.workspace_root_for(file_id).map(|root| root.to_owned()), + workspace_root: workspace_root, cargo_args, executable_args, }, diff --git a/crates/rust-analyzer/src/vfs_glob.rs b/crates/rust-analyzer/src/vfs_glob.rs deleted file mode 100644 index ff37a70080d..00000000000 --- a/crates/rust-analyzer/src/vfs_glob.rs +++ /dev/null @@ -1,98 +0,0 @@ -//! Exclusion rules for vfs. -//! -//! By default, we include only `.rs` files, and skip some know offenders like -//! `/target` or `/node_modules` altogether. -//! -//! It's also possible to add custom exclusion globs. 
- -use globset::{GlobSet, GlobSetBuilder}; -use ra_vfs::{Filter, RelativePath}; - -pub use globset::{Glob, GlobBuilder}; - -const ALWAYS_IGNORED: &[&str] = &["target/**", "**/node_modules/**", "**/.git/**"]; -const IGNORED_FOR_NON_MEMBERS: &[&str] = &["examples/**", "tests/**", "benches/**"]; - -pub struct RustPackageFilterBuilder { - is_member: bool, - exclude: GlobSetBuilder, -} - -impl Default for RustPackageFilterBuilder { - fn default() -> RustPackageFilterBuilder { - RustPackageFilterBuilder { is_member: false, exclude: GlobSetBuilder::new() } - } -} - -impl RustPackageFilterBuilder { - pub fn set_member(mut self, is_member: bool) -> RustPackageFilterBuilder { - self.is_member = is_member; - self - } - - pub fn exclude(mut self, globs: impl IntoIterator) -> RustPackageFilterBuilder { - for glob in globs.into_iter() { - self.exclude.add(glob); - } - self - } - - pub fn into_vfs_filter(self) -> Box { - let RustPackageFilterBuilder { is_member, mut exclude } = self; - for &glob in ALWAYS_IGNORED { - exclude.add(Glob::new(glob).unwrap()); - } - if !is_member { - for &glob in IGNORED_FOR_NON_MEMBERS { - exclude.add(Glob::new(glob).unwrap()); - } - } - Box::new(RustPackageFilter { exclude: exclude.build().unwrap() }) - } -} - -struct RustPackageFilter { - exclude: GlobSet, -} - -impl Filter for RustPackageFilter { - fn include_dir(&self, dir_path: &RelativePath) -> bool { - !self.exclude.is_match(dir_path.as_str()) - } - - fn include_file(&self, file_path: &RelativePath) -> bool { - file_path.extension() == Some("rs") - } -} - -#[test] -fn test_globs() { - let filter = RustPackageFilterBuilder::default().set_member(true).into_vfs_filter(); - - assert!(filter.include_dir(RelativePath::new("src/tests"))); - assert!(filter.include_dir(RelativePath::new("src/target"))); - assert!(filter.include_dir(RelativePath::new("tests"))); - assert!(filter.include_dir(RelativePath::new("benches"))); - - assert!(!filter.include_dir(RelativePath::new("target"))); - assert!(!filter.include_dir(RelativePath::new("src/foo/.git"))); - assert!(!filter.include_dir(RelativePath::new("foo/node_modules"))); - - let filter = RustPackageFilterBuilder::default().set_member(false).into_vfs_filter(); - - assert!(filter.include_dir(RelativePath::new("src/tests"))); - assert!(filter.include_dir(RelativePath::new("src/target"))); - - assert!(!filter.include_dir(RelativePath::new("target"))); - assert!(!filter.include_dir(RelativePath::new("src/foo/.git"))); - assert!(!filter.include_dir(RelativePath::new("foo/node_modules"))); - assert!(!filter.include_dir(RelativePath::new("tests"))); - assert!(!filter.include_dir(RelativePath::new("benches"))); - - let filter = RustPackageFilterBuilder::default() - .set_member(true) - .exclude(std::iter::once(Glob::new("src/llvm-project/**").unwrap())) - .into_vfs_filter(); - - assert!(!filter.include_dir(RelativePath::new("src/llvm-project/clang"))); -} diff --git a/crates/rust-analyzer/tests/heavy_tests/main.rs b/crates/rust-analyzer/tests/heavy_tests/main.rs index 48ce831af7a..e0de377b443 100644 --- a/crates/rust-analyzer/tests/heavy_tests/main.rs +++ b/crates/rust-analyzer/tests/heavy_tests/main.rs @@ -52,7 +52,7 @@ use std::collections::Spam; partial_result_params: PartialResultParams::default(), work_done_progress_params: WorkDoneProgressParams::default(), }); - assert!(format!("{}", res).contains("HashMap")); + assert!(res.to_string().contains("HashMap")); eprintln!("completion took {:?}", completion_start.elapsed()); } diff --git a/crates/rust-analyzer/tests/heavy_tests/support.rs 
b/crates/rust-analyzer/tests/heavy_tests/support.rs index f58790ded73..bb85853552c 100644 --- a/crates/rust-analyzer/tests/heavy_tests/support.rs +++ b/crates/rust-analyzer/tests/heavy_tests/support.rs @@ -212,7 +212,7 @@ impl Server { ProgressParams { token: lsp_types::ProgressToken::String(ref token), value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(_)), - } if token == "rustAnalyzer/startup" => true, + } if token == "rustAnalyzer/roots scanned" => true, _ => false, } } diff --git a/crates/vfs-notify/Cargo.toml b/crates/vfs-notify/Cargo.toml new file mode 100644 index 00000000000..4737a52a744 --- /dev/null +++ b/crates/vfs-notify/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "vfs-notify" +version = "0.1.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[dependencies] +log = "0.4.8" +rustc-hash = "1.0" +jod-thread = "0.1.0" +walkdir = "2.3.1" +globset = "0.4.5" +crossbeam-channel = "0.4.0" +notify = "5.0.0-pre.3" + +vfs = { path = "../vfs" } +paths = { path = "../paths" } diff --git a/crates/vfs-notify/src/include.rs b/crates/vfs-notify/src/include.rs new file mode 100644 index 00000000000..7378766f51d --- /dev/null +++ b/crates/vfs-notify/src/include.rs @@ -0,0 +1,43 @@ +//! See `Include`. + +use std::convert::TryFrom; + +use globset::{Glob, GlobSet, GlobSetBuilder}; +use paths::{RelPath, RelPathBuf}; + +/// `Include` is the opposite of .gitignore. +/// +/// It describes the set of files inside some directory. +/// +/// The current implementation is very limited, it allows white-listing file +/// globs and black-listing directories. +#[derive(Debug, Clone)] +pub(crate) struct Include { + include_files: GlobSet, + exclude_dirs: Vec, +} + +impl Include { + pub(crate) fn new(include: Vec) -> Include { + let mut include_files = GlobSetBuilder::new(); + let mut exclude_dirs = Vec::new(); + + for glob in include { + if glob.starts_with("!/") { + if let Ok(path) = RelPathBuf::try_from(&glob["!/".len()..]) { + exclude_dirs.push(path) + } + } else { + include_files.add(Glob::new(&glob).unwrap()); + } + } + let include_files = include_files.build().unwrap(); + Include { include_files, exclude_dirs } + } + pub(crate) fn include_file(&self, path: &RelPath) -> bool { + self.include_files.is_match(path) + } + pub(crate) fn exclude_dir(&self, path: &RelPath) -> bool { + self.exclude_dirs.iter().any(|excluded| path.starts_with(excluded)) + } +} diff --git a/crates/vfs-notify/src/lib.rs b/crates/vfs-notify/src/lib.rs new file mode 100644 index 00000000000..baee6ddc868 --- /dev/null +++ b/crates/vfs-notify/src/lib.rs @@ -0,0 +1,247 @@ +//! An implementation of `loader::Handle`, based on `walkdir` and `notify`. +//! +//! The file watching bits here are untested and quite probably buggy. For this +//! reason, by default we don't watch files and rely on editor's file watching +//! capabilities. +//! +//! Hopefully, one day a reliable file watching/walking crate appears on +//! crates.io, and we can reduce this to trivial glue code. +mod include; + +use std::convert::{TryFrom, TryInto}; + +use crossbeam_channel::{select, unbounded, Receiver}; +use notify::{RecommendedWatcher, RecursiveMode, Watcher}; +use paths::{AbsPath, AbsPathBuf}; +use rustc_hash::FxHashSet; +use vfs::loader; +use walkdir::WalkDir; + +use crate::include::Include; + +#[derive(Debug)] +pub struct LoaderHandle { + // Relative order of fields below is significant. 
+ sender: crossbeam_channel::Sender, + _thread: jod_thread::JoinHandle, +} + +#[derive(Debug)] +enum Message { + Config(loader::Config), + Invalidate(AbsPathBuf), +} + +impl loader::Handle for LoaderHandle { + fn spawn(sender: loader::Sender) -> LoaderHandle { + let actor = LoaderActor::new(sender); + let (sender, receiver) = unbounded::(); + let thread = jod_thread::spawn(move || actor.run(receiver)); + LoaderHandle { sender, _thread: thread } + } + fn set_config(&mut self, config: loader::Config) { + self.sender.send(Message::Config(config)).unwrap() + } + fn invalidate(&mut self, path: AbsPathBuf) { + self.sender.send(Message::Invalidate(path)).unwrap(); + } + fn load_sync(&mut self, path: &AbsPathBuf) -> Option> { + read(path) + } +} + +type NotifyEvent = notify::Result; + +struct LoaderActor { + config: Vec<(AbsPathBuf, Include, bool)>, + watched_paths: FxHashSet, + sender: loader::Sender, + // Drop order of fields bellow is significant, + watcher: Option, + watcher_receiver: Receiver, +} + +#[derive(Debug)] +enum Event { + Message(Message), + NotifyEvent(NotifyEvent), +} + +impl LoaderActor { + fn new(sender: loader::Sender) -> LoaderActor { + let (watcher_sender, watcher_receiver) = unbounded(); + let watcher = log_notify_error(Watcher::new_immediate(move |event| { + watcher_sender.send(event).unwrap() + })); + + LoaderActor { + watcher, + watcher_receiver, + watched_paths: FxHashSet::default(), + sender, + config: Vec::new(), + } + } + + fn run(mut self, receiver: Receiver) { + while let Some(event) = self.next_event(&receiver) { + log::debug!("vfs-notify event: {:?}", event); + match event { + Event::Message(msg) => match msg { + Message::Config(config) => { + let n_entries_total = config.load.len(); + self.send(loader::Message::Progress { n_entries_total, n_entries_done: 0 }); + + self.unwatch_all(); + self.config.clear(); + + for (i, entry) in config.load.into_iter().enumerate() { + let watch = config.watch.contains(&i); + let files = self.load_entry(entry, watch); + self.send(loader::Message::Loaded { files }); + self.send(loader::Message::Progress { + n_entries_total, + n_entries_done: i + 1, + }); + } + self.config.sort_by(|x, y| x.0.cmp(&y.0)); + } + Message::Invalidate(path) => { + let contents = read(path.as_path()); + let files = vec![(path, contents)]; + self.send(loader::Message::Loaded { files }); + } + }, + Event::NotifyEvent(event) => { + if let Some(event) = log_notify_error(event) { + let files = event + .paths + .into_iter() + .map(|path| AbsPathBuf::try_from(path).unwrap()) + .filter_map(|path| { + let is_dir = path.is_dir(); + let is_file = path.is_file(); + + let config_idx = + match self.config.binary_search_by(|it| it.0.cmp(&path)) { + Ok(it) => it, + Err(it) => it.saturating_sub(1), + }; + let include = self.config.get(config_idx).and_then(|it| { + let rel_path = path.strip_prefix(&it.0)?; + Some((rel_path, &it.1)) + }); + + if let Some((rel_path, include)) = include { + if is_dir && include.exclude_dir(&rel_path) + || is_file && !include.include_file(&rel_path) + { + return None; + } + } + + if is_dir { + self.watch(path); + return None; + } + if !is_file { + return None; + } + let contents = read(&path); + Some((path, contents)) + }) + .collect(); + self.send(loader::Message::Loaded { files }) + } + } + } + } + } + fn next_event(&self, receiver: &Receiver) -> Option { + select! 
{ + recv(receiver) -> it => it.ok().map(Event::Message), + recv(&self.watcher_receiver) -> it => Some(Event::NotifyEvent(it.unwrap())), + } + } + fn load_entry( + &mut self, + entry: loader::Entry, + watch: bool, + ) -> Vec<(AbsPathBuf, Option>)> { + match entry { + loader::Entry::Files(files) => files + .into_iter() + .map(|file| { + if watch { + self.watch(file.clone()) + } + let contents = read(file.as_path()); + (file, contents) + }) + .collect::>(), + loader::Entry::Directory { path, include } => { + let include = Include::new(include); + self.config.push((path.clone(), include.clone(), watch)); + + let files = WalkDir::new(&path) + .into_iter() + .filter_entry(|entry| { + let abs_path: &AbsPath = entry.path().try_into().unwrap(); + match abs_path.strip_prefix(&path) { + Some(rel_path) => { + !(entry.file_type().is_dir() && include.exclude_dir(rel_path)) + } + None => false, + } + }) + .filter_map(|entry| entry.ok()) + .filter_map(|entry| { + let is_dir = entry.file_type().is_dir(); + let is_file = entry.file_type().is_file(); + let abs_path = AbsPathBuf::try_from(entry.into_path()).unwrap(); + if is_dir { + self.watch(abs_path.clone()); + } + let rel_path = abs_path.strip_prefix(&path)?; + if is_file && include.include_file(&rel_path) { + Some(abs_path) + } else { + None + } + }); + + files + .map(|file| { + let contents = read(file.as_path()); + (file, contents) + }) + .collect() + } + } + } + + fn watch(&mut self, path: AbsPathBuf) { + if let Some(watcher) = &mut self.watcher { + log_notify_error(watcher.watch(&path, RecursiveMode::NonRecursive)); + self.watched_paths.insert(path); + } + } + fn unwatch_all(&mut self) { + if let Some(watcher) = &mut self.watcher { + for path in self.watched_paths.drain() { + log_notify_error(watcher.unwatch(path)); + } + } + } + fn send(&mut self, msg: loader::Message) { + (self.sender)(msg) + } +} + +fn read(path: &AbsPath) -> Option> { + std::fs::read(path).ok() +} + +fn log_notify_error(res: notify::Result) -> Option { + res.map_err(|err| log::warn!("notify error: {}", err)).ok() +} diff --git a/crates/vfs/Cargo.toml b/crates/vfs/Cargo.toml index c03e6363b8e..2630690027f 100644 --- a/crates/vfs/Cargo.toml +++ b/crates/vfs/Cargo.toml @@ -6,9 +6,5 @@ edition = "2018" [dependencies] rustc-hash = "1.0" -jod-thread = "0.1.0" -walkdir = "2.3.1" -globset = "0.4.5" -crossbeam-channel = "0.4.0" paths = { path = "../paths" } diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs index 724606a3d41..0173f746499 100644 --- a/crates/vfs/src/file_set.rs +++ b/crates/vfs/src/file_set.rs @@ -4,7 +4,6 @@ //! the default `FileSet`. 
use std::{fmt, iter}; -use paths::AbsPathBuf; use rustc_hash::FxHashMap; use crate::{FileId, Vfs, VfsPath}; @@ -41,7 +40,7 @@ impl fmt::Debug for FileSet { #[derive(Debug)] pub struct FileSetConfig { n_file_sets: usize, - roots: Vec<(AbsPathBuf, usize)>, + roots: Vec<(VfsPath, usize)>, } impl Default for FileSetConfig { @@ -66,11 +65,7 @@ impl FileSetConfig { self.n_file_sets } fn classify(&self, path: &VfsPath) -> usize { - let path = match path.as_path() { - Some(it) => it, - None => return self.len() - 1, - }; - let idx = match self.roots.binary_search_by(|(p, _)| p.as_path().cmp(path)) { + let idx = match self.roots.binary_search_by(|(p, _)| p.cmp(path)) { Ok(it) => it, Err(it) => it.saturating_sub(1), }; @@ -83,7 +78,7 @@ impl FileSetConfig { } pub struct FileSetConfigBuilder { - roots: Vec>, + roots: Vec>, } impl Default for FileSetConfigBuilder { @@ -96,12 +91,12 @@ impl FileSetConfigBuilder { pub fn len(&self) -> usize { self.roots.len() } - pub fn add_file_set(&mut self, roots: Vec) { + pub fn add_file_set(&mut self, roots: Vec) { self.roots.push(roots) } pub fn build(self) -> FileSetConfig { let n_file_sets = self.roots.len() + 1; - let mut roots: Vec<(AbsPathBuf, usize)> = self + let mut roots: Vec<(VfsPath, usize)> = self .roots .into_iter() .enumerate() diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs index 055219b0cd1..024e5801875 100644 --- a/crates/vfs/src/lib.rs +++ b/crates/vfs/src/lib.rs @@ -38,7 +38,6 @@ mod vfs_path; mod path_interner; pub mod file_set; pub mod loader; -pub mod walkdir_loader; use std::{fmt, mem}; diff --git a/crates/vfs/src/loader.rs b/crates/vfs/src/loader.rs index 5a0ca68f385..a216b5f130b 100644 --- a/crates/vfs/src/loader.rs +++ b/crates/vfs/src/loader.rs @@ -3,19 +3,20 @@ use std::fmt; use paths::AbsPathBuf; +#[derive(Debug)] pub enum Entry { Files(Vec), - Directory { path: AbsPathBuf, globs: Vec }, + Directory { path: AbsPathBuf, include: Vec }, } +#[derive(Debug)] pub struct Config { pub load: Vec, pub watch: Vec, } pub enum Message { - DidSwitchConfig { n_entries: usize }, - DidLoadAllEntries, + Progress { n_entries_total: usize, n_entries_done: usize }, Loaded { files: Vec<(AbsPathBuf, Option>)> }, } @@ -32,15 +33,15 @@ pub trait Handle: fmt::Debug { impl Entry { pub fn rs_files_recursively(base: AbsPathBuf) -> Entry { - Entry::Directory { path: base, globs: globs(&["*.rs"]) } + Entry::Directory { path: base, include: globs(&["*.rs", "!/.git/"]) } } pub fn local_cargo_package(base: AbsPathBuf) -> Entry { - Entry::Directory { path: base, globs: globs(&["*.rs", "!/target/"]) } + Entry::Directory { path: base, include: globs(&["*.rs", "!/target/", "!/.git/"]) } } pub fn cargo_package_dependency(base: AbsPathBuf) -> Entry { Entry::Directory { path: base, - globs: globs(&["*.rs", "!/tests/", "!/examples/", "!/benches/"]), + include: globs(&["*.rs", "!/tests/", "!/examples/", "!/benches/", "!/.git/"]), } } } @@ -55,10 +56,11 @@ impl fmt::Debug for Message { Message::Loaded { files } => { f.debug_struct("Loaded").field("n_files", &files.len()).finish() } - Message::DidSwitchConfig { n_entries } => { - f.debug_struct("DidSwitchConfig").field("n_entries", n_entries).finish() - } - Message::DidLoadAllEntries => f.debug_struct("DidLoadAllEntries").finish(), + Message::Progress { n_entries_total, n_entries_done } => f + .debug_struct("Progress") + .field("n_entries_total", n_entries_total) + .field("n_entries_done", n_entries_done) + .finish(), } } } diff --git a/crates/vfs/src/vfs_path.rs b/crates/vfs/src/vfs_path.rs index 
de5dc0bf348..0a8a86c6214 100644 --- a/crates/vfs/src/vfs_path.rs +++ b/crates/vfs/src/vfs_path.rs @@ -9,9 +9,17 @@ use paths::{AbsPath, AbsPathBuf}; pub struct VfsPath(VfsPathRepr); impl VfsPath { + /// Creates an "in-memory" path from `/`-separates string. + /// This is most useful for testing, to avoid windows/linux differences + pub fn new_virtual_path(path: String) -> VfsPath { + assert!(path.starts_with('/')); + VfsPath(VfsPathRepr::VirtualPath(VirtualPath(path))) + } + pub fn as_path(&self) -> Option<&AbsPath> { match &self.0 { VfsPathRepr::PathBuf(it) => Some(it.as_path()), + VfsPathRepr::VirtualPath(_) => None, } } pub fn join(&self, path: &str) -> VfsPath { @@ -20,11 +28,24 @@ impl VfsPath { let res = it.join(path).normalize(); VfsPath(VfsPathRepr::PathBuf(res)) } + VfsPathRepr::VirtualPath(it) => { + let res = it.join(path); + VfsPath(VfsPathRepr::VirtualPath(res)) + } } } pub fn pop(&mut self) -> bool { match &mut self.0 { VfsPathRepr::PathBuf(it) => it.pop(), + VfsPathRepr::VirtualPath(it) => it.pop(), + } + } + pub fn starts_with(&self, other: &VfsPath) -> bool { + match (&self.0, &other.0) { + (VfsPathRepr::PathBuf(lhs), VfsPathRepr::PathBuf(rhs)) => lhs.starts_with(rhs), + (VfsPathRepr::PathBuf(_), _) => false, + (VfsPathRepr::VirtualPath(lhs), VfsPathRepr::VirtualPath(rhs)) => lhs.starts_with(rhs), + (VfsPathRepr::VirtualPath(_), _) => false, } } } @@ -32,11 +53,12 @@ impl VfsPath { #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] enum VfsPathRepr { PathBuf(AbsPathBuf), + VirtualPath(VirtualPath), } impl From for VfsPath { fn from(v: AbsPathBuf) -> Self { - VfsPath(VfsPathRepr::PathBuf(v)) + VfsPath(VfsPathRepr::PathBuf(v.normalize())) } } @@ -44,6 +66,33 @@ impl fmt::Display for VfsPath { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match &self.0 { VfsPathRepr::PathBuf(it) => fmt::Display::fmt(&it.display(), f), + VfsPathRepr::VirtualPath(VirtualPath(it)) => fmt::Display::fmt(it, f), } } } + +#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] +struct VirtualPath(String); + +impl VirtualPath { + fn starts_with(&self, other: &VirtualPath) -> bool { + self.0.starts_with(&other.0) + } + fn pop(&mut self) -> bool { + let pos = match self.0.rfind('/') { + Some(pos) => pos, + None => return false, + }; + self.0 = self.0[..pos].to_string(); + true + } + fn join(&self, mut path: &str) -> VirtualPath { + let mut res = self.clone(); + while path.starts_with("../") { + assert!(res.pop()); + path = &path["../".len()..] + } + res.0 = format!("{}/{}", res.0, path); + res + } +} diff --git a/crates/vfs/src/walkdir_loader.rs b/crates/vfs/src/walkdir_loader.rs deleted file mode 100644 index 13e59e3f343..00000000000 --- a/crates/vfs/src/walkdir_loader.rs +++ /dev/null @@ -1,108 +0,0 @@ -//! A walkdir-based implementation of `loader::Handle`, which doesn't try to -//! watch files. -use std::convert::TryFrom; - -use globset::{Glob, GlobSetBuilder}; -use paths::{AbsPath, AbsPathBuf}; -use walkdir::WalkDir; - -use crate::loader; - -#[derive(Debug)] -pub struct WalkdirLoaderHandle { - // Relative order of fields below is significant. 
- sender: crossbeam_channel::Sender, - _thread: jod_thread::JoinHandle, -} - -enum Message { - Config(loader::Config), - Invalidate(AbsPathBuf), -} - -impl loader::Handle for WalkdirLoaderHandle { - fn spawn(sender: loader::Sender) -> WalkdirLoaderHandle { - let actor = WalkdirLoaderActor { sender }; - let (sender, receiver) = crossbeam_channel::unbounded::(); - let thread = jod_thread::spawn(move || actor.run(receiver)); - WalkdirLoaderHandle { sender, _thread: thread } - } - fn set_config(&mut self, config: loader::Config) { - self.sender.send(Message::Config(config)).unwrap() - } - fn invalidate(&mut self, path: AbsPathBuf) { - self.sender.send(Message::Invalidate(path)).unwrap(); - } - fn load_sync(&mut self, path: &AbsPathBuf) -> Option> { - read(path) - } -} - -struct WalkdirLoaderActor { - sender: loader::Sender, -} - -impl WalkdirLoaderActor { - fn run(mut self, receiver: crossbeam_channel::Receiver) { - for msg in receiver { - match msg { - Message::Config(config) => { - self.send(loader::Message::DidSwitchConfig { n_entries: config.load.len() }); - for entry in config.load.into_iter() { - let files = self.load_entry(entry); - self.send(loader::Message::Loaded { files }); - } - drop(config.watch); - self.send(loader::Message::DidLoadAllEntries); - } - Message::Invalidate(path) => { - let contents = read(path.as_path()); - let files = vec![(path, contents)]; - self.send(loader::Message::Loaded { files }); - } - } - } - } - fn load_entry(&mut self, entry: loader::Entry) -> Vec<(AbsPathBuf, Option>)> { - match entry { - loader::Entry::Files(files) => files - .into_iter() - .map(|file| { - let contents = read(file.as_path()); - (file, contents) - }) - .collect::>(), - loader::Entry::Directory { path, globs } => { - let globset = { - let mut builder = GlobSetBuilder::new(); - for glob in &globs { - builder.add(Glob::new(glob).unwrap()); - } - builder.build().unwrap() - }; - - let files = WalkDir::new(path) - .into_iter() - .filter_map(|it| it.ok()) - .filter(|it| it.file_type().is_file()) - .map(|it| it.into_path()) - .map(|it| AbsPathBuf::try_from(it).unwrap()) - .filter(|it| globset.is_match(&it)); - - files - .map(|file| { - let contents = read(file.as_path()); - (file, contents) - }) - .collect() - } - } - } - fn send(&mut self, msg: loader::Message) { - (self.sender)(msg) - } -} - -fn read(path: &AbsPath) -> Option> { - std::fs::read(path).ok() -}
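
The sketch below illustrates what ProjectFolders::new computes from the project workspaces: one loader entry per workspace root (plus its OUT_DIR, if any), watch indices for member roots only, and one file set per root. It is a simplified stand-in that uses plain strings and a hypothetical project_folders helper instead of the real vfs::loader::Entry and FileSetConfigBuilder types.

// Standalone sketch of ProjectFolders::new: every workspace root becomes a
// loader entry, member roots (and their OUT_DIRs) are additionally watched
// by index into `load`, and each root plus its OUT_DIR forms one file set.

#[derive(Debug, Default)]
struct Folders {
    load: Vec<String>,           // stands in for vfs::loader::Entry
    watch: Vec<usize>,           // indices into `load` that should be watched
    file_sets: Vec<Vec<String>>, // stands in for FileSetConfig roots
    local_filesets: Vec<usize>,
}

fn project_folders(roots: &[(String, bool, Option<String>)]) -> Folders {
    let mut res = Folders::default();
    for (path, is_member, out_dir) in roots {
        let mut file_set_roots = Vec::new();
        res.load.push(path.clone());
        if *is_member {
            res.watch.push(res.load.len() - 1);
        }
        if let Some(out_dir) = out_dir {
            res.load.push(out_dir.clone());
            if *is_member {
                res.watch.push(res.load.len() - 1);
            }
            file_set_roots.push(out_dir.clone());
        }
        file_set_roots.push(path.clone());
        if *is_member {
            res.local_filesets.push(res.file_sets.len());
        }
        res.file_sets.push(file_set_roots);
    }
    res
}

fn main() {
    let folders = project_folders(&[
        ("/ws/member".to_string(), true, Some("/target/out".to_string())),
        ("/registry/dep".to_string(), false, None),
    ]);
    assert_eq!(folders.watch, vec![0, 1]); // only the member root and its OUT_DIR
    assert_eq!(folders.local_filesets, vec![0]);
}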
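
The new line_endings module keeps the invariant that all text held by the server uses `\n`, recording whether the original file was `\r\n` so edits can be converted back on the way out. Below is a minimal sketch of that contract; the normalize helper here uses a plain `replace` rather than the in-place `copy_within` loop from the patch.

#[derive(Debug, PartialEq)]
enum LineEndings {
    Unix,
    Dos,
}

// Simplified stand-in for LineEndings::normalize: strip `\r\n` down to `\n`
// and remember which style the document used.
fn normalize(src: String) -> (String, LineEndings) {
    if !src.as_bytes().contains(&b'\r') {
        return (src, LineEndings::Unix);
    }
    (src.replace("\r\n", "\n"), LineEndings::Dos)
}

fn main() {
    assert_eq!(
        normalize("a\nb\n".to_string()),
        ("a\nb\n".to_string(), LineEndings::Unix)
    );
    assert_eq!(
        normalize("a\r\nb\r\n".to_string()),
        ("a\nb\n".to_string(), LineEndings::Dos)
    );
}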
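
Open documents are now tracked in LoopState::mem_docs, and contents arriving from the VFS loader are only applied to files the client does not have open, so DidOpen/DidChange text always wins over what was read from disk. A small sketch of that rule with std collections standing in for Vfs and VfsPath; on_loaded is an illustrative helper, not an API from the patch.

use std::collections::{HashMap, HashSet};

// Contents reported by the loader are ignored for open documents; the text
// received via DidOpen/DidChange is authoritative for those paths.
fn on_loaded(
    vfs: &mut HashMap<String, Vec<u8>>,
    mem_docs: &HashSet<String>,
    files: Vec<(String, Vec<u8>)>,
) {
    for (path, contents) in files {
        if !mem_docs.contains(&path) {
            vfs.insert(path, contents);
        }
    }
}

fn main() {
    let mut vfs = HashMap::new();
    let mut mem_docs = HashSet::new();
    mem_docs.insert("/ws/src/lib.rs".to_string());
    on_loaded(
        &mut vfs,
        &mem_docs,
        vec![("/ws/src/lib.rs".to_string(), b"on disk".to_vec())],
    );
    assert!(vfs.is_empty()); // the open document was not overwritten
}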
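
Startup progress is now reported through the generic report_progress, keyed by a token per message kind ("rustAnalyzer/roots scanned", which is also what the heavy test waits for). A rough sketch of the Begin/Report/End selection and percentage computation, with a plain enum in place of the lsp_types work-done-progress variants.

#[derive(Debug, PartialEq)]
enum Work {
    Begin,
    Report,
    End,
}

// done == 0 starts the progress, done == total ends it, anything in between
// is an intermediate report; total.max(1) avoids dividing by zero.
fn progress(done: usize, total: usize, what: &str) -> (String, String, f64, Work) {
    let token = format!("rustAnalyzer/{}", what);
    let message = format!("{}/{} {}", done, total, what);
    let percentage = 100.0 * done as f64 / total.max(1) as f64;
    let kind = if done == 0 {
        Work::Begin
    } else if done < total {
        Work::Report
    } else {
        Work::End
    };
    (token, message, percentage, kind)
}

fn main() {
    assert_eq!(progress(0, 4, "roots scanned").3, Work::Begin);
    assert_eq!(progress(2, 4, "roots scanned").3, Work::Report);
    assert_eq!(progress(4, 4, "roots scanned").3, Work::End);
}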
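
The include lists on vfs::loader::Entry::Directory follow a small convention interpreted by vfs-notify's Include: plain globs white-list files, while entries starting with "!/" black-list directories under the entry root. A sketch of that parsing, assuming globset as a dependency (as in crates/vfs-notify/Cargo.toml); the real type works on RelPath/RelPathBuf rather than strings, and parse is an illustrative name.

use globset::{Glob, GlobSet, GlobSetBuilder};

struct Include {
    include_files: GlobSet,
    exclude_dirs: Vec<String>,
}

fn parse(patterns: &[&str]) -> Include {
    let mut files = GlobSetBuilder::new();
    let mut dirs = Vec::new();
    for &pattern in patterns {
        match pattern.strip_prefix("!/") {
            // "!/target/" black-lists the `target` directory under the root.
            Some(dir) => dirs.push(dir.trim_end_matches('/').to_string()),
            // Everything else white-lists files, e.g. "*.rs".
            None => {
                files.add(Glob::new(pattern).unwrap());
            }
        }
    }
    Include { include_files: files.build().unwrap(), exclude_dirs: dirs }
}

fn main() {
    // The set used for local cargo packages in vfs::loader::Entry.
    let include = parse(&["*.rs", "!/target/", "!/.git/"]);
    assert!(include.include_files.is_match("src/lib.rs"));
    assert!(!include.include_files.is_match("build.log"));
    assert_eq!(include.exclude_dirs, vec!["target", ".git"]);
}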
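
FileSetConfig now partitions over VfsPath roots directly: a binary search over the sorted root list, a prefix check, and a final catch-all file set for anything unmatched. A self-contained sketch of that classify step over plain strings; the real code compares VfsPaths built by FileSetConfigBuilder.

// `roots` is sorted by path, as produced by FileSetConfigBuilder::build;
// the second tuple element is the index of the file set owning that root.
fn classify(roots: &[(String, usize)], path: &str, n_file_sets: usize) -> usize {
    let idx = match roots.binary_search_by(|(root, _)| root.as_str().cmp(path)) {
        Ok(it) => it,
        Err(it) => it.saturating_sub(1),
    };
    if path.starts_with(&roots[idx].0) {
        roots[idx].1
    } else {
        n_file_sets - 1 // catch-all set for paths outside every root
    }
}

fn main() {
    let roots = vec![("/ws/bar".to_string(), 1), ("/ws/foo".to_string(), 0)];
    assert_eq!(classify(&roots, "/ws/foo/src/lib.rs", 3), 0);
    assert_eq!(classify(&roots, "/ws/bar/lib.rs", 3), 1);
    assert_eq!(classify(&roots, "/elsewhere/main.rs", 3), 2);
}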
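
VfsPath gains a VirtualPath variant for in-memory, `/`-separated paths created via VfsPath::new_virtual_path, mainly for tests. Its join collapses leading "../" segments by popping components off the base, and pop truncates at the last `/`. A sketch of those string semantics in isolation.

#[derive(Debug, Clone, PartialEq)]
struct VirtualPath(String);

impl VirtualPath {
    // Drop the last `/`-separated component; returns false if there is none.
    fn pop(&mut self) -> bool {
        match self.0.rfind('/') {
            Some(pos) => {
                self.0.truncate(pos);
                true
            }
            None => false,
        }
    }
    // Leading "../" segments pop components off the base before appending.
    fn join(&self, mut path: &str) -> VirtualPath {
        let mut res = self.clone();
        while let Some(rest) = path.strip_prefix("../") {
            assert!(res.pop());
            path = rest;
        }
        VirtualPath(format!("{}/{}", res.0, path))
    }
}

fn main() {
    let base = VirtualPath("/foo/bar".to_string());
    assert_eq!(base.join("baz.rs").0, "/foo/bar/baz.rs");
    assert_eq!(base.join("../lib.rs").0, "/foo/lib.rs");
    let mut p = VirtualPath("/foo/bar".to_string());
    assert!(p.pop());
    assert_eq!(p.0, "/foo");
}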