From 939f05f3e33e9f00d5205d60af3a862ae4d58bd6 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Thu, 6 Feb 2020 12:43:56 +0100 Subject: Move to a crate --- crates/ra_ide/src/ide_db/change.rs | 386 ----------------------- crates/ra_ide/src/ide_db/feature_flags.rs | 71 ----- crates/ra_ide/src/ide_db/line_index.rs | 283 ----------------- crates/ra_ide/src/ide_db/line_index_utils.rs | 334 -------------------- crates/ra_ide/src/ide_db/mod.rs | 137 --------- crates/ra_ide/src/ide_db/symbol_index.rs | 445 --------------------------- 6 files changed, 1656 deletions(-) delete mode 100644 crates/ra_ide/src/ide_db/change.rs delete mode 100644 crates/ra_ide/src/ide_db/feature_flags.rs delete mode 100644 crates/ra_ide/src/ide_db/line_index.rs delete mode 100644 crates/ra_ide/src/ide_db/line_index_utils.rs delete mode 100644 crates/ra_ide/src/ide_db/mod.rs delete mode 100644 crates/ra_ide/src/ide_db/symbol_index.rs (limited to 'crates/ra_ide/src/ide_db') diff --git a/crates/ra_ide/src/ide_db/change.rs b/crates/ra_ide/src/ide_db/change.rs deleted file mode 100644 index 62ffa6920..000000000 --- a/crates/ra_ide/src/ide_db/change.rs +++ /dev/null @@ -1,386 +0,0 @@ -//! FIXME: write short doc here - -use std::{fmt, sync::Arc, time}; - -use ra_db::{ - salsa::{Database, Durability, SweepStrategy}, - CrateGraph, CrateId, FileId, RelativePathBuf, SourceDatabase, SourceDatabaseExt, SourceRoot, - SourceRootId, -}; -use ra_prof::{memory_usage, profile, Bytes}; -use ra_syntax::SourceFile; -#[cfg(not(feature = "wasm"))] -use rayon::prelude::*; -use rustc_hash::FxHashMap; - -use crate::ide_db::{ - symbol_index::{SymbolIndex, SymbolsDatabase}, - DebugData, RootDatabase, -}; - -#[derive(Default)] -pub struct AnalysisChange { - new_roots: Vec<(SourceRootId, bool)>, - roots_changed: FxHashMap, - files_changed: Vec<(FileId, Arc)>, - libraries_added: Vec, - crate_graph: Option, - debug_data: DebugData, -} - -impl fmt::Debug for AnalysisChange { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - let mut d = fmt.debug_struct("AnalysisChange"); - if !self.new_roots.is_empty() { - d.field("new_roots", &self.new_roots); - } - if !self.roots_changed.is_empty() { - d.field("roots_changed", &self.roots_changed); - } - if !self.files_changed.is_empty() { - d.field("files_changed", &self.files_changed.len()); - } - if !self.libraries_added.is_empty() { - d.field("libraries_added", &self.libraries_added.len()); - } - if !self.crate_graph.is_none() { - d.field("crate_graph", &self.crate_graph); - } - d.finish() - } -} - -impl AnalysisChange { - pub fn new() -> AnalysisChange { - AnalysisChange::default() - } - - pub fn add_root(&mut self, root_id: SourceRootId, is_local: bool) { - self.new_roots.push((root_id, is_local)); - } - - pub fn add_file( - &mut self, - root_id: SourceRootId, - file_id: FileId, - path: RelativePathBuf, - text: Arc, - ) { - let file = AddFile { file_id, path, text }; - self.roots_changed.entry(root_id).or_default().added.push(file); - } - - pub fn change_file(&mut self, file_id: FileId, new_text: Arc) { - self.files_changed.push((file_id, new_text)) - } - - pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) { - let file = RemoveFile { file_id, path }; - self.roots_changed.entry(root_id).or_default().removed.push(file); - } - - pub fn add_library(&mut self, data: LibraryData) { - self.libraries_added.push(data) - } - - pub fn set_crate_graph(&mut self, graph: CrateGraph) { - self.crate_graph = Some(graph); - } - - pub fn set_debug_crate_name(&mut self, 
crate_id: CrateId, name: String) { - self.debug_data.crate_names.insert(crate_id, name); - } - - pub fn set_debug_root_path(&mut self, source_root_id: SourceRootId, path: String) { - self.debug_data.root_paths.insert(source_root_id, path); - } -} - -#[derive(Debug)] -struct AddFile { - file_id: FileId, - path: RelativePathBuf, - text: Arc, -} - -#[derive(Debug)] -struct RemoveFile { - file_id: FileId, - path: RelativePathBuf, -} - -#[derive(Default)] -struct RootChange { - added: Vec, - removed: Vec, -} - -impl fmt::Debug for RootChange { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_struct("AnalysisChange") - .field("added", &self.added.len()) - .field("removed", &self.removed.len()) - .finish() - } -} - -pub struct LibraryData { - root_id: SourceRootId, - root_change: RootChange, - symbol_index: SymbolIndex, -} - -impl fmt::Debug for LibraryData { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("LibraryData") - .field("root_id", &self.root_id) - .field("root_change", &self.root_change) - .field("n_symbols", &self.symbol_index.len()) - .finish() - } -} - -impl LibraryData { - pub fn prepare( - root_id: SourceRootId, - files: Vec<(FileId, RelativePathBuf, Arc)>, - ) -> LibraryData { - let _p = profile("LibraryData::prepare"); - - #[cfg(not(feature = "wasm"))] - let iter = files.par_iter(); - #[cfg(feature = "wasm")] - let iter = files.iter(); - - let symbol_index = SymbolIndex::for_files(iter.map(|(file_id, _, text)| { - let parse = SourceFile::parse(text); - (*file_id, parse) - })); - let mut root_change = RootChange::default(); - root_change.added = files - .into_iter() - .map(|(file_id, path, text)| AddFile { file_id, path, text }) - .collect(); - LibraryData { root_id, root_change, symbol_index } - } -} - -const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100); - -impl RootDatabase { - pub(crate) fn request_cancellation(&mut self) { - let _p = profile("RootDatabase::request_cancellation"); - self.salsa_runtime_mut().synthetic_write(Durability::LOW); - } - - pub(crate) fn apply_change(&mut self, change: AnalysisChange) { - let _p = profile("RootDatabase::apply_change"); - self.request_cancellation(); - log::info!("apply_change {:?}", change); - if !change.new_roots.is_empty() { - let mut local_roots = Vec::clone(&self.local_roots()); - for (root_id, is_local) in change.new_roots { - let root = - if is_local { SourceRoot::new_local() } else { SourceRoot::new_library() }; - let durability = durability(&root); - self.set_source_root_with_durability(root_id, Arc::new(root), durability); - if is_local { - local_roots.push(root_id); - } - } - self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); - } - - for (root_id, root_change) in change.roots_changed { - self.apply_root_change(root_id, root_change); - } - for (file_id, text) in change.files_changed { - let source_root_id = self.file_source_root(file_id); - let source_root = self.source_root(source_root_id); - let durability = durability(&source_root); - self.set_file_text_with_durability(file_id, text, durability) - } - if !change.libraries_added.is_empty() { - let mut libraries = Vec::clone(&self.library_roots()); - for library in change.libraries_added { - libraries.push(library.root_id); - self.set_source_root_with_durability( - library.root_id, - Arc::new(SourceRoot::new_library()), - Durability::HIGH, - ); - self.set_library_symbols_with_durability( - library.root_id, - Arc::new(library.symbol_index), - Durability::HIGH, - ); - 
self.apply_root_change(library.root_id, library.root_change); - } - self.set_library_roots_with_durability(Arc::new(libraries), Durability::HIGH); - } - if let Some(crate_graph) = change.crate_graph { - self.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH) - } - - Arc::make_mut(&mut self.debug_data).merge(change.debug_data) - } - - fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) { - let mut source_root = SourceRoot::clone(&self.source_root(root_id)); - let durability = durability(&source_root); - for add_file in root_change.added { - self.set_file_text_with_durability(add_file.file_id, add_file.text, durability); - self.set_file_relative_path_with_durability( - add_file.file_id, - add_file.path.clone(), - durability, - ); - self.set_file_source_root_with_durability(add_file.file_id, root_id, durability); - source_root.insert_file(add_file.path, add_file.file_id); - } - for remove_file in root_change.removed { - self.set_file_text_with_durability(remove_file.file_id, Default::default(), durability); - source_root.remove_file(&remove_file.path); - } - self.set_source_root_with_durability(root_id, Arc::new(source_root), durability); - } - - pub(crate) fn maybe_collect_garbage(&mut self) { - if cfg!(feature = "wasm") { - return; - } - - if self.last_gc_check.elapsed() > GC_COOLDOWN { - self.last_gc_check = crate::wasm_shims::Instant::now(); - } - } - - pub(crate) fn collect_garbage(&mut self) { - if cfg!(feature = "wasm") { - return; - } - - let _p = profile("RootDatabase::collect_garbage"); - self.last_gc = crate::wasm_shims::Instant::now(); - - let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); - - self.query(ra_db::ParseQuery).sweep(sweep); - self.query(hir::db::ParseMacroQuery).sweep(sweep); - - // Macros do take significant space, but less then the syntax trees - // self.query(hir::db::MacroDefQuery).sweep(sweep); - // self.query(hir::db::MacroArgQuery).sweep(sweep); - // self.query(hir::db::MacroExpandQuery).sweep(sweep); - - self.query(hir::db::AstIdMapQuery).sweep(sweep); - - self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep); - - self.query(hir::db::ExprScopesQuery).sweep(sweep); - self.query(hir::db::DoInferQuery).sweep(sweep); - self.query(hir::db::BodyQuery).sweep(sweep); - } - - pub(crate) fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> { - let mut acc: Vec<(String, Bytes)> = vec![]; - let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); - macro_rules! 
sweep_each_query { - ($($q:path)*) => {$( - let before = memory_usage().allocated; - self.query($q).sweep(sweep); - let after = memory_usage().allocated; - let q: $q = Default::default(); - let name = format!("{:?}", q); - acc.push((name, before - after)); - - let before = memory_usage().allocated; - self.query($q).sweep(sweep.discard_everything()); - let after = memory_usage().allocated; - let q: $q = Default::default(); - let name = format!("{:?} (deps)", q); - acc.push((name, before - after)); - )*} - } - sweep_each_query![ - // SourceDatabase - ra_db::ParseQuery - ra_db::SourceRootCratesQuery - - // AstDatabase - hir::db::AstIdMapQuery - hir::db::InternMacroQuery - hir::db::MacroArgQuery - hir::db::MacroDefQuery - hir::db::ParseMacroQuery - hir::db::MacroExpandQuery - - // DefDatabase - hir::db::RawItemsQuery - hir::db::ComputeCrateDefMapQuery - hir::db::StructDataQuery - hir::db::UnionDataQuery - hir::db::EnumDataQuery - hir::db::ImplDataQuery - hir::db::TraitDataQuery - hir::db::TypeAliasDataQuery - hir::db::FunctionDataQuery - hir::db::ConstDataQuery - hir::db::StaticDataQuery - hir::db::BodyWithSourceMapQuery - hir::db::BodyQuery - hir::db::ExprScopesQuery - hir::db::GenericParamsQuery - hir::db::AttrsQuery - hir::db::ModuleLangItemsQuery - hir::db::CrateLangItemsQuery - hir::db::LangItemQuery - hir::db::DocumentationQuery - - // InternDatabase - hir::db::InternFunctionQuery - hir::db::InternStructQuery - hir::db::InternUnionQuery - hir::db::InternEnumQuery - hir::db::InternConstQuery - hir::db::InternStaticQuery - hir::db::InternTraitQuery - hir::db::InternTypeAliasQuery - hir::db::InternImplQuery - - // HirDatabase - hir::db::DoInferQuery - hir::db::TyQuery - hir::db::ValueTyQuery - hir::db::ImplSelfTyQuery - hir::db::ImplTraitQuery - hir::db::FieldTypesQuery - hir::db::CallableItemSignatureQuery - hir::db::GenericPredicatesForParamQuery - hir::db::GenericPredicatesQuery - hir::db::GenericDefaultsQuery - hir::db::ImplsInCrateQuery - hir::db::ImplsForTraitQuery - hir::db::TraitSolverQuery - hir::db::InternTypeCtorQuery - hir::db::InternChalkImplQuery - hir::db::InternAssocTyValueQuery - hir::db::AssociatedTyDataQuery - hir::db::AssociatedTyValueQuery - hir::db::TraitSolveQuery - hir::db::TraitDatumQuery - hir::db::StructDatumQuery - hir::db::ImplDatumQuery - ]; - acc.sort_by_key(|it| std::cmp::Reverse(it.1)); - acc - } -} - -fn durability(source_root: &SourceRoot) -> Durability { - if source_root.is_library { - Durability::HIGH - } else { - Durability::LOW - } -} diff --git a/crates/ra_ide/src/ide_db/feature_flags.rs b/crates/ra_ide/src/ide_db/feature_flags.rs deleted file mode 100644 index 85617640d..000000000 --- a/crates/ra_ide/src/ide_db/feature_flags.rs +++ /dev/null @@ -1,71 +0,0 @@ -//! FIXME: write short doc here - -use rustc_hash::FxHashMap; - -/// Feature flags hold fine-grained toggles for all *user-visible* features of -/// rust-analyzer. -/// -/// The exists such that users are able to disable any annoying feature (and, -/// with many users and many features, some features are bound to be annoying -/// for some users) -/// -/// Note that we purposefully use run-time checked strings, and not something -/// checked at compile time, to keep things simple and flexible. -/// -/// Also note that, at the moment, `FeatureFlags` also store features for -/// `ra_lsp_server`. This should be benign layering violation. 
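// Editorial sketch, not part of the original patch: a minimal example of how
// the run-time checked flag map below might be driven. `FeatureFlags::default`,
// `set`, and `get` are the methods defined in this file; the wrapper function
// name is illustrative only.
fn feature_flags_usage_sketch() {
    let mut flags = FeatureFlags::default();

    // Known flags can be toggled at run time; unknown names come back as
    // `Err(())` instead of panicking, so a client can ignore stale settings.
    assert!(flags.set("completion.enable-postfix", false).is_ok());
    assert!(flags.set("no.such.flag", true).is_err());

    // `get` panics on unknown names, so it is only called with known flags.
    assert!(!flags.get("completion.enable-postfix"));
}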
-#[derive(Debug)] -pub struct FeatureFlags { - flags: FxHashMap, -} - -impl FeatureFlags { - fn new(flags: &[(&str, bool)]) -> FeatureFlags { - let flags = flags - .iter() - .map(|&(name, value)| { - check_flag_name(name); - (name.to_string(), value) - }) - .collect(); - FeatureFlags { flags } - } - - pub fn set(&mut self, flag: &str, value: bool) -> Result<(), ()> { - match self.flags.get_mut(flag) { - None => Err(()), - Some(slot) => { - *slot = value; - Ok(()) - } - } - } - - pub fn get(&self, flag: &str) -> bool { - match self.flags.get(flag) { - None => panic!("unknown flag: {:?}", flag), - Some(value) => *value, - } - } -} - -impl Default for FeatureFlags { - fn default() -> FeatureFlags { - FeatureFlags::new(&[ - ("lsp.diagnostics", true), - ("completion.insertion.add-call-parenthesis", true), - ("completion.enable-postfix", true), - ("notifications.workspace-loaded", true), - ("notifications.cargo-toml-not-found", true), - ]) - } -} - -fn check_flag_name(flag: &str) { - for c in flag.bytes() { - match c { - b'a'..=b'z' | b'-' | b'.' => (), - _ => panic!("flag name does not match conventions: {:?}", flag), - } - } -} diff --git a/crates/ra_ide/src/ide_db/line_index.rs b/crates/ra_ide/src/ide_db/line_index.rs deleted file mode 100644 index 6f99ca3a7..000000000 --- a/crates/ra_ide/src/ide_db/line_index.rs +++ /dev/null @@ -1,283 +0,0 @@ -//! FIXME: write short doc here - -use ra_syntax::TextUnit; -use rustc_hash::FxHashMap; -use superslice::Ext; - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct LineIndex { - pub(crate) newlines: Vec, - pub(crate) utf16_lines: FxHashMap>, -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct LineCol { - /// Zero-based - pub line: u32, - /// Zero-based - pub col_utf16: u32, -} - -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub(crate) struct Utf16Char { - pub(crate) start: TextUnit, - pub(crate) end: TextUnit, -} - -impl Utf16Char { - fn len(&self) -> TextUnit { - self.end - self.start - } -} - -impl LineIndex { - pub fn new(text: &str) -> LineIndex { - let mut utf16_lines = FxHashMap::default(); - let mut utf16_chars = Vec::new(); - - let mut newlines = vec![0.into()]; - let mut curr_row = 0.into(); - let mut curr_col = 0.into(); - let mut line = 0; - for c in text.chars() { - curr_row += TextUnit::of_char(c); - if c == '\n' { - newlines.push(curr_row); - - // Save any utf-16 characters seen in the previous line - if !utf16_chars.is_empty() { - utf16_lines.insert(line, utf16_chars); - utf16_chars = Vec::new(); - } - - // Prepare for processing the next line - curr_col = 0.into(); - line += 1; - continue; - } - - let char_len = TextUnit::of_char(c); - if char_len.to_usize() > 1 { - utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + char_len }); - } - - curr_col += char_len; - } - - // Save any utf-16 characters seen in the last line - if !utf16_chars.is_empty() { - utf16_lines.insert(line, utf16_chars); - } - - LineIndex { newlines, utf16_lines } - } - - pub fn line_col(&self, offset: TextUnit) -> LineCol { - let line = self.newlines.upper_bound(&offset) - 1; - let line_start_offset = self.newlines[line]; - let col = offset - line_start_offset; - - LineCol { line: line as u32, col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32 } - } - - pub fn offset(&self, line_col: LineCol) -> TextUnit { - //FIXME: return Result - let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16); - self.newlines[line_col.line as usize] + col - } - - fn utf8_to_utf16_col(&self, line: u32, mut col: TextUnit) -> usize { - if let 
Some(utf16_chars) = self.utf16_lines.get(&line) { - let mut correction = TextUnit::from_usize(0); - for c in utf16_chars { - if col >= c.end { - correction += c.len() - TextUnit::from_usize(1); - } else { - // From here on, all utf16 characters come *after* the character we are mapping, - // so we don't need to take them into account - break; - } - } - - col -= correction; - } - - col.to_usize() - } - - fn utf16_to_utf8_col(&self, line: u32, col: u32) -> TextUnit { - let mut col: TextUnit = col.into(); - if let Some(utf16_chars) = self.utf16_lines.get(&line) { - for c in utf16_chars { - if col >= c.start { - col += c.len() - TextUnit::from_usize(1); - } else { - // From here on, all utf16 characters come *after* the character we are mapping, - // so we don't need to take them into account - break; - } - } - } - - col - } -} - -#[cfg(test)] -/// Simple reference implementation to use in proptests -pub fn to_line_col(text: &str, offset: TextUnit) -> LineCol { - let mut res = LineCol { line: 0, col_utf16: 0 }; - for (i, c) in text.char_indices() { - if i + c.len_utf8() > offset.to_usize() { - // if it's an invalid offset, inside a multibyte char - // return as if it was at the start of the char - break; - } - if c == '\n' { - res.line += 1; - res.col_utf16 = 0; - } else { - res.col_utf16 += 1; - } - } - res -} - -#[cfg(test)] -mod test_line_index { - use super::*; - use proptest::{prelude::*, proptest}; - use ra_text_edit::test_utils::{arb_offset, arb_text}; - - #[test] - fn test_line_index() { - let text = "hello\nworld"; - let index = LineIndex::new(text); - assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 }); - assert_eq!(index.line_col(1.into()), LineCol { line: 0, col_utf16: 1 }); - assert_eq!(index.line_col(5.into()), LineCol { line: 0, col_utf16: 5 }); - assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 0 }); - assert_eq!(index.line_col(7.into()), LineCol { line: 1, col_utf16: 1 }); - assert_eq!(index.line_col(8.into()), LineCol { line: 1, col_utf16: 2 }); - assert_eq!(index.line_col(10.into()), LineCol { line: 1, col_utf16: 4 }); - assert_eq!(index.line_col(11.into()), LineCol { line: 1, col_utf16: 5 }); - assert_eq!(index.line_col(12.into()), LineCol { line: 1, col_utf16: 6 }); - - let text = "\nhello\nworld"; - let index = LineIndex::new(text); - assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 }); - assert_eq!(index.line_col(1.into()), LineCol { line: 1, col_utf16: 0 }); - assert_eq!(index.line_col(2.into()), LineCol { line: 1, col_utf16: 1 }); - assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 5 }); - assert_eq!(index.line_col(7.into()), LineCol { line: 2, col_utf16: 0 }); - } - - fn arb_text_with_offset() -> BoxedStrategy<(TextUnit, String)> { - arb_text().prop_flat_map(|text| (arb_offset(&text), Just(text))).boxed() - } - - fn to_line_col(text: &str, offset: TextUnit) -> LineCol { - let mut res = LineCol { line: 0, col_utf16: 0 }; - for (i, c) in text.char_indices() { - if i + c.len_utf8() > offset.to_usize() { - // if it's an invalid offset, inside a multibyte char - // return as if it was at the start of the char - break; - } - if c == '\n' { - res.line += 1; - res.col_utf16 = 0; - } else { - res.col_utf16 += 1; - } - } - res - } - - proptest! 
{ - #[test] - fn test_line_index_proptest((offset, text) in arb_text_with_offset()) { - let expected = to_line_col(&text, offset); - let line_index = LineIndex::new(&text); - let actual = line_index.line_col(offset); - - assert_eq!(actual, expected); - } - } -} - -#[cfg(test)] -mod test_utf8_utf16_conv { - use super::*; - - #[test] - fn test_char_len() { - assert_eq!('メ'.len_utf8(), 3); - assert_eq!('メ'.len_utf16(), 1); - } - - #[test] - fn test_empty_index() { - let col_index = LineIndex::new( - " -const C: char = 'x'; -", - ); - assert_eq!(col_index.utf16_lines.len(), 0); - } - - #[test] - fn test_single_char() { - let col_index = LineIndex::new( - " -const C: char = 'メ'; -", - ); - - assert_eq!(col_index.utf16_lines.len(), 1); - assert_eq!(col_index.utf16_lines[&1].len(), 1); - assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); - - // UTF-8 to UTF-16, no changes - assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); - - // UTF-8 to UTF-16 - assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20); - - // UTF-16 to UTF-8, no changes - assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from(15)); - - // UTF-16 to UTF-8 - assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from(21)); - } - - #[test] - fn test_string() { - let col_index = LineIndex::new( - " -const C: char = \"メ メ\"; -", - ); - - assert_eq!(col_index.utf16_lines.len(), 1); - assert_eq!(col_index.utf16_lines[&1].len(), 2); - assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); - assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() }); - - // UTF-8 to UTF-16 - assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); - - assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19); - assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21); - - assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15); - - // UTF-16 to UTF-8 - assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from_usize(15)); - - assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextUnit::from_usize(20)); - assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from_usize(23)); - - assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextUnit::from_usize(15)); - } -} diff --git a/crates/ra_ide/src/ide_db/line_index_utils.rs b/crates/ra_ide/src/ide_db/line_index_utils.rs deleted file mode 100644 index faa3d665f..000000000 --- a/crates/ra_ide/src/ide_db/line_index_utils.rs +++ /dev/null @@ -1,334 +0,0 @@ -//! 
FIXME: write short doc here - -use ra_syntax::{TextRange, TextUnit}; -use ra_text_edit::{AtomTextEdit, TextEdit}; - -use crate::ide_db::line_index::{LineCol, LineIndex, Utf16Char}; - -#[derive(Debug, Clone)] -enum Step { - Newline(TextUnit), - Utf16Char(TextRange), -} - -#[derive(Debug)] -struct LineIndexStepIter<'a> { - line_index: &'a LineIndex, - next_newline_idx: usize, - utf16_chars: Option<(TextUnit, std::slice::Iter<'a, Utf16Char>)>, -} - -impl<'a> LineIndexStepIter<'a> { - fn from(line_index: &LineIndex) -> LineIndexStepIter { - let mut x = LineIndexStepIter { line_index, next_newline_idx: 0, utf16_chars: None }; - // skip first newline since it's not real - x.next(); - x - } -} - -impl<'a> Iterator for LineIndexStepIter<'a> { - type Item = Step; - fn next(&mut self) -> Option { - self.utf16_chars - .as_mut() - .and_then(|(newline, x)| { - let x = x.next()?; - Some(Step::Utf16Char(TextRange::from_to(*newline + x.start, *newline + x.end))) - }) - .or_else(|| { - let next_newline = *self.line_index.newlines.get(self.next_newline_idx)?; - self.utf16_chars = self - .line_index - .utf16_lines - .get(&(self.next_newline_idx as u32)) - .map(|x| (next_newline, x.iter())); - self.next_newline_idx += 1; - Some(Step::Newline(next_newline)) - }) - } -} - -#[derive(Debug)] -struct OffsetStepIter<'a> { - text: &'a str, - offset: TextUnit, -} - -impl<'a> Iterator for OffsetStepIter<'a> { - type Item = Step; - fn next(&mut self) -> Option { - let (next, next_offset) = self - .text - .char_indices() - .filter_map(|(i, c)| { - if c == '\n' { - let next_offset = self.offset + TextUnit::from_usize(i + 1); - let next = Step::Newline(next_offset); - Some((next, next_offset)) - } else { - let char_len = TextUnit::of_char(c); - if char_len.to_usize() > 1 { - let start = self.offset + TextUnit::from_usize(i); - let end = start + char_len; - let next = Step::Utf16Char(TextRange::from_to(start, end)); - let next_offset = end; - Some((next, next_offset)) - } else { - None - } - } - }) - .next()?; - let next_idx = (next_offset - self.offset).to_usize(); - self.text = &self.text[next_idx..]; - self.offset = next_offset; - Some(next) - } -} - -#[derive(Debug)] -enum NextSteps<'a> { - Use, - ReplaceMany(OffsetStepIter<'a>), - AddMany(OffsetStepIter<'a>), -} - -#[derive(Debug)] -struct TranslatedEdit<'a> { - delete: TextRange, - insert: &'a str, - diff: i64, -} - -struct Edits<'a> { - edits: &'a [AtomTextEdit], - current: Option>, - acc_diff: i64, -} - -impl<'a> Edits<'a> { - fn from_text_edit(text_edit: &'a TextEdit) -> Edits<'a> { - let mut x = Edits { edits: text_edit.as_atoms(), current: None, acc_diff: 0 }; - x.advance_edit(); - x - } - fn advance_edit(&mut self) { - self.acc_diff += self.current.as_ref().map_or(0, |x| x.diff); - match self.edits.split_first() { - Some((next, rest)) => { - let delete = self.translate_range(next.delete); - let diff = next.insert.len() as i64 - next.delete.len().to_usize() as i64; - self.current = Some(TranslatedEdit { delete, insert: &next.insert, diff }); - self.edits = rest; - } - None => { - self.current = None; - } - } - } - - fn next_inserted_steps(&mut self) -> Option> { - let cur = self.current.as_ref()?; - let res = Some(OffsetStepIter { offset: cur.delete.start(), text: &cur.insert }); - self.advance_edit(); - res - } - - fn next_steps(&mut self, step: &Step) -> NextSteps { - let step_pos = match *step { - Step::Newline(n) => n, - Step::Utf16Char(r) => r.end(), - }; - match &mut self.current { - Some(edit) => { - if step_pos <= edit.delete.start() { - NextSteps::Use - 
} else if step_pos <= edit.delete.end() { - let iter = OffsetStepIter { offset: edit.delete.start(), text: &edit.insert }; - // empty slice to avoid returning steps again - edit.insert = &edit.insert[edit.insert.len()..]; - NextSteps::ReplaceMany(iter) - } else { - let iter = OffsetStepIter { offset: edit.delete.start(), text: &edit.insert }; - // empty slice to avoid returning steps again - edit.insert = &edit.insert[edit.insert.len()..]; - self.advance_edit(); - NextSteps::AddMany(iter) - } - } - None => NextSteps::Use, - } - } - - fn translate_range(&self, range: TextRange) -> TextRange { - if self.acc_diff == 0 { - range - } else { - let start = self.translate(range.start()); - let end = self.translate(range.end()); - TextRange::from_to(start, end) - } - } - - fn translate(&self, x: TextUnit) -> TextUnit { - if self.acc_diff == 0 { - x - } else { - TextUnit::from((x.to_usize() as i64 + self.acc_diff) as u32) - } - } - - fn translate_step(&self, x: &Step) -> Step { - if self.acc_diff == 0 { - x.clone() - } else { - match *x { - Step::Newline(n) => Step::Newline(self.translate(n)), - Step::Utf16Char(r) => Step::Utf16Char(self.translate_range(r)), - } - } - } -} - -#[derive(Debug)] -struct RunningLineCol { - line: u32, - last_newline: TextUnit, - col_adjust: TextUnit, -} - -impl RunningLineCol { - fn new() -> RunningLineCol { - RunningLineCol { line: 0, last_newline: TextUnit::from(0), col_adjust: TextUnit::from(0) } - } - - fn to_line_col(&self, offset: TextUnit) -> LineCol { - LineCol { - line: self.line, - col_utf16: ((offset - self.last_newline) - self.col_adjust).into(), - } - } - - fn add_line(&mut self, newline: TextUnit) { - self.line += 1; - self.last_newline = newline; - self.col_adjust = TextUnit::from(0); - } - - fn adjust_col(&mut self, range: TextRange) { - self.col_adjust += range.len() - TextUnit::from(1); - } -} - -pub fn translate_offset_with_edit( - line_index: &LineIndex, - offset: TextUnit, - text_edit: &TextEdit, -) -> LineCol { - let mut state = Edits::from_text_edit(&text_edit); - - let mut res = RunningLineCol::new(); - - macro_rules! 
test_step { - ($x:ident) => { - match &$x { - Step::Newline(n) => { - if offset < *n { - return res.to_line_col(offset); - } else { - res.add_line(*n); - } - } - Step::Utf16Char(x) => { - if offset < x.end() { - // if the offset is inside a multibyte char it's invalid - // clamp it to the start of the char - let clamp = offset.min(x.start()); - return res.to_line_col(clamp); - } else { - res.adjust_col(*x); - } - } - } - }; - } - - for orig_step in LineIndexStepIter::from(line_index) { - loop { - let translated_step = state.translate_step(&orig_step); - match state.next_steps(&translated_step) { - NextSteps::Use => { - test_step!(translated_step); - break; - } - NextSteps::ReplaceMany(ns) => { - for n in ns { - test_step!(n); - } - break; - } - NextSteps::AddMany(ns) => { - for n in ns { - test_step!(n); - } - } - } - } - } - - loop { - match state.next_inserted_steps() { - None => break, - Some(ns) => { - for n in ns { - test_step!(n); - } - } - } - } - - res.to_line_col(offset) -} - -#[cfg(test)] -mod test { - use proptest::{prelude::*, proptest}; - use ra_text_edit::test_utils::{arb_offset, arb_text_with_edit}; - use ra_text_edit::TextEdit; - - use crate::ide_db::line_index; - - use super::*; - - #[derive(Debug)] - struct ArbTextWithEditAndOffset { - text: String, - edit: TextEdit, - edited_text: String, - offset: TextUnit, - } - - fn arb_text_with_edit_and_offset() -> BoxedStrategy { - arb_text_with_edit() - .prop_flat_map(|x| { - let edited_text = x.edit.apply(&x.text); - let arb_offset = arb_offset(&edited_text); - (Just(x), Just(edited_text), arb_offset).prop_map(|(x, edited_text, offset)| { - ArbTextWithEditAndOffset { text: x.text, edit: x.edit, edited_text, offset } - }) - }) - .boxed() - } - - proptest! { - #[test] - fn test_translate_offset_with_edit(x in arb_text_with_edit_and_offset()) { - let expected = line_index::to_line_col(&x.edited_text, x.offset); - let line_index = LineIndex::new(&x.text); - let actual = translate_offset_with_edit(&line_index, x.offset, &x.edit); - - assert_eq!(actual, expected); - } - } -} diff --git a/crates/ra_ide/src/ide_db/mod.rs b/crates/ra_ide/src/ide_db/mod.rs deleted file mode 100644 index 0df4d510f..000000000 --- a/crates/ra_ide/src/ide_db/mod.rs +++ /dev/null @@ -1,137 +0,0 @@ -//! 
FIXME: write short doc here - -pub mod line_index; -pub mod line_index_utils; -pub mod feature_flags; -pub mod symbol_index; -pub mod change; - -use std::sync::Arc; - -use ra_db::{ - salsa::{self, Database, Durability}, - Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath, - SourceDatabase, SourceRootId, -}; -use rustc_hash::FxHashMap; - -use crate::ide_db::{ - feature_flags::FeatureFlags, line_index::LineIndex, symbol_index::SymbolsDatabase, -}; - -#[salsa::database( - ra_db::SourceDatabaseStorage, - ra_db::SourceDatabaseExtStorage, - LineIndexDatabaseStorage, - symbol_index::SymbolsDatabaseStorage, - hir::db::InternDatabaseStorage, - hir::db::AstDatabaseStorage, - hir::db::DefDatabaseStorage, - hir::db::HirDatabaseStorage -)] -#[derive(Debug)] -pub(crate) struct RootDatabase { - runtime: salsa::Runtime, - pub(crate) feature_flags: Arc, - pub(crate) debug_data: Arc, - pub(crate) last_gc: crate::wasm_shims::Instant, - pub(crate) last_gc_check: crate::wasm_shims::Instant, -} - -impl FileLoader for RootDatabase { - fn file_text(&self, file_id: FileId) -> Arc { - FileLoaderDelegate(self).file_text(file_id) - } - fn resolve_relative_path( - &self, - anchor: FileId, - relative_path: &RelativePath, - ) -> Option { - FileLoaderDelegate(self).resolve_relative_path(anchor, relative_path) - } - fn relevant_crates(&self, file_id: FileId) -> Arc> { - FileLoaderDelegate(self).relevant_crates(file_id) - } -} - -impl salsa::Database for RootDatabase { - fn salsa_runtime(&self) -> &salsa::Runtime { - &self.runtime - } - fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime { - &mut self.runtime - } - fn on_propagated_panic(&self) -> ! { - Canceled::throw() - } - fn salsa_event(&self, event: impl Fn() -> salsa::Event) { - match event().kind { - salsa::EventKind::DidValidateMemoizedValue { .. } - | salsa::EventKind::WillExecute { .. 
} => { - self.check_canceled(); - } - _ => (), - } - } -} - -impl Default for RootDatabase { - fn default() -> RootDatabase { - RootDatabase::new(None, FeatureFlags::default()) - } -} - -impl RootDatabase { - pub fn new(lru_capacity: Option, feature_flags: FeatureFlags) -> RootDatabase { - let mut db = RootDatabase { - runtime: salsa::Runtime::default(), - last_gc: crate::wasm_shims::Instant::now(), - last_gc_check: crate::wasm_shims::Instant::now(), - feature_flags: Arc::new(feature_flags), - debug_data: Default::default(), - }; - db.set_crate_graph_with_durability(Default::default(), Durability::HIGH); - db.set_local_roots_with_durability(Default::default(), Durability::HIGH); - db.set_library_roots_with_durability(Default::default(), Durability::HIGH); - let lru_capacity = lru_capacity.unwrap_or(ra_db::DEFAULT_LRU_CAP); - db.query_mut(ra_db::ParseQuery).set_lru_capacity(lru_capacity); - db.query_mut(hir::db::ParseMacroQuery).set_lru_capacity(lru_capacity); - db.query_mut(hir::db::MacroExpandQuery).set_lru_capacity(lru_capacity); - db - } -} - -impl salsa::ParallelDatabase for RootDatabase { - fn snapshot(&self) -> salsa::Snapshot { - salsa::Snapshot::new(RootDatabase { - runtime: self.runtime.snapshot(self), - last_gc: self.last_gc, - last_gc_check: self.last_gc_check, - feature_flags: Arc::clone(&self.feature_flags), - debug_data: Arc::clone(&self.debug_data), - }) - } -} - -#[salsa::query_group(LineIndexDatabaseStorage)] -pub(crate) trait LineIndexDatabase: ra_db::SourceDatabase + CheckCanceled { - fn line_index(&self, file_id: FileId) -> Arc; -} - -fn line_index(db: &impl LineIndexDatabase, file_id: FileId) -> Arc { - let text = db.file_text(file_id); - Arc::new(LineIndex::new(&*text)) -} - -#[derive(Debug, Default, Clone)] -pub(crate) struct DebugData { - pub(crate) root_paths: FxHashMap, - pub(crate) crate_names: FxHashMap, -} - -impl DebugData { - pub(crate) fn merge(&mut self, other: DebugData) { - self.root_paths.extend(other.root_paths.into_iter()); - self.crate_names.extend(other.crate_names.into_iter()); - } -} diff --git a/crates/ra_ide/src/ide_db/symbol_index.rs b/crates/ra_ide/src/ide_db/symbol_index.rs deleted file mode 100644 index c66eeb8e2..000000000 --- a/crates/ra_ide/src/ide_db/symbol_index.rs +++ /dev/null @@ -1,445 +0,0 @@ -//! This module handles fuzzy-searching of functions, structs and other symbols -//! by name across the whole workspace and dependencies. -//! -//! It works by building an incrementally-updated text-search index of all -//! symbols. The backbone of the index is the **awesome** `fst` crate by -//! @BurntSushi. -//! -//! In a nutshell, you give a set of strings to `fst`, and it builds a -//! finite state machine describing this set of strings. The strings which -//! could fuzzy-match a pattern can also be described by a finite state machine. -//! What is freaking cool is that you can now traverse both state machines in -//! lock-step to enumerate the strings which are both in the input set and -//! fuzz-match the query. Or, more formally, given two languages described by -//! FSTs, one can build a product FST which describes the intersection of the -//! languages. -//! -//! `fst` does not support cheap updating of the index, but it supports unioning -//! of state machines. So, to account for changing source code, we build an FST -//! for each library (which is assumed to never change) and an FST for each Rust -//! file in the current workspace, and run a query against the union of all -//! those FSTs. 
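// Editorial sketch, not part of the original patch: the lock-step/union idea
// described in the doc comment above, shown with the `fst` crate alone
// (assuming the same fst 0.3 API this file already relies on). The set
// contents and the function name are made up for illustration.
fn fst_union_sketch() -> Result<(), fst::Error> {
    use fst::{automaton::Subsequence, Set, Streamer};

    // One immutable set per index; keys must be inserted in sorted order.
    let library_index = Set::from_iter(vec!["hash_map", "hash_set"])?;
    let workspace_index = Set::from_iter(vec!["crate_graph", "source_root"])?;

    // A fuzzy (subsequence) automaton run over the union of both sets, which
    // is the same shape as `Query::search` below, minus the per-symbol values.
    let mut op = fst::set::OpBuilder::new();
    op = op.add(library_index.search(Subsequence::new("hst")));
    op = op.add(workspace_index.search(Subsequence::new("hst")));

    let mut stream = op.union();
    while let Some(key) = stream.next() {
        // Only "hash_set" contains 'h', 's', 't' as a subsequence.
        println!("{}", String::from_utf8_lossy(key));
    }
    Ok(())
}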
-use std::{ - fmt, - hash::{Hash, Hasher}, - mem, - sync::Arc, -}; - -use fst::{self, Streamer}; -use ra_db::{ - salsa::{self, ParallelDatabase}, - FileId, SourceDatabaseExt, SourceRootId, -}; -use ra_syntax::{ - ast::{self, NameOwner}, - match_ast, AstNode, Parse, SmolStr, SourceFile, - SyntaxKind::{self, *}, - SyntaxNode, SyntaxNodePtr, TextRange, WalkEvent, -}; -#[cfg(not(feature = "wasm"))] -use rayon::prelude::*; - -use crate::ide_db::RootDatabase; - -#[derive(Debug)] -pub struct Query { - query: String, - lowercased: String, - only_types: bool, - libs: bool, - exact: bool, - limit: usize, -} - -impl Query { - pub fn new(query: String) -> Query { - let lowercased = query.to_lowercase(); - Query { - query, - lowercased, - only_types: false, - libs: false, - exact: false, - limit: usize::max_value(), - } - } - - pub fn only_types(&mut self) { - self.only_types = true; - } - - pub fn libs(&mut self) { - self.libs = true; - } - - pub fn exact(&mut self) { - self.exact = true; - } - - pub fn limit(&mut self, limit: usize) { - self.limit = limit - } -} - -#[salsa::query_group(SymbolsDatabaseStorage)] -pub(crate) trait SymbolsDatabase: hir::db::HirDatabase { - fn file_symbols(&self, file_id: FileId) -> Arc; - #[salsa::input] - fn library_symbols(&self, id: SourceRootId) -> Arc; - /// The set of "local" (that is, from the current workspace) roots. - /// Files in local roots are assumed to change frequently. - #[salsa::input] - fn local_roots(&self) -> Arc>; - /// The set of roots for crates.io libraries. - /// Files in libraries are assumed to never change. - #[salsa::input] - fn library_roots(&self) -> Arc>; -} - -fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc { - db.check_canceled(); - let parse = db.parse(file_id); - - let symbols = source_file_to_file_symbols(&parse.tree(), file_id); - - // FIXME: add macros here - - Arc::new(SymbolIndex::new(symbols)) -} - -pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Vec { - /// Need to wrap Snapshot to provide `Clone` impl for `map_with` - struct Snap(salsa::Snapshot); - impl Clone for Snap { - fn clone(&self) -> Snap { - Snap(self.0.snapshot()) - } - } - - let buf: Vec> = if query.libs { - let snap = Snap(db.snapshot()); - #[cfg(not(feature = "wasm"))] - let buf = db - .library_roots() - .par_iter() - .map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id)) - .collect(); - - #[cfg(feature = "wasm")] - let buf = db.library_roots().iter().map(|&lib_id| snap.0.library_symbols(lib_id)).collect(); - - buf - } else { - let mut files = Vec::new(); - for &root in db.local_roots().iter() { - let sr = db.source_root(root); - files.extend(sr.walk()) - } - - let snap = Snap(db.snapshot()); - #[cfg(not(feature = "wasm"))] - let buf = - files.par_iter().map_with(snap, |db, &file_id| db.0.file_symbols(file_id)).collect(); - - #[cfg(feature = "wasm")] - let buf = files.iter().map(|&file_id| snap.0.file_symbols(file_id)).collect(); - - buf - }; - query.search(&buf) -} - -pub(crate) fn index_resolve(db: &RootDatabase, name_ref: &ast::NameRef) -> Vec { - let name = name_ref.text(); - let mut query = Query::new(name.to_string()); - query.exact(); - query.limit(4); - world_symbols(db, query) -} - -#[derive(Default)] -pub(crate) struct SymbolIndex { - symbols: Vec, - map: fst::Map, -} - -impl fmt::Debug for SymbolIndex { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("SymbolIndex").field("n_symbols", &self.symbols.len()).finish() - } -} - -impl PartialEq for SymbolIndex { - fn eq(&self, other: 
&SymbolIndex) -> bool { - self.symbols == other.symbols - } -} - -impl Eq for SymbolIndex {} - -impl Hash for SymbolIndex { - fn hash(&self, hasher: &mut H) { - self.symbols.hash(hasher) - } -} - -impl SymbolIndex { - fn new(mut symbols: Vec) -> SymbolIndex { - fn cmp_key<'a>(s1: &'a FileSymbol) -> impl Ord + 'a { - unicase::Ascii::new(s1.name.as_str()) - } - #[cfg(not(feature = "wasm"))] - symbols.par_sort_by(|s1, s2| cmp_key(s1).cmp(&cmp_key(s2))); - - #[cfg(feature = "wasm")] - symbols.sort_by(|s1, s2| cmp_key(s1).cmp(&cmp_key(s2))); - - let mut builder = fst::MapBuilder::memory(); - - let mut last_batch_start = 0; - - for idx in 0..symbols.len() { - if symbols.get(last_batch_start).map(cmp_key) == symbols.get(idx + 1).map(cmp_key) { - continue; - } - - let start = last_batch_start; - let end = idx + 1; - last_batch_start = end; - - let key = symbols[start].name.as_str().to_lowercase(); - let value = SymbolIndex::range_to_map_value(start, end); - - builder.insert(key, value).unwrap(); - } - - let map = fst::Map::from_bytes(builder.into_inner().unwrap()).unwrap(); - SymbolIndex { symbols, map } - } - - pub(crate) fn len(&self) -> usize { - self.symbols.len() - } - - pub(crate) fn memory_size(&self) -> usize { - self.map.as_fst().size() + self.symbols.len() * mem::size_of::() - } - - #[cfg(not(feature = "wasm"))] - pub(crate) fn for_files( - files: impl ParallelIterator)>, - ) -> SymbolIndex { - let symbols = files - .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id)) - .collect::>(); - SymbolIndex::new(symbols) - } - - #[cfg(feature = "wasm")] - pub(crate) fn for_files( - files: impl Iterator)>, - ) -> SymbolIndex { - let symbols = files - .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id)) - .collect::>(); - SymbolIndex::new(symbols) - } - - fn range_to_map_value(start: usize, end: usize) -> u64 { - debug_assert![start <= (std::u32::MAX as usize)]; - debug_assert![end <= (std::u32::MAX as usize)]; - - ((start as u64) << 32) | end as u64 - } - - fn map_value_to_range(value: u64) -> (usize, usize) { - let end = value as u32 as usize; - let start = (value >> 32) as usize; - (start, end) - } -} - -impl Query { - pub(crate) fn search(self, indices: &[Arc]) -> Vec { - let mut op = fst::map::OpBuilder::new(); - for file_symbols in indices.iter() { - let automaton = fst::automaton::Subsequence::new(&self.lowercased); - op = op.add(file_symbols.map.search(automaton)) - } - let mut stream = op.union(); - let mut res = Vec::new(); - while let Some((_, indexed_values)) = stream.next() { - if res.len() >= self.limit { - break; - } - for indexed_value in indexed_values { - let symbol_index = &indices[indexed_value.index]; - let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value); - - for symbol in &symbol_index.symbols[start..end] { - if self.only_types && !is_type(symbol.ptr.kind()) { - continue; - } - if self.exact && symbol.name != self.query { - continue; - } - res.push(symbol.clone()); - } - } - } - res - } -} - -fn is_type(kind: SyntaxKind) -> bool { - match kind { - STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF => true, - _ => false, - } -} - -/// The actual data that is stored in the index. It should be as compact as -/// possible. 
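// Editorial sketch, not part of the original patch: the fst map value packing
// used by `range_to_map_value`/`map_value_to_range` above keeps the index
// compact by storing a symbol range as a single u64, with the start index in
// the high 32 bits and the end index in the low 32 bits.
#[test]
fn map_value_packing_round_trips() {
    let value = SymbolIndex::range_to_map_value(3, 7);
    assert_eq!(value, (3u64 << 32) | 7);
    assert_eq!(SymbolIndex::map_value_to_range(value), (3, 7));
}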
-#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub(crate) struct FileSymbol { - pub(crate) file_id: FileId, - pub(crate) name: SmolStr, - pub(crate) ptr: SyntaxNodePtr, - pub(crate) name_range: Option, - pub(crate) container_name: Option, -} - -fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec { - let mut symbols = Vec::new(); - let mut stack = Vec::new(); - - for event in source_file.syntax().preorder() { - match event { - WalkEvent::Enter(node) => { - if let Some(mut symbol) = to_file_symbol(&node, file_id) { - symbol.container_name = stack.last().cloned(); - - stack.push(symbol.name.clone()); - symbols.push(symbol); - } - } - - WalkEvent::Leave(node) => { - if to_symbol(&node).is_some() { - stack.pop(); - } - } - } - } - - symbols -} - -fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { - fn decl(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { - let name = node.name()?; - let name_range = name.syntax().text_range(); - let name = name.text().clone(); - let ptr = SyntaxNodePtr::new(node.syntax()); - - Some((name, ptr, name_range)) - } - match_ast! { - match node { - ast::FnDef(it) => { decl(it) }, - ast::StructDef(it) => { decl(it) }, - ast::EnumDef(it) => { decl(it) }, - ast::TraitDef(it) => { decl(it) }, - ast::Module(it) => { decl(it) }, - ast::TypeAliasDef(it) => { decl(it) }, - ast::ConstDef(it) => { decl(it) }, - ast::StaticDef(it) => { decl(it) }, - _ => None, - } - } -} - -fn to_file_symbol(node: &SyntaxNode, file_id: FileId) -> Option { - to_symbol(node).map(move |(name, ptr, name_range)| FileSymbol { - name, - ptr, - file_id, - name_range: Some(name_range), - container_name: None, - }) -} - -#[cfg(test)] -mod tests { - use crate::{display::NavigationTarget, mock_analysis::single_file, Query}; - use ra_syntax::{ - SmolStr, - SyntaxKind::{FN_DEF, STRUCT_DEF}, - }; - - #[test] - fn test_world_symbols_with_no_container() { - let code = r#" - enum FooInner { } - "#; - - let mut symbols = get_symbols_matching(code, "FooInner"); - - let s = symbols.pop().unwrap(); - - assert_eq!(s.name(), "FooInner"); - assert!(s.container_name().is_none()); - } - - #[test] - fn test_world_symbols_include_container_name() { - let code = r#" -fn foo() { - enum FooInner { } -} - "#; - - let mut symbols = get_symbols_matching(code, "FooInner"); - - let s = symbols.pop().unwrap(); - - assert_eq!(s.name(), "FooInner"); - assert_eq!(s.container_name(), Some(&SmolStr::new("foo"))); - - let code = r#" -mod foo { - struct FooInner; -} - "#; - - let mut symbols = get_symbols_matching(code, "FooInner"); - - let s = symbols.pop().unwrap(); - - assert_eq!(s.name(), "FooInner"); - assert_eq!(s.container_name(), Some(&SmolStr::new("foo"))); - } - - #[test] - fn test_world_symbols_are_case_sensitive() { - let code = r#" -fn foo() {} - -struct Foo; - "#; - - let symbols = get_symbols_matching(code, "Foo"); - - let fn_match = symbols.iter().find(|s| s.name() == "foo").map(|s| s.kind()); - let struct_match = symbols.iter().find(|s| s.name() == "Foo").map(|s| s.kind()); - - assert_eq!(fn_match, Some(FN_DEF)); - assert_eq!(struct_match, Some(STRUCT_DEF)); - } - - fn get_symbols_matching(text: &str, query: &str) -> Vec { - let (analysis, _) = single_file(text); - analysis.symbol_search(Query::new(query.into())).unwrap() - } -} -- cgit v1.2.3
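Editorial appendix, not part of the patch above: a minimal sketch of driving the `Query` builder from symbol_index.rs end to end, in the style of that file's tests. The wrapper function name and the snippet text are illustrative; `single_file` and `symbol_search` are the helpers the tests already use, and the final assertion reflects my reading of `Query::search`.

fn symbol_query_sketch() {
    use crate::{mock_analysis::single_file, Query};

    let (analysis, _) = single_file("struct Foo; fn foo() {}");

    // Case-sensitive, type-only lookup, capped at ten hits.
    let mut query = Query::new("Foo".to_string());
    query.exact();
    query.only_types();
    query.limit(10);

    let hits = analysis.symbol_search(query).unwrap();
    // `fn foo` is filtered out by both `exact` and `only_types`,
    // leaving just `struct Foo`.
    assert_eq!(hits.len(), 1);
}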