From bb5c189b7dae1ea63ccd5d7a0c2e097d7c676f77 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Thu, 13 Aug 2020 16:39:16 +0200 Subject: Rename ra_ide_db -> ide_db --- crates/ide_db/Cargo.toml | 30 +++ crates/ide_db/src/change.rs | 318 ++++++++++++++++++++++++++ crates/ide_db/src/defs.rs | 348 ++++++++++++++++++++++++++++ crates/ide_db/src/imports_locator.rs | 64 ++++++ crates/ide_db/src/lib.rs | 139 ++++++++++++ crates/ide_db/src/line_index.rs | 281 +++++++++++++++++++++++ crates/ide_db/src/search.rs | 322 ++++++++++++++++++++++++++ crates/ide_db/src/source_change.rs | 59 +++++ crates/ide_db/src/symbol_index.rs | 429 +++++++++++++++++++++++++++++++++++ crates/ide_db/src/wasm_shims.rs | 19 ++ 10 files changed, 2009 insertions(+) create mode 100644 crates/ide_db/Cargo.toml create mode 100644 crates/ide_db/src/change.rs create mode 100644 crates/ide_db/src/defs.rs create mode 100644 crates/ide_db/src/imports_locator.rs create mode 100644 crates/ide_db/src/lib.rs create mode 100644 crates/ide_db/src/line_index.rs create mode 100644 crates/ide_db/src/search.rs create mode 100644 crates/ide_db/src/source_change.rs create mode 100644 crates/ide_db/src/symbol_index.rs create mode 100644 crates/ide_db/src/wasm_shims.rs (limited to 'crates/ide_db') diff --git a/crates/ide_db/Cargo.toml b/crates/ide_db/Cargo.toml new file mode 100644 index 000000000..885212162 --- /dev/null +++ b/crates/ide_db/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "ide_db" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[features] +wasm = [] + +[dependencies] +log = "0.4.8" +rayon = "1.3.0" +fst = { version = "0.4", default-features = false } +rustc-hash = "1.1.0" +once_cell = "1.3.1" +either = "1.5.3" + +stdx = { path = "../stdx" } +syntax = { path = "../syntax" } +text_edit = { path = "../text_edit" } +base_db = { path = "../base_db" } +profile = { path = "../profile" } +test_utils = { path = "../test_utils" } +# ra_ide should depend only on the top-level `hir` package. if you need +# something from some `hir_xxx` subpackage, reexport the API via `hir`. +hir = { path = "../hir" } diff --git a/crates/ide_db/src/change.rs b/crates/ide_db/src/change.rs new file mode 100644 index 000000000..8b4fd7ab8 --- /dev/null +++ b/crates/ide_db/src/change.rs @@ -0,0 +1,318 @@ +//! Defines a unit of change that can applied to a state of IDE to get the next +//! state. Changes are transactional. 
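// A minimal sketch of how a caller might drive this API (the helper name and
// its parameters are made up for illustration, and it relies on the imports
// declared just below): collect edits into an `AnalysisChange`, then apply
// them as a single transaction.
fn apply_edit(db: &mut RootDatabase, file_id: FileId, new_text: String, graph: CrateGraph) {
    let mut change = AnalysisChange::new();
    // Passing `None` here marks the file as removed; `apply_change` resets its text to empty.
    change.change_file(file_id, Some(Arc::new(new_text)));
    // Optional: replace the crate graph in the same transaction.
    change.set_crate_graph(graph);
    // One transactional state transition; in-flight queries are cancelled first.
    db.apply_change(change);
}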
+ +use std::{fmt, sync::Arc, time}; + +use base_db::{ + salsa::{Database, Durability, SweepStrategy}, + CrateGraph, FileId, SourceDatabase, SourceDatabaseExt, SourceRoot, SourceRootId, +}; +use profile::{memory_usage, Bytes}; +use rustc_hash::FxHashSet; + +use crate::{symbol_index::SymbolsDatabase, RootDatabase}; + +#[derive(Default)] +pub struct AnalysisChange { + roots: Option>, + files_changed: Vec<(FileId, Option>)>, + crate_graph: Option, +} + +impl fmt::Debug for AnalysisChange { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + let mut d = fmt.debug_struct("AnalysisChange"); + if let Some(roots) = &self.roots { + d.field("roots", roots); + } + if !self.files_changed.is_empty() { + d.field("files_changed", &self.files_changed.len()); + } + if self.crate_graph.is_some() { + d.field("crate_graph", &self.crate_graph); + } + d.finish() + } +} + +impl AnalysisChange { + pub fn new() -> AnalysisChange { + AnalysisChange::default() + } + + pub fn set_roots(&mut self, roots: Vec) { + self.roots = Some(roots); + } + + pub fn change_file(&mut self, file_id: FileId, new_text: Option>) { + self.files_changed.push((file_id, new_text)) + } + + pub fn set_crate_graph(&mut self, graph: CrateGraph) { + self.crate_graph = Some(graph); + } +} + +#[derive(Debug)] +struct AddFile { + file_id: FileId, + path: String, + text: Arc, +} + +#[derive(Debug)] +struct RemoveFile { + file_id: FileId, + path: String, +} + +#[derive(Default)] +struct RootChange { + added: Vec, + removed: Vec, +} + +impl fmt::Debug for RootChange { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + fmt.debug_struct("AnalysisChange") + .field("added", &self.added.len()) + .field("removed", &self.removed.len()) + .finish() + } +} + +const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100); + +impl RootDatabase { + pub fn request_cancellation(&mut self) { + let _p = profile::span("RootDatabase::request_cancellation"); + self.salsa_runtime_mut().synthetic_write(Durability::LOW); + } + + pub fn apply_change(&mut self, change: AnalysisChange) { + let _p = profile::span("RootDatabase::apply_change"); + self.request_cancellation(); + log::info!("apply_change {:?}", change); + if let Some(roots) = change.roots { + let mut local_roots = FxHashSet::default(); + let mut library_roots = FxHashSet::default(); + for (idx, root) in roots.into_iter().enumerate() { + let root_id = SourceRootId(idx as u32); + let durability = durability(&root); + if root.is_library { + library_roots.insert(root_id); + } else { + local_roots.insert(root_id); + } + for file_id in root.iter() { + self.set_file_source_root_with_durability(file_id, root_id, durability); + } + self.set_source_root_with_durability(root_id, Arc::new(root), durability); + } + self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); + self.set_library_roots_with_durability(Arc::new(library_roots), Durability::HIGH); + } + + for (file_id, text) in change.files_changed { + let source_root_id = self.file_source_root(file_id); + let source_root = self.source_root(source_root_id); + let durability = durability(&source_root); + // XXX: can't actually remove the file, just reset the text + let text = text.unwrap_or_default(); + self.set_file_text_with_durability(file_id, text, durability) + } + if let Some(crate_graph) = change.crate_graph { + self.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH) + } + } + + pub fn maybe_collect_garbage(&mut self) { + if cfg!(feature = "wasm") { + return; + } + + if 
self.last_gc_check.elapsed() > GC_COOLDOWN { + self.last_gc_check = crate::wasm_shims::Instant::now(); + } + } + + pub fn collect_garbage(&mut self) { + if cfg!(feature = "wasm") { + return; + } + + let _p = profile::span("RootDatabase::collect_garbage"); + self.last_gc = crate::wasm_shims::Instant::now(); + + let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); + + base_db::ParseQuery.in_db(self).sweep(sweep); + hir::db::ParseMacroQuery.in_db(self).sweep(sweep); + + // Macros do take significant space, but less then the syntax trees + // self.query(hir::db::MacroDefQuery).sweep(sweep); + // self.query(hir::db::MacroArgTextQuery).sweep(sweep); + // self.query(hir::db::MacroExpandQuery).sweep(sweep); + + hir::db::AstIdMapQuery.in_db(self).sweep(sweep); + + hir::db::BodyWithSourceMapQuery.in_db(self).sweep(sweep); + + hir::db::ExprScopesQuery.in_db(self).sweep(sweep); + hir::db::InferQueryQuery.in_db(self).sweep(sweep); + hir::db::BodyQuery.in_db(self).sweep(sweep); + } + + // Feature: Memory Usage + // + // Clears rust-analyzer's internal database and prints memory usage statistics. + // + // |=== + // | Editor | Action Name + // + // | VS Code | **Rust Analyzer: Memory Usage (Clears Database)** + // |=== + pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> { + let mut acc: Vec<(String, Bytes)> = vec![]; + let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); + macro_rules! sweep_each_query { + ($($q:path)*) => {$( + let before = memory_usage().allocated; + $q.in_db(self).sweep(sweep); + let after = memory_usage().allocated; + let q: $q = Default::default(); + let name = format!("{:?}", q); + acc.push((name, before - after)); + + let before = memory_usage().allocated; + $q.in_db(self).sweep(sweep.discard_everything()); + let after = memory_usage().allocated; + let q: $q = Default::default(); + let name = format!("{:?} (deps)", q); + acc.push((name, before - after)); + + let before = memory_usage().allocated; + $q.in_db(self).purge(); + let after = memory_usage().allocated; + let q: $q = Default::default(); + let name = format!("{:?} (purge)", q); + acc.push((name, before - after)); + )*} + } + sweep_each_query![ + // SourceDatabase + base_db::ParseQuery + base_db::CrateGraphQuery + + // SourceDatabaseExt + base_db::FileTextQuery + base_db::FileSourceRootQuery + base_db::SourceRootQuery + base_db::SourceRootCratesQuery + + // AstDatabase + hir::db::AstIdMapQuery + hir::db::MacroArgTextQuery + hir::db::MacroDefQuery + hir::db::ParseMacroQuery + hir::db::MacroExpandQuery + + // DefDatabase + hir::db::ItemTreeQuery + hir::db::CrateDefMapQueryQuery + hir::db::StructDataQuery + hir::db::UnionDataQuery + hir::db::EnumDataQuery + hir::db::ImplDataQuery + hir::db::TraitDataQuery + hir::db::TypeAliasDataQuery + hir::db::FunctionDataQuery + hir::db::ConstDataQuery + hir::db::StaticDataQuery + hir::db::BodyWithSourceMapQuery + hir::db::BodyQuery + hir::db::ExprScopesQuery + hir::db::GenericParamsQuery + hir::db::AttrsQuery + hir::db::ModuleLangItemsQuery + hir::db::CrateLangItemsQuery + hir::db::LangItemQuery + hir::db::DocumentationQuery + hir::db::ImportMapQuery + + // HirDatabase + hir::db::InferQueryQuery + hir::db::TyQuery + hir::db::ValueTyQuery + hir::db::ImplSelfTyQuery + hir::db::ImplTraitQuery + hir::db::FieldTypesQuery + hir::db::CallableItemSignatureQuery + hir::db::GenericPredicatesForParamQuery + hir::db::GenericPredicatesQuery + hir::db::GenericDefaultsQuery + hir::db::InherentImplsInCrateQuery + hir::db::TraitImplsInCrateQuery 
+ hir::db::TraitImplsInDepsQuery + hir::db::AssociatedTyDataQuery + hir::db::AssociatedTyDataQuery + hir::db::TraitDatumQuery + hir::db::StructDatumQuery + hir::db::ImplDatumQuery + hir::db::FnDefDatumQuery + hir::db::ReturnTypeImplTraitsQuery + hir::db::InternCallableDefQuery + hir::db::InternTypeParamIdQuery + hir::db::InternImplTraitIdQuery + hir::db::InternClosureQuery + hir::db::AssociatedTyValueQuery + hir::db::TraitSolveQuery + + // SymbolsDatabase + crate::symbol_index::FileSymbolsQuery + crate::symbol_index::LibrarySymbolsQuery + crate::symbol_index::LocalRootsQuery + crate::symbol_index::LibraryRootsQuery + + // LineIndexDatabase + crate::LineIndexQuery + ]; + + // To collect interned data, we need to bump the revision counter by performing a synthetic + // write. + // We do this after collecting the non-interned queries to correctly attribute memory used + // by interned data. + self.salsa_runtime_mut().synthetic_write(Durability::HIGH); + + sweep_each_query![ + // AstDatabase + hir::db::InternMacroQuery + hir::db::InternEagerExpansionQuery + + // InternDatabase + hir::db::InternFunctionQuery + hir::db::InternStructQuery + hir::db::InternUnionQuery + hir::db::InternEnumQuery + hir::db::InternConstQuery + hir::db::InternStaticQuery + hir::db::InternTraitQuery + hir::db::InternTypeAliasQuery + hir::db::InternImplQuery + + // HirDatabase + hir::db::InternTypeParamIdQuery + ]; + + acc.sort_by_key(|it| std::cmp::Reverse(it.1)); + acc + } +} + +fn durability(source_root: &SourceRoot) -> Durability { + if source_root.is_library { + Durability::HIGH + } else { + Durability::LOW + } +} diff --git a/crates/ide_db/src/defs.rs b/crates/ide_db/src/defs.rs new file mode 100644 index 000000000..7b5d6ac49 --- /dev/null +++ b/crates/ide_db/src/defs.rs @@ -0,0 +1,348 @@ +//! `NameDefinition` keeps information about the element we want to search references for. +//! The element is represented by `NameKind`. It's located inside some `container` and +//! has a `visibility`, which defines a search scope. +//! Note that the reference search is possible for not all of the classified items. + +// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06). + +use hir::{ + db::HirDatabase, Crate, Field, HasVisibility, ImplDef, Local, MacroDef, Module, ModuleDef, + Name, PathResolution, Semantics, TypeParam, Visibility, +}; +use syntax::{ + ast::{self, AstNode}, + match_ast, SyntaxNode, +}; + +use crate::RootDatabase; + +// FIXME: a more precise name would probably be `Symbol`? 
+#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub enum Definition { + Macro(MacroDef), + Field(Field), + ModuleDef(ModuleDef), + SelfType(ImplDef), + Local(Local), + TypeParam(TypeParam), +} + +impl Definition { + pub fn module(&self, db: &RootDatabase) -> Option { + match self { + Definition::Macro(it) => it.module(db), + Definition::Field(it) => Some(it.parent_def(db).module(db)), + Definition::ModuleDef(it) => it.module(db), + Definition::SelfType(it) => Some(it.module(db)), + Definition::Local(it) => Some(it.module(db)), + Definition::TypeParam(it) => Some(it.module(db)), + } + } + + pub fn visibility(&self, db: &RootDatabase) -> Option { + match self { + Definition::Macro(_) => None, + Definition::Field(sf) => Some(sf.visibility(db)), + Definition::ModuleDef(def) => def.definition_visibility(db), + Definition::SelfType(_) => None, + Definition::Local(_) => None, + Definition::TypeParam(_) => None, + } + } + + pub fn name(&self, db: &RootDatabase) -> Option { + let name = match self { + Definition::Macro(it) => it.name(db)?, + Definition::Field(it) => it.name(db), + Definition::ModuleDef(def) => match def { + hir::ModuleDef::Module(it) => it.name(db)?, + hir::ModuleDef::Function(it) => it.name(db), + hir::ModuleDef::Adt(def) => match def { + hir::Adt::Struct(it) => it.name(db), + hir::Adt::Union(it) => it.name(db), + hir::Adt::Enum(it) => it.name(db), + }, + hir::ModuleDef::EnumVariant(it) => it.name(db), + hir::ModuleDef::Const(it) => it.name(db)?, + hir::ModuleDef::Static(it) => it.name(db)?, + hir::ModuleDef::Trait(it) => it.name(db), + hir::ModuleDef::TypeAlias(it) => it.name(db), + hir::ModuleDef::BuiltinType(_) => return None, + }, + Definition::SelfType(_) => return None, + Definition::Local(it) => it.name(db)?, + Definition::TypeParam(it) => it.name(db), + }; + Some(name) + } +} + +#[derive(Debug)] +pub enum NameClass { + ExternCrate(Crate), + Definition(Definition), + /// `None` in `if let None = Some(82) {}` + ConstReference(Definition), + FieldShorthand { + local: Local, + field: Definition, + }, +} + +impl NameClass { + pub fn into_definition(self, db: &dyn HirDatabase) -> Option { + Some(match self { + NameClass::ExternCrate(krate) => Definition::ModuleDef(krate.root_module(db).into()), + NameClass::Definition(it) => it, + NameClass::ConstReference(_) => return None, + NameClass::FieldShorthand { local, field: _ } => Definition::Local(local), + }) + } + + pub fn definition(self, db: &dyn HirDatabase) -> Definition { + match self { + NameClass::ExternCrate(krate) => Definition::ModuleDef(krate.root_module(db).into()), + NameClass::Definition(it) | NameClass::ConstReference(it) => it, + NameClass::FieldShorthand { local: _, field } => field, + } + } +} + +pub fn classify_name(sema: &Semantics, name: &ast::Name) -> Option { + let _p = profile::span("classify_name"); + + let parent = name.syntax().parent()?; + + if let Some(bind_pat) = ast::IdentPat::cast(parent.clone()) { + if let Some(def) = sema.resolve_bind_pat_to_const(&bind_pat) { + return Some(NameClass::ConstReference(Definition::ModuleDef(def))); + } + } + + match_ast! { + match parent { + ast::Rename(it) => { + if let Some(use_tree) = it.syntax().parent().and_then(ast::UseTree::cast) { + let path = use_tree.path()?; + let path_segment = path.segment()?; + let name_ref_class = path_segment + .name_ref() + // The rename might be from a `self` token, so fallback to the name higher + // in the use tree. 
+ .or_else(||{ + if path_segment.self_token().is_none() { + return None; + } + + let use_tree = use_tree + .syntax() + .parent() + .as_ref() + // Skip over UseTreeList + .and_then(SyntaxNode::parent) + .and_then(ast::UseTree::cast)?; + let path = use_tree.path()?; + let path_segment = path.segment()?; + path_segment.name_ref() + }) + .and_then(|name_ref| classify_name_ref(sema, &name_ref))?; + + Some(NameClass::Definition(name_ref_class.definition(sema.db))) + } else { + let extern_crate = it.syntax().parent().and_then(ast::ExternCrate::cast)?; + let resolved = sema.resolve_extern_crate(&extern_crate)?; + Some(NameClass::ExternCrate(resolved)) + } + }, + ast::IdentPat(it) => { + let local = sema.to_def(&it)?; + + if let Some(record_field_pat) = it.syntax().parent().and_then(ast::RecordPatField::cast) { + if record_field_pat.name_ref().is_none() { + if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) { + let field = Definition::Field(field); + return Some(NameClass::FieldShorthand { local, field }); + } + } + } + + Some(NameClass::Definition(Definition::Local(local))) + }, + ast::RecordField(it) => { + let field: hir::Field = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::Field(field))) + }, + ast::Module(it) => { + let def = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Struct(it) => { + let def: hir::Struct = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Union(it) => { + let def: hir::Union = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Enum(it) => { + let def: hir::Enum = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Trait(it) => { + let def: hir::Trait = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Static(it) => { + let def: hir::Static = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Variant(it) => { + let def: hir::EnumVariant = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Fn(it) => { + let def: hir::Function = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::Const(it) => { + let def: hir::Const = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::TypeAlias(it) => { + let def: hir::TypeAlias = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::ModuleDef(def.into()))) + }, + ast::MacroCall(it) => { + let def = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::Macro(def))) + }, + ast::TypeParam(it) => { + let def = sema.to_def(&it)?; + Some(NameClass::Definition(Definition::TypeParam(def))) + }, + _ => None, + } + } +} + +#[derive(Debug)] +pub enum NameRefClass { + ExternCrate(Crate), + Definition(Definition), + FieldShorthand { local: Local, field: Definition }, +} + +impl NameRefClass { + pub fn definition(self, db: &dyn HirDatabase) -> Definition { + match self { + NameRefClass::ExternCrate(krate) => Definition::ModuleDef(krate.root_module(db).into()), + NameRefClass::Definition(def) => def, + NameRefClass::FieldShorthand { local, field: _ } => Definition::Local(local), + } + } +} + +// Note: we don't have unit-tests for this rather important function. +// It is primarily exercised via goto definition tests in `ra_ide`. 
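// Illustrative only: a typical caller of `classify_name_ref` (defined just below).
// `definition_at` is a made-up helper that resolves the name reference under
// `offset` to the `Definition` it points at.
fn definition_at(
    sema: &Semantics<RootDatabase>,
    file_id: base_db::FileId,
    offset: syntax::TextSize,
) -> Option<Definition> {
    let file = sema.parse(file_id);
    let name_ref: ast::NameRef = sema.find_node_at_offset_with_descend(file.syntax(), offset)?;
    let class = classify_name_ref(sema, &name_ref)?;
    Some(class.definition(sema.db))
}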
+pub fn classify_name_ref( + sema: &Semantics, + name_ref: &ast::NameRef, +) -> Option { + let _p = profile::span("classify_name_ref"); + + let parent = name_ref.syntax().parent()?; + + if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) { + if let Some(func) = sema.resolve_method_call(&method_call) { + return Some(NameRefClass::Definition(Definition::ModuleDef(func.into()))); + } + } + + if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { + if let Some(field) = sema.resolve_field(&field_expr) { + return Some(NameRefClass::Definition(Definition::Field(field))); + } + } + + if let Some(record_field) = ast::RecordExprField::for_field_name(name_ref) { + if let Some((field, local)) = sema.resolve_record_field(&record_field) { + let field = Definition::Field(field); + let res = match local { + None => NameRefClass::Definition(field), + Some(local) => NameRefClass::FieldShorthand { field, local }, + }; + return Some(res); + } + } + + if let Some(record_field_pat) = ast::RecordPatField::cast(parent.clone()) { + if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) { + let field = Definition::Field(field); + return Some(NameRefClass::Definition(field)); + } + } + + if ast::AssocTypeArg::cast(parent.clone()).is_some() { + // `Trait` + // ^^^^^ + let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?; + let resolved = sema.resolve_path(&path)?; + if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved { + if let Some(ty) = tr + .items(sema.db) + .iter() + .filter_map(|assoc| match assoc { + hir::AssocItem::TypeAlias(it) => Some(*it), + _ => None, + }) + .find(|alias| alias.name(sema.db).to_string() == **name_ref.text()) + { + return Some(NameRefClass::Definition(Definition::ModuleDef( + ModuleDef::TypeAlias(ty), + ))); + } + } + } + + if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { + if let Some(path) = macro_call.path() { + if path.qualifier().is_none() { + // Only use this to resolve single-segment macro calls like `foo!()`. Multi-segment + // paths are handled below (allowing `log<|>::info!` to resolve to the log crate). + if let Some(macro_def) = sema.resolve_macro_call(¯o_call) { + return Some(NameRefClass::Definition(Definition::Macro(macro_def))); + } + } + } + } + + if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) { + if let Some(resolved) = sema.resolve_path(&path) { + return Some(NameRefClass::Definition(resolved.into())); + } + } + + let extern_crate = ast::ExternCrate::cast(parent)?; + let resolved = sema.resolve_extern_crate(&extern_crate)?; + Some(NameRefClass::ExternCrate(resolved)) +} + +impl From for Definition { + fn from(path_resolution: PathResolution) -> Self { + match path_resolution { + PathResolution::Def(def) => Definition::ModuleDef(def), + PathResolution::AssocItem(item) => { + let def = match item { + hir::AssocItem::Function(it) => it.into(), + hir::AssocItem::Const(it) => it.into(), + hir::AssocItem::TypeAlias(it) => it.into(), + }; + Definition::ModuleDef(def) + } + PathResolution::Local(local) => Definition::Local(local), + PathResolution::TypeParam(par) => Definition::TypeParam(par), + PathResolution::Macro(def) => Definition::Macro(def), + PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def), + } + } +} diff --git a/crates/ide_db/src/imports_locator.rs b/crates/ide_db/src/imports_locator.rs new file mode 100644 index 000000000..1d0c20291 --- /dev/null +++ b/crates/ide_db/src/imports_locator.rs @@ -0,0 +1,64 @@ +//! 
This module contains an import search funcionality that is provided to the ra_assists module. +//! Later, this should be moved away to a separate crate that is accessible from the ra_assists module. + +use hir::{Crate, MacroDef, ModuleDef, Semantics}; +use syntax::{ast, AstNode, SyntaxKind::NAME}; + +use crate::{ + defs::{classify_name, Definition}, + symbol_index::{self, FileSymbol, Query}, + RootDatabase, +}; +use either::Either; +use rustc_hash::FxHashSet; + +pub fn find_imports<'a>( + sema: &Semantics<'a, RootDatabase>, + krate: Crate, + name_to_import: &str, +) -> Vec> { + let _p = profile::span("search_for_imports"); + let db = sema.db; + + // Query dependencies first. + let mut candidates: FxHashSet<_> = + krate.query_external_importables(db, name_to_import).collect(); + + // Query the local crate using the symbol index. + let local_results = { + let mut query = Query::new(name_to_import.to_string()); + query.exact(); + query.limit(40); + symbol_index::crate_symbols(db, krate.into(), query) + }; + + candidates.extend( + local_results + .into_iter() + .filter_map(|import_candidate| get_name_definition(sema, &import_candidate)) + .filter_map(|name_definition_to_import| match name_definition_to_import { + Definition::ModuleDef(module_def) => Some(Either::Left(module_def)), + Definition::Macro(macro_def) => Some(Either::Right(macro_def)), + _ => None, + }), + ); + + candidates.into_iter().collect() +} + +fn get_name_definition<'a>( + sema: &Semantics<'a, RootDatabase>, + import_candidate: &FileSymbol, +) -> Option { + let _p = profile::span("get_name_definition"); + let file_id = import_candidate.file_id; + + let candidate_node = import_candidate.ptr.to_node(sema.parse(file_id).syntax()); + let candidate_name_node = if candidate_node.kind() != NAME { + candidate_node.children().find(|it| it.kind() == NAME)? + } else { + candidate_node + }; + let name = ast::Name::cast(candidate_name_node)?; + classify_name(sema, &name)?.into_definition(sema.db) +} diff --git a/crates/ide_db/src/lib.rs b/crates/ide_db/src/lib.rs new file mode 100644 index 000000000..fd474cd0f --- /dev/null +++ b/crates/ide_db/src/lib.rs @@ -0,0 +1,139 @@ +//! This crate defines the core datastructure representing IDE state -- `RootDatabase`. +//! +//! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search. 
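// A minimal sketch of read-side usage (illustrative; `column_of` is a made-up
// helper, and `file_id` must already have been registered through an
// `AnalysisChange` before any query touches it):
fn column_of(db: &RootDatabase, file_id: FileId, offset: syntax::TextSize) -> u32 {
    // Derived data such as the line index is computed on demand and memoized by salsa.
    db.line_index(file_id).line_col(offset).col_utf16
}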
+ +pub mod line_index; +pub mod symbol_index; +pub mod change; +pub mod defs; +pub mod search; +pub mod imports_locator; +pub mod source_change; +mod wasm_shims; + +use std::{fmt, sync::Arc}; + +use base_db::{ + salsa::{self, Durability}, + Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, + Upcast, +}; +use hir::db::{AstDatabase, DefDatabase, HirDatabase}; +use rustc_hash::FxHashSet; + +use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase}; + +#[salsa::database( + base_db::SourceDatabaseStorage, + base_db::SourceDatabaseExtStorage, + LineIndexDatabaseStorage, + symbol_index::SymbolsDatabaseStorage, + hir::db::InternDatabaseStorage, + hir::db::AstDatabaseStorage, + hir::db::DefDatabaseStorage, + hir::db::HirDatabaseStorage +)] +pub struct RootDatabase { + storage: salsa::Storage, + pub last_gc: crate::wasm_shims::Instant, + pub last_gc_check: crate::wasm_shims::Instant, +} + +impl fmt::Debug for RootDatabase { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RootDatabase").finish() + } +} + +impl Upcast for RootDatabase { + fn upcast(&self) -> &(dyn AstDatabase + 'static) { + &*self + } +} + +impl Upcast for RootDatabase { + fn upcast(&self) -> &(dyn DefDatabase + 'static) { + &*self + } +} + +impl Upcast for RootDatabase { + fn upcast(&self) -> &(dyn HirDatabase + 'static) { + &*self + } +} + +impl FileLoader for RootDatabase { + fn file_text(&self, file_id: FileId) -> Arc { + FileLoaderDelegate(self).file_text(file_id) + } + fn resolve_path(&self, anchor: FileId, path: &str) -> Option { + FileLoaderDelegate(self).resolve_path(anchor, path) + } + fn relevant_crates(&self, file_id: FileId) -> Arc> { + FileLoaderDelegate(self).relevant_crates(file_id) + } +} + +impl salsa::Database for RootDatabase { + fn on_propagated_panic(&self) -> ! { + Canceled::throw() + } + fn salsa_event(&self, event: salsa::Event) { + match event.kind { + salsa::EventKind::DidValidateMemoizedValue { .. } + | salsa::EventKind::WillExecute { .. 
} => { + self.check_canceled(); + } + _ => (), + } + } +} + +impl Default for RootDatabase { + fn default() -> RootDatabase { + RootDatabase::new(None) + } +} + +impl RootDatabase { + pub fn new(lru_capacity: Option) -> RootDatabase { + let mut db = RootDatabase { + storage: salsa::Storage::default(), + last_gc: crate::wasm_shims::Instant::now(), + last_gc_check: crate::wasm_shims::Instant::now(), + }; + db.set_crate_graph_with_durability(Default::default(), Durability::HIGH); + db.set_local_roots_with_durability(Default::default(), Durability::HIGH); + db.set_library_roots_with_durability(Default::default(), Durability::HIGH); + db.update_lru_capacity(lru_capacity); + db + } + + pub fn update_lru_capacity(&mut self, lru_capacity: Option) { + let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_LRU_CAP); + base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); + hir::db::ParseMacroQuery.in_db_mut(self).set_lru_capacity(lru_capacity); + hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity); + } +} + +impl salsa::ParallelDatabase for RootDatabase { + fn snapshot(&self) -> salsa::Snapshot { + salsa::Snapshot::new(RootDatabase { + storage: self.storage.snapshot(), + last_gc: self.last_gc, + last_gc_check: self.last_gc_check, + }) + } +} + +#[salsa::query_group(LineIndexDatabaseStorage)] +pub trait LineIndexDatabase: base_db::SourceDatabase + CheckCanceled { + fn line_index(&self, file_id: FileId) -> Arc; +} + +fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc { + let text = db.file_text(file_id); + Arc::new(LineIndex::new(&*text)) +} diff --git a/crates/ide_db/src/line_index.rs b/crates/ide_db/src/line_index.rs new file mode 100644 index 000000000..a381f7fb8 --- /dev/null +++ b/crates/ide_db/src/line_index.rs @@ -0,0 +1,281 @@ +//! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)` +//! representation. +use std::iter; + +use rustc_hash::FxHashMap; +use stdx::partition_point; +use syntax::{TextRange, TextSize}; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct LineIndex { + /// Offset the the beginning of each line, zero-based + pub(crate) newlines: Vec, + /// List of non-ASCII characters on each line + pub(crate) utf16_lines: FxHashMap>, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct LineCol { + /// Zero-based + pub line: u32, + /// Zero-based + pub col_utf16: u32, +} + +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub(crate) struct Utf16Char { + /// Start offset of a character inside a line, zero-based + pub(crate) start: TextSize, + /// End offset of a character inside a line, zero-based + pub(crate) end: TextSize, +} + +impl Utf16Char { + /// Returns the length in 8-bit UTF-8 code units. + fn len(&self) -> TextSize { + self.end - self.start + } + + /// Returns the length in 16-bit UTF-16 code units. 
+ fn len_utf16(&self) -> usize { + if self.len() == TextSize::from(4) { + 2 + } else { + 1 + } + } +} + +impl LineIndex { + pub fn new(text: &str) -> LineIndex { + let mut utf16_lines = FxHashMap::default(); + let mut utf16_chars = Vec::new(); + + let mut newlines = vec![0.into()]; + let mut curr_row = 0.into(); + let mut curr_col = 0.into(); + let mut line = 0; + for c in text.chars() { + let c_len = TextSize::of(c); + curr_row += c_len; + if c == '\n' { + newlines.push(curr_row); + + // Save any utf-16 characters seen in the previous line + if !utf16_chars.is_empty() { + utf16_lines.insert(line, utf16_chars); + utf16_chars = Vec::new(); + } + + // Prepare for processing the next line + curr_col = 0.into(); + line += 1; + continue; + } + + if !c.is_ascii() { + utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + c_len }); + } + + curr_col += c_len; + } + + // Save any utf-16 characters seen in the last line + if !utf16_chars.is_empty() { + utf16_lines.insert(line, utf16_chars); + } + + LineIndex { newlines, utf16_lines } + } + + pub fn line_col(&self, offset: TextSize) -> LineCol { + let line = partition_point(&self.newlines, |&it| it <= offset) - 1; + let line_start_offset = self.newlines[line]; + let col = offset - line_start_offset; + + LineCol { line: line as u32, col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32 } + } + + pub fn offset(&self, line_col: LineCol) -> TextSize { + //FIXME: return Result + let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16); + self.newlines[line_col.line as usize] + col + } + + pub fn lines(&self, range: TextRange) -> impl Iterator + '_ { + let lo = partition_point(&self.newlines, |&it| it < range.start()); + let hi = partition_point(&self.newlines, |&it| it <= range.end()); + let all = iter::once(range.start()) + .chain(self.newlines[lo..hi].iter().copied()) + .chain(iter::once(range.end())); + + all.clone() + .zip(all.skip(1)) + .map(|(lo, hi)| TextRange::new(lo, hi)) + .filter(|it| !it.is_empty()) + } + + fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> usize { + let mut res: usize = col.into(); + if let Some(utf16_chars) = self.utf16_lines.get(&line) { + for c in utf16_chars { + if c.end <= col { + res -= usize::from(c.len()) - c.len_utf16(); + } else { + // From here on, all utf16 characters come *after* the character we are mapping, + // so we don't need to take them into account + break; + } + } + } + res + } + + fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize { + if let Some(utf16_chars) = self.utf16_lines.get(&line) { + for c in utf16_chars { + if col > u32::from(c.start) { + col += u32::from(c.len()) - c.len_utf16() as u32; + } else { + // From here on, all utf16 characters come *after* the character we are mapping, + // so we don't need to take them into account + break; + } + } + } + + col.into() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_line_index() { + let text = "hello\nworld"; + let index = LineIndex::new(text); + assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 }); + assert_eq!(index.line_col(1.into()), LineCol { line: 0, col_utf16: 1 }); + assert_eq!(index.line_col(5.into()), LineCol { line: 0, col_utf16: 5 }); + assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 0 }); + assert_eq!(index.line_col(7.into()), LineCol { line: 1, col_utf16: 1 }); + assert_eq!(index.line_col(8.into()), LineCol { line: 1, col_utf16: 2 }); + assert_eq!(index.line_col(10.into()), LineCol { line: 1, col_utf16: 4 }); + 
assert_eq!(index.line_col(11.into()), LineCol { line: 1, col_utf16: 5 }); + assert_eq!(index.line_col(12.into()), LineCol { line: 1, col_utf16: 6 }); + + let text = "\nhello\nworld"; + let index = LineIndex::new(text); + assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 }); + assert_eq!(index.line_col(1.into()), LineCol { line: 1, col_utf16: 0 }); + assert_eq!(index.line_col(2.into()), LineCol { line: 1, col_utf16: 1 }); + assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 5 }); + assert_eq!(index.line_col(7.into()), LineCol { line: 2, col_utf16: 0 }); + } + + #[test] + fn test_char_len() { + assert_eq!('メ'.len_utf8(), 3); + assert_eq!('メ'.len_utf16(), 1); + } + + #[test] + fn test_empty_index() { + let col_index = LineIndex::new( + " +const C: char = 'x'; +", + ); + assert_eq!(col_index.utf16_lines.len(), 0); + } + + #[test] + fn test_single_char() { + let col_index = LineIndex::new( + " +const C: char = 'メ'; +", + ); + + assert_eq!(col_index.utf16_lines.len(), 1); + assert_eq!(col_index.utf16_lines[&1].len(), 1); + assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); + + // UTF-8 to UTF-16, no changes + assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); + + // UTF-8 to UTF-16 + assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20); + + // UTF-16 to UTF-8, no changes + assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15)); + + // UTF-16 to UTF-8 + assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); + + let col_index = LineIndex::new("a𐐏b"); + assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5)); + } + + #[test] + fn test_string() { + let col_index = LineIndex::new( + " +const C: char = \"メ メ\"; +", + ); + + assert_eq!(col_index.utf16_lines.len(), 1); + assert_eq!(col_index.utf16_lines[&1].len(), 2); + assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); + assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() }); + + // UTF-8 to UTF-16 + assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); + + assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19); + assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21); + + assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15); + + // UTF-16 to UTF-8 + assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15)); + + // メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1 + assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20 + assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space + assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24 + + assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15)); + } + + #[test] + fn test_splitlines() { + fn r(lo: u32, hi: u32) -> TextRange { + TextRange::new(lo.into(), hi.into()) + } + + let text = "a\nbb\nccc\n"; + let line_index = LineIndex::new(text); + + let actual = line_index.lines(r(0, 9)).collect::>(); + let expected = vec![r(0, 2), r(2, 5), r(5, 9)]; + assert_eq!(actual, expected); + + let text = ""; + let line_index = LineIndex::new(text); + + let actual = line_index.lines(r(0, 0)).collect::>(); + let expected = vec![]; + assert_eq!(actual, expected); + + let text = "\n"; + let line_index = LineIndex::new(text); + + let actual = line_index.lines(r(0, 1)).collect::>(); + let expected = vec![r(0, 1)]; + assert_eq!(actual, expected) + } +} diff --git a/crates/ide_db/src/search.rs b/crates/ide_db/src/search.rs 
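// Illustrative only (a made-up snippet, assuming `LineIndex` from
// `line_index.rs` above and `syntax::TextSize` are in scope): build the index
// once per file text, then map byte offsets and (line, UTF-16 column) pairs
// both ways.
fn line_index_demo() {
    let text = "let x = 'メ';\nlet y = x;\n";
    let index = LineIndex::new(text);
    // Byte offset 4 is the `x` on the first line; no wide characters precede it.
    let pos = index.line_col(TextSize::from(4));
    assert_eq!((pos.line, pos.col_utf16), (0, 4));
    // `offset` is the inverse mapping, back from (line, UTF-16 column) to a byte offset.
    assert_eq!(index.offset(pos), TextSize::from(4));
}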
new file mode 100644 index 000000000..b9360bf12 --- /dev/null +++ b/crates/ide_db/src/search.rs @@ -0,0 +1,322 @@ +//! Implementation of find-usages functionality. +//! +//! It is based on the standard ide trick: first, we run a fast text search to +//! get a super-set of matches. Then, we we confirm each match using precise +//! name resolution. + +use std::{convert::TryInto, mem}; + +use base_db::{FileId, FileRange, SourceDatabaseExt}; +use hir::{DefWithBody, HasSource, Module, ModuleSource, Semantics, Visibility}; +use once_cell::unsync::Lazy; +use rustc_hash::FxHashMap; +use syntax::{ast, match_ast, AstNode, TextRange, TextSize}; + +use crate::{ + defs::{classify_name_ref, Definition, NameRefClass}, + RootDatabase, +}; + +#[derive(Debug, Clone)] +pub struct Reference { + pub file_range: FileRange, + pub kind: ReferenceKind, + pub access: Option, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum ReferenceKind { + FieldShorthandForField, + FieldShorthandForLocal, + StructLiteral, + Other, +} + +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum ReferenceAccess { + Read, + Write, +} + +/// Generally, `search_scope` returns files that might contain references for the element. +/// For `pub(crate)` things it's a crate, for `pub` things it's a crate and dependant crates. +/// In some cases, the location of the references is known to within a `TextRange`, +/// e.g. for things like local variables. +pub struct SearchScope { + entries: FxHashMap>, +} + +impl SearchScope { + fn new(entries: FxHashMap>) -> SearchScope { + SearchScope { entries } + } + + pub fn empty() -> SearchScope { + SearchScope::new(FxHashMap::default()) + } + + pub fn single_file(file: FileId) -> SearchScope { + SearchScope::new(std::iter::once((file, None)).collect()) + } + + pub fn files(files: &[FileId]) -> SearchScope { + SearchScope::new(files.iter().map(|f| (*f, None)).collect()) + } + + pub fn intersection(&self, other: &SearchScope) -> SearchScope { + let (mut small, mut large) = (&self.entries, &other.entries); + if small.len() > large.len() { + mem::swap(&mut small, &mut large) + } + + let res = small + .iter() + .filter_map(|(file_id, r1)| { + let r2 = large.get(file_id)?; + let r = intersect_ranges(*r1, *r2)?; + Some((*file_id, r)) + }) + .collect(); + + return SearchScope::new(res); + + fn intersect_ranges( + r1: Option, + r2: Option, + ) -> Option> { + match (r1, r2) { + (None, r) | (r, None) => Some(r), + (Some(r1), Some(r2)) => { + let r = r1.intersect(r2)?; + Some(Some(r)) + } + } + } + } +} + +impl IntoIterator for SearchScope { + type Item = (FileId, Option); + type IntoIter = std::collections::hash_map::IntoIter>; + + fn into_iter(self) -> Self::IntoIter { + self.entries.into_iter() + } +} + +impl Definition { + fn search_scope(&self, db: &RootDatabase) -> SearchScope { + let _p = profile::span("search_scope"); + let module = match self.module(db) { + Some(it) => it, + None => return SearchScope::empty(), + }; + let module_src = module.definition_source(db); + let file_id = module_src.file_id.original_file(db); + + if let Definition::Local(var) = self { + let range = match var.parent(db) { + DefWithBody::Function(f) => f.source(db).value.syntax().text_range(), + DefWithBody::Const(c) => c.source(db).value.syntax().text_range(), + DefWithBody::Static(s) => s.source(db).value.syntax().text_range(), + }; + let mut res = FxHashMap::default(); + res.insert(file_id, Some(range)); + return SearchScope::new(res); + } + + let vis = self.visibility(db); + + if let Some(Visibility::Module(module)) = 
vis.and_then(|it| it.into()) { + let module: Module = module.into(); + let mut res = FxHashMap::default(); + + let mut to_visit = vec![module]; + let mut is_first = true; + while let Some(module) = to_visit.pop() { + let src = module.definition_source(db); + let file_id = src.file_id.original_file(db); + match src.value { + ModuleSource::Module(m) => { + if is_first { + let range = Some(m.syntax().text_range()); + res.insert(file_id, range); + } else { + // We have already added the enclosing file to the search scope, + // so do nothing. + } + } + ModuleSource::SourceFile(_) => { + res.insert(file_id, None); + } + }; + is_first = false; + to_visit.extend(module.children(db)); + } + + return SearchScope::new(res); + } + + if let Some(Visibility::Public) = vis { + let source_root_id = db.file_source_root(file_id); + let source_root = db.source_root(source_root_id); + let mut res = source_root.iter().map(|id| (id, None)).collect::>(); + + let krate = module.krate(); + for rev_dep in krate.reverse_dependencies(db) { + let root_file = rev_dep.root_file(db); + let source_root_id = db.file_source_root(root_file); + let source_root = db.source_root(source_root_id); + res.extend(source_root.iter().map(|id| (id, None))); + } + return SearchScope::new(res); + } + + let mut res = FxHashMap::default(); + let range = match module_src.value { + ModuleSource::Module(m) => Some(m.syntax().text_range()), + ModuleSource::SourceFile(_) => None, + }; + res.insert(file_id, range); + SearchScope::new(res) + } + + pub fn find_usages( + &self, + sema: &Semantics, + search_scope: Option, + ) -> Vec { + let _p = profile::span("Definition::find_usages"); + + let search_scope = { + let base = self.search_scope(sema.db); + match search_scope { + None => base, + Some(scope) => base.intersection(&scope), + } + }; + + let name = match self.name(sema.db) { + None => return Vec::new(), + Some(it) => it.to_string(), + }; + + let pat = name.as_str(); + let mut refs = vec![]; + + for (file_id, search_range) in search_scope { + let text = sema.db.file_text(file_id); + let search_range = + search_range.unwrap_or(TextRange::up_to(TextSize::of(text.as_str()))); + + let tree = Lazy::new(|| sema.parse(file_id).syntax().clone()); + + for (idx, _) in text.match_indices(pat) { + let offset: TextSize = idx.try_into().unwrap(); + if !search_range.contains_inclusive(offset) { + continue; + } + + let name_ref: ast::NameRef = + if let Some(name_ref) = sema.find_node_at_offset_with_descend(&tree, offset) { + name_ref + } else { + continue; + }; + + match classify_name_ref(&sema, &name_ref) { + Some(NameRefClass::Definition(def)) if &def == self => { + let kind = if is_record_lit_name_ref(&name_ref) + || is_call_expr_name_ref(&name_ref) + { + ReferenceKind::StructLiteral + } else { + ReferenceKind::Other + }; + + let file_range = sema.original_range(name_ref.syntax()); + refs.push(Reference { + file_range, + kind, + access: reference_access(&def, &name_ref), + }); + } + Some(NameRefClass::FieldShorthand { local, field }) => { + match self { + Definition::Field(_) if &field == self => refs.push(Reference { + file_range: sema.original_range(name_ref.syntax()), + kind: ReferenceKind::FieldShorthandForField, + access: reference_access(&field, &name_ref), + }), + Definition::Local(l) if &local == l => refs.push(Reference { + file_range: sema.original_range(name_ref.syntax()), + kind: ReferenceKind::FieldShorthandForLocal, + access: reference_access(&Definition::Local(local), &name_ref), + }), + + _ => {} // not a usage + }; + } + _ => {} // not a 
usage + } + } + } + refs + } +} + +fn reference_access(def: &Definition, name_ref: &ast::NameRef) -> Option { + // Only Locals and Fields have accesses for now. + match def { + Definition::Local(_) | Definition::Field(_) => {} + _ => return None, + }; + + let mode = name_ref.syntax().ancestors().find_map(|node| { + match_ast! { + match (node) { + ast::BinExpr(expr) => { + if expr.op_kind()?.is_assignment() { + // If the variable or field ends on the LHS's end then it's a Write (covers fields and locals). + // FIXME: This is not terribly accurate. + if let Some(lhs) = expr.lhs() { + if lhs.syntax().text_range().end() == name_ref.syntax().text_range().end() { + return Some(ReferenceAccess::Write); + } + } + } + Some(ReferenceAccess::Read) + }, + _ => None + } + } + }); + + // Default Locals and Fields to read + mode.or(Some(ReferenceAccess::Read)) +} + +fn is_call_expr_name_ref(name_ref: &ast::NameRef) -> bool { + name_ref + .syntax() + .ancestors() + .find_map(ast::CallExpr::cast) + .and_then(|c| match c.expr()? { + ast::Expr::PathExpr(p) => { + Some(p.path()?.segment()?.name_ref().as_ref() == Some(name_ref)) + } + _ => None, + }) + .unwrap_or(false) +} + +fn is_record_lit_name_ref(name_ref: &ast::NameRef) -> bool { + name_ref + .syntax() + .ancestors() + .find_map(ast::RecordExpr::cast) + .and_then(|l| l.path()) + .and_then(|p| p.segment()) + .map(|p| p.name_ref().as_ref() == Some(name_ref)) + .unwrap_or(false) +} diff --git a/crates/ide_db/src/source_change.rs b/crates/ide_db/src/source_change.rs new file mode 100644 index 000000000..f1590ec66 --- /dev/null +++ b/crates/ide_db/src/source_change.rs @@ -0,0 +1,59 @@ +//! This modules defines type to represent changes to the source code, that flow +//! from the server to the client. +//! +//! It can be viewed as a dual for `AnalysisChange`. + +use base_db::FileId; +use text_edit::TextEdit; + +#[derive(Default, Debug, Clone)] +pub struct SourceChange { + pub source_file_edits: Vec, + pub file_system_edits: Vec, + pub is_snippet: bool, +} + +impl SourceChange { + /// Creates a new SourceChange with the given label + /// from the edits. + pub fn from_edits( + source_file_edits: Vec, + file_system_edits: Vec, + ) -> Self { + SourceChange { source_file_edits, file_system_edits, is_snippet: false } + } +} + +#[derive(Debug, Clone)] +pub struct SourceFileEdit { + pub file_id: FileId, + pub edit: TextEdit, +} + +impl From for SourceChange { + fn from(edit: SourceFileEdit) -> SourceChange { + vec![edit].into() + } +} + +impl From> for SourceChange { + fn from(source_file_edits: Vec) -> SourceChange { + SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false } + } +} + +#[derive(Debug, Clone)] +pub enum FileSystemEdit { + CreateFile { anchor: FileId, dst: String }, + MoveFile { src: FileId, anchor: FileId, dst: String }, +} + +impl From for SourceChange { + fn from(edit: FileSystemEdit) -> SourceChange { + SourceChange { + source_file_edits: Vec::new(), + file_system_edits: vec![edit], + is_snippet: false, + } + } +} diff --git a/crates/ide_db/src/symbol_index.rs b/crates/ide_db/src/symbol_index.rs new file mode 100644 index 000000000..654df898e --- /dev/null +++ b/crates/ide_db/src/symbol_index.rs @@ -0,0 +1,429 @@ +//! This module handles fuzzy-searching of functions, structs and other symbols +//! by name across the whole workspace and dependencies. +//! +//! It works by building an incrementally-updated text-search index of all +//! symbols. The backbone of the index is the **awesome** `fst` crate by +//! @BurntSushi. 
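//!
//! For a concrete feel of that building block (key names are made up; this is
//! not rust-analyzer code), subsequence search over an `fst` set looks roughly
//! like this:
//!
//!     use fst::{automaton::Subsequence, IntoStreamer, Set, Streamer};
//!
//!     fn demo() {
//!         // Keys must be handed to `from_iter` in sorted order.
//!         let set = Set::from_iter(vec!["foo_bar", "foobar", "frobnicate"]).unwrap();
//!         // The automaton accepts every key containing 'f' and then 'b', in order.
//!         let mut stream = set.search(Subsequence::new("fb")).into_stream();
//!         while let Some(key) = stream.next() {
//!             println!("{}", String::from_utf8_lossy(key));
//!         }
//!     }
//!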
+//! +//! In a nutshell, you give a set of strings to `fst`, and it builds a +//! finite state machine describing this set of strings. The strings which +//! could fuzzy-match a pattern can also be described by a finite state machine. +//! What is freaking cool is that you can now traverse both state machines in +//! lock-step to enumerate the strings which are both in the input set and +//! fuzz-match the query. Or, more formally, given two languages described by +//! FSTs, one can build a product FST which describes the intersection of the +//! languages. +//! +//! `fst` does not support cheap updating of the index, but it supports unioning +//! of state machines. So, to account for changing source code, we build an FST +//! for each library (which is assumed to never change) and an FST for each Rust +//! file in the current workspace, and run a query against the union of all +//! those FSTs. + +use std::{ + cmp::Ordering, + fmt, + hash::{Hash, Hasher}, + mem, + sync::Arc, +}; + +use base_db::{ + salsa::{self, ParallelDatabase}, + CrateId, FileId, SourceDatabaseExt, SourceRootId, +}; +use fst::{self, Streamer}; +use hir::db::DefDatabase; +use rayon::prelude::*; +use rustc_hash::{FxHashMap, FxHashSet}; +use syntax::{ + ast::{self, NameOwner}, + match_ast, AstNode, Parse, SmolStr, SourceFile, + SyntaxKind::{self, *}, + SyntaxNode, SyntaxNodePtr, TextRange, WalkEvent, +}; + +use crate::RootDatabase; + +#[derive(Debug)] +pub struct Query { + query: String, + lowercased: String, + only_types: bool, + libs: bool, + exact: bool, + limit: usize, +} + +impl Query { + pub fn new(query: String) -> Query { + let lowercased = query.to_lowercase(); + Query { + query, + lowercased, + only_types: false, + libs: false, + exact: false, + limit: usize::max_value(), + } + } + + pub fn only_types(&mut self) { + self.only_types = true; + } + + pub fn libs(&mut self) { + self.libs = true; + } + + pub fn exact(&mut self) { + self.exact = true; + } + + pub fn limit(&mut self, limit: usize) { + self.limit = limit + } +} + +#[salsa::query_group(SymbolsDatabaseStorage)] +pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt { + fn file_symbols(&self, file_id: FileId) -> Arc; + fn library_symbols(&self) -> Arc>; + /// The set of "local" (that is, from the current workspace) roots. + /// Files in local roots are assumed to change frequently. + #[salsa::input] + fn local_roots(&self) -> Arc>; + /// The set of roots for crates.io libraries. + /// Files in libraries are assumed to never change. 
+ #[salsa::input] + fn library_roots(&self) -> Arc>; +} + +fn library_symbols(db: &dyn SymbolsDatabase) -> Arc> { + let _p = profile::span("library_symbols"); + + let roots = db.library_roots(); + let res = roots + .iter() + .map(|&root_id| { + let root = db.source_root(root_id); + let files = root + .iter() + .map(|it| (it, SourceDatabaseExt::file_text(db, it))) + .collect::>(); + let symbol_index = SymbolIndex::for_files( + files.into_par_iter().map(|(file, text)| (file, SourceFile::parse(&text))), + ); + (root_id, symbol_index) + }) + .collect(); + Arc::new(res) +} + +fn file_symbols(db: &dyn SymbolsDatabase, file_id: FileId) -> Arc { + db.check_canceled(); + let parse = db.parse(file_id); + + let symbols = source_file_to_file_symbols(&parse.tree(), file_id); + + // FIXME: add macros here + + Arc::new(SymbolIndex::new(symbols)) +} + +/// Need to wrap Snapshot to provide `Clone` impl for `map_with` +struct Snap(DB); +impl Clone for Snap> { + fn clone(&self) -> Snap> { + Snap(self.0.snapshot()) + } +} + +// Feature: Workspace Symbol +// +// Uses fuzzy-search to find types, modules and functions by name across your +// project and dependencies. This is **the** most useful feature, which improves code +// navigation tremendously. It mostly works on top of the built-in LSP +// functionality, however `#` and `*` symbols can be used to narrow down the +// search. Specifically, +// +// - `Foo` searches for `Foo` type in the current workspace +// - `foo#` searches for `foo` function in the current workspace +// - `Foo*` searches for `Foo` type among dependencies, including `stdlib` +// - `foo#*` searches for `foo` function among dependencies +// +// That is, `#` switches from "types" to all symbols, `*` switches from the current +// workspace to dependencies. +// +// |=== +// | Editor | Shortcut +// +// | VS Code | kbd:[Ctrl+T] +// |=== +pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec { + let _p = profile::span("world_symbols").detail(|| query.query.clone()); + + let tmp1; + let tmp2; + let buf: Vec<&SymbolIndex> = if query.libs { + tmp1 = db.library_symbols(); + tmp1.values().collect() + } else { + let mut files = Vec::new(); + for &root in db.local_roots().iter() { + let sr = db.source_root(root); + files.extend(sr.iter()) + } + + let snap = Snap(db.snapshot()); + tmp2 = files + .par_iter() + .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) + .collect::>(); + tmp2.iter().map(|it| &**it).collect() + }; + query.search(&buf) +} + +pub fn crate_symbols(db: &RootDatabase, krate: CrateId, query: Query) -> Vec { + // FIXME(#4842): This now depends on CrateDefMap, why not build the entire symbol index from + // that instead? 
+ + let def_map = db.crate_def_map(krate); + let mut files = Vec::new(); + let mut modules = vec![def_map.root]; + while let Some(module) = modules.pop() { + let data = &def_map[module]; + files.extend(data.origin.file_id()); + modules.extend(data.children.values()); + } + + let snap = Snap(db.snapshot()); + + let buf = files + .par_iter() + .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) + .collect::>(); + let buf = buf.iter().map(|it| &**it).collect::>(); + + query.search(&buf) +} + +pub fn index_resolve(db: &RootDatabase, name_ref: &ast::NameRef) -> Vec { + let name = name_ref.text(); + let mut query = Query::new(name.to_string()); + query.exact(); + query.limit(4); + world_symbols(db, query) +} + +#[derive(Default)] +pub struct SymbolIndex { + symbols: Vec, + map: fst::Map>, +} + +impl fmt::Debug for SymbolIndex { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("SymbolIndex").field("n_symbols", &self.symbols.len()).finish() + } +} + +impl PartialEq for SymbolIndex { + fn eq(&self, other: &SymbolIndex) -> bool { + self.symbols == other.symbols + } +} + +impl Eq for SymbolIndex {} + +impl Hash for SymbolIndex { + fn hash(&self, hasher: &mut H) { + self.symbols.hash(hasher) + } +} + +impl SymbolIndex { + fn new(mut symbols: Vec) -> SymbolIndex { + fn cmp(lhs: &FileSymbol, rhs: &FileSymbol) -> Ordering { + let lhs_chars = lhs.name.chars().map(|c| c.to_ascii_lowercase()); + let rhs_chars = rhs.name.chars().map(|c| c.to_ascii_lowercase()); + lhs_chars.cmp(rhs_chars) + } + + symbols.par_sort_by(cmp); + + let mut builder = fst::MapBuilder::memory(); + + let mut last_batch_start = 0; + + for idx in 0..symbols.len() { + if let Some(next_symbol) = symbols.get(idx + 1) { + if cmp(&symbols[last_batch_start], next_symbol) == Ordering::Equal { + continue; + } + } + + let start = last_batch_start; + let end = idx + 1; + last_batch_start = end; + + let key = symbols[start].name.as_str().to_ascii_lowercase(); + let value = SymbolIndex::range_to_map_value(start, end); + + builder.insert(key, value).unwrap(); + } + + let map = fst::Map::new(builder.into_inner().unwrap()).unwrap(); + SymbolIndex { symbols, map } + } + + pub fn len(&self) -> usize { + self.symbols.len() + } + + pub fn memory_size(&self) -> usize { + self.map.as_fst().size() + self.symbols.len() * mem::size_of::() + } + + pub(crate) fn for_files( + files: impl ParallelIterator)>, + ) -> SymbolIndex { + let symbols = files + .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id)) + .collect::>(); + SymbolIndex::new(symbols) + } + + fn range_to_map_value(start: usize, end: usize) -> u64 { + debug_assert![start <= (std::u32::MAX as usize)]; + debug_assert![end <= (std::u32::MAX as usize)]; + + ((start as u64) << 32) | end as u64 + } + + fn map_value_to_range(value: u64) -> (usize, usize) { + let end = value as u32 as usize; + let start = (value >> 32) as usize; + (start, end) + } +} + +impl Query { + pub(crate) fn search(self, indices: &[&SymbolIndex]) -> Vec { + let mut op = fst::map::OpBuilder::new(); + for file_symbols in indices.iter() { + let automaton = fst::automaton::Subsequence::new(&self.lowercased); + op = op.add(file_symbols.map.search(automaton)) + } + let mut stream = op.union(); + let mut res = Vec::new(); + while let Some((_, indexed_values)) = stream.next() { + for indexed_value in indexed_values { + let symbol_index = &indices[indexed_value.index]; + let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value); + + for symbol in 
&symbol_index.symbols[start..end] { + if self.only_types && !is_type(symbol.kind) { + continue; + } + if self.exact && symbol.name != self.query { + continue; + } + + res.push(symbol.clone()); + if res.len() >= self.limit { + return res; + } + } + } + } + res + } +} + +fn is_type(kind: SyntaxKind) -> bool { + matches!(kind, STRUCT | ENUM | TRAIT | TYPE_ALIAS) +} + +/// The actual data that is stored in the index. It should be as compact as +/// possible. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct FileSymbol { + pub file_id: FileId, + pub name: SmolStr, + pub kind: SyntaxKind, + pub range: TextRange, + pub ptr: SyntaxNodePtr, + pub name_range: Option, + pub container_name: Option, +} + +fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec { + let mut symbols = Vec::new(); + let mut stack = Vec::new(); + + for event in source_file.syntax().preorder() { + match event { + WalkEvent::Enter(node) => { + if let Some(mut symbol) = to_file_symbol(&node, file_id) { + symbol.container_name = stack.last().cloned(); + + stack.push(symbol.name.clone()); + symbols.push(symbol); + } + } + + WalkEvent::Leave(node) => { + if to_symbol(&node).is_some() { + stack.pop(); + } + } + } + } + + symbols +} + +fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { + fn decl(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { + let name = node.name()?; + let name_range = name.syntax().text_range(); + let name = name.text().clone(); + let ptr = SyntaxNodePtr::new(node.syntax()); + + Some((name, ptr, name_range)) + } + match_ast! { + match node { + ast::Fn(it) => decl(it), + ast::Struct(it) => decl(it), + ast::Enum(it) => decl(it), + ast::Trait(it) => decl(it), + ast::Module(it) => decl(it), + ast::TypeAlias(it) => decl(it), + ast::Const(it) => decl(it), + ast::Static(it) => decl(it), + ast::MacroCall(it) => { + if it.is_macro_rules().is_some() { + decl(it) + } else { + None + } + }, + _ => None, + } + } +} + +fn to_file_symbol(node: &SyntaxNode, file_id: FileId) -> Option { + to_symbol(node).map(move |(name, ptr, name_range)| FileSymbol { + name, + kind: node.kind(), + range: node.text_range(), + ptr, + file_id, + name_range: Some(name_range), + container_name: None, + }) +} diff --git a/crates/ide_db/src/wasm_shims.rs b/crates/ide_db/src/wasm_shims.rs new file mode 100644 index 000000000..7af9f9d9b --- /dev/null +++ b/crates/ide_db/src/wasm_shims.rs @@ -0,0 +1,19 @@ +//! A version of `std::time::Instant` that doesn't panic in WASM. + +#[cfg(not(feature = "wasm"))] +pub use std::time::Instant; + +#[cfg(feature = "wasm")] +#[derive(Clone, Copy, Debug)] +pub struct Instant; + +#[cfg(feature = "wasm")] +impl Instant { + pub fn now() -> Self { + Self + } + + pub fn elapsed(&self) -> std::time::Duration { + std::time::Duration::new(0, 0) + } +} -- cgit v1.2.3 From fc34403018079ea053f26d0a31b7517053c7dd8c Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Thu, 13 Aug 2020 17:33:38 +0200 Subject: Rename ra_assists -> assists --- crates/ide_db/src/imports_locator.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'crates/ide_db') diff --git a/crates/ide_db/src/imports_locator.rs b/crates/ide_db/src/imports_locator.rs index 1d0c20291..ed67e3553 100644 --- a/crates/ide_db/src/imports_locator.rs +++ b/crates/ide_db/src/imports_locator.rs @@ -1,5 +1,5 @@ -//! This module contains an import search funcionality that is provided to the ra_assists module. -//! 
Later, this should be moved away to a separate crate that is accessible from the ra_assists module. +//! This module contains an import search funcionality that is provided to the assists module. +//! Later, this should be moved away to a separate crate that is accessible from the assists module. use hir::{Crate, MacroDef, ModuleDef, Semantics}; use syntax::{ast, AstNode, SyntaxKind::NAME}; -- cgit v1.2.3 From 1b0c7701cc97cd7bef8bb9729011d4cf291a60c5 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Thu, 13 Aug 2020 17:42:52 +0200 Subject: Rename ra_ide -> ide --- crates/ide_db/Cargo.toml | 2 +- crates/ide_db/src/defs.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'crates/ide_db') diff --git a/crates/ide_db/Cargo.toml b/crates/ide_db/Cargo.toml index 885212162..692fb6415 100644 --- a/crates/ide_db/Cargo.toml +++ b/crates/ide_db/Cargo.toml @@ -25,6 +25,6 @@ text_edit = { path = "../text_edit" } base_db = { path = "../base_db" } profile = { path = "../profile" } test_utils = { path = "../test_utils" } -# ra_ide should depend only on the top-level `hir` package. if you need +# ide should depend only on the top-level `hir` package. if you need # something from some `hir_xxx` subpackage, reexport the API via `hir`. hir = { path = "../hir" } diff --git a/crates/ide_db/src/defs.rs b/crates/ide_db/src/defs.rs index 7b5d6ac49..0d0affc27 100644 --- a/crates/ide_db/src/defs.rs +++ b/crates/ide_db/src/defs.rs @@ -243,7 +243,7 @@ impl NameRefClass { } // Note: we don't have unit-tests for this rather important function. -// It is primarily exercised via goto definition tests in `ra_ide`. +// It is primarily exercised via goto definition tests in `ide`. pub fn classify_name_ref( sema: &Semantics, name_ref: &ast::NameRef, -- cgit v1.2.3 From aa1a7a5414e59c7f1c6e74002df1b6a04937459e Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Tue, 18 Aug 2020 16:41:21 +0200 Subject: Introduce Label --- crates/ide_db/src/label.rs | 49 ++++++++++++++++++++++++++++++++++++++++++++++ crates/ide_db/src/lib.rs | 1 + 2 files changed, 50 insertions(+) create mode 100644 crates/ide_db/src/label.rs (limited to 'crates/ide_db') diff --git a/crates/ide_db/src/label.rs b/crates/ide_db/src/label.rs new file mode 100644 index 000000000..c0e89e72f --- /dev/null +++ b/crates/ide_db/src/label.rs @@ -0,0 +1,49 @@ +//! See `Label` +use std::fmt; + +/// A type to specify UI label, like an entry in the list of assists. Enforces +/// proper casing: +/// +/// Frobnicate bar +/// +/// Note the upper-case first letter and the absence of `.` at the end. +#[derive(Clone)] +pub struct Label(String); + +impl PartialEq for Label { + fn eq(&self, other: &str) -> bool { + self.0 == other + } +} + +impl PartialEq<&'_ str> for Label { + fn eq(&self, other: &&str) -> bool { + self == *other + } +} + +impl From