From ae71a631fd657368e8593feb5e025d23147afe60 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Thu, 13 Aug 2020 16:36:55 +0200 Subject: Rename ra_hir -> hir --- crates/hir/src/code_model.rs | 1712 +++++++++++++++++++++++++++++ crates/hir/src/db.rs | 21 + crates/hir/src/diagnostics.rs | 6 + crates/hir/src/from_id.rs | 247 +++ crates/hir/src/has_source.rs | 135 +++ crates/hir/src/lib.rs | 59 + crates/hir/src/semantics.rs | 840 ++++++++++++++ crates/hir/src/semantics/source_to_def.rs | 275 +++ crates/hir/src/source_analyzer.rs | 534 +++++++++ 9 files changed, 3829 insertions(+) create mode 100644 crates/hir/src/code_model.rs create mode 100644 crates/hir/src/db.rs create mode 100644 crates/hir/src/diagnostics.rs create mode 100644 crates/hir/src/from_id.rs create mode 100644 crates/hir/src/has_source.rs create mode 100644 crates/hir/src/lib.rs create mode 100644 crates/hir/src/semantics.rs create mode 100644 crates/hir/src/semantics/source_to_def.rs create mode 100644 crates/hir/src/source_analyzer.rs diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs new file mode 100644 index 000000000..8ffb9e99b --- /dev/null +++ b/crates/hir/src/code_model.rs @@ -0,0 +1,1712 @@ +//! FIXME: write short doc here +use std::{iter, sync::Arc}; + +use arrayvec::ArrayVec; +use base_db::{CrateId, Edition, FileId}; +use either::Either; +use hir_def::{ + adt::ReprKind, + adt::StructKind, + adt::VariantData, + builtin_type::BuiltinType, + docs::Documentation, + expr::{BindingAnnotation, Pat, PatId}, + import_map, + per_ns::PerNs, + resolver::{HasResolver, Resolver}, + src::HasSource as _, + type_ref::{Mutability, TypeRef}, + AdtId, AssocContainerId, ConstId, DefWithBodyId, EnumId, FunctionId, GenericDefId, HasModule, + ImplId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StaticId, StructId, + TraitId, TypeAliasId, TypeParamId, UnionId, +}; +use hir_expand::{ + diagnostics::DiagnosticSink, + name::{name, AsName}, + MacroDefId, MacroDefKind, +}; +use hir_ty::{ + autoderef, + display::{HirDisplayError, HirFormatter}, + method_resolution, ApplicationTy, CallableDefId, Canonical, FnSig, GenericPredicate, + InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor, +}; +use rustc_hash::FxHashSet; +use stdx::impl_from; +use syntax::{ + ast::{self, AttrsOwner, NameOwner}, + AstNode, +}; + +use crate::{ + db::{DefDatabase, HirDatabase}, + has_source::HasSource, + HirDisplay, InFile, Name, +}; + +/// hir::Crate describes a single crate. It's the main interface with which +/// a crate's dependencies interact. Mostly, it should be just a proxy for the +/// root module. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Crate { + pub(crate) id: CrateId, +} + +#[derive(Debug)] +pub struct CrateDependency { + pub krate: Crate, + pub name: Name, +} + +impl Crate { + pub fn dependencies(self, db: &dyn HirDatabase) -> Vec { + db.crate_graph()[self.id] + .dependencies + .iter() + .map(|dep| { + let krate = Crate { id: dep.crate_id }; + let name = dep.as_name(); + CrateDependency { krate, name } + }) + .collect() + } + + // FIXME: add `transitive_reverse_dependencies`. 
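As a quick orientation for how this surface is meant to be consumed from the IDE layer, a minimal sketch follows. It assumes a `db: &dyn HirDatabase` handle is already available (obtaining one is outside this crate), and `print_direct_deps` is an illustrative helper, not an API of `hir`; it only calls `Crate::all` and `Crate::dependencies`, both defined on `Crate` in this file.

    use hir::{db::HirDatabase, Crate};

    // Print the direct dependencies of every crate in the crate graph.
    fn print_direct_deps(db: &dyn HirDatabase) {
        for krate in Crate::all(db) {
            for dep in krate.dependencies(db) {
                // `dep.name` is the name under which the dependency is imported.
                println!("dependency: {}", dep.name);
            }
        }
    }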
+ pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec { + let crate_graph = db.crate_graph(); + crate_graph + .iter() + .filter(|&krate| { + crate_graph[krate].dependencies.iter().any(|it| it.crate_id == self.id) + }) + .map(|id| Crate { id }) + .collect() + } + + pub fn root_module(self, db: &dyn HirDatabase) -> Module { + let module_id = db.crate_def_map(self.id).root; + Module::new(self, module_id) + } + + pub fn root_file(self, db: &dyn HirDatabase) -> FileId { + db.crate_graph()[self.id].root_file_id + } + + pub fn edition(self, db: &dyn HirDatabase) -> Edition { + db.crate_graph()[self.id].edition + } + + pub fn display_name(self, db: &dyn HirDatabase) -> Option { + db.crate_graph()[self.id].display_name.clone() + } + + pub fn query_external_importables( + self, + db: &dyn DefDatabase, + query: &str, + ) -> impl Iterator> { + import_map::search_dependencies( + db, + self.into(), + import_map::Query::new(query).anchor_end().case_sensitive().limit(40), + ) + .into_iter() + .map(|item| match item { + ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id.into()), + ItemInNs::Macros(mac_id) => Either::Right(mac_id.into()), + }) + } + + pub fn all(db: &dyn HirDatabase) -> Vec { + db.crate_graph().iter().map(|id| Crate { id }).collect() + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Module { + pub(crate) id: ModuleId, +} + +/// The defs which can be visible in the module. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ModuleDef { + Module(Module), + Function(Function), + Adt(Adt), + // Can't be directly declared, but can be imported. + EnumVariant(EnumVariant), + Const(Const), + Static(Static), + Trait(Trait), + TypeAlias(TypeAlias), + BuiltinType(BuiltinType), +} +impl_from!( + Module, + Function, + Adt(Struct, Enum, Union), + EnumVariant, + Const, + Static, + Trait, + TypeAlias, + BuiltinType + for ModuleDef +); + +impl ModuleDef { + pub fn module(self, db: &dyn HirDatabase) -> Option { + match self { + ModuleDef::Module(it) => it.parent(db), + ModuleDef::Function(it) => Some(it.module(db)), + ModuleDef::Adt(it) => Some(it.module(db)), + ModuleDef::EnumVariant(it) => Some(it.module(db)), + ModuleDef::Const(it) => Some(it.module(db)), + ModuleDef::Static(it) => Some(it.module(db)), + ModuleDef::Trait(it) => Some(it.module(db)), + ModuleDef::TypeAlias(it) => Some(it.module(db)), + ModuleDef::BuiltinType(_) => None, + } + } + + pub fn definition_visibility(&self, db: &dyn HirDatabase) -> Option { + let module = match self { + ModuleDef::Module(it) => it.parent(db)?, + ModuleDef::Function(it) => return Some(it.visibility(db)), + ModuleDef::Adt(it) => it.module(db), + ModuleDef::EnumVariant(it) => { + let parent = it.parent_enum(db); + let module = it.module(db); + return module.visibility_of(db, &ModuleDef::Adt(Adt::Enum(parent))); + } + ModuleDef::Const(it) => return Some(it.visibility(db)), + ModuleDef::Static(it) => it.module(db), + ModuleDef::Trait(it) => it.module(db), + ModuleDef::TypeAlias(it) => return Some(it.visibility(db)), + ModuleDef::BuiltinType(_) => return None, + }; + + module.visibility_of(db, self) + } + + pub fn name(self, db: &dyn HirDatabase) -> Option { + match self { + ModuleDef::Adt(it) => Some(it.name(db)), + ModuleDef::Trait(it) => Some(it.name(db)), + ModuleDef::Function(it) => Some(it.name(db)), + ModuleDef::EnumVariant(it) => Some(it.name(db)), + ModuleDef::TypeAlias(it) => Some(it.name(db)), + + ModuleDef::Module(it) => it.name(db), + ModuleDef::Const(it) => it.name(db), + 
ModuleDef::Static(it) => it.name(db), + + ModuleDef::BuiltinType(it) => Some(it.as_name()), + } + } +} + +pub use hir_def::{ + attr::Attrs, item_scope::ItemInNs, item_tree::ItemTreeNode, visibility::Visibility, + AssocItemId, AssocItemLoc, +}; + +impl Module { + pub(crate) fn new(krate: Crate, crate_module_id: LocalModuleId) -> Module { + Module { id: ModuleId { krate: krate.id, local_id: crate_module_id } } + } + + /// Name of this module. + pub fn name(self, db: &dyn HirDatabase) -> Option { + let def_map = db.crate_def_map(self.id.krate); + let parent = def_map[self.id.local_id].parent?; + def_map[parent].children.iter().find_map(|(name, module_id)| { + if *module_id == self.id.local_id { + Some(name.clone()) + } else { + None + } + }) + } + + /// Returns the crate this module is part of. + pub fn krate(self) -> Crate { + Crate { id: self.id.krate } + } + + /// Topmost parent of this module. Every module has a `crate_root`, but some + /// might be missing `krate`. This can happen if a module's file is not included + /// in the module tree of any target in `Cargo.toml`. + pub fn crate_root(self, db: &dyn HirDatabase) -> Module { + let def_map = db.crate_def_map(self.id.krate); + self.with_module_id(def_map.root) + } + + /// Iterates over all child modules. + pub fn children(self, db: &dyn HirDatabase) -> impl Iterator { + let def_map = db.crate_def_map(self.id.krate); + let children = def_map[self.id.local_id] + .children + .iter() + .map(|(_, module_id)| self.with_module_id(*module_id)) + .collect::>(); + children.into_iter() + } + + /// Finds a parent module. + pub fn parent(self, db: &dyn HirDatabase) -> Option { + let def_map = db.crate_def_map(self.id.krate); + let parent_id = def_map[self.id.local_id].parent?; + Some(self.with_module_id(parent_id)) + } + + pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec { + let mut res = vec![self]; + let mut curr = self; + while let Some(next) = curr.parent(db) { + res.push(next); + curr = next + } + res + } + + /// Returns a `ModuleScope`: a set of items, visible in this module. 
+ pub fn scope( + self, + db: &dyn HirDatabase, + visible_from: Option, + ) -> Vec<(Name, ScopeDef)> { + db.crate_def_map(self.id.krate)[self.id.local_id] + .scope + .entries() + .filter_map(|(name, def)| { + if let Some(m) = visible_from { + let filtered = + def.filter_visibility(|vis| vis.is_visible_from(db.upcast(), m.id)); + if filtered.is_none() && !def.is_none() { + None + } else { + Some((name, filtered)) + } + } else { + Some((name, def)) + } + }) + .flat_map(|(name, def)| { + ScopeDef::all_items(def).into_iter().map(move |item| (name.clone(), item)) + }) + .collect() + } + + pub fn visibility_of(self, db: &dyn HirDatabase, def: &ModuleDef) -> Option { + db.crate_def_map(self.id.krate)[self.id.local_id].scope.visibility_of(def.clone().into()) + } + + pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { + let _p = profile::span("Module::diagnostics"); + let crate_def_map = db.crate_def_map(self.id.krate); + crate_def_map.add_diagnostics(db.upcast(), self.id.local_id, sink); + for decl in self.declarations(db) { + match decl { + crate::ModuleDef::Function(f) => f.diagnostics(db, sink), + crate::ModuleDef::Module(m) => { + // Only add diagnostics from inline modules + if crate_def_map[m.id.local_id].origin.is_inline() { + m.diagnostics(db, sink) + } + } + _ => (), + } + } + + for impl_def in self.impl_defs(db) { + for item in impl_def.items(db) { + if let AssocItem::Function(f) = item { + f.diagnostics(db, sink); + } + } + } + } + + pub fn declarations(self, db: &dyn HirDatabase) -> Vec { + let def_map = db.crate_def_map(self.id.krate); + def_map[self.id.local_id].scope.declarations().map(ModuleDef::from).collect() + } + + pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec { + let def_map = db.crate_def_map(self.id.krate); + def_map[self.id.local_id].scope.impls().map(ImplDef::from).collect() + } + + pub(crate) fn with_module_id(self, module_id: LocalModuleId) -> Module { + Module::new(self.krate(), module_id) + } + + /// Finds a path that can be used to refer to the given item from within + /// this module, if possible. + pub fn find_use_path( + self, + db: &dyn DefDatabase, + item: impl Into, + ) -> Option { + hir_def::find_path::find_path(db, item.into(), self.into()) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Field { + pub(crate) parent: VariantDef, + pub(crate) id: LocalFieldId, +} + +#[derive(Debug, PartialEq, Eq)] +pub enum FieldSource { + Named(ast::RecordField), + Pos(ast::TupleField), +} + +impl Field { + pub fn name(&self, db: &dyn HirDatabase) -> Name { + self.parent.variant_data(db).fields()[self.id].name.clone() + } + + /// Returns the type as in the signature of the struct (i.e., with + /// placeholder types for type parameters). This is good for showing + /// signature help, but not so good to actually get the type of the field + /// when you actually have a variable of the struct. 
+ pub fn signature_ty(&self, db: &dyn HirDatabase) -> Type { + let var_id = self.parent.into(); + let generic_def_id: GenericDefId = match self.parent { + VariantDef::Struct(it) => it.id.into(), + VariantDef::Union(it) => it.id.into(), + VariantDef::EnumVariant(it) => it.parent.id.into(), + }; + let substs = Substs::type_params(db, generic_def_id); + let ty = db.field_types(var_id)[self.id].clone().subst(&substs); + Type::new(db, self.parent.module(db).id.krate, var_id, ty) + } + + pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef { + self.parent + } +} + +impl HasVisibility for Field { + fn visibility(&self, db: &dyn HirDatabase) -> Visibility { + let variant_data = self.parent.variant_data(db); + let visibility = &variant_data.fields()[self.id].visibility; + let parent_id: hir_def::VariantId = self.parent.into(); + visibility.resolve(db.upcast(), &parent_id.resolver(db.upcast())) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Struct { + pub(crate) id: StructId, +} + +impl Struct { + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.struct_data(self.id).name.clone() + } + + pub fn fields(self, db: &dyn HirDatabase) -> Vec { + db.struct_data(self.id) + .variant_data + .fields() + .iter() + .map(|(id, _)| Field { parent: self.into(), id }) + .collect() + } + + pub fn ty(self, db: &dyn HirDatabase) -> Type { + Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id) + } + + pub fn repr(self, db: &dyn HirDatabase) -> Option { + db.struct_data(self.id).repr.clone() + } + + fn variant_data(self, db: &dyn HirDatabase) -> Arc { + db.struct_data(self.id).variant_data.clone() + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Union { + pub(crate) id: UnionId, +} + +impl Union { + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.union_data(self.id).name.clone() + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) } + } + + pub fn ty(self, db: &dyn HirDatabase) -> Type { + Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id) + } + + pub fn fields(self, db: &dyn HirDatabase) -> Vec { + db.union_data(self.id) + .variant_data + .fields() + .iter() + .map(|(id, _)| Field { parent: self.into(), id }) + .collect() + } + + fn variant_data(self, db: &dyn HirDatabase) -> Arc { + db.union_data(self.id).variant_data.clone() + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Enum { + pub(crate) id: EnumId, +} + +impl Enum { + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.enum_data(self.id).name.clone() + } + + pub fn variants(self, db: &dyn HirDatabase) -> Vec { + db.enum_data(self.id) + .variants + .iter() + .map(|(id, _)| EnumVariant { parent: self, id }) + .collect() + } + + pub fn ty(self, db: &dyn HirDatabase) -> Type { + Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct EnumVariant { + 
pub(crate) parent: Enum, + pub(crate) id: LocalEnumVariantId, +} + +impl EnumVariant { + pub fn module(self, db: &dyn HirDatabase) -> Module { + self.parent.module(db) + } + pub fn parent_enum(self, _db: &dyn HirDatabase) -> Enum { + self.parent + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.enum_data(self.parent.id).variants[self.id].name.clone() + } + + pub fn fields(self, db: &dyn HirDatabase) -> Vec { + self.variant_data(db) + .fields() + .iter() + .map(|(id, _)| Field { parent: self.into(), id }) + .collect() + } + + pub fn kind(self, db: &dyn HirDatabase) -> StructKind { + self.variant_data(db).kind() + } + + pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc { + db.enum_data(self.parent.id).variants[self.id].variant_data.clone() + } +} + +/// A Data Type +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum Adt { + Struct(Struct), + Union(Union), + Enum(Enum), +} +impl_from!(Struct, Union, Enum for Adt); + +impl Adt { + pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool { + let subst = db.generic_defaults(self.into()); + subst.iter().any(|ty| &ty.value == &Ty::Unknown) + } + + /// Turns this ADT into a type. Any type parameters of the ADT will be + /// turned into unknown types, which is good for e.g. finding the most + /// general set of completions, but will not look very nice when printed. + pub fn ty(self, db: &dyn HirDatabase) -> Type { + let id = AdtId::from(self); + Type::from_def(db, id.module(db.upcast()).krate, id) + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + match self { + Adt::Struct(s) => s.module(db), + Adt::Union(s) => s.module(db), + Adt::Enum(e) => e.module(db), + } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + match self { + Adt::Struct(s) => s.name(db), + Adt::Union(u) => u.name(db), + Adt::Enum(e) => e.name(db), + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum VariantDef { + Struct(Struct), + Union(Union), + EnumVariant(EnumVariant), +} +impl_from!(Struct, Union, EnumVariant for VariantDef); + +impl VariantDef { + pub fn fields(self, db: &dyn HirDatabase) -> Vec { + match self { + VariantDef::Struct(it) => it.fields(db), + VariantDef::Union(it) => it.fields(db), + VariantDef::EnumVariant(it) => it.fields(db), + } + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + match self { + VariantDef::Struct(it) => it.module(db), + VariantDef::Union(it) => it.module(db), + VariantDef::EnumVariant(it) => it.module(db), + } + } + + pub fn name(&self, db: &dyn HirDatabase) -> Name { + match self { + VariantDef::Struct(s) => s.name(db), + VariantDef::Union(u) => u.name(db), + VariantDef::EnumVariant(e) => e.name(db), + } + } + + pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc { + match self { + VariantDef::Struct(it) => it.variant_data(db), + VariantDef::Union(it) => it.variant_data(db), + VariantDef::EnumVariant(it) => it.variant_data(db), + } + } +} + +/// The defs which have a body. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum DefWithBody { + Function(Function), + Static(Static), + Const(Const), +} +impl_from!(Function, Const, Static for DefWithBody); + +impl DefWithBody { + pub fn module(self, db: &dyn HirDatabase) -> Module { + match self { + DefWithBody::Const(c) => c.module(db), + DefWithBody::Function(f) => f.module(db), + DefWithBody::Static(s) => s.module(db), + } + } + + pub fn name(self, db: &dyn HirDatabase) -> Option { + match self { + DefWithBody::Function(f) => Some(f.name(db)), + DefWithBody::Static(s) => s.name(db), + DefWithBody::Const(c) => c.name(db), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Function { + pub(crate) id: FunctionId, +} + +impl Function { + pub fn module(self, db: &dyn HirDatabase) -> Module { + self.id.lookup(db.upcast()).module(db.upcast()).into() + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.function_data(self.id).name.clone() + } + + pub fn has_self_param(self, db: &dyn HirDatabase) -> bool { + db.function_data(self.id).has_self_param + } + + pub fn params(self, db: &dyn HirDatabase) -> Vec { + db.function_data(self.id).params.clone() + } + + pub fn is_unsafe(self, db: &dyn HirDatabase) -> bool { + db.function_data(self.id).is_unsafe + } + + pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { + hir_ty::diagnostics::validate_body(db, self.id.into(), sink) + } +} + +impl HasVisibility for Function { + fn visibility(&self, db: &dyn HirDatabase) -> Visibility { + let function_data = db.function_data(self.id); + let visibility = &function_data.visibility; + visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Const { + pub(crate) id: ConstId, +} + +impl Const { + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn name(self, db: &dyn HirDatabase) -> Option { + db.const_data(self.id).name.clone() + } +} + +impl HasVisibility for Const { + fn visibility(&self, db: &dyn HirDatabase) -> Visibility { + let function_data = db.const_data(self.id); + let visibility = &function_data.visibility; + visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Static { + pub(crate) id: StaticId, +} + +impl Static { + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn name(self, db: &dyn HirDatabase) -> Option { + db.static_data(self.id).name.clone() + } + + pub fn is_mut(self, db: &dyn HirDatabase) -> bool { + db.static_data(self.id).mutable + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Trait { + pub(crate) id: TraitId, +} + +impl Trait { + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) } + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.trait_data(self.id).name.clone() + } + + pub fn items(self, db: &dyn HirDatabase) -> Vec { + db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect() + } + + pub fn is_auto(self, db: &dyn 
HirDatabase) -> bool { + db.trait_data(self.id).auto + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TypeAlias { + pub(crate) id: TypeAliasId, +} + +impl TypeAlias { + pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool { + let subst = db.generic_defaults(self.id.into()); + subst.iter().any(|ty| &ty.value == &Ty::Unknown) + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } + } + + pub fn krate(self, db: &dyn HirDatabase) -> Option { + Some(self.module(db).krate()) + } + + pub fn type_ref(self, db: &dyn HirDatabase) -> Option { + db.type_alias_data(self.id).type_ref.clone() + } + + pub fn ty(self, db: &dyn HirDatabase) -> Type { + Type::from_def(db, self.id.lookup(db.upcast()).module(db.upcast()).krate, self.id) + } + + pub fn name(self, db: &dyn HirDatabase) -> Name { + db.type_alias_data(self.id).name.clone() + } +} + +impl HasVisibility for TypeAlias { + fn visibility(&self, db: &dyn HirDatabase) -> Visibility { + let function_data = db.type_alias_data(self.id); + let visibility = &function_data.visibility; + visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MacroDef { + pub(crate) id: MacroDefId, +} + +impl MacroDef { + /// FIXME: right now, this just returns the root module of the crate that + /// defines this macro. The reasons for this is that macros are expanded + /// early, in `hir_expand`, where modules simply do not exist yet. + pub fn module(self, db: &dyn HirDatabase) -> Option { + let krate = self.id.krate?; + let module_id = db.crate_def_map(krate).root; + Some(Module::new(Crate { id: krate }, module_id)) + } + + /// XXX: this parses the file + pub fn name(self, db: &dyn HirDatabase) -> Option { + self.source(db).value.name().map(|it| it.as_name()) + } + + /// Indicate it is a proc-macro + pub fn is_proc_macro(&self) -> bool { + matches!(self.id.kind, MacroDefKind::CustomDerive(_)) + } + + /// Indicate it is a derive macro + pub fn is_derive_macro(&self) -> bool { + matches!(self.id.kind, MacroDefKind::CustomDerive(_) | MacroDefKind::BuiltInDerive(_)) + } +} + +/// Invariant: `inner.as_assoc_item(db).is_some()` +/// We do not actively enforce this invariant. 
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum AssocItem { + Function(Function), + Const(Const), + TypeAlias(TypeAlias), +} +pub enum AssocItemContainer { + Trait(Trait), + ImplDef(ImplDef), +} +pub trait AsAssocItem { + fn as_assoc_item(self, db: &dyn HirDatabase) -> Option; +} + +impl AsAssocItem for Function { + fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { + as_assoc_item(db, AssocItem::Function, self.id) + } +} +impl AsAssocItem for Const { + fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { + as_assoc_item(db, AssocItem::Const, self.id) + } +} +impl AsAssocItem for TypeAlias { + fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { + as_assoc_item(db, AssocItem::TypeAlias, self.id) + } +} +fn as_assoc_item(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option +where + ID: Lookup>, + DEF: From, + CTOR: FnOnce(DEF) -> AssocItem, + AST: ItemTreeNode, +{ + match id.lookup(db.upcast()).container { + AssocContainerId::TraitId(_) | AssocContainerId::ImplId(_) => Some(ctor(DEF::from(id))), + AssocContainerId::ContainerId(_) => None, + } +} + +impl AssocItem { + pub fn module(self, db: &dyn HirDatabase) -> Module { + match self { + AssocItem::Function(f) => f.module(db), + AssocItem::Const(c) => c.module(db), + AssocItem::TypeAlias(t) => t.module(db), + } + } + pub fn container(self, db: &dyn HirDatabase) -> AssocItemContainer { + let container = match self { + AssocItem::Function(it) => it.id.lookup(db.upcast()).container, + AssocItem::Const(it) => it.id.lookup(db.upcast()).container, + AssocItem::TypeAlias(it) => it.id.lookup(db.upcast()).container, + }; + match container { + AssocContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()), + AssocContainerId::ImplId(id) => AssocItemContainer::ImplDef(id.into()), + AssocContainerId::ContainerId(_) => panic!("invalid AssocItem"), + } + } +} + +impl HasVisibility for AssocItem { + fn visibility(&self, db: &dyn HirDatabase) -> Visibility { + match self { + AssocItem::Function(f) => f.visibility(db), + AssocItem::Const(c) => c.visibility(db), + AssocItem::TypeAlias(t) => t.visibility(db), + } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] +pub enum GenericDef { + Function(Function), + Adt(Adt), + Trait(Trait), + TypeAlias(TypeAlias), + ImplDef(ImplDef), + // enum variants cannot have generics themselves, but their parent enums + // can, and this makes some code easier to write + EnumVariant(EnumVariant), + // consts can have type parameters from their parents (i.e. associated consts of traits) + Const(Const), +} +impl_from!( + Function, + Adt(Struct, Enum, Union), + Trait, + TypeAlias, + ImplDef, + EnumVariant, + Const + for GenericDef +); + +impl GenericDef { + pub fn params(self, db: &dyn HirDatabase) -> Vec { + let generics: Arc = db.generic_params(self.into()); + generics + .types + .iter() + .map(|(local_id, _)| TypeParam { id: TypeParamId { parent: self.into(), local_id } }) + .collect() + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct Local { + pub(crate) parent: DefWithBodyId, + pub(crate) pat_id: PatId, +} + +impl Local { + pub fn is_param(self, db: &dyn HirDatabase) -> bool { + let src = self.source(db); + match src.value { + Either::Left(bind_pat) => { + bind_pat.syntax().ancestors().any(|it| ast::Param::can_cast(it.kind())) + } + Either::Right(_self_param) => true, + } + } + + // FIXME: why is this an option? It shouldn't be? 
+ pub fn name(self, db: &dyn HirDatabase) -> Option { + let body = db.body(self.parent.into()); + match &body[self.pat_id] { + Pat::Bind { name, .. } => Some(name.clone()), + _ => None, + } + } + + pub fn is_self(self, db: &dyn HirDatabase) -> bool { + self.name(db) == Some(name![self]) + } + + pub fn is_mut(self, db: &dyn HirDatabase) -> bool { + let body = db.body(self.parent.into()); + match &body[self.pat_id] { + Pat::Bind { mode, .. } => match mode { + BindingAnnotation::Mutable | BindingAnnotation::RefMut => true, + _ => false, + }, + _ => false, + } + } + + pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody { + self.parent.into() + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + self.parent(db).module(db) + } + + pub fn ty(self, db: &dyn HirDatabase) -> Type { + let def = DefWithBodyId::from(self.parent); + let infer = db.infer(def); + let ty = infer[self.pat_id].clone(); + let krate = def.module(db.upcast()).krate; + Type::new(db, krate, def, ty) + } + + pub fn source(self, db: &dyn HirDatabase) -> InFile> { + let (_body, source_map) = db.body_with_source_map(self.parent.into()); + let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm... + let root = src.file_syntax(db.upcast()); + src.map(|ast| { + ast.map_left(|it| it.cast().unwrap().to_node(&root)).map_right(|it| it.to_node(&root)) + }) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct TypeParam { + pub(crate) id: TypeParamId, +} + +impl TypeParam { + pub fn name(self, db: &dyn HirDatabase) -> Name { + let params = db.generic_params(self.id.parent); + params.types[self.id.local_id].name.clone().unwrap_or_else(Name::missing) + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + self.id.parent.module(db.upcast()).into() + } + + pub fn ty(self, db: &dyn HirDatabase) -> Type { + let resolver = self.id.parent.resolver(db.upcast()); + let environment = TraitEnvironment::lower(db, &resolver); + let ty = Ty::Placeholder(self.id); + Type { + krate: self.id.parent.module(db.upcast()).krate, + ty: InEnvironment { value: ty, environment }, + } + } + + pub fn default(self, db: &dyn HirDatabase) -> Option { + let params = db.generic_defaults(self.id.parent); + let local_idx = hir_ty::param_idx(db, self.id)?; + let resolver = self.id.parent.resolver(db.upcast()); + let environment = TraitEnvironment::lower(db, &resolver); + let ty = params.get(local_idx)?.clone(); + let subst = Substs::type_params(db, self.id.parent); + let ty = ty.subst(&subst.prefix(local_idx)); + Some(Type { + krate: self.id.parent.module(db.upcast()).krate, + ty: InEnvironment { value: ty, environment }, + }) + } +} + +// FIXME: rename from `ImplDef` to `Impl` +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ImplDef { + pub(crate) id: ImplId, +} + +impl ImplDef { + pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec { + let inherent = db.inherent_impls_in_crate(krate.id); + let trait_ = db.trait_impls_in_crate(krate.id); + + inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect() + } + pub fn for_trait(db: &dyn HirDatabase, krate: Crate, trait_: Trait) -> Vec { + let impls = db.trait_impls_in_crate(krate.id); + impls.for_trait(trait_.id).map(Self::from).collect() + } + + pub fn target_trait(self, db: &dyn HirDatabase) -> Option { + db.impl_data(self.id).target_trait.clone() + } + + pub fn target_type(self, db: &dyn HirDatabase) -> TypeRef { + db.impl_data(self.id).target_type.clone() + } + + pub fn target_ty(self, db: &dyn HirDatabase) -> Type { + let impl_data = 
db.impl_data(self.id); + let resolver = self.id.resolver(db.upcast()); + let ctx = hir_ty::TyLoweringContext::new(db, &resolver); + let environment = TraitEnvironment::lower(db, &resolver); + let ty = Ty::from_hir(&ctx, &impl_data.target_type); + Type { + krate: self.id.lookup(db.upcast()).container.module(db.upcast()).krate, + ty: InEnvironment { value: ty, environment }, + } + } + + pub fn items(self, db: &dyn HirDatabase) -> Vec { + db.impl_data(self.id).items.iter().map(|it| (*it).into()).collect() + } + + pub fn is_negative(self, db: &dyn HirDatabase) -> bool { + db.impl_data(self.id).is_negative + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + self.id.lookup(db.upcast()).container.module(db.upcast()).into() + } + + pub fn krate(self, db: &dyn HirDatabase) -> Crate { + Crate { id: self.module(db).id.krate } + } + + pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option> { + let src = self.source(db); + let item = src.file_id.is_builtin_derive(db.upcast())?; + let hygenic = hir_expand::hygiene::Hygiene::new(db.upcast(), item.file_id); + + let attr = item + .value + .attrs() + .filter_map(|it| { + let path = hir_def::path::ModPath::from_src(it.path()?, &hygenic)?; + if path.as_ident()?.to_string() == "derive" { + Some(it) + } else { + None + } + }) + .last()?; + + Some(item.with_value(attr)) + } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct Type { + krate: CrateId, + ty: InEnvironment, +} + +impl Type { + pub(crate) fn new_with_resolver( + db: &dyn HirDatabase, + resolver: &Resolver, + ty: Ty, + ) -> Option { + let krate = resolver.krate()?; + Some(Type::new_with_resolver_inner(db, krate, resolver, ty)) + } + pub(crate) fn new_with_resolver_inner( + db: &dyn HirDatabase, + krate: CrateId, + resolver: &Resolver, + ty: Ty, + ) -> Type { + let environment = TraitEnvironment::lower(db, &resolver); + Type { krate, ty: InEnvironment { value: ty, environment } } + } + + fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type { + let resolver = lexical_env.resolver(db.upcast()); + let environment = TraitEnvironment::lower(db, &resolver); + Type { krate, ty: InEnvironment { value: ty, environment } } + } + + fn from_def( + db: &dyn HirDatabase, + krate: CrateId, + def: impl HasResolver + Into + Into, + ) -> Type { + let substs = Substs::build_for_def(db, def).fill_with_unknown().build(); + let ty = db.ty(def.into()).subst(&substs); + Type::new(db, krate, def, ty) + } + + pub fn is_unit(&self) -> bool { + matches!( + self.ty.value, + Ty::Apply(ApplicationTy { ctor: TypeCtor::Tuple { cardinality: 0 }, .. }) + ) + } + pub fn is_bool(&self) -> bool { + matches!(self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. })) + } + + pub fn is_mutable_reference(&self) -> bool { + matches!( + self.ty.value, + Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(Mutability::Mut), .. }) + ) + } + + pub fn is_unknown(&self) -> bool { + matches!(self.ty.value, Ty::Unknown) + } + + /// Checks that particular type `ty` implements `std::future::Future`. + /// This function is used in `.await` syntax completion. 
+ pub fn impls_future(&self, db: &dyn HirDatabase) -> bool { + let krate = self.krate; + + let std_future_trait = + db.lang_item(krate, "future_trait".into()).and_then(|it| it.as_trait()); + let std_future_trait = match std_future_trait { + Some(it) => it, + None => return false, + }; + + let canonical_ty = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; + method_resolution::implements_trait( + &canonical_ty, + db, + self.ty.environment.clone(), + krate, + std_future_trait, + ) + } + + pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool { + let trait_ref = hir_ty::TraitRef { + trait_: trait_.id, + substs: Substs::build_for_def(db, trait_.id) + .push(self.ty.value.clone()) + .fill(args.iter().map(|t| t.ty.value.clone())) + .build(), + }; + + let goal = Canonical { + value: hir_ty::InEnvironment::new( + self.ty.environment.clone(), + hir_ty::Obligation::Trait(trait_ref), + ), + kinds: Arc::new([]), + }; + + db.trait_solve(self.krate, goal).is_some() + } + + pub fn as_callable(&self, db: &dyn HirDatabase) -> Option { + let def = match self.ty.value { + Ty::Apply(ApplicationTy { ctor: TypeCtor::FnDef(def), parameters: _ }) => Some(def), + _ => None, + }; + + let sig = self.ty.value.callable_sig(db)?; + Some(Callable { ty: self.clone(), sig, def, is_bound_method: false }) + } + + pub fn is_closure(&self) -> bool { + matches!(&self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::Closure { .. }, .. })) + } + + pub fn is_fn(&self) -> bool { + matches!(&self.ty.value, + Ty::Apply(ApplicationTy { ctor: TypeCtor::FnDef(..), .. }) | + Ty::Apply(ApplicationTy { ctor: TypeCtor::FnPtr { .. }, .. }) + ) + } + + pub fn is_packed(&self, db: &dyn HirDatabase) -> bool { + let adt_id = match self.ty.value { + Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(adt_id), .. }) => adt_id, + _ => return false, + }; + + let adt = adt_id.into(); + match adt { + Adt::Struct(s) => matches!(s.repr(db), Some(ReprKind::Packed)), + _ => false, + } + } + + pub fn is_raw_ptr(&self) -> bool { + matches!(&self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(..), .. })) + } + + pub fn contains_unknown(&self) -> bool { + return go(&self.ty.value); + + fn go(ty: &Ty) -> bool { + match ty { + Ty::Unknown => true, + Ty::Apply(a_ty) => a_ty.parameters.iter().any(go), + _ => false, + } + } + } + + pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> { + if let Ty::Apply(a_ty) = &self.ty.value { + let variant_id = match a_ty.ctor { + TypeCtor::Adt(AdtId::StructId(s)) => s.into(), + TypeCtor::Adt(AdtId::UnionId(u)) => u.into(), + _ => return Vec::new(), + }; + + return db + .field_types(variant_id) + .iter() + .map(|(local_id, ty)| { + let def = Field { parent: variant_id.into(), id: local_id }; + let ty = ty.clone().subst(&a_ty.parameters); + (def, self.derived(ty)) + }) + .collect(); + }; + Vec::new() + } + + pub fn tuple_fields(&self, _db: &dyn HirDatabase) -> Vec { + let mut res = Vec::new(); + if let Ty::Apply(a_ty) = &self.ty.value { + if let TypeCtor::Tuple { .. } = a_ty.ctor { + for ty in a_ty.parameters.iter() { + let ty = ty.clone(); + res.push(self.derived(ty)); + } + } + }; + res + } + + pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator + 'a { + // There should be no inference vars in types passed here + // FIXME check that? 
+ let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; + let environment = self.ty.environment.clone(); + let ty = InEnvironment { value: canonical, environment }; + autoderef(db, Some(self.krate), ty) + .map(|canonical| canonical.value) + .map(move |ty| self.derived(ty)) + } + + // This would be nicer if it just returned an iterator, but that runs into + // lifetime problems, because we need to borrow temp `CrateImplDefs`. + pub fn iterate_assoc_items( + self, + db: &dyn HirDatabase, + krate: Crate, + mut callback: impl FnMut(AssocItem) -> Option, + ) -> Option { + for krate in self.ty.value.def_crates(db, krate.id)? { + let impls = db.inherent_impls_in_crate(krate); + + for impl_def in impls.for_self_ty(&self.ty.value) { + for &item in db.impl_data(*impl_def).items.iter() { + if let Some(result) = callback(item.into()) { + return Some(result); + } + } + } + } + None + } + + pub fn iterate_method_candidates( + &self, + db: &dyn HirDatabase, + krate: Crate, + traits_in_scope: &FxHashSet, + name: Option<&Name>, + mut callback: impl FnMut(&Ty, Function) -> Option, + ) -> Option { + // There should be no inference vars in types passed here + // FIXME check that? + // FIXME replace Unknown by bound vars here + let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; + + let env = self.ty.environment.clone(); + let krate = krate.id; + + method_resolution::iterate_method_candidates( + &canonical, + db, + env, + krate, + traits_in_scope, + name, + method_resolution::LookupMode::MethodCall, + |ty, it| match it { + AssocItemId::FunctionId(f) => callback(ty, f.into()), + _ => None, + }, + ) + } + + pub fn iterate_path_candidates( + &self, + db: &dyn HirDatabase, + krate: Crate, + traits_in_scope: &FxHashSet, + name: Option<&Name>, + mut callback: impl FnMut(&Ty, AssocItem) -> Option, + ) -> Option { + // There should be no inference vars in types passed here + // FIXME check that? + // FIXME replace Unknown by bound vars here + let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) }; + + let env = self.ty.environment.clone(); + let krate = krate.id; + + method_resolution::iterate_method_candidates( + &canonical, + db, + env, + krate, + traits_in_scope, + name, + method_resolution::LookupMode::Path, + |ty, it| callback(ty, it.into()), + ) + } + + pub fn as_adt(&self) -> Option { + let (adt, _subst) = self.ty.value.as_adt()?; + Some(adt.into()) + } + + pub fn as_dyn_trait(&self) -> Option { + self.ty.value.dyn_trait().map(Into::into) + } + + pub fn as_impl_traits(&self, db: &dyn HirDatabase) -> Option> { + self.ty.value.impl_trait_bounds(db).map(|it| { + it.into_iter() + .filter_map(|pred| match pred { + hir_ty::GenericPredicate::Implemented(trait_ref) => { + Some(Trait::from(trait_ref.trait_)) + } + _ => None, + }) + .collect() + }) + } + + pub fn as_associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option { + self.ty.value.associated_type_parent_trait(db).map(Into::into) + } + + // FIXME: provide required accessors such that it becomes implementable from outside. + pub fn is_equal_for_find_impls(&self, other: &Type) -> bool { + match (&self.ty.value, &other.ty.value) { + (Ty::Apply(a_original_ty), Ty::Apply(ApplicationTy { ctor, parameters })) => match ctor + { + TypeCtor::Ref(..) 
=> match parameters.as_single() { + Ty::Apply(a_ty) => a_original_ty.ctor == a_ty.ctor, + _ => false, + }, + _ => a_original_ty.ctor == *ctor, + }, + _ => false, + } + } + + fn derived(&self, ty: Ty) -> Type { + Type { + krate: self.krate, + ty: InEnvironment { value: ty, environment: self.ty.environment.clone() }, + } + } + + pub fn walk(&self, db: &dyn HirDatabase, mut cb: impl FnMut(Type)) { + // TypeWalk::walk for a Ty at first visits parameters and only after that the Ty itself. + // We need a different order here. + + fn walk_substs( + db: &dyn HirDatabase, + type_: &Type, + substs: &Substs, + cb: &mut impl FnMut(Type), + ) { + for ty in substs.iter() { + walk_type(db, &type_.derived(ty.clone()), cb); + } + } + + fn walk_bounds( + db: &dyn HirDatabase, + type_: &Type, + bounds: &[GenericPredicate], + cb: &mut impl FnMut(Type), + ) { + for pred in bounds { + match pred { + GenericPredicate::Implemented(trait_ref) => { + cb(type_.clone()); + walk_substs(db, type_, &trait_ref.substs, cb); + } + _ => (), + } + } + } + + fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) { + let ty = type_.ty.value.strip_references(); + match ty { + Ty::Apply(ApplicationTy { ctor, parameters }) => { + match ctor { + TypeCtor::Adt(_) => { + cb(type_.derived(ty.clone())); + } + TypeCtor::AssociatedType(_) => { + if let Some(_) = ty.associated_type_parent_trait(db) { + cb(type_.derived(ty.clone())); + } + } + _ => (), + } + + // adt params, tuples, etc... + walk_substs(db, type_, parameters, cb); + } + Ty::Opaque(opaque_ty) => { + if let Some(bounds) = ty.impl_trait_bounds(db) { + walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb); + } + + walk_substs(db, type_, &opaque_ty.parameters, cb); + } + Ty::Placeholder(_) => { + if let Some(bounds) = ty.impl_trait_bounds(db) { + walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb); + } + } + Ty::Dyn(bounds) => { + walk_bounds(db, &type_.derived(ty.clone()), bounds.as_ref(), cb); + } + + _ => (), + } + } + + walk_type(db, self, &mut cb); + } +} + +impl HirDisplay for Type { + fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { + self.ty.value.hir_fmt(f) + } +} + +// FIXME: closures +#[derive(Debug)] +pub struct Callable { + ty: Type, + sig: FnSig, + def: Option, + pub(crate) is_bound_method: bool, +} + +pub enum CallableKind { + Function(Function), + TupleStruct(Struct), + TupleEnumVariant(EnumVariant), + Closure, +} + +impl Callable { + pub fn kind(&self) -> CallableKind { + match self.def { + Some(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()), + Some(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()), + Some(CallableDefId::EnumVariantId(it)) => CallableKind::TupleEnumVariant(it.into()), + None => CallableKind::Closure, + } + } + pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option { + let func = match self.def { + Some(CallableDefId::FunctionId(it)) if self.is_bound_method => it, + _ => return None, + }; + let src = func.lookup(db.upcast()).source(db.upcast()); + let param_list = src.value.param_list()?; + param_list.self_param() + } + pub fn n_params(&self) -> usize { + self.sig.params().len() - if self.is_bound_method { 1 } else { 0 } + } + pub fn params( + &self, + db: &dyn HirDatabase, + ) -> Vec<(Option>, Type)> { + let types = self + .sig + .params() + .iter() + .skip(if self.is_bound_method { 1 } else { 0 }) + .map(|ty| self.ty.derived(ty.clone())); + let patterns = match self.def { + Some(CallableDefId::FunctionId(func)) => { + let src = 
func.lookup(db.upcast()).source(db.upcast()); + src.value.param_list().map(|param_list| { + param_list + .self_param() + .map(|it| Some(Either::Left(it))) + .filter(|_| !self.is_bound_method) + .into_iter() + .chain(param_list.params().map(|it| it.pat().map(Either::Right))) + }) + } + _ => None, + }; + patterns.into_iter().flatten().chain(iter::repeat(None)).zip(types).collect() + } + pub fn return_type(&self) -> Type { + self.ty.derived(self.sig.ret().clone()) + } +} + +/// For IDE only +#[derive(Debug)] +pub enum ScopeDef { + ModuleDef(ModuleDef), + MacroDef(MacroDef), + GenericParam(TypeParam), + ImplSelfType(ImplDef), + AdtSelfType(Adt), + Local(Local), + Unknown, +} + +impl ScopeDef { + pub fn all_items(def: PerNs) -> ArrayVec<[Self; 3]> { + let mut items = ArrayVec::new(); + + match (def.take_types(), def.take_values()) { + (Some(m1), None) => items.push(ScopeDef::ModuleDef(m1.into())), + (None, Some(m2)) => items.push(ScopeDef::ModuleDef(m2.into())), + (Some(m1), Some(m2)) => { + // Some items, like unit structs and enum variants, are + // returned as both a type and a value. Here we want + // to de-duplicate them. + if m1 != m2 { + items.push(ScopeDef::ModuleDef(m1.into())); + items.push(ScopeDef::ModuleDef(m2.into())); + } else { + items.push(ScopeDef::ModuleDef(m1.into())); + } + } + (None, None) => {} + }; + + if let Some(macro_def_id) = def.take_macros() { + items.push(ScopeDef::MacroDef(macro_def_id.into())); + } + + if items.is_empty() { + items.push(ScopeDef::Unknown); + } + + items + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum AttrDef { + Module(Module), + Field(Field), + Adt(Adt), + Function(Function), + EnumVariant(EnumVariant), + Static(Static), + Const(Const), + Trait(Trait), + TypeAlias(TypeAlias), + MacroDef(MacroDef), +} + +impl_from!( + Module, + Field, + Adt(Struct, Enum, Union), + EnumVariant, + Static, + Const, + Function, + Trait, + TypeAlias, + MacroDef + for AttrDef +); + +pub trait HasAttrs { + fn attrs(self, db: &dyn HirDatabase) -> Attrs; +} + +impl> HasAttrs for T { + fn attrs(self, db: &dyn HirDatabase) -> Attrs { + let def: AttrDef = self.into(); + db.attrs(def.into()) + } +} + +pub trait Docs { + fn docs(&self, db: &dyn HirDatabase) -> Option; +} +impl + Copy> Docs for T { + fn docs(&self, db: &dyn HirDatabase) -> Option { + let def: AttrDef = (*self).into(); + db.documentation(def.into()) + } +} + +pub trait HasVisibility { + fn visibility(&self, db: &dyn HirDatabase) -> Visibility; + fn is_visible_from(&self, db: &dyn HirDatabase, module: Module) -> bool { + let vis = self.visibility(db); + vis.is_visible_from(db.upcast(), module.id) + } +} diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs new file mode 100644 index 000000000..07333c453 --- /dev/null +++ b/crates/hir/src/db.rs @@ -0,0 +1,21 @@ +//! 
FIXME: write short doc here + +pub use hir_def::db::{ + AttrsQuery, BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQueryQuery, + CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, + ExprScopesQuery, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, ImportMapQuery, + InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery, InternFunctionQuery, + InternImplQuery, InternStaticQuery, InternStructQuery, InternTraitQuery, InternTypeAliasQuery, + InternUnionQuery, ItemTreeQuery, LangItemQuery, ModuleLangItemsQuery, StaticDataQuery, + StructDataQuery, TraitDataQuery, TypeAliasDataQuery, UnionDataQuery, +}; +pub use hir_expand::db::{ + AstDatabase, AstDatabaseStorage, AstIdMapQuery, InternEagerExpansionQuery, InternMacroQuery, + MacroArgTextQuery, MacroDefQuery, MacroExpandQuery, ParseMacroQuery, +}; +pub use hir_ty::db::*; + +#[test] +fn hir_database_is_object_safe() { + fn _assert_object_safe(_: &dyn HirDatabase) {} +} diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs new file mode 100644 index 000000000..363164b9b --- /dev/null +++ b/crates/hir/src/diagnostics.rs @@ -0,0 +1,6 @@ +//! FIXME: write short doc here +pub use hir_def::diagnostics::UnresolvedModule; +pub use hir_expand::diagnostics::{Diagnostic, DiagnosticSink, DiagnosticSinkBuilder}; +pub use hir_ty::diagnostics::{ + MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, NoSuchField, +}; diff --git a/crates/hir/src/from_id.rs b/crates/hir/src/from_id.rs new file mode 100644 index 000000000..a53ac1e08 --- /dev/null +++ b/crates/hir/src/from_id.rs @@ -0,0 +1,247 @@ +//! Utility module for converting between hir_def ids and code_model wrappers. +//! +//! It's unclear if we need this long-term, but it's definitelly useful while we +//! are splitting the hir. + +use hir_def::{ + expr::PatId, AdtId, AssocItemId, AttrDefId, DefWithBodyId, EnumVariantId, FieldId, + GenericDefId, ModuleDefId, VariantId, +}; + +use crate::{ + code_model::ItemInNs, Adt, AssocItem, AttrDef, DefWithBody, EnumVariant, Field, GenericDef, + Local, MacroDef, ModuleDef, VariantDef, +}; + +macro_rules! 
from_id { + ($(($id:path, $ty:path)),*) => {$( + impl From<$id> for $ty { + fn from(id: $id) -> $ty { + $ty { id } + } + } + impl From<$ty> for $id { + fn from(ty: $ty) -> $id { + ty.id + } + } + )*} +} + +from_id![ + (base_db::CrateId, crate::Crate), + (hir_def::ModuleId, crate::Module), + (hir_def::StructId, crate::Struct), + (hir_def::UnionId, crate::Union), + (hir_def::EnumId, crate::Enum), + (hir_def::TypeAliasId, crate::TypeAlias), + (hir_def::TraitId, crate::Trait), + (hir_def::StaticId, crate::Static), + (hir_def::ConstId, crate::Const), + (hir_def::FunctionId, crate::Function), + (hir_def::ImplId, crate::ImplDef), + (hir_def::TypeParamId, crate::TypeParam), + (hir_expand::MacroDefId, crate::MacroDef) +]; + +impl From for Adt { + fn from(id: AdtId) -> Self { + match id { + AdtId::StructId(it) => Adt::Struct(it.into()), + AdtId::UnionId(it) => Adt::Union(it.into()), + AdtId::EnumId(it) => Adt::Enum(it.into()), + } + } +} + +impl From for AdtId { + fn from(id: Adt) -> Self { + match id { + Adt::Struct(it) => AdtId::StructId(it.id), + Adt::Union(it) => AdtId::UnionId(it.id), + Adt::Enum(it) => AdtId::EnumId(it.id), + } + } +} + +impl From for EnumVariant { + fn from(id: EnumVariantId) -> Self { + EnumVariant { parent: id.parent.into(), id: id.local_id } + } +} + +impl From for EnumVariantId { + fn from(def: EnumVariant) -> Self { + EnumVariantId { parent: def.parent.id, local_id: def.id } + } +} + +impl From for ModuleDef { + fn from(id: ModuleDefId) -> Self { + match id { + ModuleDefId::ModuleId(it) => ModuleDef::Module(it.into()), + ModuleDefId::FunctionId(it) => ModuleDef::Function(it.into()), + ModuleDefId::AdtId(it) => ModuleDef::Adt(it.into()), + ModuleDefId::EnumVariantId(it) => ModuleDef::EnumVariant(it.into()), + ModuleDefId::ConstId(it) => ModuleDef::Const(it.into()), + ModuleDefId::StaticId(it) => ModuleDef::Static(it.into()), + ModuleDefId::TraitId(it) => ModuleDef::Trait(it.into()), + ModuleDefId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()), + ModuleDefId::BuiltinType(it) => ModuleDef::BuiltinType(it), + } + } +} + +impl From for ModuleDefId { + fn from(id: ModuleDef) -> Self { + match id { + ModuleDef::Module(it) => ModuleDefId::ModuleId(it.into()), + ModuleDef::Function(it) => ModuleDefId::FunctionId(it.into()), + ModuleDef::Adt(it) => ModuleDefId::AdtId(it.into()), + ModuleDef::EnumVariant(it) => ModuleDefId::EnumVariantId(it.into()), + ModuleDef::Const(it) => ModuleDefId::ConstId(it.into()), + ModuleDef::Static(it) => ModuleDefId::StaticId(it.into()), + ModuleDef::Trait(it) => ModuleDefId::TraitId(it.into()), + ModuleDef::TypeAlias(it) => ModuleDefId::TypeAliasId(it.into()), + ModuleDef::BuiltinType(it) => ModuleDefId::BuiltinType(it), + } + } +} + +impl From for DefWithBodyId { + fn from(def: DefWithBody) -> Self { + match def { + DefWithBody::Function(it) => DefWithBodyId::FunctionId(it.id), + DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id), + DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id), + } + } +} + +impl From for DefWithBody { + fn from(def: DefWithBodyId) -> Self { + match def { + DefWithBodyId::FunctionId(it) => DefWithBody::Function(it.into()), + DefWithBodyId::StaticId(it) => DefWithBody::Static(it.into()), + DefWithBodyId::ConstId(it) => DefWithBody::Const(it.into()), + } + } +} + +impl From for AssocItem { + fn from(def: AssocItemId) -> Self { + match def { + AssocItemId::FunctionId(it) => AssocItem::Function(it.into()), + AssocItemId::TypeAliasId(it) => AssocItem::TypeAlias(it.into()), + AssocItemId::ConstId(it) => 
AssocItem::Const(it.into()), + } + } +} + +impl From for GenericDefId { + fn from(def: GenericDef) -> Self { + match def { + GenericDef::Function(it) => GenericDefId::FunctionId(it.id), + GenericDef::Adt(it) => GenericDefId::AdtId(it.into()), + GenericDef::Trait(it) => GenericDefId::TraitId(it.id), + GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id), + GenericDef::ImplDef(it) => GenericDefId::ImplId(it.id), + GenericDef::EnumVariant(it) => { + GenericDefId::EnumVariantId(EnumVariantId { parent: it.parent.id, local_id: it.id }) + } + GenericDef::Const(it) => GenericDefId::ConstId(it.id), + } + } +} + +impl From for GenericDefId { + fn from(id: Adt) -> Self { + match id { + Adt::Struct(it) => it.id.into(), + Adt::Union(it) => it.id.into(), + Adt::Enum(it) => it.id.into(), + } + } +} + +impl From for VariantDef { + fn from(def: VariantId) -> Self { + match def { + VariantId::StructId(it) => VariantDef::Struct(it.into()), + VariantId::EnumVariantId(it) => VariantDef::EnumVariant(it.into()), + VariantId::UnionId(it) => VariantDef::Union(it.into()), + } + } +} + +impl From for VariantId { + fn from(def: VariantDef) -> Self { + match def { + VariantDef::Struct(it) => VariantId::StructId(it.id), + VariantDef::EnumVariant(it) => VariantId::EnumVariantId(it.into()), + VariantDef::Union(it) => VariantId::UnionId(it.id), + } + } +} + +impl From for FieldId { + fn from(def: Field) -> Self { + FieldId { parent: def.parent.into(), local_id: def.id } + } +} + +impl From for Field { + fn from(def: FieldId) -> Self { + Field { parent: def.parent.into(), id: def.local_id } + } +} + +impl From for AttrDefId { + fn from(def: AttrDef) -> Self { + match def { + AttrDef::Module(it) => AttrDefId::ModuleId(it.id), + AttrDef::Field(it) => AttrDefId::FieldId(it.into()), + AttrDef::Adt(it) => AttrDefId::AdtId(it.into()), + AttrDef::Function(it) => AttrDefId::FunctionId(it.id), + AttrDef::EnumVariant(it) => AttrDefId::EnumVariantId(it.into()), + AttrDef::Static(it) => AttrDefId::StaticId(it.id), + AttrDef::Const(it) => AttrDefId::ConstId(it.id), + AttrDef::Trait(it) => AttrDefId::TraitId(it.id), + AttrDef::TypeAlias(it) => AttrDefId::TypeAliasId(it.id), + AttrDef::MacroDef(it) => AttrDefId::MacroDefId(it.id), + } + } +} + +impl From for GenericDefId { + fn from(item: AssocItem) -> Self { + match item { + AssocItem::Function(f) => f.id.into(), + AssocItem::Const(c) => c.id.into(), + AssocItem::TypeAlias(t) => t.id.into(), + } + } +} + +impl From<(DefWithBodyId, PatId)> for Local { + fn from((parent, pat_id): (DefWithBodyId, PatId)) -> Self { + Local { parent, pat_id } + } +} + +impl From for ItemInNs { + fn from(macro_def: MacroDef) -> Self { + ItemInNs::Macros(macro_def.into()) + } +} + +impl From for ItemInNs { + fn from(module_def: ModuleDef) -> Self { + match module_def { + ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => { + ItemInNs::Values(module_def.into()) + } + _ => ItemInNs::Types(module_def.into()), + } + } +} diff --git a/crates/hir/src/has_source.rs b/crates/hir/src/has_source.rs new file mode 100644 index 000000000..a50d4ff02 --- /dev/null +++ b/crates/hir/src/has_source.rs @@ -0,0 +1,135 @@ +//! 
FIXME: write short doc here + +use either::Either; +use hir_def::{ + nameres::{ModuleOrigin, ModuleSource}, + src::{HasChildSource, HasSource as _}, + Lookup, VariantId, +}; +use syntax::ast; + +use crate::{ + db::HirDatabase, Const, Enum, EnumVariant, Field, FieldSource, Function, ImplDef, MacroDef, + Module, Static, Struct, Trait, TypeAlias, TypeParam, Union, +}; + +pub use hir_expand::InFile; + +pub trait HasSource { + type Ast; + fn source(self, db: &dyn HirDatabase) -> InFile; +} + +/// NB: Module is !HasSource, because it has two source nodes at the same time: +/// definition and declaration. +impl Module { + /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. + pub fn definition_source(self, db: &dyn HirDatabase) -> InFile { + let def_map = db.crate_def_map(self.id.krate); + def_map[self.id.local_id].definition_source(db.upcast()) + } + + pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool { + let def_map = db.crate_def_map(self.id.krate); + match def_map[self.id.local_id].origin { + ModuleOrigin::File { is_mod_rs, .. } => is_mod_rs, + _ => false, + } + } + + /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. + /// `None` for the crate root. + pub fn declaration_source(self, db: &dyn HirDatabase) -> Option> { + let def_map = db.crate_def_map(self.id.krate); + def_map[self.id.local_id].declaration_source(db.upcast()) + } +} + +impl HasSource for Field { + type Ast = FieldSource; + fn source(self, db: &dyn HirDatabase) -> InFile { + let var = VariantId::from(self.parent); + let src = var.child_source(db.upcast()); + src.map(|it| match it[self.id].clone() { + Either::Left(it) => FieldSource::Pos(it), + Either::Right(it) => FieldSource::Named(it), + }) + } +} +impl HasSource for Struct { + type Ast = ast::Struct; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for Union { + type Ast = ast::Union; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for Enum { + type Ast = ast::Enum; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for EnumVariant { + type Ast = ast::Variant; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.parent.id.child_source(db.upcast()).map(|map| map[self.id].clone()) + } +} +impl HasSource for Function { + type Ast = ast::Fn; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for Const { + type Ast = ast::Const; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for Static { + type Ast = ast::Static; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for Trait { + type Ast = ast::Trait; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for TypeAlias { + type Ast = ast::TypeAlias; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} +impl HasSource for MacroDef { + type Ast = ast::MacroCall; + fn source(self, db: &dyn HirDatabase) -> InFile { + InFile { + file_id: self.id.ast_id.expect("MacroDef without ast_id").file_id, + value: self.id.ast_id.expect("MacroDef without ast_id").to_node(db.upcast()), + } + } +} 
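A short sketch of how `HasSource` is typically used by IDE features such as go-to-definition. It assumes a `db: &dyn HirDatabase` is on hand; `function_name_from_source` is an illustrative helper, not part of this crate.

    use hir::{HasSource, InFile};
    use syntax::{ast::{self, NameOwner}, AstNode};

    // Map a `hir::Function` back to the `ast::Fn` node that defines it. The
    // `InFile` wrapper records which (possibly macro-generated) file the node
    // belongs to.
    fn function_name_from_source(
        db: &dyn hir::db::HirDatabase,
        func: hir::Function,
    ) -> Option<String> {
        let src: InFile<ast::Fn> = func.source(db);
        src.value.name().map(|name| name.syntax().text().to_string())
    }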
+impl HasSource for ImplDef { + type Ast = ast::Impl; + fn source(self, db: &dyn HirDatabase) -> InFile { + self.id.lookup(db.upcast()).source(db.upcast()) + } +} + +impl HasSource for TypeParam { + type Ast = Either; + fn source(self, db: &dyn HirDatabase) -> InFile { + let child_source = self.id.parent.child_source(db.upcast()); + child_source.map(|it| it[self.id.local_id].clone()) + } +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs new file mode 100644 index 000000000..24a0f6b4b --- /dev/null +++ b/crates/hir/src/lib.rs @@ -0,0 +1,59 @@ +//! HIR (previously known as descriptors) provides a high-level object oriented +//! access to Rust code. +//! +//! The principal difference between HIR and syntax trees is that HIR is bound +//! to a particular crate instance. That is, it has cfg flags and features +//! applied. So, the relation between syntax and HIR is many-to-one. +//! +//! HIR is the public API of the all of the compiler logic above syntax trees. +//! It is written in "OO" style. Each type is self contained (as in, it knows it's +//! parents and full context). It should be "clean code". +//! +//! `hir_*` crates are the implementation of the compiler logic. +//! They are written in "ECS" style, with relatively little abstractions. +//! Many types are not self-contained, and explicitly use local indexes, arenas, etc. +//! +//! `hir` is what insulates the "we don't know how to actually write an incremental compiler" +//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary: +//! https://www.tedinski.com/2018/02/06/system-boundaries.html. + +#![recursion_limit = "512"] + +mod semantics; +pub mod db; +mod source_analyzer; + +pub mod diagnostics; + +mod from_id; +mod code_model; + +mod has_source; + +pub use crate::{ + code_model::{ + Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrDef, Callable, CallableKind, Const, + Crate, CrateDependency, DefWithBody, Docs, Enum, EnumVariant, Field, FieldSource, Function, + GenericDef, HasAttrs, HasVisibility, ImplDef, Local, MacroDef, Module, ModuleDef, ScopeDef, + Static, Struct, Trait, Type, TypeAlias, TypeParam, Union, VariantDef, Visibility, + }, + has_source::HasSource, + semantics::{original_range, PathResolution, Semantics, SemanticsScope}, +}; + +pub use hir_def::{ + adt::StructKind, + attr::Attrs, + body::scope::ExprScopes, + builtin_type::BuiltinType, + docs::Documentation, + nameres::ModuleSource, + path::{ModPath, Path, PathKind}, + type_ref::{Mutability, TypeRef}, +}; +pub use hir_expand::{ + hygiene::Hygiene, name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, + MacroDefId, /* FIXME */ + MacroFile, Origin, +}; +pub use hir_ty::display::HirDisplay; diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs new file mode 100644 index 000000000..1467d825d --- /dev/null +++ b/crates/hir/src/semantics.rs @@ -0,0 +1,840 @@ +//! See `Semantics`. 
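To make the shape of this API concrete before its definition below, here is a minimal usage sketch (not part of the patch). It is generic over the database type and only relies on `Semantics::new`, `parse`, and `resolve_method_call` from this file; the helper name is made up.

```rust
// Usage sketch; any type implementing `HirDatabase` (e.g. an IDE-level root
// database) can be plugged in for `DB`.
use base_db::FileId;
use hir::{db::HirDatabase, Semantics};
use syntax::{ast, AstNode};

fn resolve_first_method_call<DB: HirDatabase>(db: &DB, file_id: FileId) -> Option<hir::Function> {
    let sema = Semantics::new(db);
    // `parse` caches the syntax tree so later queries can map nodes back to their file.
    let file = sema.parse(file_id);
    let call = file.syntax().descendants().find_map(ast::MethodCallExpr::cast)?;
    // Infers the receiver's type and returns the function the call dispatches to.
    sema.resolve_method_call(&call)
}
```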
+ +mod source_to_def; + +use std::{cell::RefCell, fmt, iter::successors}; + +use base_db::{FileId, FileRange}; +use hir_def::{ + resolver::{self, HasResolver, Resolver}, + AsMacroCall, FunctionId, TraitId, VariantId, +}; +use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo}; +use hir_ty::associated_type_shorthand_candidates; +use itertools::Itertools; +use rustc_hash::{FxHashMap, FxHashSet}; +use syntax::{ + algo::{find_node_at_offset, skip_trivia_token}, + ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize, +}; + +use crate::{ + db::HirDatabase, + diagnostics::Diagnostic, + semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, + source_analyzer::{resolve_hir_path, resolve_hir_path_qualifier, SourceAnalyzer}, + AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, + Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, TypeRef, + VariantDef, +}; +use resolver::TypeNs; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PathResolution { + /// An item + Def(ModuleDef), + /// A local binding (only value namespace) + Local(Local), + /// A generic parameter + TypeParam(TypeParam), + SelfType(ImplDef), + Macro(MacroDef), + AssocItem(AssocItem), +} + +impl PathResolution { + fn in_type_ns(&self) -> Option { + match self { + PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())), + PathResolution::Def(ModuleDef::BuiltinType(builtin)) => { + Some(TypeNs::BuiltinType(*builtin)) + } + PathResolution::Def(ModuleDef::Const(_)) + | PathResolution::Def(ModuleDef::EnumVariant(_)) + | PathResolution::Def(ModuleDef::Function(_)) + | PathResolution::Def(ModuleDef::Module(_)) + | PathResolution::Def(ModuleDef::Static(_)) + | PathResolution::Def(ModuleDef::Trait(_)) => None, + PathResolution::Def(ModuleDef::TypeAlias(alias)) => { + Some(TypeNs::TypeAliasId((*alias).into())) + } + PathResolution::Local(_) | PathResolution::Macro(_) => None, + PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())), + PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())), + PathResolution::AssocItem(AssocItem::Const(_)) + | PathResolution::AssocItem(AssocItem::Function(_)) => None, + PathResolution::AssocItem(AssocItem::TypeAlias(alias)) => { + Some(TypeNs::TypeAliasId((*alias).into())) + } + } + } + + /// Returns an iterator over associated types that may be specified after this path (using + /// `Ty::Assoc` syntax). + pub fn assoc_type_shorthand_candidates( + &self, + db: &dyn HirDatabase, + mut cb: impl FnMut(TypeAlias) -> Option, + ) -> Option { + associated_type_shorthand_candidates(db, self.in_type_ns()?, |_, _, id| cb(id.into())) + } +} + +/// Primary API to get semantic information, like types, from syntax trees. +pub struct Semantics<'db, DB> { + pub db: &'db DB, + imp: SemanticsImpl<'db>, +} + +pub struct SemanticsImpl<'db> { + pub db: &'db dyn HirDatabase, + s2d_cache: RefCell, + expansion_info_cache: RefCell>>, + cache: RefCell>, +} + +impl fmt::Debug for Semantics<'_, DB> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Semantics {{ ... 
}}") + } +} + +impl<'db, DB: HirDatabase> Semantics<'db, DB> { + pub fn new(db: &DB) -> Semantics { + let impl_ = SemanticsImpl::new(db); + Semantics { db, imp: impl_ } + } + + pub fn parse(&self, file_id: FileId) -> ast::SourceFile { + self.imp.parse(file_id) + } + + pub fn expand(&self, macro_call: &ast::MacroCall) -> Option { + self.imp.expand(macro_call) + } + + pub fn expand_hypothetical( + &self, + actual_macro_call: &ast::MacroCall, + hypothetical_args: &ast::TokenTree, + token_to_map: SyntaxToken, + ) -> Option<(SyntaxNode, SyntaxToken)> { + self.imp.expand_hypothetical(actual_macro_call, hypothetical_args, token_to_map) + } + + pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { + self.imp.descend_into_macros(token) + } + + pub fn descend_node_at_offset( + &self, + node: &SyntaxNode, + offset: TextSize, + ) -> Option { + self.imp.descend_node_at_offset(node, offset).find_map(N::cast) + } + + pub fn original_range(&self, node: &SyntaxNode) -> FileRange { + self.imp.original_range(node) + } + + pub fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange { + self.imp.diagnostics_display_range(diagnostics) + } + + pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator + '_ { + self.imp.ancestors_with_macros(node) + } + + pub fn ancestors_at_offset_with_macros( + &self, + node: &SyntaxNode, + offset: TextSize, + ) -> impl Iterator + '_ { + self.imp.ancestors_at_offset_with_macros(node, offset) + } + + /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*, + /// search up until it is of the target AstNode type + pub fn find_node_at_offset_with_macros( + &self, + node: &SyntaxNode, + offset: TextSize, + ) -> Option { + self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) + } + + /// Find a AstNode by offset inside SyntaxNode, if it is inside *MacroCall*, + /// descend it and find again + pub fn find_node_at_offset_with_descend( + &self, + node: &SyntaxNode, + offset: TextSize, + ) -> Option { + if let Some(it) = find_node_at_offset(&node, offset) { + return Some(it); + } + + self.imp.descend_node_at_offset(node, offset).find_map(N::cast) + } + + pub fn type_of_expr(&self, expr: &ast::Expr) -> Option { + self.imp.type_of_expr(expr) + } + + pub fn type_of_pat(&self, pat: &ast::Pat) -> Option { + self.imp.type_of_pat(pat) + } + + pub fn type_of_self(&self, param: &ast::SelfParam) -> Option { + self.imp.type_of_self(param) + } + + pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option { + self.imp.resolve_method_call(call).map(Function::from) + } + + pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option { + self.imp.resolve_method_call_as_callable(call) + } + + pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option { + self.imp.resolve_field(field) + } + + pub fn resolve_record_field( + &self, + field: &ast::RecordExprField, + ) -> Option<(Field, Option)> { + self.imp.resolve_record_field(field) + } + + pub fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option { + self.imp.resolve_record_field_pat(field) + } + + pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option { + self.imp.resolve_macro_call(macro_call) + } + + pub fn resolve_path(&self, path: &ast::Path) -> Option { + self.imp.resolve_path(path) + } + + pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option { + self.imp.resolve_extern_crate(extern_crate) + } + + pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> 
Option { + self.imp.resolve_variant(record_lit).map(VariantDef::from) + } + + pub fn lower_path(&self, path: &ast::Path) -> Option { + self.imp.lower_path(path) + } + + pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option { + self.imp.resolve_bind_pat_to_const(pat) + } + + // FIXME: use this instead? + // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option; + + pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> { + self.imp.record_literal_missing_fields(literal) + } + + pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> { + self.imp.record_pattern_missing_fields(pattern) + } + + pub fn to_def(&self, src: &T) -> Option { + let src = self.imp.find_file(src.syntax().clone()).with_value(src).cloned(); + T::to_def(&self.imp, src) + } + + pub fn to_module_def(&self, file: FileId) -> Option { + self.imp.to_module_def(file) + } + + pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> { + self.imp.scope(node) + } + + pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> { + self.imp.scope_at_offset(node, offset) + } + + pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> { + self.imp.scope_for_def(def) + } + + pub fn assert_contains_node(&self, node: &SyntaxNode) { + self.imp.assert_contains_node(node) + } + + pub fn is_unsafe_method_call(&self, method_call_expr: ast::MethodCallExpr) -> bool { + self.imp.is_unsafe_method_call(method_call_expr) + } + + pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool { + self.imp.is_unsafe_ref_expr(ref_expr) + } + + pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool { + self.imp.is_unsafe_ident_pat(ident_pat) + } +} + +impl<'db> SemanticsImpl<'db> { + fn new(db: &'db dyn HirDatabase) -> Self { + SemanticsImpl { + db, + s2d_cache: Default::default(), + cache: Default::default(), + expansion_info_cache: Default::default(), + } + } + + fn parse(&self, file_id: FileId) -> ast::SourceFile { + let tree = self.db.parse(file_id).tree(); + self.cache(tree.syntax().clone(), file_id.into()); + tree + } + + fn expand(&self, macro_call: &ast::MacroCall) -> Option { + let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); + let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); + let file_id = sa.expand(self.db, macro_call)?; + let node = self.db.parse_or_expand(file_id)?; + self.cache(node.clone(), file_id); + Some(node) + } + + fn expand_hypothetical( + &self, + actual_macro_call: &ast::MacroCall, + hypothetical_args: &ast::TokenTree, + token_to_map: SyntaxToken, + ) -> Option<(SyntaxNode, SyntaxToken)> { + let macro_call = + self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call); + let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); + let krate = sa.resolver.krate()?; + let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { + sa.resolver.resolve_path_as_macro(self.db.upcast(), &path) + })?; + hir_expand::db::expand_hypothetical( + self.db.upcast(), + macro_call_id, + hypothetical_args, + token_to_map, + ) + } + + fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { + let _p = profile::span("descend_into_macros"); + let parent = token.parent(); + let parent = self.find_file(parent); + let sa = self.analyze2(parent.as_ref(), None); + + let token = successors(Some(parent.with_value(token)), |token| { + self.db.check_canceled(); + let macro_call = 
token.value.ancestors().find_map(ast::MacroCall::cast)?; + let tt = macro_call.token_tree()?; + if !tt.syntax().text_range().contains_range(token.value.text_range()) { + return None; + } + let file_id = sa.expand(self.db, token.with_value(¯o_call))?; + let token = self + .expansion_info_cache + .borrow_mut() + .entry(file_id) + .or_insert_with(|| file_id.expansion_info(self.db.upcast())) + .as_ref()? + .map_token_down(token.as_ref())?; + + self.cache(find_root(&token.value.parent()), token.file_id); + + Some(token) + }) + .last() + .unwrap(); + + token.value + } + + fn descend_node_at_offset( + &self, + node: &SyntaxNode, + offset: TextSize, + ) -> impl Iterator + '_ { + // Handle macro token cases + node.token_at_offset(offset) + .map(|token| self.descend_into_macros(token)) + .map(|it| self.ancestors_with_macros(it.parent())) + .flatten() + } + + fn original_range(&self, node: &SyntaxNode) -> FileRange { + let node = self.find_file(node.clone()); + original_range(self.db, node.as_ref()) + } + + fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange { + let src = diagnostics.display_source(); + let root = self.db.parse_or_expand(src.file_id).unwrap(); + let node = src.value.to_node(&root); + self.cache(root, src.file_id); + original_range(self.db, src.with_value(&node)) + } + + fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator + '_ { + let node = self.find_file(node); + node.ancestors_with_macros(self.db.upcast()).map(|it| it.value) + } + + fn ancestors_at_offset_with_macros( + &self, + node: &SyntaxNode, + offset: TextSize, + ) -> impl Iterator + '_ { + node.token_at_offset(offset) + .map(|token| self.ancestors_with_macros(token.parent())) + .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) + } + + fn type_of_expr(&self, expr: &ast::Expr) -> Option { + self.analyze(expr.syntax()).type_of_expr(self.db, &expr) + } + + fn type_of_pat(&self, pat: &ast::Pat) -> Option { + self.analyze(pat.syntax()).type_of_pat(self.db, &pat) + } + + fn type_of_self(&self, param: &ast::SelfParam) -> Option { + self.analyze(param.syntax()).type_of_self(self.db, ¶m) + } + + fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option { + self.analyze(call.syntax()).resolve_method_call(self.db, call) + } + + fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option { + // FIXME: this erases Substs + let func = self.resolve_method_call(call)?; + let ty = self.db.value_ty(func.into()); + let resolver = self.analyze(call.syntax()).resolver; + let ty = Type::new_with_resolver(self.db, &resolver, ty.value)?; + let mut res = ty.as_callable(self.db)?; + res.is_bound_method = true; + Some(res) + } + + fn resolve_field(&self, field: &ast::FieldExpr) -> Option { + self.analyze(field.syntax()).resolve_field(self.db, field) + } + + fn resolve_record_field(&self, field: &ast::RecordExprField) -> Option<(Field, Option)> { + self.analyze(field.syntax()).resolve_record_field(self.db, field) + } + + fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option { + self.analyze(field.syntax()).resolve_record_field_pat(self.db, field) + } + + fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option { + let sa = self.analyze(macro_call.syntax()); + let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); + sa.resolve_macro_call(self.db, macro_call) + } + + fn resolve_path(&self, path: &ast::Path) -> Option { + self.analyze(path.syntax()).resolve_path(self.db, path) + } + + fn 
resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option { + let krate = self.scope(extern_crate.syntax()).krate()?; + krate.dependencies(self.db).into_iter().find_map(|dep| { + if dep.name == extern_crate.name_ref()?.as_name() { + Some(dep.krate) + } else { + None + } + }) + } + + fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option { + self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit) + } + + fn lower_path(&self, path: &ast::Path) -> Option { + let src = self.find_file(path.syntax().clone()); + Path::from_src(path.clone(), &Hygiene::new(self.db.upcast(), src.file_id.into())) + } + + fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option { + self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) + } + + fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> { + self.analyze(literal.syntax()) + .record_literal_missing_fields(self.db, literal) + .unwrap_or_default() + } + + fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> { + self.analyze(pattern.syntax()) + .record_pattern_missing_fields(self.db, pattern) + .unwrap_or_default() + } + + fn with_ctx T, T>(&self, f: F) -> T { + let mut cache = self.s2d_cache.borrow_mut(); + let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache }; + f(&mut ctx) + } + + fn to_module_def(&self, file: FileId) -> Option { + self.with_ctx(|ctx| ctx.file_to_def(file)).map(Module::from) + } + + fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> { + let node = self.find_file(node.clone()); + let resolver = self.analyze2(node.as_ref(), None).resolver; + SemanticsScope { db: self.db, resolver } + } + + fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> { + let node = self.find_file(node.clone()); + let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver; + SemanticsScope { db: self.db, resolver } + } + + fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> { + let resolver = def.id.resolver(self.db.upcast()); + SemanticsScope { db: self.db, resolver } + } + + fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer { + let src = self.find_file(node.clone()); + self.analyze2(src.as_ref(), None) + } + + fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option) -> SourceAnalyzer { + let _p = profile::span("Semantics::analyze2"); + + let container = match self.with_ctx(|ctx| ctx.find_container(src)) { + Some(it) => it, + None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src), + }; + + let resolver = match container { + ChildContainer::DefWithBodyId(def) => { + return SourceAnalyzer::new_for_body(self.db, def, src, offset) + } + ChildContainer::TraitId(it) => it.resolver(self.db.upcast()), + ChildContainer::ImplId(it) => it.resolver(self.db.upcast()), + ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()), + ChildContainer::EnumId(it) => it.resolver(self.db.upcast()), + ChildContainer::VariantId(it) => it.resolver(self.db.upcast()), + ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()), + ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()), + }; + SourceAnalyzer::new_for_resolver(resolver, src) + } + + fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) { + assert!(root_node.parent().is_none()); + let mut cache = self.cache.borrow_mut(); + let prev = cache.insert(root_node, file_id); + assert!(prev == None || prev == Some(file_id)) + } + + fn assert_contains_node(&self, node: &SyntaxNode) { + 
self.find_file(node.clone()); + } + + fn lookup(&self, root_node: &SyntaxNode) -> Option { + let cache = self.cache.borrow(); + cache.get(root_node).copied() + } + + fn find_file(&self, node: SyntaxNode) -> InFile { + let root_node = find_root(&node); + let file_id = self.lookup(&root_node).unwrap_or_else(|| { + panic!( + "\n\nFailed to lookup {:?} in this Semantics.\n\ + Make sure to use only query nodes, derived from this instance of Semantics.\n\ + root node: {:?}\n\ + known nodes: {}\n\n", + node, + root_node, + self.cache + .borrow() + .keys() + .map(|it| format!("{:?}", it)) + .collect::>() + .join(", ") + ) + }); + InFile::new(file_id, node) + } + + pub fn is_unsafe_method_call(&self, method_call_expr: ast::MethodCallExpr) -> bool { + method_call_expr + .expr() + .and_then(|expr| { + let field_expr = if let ast::Expr::FieldExpr(field_expr) = expr { + field_expr + } else { + return None; + }; + let ty = self.type_of_expr(&field_expr.expr()?)?; + if !ty.is_packed(self.db) { + return None; + } + + let func = self.resolve_method_call(&method_call_expr).map(Function::from)?; + let is_unsafe = func.has_self_param(self.db) + && matches!(func.params(self.db).first(), Some(TypeRef::Reference(..))); + Some(is_unsafe) + }) + .unwrap_or(false) + } + + pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool { + ref_expr + .expr() + .and_then(|expr| { + let field_expr = match expr { + ast::Expr::FieldExpr(field_expr) => field_expr, + _ => return None, + }; + let expr = field_expr.expr()?; + self.type_of_expr(&expr) + }) + // Binding a reference to a packed type is possibly unsafe. + .map(|ty| ty.is_packed(self.db)) + .unwrap_or(false) + + // FIXME This needs layout computation to be correct. It will highlight + // more than it should with the current implementation. + } + + pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool { + if !ident_pat.ref_token().is_some() { + return false; + } + + ident_pat + .syntax() + .parent() + .and_then(|parent| { + // `IdentPat` can live under `RecordPat` directly under `RecordPatField` or + // `RecordPatFieldList`. `RecordPatField` also lives under `RecordPatFieldList`, + // so this tries to lookup the `IdentPat` anywhere along that structure to the + // `RecordPat` so we can get the containing type. + let record_pat = ast::RecordPatField::cast(parent.clone()) + .and_then(|record_pat| record_pat.syntax().parent()) + .or_else(|| Some(parent.clone())) + .and_then(|parent| { + ast::RecordPatFieldList::cast(parent)? + .syntax() + .parent() + .and_then(ast::RecordPat::cast) + }); + + // If this doesn't match a `RecordPat`, fallback to a `LetStmt` to see if + // this is initialized from a `FieldExpr`. + if let Some(record_pat) = record_pat { + self.type_of_pat(&ast::Pat::RecordPat(record_pat)) + } else if let Some(let_stmt) = ast::LetStmt::cast(parent) { + let field_expr = match let_stmt.initializer()? { + ast::Expr::FieldExpr(field_expr) => field_expr, + _ => return None, + }; + + self.type_of_expr(&field_expr.expr()?) + } else { + None + } + }) + // Binding a reference to a packed type is possibly unsafe. + .map(|ty| ty.is_packed(self.db)) + .unwrap_or(false) + } +} + +pub trait ToDef: AstNode + Clone { + type Def; + + fn to_def(sema: &SemanticsImpl, src: InFile) -> Option; +} + +macro_rules! 
to_def_impls { + ($(($def:path, $ast:path, $meth:ident)),* ,) => {$( + impl ToDef for $ast { + type Def = $def; + fn to_def(sema: &SemanticsImpl, src: InFile) -> Option { + sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from) + } + } + )*} +} + +to_def_impls![ + (crate::Module, ast::Module, module_to_def), + (crate::Struct, ast::Struct, struct_to_def), + (crate::Enum, ast::Enum, enum_to_def), + (crate::Union, ast::Union, union_to_def), + (crate::Trait, ast::Trait, trait_to_def), + (crate::ImplDef, ast::Impl, impl_to_def), + (crate::TypeAlias, ast::TypeAlias, type_alias_to_def), + (crate::Const, ast::Const, const_to_def), + (crate::Static, ast::Static, static_to_def), + (crate::Function, ast::Fn, fn_to_def), + (crate::Field, ast::RecordField, record_field_to_def), + (crate::Field, ast::TupleField, tuple_field_to_def), + (crate::EnumVariant, ast::Variant, enum_variant_to_def), + (crate::TypeParam, ast::TypeParam, type_param_to_def), + (crate::MacroDef, ast::MacroCall, macro_call_to_def), // this one is dubious, not all calls are macros + (crate::Local, ast::IdentPat, bind_pat_to_def), +]; + +fn find_root(node: &SyntaxNode) -> SyntaxNode { + node.ancestors().last().unwrap() +} + +#[derive(Debug)] +pub struct SemanticsScope<'a> { + pub db: &'a dyn HirDatabase, + resolver: Resolver, +} + +impl<'a> SemanticsScope<'a> { + pub fn module(&self) -> Option { + Some(Module { id: self.resolver.module()? }) + } + + pub fn krate(&self) -> Option { + Some(Crate { id: self.resolver.krate()? }) + } + + /// Note: `FxHashSet` should be treated as an opaque type, passed into `Type + // FIXME: rename to visible_traits to not repeat scope? + pub fn traits_in_scope(&self) -> FxHashSet { + let resolver = &self.resolver; + resolver.traits_in_scope(self.db.upcast()) + } + + pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) { + let resolver = &self.resolver; + + resolver.process_all_names(self.db.upcast(), &mut |name, def| { + let def = match def { + resolver::ScopeDef::PerNs(it) => { + let items = ScopeDef::all_items(it); + for item in items { + f(name.clone(), item); + } + return; + } + resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()), + resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()), + resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }), + resolver::ScopeDef::Local(pat_id) => { + let parent = resolver.body_owner().unwrap().into(); + ScopeDef::Local(Local { parent, pat_id }) + } + }; + f(name, def) + }) + } + + pub fn resolve_hir_path(&self, path: &Path) -> Option { + resolve_hir_path(self.db, &self.resolver, path) + } + + /// Resolves a path where we know it is a qualifier of another path. + /// + /// For example, if we have: + /// ``` + /// mod my { + /// pub mod foo { + /// struct Bar; + /// } + /// + /// pub fn foo() {} + /// } + /// ``` + /// then we know that `foo` in `my::foo::Bar` refers to the module, not the function. + pub fn resolve_hir_path_qualifier(&self, path: &Path) -> Option { + resolve_hir_path_qualifier(self.db, &self.resolver, path) + } +} + +// FIXME: Change `HasSource` trait to work with `Semantics` and remove this? 
+pub fn original_range(db: &dyn HirDatabase, node: InFile<&SyntaxNode>) -> FileRange { + if let Some(range) = original_range_opt(db, node) { + let original_file = range.file_id.original_file(db.upcast()); + if range.file_id == original_file.into() { + return FileRange { file_id: original_file, range: range.value }; + } + + log::error!("Fail to mapping up more for {:?}", range); + return FileRange { file_id: range.file_id.original_file(db.upcast()), range: range.value }; + } + + // Fall back to whole macro call + if let Some(expansion) = node.file_id.expansion_info(db.upcast()) { + if let Some(call_node) = expansion.call_node() { + return FileRange { + file_id: call_node.file_id.original_file(db.upcast()), + range: call_node.value.text_range(), + }; + } + } + + FileRange { file_id: node.file_id.original_file(db.upcast()), range: node.value.text_range() } +} + +fn original_range_opt( + db: &dyn HirDatabase, + node: InFile<&SyntaxNode>, +) -> Option> { + let expansion = node.file_id.expansion_info(db.upcast())?; + + // the input node has only one token ? + let single = skip_trivia_token(node.value.first_token()?, Direction::Next)? + == skip_trivia_token(node.value.last_token()?, Direction::Prev)?; + + Some(node.value.descendants().find_map(|it| { + let first = skip_trivia_token(it.first_token()?, Direction::Next)?; + let first = ascend_call_token(db, &expansion, node.with_value(first))?; + + let last = skip_trivia_token(it.last_token()?, Direction::Prev)?; + let last = ascend_call_token(db, &expansion, node.with_value(last))?; + + if (!single && first == last) || (first.file_id != last.file_id) { + return None; + } + + Some(first.with_value(first.value.text_range().cover(last.value.text_range()))) + })?) +} + +fn ascend_call_token( + db: &dyn HirDatabase, + expansion: &ExpansionInfo, + token: InFile, +) -> Option> { + let (mapped, origin) = expansion.map_token_up(token.as_ref())?; + if origin != Origin::Call { + return None; + } + if let Some(info) = mapped.file_id.expansion_info(db.upcast()) { + return ascend_call_token(db, &info, mapped); + } + Some(mapped) +} diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs new file mode 100644 index 000000000..5918b9541 --- /dev/null +++ b/crates/hir/src/semantics/source_to_def.rs @@ -0,0 +1,275 @@ +//! Maps *syntax* of various definitions to their semantic ids. 
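This module is the inverse direction of `HasSource`: syntax in, semantic id out. A hedged sketch of how it surfaces through `Semantics::to_def` (the helper itself is an invention, generic over the database):

```rust
// Sketch only; the wrapper function is not part of this patch.
use base_db::FileId;
use hir::{db::HirDatabase, Semantics};
use syntax::{ast, AstNode};

fn semantic_structs<DB: HirDatabase>(db: &DB, file_id: FileId) -> Vec<hir::Struct> {
    let sema = Semantics::new(db);
    let file = sema.parse(file_id);
    file.syntax()
        .descendants()
        .filter_map(ast::Struct::cast)
        // Each `ast::Struct` is looked up via the source-to-def tables cached
        // in `SourceToDefCache`; nodes that fail to map (for instance under an
        // inactive `cfg`) yield `None` and are skipped.
        .filter_map(|strukt| sema.to_def(&strukt))
        .collect()
}
```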
+ +use base_db::FileId; +use hir_def::{ + child_by_source::ChildBySource, + dyn_map::DynMap, + expr::PatId, + keys::{self, Key}, + ConstId, DefWithBodyId, EnumId, EnumVariantId, FieldId, FunctionId, GenericDefId, ImplId, + ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, VariantId, +}; +use hir_expand::{name::AsName, AstId, MacroDefKind}; +use rustc_hash::FxHashMap; +use stdx::impl_from; +use syntax::{ + ast::{self, NameOwner}, + match_ast, AstNode, SyntaxNode, +}; + +use crate::{db::HirDatabase, InFile, MacroDefId}; + +pub(super) type SourceToDefCache = FxHashMap; + +pub(super) struct SourceToDefCtx<'a, 'b> { + pub(super) db: &'b dyn HirDatabase, + pub(super) cache: &'a mut SourceToDefCache, +} + +impl SourceToDefCtx<'_, '_> { + pub(super) fn file_to_def(&mut self, file: FileId) -> Option { + let _p = profile::span("SourceBinder::to_module_def"); + let (krate, local_id) = self.db.relevant_crates(file).iter().find_map(|&crate_id| { + let crate_def_map = self.db.crate_def_map(crate_id); + let local_id = crate_def_map.modules_for_file(file).next()?; + Some((crate_id, local_id)) + })?; + Some(ModuleId { krate, local_id }) + } + + pub(super) fn module_to_def(&mut self, src: InFile) -> Option { + let _p = profile::span("module_to_def"); + let parent_declaration = src + .as_ref() + .map(|it| it.syntax()) + .cloned() + .ancestors_with_macros(self.db.upcast()) + .skip(1) + .find_map(|it| { + let m = ast::Module::cast(it.value.clone())?; + Some(it.with_value(m)) + }); + + let parent_module = match parent_declaration { + Some(parent_declaration) => self.module_to_def(parent_declaration), + None => { + let file_id = src.file_id.original_file(self.db.upcast()); + self.file_to_def(file_id) + } + }?; + + let child_name = src.value.name()?.as_name(); + let def_map = self.db.crate_def_map(parent_module.krate); + let child_id = *def_map[parent_module.local_id].children.get(&child_name)?; + Some(ModuleId { krate: parent_module.krate, local_id: child_id }) + } + + pub(super) fn trait_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::TRAIT) + } + pub(super) fn impl_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::IMPL) + } + pub(super) fn fn_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::FUNCTION) + } + pub(super) fn struct_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::STRUCT) + } + pub(super) fn enum_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::ENUM) + } + pub(super) fn union_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::UNION) + } + pub(super) fn static_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::STATIC) + } + pub(super) fn const_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::CONST) + } + pub(super) fn type_alias_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::TYPE_ALIAS) + } + pub(super) fn record_field_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::RECORD_FIELD) + } + pub(super) fn tuple_field_to_def(&mut self, src: InFile) -> Option { + self.to_def(src, keys::TUPLE_FIELD) + } + pub(super) fn enum_variant_to_def( + &mut self, + src: InFile, + ) -> Option { + self.to_def(src, keys::VARIANT) + } + pub(super) fn bind_pat_to_def( + &mut self, + src: InFile, + ) -> Option<(DefWithBodyId, PatId)> { + let container = self.find_pat_container(src.as_ref().map(|it| it.syntax()))?; + let (_body, source_map) = self.db.body_with_source_map(container); + let src = 
src.map(ast::Pat::from); + let pat_id = source_map.node_pat(src.as_ref())?; + Some((container, pat_id)) + } + + fn to_def( + &mut self, + src: InFile, + key: Key, + ) -> Option { + let container = self.find_container(src.as_ref().map(|it| it.syntax()))?; + let db = self.db; + let dyn_map = + &*self.cache.entry(container).or_insert_with(|| container.child_by_source(db)); + dyn_map[key].get(&src).copied() + } + + pub(super) fn type_param_to_def(&mut self, src: InFile) -> Option { + let container: ChildContainer = + self.find_type_param_container(src.as_ref().map(|it| it.syntax()))?.into(); + let db = self.db; + let dyn_map = + &*self.cache.entry(container).or_insert_with(|| container.child_by_source(db)); + dyn_map[keys::TYPE_PARAM].get(&src).copied() + } + + // FIXME: use DynMap as well? + pub(super) fn macro_call_to_def(&mut self, src: InFile) -> Option { + let kind = MacroDefKind::Declarative; + let file_id = src.file_id.original_file(self.db.upcast()); + let krate = self.file_to_def(file_id)?.krate; + let file_ast_id = self.db.ast_id_map(src.file_id).ast_id(&src.value); + let ast_id = Some(AstId::new(src.file_id, file_ast_id)); + Some(MacroDefId { krate: Some(krate), ast_id, kind, local_inner: false }) + } + + pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option { + for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) { + let res: ChildContainer = match_ast! { + match (container.value) { + ast::Module(it) => { + let def = self.module_to_def(container.with_value(it))?; + def.into() + }, + ast::Trait(it) => { + let def = self.trait_to_def(container.with_value(it))?; + def.into() + }, + ast::Impl(it) => { + let def = self.impl_to_def(container.with_value(it))?; + def.into() + }, + ast::Fn(it) => { + let def = self.fn_to_def(container.with_value(it))?; + DefWithBodyId::from(def).into() + }, + ast::Struct(it) => { + let def = self.struct_to_def(container.with_value(it))?; + VariantId::from(def).into() + }, + ast::Enum(it) => { + let def = self.enum_to_def(container.with_value(it))?; + def.into() + }, + ast::Union(it) => { + let def = self.union_to_def(container.with_value(it))?; + VariantId::from(def).into() + }, + ast::Static(it) => { + let def = self.static_to_def(container.with_value(it))?; + DefWithBodyId::from(def).into() + }, + ast::Const(it) => { + let def = self.const_to_def(container.with_value(it))?; + DefWithBodyId::from(def).into() + }, + ast::TypeAlias(it) => { + let def = self.type_alias_to_def(container.with_value(it))?; + def.into() + }, + _ => continue, + } + }; + return Some(res); + } + + let def = self.file_to_def(src.file_id.original_file(self.db.upcast()))?; + Some(def.into()) + } + + fn find_type_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option { + for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) { + let res: GenericDefId = match_ast! 
{ + match (container.value) { + ast::Fn(it) => self.fn_to_def(container.with_value(it))?.into(), + ast::Struct(it) => self.struct_to_def(container.with_value(it))?.into(), + ast::Enum(it) => self.enum_to_def(container.with_value(it))?.into(), + ast::Trait(it) => self.trait_to_def(container.with_value(it))?.into(), + ast::TypeAlias(it) => self.type_alias_to_def(container.with_value(it))?.into(), + ast::Impl(it) => self.impl_to_def(container.with_value(it))?.into(), + _ => continue, + } + }; + return Some(res); + } + None + } + + fn find_pat_container(&mut self, src: InFile<&SyntaxNode>) -> Option { + for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) { + let res: DefWithBodyId = match_ast! { + match (container.value) { + ast::Const(it) => self.const_to_def(container.with_value(it))?.into(), + ast::Static(it) => self.static_to_def(container.with_value(it))?.into(), + ast::Fn(it) => self.fn_to_def(container.with_value(it))?.into(), + _ => continue, + } + }; + return Some(res); + } + None + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub(crate) enum ChildContainer { + DefWithBodyId(DefWithBodyId), + ModuleId(ModuleId), + TraitId(TraitId), + ImplId(ImplId), + EnumId(EnumId), + VariantId(VariantId), + TypeAliasId(TypeAliasId), + /// XXX: this might be the same def as, for example an `EnumId`. However, + /// here the children generic parameters, and not, eg enum variants. + GenericDefId(GenericDefId), +} +impl_from! { + DefWithBodyId, + ModuleId, + TraitId, + ImplId, + EnumId, + VariantId, + TypeAliasId, + GenericDefId + for ChildContainer +} + +impl ChildContainer { + fn child_by_source(self, db: &dyn HirDatabase) -> DynMap { + let db = db.upcast(); + match self { + ChildContainer::DefWithBodyId(it) => it.child_by_source(db), + ChildContainer::ModuleId(it) => it.child_by_source(db), + ChildContainer::TraitId(it) => it.child_by_source(db), + ChildContainer::ImplId(it) => it.child_by_source(db), + ChildContainer::EnumId(it) => it.child_by_source(db), + ChildContainer::VariantId(it) => it.child_by_source(db), + ChildContainer::TypeAliasId(_) => DynMap::default(), + ChildContainer::GenericDefId(it) => it.child_by_source(db), + } + } +} diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs new file mode 100644 index 000000000..8750584f9 --- /dev/null +++ b/crates/hir/src/source_analyzer.rs @@ -0,0 +1,534 @@ +//! Lookup hir elements using positions in the source code. This is a lossy +//! transformation: in general, a single source might correspond to several +//! modules, functions, etc, due to macros, cfgs and `#[path=]` attributes on +//! modules. +//! +//! So, this modules should not be used during hir construction, it exists +//! purely for "IDE needs". 
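A small illustration of the lossiness mentioned in the doc comment above: with `#[path]` attributes (likewise with macros and `cfg`), one file can back several distinct HIR items, so a bare source position is ambiguous until a concrete module is picked.

```rust
// `shared.rs` is included twice, so the same source file is the definition
// site of two different HIR modules.
#[path = "shared.rs"]
mod server_api;
#[path = "shared.rs"]
mod client_api;
```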
+use std::{iter::once, sync::Arc}; + +use hir_def::{ + body::{ + scope::{ExprScopes, ScopeId}, + Body, BodySourceMap, + }, + expr::{ExprId, Pat, PatId}, + resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, + AsMacroCall, DefWithBodyId, FieldId, FunctionId, LocalFieldId, VariantId, +}; +use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile}; +use hir_ty::{ + diagnostics::{record_literal_missing_fields, record_pattern_missing_fields}, + InferenceResult, Substs, Ty, +}; +use syntax::{ + ast::{self, AstNode}, + SyntaxNode, TextRange, TextSize, +}; + +use crate::{ + db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Field, Function, Local, + MacroDef, ModPath, ModuleDef, Path, PathKind, Static, Struct, Trait, Type, TypeAlias, + TypeParam, +}; +use base_db::CrateId; + +/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of +/// original source files. It should not be used inside the HIR itself. +#[derive(Debug)] +pub(crate) struct SourceAnalyzer { + file_id: HirFileId, + pub(crate) resolver: Resolver, + body: Option>, + body_source_map: Option>, + infer: Option>, + scopes: Option>, +} + +impl SourceAnalyzer { + pub(crate) fn new_for_body( + db: &dyn HirDatabase, + def: DefWithBodyId, + node: InFile<&SyntaxNode>, + offset: Option, + ) -> SourceAnalyzer { + let (body, source_map) = db.body_with_source_map(def); + let scopes = db.expr_scopes(def); + let scope = match offset { + None => scope_for(&scopes, &source_map, node), + Some(offset) => scope_for_offset(db, &scopes, &source_map, node.with_value(offset)), + }; + let resolver = resolver_for_scope(db.upcast(), def, scope); + SourceAnalyzer { + resolver, + body: Some(body), + body_source_map: Some(source_map), + infer: Some(db.infer(def)), + scopes: Some(scopes), + file_id: node.file_id, + } + } + + pub(crate) fn new_for_resolver( + resolver: Resolver, + node: InFile<&SyntaxNode>, + ) -> SourceAnalyzer { + SourceAnalyzer { + resolver, + body: None, + body_source_map: None, + infer: None, + scopes: None, + file_id: node.file_id, + } + } + + fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option { + let src = match expr { + ast::Expr::MacroCall(call) => { + self.expand_expr(db, InFile::new(self.file_id, call.clone()))? 
+ } + _ => InFile::new(self.file_id, expr.clone()), + }; + let sm = self.body_source_map.as_ref()?; + sm.node_expr(src.as_ref()) + } + + fn pat_id(&self, pat: &ast::Pat) -> Option { + // FIXME: macros, see `expr_id` + let src = InFile { file_id: self.file_id, value: pat }; + self.body_source_map.as_ref()?.node_pat(src) + } + + fn expand_expr( + &self, + db: &dyn HirDatabase, + expr: InFile, + ) -> Option> { + let macro_file = self.body_source_map.as_ref()?.node_macro_file(expr.as_ref())?; + let expanded = db.parse_or_expand(macro_file)?; + + let res = match ast::MacroCall::cast(expanded.clone()) { + Some(call) => self.expand_expr(db, InFile::new(macro_file, call))?, + _ => InFile::new(macro_file, ast::Expr::cast(expanded)?), + }; + Some(res) + } + + pub(crate) fn type_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option { + let expr_id = self.expr_id(db, expr)?; + let ty = self.infer.as_ref()?[expr_id].clone(); + Type::new_with_resolver(db, &self.resolver, ty) + } + + pub(crate) fn type_of_pat(&self, db: &dyn HirDatabase, pat: &ast::Pat) -> Option { + let pat_id = self.pat_id(pat)?; + let ty = self.infer.as_ref()?[pat_id].clone(); + Type::new_with_resolver(db, &self.resolver, ty) + } + + pub(crate) fn type_of_self( + &self, + db: &dyn HirDatabase, + param: &ast::SelfParam, + ) -> Option { + let src = InFile { file_id: self.file_id, value: param }; + let pat_id = self.body_source_map.as_ref()?.node_self_param(src)?; + let ty = self.infer.as_ref()?[pat_id].clone(); + Type::new_with_resolver(db, &self.resolver, ty) + } + + pub(crate) fn resolve_method_call( + &self, + db: &dyn HirDatabase, + call: &ast::MethodCallExpr, + ) -> Option { + let expr_id = self.expr_id(db, &call.clone().into())?; + self.infer.as_ref()?.method_resolution(expr_id) + } + + pub(crate) fn resolve_field( + &self, + db: &dyn HirDatabase, + field: &ast::FieldExpr, + ) -> Option { + let expr_id = self.expr_id(db, &field.clone().into())?; + self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into()) + } + + pub(crate) fn resolve_record_field( + &self, + db: &dyn HirDatabase, + field: &ast::RecordExprField, + ) -> Option<(Field, Option)> { + let expr = field.expr()?; + let expr_id = self.expr_id(db, &expr)?; + let local = if field.name_ref().is_some() { + None + } else { + let local_name = field.field_name()?.as_name(); + let path = ModPath::from_segments(PathKind::Plain, once(local_name)); + match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) { + Some(ValueNs::LocalBinding(pat_id)) => { + Some(Local { pat_id, parent: self.resolver.body_owner()? 
}) + } + _ => None, + } + }; + let struct_field = self.infer.as_ref()?.record_field_resolution(expr_id)?; + Some((struct_field.into(), local)) + } + + pub(crate) fn resolve_record_field_pat( + &self, + _db: &dyn HirDatabase, + field: &ast::RecordPatField, + ) -> Option { + let pat_id = self.pat_id(&field.pat()?)?; + let struct_field = self.infer.as_ref()?.record_field_pat_resolution(pat_id)?; + Some(struct_field.into()) + } + + pub(crate) fn resolve_macro_call( + &self, + db: &dyn HirDatabase, + macro_call: InFile<&ast::MacroCall>, + ) -> Option { + let hygiene = Hygiene::new(db.upcast(), macro_call.file_id); + let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &hygiene))?; + self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(|it| it.into()) + } + + pub(crate) fn resolve_bind_pat_to_const( + &self, + db: &dyn HirDatabase, + pat: &ast::IdentPat, + ) -> Option { + let pat_id = self.pat_id(&pat.clone().into())?; + let body = self.body.as_ref()?; + let path = match &body[pat_id] { + Pat::Path(path) => path, + _ => return None, + }; + let res = resolve_hir_path(db, &self.resolver, &path)?; + match res { + PathResolution::Def(def) => Some(def), + _ => None, + } + } + + pub(crate) fn resolve_path( + &self, + db: &dyn HirDatabase, + path: &ast::Path, + ) -> Option { + if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { + let expr_id = self.expr_id(db, &path_expr.into())?; + if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { + return Some(PathResolution::AssocItem(assoc.into())); + } + if let Some(VariantId::EnumVariantId(variant)) = + self.infer.as_ref()?.variant_resolution_for_expr(expr_id) + { + return Some(PathResolution::Def(ModuleDef::EnumVariant(variant.into()))); + } + } + + if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) { + let pat_id = self.pat_id(&path_pat.into())?; + if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) { + return Some(PathResolution::AssocItem(assoc.into())); + } + if let Some(VariantId::EnumVariantId(variant)) = + self.infer.as_ref()?.variant_resolution_for_pat(pat_id) + { + return Some(PathResolution::Def(ModuleDef::EnumVariant(variant.into()))); + } + } + + if let Some(rec_lit) = path.syntax().parent().and_then(ast::RecordExpr::cast) { + let expr_id = self.expr_id(db, &rec_lit.into())?; + if let Some(VariantId::EnumVariantId(variant)) = + self.infer.as_ref()?.variant_resolution_for_expr(expr_id) + { + return Some(PathResolution::Def(ModuleDef::EnumVariant(variant.into()))); + } + } + + if let Some(rec_pat) = path.syntax().parent().and_then(ast::RecordPat::cast) { + let pat_id = self.pat_id(&rec_pat.into())?; + if let Some(VariantId::EnumVariantId(variant)) = + self.infer.as_ref()?.variant_resolution_for_pat(pat_id) + { + return Some(PathResolution::Def(ModuleDef::EnumVariant(variant.into()))); + } + } + + // This must be a normal source file rather than macro file. + let hir_path = Path::from_src(path.clone(), &Hygiene::new(db.upcast(), self.file_id))?; + + // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we + // trying to resolve foo::bar. 
+ if let Some(outer_path) = path.syntax().parent().and_then(ast::Path::cast) { + if let Some(qualifier) = outer_path.qualifier() { + if path == &qualifier { + return resolve_hir_path_qualifier(db, &self.resolver, &hir_path); + } + } + } + + resolve_hir_path(db, &self.resolver, &hir_path) + } + + pub(crate) fn record_literal_missing_fields( + &self, + db: &dyn HirDatabase, + literal: &ast::RecordExpr, + ) -> Option> { + let krate = self.resolver.krate()?; + let body = self.body.as_ref()?; + let infer = self.infer.as_ref()?; + + let expr_id = self.expr_id(db, &literal.clone().into())?; + let substs = match &infer.type_of_expr[expr_id] { + Ty::Apply(a_ty) => &a_ty.parameters, + _ => return None, + }; + + let (variant, missing_fields, _exhaustive) = + record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?; + let res = self.missing_fields(db, krate, substs, variant, missing_fields); + Some(res) + } + + pub(crate) fn record_pattern_missing_fields( + &self, + db: &dyn HirDatabase, + pattern: &ast::RecordPat, + ) -> Option> { + let krate = self.resolver.krate()?; + let body = self.body.as_ref()?; + let infer = self.infer.as_ref()?; + + let pat_id = self.pat_id(&pattern.clone().into())?; + let substs = match &infer.type_of_pat[pat_id] { + Ty::Apply(a_ty) => &a_ty.parameters, + _ => return None, + }; + + let (variant, missing_fields, _exhaustive) = + record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?; + let res = self.missing_fields(db, krate, substs, variant, missing_fields); + Some(res) + } + + fn missing_fields( + &self, + db: &dyn HirDatabase, + krate: CrateId, + substs: &Substs, + variant: VariantId, + missing_fields: Vec, + ) -> Vec<(Field, Type)> { + let field_types = db.field_types(variant); + + missing_fields + .into_iter() + .map(|local_id| { + let field = FieldId { parent: variant, local_id }; + let ty = field_types[local_id].clone().subst(substs); + (field.into(), Type::new_with_resolver_inner(db, krate, &self.resolver, ty)) + }) + .collect() + } + + pub(crate) fn expand( + &self, + db: &dyn HirDatabase, + macro_call: InFile<&ast::MacroCall>, + ) -> Option { + let krate = self.resolver.krate()?; + let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { + self.resolver.resolve_path_as_macro(db.upcast(), &path) + })?; + Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64) + } + + pub(crate) fn resolve_variant( + &self, + db: &dyn HirDatabase, + record_lit: ast::RecordExpr, + ) -> Option { + let infer = self.infer.as_ref()?; + let expr_id = self.expr_id(db, &record_lit.into())?; + infer.variant_resolution_for_expr(expr_id) + } +} + +fn scope_for( + scopes: &ExprScopes, + source_map: &BodySourceMap, + node: InFile<&SyntaxNode>, +) -> Option { + node.value + .ancestors() + .filter_map(ast::Expr::cast) + .filter_map(|it| source_map.node_expr(InFile::new(node.file_id, &it))) + .find_map(|it| scopes.scope_for(it)) +} + +fn scope_for_offset( + db: &dyn HirDatabase, + scopes: &ExprScopes, + source_map: &BodySourceMap, + offset: InFile, +) -> Option { + scopes + .scope_by_expr() + .iter() + .filter_map(|(id, scope)| { + let source = source_map.expr_syntax(*id).ok()?; + // FIXME: correctly handle macro expansion + if source.file_id != offset.file_id { + return None; + } + let root = source.file_syntax(db.upcast()); + let node = source.value.to_node(&root); + Some((node.syntax().text_range(), scope)) + }) + // find containing scope + .min_by_key(|(expr_range, _scope)| { + ( + !(expr_range.start() <= offset.value && offset.value <= 
expr_range.end()), + expr_range.len(), + ) + }) + .map(|(expr_range, scope)| { + adjust(db, scopes, source_map, expr_range, offset).unwrap_or(*scope) + }) +} + +// XXX: during completion, cursor might be outside of any particular +// expression. Try to figure out the correct scope... +fn adjust( + db: &dyn HirDatabase, + scopes: &ExprScopes, + source_map: &BodySourceMap, + expr_range: TextRange, + offset: InFile, +) -> Option { + let child_scopes = scopes + .scope_by_expr() + .iter() + .filter_map(|(id, scope)| { + let source = source_map.expr_syntax(*id).ok()?; + // FIXME: correctly handle macro expansion + if source.file_id != offset.file_id { + return None; + } + let root = source.file_syntax(db.upcast()); + let node = source.value.to_node(&root); + Some((node.syntax().text_range(), scope)) + }) + .filter(|&(range, _)| { + range.start() <= offset.value && expr_range.contains_range(range) && range != expr_range + }); + + child_scopes + .max_by(|&(r1, _), &(r2, _)| { + if r1.contains_range(r2) { + std::cmp::Ordering::Greater + } else if r2.contains_range(r1) { + std::cmp::Ordering::Less + } else { + r1.start().cmp(&r2.start()) + } + }) + .map(|(_ptr, scope)| *scope) +} + +pub(crate) fn resolve_hir_path( + db: &dyn HirDatabase, + resolver: &Resolver, + path: &Path, +) -> Option { + let types = + resolver.resolve_path_in_type_ns_fully(db.upcast(), path.mod_path()).map(|ty| match ty { + TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), + TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }), + TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => { + PathResolution::Def(Adt::from(it).into()) + } + TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), + TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()), + TypeNs::BuiltinType(it) => PathResolution::Def(it.into()), + TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), + }); + + let body_owner = resolver.body_owner(); + let values = + resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| { + let res = match val { + ValueNs::LocalBinding(pat_id) => { + let var = Local { parent: body_owner?.into(), pat_id }; + PathResolution::Local(var) + } + ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), + ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), + ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), + ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), + ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), + ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()), + }; + Some(res) + }); + + let items = resolver + .resolve_module_path_in_items(db.upcast(), path.mod_path()) + .take_types() + .map(|it| PathResolution::Def(it.into())); + + types.or(values).or(items).or_else(|| { + resolver + .resolve_path_as_macro(db.upcast(), path.mod_path()) + .map(|def| PathResolution::Macro(def.into())) + }) +} + +/// Resolves a path where we know it is a qualifier of another path. +/// +/// For example, if we have: +/// ``` +/// mod my { +/// pub mod foo { +/// struct Bar; +/// } +/// +/// pub fn foo() {} +/// } +/// ``` +/// then we know that `foo` in `my::foo::Bar` refers to the module, not the function. 
+pub(crate) fn resolve_hir_path_qualifier( + db: &dyn HirDatabase, + resolver: &Resolver, + path: &Path, +) -> Option { + let items = resolver + .resolve_module_path_in_items(db.upcast(), path.mod_path()) + .take_types() + .map(|it| PathResolution::Def(it.into())); + + if items.is_some() { + return items; + } + + resolver.resolve_path_in_type_ns_fully(db.upcast(), path.mod_path()).map(|ty| match ty { + TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), + TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }), + TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => PathResolution::Def(Adt::from(it).into()), + TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), + TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()), + TypeNs::BuiltinType(it) => PathResolution::Def(it.into()), + TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), + }) +} -- cgit v1.2.3 From 9664c57e60ec5662b3e8b063324d9ab7879d5570 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Thu, 13 Aug 2020 23:52:14 +0200 Subject: Make hygiene private to hir --- crates/hir/src/code_model.rs | 7 +++++++ crates/hir/src/lib.rs | 8 ++++++-- crates/hir/src/semantics.rs | 16 +++++++++++++--- 3 files changed, 26 insertions(+), 5 deletions(-) (limited to 'crates/hir/src') diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs index 8ffb9e99b..5dc3ae3b1 100644 --- a/crates/hir/src/code_model.rs +++ b/crates/hir/src/code_model.rs @@ -883,6 +883,13 @@ where } impl AssocItem { + pub fn name(self, db: &dyn HirDatabase) -> Option { + match self { + AssocItem::Function(it) => Some(it.name(db)), + AssocItem::Const(it) => it.name(db), + AssocItem::TypeAlias(it) => Some(it.name(db)), + } + } pub fn module(self, db: &dyn HirDatabase) -> Module { match self { AssocItem::Function(f) => f.module(db), diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 24a0f6b4b..4ae2bd085 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -52,8 +52,12 @@ pub use hir_def::{ type_ref::{Mutability, TypeRef}, }; pub use hir_expand::{ - hygiene::Hygiene, name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, - MacroDefId, /* FIXME */ + name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, /* FIXME */ MacroDefId, MacroFile, Origin, }; pub use hir_ty::display::HirDisplay; + +// These are negative re-exports: pub using these names is forbidden, they +// should remain private to hir internals. 
+#[allow(unused)] +use hir_expand::hygiene::Hygiene; diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 1467d825d..d8beac98a 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -502,18 +502,19 @@ impl<'db> SemanticsImpl<'db> { fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> { let node = self.find_file(node.clone()); let resolver = self.analyze2(node.as_ref(), None).resolver; - SemanticsScope { db: self.db, resolver } + SemanticsScope { db: self.db, file_id: node.file_id, resolver } } fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> { let node = self.find_file(node.clone()); let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver; - SemanticsScope { db: self.db, resolver } + SemanticsScope { db: self.db, file_id: node.file_id, resolver } } fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> { + let file_id = self.db.lookup_intern_trait(def.id).id.file_id; let resolver = def.id.resolver(self.db.upcast()); - SemanticsScope { db: self.db, resolver } + SemanticsScope { db: self.db, file_id, resolver } } fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer { @@ -709,6 +710,7 @@ fn find_root(node: &SyntaxNode) -> SyntaxNode { #[derive(Debug)] pub struct SemanticsScope<'a> { pub db: &'a dyn HirDatabase, + file_id: HirFileId, resolver: Resolver, } @@ -752,6 +754,14 @@ impl<'a> SemanticsScope<'a> { }) } + /// Resolve a path as-if it was written at the given scope. This is + /// necessary a heuristic, as it doesn't take hygiene into account. + pub fn resolve_hypothetical(&self, path: &ast::Path) -> Option { + let hygiene = Hygiene::new(self.db.upcast(), self.file_id); + let path = Path::from_src(path.clone(), &hygiene)?; + self.resolve_hir_path(&path) + } + pub fn resolve_hir_path(&self, path: &Path) -> Option { resolve_hir_path(self.db, &self.resolver, path) } -- cgit v1.2.3
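For context on the second commit: keeping `Hygiene` private works because callers can now ask `SemanticsScope::resolve_hypothetical` how a path would resolve at a given position, without touching hygiene themselves. A hedged sketch of such a caller (an assist, say); the function name is invented and only `parse`, `scope_at_offset`, and `resolve_hypothetical` come from this patch.

```rust
// Illustrative caller, generic over any `HirDatabase` implementor.
use base_db::FileId;
use hir::{db::HirDatabase, PathResolution, Semantics};
use syntax::{ast, AstNode, TextSize};

fn path_resolves_to_item<DB: HirDatabase>(
    db: &DB,
    file_id: FileId,
    offset: TextSize,
    path: &ast::Path,
) -> bool {
    let sema = Semantics::new(db);
    let file = sema.parse(file_id);
    // Scope at the given offset; the candidate path is then resolved as if it
    // were written there (hygiene is deliberately ignored, per the doc comment).
    let scope = sema.scope_at_offset(file.syntax(), offset);
    matches!(scope.resolve_hypothetical(path), Some(PathResolution::Def(_)))
}
```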