42 files changed, 925 insertions, 483 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 4ae16c726..8f1a8401f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -801,9 +801,9 @@ dependencies = [

 [[package]]
 name = "libmimalloc-sys"
-version = "0.1.18"
+version = "0.1.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "82151ff13433c4d403cb15d0e6fbda14b24d65bd1a5b33f7d52ec983cc00752d"
+checksum = "4782d78362f319e7568577c015dc0cb9c6d0fe43be120018a54ac4c6dd89888a"
 dependencies = [
 "cmake",
 ]
@@ -906,9 +906,9 @@ dependencies = [

 [[package]]
 name = "mimalloc"
-version = "0.1.22"
+version = "0.1.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a5d2c9cb18f9cdc6d88f4aca6d3d8ea89c4c8202d6facfc7e56efdee97b80fa"
+checksum = "7857c87270957ce1d20aea0f39b383551518986b6e480a0291e3b8ec5f9ab158"
 dependencies = [
 "libmimalloc-sys",
 ]
@@ -1661,9 +1661,9 @@ dependencies = [

 [[package]]
 name = "thread_local"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8208a331e1cb318dd5bd76951d2b8fc48ca38a69f5f4e4af1b6a9f8c6236915"
+checksum = "8018d24e04c95ac8790716a5987d0fec4f8b27249ffa0f7d33f1369bdfb88cbd"
 dependencies = [
 "once_cell",
 ]
diff --git a/crates/assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/assists/src/handlers/extract_struct_from_enum_variant.rs
index e3ef04932..5c7678b53 100644
--- a/crates/assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/crates/assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -151,8 +151,8 @@ fn insert_import(
 ctx.config.insert_use.prefix_kind,
 );
 if let Some(mut mod_path) = mod_path {
-mod_path.segments.pop();
-mod_path.segments.push(variant_hir_name.clone());
+mod_path.pop_segment();
+mod_path.push_segment(variant_hir_name.clone());
 let scope = ImportScope::find_insert_use_container(scope_node, &ctx.sema)?;
 *rewriter += insert_use(&scope, mod_path_to_ast(&mod_path), ctx.config.insert_use.merge);
 }
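The assist above is one of several callers updated for the encapsulated `ModPath` API in this commit: direct access to the `segments` field is replaced by `segments()`, `push_segment`, and `pop_segment`. A minimal standalone sketch of that accessor shape, using a plain `String` for names rather than the real `hir_def::path` types:

```rust
// Simplified stand-in for the ModPath API used above (not the real type):
// the segment list is private and only reachable through accessors, so the
// internal representation can change without touching call sites.
type Name = String;

struct ModPath {
    segments: Vec<Name>,
}

impl ModPath {
    fn segments(&self) -> &[Name] {
        &self.segments
    }
    fn push_segment(&mut self, segment: Name) {
        self.segments.push(segment);
    }
    fn pop_segment(&mut self) -> Option<Name> {
        self.segments.pop()
    }
}

fn main() {
    // Mirrors the assist above: drop the parent segment, append the variant name.
    let mut path = ModPath { segments: vec!["module".into(), "Enum".into()] };
    path.pop_segment();
    path.push_segment("Variant".into());
    assert_eq!(path.segments().join("::"), "module::Variant");
}
```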
diff --git a/crates/completion/src/completions/flyimport.rs b/crates/completion/src/completions/flyimport.rs
index 9c6a5a40c..c9f928483 100644
--- a/crates/completion/src/completions/flyimport.rs
+++ b/crates/completion/src/completions/flyimport.rs
@@ -175,7 +175,7 @@ fn compute_fuzzy_completion_order_key(
 user_input_lowercased: &str,
 ) -> usize {
 mark::hit!(certain_fuzzy_order_test);
-let proposed_import_name = match proposed_mod_path.segments.last() {
+let proposed_import_name = match proposed_mod_path.segments().last() {
 Some(name) => name.to_string().to_lowercase(),
 None => return usize::MAX,
 };
diff --git a/crates/completion/src/completions/unqualified_path.rs b/crates/completion/src/completions/unqualified_path.rs
index 5d62fab97..e2482f959 100644
--- a/crates/completion/src/completions/unqualified_path.rs
+++ b/crates/completion/src/completions/unqualified_path.rs
@@ -63,7 +63,7 @@ fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &T
 if let Some(path) = module.find_use_path(ctx.db, ModuleDef::from(variant)) {
 // Variants with trivial paths are already added by the existing completion logic,
 // so we should avoid adding these twice
-if path.segments.len() > 1 {
+if path.segments().len() > 1 {
 acc.add_qualified_enum_variant(ctx, variant, path);
 }
 }
diff --git a/crates/completion/src/item.rs b/crates/completion/src/item.rs
index 8ec4ac65e..884711f11 100644
--- a/crates/completion/src/item.rs
+++ b/crates/completion/src/item.rs
@@ -332,9 +332,9 @@ impl Builder {
 label = format!("{} ({})", label, import_to_add.import_path);
 } else {
 let mut import_path_without_last_segment = import_to_add.import_path.to_owned();
-let _ = import_path_without_last_segment.segments.pop();
+let _ = import_path_without_last_segment.pop_segment();

-if !import_path_without_last_segment.segments.is_empty() {
+if !import_path_without_last_segment.segments().is_empty() {
 lookup = lookup.or_else(|| Some(label.clone()));
 insert_text = insert_text.or_else(|| Some(label.clone()));
 label = format!("{}::{}", import_path_without_last_segment, label);
diff --git a/crates/completion/src/render.rs b/crates/completion/src/render.rs
index e11b881ca..eddaaa6f3 100644
--- a/crates/completion/src/render.rs
+++ b/crates/completion/src/render.rs
@@ -57,7 +57,7 @@ pub(crate) fn render_resolution_with_import<'a>(
 ScopeDef::ModuleDef(ModuleDef::Function(f)) => f.name(ctx.completion.db).to_string(),
 ScopeDef::ModuleDef(ModuleDef::Const(c)) => c.name(ctx.completion.db)?.to_string(),
 ScopeDef::ModuleDef(ModuleDef::TypeAlias(t)) => t.name(ctx.completion.db).to_string(),
-_ => import_edit.import_path.segments.last()?.to_string(),
+_ => import_edit.import_path.segments().last()?.to_string(),
 };
 Render::new(ctx).render_resolution(local_name, Some(import_edit), resolution).map(|mut item| {
 item.completion_kind = CompletionKind::Magic;
diff --git a/crates/completion/src/render/enum_variant.rs b/crates/completion/src/render/enum_variant.rs
index adcddebd1..9214193b4 100644
--- a/crates/completion/src/render/enum_variant.rs
+++ b/crates/completion/src/render/enum_variant.rs
@@ -45,8 +45,8 @@ impl<'a> EnumRender<'a> {
 let (qualified_name, short_qualified_name) = match &path {
 Some(path) => {
 let full = path.to_string();
-let short =
-path.segments[path.segments.len().saturating_sub(2)..].iter().join("::");
+let segments = path.segments();
+let short = segments[segments.len().saturating_sub(2)..].iter().join("::");
 (full, short)
 }
 None => (name.to_string(), name.to_string()),
diff --git a/crates/hir_def/src/adt.rs b/crates/hir_def/src/adt.rs
index 06f0b9b18..ed36c3109 100644
--- a/crates/hir_def/src/adt.rs
+++ b/crates/hir_def/src/adt.rs
@@ -351,7 +351,7 @@ fn lower_field(
 ) -> FieldData {
 FieldData {
 name: field.name.clone(),
-type_ref: field.type_ref.clone(),
+type_ref: item_tree[field.type_ref].clone(),
 visibility: item_tree[override_visibility.unwrap_or(field.visibility)].clone(),
 }
 }
diff --git a/crates/hir_def/src/body.rs b/crates/hir_def/src/body.rs
index b9ecf22fa..9a432f7d1 100644
--- a/crates/hir_def/src/body.rs
+++ b/crates/hir_def/src/body.rs
@@ -33,7 +33,7 @@ use crate::{
 nameres::DefMap,
 path::{ModPath, Path},
 src::HasSource,
-AsMacroCall, DefWithBodyId, HasModule, Lookup, ModuleId,
+AsMacroCall, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleId,
 };

 /// A subset of Expander that only deals with cfg attributes. We only need it to
@@ -46,10 +46,10 @@ pub(crate) struct CfgExpander {

 pub(crate) struct Expander {
 cfg_expander: CfgExpander,
-crate_def_map: Arc<DefMap>,
+def_map: Arc<DefMap>,
 current_file_id: HirFileId,
 ast_id_map: Arc<AstIdMap>,
-module: ModuleId,
+module: LocalModuleId,
 recursion_limit: usize,
 }

@@ -91,10 +91,10 @@ impl Expander {
 let ast_id_map = db.ast_id_map(current_file_id);
 Expander {
 cfg_expander,
-crate_def_map,
+def_map: crate_def_map,
 current_file_id,
 ast_id_map,
-module,
+module: module.local_id,
 recursion_limit: 0,
 }
 }
@@ -102,7 +102,6 @@ impl Expander {
 pub(crate) fn enter_expand<T: ast::AstNode>(
 &mut self,
 db: &dyn DefDatabase,
-local_scope: Option<&ItemScope>,
 macro_call: ast::MacroCall,
 ) -> ExpandResult<Option<(Mark, T)>> {
 if self.recursion_limit + 1 > EXPANSION_RECURSION_LIMIT {
@@ -112,18 +111,12 @@ impl Expander {

 let macro_call = InFile::new(self.current_file_id, &macro_call);

-let resolver = |path: ModPath| -> Option<MacroDefId> {
-if let Some(local_scope) = local_scope {
-if let Some(def) = path.as_ident().and_then(|n| local_scope.get_legacy_macro(n)) {
-return Some(def);
-}
-}
-self.resolve_path_as_macro(db, &path)
-};
+let resolver =
+|path: ModPath| -> Option<MacroDefId> { self.resolve_path_as_macro(db, &path) };

 let mut err = None;
 let call_id =
-macro_call.as_call_id_with_errors(db, self.crate_def_map.krate(), resolver, &mut |e| {
+macro_call.as_call_id_with_errors(db, self.def_map.krate(), resolver, &mut |e| {
 err.get_or_insert(e);
 });
 let call_id = match call_id {
@@ -204,10 +197,7 @@ impl Expander {
 }

 fn resolve_path_as_macro(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<MacroDefId> {
-self.crate_def_map
-.resolve_path(db, self.module.local_id, path, BuiltinShadowMode::Other)
-.0
-.take_macros()
+self.def_map.resolve_path(db, self.module, path, BuiltinShadowMode::Other).0.take_macros()
 }

 fn ast_id<N: AstNode>(&self, item: &N) -> AstId<N> {
diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs
index 209965fca..28b11cdde 100644
--- a/crates/hir_def/src/body/lower.rs
+++ b/crates/hir_def/src/body/lower.rs
@@ -1,7 +1,7 @@
 //! Transforms `ast::Expr` into an equivalent `hir_def::expr::Expr`
 //! representation.

-use std::{any::type_name, sync::Arc};
+use std::{any::type_name, mem, sync::Arc};

 use either::Either;
 use hir_expand::{
@@ -36,8 +36,8 @@ use crate::{
 item_tree::{ItemTree, ItemTreeId, ItemTreeNode},
 path::{GenericArgs, Path},
 type_ref::{Mutability, Rawness, TypeRef},
-AdtId, ConstLoc, ContainerId, DefWithBodyId, EnumLoc, FunctionLoc, Intern, ModuleDefId,
-StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc,
+AdtId, BlockLoc, ConstLoc, ContainerId, DefWithBodyId, EnumLoc, FunctionLoc, Intern,
+ModuleDefId, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc,
 };

 use super::{diagnostics::BodyDiagnostic, ExprSource, PatSource};
@@ -152,8 +152,8 @@ impl ExprCollector<'_> {
 fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId {
 self.make_expr(expr, Err(SyntheticSyntax))
 }
-fn empty_block(&mut self) -> ExprId {
-self.alloc_expr_desugared(Expr::Block { statements: Vec::new(), tail: None, label: None })
+fn unit(&mut self) -> ExprId {
+self.alloc_expr_desugared(Expr::Tuple { exprs: Vec::new() })
 }
 fn missing_expr(&mut self) -> ExprId {
 self.alloc_expr_desugared(Expr::Missing)
@@ -222,7 +222,7 @@ impl ExprCollector<'_> {
 MatchArm { pat, expr: then_branch, guard: None },
 MatchArm {
 pat: placeholder_pat,
-expr: else_branch.unwrap_or_else(|| self.empty_block()),
+expr: else_branch.unwrap_or_else(|| self.unit()),
 guard: None,
 },
 ];
@@ -561,7 +561,7 @@ impl ExprCollector<'_> {
 let outer_file = self.expander.current_file_id;

 let macro_call = self.expander.to_source(AstPtr::new(&e));
-let res = self.expander.enter_expand(self.db, Some(&self.body.item_scope), e);
+let res = self.expander.enter_expand(self.db, e);

 match &res.err {
 Some(ExpandError::UnresolvedProcMacro) => {
@@ -697,12 +697,30 @@ impl ExprCollector<'_> {
 }

 fn collect_block(&mut self, block: ast::BlockExpr) -> ExprId {
-let syntax_node_ptr = AstPtr::new(&block.clone().into());
+let ast_id = self.expander.ast_id(&block);
701 | let block_loc = | ||
+BlockLoc { ast_id, module: self.expander.def_map.module_id(self.expander.module) };
+let block_id = self.db.intern_block(block_loc);
+let opt_def_map = self.db.block_def_map(block_id);
+let has_def_map = opt_def_map.is_some();
+let def_map = opt_def_map.unwrap_or_else(|| self.expander.def_map.clone());
+let module = if has_def_map { def_map.root() } else { self.expander.module };
+let prev_def_map = mem::replace(&mut self.expander.def_map, def_map);
+let prev_local_module = mem::replace(&mut self.expander.module, module);
+
 self.collect_stmts_items(block.statements());
 let statements =
 block.statements().filter_map(|s| self.collect_stmt(s)).flatten().collect();
 let tail = block.tail_expr().map(|e| self.collect_expr(e));
-self.alloc_expr(Expr::Block { statements, tail, label: None }, syntax_node_ptr)
+let syntax_node_ptr = AstPtr::new(&block.clone().into());
+let expr_id = self.alloc_expr(
+Expr::Block { id: block_id, statements, tail, label: None },
+syntax_node_ptr,
+);
+
+self.expander.def_map = prev_def_map;
+self.expander.module = prev_local_module;
+expr_id
 }

 fn collect_stmts_items(&mut self, stmts: ast::AstChildren<ast::Stmt>) {
@@ -794,7 +812,7 @@ impl ExprCollector<'_> {
 }
 Either::Right(e) => {
 let mac = MacroDefId {
-krate: self.expander.module.krate,
+krate: self.expander.def_map.krate(),
 ast_id: Some(self.expander.ast_id(&e)),
 kind: MacroDefKind::Declarative,
 local_inner: false,
@@ -832,9 +850,9 @@ impl ExprCollector<'_> {
 if annotation == BindingAnnotation::Unannotated && subpat.is_none() {
 // This could also be a single-segment path pattern. To
 // decide that, we need to try resolving the name.
-let (resolved, _) = self.expander.crate_def_map.resolve_path(
+let (resolved, _) = self.expander.def_map.resolve_path(
 self.db,
-self.expander.module.local_id,
+self.expander.module,
 &name.clone().into(),
 BuiltinShadowMode::Other,
 );
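`collect_block` above temporarily swaps the expander's `def_map` and `module` to the block-local ones while the block body is collected, then restores the previous values. A self-contained sketch of that swap-and-restore pattern with `mem::replace`, using toy types rather than the real `Expander`:

```rust
use std::mem;

// Toy stand-ins for the expander state swapped in `collect_block` above.
#[derive(Clone, Debug, PartialEq)]
struct DefMap(&'static str);

struct Expander {
    def_map: DefMap,
    module: usize,
}

impl Expander {
    // Run `f` with substituted def map + module, then restore the previous
    // state, mirroring the `mem::replace` calls in `collect_block`.
    fn with_block_scope<T>(
        &mut self,
        def_map: DefMap,
        module: usize,
        f: impl FnOnce(&mut Self) -> T,
    ) -> T {
        let prev_def_map = mem::replace(&mut self.def_map, def_map);
        let prev_module = mem::replace(&mut self.module, module);
        let result = f(self);
        self.def_map = prev_def_map;
        self.module = prev_module;
        result
    }
}

fn main() {
    let mut expander = Expander { def_map: DefMap("crate"), module: 0 };
    expander.with_block_scope(DefMap("block"), 1, |exp| {
        assert_eq!(exp.def_map, DefMap("block"));
        assert_eq!(exp.module, 1);
    });
    // Once the block has been collected, the previous state is back.
    assert_eq!(expander.def_map, DefMap("crate"));
    assert_eq!(expander.module, 0);
}
```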
diff --git a/crates/hir_def/src/body/tests.rs b/crates/hir_def/src/body/tests.rs
index 2e5d0a01e..a92134ba7 100644
--- a/crates/hir_def/src/body/tests.rs
+++ b/crates/hir_def/src/body/tests.rs
@@ -1,7 +1,10 @@
-use base_db::{fixture::WithFixture, SourceDatabase};
+mod block;
+
+use base_db::{fixture::WithFixture, FilePosition, SourceDatabase};
+use expect_test::Expect;
 use test_utils::mark;

-use crate::{test_db::TestDB, ModuleDefId};
+use crate::{test_db::TestDB, BlockId, ModuleDefId};

 use super::*;

@@ -31,6 +34,114 @@ fn check_diagnostics(ra_fixture: &str) {
 db.check_diagnostics();
 }

+fn block_def_map_at(ra_fixture: &str) -> String {
+let (db, position) = crate::test_db::TestDB::with_position(ra_fixture);
+
+let krate = db.crate_graph().iter().next().unwrap();
+let def_map = db.crate_def_map(krate);
+
+let mut block =
+block_at_pos(&db, &def_map, position).expect("couldn't find enclosing function or block");
+loop {
+let def_map = db.block_def_map(block).unwrap_or_else(|| def_map.clone());
+let new_block = block_at_pos(&db, &def_map, position);
+match new_block {
+Some(new_block) => {
+assert_ne!(block, new_block);
+block = new_block;
+}
+None => {
+return def_map.dump(&db);
+}
+}
+}
+}
+
+fn block_at_pos(db: &dyn DefDatabase, def_map: &DefMap, position: FilePosition) -> Option<BlockId> {
+// Find the smallest (innermost) function containing the cursor.
+let mut size = None;
+let mut fn_def = None;
+for (_, module) in def_map.modules() {
+let file_id = module.definition_source(db).file_id;
+if file_id != position.file_id.into() {
+continue;
+}
+let root = db.parse_or_expand(file_id).unwrap();
+let ast_map = db.ast_id_map(file_id);
+let item_tree = db.item_tree(file_id);
+for decl in module.scope.declarations() {
+if let ModuleDefId::FunctionId(it) = decl {
+let ast = ast_map.get(item_tree[it.lookup(db).id.value].ast_id).to_node(&root);
+let range = ast.syntax().text_range();
+
+if !range.contains(position.offset) {
+continue;
+}
+
+let new_size = match size {
+None => range.len(),
+Some(size) => {
+if range.len() < size {
+range.len()
+} else {
+size
+}
+}
+};
+if size != Some(new_size) {
+size = Some(new_size);
+fn_def = Some(it);
+}
+}
+}
+}
+
+let (body, source_map) = db.body_with_source_map(fn_def?.into());
+
+// Now find the smallest encompassing block expression in the function body.
+let mut size = None;
+let mut block_id = None;
+for (expr_id, expr) in body.exprs.iter() {
+if let Expr::Block { id, .. } = expr {
+if let Ok(ast) = source_map.expr_syntax(expr_id) {
+if ast.file_id != position.file_id.into() {
+continue;
+}
+
+let root = db.parse_or_expand(ast.file_id).unwrap();
+let ast = ast.value.to_node(&root);
+let range = ast.syntax().text_range();
+
+if !range.contains(position.offset) {
+continue;
+}
+
+let new_size = match size {
+None => range.len(),
+Some(size) => {
+if range.len() < size {
+range.len()
+} else {
+size
+}
+}
+};
+if size != Some(new_size) {
+size = Some(new_size);
+block_id = Some(*id);
+}
+}
+}
+}
+
+Some(block_id.expect("can't find block containing cursor"))
+}
+
+fn check_at(ra_fixture: &str, expect: Expect) {
+let actual = block_def_map_at(ra_fixture);
+expect.assert_eq(&actual);
+}
+
 #[test]
 fn your_stack_belongs_to_me() {
 mark::check!(your_stack_belongs_to_me);
diff --git a/crates/hir_def/src/nameres/tests/block.rs b/crates/hir_def/src/body/tests/block.rs
index 6cc659513..062560a70 100644
--- a/crates/hir_def/src/nameres/tests/block.rs
+++ b/crates/hir_def/src/body/tests/block.rs
@@ -1,4 +1,5 @@
 use super::*;
+use expect_test::expect;

 #[test]
 fn inner_item_smoke() {
@@ -13,6 +14,7 @@ fn outer() {
 expect![[r#"
 block scope
 inner: v
+
 crate
 inner: t
 outer: v
@@ -37,6 +39,7 @@ fn outer() {
 CrateStruct: t v
 SelfStruct: t v
 Struct: t v
+
 crate
 Struct: t v
 outer: v
@@ -61,6 +64,7 @@ fn outer() {
 block scope
 imported: t v
 name: v
+
 crate
 name: t
 outer: v
@@ -87,9 +91,11 @@ fn outer() {
 inner1: t
 inner2: v
 outer: v
+
 block scope
 inner: v
 inner1: t
+
 crate
 outer: v
 "#]],
@@ -112,6 +118,7 @@ struct Struct {}
 expect![[r#"
 block scope
 Struct: t
+
 crate
 Struct: t
 module: t
@@ -142,6 +149,7 @@ fn f() {
 expect![[r#"
 block scope
 Hit: t
+
 crate
 f: v
 "#]],
@@ -176,11 +184,47 @@ pub mod mark {
 expect![[r#"
 block scope
 Hit: t
+
 block scope
 nested: v
+
 crate
 f: v
 mark: t
 "#]],
 );
 }
+
+#[test]
+fn macro_resolve_legacy() {
+check_at(
+r#"
+//- /lib.rs
+mod module;
+
+//- /module.rs
+macro_rules! m {
+() => {
+struct Def {}
+};
+}
+
+fn f() {
+{
+m!();
+$0
+}
+}
+"#,
+expect![[r#"
+block scope
+Def: t
+
+crate
+module: t
+
+crate::module
+f: v
+"#]],
+)
+}
diff --git a/crates/hir_def/src/data.rs b/crates/hir_def/src/data.rs
index e7b7724f7..42fcca386 100644
--- a/crates/hir_def/src/data.rs
+++ b/crates/hir_def/src/data.rs
@@ -41,8 +41,8 @@ impl FunctionData {

 Arc::new(FunctionData {
 name: func.name.clone(),
-params: func.params.to_vec(),
-ret_type: func.ret_type.clone(),
+params: func.params.iter().map(|id| item_tree[*id].clone()).collect(),
+ret_type: item_tree[func.ret_type].clone(),
 attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()).clone(),
 has_self_param: func.has_self_param,
 has_body: func.has_body,
@@ -75,7 +75,7 @@ impl TypeAliasData {

 Arc::new(TypeAliasData {
 name: typ.name.clone(),
-type_ref: typ.type_ref.clone(),
+type_ref: typ.type_ref.map(|id| item_tree[id].clone()),
 visibility: item_tree[typ.visibility].clone(),
 is_extern: typ.is_extern,
 bounds: typ.bounds.to_vec(),
@@ -144,8 +144,8 @@ impl ImplData {

 let item_tree = db.item_tree(impl_loc.id.file_id);
 let impl_def = &item_tree[impl_loc.id.value];
-let target_trait = impl_def.target_trait.clone();
-let target_type = impl_def.target_type.clone();
+let target_trait = impl_def.target_trait.map(|id| item_tree[id].clone());
+let target_type = item_tree[impl_def.target_type].clone();
 let is_negative = impl_def.is_negative;
 let module_id = impl_loc.container.module(db);
 let container = AssocContainerId::ImplId(id);
@@ -182,7 +182,7 @@ impl ConstData {

 Arc::new(ConstData {
 name: konst.name.clone(),
-type_ref: konst.type_ref.clone(),
+type_ref: item_tree[konst.type_ref].clone(),
 visibility: item_tree[konst.visibility].clone(),
 })
 }
@@ -205,7 +205,7 @@ impl StaticData {

 Arc::new(StaticData {
 name: Some(statik.name.clone()),
-type_ref: statik.type_ref.clone(),
+type_ref: item_tree[statik.type_ref].clone(),
 visibility: item_tree[statik.visibility].clone(),
 mutable: statik.mutable,
 is_extern: statik.is_extern,
@@ -262,7 +262,7 @@ fn collect_items(
 let root = db.parse_or_expand(file_id).unwrap();
 let call = ast_id_map.get(call.ast_id).to_node(&root);

-if let Some((mark, mac)) = expander.enter_expand(db, None, call).value {
+if let Some((mark, mac)) = expander.enter_expand(db, call).value {
 let src: InFile<ast::MacroItems> = expander.to_source(mac);
 let item_tree = db.item_tree(src.file_id);
 let iter =
diff --git a/crates/hir_def/src/db.rs b/crates/hir_def/src/db.rs
index aef7e1f6c..6c01f1ed0 100644
--- a/crates/hir_def/src/db.rs
+++ b/crates/hir_def/src/db.rs
@@ -58,8 +58,23 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
 #[salsa::invoke(DefMap::crate_def_map_query)]
 fn crate_def_map_query(&self, krate: CrateId) -> Arc<DefMap>;

+/// Computes the block-level `DefMap`, returning `None` when `block` doesn't contain any inner
+/// items directly.
+///
+/// For example:
+///
+/// ```
+/// fn f() { // (0)
+/// { // (1)
+/// fn inner() {}
+/// }
+/// }
+/// ```
+///
+/// The `block_def_map` for block 0 would return `None`, while `block_def_map` of block 1 would
+/// return a `DefMap` containing `inner`.
 #[salsa::invoke(DefMap::block_def_map_query)]
-fn block_def_map(&self, block: BlockId) -> Arc<DefMap>;
+fn block_def_map(&self, block: BlockId) -> Option<Arc<DefMap>>;

 #[salsa::invoke(StructData::struct_data_query)]
 fn struct_data(&self, id: StructId) -> Arc<StructData>;
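With the new signature, `block_def_map` returning `None` means the block defines no inner items. Callers in this commit handle that in two ways: fall back to the surrounding map (as `collect_block` does) or treat a missing map as a bug (as `ModuleId::def_map` does further down). A small sketch of both call styles, with a toy `DefMap` standing in for the real salsa query:

```rust
use std::sync::Arc;

// Toy stand-in for the query: `None` means "this block has no inner items,
// keep using the surrounding DefMap". Not the real database API.
struct DefMap {
    name: &'static str,
}

fn block_def_map(has_inner_items: bool) -> Option<Arc<DefMap>> {
    has_inner_items.then(|| Arc::new(DefMap { name: "block" }))
}

fn main() {
    let crate_def_map = Arc::new(DefMap { name: "crate" });

    // Body-lowering style: fall back to the parent map when the block
    // contributes nothing of its own.
    let map = block_def_map(false).unwrap_or_else(|| crate_def_map.clone());
    assert_eq!(map.name, "crate");

    // `ModuleId::def_map` style: a module id that points into a block
    // implies the block-level map must exist, so a missing map is a bug.
    let map = block_def_map(true).expect("no block_def_map for this ModuleId");
    assert_eq!(map.name, "block");
}
```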
diff --git a/crates/hir_def/src/expr.rs b/crates/hir_def/src/expr.rs
index 5be838f4a..4d72eaeaf 100644
--- a/crates/hir_def/src/expr.rs
+++ b/crates/hir_def/src/expr.rs
@@ -20,6 +20,7 @@ use crate::{
 builtin_type::{BuiltinFloat, BuiltinInt},
 path::{GenericArgs, Path},
 type_ref::{Mutability, Rawness, TypeRef},
+BlockId,
 };

 pub type ExprId = Idx<Expr>;
@@ -56,6 +57,7 @@ pub enum Expr {
 else_branch: Option<ExprId>,
 },
 Block {
+id: BlockId,
 statements: Vec<Statement>,
 tail: Option<ExprId>,
 label: Option<LabelId>,
diff --git a/crates/hir_def/src/find_path.rs b/crates/hir_def/src/find_path.rs
index 94a1d567d..aa2c6e04e 100644
--- a/crates/hir_def/src/find_path.rs
+++ b/crates/hir_def/src/find_path.rs
@@ -36,13 +36,13 @@ const MAX_PATH_LEN: usize = 15;

 impl ModPath {
 fn starts_with_std(&self) -> bool {
-self.segments.first() == Some(&known::std)
+self.segments().first() == Some(&known::std)
 }

 // When std library is present, paths starting with `std::`
 // should be preferred over paths starting with `core::` and `alloc::`
 fn can_start_with_std(&self) -> bool {
-let first_segment = self.segments.first();
+let first_segment = self.segments().first();
 first_segment == Some(&known::alloc) || first_segment == Some(&known::core)
 }
 }
@@ -157,7 +157,7 @@ fn find_path_inner(
 if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() {
 if let Some(mut path) = find_path(db, ItemInNs::Types(variant.parent.into()), from) {
 let data = db.enum_data(variant.parent);
-path.segments.push(data.variants[variant.local_id].name.clone());
+path.push_segment(data.variants[variant.local_id].name.clone());
 return Some(path);
 }
 // If this doesn't work, it seems we have no way of referring to the
@@ -186,7 +186,7 @@ fn find_path_inner(
 best_path_len - 1,
 prefixed,
 ) {
-path.segments.push(name);
+path.push_segment(name);

 let new_path = if let Some(best_path) = best_path {
 select_best_path(best_path, path, prefer_no_std)
@@ -215,7 +215,7 @@ fn find_path_inner(
 prefixed,
 )?;
 mark::hit!(partially_imported);
-path.segments.push(info.path.segments.last().unwrap().clone());
+path.push_segment(info.path.segments.last().unwrap().clone());
 Some(path)
 })
 });
diff --git a/crates/hir_def/src/item_scope.rs b/crates/hir_def/src/item_scope.rs
index 2750e1c91..ee46c3330 100644
--- a/crates/hir_def/src/item_scope.rs
+++ b/crates/hir_def/src/item_scope.rs
@@ -8,6 +8,7 @@ use hir_expand::name::Name;
 use hir_expand::MacroDefKind;
 use once_cell::sync::Lazy;
 use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::format_to;
 use test_utils::mark;

 use crate::{
@@ -292,6 +293,30 @@ impl ItemScope {
 *vis = Visibility::Module(this_module);
 }
 }
+
+pub(crate) fn dump(&self, buf: &mut String) {
+let mut entries: Vec<_> = self.resolutions().collect();
+entries.sort_by_key(|(name, _)| name.clone());
+
+for (name, def) in entries {
+format_to!(buf, "{}:", name.map_or("_".to_string(), |name| name.to_string()));
+
+if def.types.is_some() {
+buf.push_str(" t");
+}
+if def.values.is_some() {
+buf.push_str(" v");
+}
+if def.macros.is_some() {
+buf.push_str(" m");
+}
+if def.is_none() {
+buf.push_str(" _");
+}
+
+buf.push('\n');
+}
+}
 }

 impl PerNs {
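`ItemScope::dump` prints one line per name with ` t`/` v`/` m` markers for the namespaces it occupies (or ` _` when none), which is exactly the format the block-scope test expectations in this commit check against. A simplified standalone version of the same formatting, using plain booleans instead of the real `PerNs`:

```rust
use std::fmt::Write;

// Simplified stand-in for a `PerNs` entry: which namespaces a name occupies.
struct PerNs {
    types: bool,
    values: bool,
    macros: bool,
}

// Mirrors the `dump` format above: `name:` followed by ` t`/` v`/` m`
// markers, or ` _` when the name resolves in no namespace at all.
fn dump(entries: &[(&str, PerNs)]) -> String {
    let mut buf = String::new();
    for (name, def) in entries {
        write!(buf, "{}:", name).unwrap();
        if def.types {
            buf.push_str(" t");
        }
        if def.values {
            buf.push_str(" v");
        }
        if def.macros {
            buf.push_str(" m");
        }
        if !(def.types || def.values || def.macros) {
            buf.push_str(" _");
        }
        buf.push('\n');
    }
    buf
}

fn main() {
    let out = dump(&[
        ("Struct", PerNs { types: true, values: true, macros: false }),
        ("f", PerNs { types: false, values: true, macros: false }),
    ]);
    assert_eq!(out, "Struct: t v\nf: v\n");
}
```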
diff --git a/crates/hir_def/src/item_tree.rs b/crates/hir_def/src/item_tree.rs
index 42d9f0947..3233b1957 100644
--- a/crates/hir_def/src/item_tree.rs
+++ b/crates/hir_def/src/item_tree.rs
@@ -24,7 +24,7 @@ use la_arena::{Arena, Idx, RawIdx};
 use profile::Count;
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use syntax::{ast, match_ast};
+use syntax::{ast, match_ast, SyntaxKind};
 use test_utils::mark;

 use crate::{
@@ -80,6 +80,10 @@ impl ItemTree {
 pub(crate) fn item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
 let _p = profile::span("item_tree_query").detail(|| format!("{:?}", file_id));
 let syntax = if let Some(node) = db.parse_or_expand(file_id) {
+if node.kind() == SyntaxKind::ERROR {
+// FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
+return Default::default();
+}
 node
 } else {
 return Default::default();
@@ -142,6 +146,7 @@ impl ItemTree {
 macro_defs,
 vis,
 generics,
+type_refs,
 inner_items,
 } = &mut **data;

@@ -165,6 +170,8 @@ impl ItemTree {

 vis.arena.shrink_to_fit();
 generics.arena.shrink_to_fit();
+type_refs.arena.shrink_to_fit();
+type_refs.map.shrink_to_fit();

 inner_items.shrink_to_fit();
 }
@@ -233,7 +240,7 @@ impl ItemVisibilities {
 fn alloc(&mut self, vis: RawVisibility) -> RawVisibilityId {
 match &vis {
 RawVisibility::Public => RawVisibilityId::PUB,
-RawVisibility::Module(path) if path.segments.is_empty() => match &path.kind {
+RawVisibility::Module(path) if path.segments().is_empty() => match &path.kind {
 PathKind::Super(0) => RawVisibilityId::PRIV,
 PathKind::Crate => RawVisibilityId::PUB_CRATE,
 _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()),
@@ -244,10 +251,8 @@ impl ItemVisibilities {
 }

 static VIS_PUB: RawVisibility = RawVisibility::Public;
-static VIS_PRIV: RawVisibility =
-RawVisibility::Module(ModPath { kind: PathKind::Super(0), segments: Vec::new() });
-static VIS_PUB_CRATE: RawVisibility =
-RawVisibility::Module(ModPath { kind: PathKind::Crate, segments: Vec::new() });
+static VIS_PRIV: RawVisibility = RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)));
+static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module(ModPath::from_kind(PathKind::Crate));

 #[derive(Default, Debug, Eq, PartialEq)]
 struct GenericParamsStorage {
@@ -275,6 +280,32 @@ static EMPTY_GENERICS: GenericParams = GenericParams {
 where_predicates: Vec::new(),
 };

+/// `TypeRef` interner.
+#[derive(Default, Debug, Eq, PartialEq)]
+struct TypeRefStorage {
+arena: Arena<Arc<TypeRef>>,
+map: FxHashMap<Arc<TypeRef>, Idx<Arc<TypeRef>>>,
+}
+
+impl TypeRefStorage {
+// Note: We lie about the `Idx<TypeRef>` to hide the interner details.
+
+fn intern(&mut self, ty: TypeRef) -> Idx<TypeRef> {
+if let Some(id) = self.map.get(&ty) {
+return Idx::from_raw(id.into_raw());
+}
+
+let ty = Arc::new(ty);
+let idx = self.arena.alloc(ty.clone());
+self.map.insert(ty, idx);
+Idx::from_raw(idx.into_raw())
+}
+
+fn lookup(&self, id: Idx<TypeRef>) -> &TypeRef {
+&self.arena[Idx::from_raw(id.into_raw())]
+}
+}
+
 #[derive(Default, Debug, Eq, PartialEq)]
 struct ItemTreeData {
 imports: Arena<Import>,
@@ -297,6 +328,7 @@ struct ItemTreeData {

 vis: ItemVisibilities,
 generics: GenericParamsStorage,
+type_refs: TypeRefStorage,

 inner_items: FxHashMap<FileAstId<ast::BlockExpr>, SmallVec<[ModItem; 1]>>,
 }
@@ -485,6 +517,14 @@ impl Index<GenericParamsId> for ItemTree {
 }
 }

+impl Index<Idx<TypeRef>> for ItemTree {
+type Output = TypeRef;
+
+fn index(&self, id: Idx<TypeRef>) -> &Self::Output {
+self.data().type_refs.lookup(id)
+}
+}
+
 impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree {
 type Output = N;
 fn index(&self, id: FileItemTreeId<N>) -> &N {
@@ -528,9 +568,9 @@ pub struct Function {
 /// Whether the function is located in an `extern` block (*not* whether it is an
 /// `extern "abi" fn`).
 pub is_extern: bool,
-pub params: Box<[TypeRef]>,
+pub params: Box<[Idx<TypeRef>]>,
 pub is_varargs: bool,
-pub ret_type: TypeRef,
+pub ret_type: Idx<TypeRef>,
 pub ast_id: FileAstId<ast::Fn>,
 }

@@ -577,7 +617,7 @@ pub struct Const {
 /// const _: () = ();
 pub name: Option<Name>,
 pub visibility: RawVisibilityId,
-pub type_ref: TypeRef,
+pub type_ref: Idx<TypeRef>,
 pub ast_id: FileAstId<ast::Const>,
 }

@@ -588,7 +628,7 @@ pub struct Static {
 pub mutable: bool,
 /// Whether the static is in an `extern` block.
 pub is_extern: bool,
-pub type_ref: TypeRef,
+pub type_ref: Idx<TypeRef>,
 pub ast_id: FileAstId<ast::Static>,
 }

@@ -605,8 +645,8 @@ pub struct Trait {
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct Impl {
 pub generic_params: GenericParamsId,
-pub target_trait: Option<TypeRef>,
-pub target_type: TypeRef,
+pub target_trait: Option<Idx<TypeRef>>,
+pub target_type: Idx<TypeRef>,
 pub is_negative: bool,
 pub items: Box<[AssocItem]>,
 pub ast_id: FileAstId<ast::Impl>,
@@ -619,7 +659,7 @@ pub struct TypeAlias {
 /// Bounds on the type alias itself. Only valid in trait declarations, eg. `type Assoc: Copy;`.
 pub bounds: Box<[TypeBound]>,
 pub generic_params: GenericParamsId,
-pub type_ref: Option<TypeRef>,
+pub type_ref: Option<Idx<TypeRef>>,
 pub is_extern: bool,
 pub ast_id: FileAstId<ast::TypeAlias>,
 }
@@ -802,6 +842,6 @@ pub enum Fields {
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct Field {
 pub name: Name,
-pub type_ref: TypeRef,
+pub type_ref: Idx<TypeRef>,
 pub visibility: RawVisibilityId,
 }
diff --git a/crates/hir_def/src/item_tree/lower.rs b/crates/hir_def/src/item_tree/lower.rs
index acc001add..8f2f0b340 100644
--- a/crates/hir_def/src/item_tree/lower.rs
+++ b/crates/hir_def/src/item_tree/lower.rs
@@ -183,6 +183,7 @@ impl Ctx {
 block_stack.push(self.source_ast_id_map.ast_id(&block));
 },
 ast::Item(item) => {
+// FIXME: This triggers for macro calls in expression position
 let mod_items = self.lower_mod_item(&item, true);
 let current_block = block_stack.last();
 if let (Some(mod_items), Some(block)) = (mod_items, current_block) {
@@ -363,6 +364,7 @@ impl Ctx {
 params.push(type_ref);
 }
 }
+let params = params.into_iter().map(|param| self.data().type_refs.intern(param)).collect();

 let mut is_varargs = false;
 if let Some(params) = func.param_list() {
@@ -384,6 +386,8 @@ impl Ctx {
 ret_type
 };

+let ret_type = self.data().type_refs.intern(ret_type);
+
 let has_body = func.body().is_some();

 let ast_id = self.source_ast_id_map.ast_id(func);
@@ -395,7 +399,7 @@ impl Ctx {
 has_body,
 is_unsafe: func.unsafe_token().is_some(),
 is_extern: false,
-params: params.into_boxed_slice(),
+params,
 is_varargs,
 ret_type,
 ast_id,
@@ -656,6 +660,7 @@ impl Ctx {
 generics.fill(&self.body_ctx, sm, node);
 // lower `impl Trait` in arguments
 for param in &*func.params {
+let param = self.data().type_refs.lookup(*param);
 generics.fill_implicit_impl_trait_args(param);
 }
 }
@@ -708,11 +713,15 @@ impl Ctx {
 self.data().vis.alloc(vis)
 }

-fn lower_type_ref(&self, type_ref: &ast::Type) -> TypeRef {
-TypeRef::from_ast(&self.body_ctx, type_ref.clone())
+fn lower_type_ref(&mut self, type_ref: &ast::Type) -> Idx<TypeRef> {
+let tyref = TypeRef::from_ast(&self.body_ctx, type_ref.clone());
+self.data().type_refs.intern(tyref)
 }
-fn lower_type_ref_opt(&self, type_ref: Option<ast::Type>) -> TypeRef {
-type_ref.map(|ty| self.lower_type_ref(&ty)).unwrap_or(TypeRef::Error)
+fn lower_type_ref_opt(&mut self, type_ref: Option<ast::Type>) -> Idx<TypeRef> {
+match type_ref.map(|ty| self.lower_type_ref(&ty)) {
+Some(it) => it,
+None => self.data().type_refs.intern(TypeRef::Error),
+}
 }

 /// Forces the visibility `vis` to be used for all items lowered during execution of `f`.
@@ -741,7 +750,8 @@ impl Ctx {

 fn desugar_future_path(orig: TypeRef) -> Path {
 let path = path![core::future::Future];
-let mut generic_args: Vec<_> = std::iter::repeat(None).take(path.segments.len() - 1).collect();
+let mut generic_args: Vec<_> =
+std::iter::repeat(None).take(path.segments().len() - 1).collect();
 let mut last = GenericArgs::empty();
 let binding =
 AssociatedTypeBinding { name: name![Output], type_ref: Some(orig), bounds: Vec::new() };
diff --git a/crates/hir_def/src/lib.rs b/crates/hir_def/src/lib.rs
index 42b50b5b7..b50923747 100644
--- a/crates/hir_def/src/lib.rs
+++ b/crates/hir_def/src/lib.rs
@@ -81,7 +81,13 @@ pub struct ModuleId {
 impl ModuleId {
 pub fn def_map(&self, db: &dyn db::DefDatabase) -> Arc<DefMap> {
 match self.block {
-Some(block) => db.block_def_map(block),
+Some(block) => {
+db.block_def_map(block).unwrap_or_else(|| {
+// NOTE: This should be unreachable - all `ModuleId`s come from their `DefMap`s,
+// so the `DefMap` here must exist.
+panic!("no `block_def_map` for `ModuleId` {:?}", self);
+})
+}
 None => db.crate_def_map(self.krate),
 }
 }
@@ -239,6 +245,7 @@ pub struct BlockId(salsa::InternId);
 #[derive(Debug, Hash, PartialEq, Eq, Clone)]
 pub struct BlockLoc {
 ast_id: AstId<ast::BlockExpr>,
+/// The containing module.
 module: ModuleId,
 }
 impl_intern!(BlockId, BlockLoc, intern_block, lookup_intern_block);
@@ -655,7 +662,7 @@ impl AsMacroCall for AstIdWithPath<ast::Item> {
 def.as_lazy_macro(
 db.upcast(),
 krate,
-MacroCallKind::Attr(self.ast_id, self.path.segments.last()?.to_string()),
+MacroCallKind::Attr(self.ast_id, self.path.segments().last()?.to_string()),
 )
 .into(),
 )
diff --git a/crates/hir_def/src/nameres.rs b/crates/hir_def/src/nameres.rs
index 0a15fc470..ad2e9bcac 100644
--- a/crates/hir_def/src/nameres.rs
+++ b/crates/hir_def/src/nameres.rs
@@ -73,7 +73,15 @@ use crate::{
 AstId, BlockId, BlockLoc, LocalModuleId, ModuleDefId, ModuleId,
 };

-/// Contains all top-level defs from a macro-expanded crate
+/// Contains the results of (early) name resolution.
+///
+/// A `DefMap` stores the module tree and the definitions that are in scope in every module after
+/// item-level macros have been expanded.
+///
+/// Every crate has a primary `DefMap` whose root is the crate's main file (`main.rs`/`lib.rs`),
+/// computed by the `crate_def_map` query. Additionally, every block expression introduces the
+/// opportunity to write arbitrary item and module hierarchies, and thus gets its own `DefMap` that
+/// is computed by the `block_def_map` query.
 #[derive(Debug, PartialEq, Eq)]
 pub struct DefMap {
 _c: Count<Self>,
@@ -91,11 +99,13 @@ pub struct DefMap {
 diagnostics: Vec<DefDiagnostic>,
 }

-#[derive(Debug, PartialEq, Eq)]
+/// For `DefMap`s computed for a block expression, this stores its location in the parent map.
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
 struct BlockInfo {
+/// The `BlockId` this `DefMap` was created from.
 block: BlockId,
-parent: Arc<DefMap>,
-parent_module: LocalModuleId,
+/// The containing module.
+parent: ModuleId,
 }

 impl std::ops::Index<LocalModuleId> for DefMap {
@@ -197,21 +207,25 @@ impl DefMap {
 Arc::new(def_map)
 }

-pub(crate) fn block_def_map_query(db: &dyn DefDatabase, block_id: BlockId) -> Arc<DefMap> {
+pub(crate) fn block_def_map_query(
+db: &dyn DefDatabase,
+block_id: BlockId,
+) -> Option<Arc<DefMap>> {
 let block: BlockLoc = db.lookup_intern_block(block_id);
-let parent = block.module.def_map(db);

-// FIXME: It would be good to just return the parent map when the block has no items, but
-// we rely on `def_map.block` in a few places, which is `Some` for the inner `DefMap`.
+let item_tree = db.item_tree(block.ast_id.file_id);
+if item_tree.inner_items_of_block(block.ast_id.value).is_empty() {
+return None;
+}

-let block_info =
-BlockInfo { block: block_id, parent, parent_module: block.module.local_id };
+let block_info = BlockInfo { block: block_id, parent: block.module };

-let mut def_map = DefMap::empty(block.module.krate, block_info.parent.edition);
+let parent_map = block.module.def_map(db);
+let mut def_map = DefMap::empty(block.module.krate, parent_map.edition);
 def_map.block = Some(block_info);

 let def_map = collector::collect_defs(db, def_map, Some(block.ast_id));
-Arc::new(def_map)
+Some(Arc::new(def_map))
 }

 fn empty(krate: CrateId, edition: Edition) -> DefMap {
@@ -275,9 +289,15 @@ impl DefMap {
 ModuleId { krate: self.krate, local_id, block }
 }

-pub(crate) fn crate_root(&self) -> ModuleId {
-let (root_map, _) = self.ancestor_maps(self.root).last().unwrap();
-root_map.module_id(root_map.root)
+pub(crate) fn crate_root(&self, db: &dyn DefDatabase) -> ModuleId {
+self.with_ancestor_maps(db, self.root, &mut |def_map, _module| {
+if def_map.block.is_none() {
+Some(def_map.module_id(def_map.root))
+} else {
+None
+}
+})
+.expect("DefMap chain without root")
 }

 pub(crate) fn resolve_path(
@@ -292,25 +312,42 @@ impl DefMap {
 (res.resolved_def, res.segment_index)
 }

-/// Iterates over the containing `DefMap`s, if `self` is a `DefMap` corresponding to a block
-/// expression.
-fn ancestor_maps(
+/// Ascends the `DefMap` hierarchy and calls `f` with every `DefMap` and containing module.
+///
+/// If `f` returns `Some(val)`, iteration is stopped and `Some(val)` is returned. If `f` returns
318 | /// `None`, iteration continues. | ||
319 | fn with_ancestor_maps<T>( | ||
298 | &self, | 320 | &self, |
321 | db: &dyn DefDatabase, | ||
299 | local_mod: LocalModuleId, | 322 | local_mod: LocalModuleId, |
300 | ) -> impl Iterator<Item = (&DefMap, LocalModuleId)> { | 323 | f: &mut dyn FnMut(&DefMap, LocalModuleId) -> Option<T>, |
301 | std::iter::successors(Some((self, local_mod)), |(map, _)| { | 324 | ) -> Option<T> { |
302 | map.block.as_ref().map(|block| (&*block.parent, block.parent_module)) | 325 | if let Some(it) = f(self, local_mod) { |
303 | }) | 326 | return Some(it); |
327 | } | ||
328 | let mut block = self.block; | ||
329 | while let Some(block_info) = block { | ||
330 | let parent = block_info.parent.def_map(db); | ||
331 | if let Some(it) = f(&parent, block_info.parent.local_id) { | ||
332 | return Some(it); | ||
333 | } | ||
334 | block = parent.block; | ||
335 | } | ||
336 | |||
337 | None | ||
304 | } | 338 | } |
305 | 339 | ||
306 | // FIXME: this can use some more human-readable format (ideally, an IR | 340 | // FIXME: this can use some more human-readable format (ideally, an IR |
307 | // even), as this should be a great debugging aid. | 341 | // even), as this should be a great debugging aid. |
308 | pub fn dump(&self) -> String { | 342 | pub fn dump(&self, db: &dyn DefDatabase) -> String { |
309 | let mut buf = String::new(); | 343 | let mut buf = String::new(); |
344 | let mut arc; | ||
310 | let mut current_map = self; | 345 | let mut current_map = self; |
311 | while let Some(block) = ¤t_map.block { | 346 | while let Some(block) = ¤t_map.block { |
312 | go(&mut buf, current_map, "block scope", current_map.root); | 347 | go(&mut buf, current_map, "block scope", current_map.root); |
313 | current_map = &*block.parent; | 348 | buf.push('\n'); |
349 | arc = block.parent.def_map(db); | ||
350 | current_map = &*arc; | ||
314 | } | 351 | } |
315 | go(&mut buf, current_map, "crate", current_map.root); | 352 | go(&mut buf, current_map, "crate", current_map.root); |
316 | return buf; | 353 | return buf; |
@@ -318,27 +355,7 @@ impl DefMap { | |||
318 | fn go(buf: &mut String, map: &DefMap, path: &str, module: LocalModuleId) { | 355 | fn go(buf: &mut String, map: &DefMap, path: &str, module: LocalModuleId) { |
319 | format_to!(buf, "{}\n", path); | 356 | format_to!(buf, "{}\n", path); |
320 | 357 | ||
321 | let mut entries: Vec<_> = map.modules[module].scope.resolutions().collect(); | 358 | map.modules[module].scope.dump(buf); |
322 | entries.sort_by_key(|(name, _)| name.clone()); | ||
323 | |||
324 | for (name, def) in entries { | ||
325 | format_to!(buf, "{}:", name.map_or("_".to_string(), |name| name.to_string())); | ||
326 | |||
327 | if def.types.is_some() { | ||
328 | buf.push_str(" t"); | ||
329 | } | ||
330 | if def.values.is_some() { | ||
331 | buf.push_str(" v"); | ||
332 | } | ||
333 | if def.macros.is_some() { | ||
334 | buf.push_str(" m"); | ||
335 | } | ||
336 | if def.is_none() { | ||
337 | buf.push_str(" _"); | ||
338 | } | ||
339 | |||
340 | buf.push('\n'); | ||
341 | } | ||
342 | 359 | ||
343 | for (name, child) in map.modules[module].children.iter() { | 360 | for (name, child) in map.modules[module].children.iter() { |
344 | let path = format!("{}::{}", path, name); | 361 | let path = format!("{}::{}", path, name); |
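`BlockInfo` now stores the parent `ModuleId` instead of an `Arc<DefMap>`, so each ancestor map has to be fetched from the database on demand. That is why the old `ancestor_maps` iterator is replaced by `with_ancestor_maps`, which uses internal iteration: the caller passes a closure, and every locally fetched `Arc` only has to outlive that single call. A self-contained sketch of the pattern with toy types (the names `Db` and `Map` are stand-ins, not the real `DefDatabase`/`DefMap`):

```rust
use std::collections::HashMap;
use std::sync::Arc;

// Toy stand-ins: a "database" that hands out a fresh `Arc` for every lookup,
// and a map whose parent is reachable only through the database.
struct Db {
    maps: HashMap<u32, Arc<Map>>,
}

struct Map {
    name: String,
    parent: Option<u32>,
}

impl Db {
    fn map(&self, id: u32) -> Arc<Map> {
        self.maps[&id].clone()
    }
}

impl Map {
    /// Internal iteration: every ancestor is a locally created `Arc`, so we
    /// cannot return an iterator that borrows from it. Instead we call `f` on
    /// each map in the chain and stop as soon as it returns `Some`.
    fn with_ancestor_maps<T>(
        &self,
        db: &Db,
        f: &mut dyn FnMut(&Map) -> Option<T>,
    ) -> Option<T> {
        if let Some(it) = f(self) {
            return Some(it);
        }
        let mut parent = self.parent;
        while let Some(id) = parent {
            let map = db.map(id); // fresh Arc, owned by this loop iteration
            if let Some(it) = f(&*map) {
                return Some(it);
            }
            parent = map.parent;
        }
        None
    }
}

fn main() {
    let mut maps = HashMap::new();
    maps.insert(0, Arc::new(Map { name: "crate".to_string(), parent: None }));
    maps.insert(1, Arc::new(Map { name: "block".to_string(), parent: Some(0) }));
    let db = Db { maps };

    // Find the name of the outermost (crate-level) map, starting from the block map.
    let root = db.map(1).with_ancestor_maps(&db, &mut |map| {
        if map.parent.is_none() {
            Some(map.name.clone())
        } else {
            None
        }
    });
    assert_eq!(root.as_deref(), Some("crate"));
}
```

The `collector.rs` hunk below shows the real caller migrating from `find_map` over the old iterator to this closure form.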
diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs index 6e86cc4a7..6bd41bc08 100644 --- a/crates/hir_def/src/nameres/collector.rs +++ b/crates/hir_def/src/nameres/collector.rs | |||
@@ -655,7 +655,7 @@ impl DefCollector<'_> { | |||
655 | } | 655 | } |
656 | } | 656 | } |
657 | } else { | 657 | } else { |
658 | match import.path.segments.last() { | 658 | match import.path.segments().last() { |
659 | Some(last_segment) => { | 659 | Some(last_segment) => { |
660 | let name = match &import.alias { | 660 | let name = match &import.alias { |
661 | Some(ImportAlias::Alias(name)) => Some(name.clone()), | 661 | Some(ImportAlias::Alias(name)) => Some(name.clone()), |
@@ -956,7 +956,7 @@ impl DefCollector<'_> { | |||
956 | let item_tree = self.db.item_tree(import.file_id); | 956 | let item_tree = self.db.item_tree(import.file_id); |
957 | let import_data = &item_tree[import.value]; | 957 | let import_data = &item_tree[import.value]; |
958 | 958 | ||
959 | match (import_data.path.segments.first(), &import_data.path.kind) { | 959 | match (import_data.path.segments().first(), &import_data.path.kind) { |
960 | (Some(krate), PathKind::Plain) | (Some(krate), PathKind::Abs) => { | 960 | (Some(krate), PathKind::Plain) | (Some(krate), PathKind::Abs) => { |
961 | if diagnosed_extern_crates.contains(krate) { | 961 | if diagnosed_extern_crates.contains(krate) { |
962 | continue; | 962 | continue; |
@@ -1449,10 +1449,11 @@ impl ModCollector<'_, '_> { | |||
1449 | if let Some(macro_call_id) = | 1449 | if let Some(macro_call_id) = |
1450 | ast_id.as_call_id(self.def_collector.db, self.def_collector.def_map.krate, |path| { | 1450 | ast_id.as_call_id(self.def_collector.db, self.def_collector.def_map.krate, |path| { |
1451 | path.as_ident().and_then(|name| { | 1451 | path.as_ident().and_then(|name| { |
1452 | self.def_collector | 1452 | self.def_collector.def_map.with_ancestor_maps( |
1453 | .def_map | 1453 | self.def_collector.db, |
1454 | .ancestor_maps(self.module_id) | 1454 | self.module_id, |
1455 | .find_map(|(map, module)| map[module].scope.get_legacy_macro(&name)) | 1455 | &mut |map, module| map[module].scope.get_legacy_macro(&name), |
1456 | ) | ||
1456 | }) | 1457 | }) |
1457 | }) | 1458 | }) |
1458 | { | 1459 | { |
diff --git a/crates/hir_def/src/nameres/path_resolution.rs b/crates/hir_def/src/nameres/path_resolution.rs index ecf75c777..f2b59172d 100644 --- a/crates/hir_def/src/nameres/path_resolution.rs +++ b/crates/hir_def/src/nameres/path_resolution.rs | |||
@@ -110,6 +110,7 @@ impl DefMap { | |||
110 | let mut result = ResolvePathResult::empty(ReachedFixedPoint::No); | 110 | let mut result = ResolvePathResult::empty(ReachedFixedPoint::No); |
111 | result.segment_index = Some(usize::max_value()); | 111 | result.segment_index = Some(usize::max_value()); |
112 | 112 | ||
113 | let mut arc; | ||
113 | let mut current_map = self; | 114 | let mut current_map = self; |
114 | loop { | 115 | loop { |
115 | let new = current_map.resolve_path_fp_with_macro_single( | 116 | let new = current_map.resolve_path_fp_with_macro_single( |
@@ -131,8 +132,9 @@ impl DefMap { | |||
131 | 132 | ||
132 | match ¤t_map.block { | 133 | match ¤t_map.block { |
133 | Some(block) => { | 134 | Some(block) => { |
134 | current_map = &block.parent; | 135 | original_module = block.parent.local_id; |
135 | original_module = block.parent_module; | 136 | arc = block.parent.def_map(db); |
137 | current_map = &*arc; | ||
136 | } | 138 | } |
137 | None => return result, | 139 | None => return result, |
138 | } | 140 | } |
@@ -147,12 +149,12 @@ impl DefMap { | |||
147 | path: &ModPath, | 149 | path: &ModPath, |
148 | shadow: BuiltinShadowMode, | 150 | shadow: BuiltinShadowMode, |
149 | ) -> ResolvePathResult { | 151 | ) -> ResolvePathResult { |
150 | let mut segments = path.segments.iter().enumerate(); | 152 | let mut segments = path.segments().iter().enumerate(); |
151 | let mut curr_per_ns: PerNs = match path.kind { | 153 | let mut curr_per_ns: PerNs = match path.kind { |
152 | PathKind::DollarCrate(krate) => { | 154 | PathKind::DollarCrate(krate) => { |
153 | if krate == self.krate { | 155 | if krate == self.krate { |
154 | mark::hit!(macro_dollar_crate_self); | 156 | mark::hit!(macro_dollar_crate_self); |
155 | PerNs::types(self.crate_root().into(), Visibility::Public) | 157 | PerNs::types(self.crate_root(db).into(), Visibility::Public) |
156 | } else { | 158 | } else { |
157 | let def_map = db.crate_def_map(krate); | 159 | let def_map = db.crate_def_map(krate); |
158 | let module = def_map.module_id(def_map.root); | 160 | let module = def_map.module_id(def_map.root); |
@@ -160,7 +162,7 @@ impl DefMap { | |||
160 | PerNs::types(module.into(), Visibility::Public) | 162 | PerNs::types(module.into(), Visibility::Public) |
161 | } | 163 | } |
162 | } | 164 | } |
163 | PathKind::Crate => PerNs::types(self.crate_root().into(), Visibility::Public), | 165 | PathKind::Crate => PerNs::types(self.crate_root(db).into(), Visibility::Public), |
164 | // plain import or absolute path in 2015: crate-relative with | 166 | // plain import or absolute path in 2015: crate-relative with |
165 | // fallback to extern prelude (with the simplification in | 167 | // fallback to extern prelude (with the simplification in |
166 | // rust-lang/rust#57745) | 168 | // rust-lang/rust#57745) |
@@ -188,7 +190,7 @@ impl DefMap { | |||
188 | // BuiltinShadowMode wasn't Module, then we need to try | 190 | // BuiltinShadowMode wasn't Module, then we need to try |
189 | // resolving it as a builtin. | 191 | // resolving it as a builtin. |
190 | let prefer_module = | 192 | let prefer_module = |
191 | if path.segments.len() == 1 { shadow } else { BuiltinShadowMode::Module }; | 193 | if path.segments().len() == 1 { shadow } else { BuiltinShadowMode::Module }; |
192 | 194 | ||
193 | log::debug!("resolving {:?} in module", segment); | 195 | log::debug!("resolving {:?} in module", segment); |
194 | self.resolve_name_in_module(db, original_module, &segment, prefer_module) | 196 | self.resolve_name_in_module(db, original_module, &segment, prefer_module) |
@@ -201,15 +203,15 @@ impl DefMap { | |||
201 | None => match &self.block { | 203 | None => match &self.block { |
202 | Some(block) => { | 204 | Some(block) => { |
203 | // Look up remaining path in parent `DefMap` | 205 | // Look up remaining path in parent `DefMap` |
204 | let new_path = ModPath { | 206 | let new_path = ModPath::from_segments( |
205 | kind: PathKind::Super(lvl - i), | 207 | PathKind::Super(lvl - i), |
206 | segments: path.segments.clone(), | 208 | path.segments().to_vec(), |
207 | }; | 209 | ); |
208 | log::debug!("`super` path: {} -> {} in parent map", path, new_path); | 210 | log::debug!("`super` path: {} -> {} in parent map", path, new_path); |
209 | return block.parent.resolve_path_fp_with_macro( | 211 | return block.parent.def_map(db).resolve_path_fp_with_macro( |
210 | db, | 212 | db, |
211 | mode, | 213 | mode, |
212 | block.parent_module, | 214 | block.parent.local_id, |
213 | &new_path, | 215 | &new_path, |
214 | shadow, | 216 | shadow, |
215 | ); | 217 | ); |
@@ -256,10 +258,10 @@ impl DefMap { | |||
256 | curr_per_ns = match curr { | 258 | curr_per_ns = match curr { |
257 | ModuleDefId::ModuleId(module) => { | 259 | ModuleDefId::ModuleId(module) => { |
258 | if module.krate != self.krate { | 260 | if module.krate != self.krate { |
259 | let path = ModPath { | 261 | let path = ModPath::from_segments( |
260 | segments: path.segments[i..].to_vec(), | 262 | PathKind::Super(0), |
261 | kind: PathKind::Super(0), | 263 | path.segments()[i..].iter().cloned(), |
262 | }; | 264 | ); |
263 | log::debug!("resolving {:?} in other crate", path); | 265 | log::debug!("resolving {:?} in other crate", path); |
264 | let defp_map = module.def_map(db); | 266 | let defp_map = module.def_map(db); |
265 | let (def, s) = defp_map.resolve_path(db, module.local_id, &path, shadow); | 267 | let (def, s) = defp_map.resolve_path(db, module.local_id, &path, shadow); |
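Both the `dump` change and the `resolve_path_fp_with_macro` loop above rely on the same borrow-checker idiom: the `Arc<DefMap>` fetched from the database is stored in a binding declared outside the loop (`let mut arc;`), so the plain `&DefMap` reference (`current_map`) taken from it stays valid across iterations. A toy sketch of the idiom, with assumed types rather than the real code:

```rust
use std::sync::Arc;

// Toy stand-in: a node whose parent is only available as an owned `Arc`
// (here via `clone`, in rust-analyzer via a database query).
struct Node {
    value: u32,
    parent: Option<Arc<Node>>,
}

fn fetch_parent(node: &Node) -> Option<Arc<Node>> {
    node.parent.clone()
}

fn root_value(start: &Node) -> u32 {
    // `arc` lives outside the loop so that the `Arc` fetched in one iteration
    // outlives the `&Node` reference (`current`) that points into it.
    let mut arc;
    let mut current = start;
    loop {
        match fetch_parent(current) {
            Some(parent) => {
                arc = parent;
                current = &*arc;
            }
            None => return current.value,
        }
    }
}

fn main() {
    let root = Arc::new(Node { value: 42, parent: None });
    let child = Node { value: 1, parent: Some(root) };
    assert_eq!(root_value(&child), 42);
}
```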
diff --git a/crates/hir_def/src/nameres/tests.rs b/crates/hir_def/src/nameres/tests.rs index b36d0b59b..bd3e2701b 100644 --- a/crates/hir_def/src/nameres/tests.rs +++ b/crates/hir_def/src/nameres/tests.rs | |||
@@ -4,16 +4,16 @@ mod macros; | |||
4 | mod mod_resolution; | 4 | mod mod_resolution; |
5 | mod diagnostics; | 5 | mod diagnostics; |
6 | mod primitives; | 6 | mod primitives; |
7 | mod block; | ||
8 | 7 | ||
9 | use std::sync::Arc; | 8 | use std::sync::Arc; |
10 | 9 | ||
11 | use base_db::{fixture::WithFixture, FilePosition, SourceDatabase}; | 10 | use base_db::{fixture::WithFixture, SourceDatabase}; |
12 | use expect_test::{expect, Expect}; | 11 | use expect_test::{expect, Expect}; |
13 | use syntax::AstNode; | ||
14 | use test_utils::mark; | 12 | use test_utils::mark; |
15 | 13 | ||
16 | use crate::{db::DefDatabase, nameres::*, test_db::TestDB, Lookup}; | 14 | use crate::{db::DefDatabase, test_db::TestDB}; |
15 | |||
16 | use super::DefMap; | ||
17 | 17 | ||
18 | fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> { | 18 | fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> { |
19 | let db = TestDB::with_files(ra_fixture); | 19 | let db = TestDB::with_files(ra_fixture); |
@@ -21,71 +21,14 @@ fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> { | |||
21 | db.crate_def_map(krate) | 21 | db.crate_def_map(krate) |
22 | } | 22 | } |
23 | 23 | ||
24 | fn compute_block_def_map(ra_fixture: &str) -> Arc<DefMap> { | 24 | fn render_crate_def_map(ra_fixture: &str) -> String { |
25 | let (db, position) = TestDB::with_position(ra_fixture); | 25 | let db = TestDB::with_files(ra_fixture); |
26 | 26 | let krate = db.crate_graph().iter().next().unwrap(); | |
27 | // FIXME: perhaps we should make this use body lowering tests instead? | 27 | db.crate_def_map(krate).dump(&db) |
28 | |||
29 | let module = db.module_for_file(position.file_id); | ||
30 | let mut def_map = db.crate_def_map(module.krate); | ||
31 | while let Some(new_def_map) = descend_def_map_at_position(&db, position, def_map.clone()) { | ||
32 | def_map = new_def_map; | ||
33 | } | ||
34 | |||
35 | // FIXME: select the right module, not the root | ||
36 | |||
37 | def_map | ||
38 | } | ||
39 | |||
40 | fn descend_def_map_at_position( | ||
41 | db: &dyn DefDatabase, | ||
42 | position: FilePosition, | ||
43 | def_map: Arc<DefMap>, | ||
44 | ) -> Option<Arc<DefMap>> { | ||
45 | for (local_id, module_data) in def_map.modules() { | ||
46 | let mod_def = module_data.origin.definition_source(db); | ||
47 | let ast_map = db.ast_id_map(mod_def.file_id); | ||
48 | let item_tree = db.item_tree(mod_def.file_id); | ||
49 | let root = db.parse_or_expand(mod_def.file_id).unwrap(); | ||
50 | for item in module_data.scope.declarations() { | ||
51 | match item { | ||
52 | ModuleDefId::FunctionId(it) => { | ||
53 | // Technically blocks can be inside any type (due to arrays and const generics), | ||
54 | // and also in const/static initializers. For tests we only really care about | ||
55 | // functions though. | ||
56 | |||
57 | let ast = ast_map.get(item_tree[it.lookup(db).id.value].ast_id).to_node(&root); | ||
58 | |||
59 | if ast.syntax().text_range().contains(position.offset) { | ||
60 | // Cursor inside function, descend into its body's DefMap. | ||
61 | // Note that we don't handle block *expressions* inside function bodies. | ||
62 | let ast_map = db.ast_id_map(position.file_id.into()); | ||
63 | let ast_id = ast_map.ast_id(&ast.body().unwrap()); | ||
64 | let block = BlockLoc { | ||
65 | ast_id: InFile::new(position.file_id.into(), ast_id), | ||
66 | module: def_map.module_id(local_id), | ||
67 | }; | ||
68 | let block_id = db.intern_block(block); | ||
69 | return Some(db.block_def_map(block_id)); | ||
70 | } | ||
71 | } | ||
72 | _ => continue, | ||
73 | } | ||
74 | } | ||
75 | } | ||
76 | |||
77 | None | ||
78 | } | 28 | } |
79 | 29 | ||
80 | fn check(ra_fixture: &str, expect: Expect) { | 30 | fn check(ra_fixture: &str, expect: Expect) { |
81 | let def_map = compute_crate_def_map(ra_fixture); | 31 | let actual = render_crate_def_map(ra_fixture); |
82 | let actual = def_map.dump(); | ||
83 | expect.assert_eq(&actual); | ||
84 | } | ||
85 | |||
86 | fn check_at(ra_fixture: &str, expect: Expect) { | ||
87 | let def_map = compute_block_def_map(ra_fixture); | ||
88 | let actual = def_map.dump(); | ||
89 | expect.assert_eq(&actual); | 32 | expect.assert_eq(&actual); |
90 | } | 33 | } |
91 | 34 | ||
diff --git a/crates/hir_def/src/path.rs b/crates/hir_def/src/path.rs index 84ea09b53..0e60dc2b6 100644 --- a/crates/hir_def/src/path.rs +++ b/crates/hir_def/src/path.rs | |||
@@ -20,7 +20,7 @@ use crate::{ | |||
20 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] | 20 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] |
21 | pub struct ModPath { | 21 | pub struct ModPath { |
22 | pub kind: PathKind, | 22 | pub kind: PathKind, |
23 | pub segments: Vec<Name>, | 23 | segments: Vec<Name>, |
24 | } | 24 | } |
25 | 25 | ||
26 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] | 26 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] |
@@ -53,6 +53,11 @@ impl ModPath { | |||
53 | ModPath { kind, segments } | 53 | ModPath { kind, segments } |
54 | } | 54 | } |
55 | 55 | ||
56 | /// Creates a `ModPath` from a `PathKind`, with no extra path segments. | ||
57 | pub const fn from_kind(kind: PathKind) -> ModPath { | ||
58 | ModPath { kind, segments: Vec::new() } | ||
59 | } | ||
60 | |||
56 | /// Calls `cb` with all paths, represented by this use item. | 61 | /// Calls `cb` with all paths, represented by this use item. |
57 | pub(crate) fn expand_use_item( | 62 | pub(crate) fn expand_use_item( |
58 | item_src: InFile<ast::Use>, | 63 | item_src: InFile<ast::Use>, |
@@ -64,6 +69,18 @@ impl ModPath { | |||
64 | } | 69 | } |
65 | } | 70 | } |
66 | 71 | ||
72 | pub fn segments(&self) -> &[Name] { | ||
73 | &self.segments | ||
74 | } | ||
75 | |||
76 | pub fn push_segment(&mut self, segment: Name) { | ||
77 | self.segments.push(segment); | ||
78 | } | ||
79 | |||
80 | pub fn pop_segment(&mut self) -> Option<Name> { | ||
81 | self.segments.pop() | ||
82 | } | ||
83 | |||
67 | /// Returns the number of segments in the path (counting special segments like `$crate` and | 84 | /// Returns the number of segments in the path (counting special segments like `$crate` and |
68 | /// `super`). | 85 | /// `super`). |
69 | pub fn len(&self) -> usize { | 86 | pub fn len(&self) -> usize { |
@@ -78,7 +95,7 @@ impl ModPath { | |||
78 | } | 95 | } |
79 | 96 | ||
80 | pub fn is_ident(&self) -> bool { | 97 | pub fn is_ident(&self) -> bool { |
81 | self.kind == PathKind::Plain && self.segments.len() == 1 | 98 | self.as_ident().is_some() |
82 | } | 99 | } |
83 | 100 | ||
84 | pub fn is_self(&self) -> bool { | 101 | pub fn is_self(&self) -> bool { |
@@ -87,10 +104,14 @@ impl ModPath { | |||
87 | 104 | ||
88 | /// If this path is a single identifier, like `foo`, return its name. | 105 | /// If this path is a single identifier, like `foo`, return its name. |
89 | pub fn as_ident(&self) -> Option<&Name> { | 106 | pub fn as_ident(&self) -> Option<&Name> { |
90 | if !self.is_ident() { | 107 | if self.kind != PathKind::Plain { |
91 | return None; | 108 | return None; |
92 | } | 109 | } |
93 | self.segments.first() | 110 | |
111 | match &*self.segments { | ||
112 | [name] => Some(name), | ||
113 | _ => None, | ||
114 | } | ||
94 | } | 115 | } |
95 | } | 116 | } |
96 | 117 | ||
@@ -180,10 +201,10 @@ impl Path { | |||
180 | } | 201 | } |
181 | let res = Path { | 202 | let res = Path { |
182 | type_anchor: self.type_anchor.clone(), | 203 | type_anchor: self.type_anchor.clone(), |
183 | mod_path: ModPath { | 204 | mod_path: ModPath::from_segments( |
184 | kind: self.mod_path.kind.clone(), | 205 | self.mod_path.kind.clone(), |
185 | segments: self.mod_path.segments[..self.mod_path.segments.len() - 1].to_vec(), | 206 | self.mod_path.segments[..self.mod_path.segments.len() - 1].iter().cloned(), |
186 | }, | 207 | ), |
187 | generic_args: self.generic_args[..self.generic_args.len() - 1].to_vec(), | 208 | generic_args: self.generic_args[..self.generic_args.len() - 1].to_vec(), |
188 | }; | 209 | }; |
189 | Some(res) | 210 | Some(res) |
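With `segments` now private, code outside the `path` module goes through `segments()`, `push_segment`, `pop_segment`, and the `from_kind`/`from_segments` constructors, as the `collector.rs`, `resolver.rs`, `visibility.rs`, and `handlers.rs` hunks show. A simplified stand-in (using `String` instead of `Name` and a reduced `PathKind`) just to illustrate how call sites look after the migration:

```rust
// Simplified stand-in for the encapsulated `ModPath` API; the real type lives
// in `hir_def::path` and uses `Name` / the full `PathKind`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PathKind {
    Plain,
    Crate,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ModPath {
    pub kind: PathKind,
    segments: Vec<String>, // no longer `pub`: mutation goes through methods
}

impl ModPath {
    pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = String>) -> ModPath {
        ModPath { kind, segments: segments.into_iter().collect() }
    }

    /// Creates a `ModPath` with no extra path segments.
    pub const fn from_kind(kind: PathKind) -> ModPath {
        ModPath { kind, segments: Vec::new() }
    }

    pub fn segments(&self) -> &[String] {
        &self.segments
    }

    pub fn push_segment(&mut self, segment: String) {
        self.segments.push(segment);
    }

    pub fn pop_segment(&mut self) -> Option<String> {
        self.segments.pop()
    }
}

fn main() {
    // Callers migrate from `path.segments.last()` to `path.segments().last()`,
    // and from struct literals to the constructors.
    let mut path = ModPath::from_segments(
        PathKind::Plain,
        vec!["collections".to_string(), "HashMap".to_string()],
    );
    assert_eq!(path.segments().last().map(String::as_str), Some("HashMap"));

    path.pop_segment();
    path.push_segment("BTreeMap".to_string());
    assert_eq!(path.segments().join("::"), "collections::BTreeMap");

    let crate_root = ModPath::from_kind(PathKind::Crate);
    assert!(crate_root.segments().is_empty());
}
```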
diff --git a/crates/hir_def/src/path/lower.rs b/crates/hir_def/src/path/lower.rs index 9518ac109..a469546c1 100644 --- a/crates/hir_def/src/path/lower.rs +++ b/crates/hir_def/src/path/lower.rs | |||
@@ -129,7 +129,7 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path> | |||
129 | } | 129 | } |
130 | } | 130 | } |
131 | 131 | ||
132 | let mod_path = ModPath { kind, segments }; | 132 | let mod_path = ModPath::from_segments(kind, segments); |
133 | return Some(Path { type_anchor, mod_path, generic_args }); | 133 | return Some(Path { type_anchor, mod_path, generic_args }); |
134 | 134 | ||
135 | fn qualifier(path: &ast::Path) -> Option<ast::Path> { | 135 | fn qualifier(path: &ast::Path) -> Option<ast::Path> { |
diff --git a/crates/hir_def/src/path/lower/lower_use.rs b/crates/hir_def/src/path/lower/lower_use.rs index ba0d1f0e7..d584b0b70 100644 --- a/crates/hir_def/src/path/lower/lower_use.rs +++ b/crates/hir_def/src/path/lower/lower_use.rs | |||
@@ -75,9 +75,10 @@ fn convert_path(prefix: Option<ModPath>, path: ast::Path, hygiene: &Hygiene) -> | |||
75 | match hygiene.name_ref_to_name(name_ref) { | 75 | match hygiene.name_ref_to_name(name_ref) { |
76 | Either::Left(name) => { | 76 | Either::Left(name) => { |
77 | // no type args in use | 77 | // no type args in use |
78 | let mut res = prefix.unwrap_or_else(|| ModPath { | 78 | let mut res = prefix.unwrap_or_else(|| { |
79 | kind: segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs), | 79 | ModPath::from_kind( |
80 | segments: Vec::with_capacity(1), | 80 | segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs), |
81 | ) | ||
81 | }); | 82 | }); |
82 | res.segments.push(name); | 83 | res.segments.push(name); |
83 | res | 84 | res |
diff --git a/crates/hir_def/src/resolver.rs b/crates/hir_def/src/resolver.rs index 9021ea712..f9ad50301 100644 --- a/crates/hir_def/src/resolver.rs +++ b/crates/hir_def/src/resolver.rs | |||
@@ -164,7 +164,7 @@ impl Resolver { | |||
164 | db: &dyn DefDatabase, | 164 | db: &dyn DefDatabase, |
165 | path: &ModPath, | 165 | path: &ModPath, |
166 | ) -> Option<(TypeNs, Option<usize>)> { | 166 | ) -> Option<(TypeNs, Option<usize>)> { |
167 | let first_name = path.segments.first()?; | 167 | let first_name = path.segments().first()?; |
168 | let skip_to_mod = path.kind != PathKind::Plain; | 168 | let skip_to_mod = path.kind != PathKind::Plain; |
169 | for scope in self.scopes.iter().rev() { | 169 | for scope in self.scopes.iter().rev() { |
170 | match scope { | 170 | match scope { |
@@ -179,7 +179,7 @@ impl Resolver { | |||
179 | 179 | ||
180 | Scope::GenericParams { params, def } => { | 180 | Scope::GenericParams { params, def } => { |
181 | if let Some(local_id) = params.find_type_by_name(first_name) { | 181 | if let Some(local_id) = params.find_type_by_name(first_name) { |
182 | let idx = if path.segments.len() == 1 { None } else { Some(1) }; | 182 | let idx = if path.segments().len() == 1 { None } else { Some(1) }; |
183 | return Some(( | 183 | return Some(( |
184 | TypeNs::GenericParam(TypeParamId { local_id, parent: *def }), | 184 | TypeNs::GenericParam(TypeParamId { local_id, parent: *def }), |
185 | idx, | 185 | idx, |
@@ -188,13 +188,13 @@ impl Resolver { | |||
188 | } | 188 | } |
189 | Scope::ImplDefScope(impl_) => { | 189 | Scope::ImplDefScope(impl_) => { |
190 | if first_name == &name![Self] { | 190 | if first_name == &name![Self] { |
191 | let idx = if path.segments.len() == 1 { None } else { Some(1) }; | 191 | let idx = if path.segments().len() == 1 { None } else { Some(1) }; |
192 | return Some((TypeNs::SelfType(*impl_), idx)); | 192 | return Some((TypeNs::SelfType(*impl_), idx)); |
193 | } | 193 | } |
194 | } | 194 | } |
195 | Scope::AdtScope(adt) => { | 195 | Scope::AdtScope(adt) => { |
196 | if first_name == &name![Self] { | 196 | if first_name == &name![Self] { |
197 | let idx = if path.segments.len() == 1 { None } else { Some(1) }; | 197 | let idx = if path.segments().len() == 1 { None } else { Some(1) }; |
198 | return Some((TypeNs::AdtSelfType(*adt), idx)); | 198 | return Some((TypeNs::AdtSelfType(*adt), idx)); |
199 | } | 199 | } |
200 | } | 200 | } |
@@ -270,9 +270,9 @@ impl Resolver { | |||
270 | db: &dyn DefDatabase, | 270 | db: &dyn DefDatabase, |
271 | path: &ModPath, | 271 | path: &ModPath, |
272 | ) -> Option<ResolveValueResult> { | 272 | ) -> Option<ResolveValueResult> { |
273 | let n_segments = path.segments.len(); | 273 | let n_segments = path.segments().len(); |
274 | let tmp = name![self]; | 274 | let tmp = name![self]; |
275 | let first_name = if path.is_self() { &tmp } else { path.segments.first()? }; | 275 | let first_name = if path.is_self() { &tmp } else { path.segments().first()? }; |
276 | let skip_to_mod = path.kind != PathKind::Plain && !path.is_self(); | 276 | let skip_to_mod = path.kind != PathKind::Plain && !path.is_self(); |
277 | for scope in self.scopes.iter().rev() { | 277 | for scope in self.scopes.iter().rev() { |
278 | match scope { | 278 | match scope { |
diff --git a/crates/hir_def/src/visibility.rs b/crates/hir_def/src/visibility.rs index e79a91102..38da3132b 100644 --- a/crates/hir_def/src/visibility.rs +++ b/crates/hir_def/src/visibility.rs | |||
@@ -22,8 +22,7 @@ pub enum RawVisibility { | |||
22 | 22 | ||
23 | impl RawVisibility { | 23 | impl RawVisibility { |
24 | pub(crate) const fn private() -> RawVisibility { | 24 | pub(crate) const fn private() -> RawVisibility { |
25 | let path = ModPath { kind: PathKind::Super(0), segments: Vec::new() }; | 25 | RawVisibility::Module(ModPath::from_kind(PathKind::Super(0))) |
26 | RawVisibility::Module(path) | ||
27 | } | 26 | } |
28 | 27 | ||
29 | pub(crate) fn from_ast( | 28 | pub(crate) fn from_ast( |
@@ -59,15 +58,15 @@ impl RawVisibility { | |||
59 | RawVisibility::Module(path) | 58 | RawVisibility::Module(path) |
60 | } | 59 | } |
61 | ast::VisibilityKind::PubCrate => { | 60 | ast::VisibilityKind::PubCrate => { |
62 | let path = ModPath { kind: PathKind::Crate, segments: Vec::new() }; | 61 | let path = ModPath::from_kind(PathKind::Crate); |
63 | RawVisibility::Module(path) | 62 | RawVisibility::Module(path) |
64 | } | 63 | } |
65 | ast::VisibilityKind::PubSuper => { | 64 | ast::VisibilityKind::PubSuper => { |
66 | let path = ModPath { kind: PathKind::Super(1), segments: Vec::new() }; | 65 | let path = ModPath::from_kind(PathKind::Super(1)); |
67 | RawVisibility::Module(path) | 66 | RawVisibility::Module(path) |
68 | } | 67 | } |
69 | ast::VisibilityKind::PubSelf => { | 68 | ast::VisibilityKind::PubSelf => { |
70 | let path = ModPath { kind: PathKind::Plain, segments: Vec::new() }; | 69 | let path = ModPath::from_kind(PathKind::Plain); |
71 | RawVisibility::Module(path) | 70 | RawVisibility::Module(path) |
72 | } | 71 | } |
73 | ast::VisibilityKind::Pub => RawVisibility::Public, | 72 | ast::VisibilityKind::Pub => RawVisibility::Public, |
diff --git a/crates/hir_ty/src/infer.rs b/crates/hir_ty/src/infer.rs index d08867c70..4b683c5a7 100644 --- a/crates/hir_ty/src/infer.rs +++ b/crates/hir_ty/src/infer.rs | |||
@@ -461,7 +461,7 @@ impl<'a> InferenceContext<'a> { | |||
461 | (ty, variant) | 461 | (ty, variant) |
462 | } | 462 | } |
463 | Some(1) => { | 463 | Some(1) => { |
464 | let segment = path.mod_path().segments.last().unwrap(); | 464 | let segment = path.mod_path().segments().last().unwrap(); |
465 | // this could be an enum variant or associated type | 465 | // this could be an enum variant or associated type |
466 | if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() { | 466 | if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() { |
467 | let enum_data = self.db.enum_data(enum_id); | 467 | let enum_data = self.db.enum_data(enum_id); |
diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs index d7351d212..12f1591c8 100644 --- a/crates/hir_ty/src/infer/expr.rs +++ b/crates/hir_ty/src/infer/expr.rs | |||
@@ -137,7 +137,7 @@ impl<'a> InferenceContext<'a> { | |||
137 | 137 | ||
138 | self.coerce_merge_branch(&then_ty, &else_ty) | 138 | self.coerce_merge_branch(&then_ty, &else_ty) |
139 | } | 139 | } |
140 | Expr::Block { statements, tail, label } => match label { | 140 | Expr::Block { statements, tail, label, id: _ } => match label { |
141 | Some(_) => { | 141 | Some(_) => { |
142 | let break_ty = self.table.new_type_var(); | 142 | let break_ty = self.table.new_type_var(); |
143 | self.breakables.push(BreakableContext { | 143 | self.breakables.push(BreakableContext { |
diff --git a/crates/ide_db/src/helpers.rs b/crates/ide_db/src/helpers.rs index 0dcc4dd29..bc7aee110 100644 --- a/crates/ide_db/src/helpers.rs +++ b/crates/ide_db/src/helpers.rs | |||
@@ -24,7 +24,7 @@ pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path { | |||
24 | } | 24 | } |
25 | 25 | ||
26 | segments.extend( | 26 | segments.extend( |
27 | path.segments | 27 | path.segments() |
28 | .iter() | 28 | .iter() |
29 | .map(|segment| make::path_segment(make::name_ref(&segment.to_string()))), | 29 | .map(|segment| make::path_segment(make::name_ref(&segment.to_string()))), |
30 | ); | 30 | ); |
diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml index 43bc10490..ef0907194 100644 --- a/crates/mbe/Cargo.toml +++ b/crates/mbe/Cargo.toml | |||
@@ -17,5 +17,5 @@ log = "0.4.8" | |||
17 | syntax = { path = "../syntax", version = "0.0.0" } | 17 | syntax = { path = "../syntax", version = "0.0.0" } |
18 | parser = { path = "../parser", version = "0.0.0" } | 18 | parser = { path = "../parser", version = "0.0.0" } |
19 | tt = { path = "../tt", version = "0.0.0" } | 19 | tt = { path = "../tt", version = "0.0.0" } |
20 | test_utils = { path = "../test_utils" } | 20 | test_utils = { path = "../test_utils", version = "0.0.0" } |
21 | 21 | ||
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 07204436c..5a6501216 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs | |||
@@ -1729,7 +1729,7 @@ fn fill_resolve_data( | |||
1729 | ) -> Option<()> { | 1729 | ) -> Option<()> { |
1730 | let import_edit = item.import_to_add()?; | 1730 | let import_edit = item.import_to_add()?; |
1731 | let full_import_path = import_edit.import_path.to_string(); | 1731 | let full_import_path = import_edit.import_path.to_string(); |
1732 | let imported_name = import_edit.import_path.segments.clone().pop()?.to_string(); | 1732 | let imported_name = import_edit.import_path.segments().last()?.to_string(); |
1733 | 1733 | ||
1734 | *resolve_data = Some( | 1734 | *resolve_data = Some( |
1735 | to_value(CompletionResolveData { | 1735 | to_value(CompletionResolveData { |
diff --git a/crates/syntax/fuzz/Cargo.toml b/crates/syntax/fuzz/Cargo.toml index 32c40d1b9..e22cd6b0c 100644 --- a/crates/syntax/fuzz/Cargo.toml +++ b/crates/syntax/fuzz/Cargo.toml | |||
@@ -10,8 +10,8 @@ edition = "2018" | |||
10 | cargo-fuzz = true | 10 | cargo-fuzz = true |
11 | 11 | ||
12 | [dependencies] | 12 | [dependencies] |
13 | syntax = { path = ".." } | 13 | syntax = { path = "..", version = "0.0.0" } |
14 | text_edit = { path = "../../text_edit" } | 14 | text_edit = { path = "../../text_edit", version = "0.0.0" } |
15 | libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer-sys.git" } | 15 | libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer-sys.git" } |
16 | 16 | ||
17 | # Prevent this from interfering with workspaces | 17 | # Prevent this from interfering with workspaces |
diff --git a/docs/dev/README.md b/docs/dev/README.md index 4cc608b07..9c0af68e3 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md | |||
@@ -9,8 +9,9 @@ $ cargo test | |||
9 | 9 | ||
10 | should be enough to get you started! | 10 | should be enough to get you started! |
11 | 11 | ||
12 | To learn more about how rust-analyzer works, see | 12 | To learn more about how rust-analyzer works, see the [./architecture.md](./architecture.md) document. |
13 | [./architecture.md](./architecture.md) document. | 13 | It also explains the high-level layout of the source code. |
14 | Do skim through that document. | ||
14 | 15 | ||
15 | We also publish rustdoc docs to pages: | 16 | We also publish rustdoc docs to pages: |
16 | 17 | ||
@@ -99,25 +100,6 @@ I don't have a specific workflow for this case. | |||
99 | Additionally, I use `cargo run --release -p rust-analyzer -- analysis-stats path/to/some/rust/crate` to run a batch analysis. | 100 | Additionally, I use `cargo run --release -p rust-analyzer -- analysis-stats path/to/some/rust/crate` to run a batch analysis. |
100 | This is primarily useful for performance optimizations, or for bug minimization. | 101 | This is primarily useful for performance optimizations, or for bug minimization. |
101 | 102 | ||
102 | ## Parser Tests | ||
103 | |||
104 | Tests for the parser (`parser`) live in the `syntax` crate (see `test_data` directory). | ||
105 | There are two kinds of tests: | ||
106 | |||
107 | * Manually written test cases in `parser/ok` and `parser/err` | ||
108 | * "Inline" tests in `parser/inline` (these are generated) from comments in `parser` crate. | ||
109 | |||
110 | The purpose of inline tests is not to achieve full coverage by test cases, but to explain to the reader of the code what each particular `if` and `match` is responsible for. | ||
111 | If you are tempted to add a large inline test, it might be a good idea to leave only the simplest example in place, and move the test to a manual `parser/ok` test. | ||
112 | |||
113 | To update test data, run with `UPDATE_EXPECT` variable: | ||
114 | |||
115 | ```bash | ||
116 | env UPDATE_EXPECT=1 cargo qt | ||
117 | ``` | ||
118 | |||
119 | After adding a new inline test you need to run `cargo xtest codegen` and also update the test data as described above. | ||
120 | |||
121 | ## TypeScript Tests | 103 | ## TypeScript Tests |
122 | 104 | ||
123 | If you change files under `editors/code` and would like to run the tests and linter, install npm and run: | 105 | If you change files under `editors/code` and would like to run the tests and linter, install npm and run: |
@@ -128,73 +110,6 @@ npm ci | |||
128 | npm run lint | 110 | npm run lint |
129 | ``` | 111 | ``` |
130 | 112 | ||
131 | # Code organization | ||
132 | |||
133 | All Rust code lives in the `crates` top-level directory, and is organized as a single Cargo workspace. | ||
134 | The `editors` top-level directory contains code for integrating with editors. | ||
135 | Currently, it contains the plugin for VS Code (in TypeScript). | ||
136 | The `docs` top-level directory contains both developer and user documentation. | ||
137 | |||
138 | We have some automation infra in Rust in the `xtask` package. | ||
139 | It contains stuff like formatting checking, code generation and powers `cargo xtask install`. | ||
140 | The latter syntax is achieved with the help of cargo aliases (see `.cargo` directory). | ||
141 | |||
142 | # Architecture Invariants | ||
143 | |||
144 | This section tries to document high-level design constraints, which are not | ||
145 | always obvious from the low-level code. | ||
146 | |||
147 | ## Incomplete syntax trees | ||
148 | |||
149 | Syntax trees are by design incomplete and do not enforce well-formedness. | ||
150 | If an AST method returns an `Option`, it *can* be `None` at runtime, even if this is forbidden by the grammar. | ||
151 | |||
152 | ## LSP independence | ||
153 | |||
154 | rust-analyzer is independent from LSP. | ||
155 | It provides features for a hypothetical perfect Rust-specific IDE client. | ||
156 | Internal representations are lowered to LSP in the `rust-analyzer` crate (the only crate which is allowed to use LSP types). | ||
157 | |||
158 | ## IDE/Compiler split | ||
159 | |||
160 | There's a semi-hard split between "compiler" and "IDE", at the `hir` crate. | ||
161 | Compiler derives new facts about source code. | ||
162 | It explicitly acknowledges that not all info is available (i.e. you can't look at types during name resolution). | ||
163 | |||
164 | IDE assumes that all information is available at all times. | ||
165 | |||
166 | IDE should use only types from `hir`, and should not depend on the underling compiler types. | ||
167 | `hir` is a facade. | ||
168 | |||
169 | ## IDE API | ||
170 | |||
171 | The main IDE crate (`ide`) uses "Plain Old Data" for the API. | ||
172 | Rather than talking in definitions and references, it talks in Strings and textual offsets. | ||
173 | In general, API is centered around UI concerns -- the result of the call is what the user sees in the editor, and not what the compiler sees underneath. | ||
174 | The results are 100% Rust specific though. | ||
175 | Shout outs to LSP developers for popularizing the idea that "UI" is a good place to draw a boundary at. | ||
176 | |||
177 | ## LSP is stateless | ||
178 | |||
179 | The protocol is implemented in the mostly stateless way. | ||
180 | A good mental model is HTTP, which doesn't store per-client state, and instead relies on devices like cookies to maintain an illusion of state. | ||
181 | If some action requires multi-step protocol, each step should be self-contained. | ||
182 | |||
183 | A good example here is code action resolving process. | ||
184 | TO display the lightbulb, we compute the list of code actions without computing edits. | ||
185 | Figuring out the edit is done in a separate `codeAction/resolve` call. | ||
186 | Rather than storing some `lazy_edit: Box<dyn FnOnce() -> Edit>` somewhere, we use a string ID of action to re-compute the list of actions during the resolve process. | ||
187 | (See [this post](https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html) for more details.) | ||
188 | The benefit here is that, generally speaking, the state of the world might change between `codeAction` and `codeAction` resolve requests, so any closure we store might become invalid. | ||
189 | |||
190 | While we don't currently implement any complicated refactors with complex GUI, I imagine we'd use the same techniques for refactors. | ||
191 | After clicking each "Next" button during refactor, the client would send all the info which server needs to re-recreate the context from scratch. | ||
192 | |||
193 | ## CI | ||
194 | |||
195 | CI does not test rust-analyzer, CI is a core part of rust-analyzer, and is maintained with above average standard of quality. | ||
196 | CI is reproducible -- it can only be broken by changes to files in this repository, any dependence on externalities is a bug. | ||
197 | |||
198 | # Code Style & Review Process | 113 | # Code Style & Review Process |
199 | 114 | ||
200 | Do see [./style.md](./style.md). | 115 | Do see [./style.md](./style.md). |
diff --git a/docs/dev/architecture.md b/docs/dev/architecture.md index b5831f47c..56ebaa3df 100644 --- a/docs/dev/architecture.md +++ b/docs/dev/architecture.md | |||
@@ -1,174 +1,420 @@ | |||
1 | # Architecture | 1 | # Architecture |
2 | 2 | ||
3 | This document describes the high-level architecture of rust-analyzer. | 3 | This document describes the high-level architecture of rust-analyzer. |
4 | If you want to familiarize yourself with the code base, you are just | 4 | If you want to familiarize yourself with the code base, you are just in the right place! |
5 | in the right place! | ||
6 | 5 | ||
7 | See also the [guide](./guide.md), which walks through a particular snapshot of | 6 | See also the [guide](./guide.md), which walks through a particular snapshot of rust-analyzer code base. |
8 | rust-analyzer code base. | ||
9 | 7 | ||
10 | Yet another resource is this playlist with videos about various parts of the | 8 | Yet another resource is this playlist with videos about various parts of the analyzer: |
11 | analyzer: | ||
12 | 9 | ||
13 | https://www.youtube.com/playlist?list=PL85XCvVPmGQho7MZkdW-wtPtuJcFpzycE | 10 | https://www.youtube.com/playlist?list=PL85XCvVPmGQho7MZkdW-wtPtuJcFpzycE |
14 | 11 | ||
15 | Note that the guide and videos are pretty dated, this document should be in | 12 | Note that the guide and videos are pretty dated; this document should be, in general, fresher. |
16 | generally fresher. | ||
17 | 13 | ||
18 | ## The Big Picture | 14 | See also these implementation-related blog posts: |
15 | |||
16 | * https://rust-analyzer.github.io/blog/2019/11/13/find-usages.html | ||
17 | * https://rust-analyzer.github.io/blog/2020/07/20/three-architectures-for-responsive-ide.html | ||
18 | * https://rust-analyzer.github.io/blog/2020/09/16/challeging-LR-parsing.html | ||
19 | * https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html | ||
20 | * https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html | ||
21 | |||
22 | ## Bird's Eye View | ||
19 | 23 | ||
20 | ![](https://user-images.githubusercontent.com/1711539/50114578-e8a34280-0255-11e9-902c-7cfc70747966.png) | 24 | ![](https://user-images.githubusercontent.com/1711539/50114578-e8a34280-0255-11e9-902c-7cfc70747966.png) |
21 | 25 | ||
22 | On the highest level, rust-analyzer is a thing which accepts input source code | 26 | On the highest level, rust-analyzer is a thing which accepts input source code from the client and produces a structured semantic model of the code. |
23 | from the client and produces a structured semantic model of the code. | 27 | |
28 | More specifically, input data consists of a set of test files (`(PathBuf, String)` pairs) and information about project structure, captured in the so called `CrateGraph`. | ||
29 | The crate graph specifies which files are crate roots, which cfg flags are specified for each crate and what dependencies exist between the crates. | ||
30 | This is the input (ground) state. | ||
31 | The analyzer keeps all this input data in memory and never does any IO. | ||
32 | Because the input data is source code, which typically measures in tens of megabytes at most, keeping everything in memory is OK. | ||
33 | |||
34 | A "structured semantic model" is basically an object-oriented representation of modules, functions and types which appear in the source code. | ||
35 | This representation is fully "resolved": all expressions have types, all references are bound to declarations, etc. | ||
36 | This is derived state. | ||
37 | |||
38 | The client can submit a small delta of input data (typically, a change to a single file) and get a fresh code model which accounts for changes. | ||
39 | |||
40 | The underlying engine makes sure that model is computed lazily (on-demand) and can be quickly updated for small modifications. | ||
24 | 41 | ||
25 | More specifically, input data consists of a set of test files (`(PathBuf, | ||
26 | String)` pairs) and information about project structure, captured in the so | ||
27 | called `CrateGraph`. The crate graph specifies which files are crate roots, | ||
28 | which cfg flags are specified for each crate and what dependencies exist between | ||
29 | the crates. The analyzer keeps all this input data in memory and never does any | ||
30 | IO. Because the input data are source code, which typically measures in tens of | ||
31 | megabytes at most, keeping everything in memory is OK. | ||
32 | 42 | ||
33 | A "structured semantic model" is basically an object-oriented representation of | 43 | ## Code Map |
34 | modules, functions and types which appear in the source code. This representation | ||
35 | is fully "resolved": all expressions have types, all references are bound to | ||
36 | declarations, etc. | ||
37 | 44 | ||
38 | The client can submit a small delta of input data (typically, a change to a | 45 | This section talks briefly about various important directories and data structures. |
39 | single file) and get a fresh code model which accounts for changes. | 46 | Pay attention to the **Architecture Invariant** sections. |
47 | They often talk about things which are deliberately absent in the source code. | ||
40 | 48 | ||
41 | The underlying engine makes sure that model is computed lazily (on-demand) and | 49 | Note also which crates are **API Boundaries**. |
42 | can be quickly updated for small modifications. | 50 | Remember, [rules at the boundary are different](https://www.tedinski.com/2018/02/06/system-boundaries.html). |
43 | 51 | ||
52 | ### `xtask` | ||
44 | 53 | ||
45 | ## Code generation | 54 | This is rust-analyzer's "build system". |
55 | We use cargo to compile rust code, but there are also various other tasks, like release management or local installation. | ||
56 | They are handled by Rust code in the xtask directory. | ||
46 | 57 | ||
47 | Some of the components of this repository are generated through automatic | 58 | ### `editors/code` |
48 | processes. `cargo xtask codegen` runs all generation tasks. Generated code is | ||
49 | committed to the git repository. | ||
50 | 59 | ||
51 | In particular, `cargo xtask codegen` generates: | 60 | VS Code plugin. |
52 | 61 | ||
53 | 1. [`syntax_kind/generated`](https://github.com/rust-analyzer/rust-analyzer/blob/a0be39296d2925972cacd9fbf8b5fb258fad6947/crates/ra_parser/src/syntax_kind/generated.rs) | 62 | ### `libs/` |
54 | -- the set of terminals and non-terminals of rust grammar. | ||
55 | 63 | ||
56 | 2. [`ast/generated`](https://github.com/rust-analyzer/rust-analyzer/blob/a0be39296d2925972cacd9fbf8b5fb258fad6947/crates/ra_syntax/src/ast/generated.rs) | 64 | rust-analyzer independent libraries which we publish to crates.io. |
57 | -- AST data structure. | 65 | It's not heavily utilized at the moment. |
58 | 66 | ||
59 | 3. [`doc_tests/generated`](https://github.com/rust-analyzer/rust-analyzer/blob/a0be39296d2925972cacd9fbf8b5fb258fad6947/crates/assists/src/doc_tests/generated.rs), | 67 | ### `crates/parser` |
60 | [`test_data/parser/inline`](https://github.com/rust-analyzer/rust-analyzer/tree/a0be39296d2925972cacd9fbf8b5fb258fad6947/crates/ra_syntax/test_data/parser/inline) | ||
61 | -- tests for assists and the parser. | ||
62 | 68 | ||
63 | The source for 1 and 2 is in [`ast_src.rs`](https://github.com/rust-analyzer/rust-analyzer/blob/a0be39296d2925972cacd9fbf8b5fb258fad6947/xtask/src/ast_src.rs). | 69 | It is a hand-written recursive descent parser, which produces a sequence of events like "start node X", "finish node Y". |
70 | It works similarly to | ||
71 | [kotlin's parser](https://github.com/JetBrains/kotlin/blob/4d951de616b20feca92f3e9cc9679b2de9e65195/compiler/frontend/src/org/jetbrains/kotlin/parsing/KotlinParsing.java), | ||
72 | which is a good source of inspiration for dealing with syntax errors and incomplete input. | ||
73 | Original [libsyntax parser](https://github.com/rust-lang/rust/blob/6b99adeb11313197f409b4f7c4083c2ceca8a4fe/src/libsyntax/parse/parser.rs) is what we use for the definition of the Rust language. | ||
74 | `TreeSink` and `TokenSource` traits bridge the tree-agnostic parser from `grammar` with `rowan` trees. | ||
64 | 75 | ||
65 | ## Code Walk-Through | 76 | **Architecture Invariant:** the parser is independent of the particular tree structure and particular representation of the tokens. |
77 | It transforms one flat stream of events into another flat stream of events. | ||
78 | Token independence allows us to parse out both text-based source code and `tt`-based macro input. | ||
79 | Tree independence allows us to more easily vary the syntax tree implementation. | ||
80 | It should also unlock efficient light-parsing approaches. | ||
81 | For example, you can extract the set of names defined in a file (for typo correction) without building a syntax tree. | ||
66 | 82 | ||
67 | ### `crates/ra_syntax`, `crates/parser` | 83 | **Architecture Invariant:** parsing never fails, the parser produces `(T, Vec<Error>)` rather than `Result<T, Error>`. |
68 | 84 | ||
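A toy sketch of the event-stream idea described in this section; the names below (`Event`, `Sink`, `Printer`) are illustrative stand-ins, not the real `parser` crate's `Event`, `TreeSink`, or `TokenSource` types, which are keyed on `SyntaxKind` and carry error information:

```rust
// Toy sketch of the "flat stream of events" idea.
#[derive(Debug)]
enum Event {
    StartNode(&'static str),
    Token(&'static str, String),
    FinishNode,
}

// A sink turns the flat event stream into whatever tree representation the
// consumer wants (rust-analyzer builds rowan green trees from it).
trait Sink {
    fn event(&mut self, event: Event);
}

// This sink just prints an indented outline instead of building a tree.
struct Printer {
    depth: usize,
}

impl Sink for Printer {
    fn event(&mut self, event: Event) {
        match event {
            Event::StartNode(kind) => {
                println!("{}{}", "  ".repeat(self.depth), kind);
                self.depth += 1;
            }
            Event::Token(kind, text) => {
                println!("{}{} {:?}", "  ".repeat(self.depth), kind, text);
            }
            Event::FinishNode => self.depth -= 1,
        }
    }
}

fn main() {
    // Events a hypothetical parser could produce for `fn f() {}`.
    let events = vec![
        Event::StartNode("FN"),
        Event::Token("FN_KW", "fn".into()),
        Event::Token("NAME", "f".into()),
        Event::StartNode("BLOCK_EXPR"),
        Event::FinishNode,
        Event::FinishNode,
    ];
    let mut sink = Printer { depth: 0 };
    for event in events {
        sink.event(event);
    }
}
```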
69 | Rust syntax tree structure and parser. See | 85 | ### `crates/syntax` |
70 | [RFC](https://github.com/rust-lang/rfcs/pull/2256) and [./syntax.md](./syntax.md) for some design notes. | 86 | |
87 | Rust syntax tree structure and parser. | ||
88 | See [RFC](https://github.com/rust-lang/rfcs/pull/2256) and [./syntax.md](./syntax.md) for some design notes. | ||
71 | 89 | ||
72 | - [rowan](https://github.com/rust-analyzer/rowan) library is used for constructing syntax trees. | 90 | - [rowan](https://github.com/rust-analyzer/rowan) library is used for constructing syntax trees. |
73 | - `grammar` module is the actual parser. It is a hand-written recursive descent parser, which | ||
74 | produces a sequence of events like "start node X", "finish node Y". It works similarly to [kotlin's parser](https://github.com/JetBrains/kotlin/blob/4d951de616b20feca92f3e9cc9679b2de9e65195/compiler/frontend/src/org/jetbrains/kotlin/parsing/KotlinParsing.java), | ||
75 | which is a good source of inspiration for dealing with syntax errors and incomplete input. Original [libsyntax parser](https://github.com/rust-lang/rust/blob/6b99adeb11313197f409b4f7c4083c2ceca8a4fe/src/libsyntax/parse/parser.rs) | ||
76 | is what we use for the definition of the Rust language. | ||
77 | - `TreeSink` and `TokenSource` traits bridge the tree-agnostic parser from `grammar` with `rowan` trees. | ||
78 | - `ast` provides a type safe API on top of the raw `rowan` tree. | 91 | - `ast` provides a type safe API on top of the raw `rowan` tree. |
79 | - `ast_src` description of the grammar, which is used to generate `syntax_kinds` | 92 | - `ungrammar` description of the grammar, which is used to generate `syntax_kinds` and `ast` modules, using `cargo xtask codegen` command. |
80 | and `ast` modules, using `cargo xtask codegen` command. | 93 | |
94 | Tests for ra_syntax are mostly data-driven. | ||
95 | `test_data/parser` contains subdirectories with a bunch of `.rs` (test vectors) and `.txt` files with corresponding syntax trees. | ||
96 | During testing, we check `.rs` against `.txt`. | ||
97 | If the `.txt` file is missing, it is created (this is how you update tests). | ||
98 | Additionally, running `cargo xtask codegen` will walk the grammar module and collect all `// test test_name` comments into files inside `test_data/parser/inline` directory. | ||
99 | |||
100 | To update test data, run with `UPDATE_EXPECT` variable: | ||
81 | 101 | ||
82 | Tests for ra_syntax are mostly data-driven: `test_data/parser` contains subdirectories with a bunch of `.rs` | 102 | ```bash |
83 | (test vectors) and `.txt` files with corresponding syntax trees. During testing, we check | 103 | env UPDATE_EXPECT=1 cargo qt |
84 | `.rs` against `.txt`. If the `.txt` file is missing, it is created (this is how you update | 104 | ``` |
85 | tests). Additionally, running `cargo xtask codegen` will walk the grammar module and collect | ||
86 | all `// test test_name` comments into files inside `test_data/parser/inline` directory. | ||
87 | 105 | ||
88 | Note | 106 | After adding a new inline test you need to run `cargo xtask codegen` and also update the test data as described above. |
89 | [`api_walkthrough`](https://github.com/rust-analyzer/rust-analyzer/blob/2fb6af89eb794f775de60b82afe56b6f986c2a40/crates/ra_syntax/src/lib.rs#L190-L348) | 107 | |
108 | Note [`api_walkthrough`](https://github.com/rust-analyzer/rust-analyzer/blob/2fb6af89eb794f775de60b82afe56b6f986c2a40/crates/ra_syntax/src/lib.rs#L190-L348) | ||
90 | in particular: it shows off various methods of working with syntax tree. | 109 | in particular: it shows off various methods of working with syntax tree. |
91 | 110 | ||
92 | See [#93](https://github.com/rust-analyzer/rust-analyzer/pull/93) for an example PR which | 111 | See [#93](https://github.com/rust-analyzer/rust-analyzer/pull/93) for an example PR which fixes a bug in the grammar. |
93 | fixes a bug in the grammar. | 112 | |
113 | **Architecture Invariant:** `syntax` crate is completely independent from the rest of rust-analyzer. It knows nothing about salsa or LSP. | ||
114 | This is important because it is possible to make useful tooling using only the syntax tree. | ||
115 | Without semantic information, you don't need to be able to _build_ code, which makes the tooling more robust. | ||
116 | See also https://web.stanford.edu/~mlfbrown/paper.pdf. | ||
117 | You can view the `syntax` crate as an entry point to rust-analyzer. | ||
118 | `syntax` crate is an **API Boundary**. | ||
119 | |||
120 | **Architecture Invariant:** syntax tree is a value type. | ||
121 | The tree is fully determined by the contents of its syntax nodes, it doesn't need global context (like an interner) and doesn't store semantic info. | ||
122 | Using the tree as a store for semantic info is convenient in traditional compilers, but doesn't work nicely in the IDE. | ||
123 | Specifically, assists and refactors require transforming syntax trees, and that becomes awkward if you need to do something with the semantic info. | ||
124 | |||
125 | **Architecture Invariant:** syntax tree is built for a single file. | ||
126 | This is to enable parallel parsing of all files. | ||
127 | |||
128 | **Architecture Invariant:** Syntax trees are by design incomplete and do not enforce well-formedness. | ||
129 | If an AST method returns an `Option`, it *can* be `None` at runtime, even if this is forbidden by the grammar. | ||
94 | 130 | ||
95 | ### `crates/base_db` | 131 | ### `crates/base_db` |
96 | 132 | ||
97 | We use the [salsa](https://github.com/salsa-rs/salsa) crate for incremental and | 133 | We use the [salsa](https://github.com/salsa-rs/salsa) crate for incremental and on-demand computation. |
98 | on-demand computation. Roughly, you can think of salsa as a key-value store, but | 134 | Roughly, you can think of salsa as a key-value store, but it can also compute derived values using specified functions. The `base_db` crate provides basic infrastructure for interacting with salsa. |
99 | it also can compute derived values using specified functions. The `base_db` crate | 135 | Crucially, it defines most of the "input" queries: facts supplied by the client of the analyzer. |
100 | provides basic infrastructure for interacting with salsa. Crucially, it | 136 | Reading the docs of the `base_db::input` module should be useful: everything else is strictly derived from those inputs. |
101 | defines most of the "input" queries: facts supplied by the client of the | 137 | |
102 | analyzer. Reading the docs of the `base_db::input` module should be useful: | 138 | **Architecture Invariant:** particularities of the build system are *not* part of the ground state.
103 | everything else is strictly derived from those inputs. | 139 | In particular, `base_db` knows nothing about cargo. |
140 | The `CrateGraph` structure is used to represent the dependencies between the crates abstractly. | ||
141 | |||
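A hand-rolled sketch of the "inputs vs. derived values" split described above (the real implementation is salsa; `FileId`, `set_file_text` and `line_count` are illustrative names, and salsa would additionally memoize and invalidate the derived query):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct FileId(u32);

#[derive(Default)]
struct Database {
    // "Input" facts, supplied by the client of the analyzer.
    file_text: HashMap<FileId, String>,
}

impl Database {
    fn set_file_text(&mut self, file: FileId, text: String) {
        self.file_text.insert(file, text);
    }

    // A "derived" query: strictly a function of the inputs.
    fn line_count(&self, file: FileId) -> usize {
        self.file_text.get(&file).map_or(0, |text| text.lines().count())
    }
}

fn main() {
    let mut db = Database::default();
    db.set_file_text(FileId(0), "fn main() {\n}\n".to_string());
    assert_eq!(db.line_count(FileId(0)), 2);
}
```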
142 | **Architecture Invariant:** `base_db` doesn't know about file system and file paths. | ||
143 | Files are represented with opaque `FileId`, there's no operation to get an `std::path::Path` out of the `FileId`. | ||
144 | |||
145 | ### `crates/hir_expand`, `crates/hir_def`, `crates/hir_ty` | ||
146 | |||
147 | These crates are the *brain* of rust-analyzer. | ||
148 | This is the compiler part of the IDE. | ||
149 | |||
150 | `hir_xxx` crates have a strong ECS flavor, in that they work with raw ids and directly query the database. | ||
151 | There's little abstraction here. | ||
152 | These crates integrate deeply with salsa and chalk. | ||
153 | |||
154 | Name resolution, macro expansion and type inference all happen here. | ||
155 | These crates also define various intermediate representations of the core. | ||
156 | |||
157 | `ItemTree` condenses a single `SyntaxTree` into a "summary" data structure, which is stable over modifications to function bodies. | ||
104 | 158 | ||
105 | ### `crates/hir*` crates | 159 | `DefMap` contains the module tree of a crate and stores module scopes. |
106 | 160 | ||
107 | HIR provides high-level "object oriented" access to Rust code. | 161 | `Body` stores information about expressions. |
108 | 162 | ||
109 | The principal difference between HIR and syntax trees is that HIR is bound to a | 163 | **Architecture Invariant:** these crates are not, and will never be, an API boundary.
110 | particular crate instance. That is, it has cfg flags and features applied. So, | ||
111 | the relation between syntax and HIR is many-to-one. The `source_binder` module | ||
112 | is responsible for guessing a HIR for a particular source position. | ||
113 | 164 | ||
114 | Underneath, HIR works on top of salsa, using a `HirDatabase` trait. | 165 | **Architecture Invariant:** these crates explicitly care about being incremental. |
166 | The core invariant we maintain is "typing inside a function's body never invalidates global derived data". | ||
167 | i.e., if you change the body of `foo`, all facts about `bar` should remain intact. | ||
115 | 168 | ||
116 | `hir_xxx` crates have a strong ECS flavor, in that they work with raw ids and | 169 | **Architecture Invariant:** hir exists only in the context of a particular crate instance with specific CFG flags.
117 | directly query the database. | 170 | The same syntax may produce several instances of HIR if the crate participates in the crate graph more than once. |
118 | 171 | ||
119 | The top-level `hir` façade crate wraps ids into a more OO-flavored API. | 172 | ### `crates/hir` |
173 | |||
174 | The top-level `hir` crate is an **API Boundary**. | ||
175 | If you think about "using rust-analyzer as a library", `hir` crate is most likely the façade you'll be talking to. | ||
176 | |||
177 | It wraps ECS-style internal API into a more OO-flavored API (with an extra `db` argument for each call). | ||
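A rough sketch of what that wrapping looks like, with made-up types rather than the real `hir` API: the wrapper is a plain `Copy` value holding a raw id, and every method takes the database explicitly.

```rust
// ECS-style raw id, as used by the internal `hir_*` crates.
#[derive(Clone, Copy)]
struct FunctionId(u32);

// Stand-in for the salsa database.
struct Database {
    names: Vec<String>, // indexed by FunctionId, purely for the sake of the sketch
}

// OO-flavored wrapper exposed by the `hir` façade.
#[derive(Clone, Copy)]
struct Function {
    id: FunctionId,
}

impl Function {
    // Every method takes `db` explicitly, so the wrapper itself stays an inert value.
    fn name(self, db: &Database) -> &str {
        &db.names[self.id.0 as usize]
    }
}

fn main() {
    let db = Database { names: vec!["main".to_string()] };
    let func = Function { id: FunctionId(0) };
    assert_eq!(func.name(&db), "main");
}
```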
178 | |||
179 | **Architecture Invariant:** `hir` provides a static, fully resolved view of the code. | ||
180 | While internal `hir_*` crates _compute_ things, `hir`, from the outside, looks like an inert data structure. | ||
181 | |||
182 | `hir` also handles the delicate task of going from syntax to the corresponding `hir`. | ||
183 | Remember that the mapping here is one-to-many. | ||
184 | See `Semantics` type and `source_to_def` module. | ||
185 | |||
186 | Note in particular a curious recursive structure in `source_to_def`. | ||
187 | We first resolve the parent _syntax_ node to the parent _hir_ element. | ||
188 | Then we ask the _hir_ parent what _syntax_ children it has. | ||
189 | Then we look for our node in the set of children. | ||
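The recursion can be sketched with toy types (none of these are the real `source_to_def` structures, and the real code additionally skips ancestors that have no corresponding def):

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct SyntaxId(usize);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct HirId(usize);

struct Map {
    syntax_parent: Vec<Option<SyntaxId>>,      // parent links in the syntax tree
    hir_children: Vec<Vec<(SyntaxId, HirId)>>, // hir element -> its (syntax, hir) children
    root: HirId,                               // hir element for the file itself
}

fn source_to_def(map: &Map, node: SyntaxId) -> Option<HirId> {
    // The root syntax node corresponds to the file's own hir element.
    let parent = match map.syntax_parent[node.0] {
        None => return Some(map.root),
        Some(parent) => parent,
    };
    // 1. Resolve the parent *syntax* node to the parent *hir* element (recursively).
    let parent_hir = source_to_def(map, parent)?;
    // 2. Ask the hir parent for its syntax children, 3. look for our node among them.
    map.hir_children[parent_hir.0]
        .iter()
        .find(|(syntax, _)| *syntax == node)
        .map(|&(_, hir)| hir)
}

fn main() {
    // A file (hir 0) with one function (syntax 1 -> hir 1) containing one parameter
    // (syntax 2 -> hir 2); syntax node 0 is the file's root node.
    let map = Map {
        syntax_parent: vec![None, Some(SyntaxId(0)), Some(SyntaxId(1))],
        hir_children: vec![
            vec![(SyntaxId(1), HirId(1))], // children of the file
            vec![(SyntaxId(2), HirId(2))], // children of the function
            vec![],                        // the parameter has no hir children
        ],
        root: HirId(0),
    };
    assert_eq!(source_to_def(&map, SyntaxId(2)), Some(HirId(2)));
}
```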
190 | |||
191 | This is the heart of many IDE features, like goto definition, which start with figuring out the hir node at the cursor. | ||
192 | This is some kind of (yet unnamed) uber-IDE pattern, as it is present in Roslyn and Kotlin as well. | ||
120 | 193 | ||
121 | ### `crates/ide` | 194 | ### `crates/ide` |
122 | 195 | ||
123 | A stateful library for analyzing many Rust files as they change. `AnalysisHost` | 196 | The `ide` crate builds on top of `hir` semantic model to provide high-level IDE features like completion or goto definition. |
124 | is a mutable entity (clojure's atom) which holds the current state, incorporates | 197 | It is an **API Boundary**. |
125 | changes and hands out `Analysis` --- an immutable and consistent snapshot of | 198 | If you want to use IDE parts of rust-analyzer via LSP, custom flatbuffers-based protocol or just as a library in your text editor, this is the right API. |
126 | the world state at a point in time, which actually powers analysis. | 199 | |
200 | **Architecture Invariant:** `ide` crate's API is built out of POD types with public fields. | ||
201 | The API uses the editor's terminology; it talks in terms of offsets and string labels rather than definitions or types. | ||
202 | It is effectively the view in MVC and viewmodel in [MVVM](https://en.wikipedia.org/wiki/Model%E2%80%93view%E2%80%93viewmodel). | ||
203 | All arguments and return types are conceptually serializable. | ||
204 | In particular, syntax trees and hir types are generally absent from the API (but are used heavily in the implementation). | ||
205 | Shout outs to LSP developers for popularizing the idea that "UI" is a good place to draw a boundary at. | ||
206 | |||
207 | `ide` is also the first crate which has the notion of change over time. | ||
208 | `AnalysisHost` is a state to which you can transactionally `apply_change`. | ||
209 | `Analysis` is an immutable snapshot of the state. | ||
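A hand-rolled sketch of both points (POD results plus the host/snapshot split); `AnalysisHost`, `Analysis` and `HoverResult` here only mirror the real names, the actual types are much richer:

```rust
use std::sync::Arc;

#[derive(Clone)]
struct State {
    files: Vec<(String, String)>, // (path, text)
}

// Mutable entity that accepts changes.
struct AnalysisHost {
    state: Arc<State>,
}

// Immutable, consistent snapshot of the world at one point in time.
struct Analysis {
    state: Arc<State>,
}

// "POD" result type: public fields, editor terminology (offsets and labels).
struct HoverResult {
    pub offset: usize,
    pub markup: String,
}

impl AnalysisHost {
    fn apply_change(&mut self, path: &str, new_text: &str) {
        let mut state = (*self.state).clone();
        match state.files.iter().position(|(p, _)| p == path) {
            Some(idx) => state.files[idx].1 = new_text.to_string(),
            None => state.files.push((path.to_string(), new_text.to_string())),
        }
        self.state = Arc::new(state);
    }

    fn analysis(&self) -> Analysis {
        Analysis { state: Arc::clone(&self.state) }
    }
}

impl Analysis {
    fn hover(&self, path: &str, offset: usize) -> Option<HoverResult> {
        let (_, text) = self.state.files.iter().find(|(p, _)| p == path)?;
        Some(HoverResult { offset, markup: format!("{} bytes in {path}", text.len()) })
    }
}

fn main() {
    let mut host = AnalysisHost { state: Arc::new(State { files: Vec::new() }) };
    host.apply_change("src/lib.rs", "fn main() {}");
    let snapshot = host.analysis();
    host.apply_change("src/lib.rs", "fn main() { println!(); }"); // snapshot is unaffected
    let hover = snapshot.hover("src/lib.rs", 3).unwrap();
    println!("offset {}: {}", hover.offset, hover.markup);
}
```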
127 | 210 | ||
128 | One interesting aspect of analysis is its support for cancellation. When a | 211 | Internally, `ide` is split across several crates. `ide_assists`, `ide_completion` and `ide_ssr` implement large isolated features. |
129 | change is applied to `AnalysisHost`, first all currently active snapshots are | 212 | `ide_db` implements common IDE functionality (notably, reference search is implemented here). |
130 | canceled. Only after all snapshots are dropped the change actually affects the | 213 | The `ide` crate contains a public API/façade, as well as the implementation of a plethora of smaller features.
131 | database. | ||
132 | 214 | ||
133 | APIs in this crate are IDE centric: they take text offsets as input and produce | 215 | **Architecture Invariant:** `ide` crate strives to provide a _perfect_ API. |
134 | offsets and strings as output. This works on top of rich code model powered by | 216 | Although at the moment it has only one consumer, the LSP server, LSP *does not* influence its API design.
135 | `hir`. | 217 | Instead, we keep in mind a hypothetical _ideal_ client -- an IDE tailored specifically for Rust, every nook and cranny of which is packed with Rust-specific goodies.
136 | 218 | ||
137 | ### `crates/rust-analyzer` | 219 | ### `crates/rust-analyzer` |
138 | 220 | ||
139 | An LSP implementation which wraps `ide` into a language server protocol. | 221 | This crate defines the `rust-analyzer` binary, so it is the **entry point**. |
222 | It implements the language server. | ||
223 | |||
224 | **Architecture Invariant:** `rust-analyzer` is the only crate that knows about LSP and JSON serialization. | ||
225 | If you want to expose a data structure `X` from ide to LSP, don't make it serializable. | ||
226 | Instead, create a serializable counterpart in `rust-analyzer` crate and manually convert between the two. | ||
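A sketch of that conversion pattern with illustrative types (in the real code the conversions live in dedicated modules such as `to_proto`, and the counterpart derives `serde` traits, which are omitted here):

```rust
// Lives in `ide`: plain data, no serialization derives.
mod ide {
    pub struct NavigationTarget {
        pub file: String,
        pub offset: u32,
        pub name: String,
    }
}

// Lives in `rust-analyzer`: the wire-shaped counterpart (serde derives omitted).
mod lsp_ext {
    pub struct Location {
        pub uri: String,
        pub offset: u32,
    }
}

// Manual conversion at the boundary.
fn to_proto(nav: &ide::NavigationTarget) -> lsp_ext::Location {
    lsp_ext::Location { uri: format!("file:///{}", nav.file), offset: nav.offset }
}

fn main() {
    let nav = ide::NavigationTarget { file: "src/lib.rs".into(), offset: 10, name: "main".into() };
    let loc = to_proto(&nav);
    println!("{} @ {} ({})", loc.uri, loc.offset, nav.name);
}
```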
227 | |||
228 | `GlobalState` is the state of the server. | ||
229 | The `main_loop` defines the server event loop which accepts requests and sends responses. | ||
230 | Requests that modify the state or might block the user's typing are handled on the main thread. | ||
231 | All other requests are processed in the background. | ||
232 | |||
233 | **Architecture Invariant:** the server is stateless, a-la HTTP. | ||
234 | Sometimes state needs to be preserved between requests. | ||
235 | For example, "what is the `edit` for the fifth completion item of the last completion request?". | ||
236 | For this, the second request should include enough info to re-create the context from scratch. | ||
237 | This generally means including all the parameters of the original request. | ||
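A toy illustration of the idea (types are made up, not the actual protocol): the follow-up request repeats the original parameters, so the server can rebuild the context from scratch.

```rust
struct CompletionParams {
    file: String,
    offset: u32,
}

// The follow-up ("resolve") request carries the original parameters plus the index
// of the item the client is asking about, instead of referring to server-side state.
struct CompletionResolveParams {
    original: CompletionParams,
    item_index: usize,
}

fn compute_completions(params: &CompletionParams) -> Vec<String> {
    vec![format!("item for {}:{}", params.file, params.offset)]
}

fn resolve(params: CompletionResolveParams) -> String {
    // Recompute the completion list from scratch, then pick the requested item.
    let items = compute_completions(&params.original);
    items[params.item_index].clone()
}

fn main() {
    let edit = resolve(CompletionResolveParams {
        original: CompletionParams { file: "src/lib.rs".into(), offset: 42 },
        item_index: 0,
    });
    println!("{edit}");
}
```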
238 | |||
239 | `reload` module contains the code that handles configuration and Cargo.toml changes. | ||
240 | This is a tricky business. | ||
241 | |||
242 | **Architecture Invariant:** `rust-analyzer` should be partially available even when the build is broken. | ||
243 | Reloading process should not prevent IDE features from working. | ||
244 | |||
245 | ### `crates/toolchain`, `crates/project_model`, `crates/flycheck` | ||
246 | |||
247 | These crates deal with invoking `cargo` to learn about project structure and get compiler errors for the "check on save" feature. | ||
248 | |||
249 | They use `crates/paths` heavily instead of `std::path`. | ||
250 | A single `rust-analyzer` process can serve many projects, so it is important that the server's current directory does not leak. | ||
251 | |||
252 | ### `crates/mbe`, `crates/tt`, `crates/proc_macro_api`, `crates/proc_macro_srv` | ||
253 | |||
254 | These crates implement macros as token tree -> token tree transforms. | ||
255 | They are independent from the rest of the code. | ||
256 | |||
257 | ### `crates/cfg` | ||
258 | |||
259 | This crate is responsible for parsing, evaluation and general definition of `cfg` attributes. | ||
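As a sketch of the shape of the problem (the real `cfg` crate's types differ; this is only an illustration), a cfg expression is a small boolean language evaluated against the set of enabled options:

```rust
use std::collections::HashSet;

enum CfgExpr {
    Atom(String),      // e.g. `unix`; key-value atoms are flattened to "key=value" here
    All(Vec<CfgExpr>), // cfg(all(...))
    Any(Vec<CfgExpr>), // cfg(any(...))
    Not(Box<CfgExpr>), // cfg(not(...))
}

fn eval(expr: &CfgExpr, enabled: &HashSet<String>) -> bool {
    match expr {
        CfgExpr::Atom(name) => enabled.contains(name),
        CfgExpr::All(exprs) => exprs.iter().all(|e| eval(e, enabled)),
        CfgExpr::Any(exprs) => exprs.iter().any(|e| eval(e, enabled)),
        CfgExpr::Not(inner) => !eval(inner, enabled),
    }
}

fn main() {
    let enabled: HashSet<String> =
        ["unix", "feature=serde"].iter().map(|s| s.to_string()).collect();
    // cfg(all(unix, any(feature = "serde", feature = "json"), not(windows)))
    let expr = CfgExpr::All(vec![
        CfgExpr::Atom("unix".into()),
        CfgExpr::Any(vec![
            CfgExpr::Atom("feature=serde".into()),
            CfgExpr::Atom("feature=json".into()),
        ]),
        CfgExpr::Not(Box::new(CfgExpr::Atom("windows".into()))),
    ]);
    assert!(eval(&expr, &enabled));
}
```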
140 | 260 | ||
141 | ### `crates/vfs` | 261 | ### `crates/vfs`, `crates/vfs-notify` |
142 | 262 | ||
143 | Although `hir` and `ide` don't do any IO, we need to be able to read | 263 | These crates implement a virtual file system. |
144 | files from disk at the end of the day. This is what `vfs` does. It also | 264 | They provide consistent snapshots of the underlying file system and insulate messy OS paths. |
145 | manages overlays: "dirty" files in the editor, whose "true" contents is | ||
146 | different from data on disk. | ||
147 | 265 | ||
148 | ## Testing Infrastructure | 266 | **Architecture Invariant:** vfs doesn't assume a single unified file system. |
267 | i.e., a single rust-analyzer process can act as a remote server for two different machines, where the same `/tmp/foo.rs` path points to different files. | ||
268 | For this reason, all path APIs generally take some existing path as a "file system witness". | ||
149 | 269 | ||
150 | Rust Analyzer has three interesting [systems | 270 | ### `crates/stdx` |
151 | boundaries](https://www.tedinski.com/2018/04/10/making-tests-a-positive-influence-on-design.html) | ||
152 | to concentrate tests on. | ||
153 | 271 | ||
154 | The outermost boundary is the `rust-analyzer` crate, which defines an LSP | 272 | This crate contains various non-rust-analyzer specific utils, which could have been in std, as well |
155 | interface in terms of stdio. We do integration testing of this component, by | 273 | as copies of unstable std items we would like to make use of already, like `std::str::split_once`. |
156 | feeding it with a stream of LSP requests and checking responses. These tests are | ||
157 | known as "heavy", because they interact with Cargo and read real files from | ||
158 | disk. For this reason, we try to avoid writing too many tests on this boundary: | ||
159 | in a statically typed language, it's hard to make an error in the protocol | ||
160 | itself if messages are themselves typed. | ||
161 | 274 | ||
162 | The middle, and most important, boundary is `ide`. Unlike | 275 | ### `crates/profile` |
163 | `rust-analyzer`, which exposes API, `ide` uses Rust API and is intended to | ||
164 | use by various tools. Typical test creates an `AnalysisHost`, calls some | ||
165 | `Analysis` functions and compares the results against expectation. | ||
166 | 276 | ||
167 | The innermost and most elaborate boundary is `hir`. It has a much richer | 277 | This crate contains utilities for CPU and memory profiling. |
168 | vocabulary of types than `ide`, but the basic testing setup is the same: we | 278 | |
169 | create a database, run some queries, assert result. | 279 | |
280 | ## Cross-Cutting Concerns | ||
281 | |||
282 | This section talks about the things which are everywhere and nowhere in particular. | ||
283 | |||
284 | ### Code generation | ||
285 | |||
286 | Some of the components of this repository are generated through automatic processes. | ||
287 | `cargo xtask codegen` runs all generation tasks. | ||
288 | Generated code is generally committed to the git repository. | ||
289 | There are tests to check that the generated code is fresh. | ||
290 | |||
291 | In particular, we generate: | ||
292 | |||
293 | * API for working with syntax trees (`syntax::ast`, the [`ungrammar`](https://github.com/rust-analyzer/ungrammar) crate). | ||
294 | * Various sections of the manual: | ||
295 | |||
296 | * features | ||
297 | * assists | ||
298 | * config | ||
299 | |||
300 | * Documentation tests for assists | ||
301 | |||
302 | **Architecture Invariant:** we avoid bootstrapping. | ||
303 | For codegen we need to parse Rust code. | ||
304 | Using rust-analyzer for that would work and would be fun, but it would also complicate the build process a lot. | ||
305 | For that reason, we use syn and manual string parsing. | ||
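A minimal sketch of the "use syn, not rust-analyzer itself" approach (it assumes a `syn` dependency with the `full` feature in the codegen tool's Cargo.toml; the real codegen does much more than print):

```rust
// Cargo.toml (of the codegen tool, illustrative): syn = { version = "1", features = ["full"] }
fn main() {
    let source = "fn generated() {} struct Foo;";
    let file: syn::File = syn::parse_file(source).expect("valid Rust source");
    // Walk the parsed items and emit something based on them.
    for item in &file.items {
        match item {
            syn::Item::Fn(item_fn) => println!("fn {}", item_fn.sig.ident),
            syn::Item::Struct(item_struct) => println!("struct {}", item_struct.ident),
            _ => {}
        }
    }
}
```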
306 | |||
307 | ### Cancellation | ||
308 | |||
309 | Let's say that the IDE is in the process of computing syntax highlighting, when the user types `foo`. | ||
310 | What should happen? | ||
311 | `rust-analyzer`'s answer is that the highlighting process should be cancelled -- its results are now stale, and it also blocks modification of the inputs. | ||
312 | |||
313 | The salsa database maintains a global revision counter. | ||
314 | When applying a change, salsa bumps this counter and waits until all other threads using salsa finish. | ||
315 | If a thread does salsa-based computation and notices that the counter is incremented, it panics with a special value (see `Canceled::throw`). | ||
316 | That is, rust-analyzer requires unwinding. | ||
317 | |||
318 | `ide` is the boundary where the panic is caught and transformed into a `Result<T, Cancelled>`. | ||
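A self-contained sketch of cancellation-by-unwinding (the real machinery lives in salsa and `ide`; `REVISION`, `check_cancelled` and `run_query` are illustrative names):

```rust
use std::panic::{self, AssertUnwindSafe};
use std::sync::atomic::{AtomicU64, Ordering};

#[derive(Debug)]
struct Cancelled;

// Global revision counter; applying a change would bump it.
static REVISION: AtomicU64 = AtomicU64::new(0);

fn check_cancelled(started_at: u64) {
    if REVISION.load(Ordering::SeqCst) != started_at {
        // Unwind with a special payload; the boundary below turns it into an error.
        panic::panic_any(Cancelled);
    }
}

fn long_computation(started_at: u64) -> u64 {
    (0..1_000u64)
        .map(|chunk| {
            check_cancelled(started_at); // polled periodically during the computation
            chunk
        })
        .sum()
}

// The `ide`-like boundary: catch the unwind and surface it as a plain `Result`.
fn run_query() -> Result<u64, Cancelled> {
    let started_at = REVISION.load(Ordering::SeqCst);
    panic::catch_unwind(AssertUnwindSafe(|| long_computation(started_at)))
        .map_err(|payload| *payload.downcast::<Cancelled>().expect("not a cancellation"))
}

fn main() {
    println!("{:?}", run_query()); // Ok(499500); Err(Cancelled) if REVISION were bumped mid-run
}
```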
319 | |||
320 | ### Testing | ||
321 | |||
322 | Rust Analyzer has three interesting [system boundaries](https://www.tedinski.com/2018/04/10/making-tests-a-positive-influence-on-design.html) to concentrate tests on. | ||
323 | |||
324 | The outermost boundary is the `rust-analyzer` crate, which defines an LSP interface in terms of stdio. | ||
325 | We do integration testing of this component, by feeding it with a stream of LSP requests and checking responses. | ||
326 | These tests are known as "heavy", because they interact with Cargo and read real files from disk. | ||
327 | For this reason, we try to avoid writing too many tests on this boundary: in a statically typed language, it's hard to make an error in the protocol itself if messages are themselves typed. | ||
328 | Heavy tests are only run when the `RUN_SLOW_TESTS` env var is set. | ||
329 | |||
330 | The middle, and most important, boundary is `ide`. | ||
331 | Unlike `rust-analyzer`, which exposes an external API, `ide` exposes a Rust API and is intended for use by various tools. | ||
332 | A typical test creates an `AnalysisHost`, calls some `Analysis` functions and compares the results against expectation. | ||
333 | |||
334 | The innermost and most elaborate boundary is `hir`. | ||
335 | It has a much richer vocabulary of types than `ide`, but the basic testing setup is the same: we create a database, run some queries, assert result. | ||
170 | 336 | ||
171 | For comparisons, we use the `expect` crate for snapshot testing. | 337 | For comparisons, we use the `expect` crate for snapshot testing. |
172 | 338 | ||
173 | To test various analysis corner cases and avoid forgetting about old tests, we | 339 | To test various analysis corner cases and avoid forgetting about old tests, we use so-called marks. |
174 | use so-called marks. See the `marks` module in the `test_utils` crate for more. | 340 | See the `marks` module in the `test_utils` crate for more. |
341 | |||
342 | **Architecture Invariant:** rust-analyzer tests do not use libcore or libstd. | ||
343 | All required library code must be a part of the tests. | ||
344 | This ensures fast test execution. | ||
345 | |||
346 | **Architecture Invariant:** tests are data driven and do not test the API. | ||
347 | Tests which directly call various API functions are a liability, because they make refactoring the API significantly more complicated. | ||
348 | So most of the tests look like this: | ||
349 | |||
350 | ```rust | ||
351 | fn check(input: &str, expect: expect_test::Expect) { | ||
352 | // The single place that actually exercises a particular API | ||
353 | } | ||
354 | |||
355 | |||
356 | #[test] | ||
357 | fn foo() { | ||
358 | check("foo", expect![["bar"]]); | ||
359 | } | ||
360 | |||
361 | #[test] | ||
362 | fn spam() { | ||
363 | check("spam", expect![["eggs"]]); | ||
364 | } | ||
365 | // ...and a hundred more tests that don't care about the specific API at all. | ||
366 | ``` | ||
367 | |||
368 | To specify input data, we use a single string literal in a special format, which can describe a set of rust files. | ||
369 | See the `Fixture` type. | ||
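A rough sketch of the idea (the exact `//- /path` marker syntax of the real `Fixture` type is only approximated here): one literal is split into several named files.

```rust
// Split a fixture string into (path, contents) pairs on `//- ` markers.
fn parse_fixture(text: &str) -> Vec<(String, String)> {
    let mut files: Vec<(String, String)> = Vec::new();
    for line in text.lines() {
        if let Some(path) = line.strip_prefix("//- ") {
            files.push((path.trim().to_string(), String::new()));
        } else if let Some((_, contents)) = files.last_mut() {
            contents.push_str(line);
            contents.push('\n');
        }
    }
    files
}

fn main() {
    let fixture = r#"
//- /lib.rs
mod foo;
//- /foo.rs
pub fn bar() {}
"#;
    for (path, contents) in parse_fixture(fixture) {
        println!("{path}: {} bytes", contents.len());
    }
}
```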
370 | |||
371 | **Architecture Invariant:** all code invariants are tested by `#[test]` tests. | ||
372 | There are no additional checks in CI; formatting and tidy tests are run with `cargo test`. | ||
373 | |||
374 | **Architecture Invariant:** tests do not depend on any kind of external resources, they are perfectly reproducible. | ||
375 | |||
376 | ### Error Handling | ||
377 | |||
378 | **Architecture Invariant:** core parts of rust-analyzer (`ide`/`hir`) don't interact with the outside world and thus can't fail. | ||
379 | Only parts touching LSP are allowed to do IO. | ||
380 | |||
381 | Internals of rust-analyzer need to deal with broken code, but this is not an error condition. | ||
382 | rust-analyzer is robust: various analyses compute `(T, Vec<Error>)` rather than `Result<T, Error>`. | ||
383 | |||
384 | rust-analyzer is a complex long-running process. | ||
385 | It will always have bugs and panics. | ||
386 | But a panic in an isolated feature should not bring down the whole process. | ||
387 | Each LSP-request is protected by a `catch_unwind`. | ||
388 | We use `always` and `never` macros instead of `assert` to gracefully recover from impossible conditions. | ||
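A toy version of the `always!` idea (the real macros live in `stdx` and integrate with logging; this only shows the shape): check the invariant, but log and keep going instead of aborting the process.

```rust
macro_rules! always {
    ($cond:expr) => {{
        let cond = $cond;
        if !cond {
            // In real code this would go through the logging infrastructure.
            eprintln!("assertion failed (continuing anyway): {}", stringify!($cond));
        }
        cond
    }};
}

fn main() {
    let items: Vec<i32> = Vec::new();
    if always!(!items.is_empty()) {
        println!("first item: {}", items[0]);
    } else {
        println!("recovered from an 'impossible' state");
    }
}
```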
389 | |||
390 | ### Observability | ||
391 | |||
392 | rust-analyzer is a long-running process, so it's important to understand what's going on inside. | ||
393 | We have several instruments for that. | ||
394 | |||
395 | The event loop that runs rust-analyzer is very explicit. | ||
396 | Rather than spawning futures or scheduling callbacks (open), the event loop accepts an `enum` of possible events (closed). | ||
397 | It's easy to see all the things that trigger rust-analyzer processing, together with their performance. | ||
398 | |||
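A toy sketch of such a closed event loop (the event variants and channel setup are made up; the real loop also selects over several channels):

```rust
use std::sync::mpsc;
use std::time::Instant;

// Every possible wake-up is a variant of one enum, so all triggers are visible in one match.
enum Event {
    LspRequest(String),
    VfsChange { path: String },
    FlycheckFinished,
}

fn main() {
    let (tx, rx) = mpsc::channel();
    tx.send(Event::LspRequest("textDocument/completion".into())).unwrap();
    tx.send(Event::VfsChange { path: "src/lib.rs".into() }).unwrap();
    tx.send(Event::FlycheckFinished).unwrap();
    drop(tx);

    while let Ok(event) = rx.recv() {
        let started = Instant::now();
        match event {
            Event::LspRequest(method) => println!("handle LSP request: {method}"),
            Event::VfsChange { path } => println!("reload file: {path}"),
            Event::FlycheckFinished => println!("publish diagnostics"),
        }
        // Per-event timing falls out naturally, as noted above.
        println!("  handled in {:?}", started.elapsed());
    }
}
```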
399 | rust-analyzer includes a simple hierarchical profiler (`hprof`). | ||
400 | It is enabled with the `RA_PROFILE='*>50'` env var (log all (`*`) actions which take more than `50` ms) and produces output like: | ||
401 | |||
402 | ``` | ||
403 | 85ms - handle_completion | ||
404 | 68ms - import_on_the_fly | ||
405 | 67ms - import_assets::search_for_relative_paths | ||
406 | 0ms - crate_def_map:wait (804 calls) | ||
407 | 0ms - find_path (16 calls) | ||
408 | 2ms - find_similar_imports (1 calls) | ||
409 | 0ms - generic_params_query (334 calls) | ||
410 | 59ms - trait_solve_query (186 calls) | ||
411 | 0ms - Semantics::analyze_impl (1 calls) | ||
412 | 1ms - render_resolution (8 calls) | ||
413 | 0ms - Semantics::analyze_impl (5 calls) | ||
414 | ``` | ||
415 | |||
416 | This is cheap enough to enable in production. | ||
417 | |||
418 | |||
419 | Similarly, we support live object counting (`RA_COUNT=1`). | ||
420 | It is not cheap enough to enable in prod, and this is a bug which should be fixed. | ||
diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index d7f287894..b2defa737 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md | |||
@@ -238,7 +238,7 @@ As proper cursor positioning is raison-d'etat for `onEnter`, it uses `SnippetTex | |||
238 | * How to deal with synchronicity of the request? | 238 | * How to deal with synchronicity of the request? |
239 | One option is to require the client to block until the server returns the response. | 239 | One option is to require the client to block until the server returns the response. |
240 | Another option is to do a OT-style merging of edits from client and server. | 240 | Another option is to do a OT-style merging of edits from client and server. |
241 | A third option is to do a record-replay: client applies heuristic on enter immediatelly, then applies all user's keypresses. | 241 | A third option is to do a record-replay: client applies heuristic on enter immediately, then applies all user's keypresses. |
242 | When the server is ready with the response, the client rollbacks all the changes and applies the recorded actions on top of the correct response. | 242 | When the server is ready with the response, the client rollbacks all the changes and applies the recorded actions on top of the correct response. |
243 | * How to deal with multiple carets? | 243 | * How to deal with multiple carets? |
244 | * Should we extend this to arbitrary typed events and not just `onEnter`? | 244 | * Should we extend this to arbitrary typed events and not just `onEnter`? |
diff --git a/docs/dev/style.md b/docs/dev/style.md index e2f1b6996..0482bc190 100644 --- a/docs/dev/style.md +++ b/docs/dev/style.md | |||
@@ -159,7 +159,7 @@ Express function preconditions in types and force the caller to provide them (ra | |||
159 | 159 | ||
160 | ```rust | 160 | ```rust |
161 | // GOOD | 161 | // GOOD |
162 | fn frbonicate(walrus: Walrus) { | 162 | fn frobnicate(walrus: Walrus) { |
163 | ... | 163 | ... |
164 | } | 164 | } |
165 | 165 | ||
@@ -374,7 +374,7 @@ Avoid making a lot of code type parametric, *especially* on the boundaries betwe | |||
374 | 374 | ||
375 | ```rust | 375 | ```rust |
376 | // GOOD | 376 | // GOOD |
377 | fn frbonicate(f: impl FnMut()) { | 377 | fn frobnicate(f: impl FnMut()) { |
378 | frobnicate_impl(&mut f) | 378 | frobnicate_impl(&mut f) |
379 | } | 379 | } |
380 | fn frobnicate_impl(f: &mut dyn FnMut()) { | 380 | fn frobnicate_impl(f: &mut dyn FnMut()) { |
@@ -382,7 +382,7 @@ fn frobnicate_impl(f: &mut dyn FnMut()) { | |||
382 | } | 382 | } |
383 | 383 | ||
384 | // BAD | 384 | // BAD |
385 | fn frbonicate(f: impl FnMut()) { | 385 | fn frobnicate(f: impl FnMut()) { |
386 | // lots of code | 386 | // lots of code |
387 | } | 387 | } |
388 | ``` | 388 | ``` |
@@ -391,11 +391,11 @@ Avoid `AsRef` polymorphism, it pays back only for widely used libraries: | |||
391 | 391 | ||
392 | ```rust | 392 | ```rust |
393 | // GOOD | 393 | // GOOD |
394 | fn frbonicate(f: &Path) { | 394 | fn frobnicate(f: &Path) { |
395 | } | 395 | } |
396 | 396 | ||
397 | // BAD | 397 | // BAD |
398 | fn frbonicate(f: impl AsRef<Path>) { | 398 | fn frobnicate(f: impl AsRef<Path>) { |
399 | } | 399 | } |
400 | ``` | 400 | ``` |
401 | 401 | ||
@@ -705,7 +705,7 @@ fn foo() -> Option<Bar> { | |||
705 | } | 705 | } |
706 | ``` | 706 | ``` |
707 | 707 | ||
708 | **Rationale:** reduce congnitive stack usage. | 708 | **Rationale:** reduce cognitive stack usage. |
709 | 709 | ||
710 | ## Comparisons | 710 | ## Comparisons |
711 | 711 | ||
diff --git a/docs/dev/syntax.md b/docs/dev/syntax.md index 1edafab68..737cc7a72 100644 --- a/docs/dev/syntax.md +++ b/docs/dev/syntax.md | |||
@@ -92,19 +92,18 @@ [email protected] | |||
92 | [email protected] ")" | 92 | [email protected] ")" |
93 | [email protected] " " | 93 | [email protected] " " |
94 | [email protected] | 94 | [email protected] |
95 | [email protected] | 95 | [email protected] "{" |
96 | [email protected] "{" | 96 | [email protected] " " |
97 | [email protected] " " | 97 | [email protected] |
98 | [email protected] | 98 | [email protected] |
99 | [email protected] | 99 | [email protected] "90" |
100 | [email protected] "90" | 100 | [email protected] " " |
101 | [email protected] " " | 101 | [email protected] "+" |
102 | [email protected] "+" | 102 | [email protected] " " |
103 | [email protected] " " | 103 | [email protected] |
104 | [email protected] | 104 | [email protected] "2" |
105 | [email protected] "2" | 105 | [email protected] " " |
106 | [email protected] " " | 106 | [email protected] "}" |
107 | [email protected] "}" | ||
108 | ``` | 107 | ``` |
109 | 108 | ||
110 | #### Optimizations | 109 | #### Optimizations |
@@ -387,7 +386,7 @@ trait HasVisibility: AstNode { | |||
387 | fn visibility(&self) -> Option<Visibility>; | 386 | fn visibility(&self) -> Option<Visibility>; |
388 | } | 387 | } |
389 | 388 | ||
390 | impl HasVisbility for FnDef { | 389 | impl HasVisibility for FnDef { |
391 | fn visibility(&self) -> Option<Visibility> { | 390 | fn visibility(&self) -> Option<Visibility> { |
392 | self.syntax.children().find_map(Visibility::cast) | 391 | self.syntax.children().find_map(Visibility::cast) |
393 | } | 392 | } |
@@ -527,7 +526,7 @@ In practice, incremental reparsing doesn't actually matter much for IDE use-case | |||
527 | 526 | ||
528 | ### Parsing Algorithm | 527 | ### Parsing Algorithm |
529 | 528 | ||
530 | We use a boring hand-crafted recursive descent + pratt combination, with a special effort of continuting the parsing if an error is detected. | 529 | We use a boring hand-crafted recursive descent + pratt combination, with a special effort of continuing the parsing if an error is detected. |
531 | 530 | ||
532 | ### Parser Recap | 531 | ### Parser Recap |
533 | 532 | ||
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs index 16b06b853..b19985fb2 100644 --- a/xtask/src/lib.rs +++ b/xtask/src/lib.rs | |||
@@ -38,19 +38,13 @@ pub fn rust_files() -> impl Iterator<Item = PathBuf> { | |||
38 | rust_files_in(&project_root().join("crates")) | 38 | rust_files_in(&project_root().join("crates")) |
39 | } | 39 | } |
40 | 40 | ||
41 | pub fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> { | 41 | pub fn cargo_files() -> impl Iterator<Item = PathBuf> { |
42 | let iter = WalkDir::new(path); | 42 | files_in(&project_root(), "toml") |
43 | return iter | 43 | .filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false)) |
44 | .into_iter() | 44 | } |
45 | .filter_entry(|e| !is_hidden(e)) | ||
46 | .map(|e| e.unwrap()) | ||
47 | .filter(|e| !e.file_type().is_dir()) | ||
48 | .map(|e| e.into_path()) | ||
49 | .filter(|path| path.extension().map(|it| it == "rs").unwrap_or(false)); | ||
50 | 45 | ||
51 | fn is_hidden(entry: &DirEntry) -> bool { | 46 | pub fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> { |
52 | entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false) | 47 | files_in(path, "rs") |
53 | } | ||
54 | } | 48 | } |
55 | 49 | ||
56 | pub fn run_rustfmt(mode: Mode) -> Result<()> { | 50 | pub fn run_rustfmt(mode: Mode) -> Result<()> { |
@@ -120,3 +114,18 @@ fn date_iso() -> Result<String> { | |||
120 | fn is_release_tag(tag: &str) -> bool { | 114 | fn is_release_tag(tag: &str) -> bool { |
121 | tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit()) | 115 | tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit()) |
122 | } | 116 | } |
117 | |||
118 | fn files_in(path: &Path, ext: &'static str) -> impl Iterator<Item = PathBuf> { | ||
119 | let iter = WalkDir::new(path); | ||
120 | return iter | ||
121 | .into_iter() | ||
122 | .filter_entry(|e| !is_hidden(e)) | ||
123 | .map(|e| e.unwrap()) | ||
124 | .filter(|e| !e.file_type().is_dir()) | ||
125 | .map(|e| e.into_path()) | ||
126 | .filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false)); | ||
127 | |||
128 | fn is_hidden(entry: &DirEntry) -> bool { | ||
129 | entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false) | ||
130 | } | ||
131 | } | ||
diff --git a/xtask/tests/tidy.rs b/xtask/tests/tidy.rs index 9a6933b09..cb83e07fd 100644 --- a/xtask/tests/tidy.rs +++ b/xtask/tests/tidy.rs | |||
@@ -5,6 +5,7 @@ use std::{ | |||
5 | 5 | ||
6 | use xshell::{cmd, read_file}; | 6 | use xshell::{cmd, read_file}; |
7 | use xtask::{ | 7 | use xtask::{ |
8 | cargo_files, | ||
8 | codegen::{self, Mode}, | 9 | codegen::{self, Mode}, |
9 | project_root, run_rustfmt, rust_files, | 10 | project_root, run_rustfmt, rust_files, |
10 | }; | 11 | }; |
@@ -94,6 +95,32 @@ fn rust_files_are_tidy() { | |||
94 | } | 95 | } |
95 | 96 | ||
96 | #[test] | 97 | #[test] |
98 | fn cargo_files_are_tidy() { | ||
99 | for cargo in cargo_files() { | ||
100 | let mut section = None; | ||
101 | for (line_no, text) in read_file(&cargo).unwrap().lines().enumerate() { | ||
102 | let text = text.trim(); | ||
103 | if text.starts_with("[") { | ||
104 | section = Some(text); | ||
105 | continue; | ||
106 | } | ||
107 | if !section.map(|it| it.starts_with("[dependencies")).unwrap_or(false) { | ||
108 | continue; | ||
109 | } | ||
110 | let text: String = text.split_whitespace().collect(); | ||
111 | if text.contains("path=") && !text.contains("version") { | ||
112 | panic!( | ||
113 | "\ncargo internal dependencies should have version.\n\ | ||
114 | {}:{}\n", | ||
115 | cargo.display(), | ||
116 | line_no + 1 | ||
117 | ) | ||
118 | } | ||
119 | } | ||
120 | } | ||
121 | } | ||
122 | |||
123 | #[test] | ||
97 | fn check_merge_commits() { | 124 | fn check_merge_commits() { |
98 | let stdout = cmd!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19..") | 125 | let stdout = cmd!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19..") |
99 | .read() | 126 | .read() |