diff options
27 files changed, 287 insertions, 173 deletions
diff --git a/crates/completion/src/completions/unqualified_path.rs b/crates/completion/src/completions/unqualified_path.rs index 86c143b63..3bd776905 100644 --- a/crates/completion/src/completions/unqualified_path.rs +++ b/crates/completion/src/completions/unqualified_path.rs | |||
@@ -44,7 +44,9 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC | |||
44 | acc.add_resolution(ctx, name.to_string(), &res) | 44 | acc.add_resolution(ctx, name.to_string(), &res) |
45 | }); | 45 | }); |
46 | 46 | ||
47 | fuzzy_completion(acc, ctx).unwrap_or_default() | 47 | if ctx.config.enable_experimental_completions { |
48 | fuzzy_completion(acc, ctx).unwrap_or_default() | ||
49 | } | ||
48 | } | 50 | } |
49 | 51 | ||
50 | fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &Type) { | 52 | fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &Type) { |
@@ -79,32 +81,34 @@ fn fuzzy_completion(acc: &mut Completions, ctx: &CompletionContext) -> Option<() | |||
79 | 81 | ||
80 | let potential_import_name = ctx.token.to_string(); | 82 | let potential_import_name = ctx.token.to_string(); |
81 | 83 | ||
82 | let possible_imports = | 84 | let possible_imports = imports_locator::find_similar_imports( |
83 | imports_locator::find_similar_imports(&ctx.sema, ctx.krate?, &potential_import_name, 400) | 85 | &ctx.sema, |
84 | .filter_map(|import_candidate| match import_candidate { | 86 | ctx.krate?, |
85 | // when completing outside the use declaration, modules are pretty useless | 87 | &potential_import_name, |
86 | // and tend to bloat the completion suggestions a lot | 88 | 50, |
87 | Either::Left(ModuleDef::Module(_)) => None, | 89 | true, |
88 | Either::Left(module_def) => Some(( | 90 | ) |
89 | current_module.find_use_path(ctx.db, module_def)?, | 91 | .filter_map(|import_candidate| { |
90 | ScopeDef::ModuleDef(module_def), | 92 | Some(match import_candidate { |
91 | )), | 93 | Either::Left(module_def) => { |
92 | Either::Right(macro_def) => Some(( | 94 | (current_module.find_use_path(ctx.db, module_def)?, ScopeDef::ModuleDef(module_def)) |
93 | current_module.find_use_path(ctx.db, macro_def)?, | 95 | } |
94 | ScopeDef::MacroDef(macro_def), | 96 | Either::Right(macro_def) => { |
95 | )), | 97 | (current_module.find_use_path(ctx.db, macro_def)?, ScopeDef::MacroDef(macro_def)) |
96 | }) | 98 | } |
97 | .filter(|(mod_path, _)| mod_path.len() > 1) | 99 | }) |
98 | .filter_map(|(import_path, definition)| { | 100 | }) |
99 | render_resolution_with_import( | 101 | .filter(|(mod_path, _)| mod_path.len() > 1) |
100 | RenderContext::new(ctx), | 102 | .take(20) |
101 | import_path.clone(), | 103 | .filter_map(|(import_path, definition)| { |
102 | import_scope.clone(), | 104 | render_resolution_with_import( |
103 | ctx.config.merge, | 105 | RenderContext::new(ctx), |
104 | &definition, | 106 | import_path.clone(), |
105 | ) | 107 | import_scope.clone(), |
106 | }) | 108 | ctx.config.merge, |
107 | .take(20); | 109 | &definition, |
110 | ) | ||
111 | }); | ||
108 | 112 | ||
109 | acc.add_all(possible_imports); | 113 | acc.add_all(possible_imports); |
110 | Some(()) | 114 | Some(()) |
diff --git a/crates/completion/src/config.rs b/crates/completion/src/config.rs index 82874ff25..f50735372 100644 --- a/crates/completion/src/config.rs +++ b/crates/completion/src/config.rs | |||
@@ -9,6 +9,7 @@ use assists::utils::MergeBehaviour; | |||
9 | #[derive(Clone, Debug, PartialEq, Eq)] | 9 | #[derive(Clone, Debug, PartialEq, Eq)] |
10 | pub struct CompletionConfig { | 10 | pub struct CompletionConfig { |
11 | pub enable_postfix_completions: bool, | 11 | pub enable_postfix_completions: bool, |
12 | pub enable_experimental_completions: bool, | ||
12 | pub add_call_parenthesis: bool, | 13 | pub add_call_parenthesis: bool, |
13 | pub add_call_argument_snippets: bool, | 14 | pub add_call_argument_snippets: bool, |
14 | pub snippet_cap: Option<SnippetCap>, | 15 | pub snippet_cap: Option<SnippetCap>, |
@@ -30,6 +31,7 @@ impl Default for CompletionConfig { | |||
30 | fn default() -> Self { | 31 | fn default() -> Self { |
31 | CompletionConfig { | 32 | CompletionConfig { |
32 | enable_postfix_completions: true, | 33 | enable_postfix_completions: true, |
34 | enable_experimental_completions: true, | ||
33 | add_call_parenthesis: true, | 35 | add_call_parenthesis: true, |
34 | add_call_argument_snippets: true, | 36 | add_call_argument_snippets: true, |
35 | snippet_cap: Some(SnippetCap { _private: () }), | 37 | snippet_cap: Some(SnippetCap { _private: () }), |
diff --git a/crates/completion/src/lib.rs b/crates/completion/src/lib.rs index cb6e0554e..aecc1378b 100644 --- a/crates/completion/src/lib.rs +++ b/crates/completion/src/lib.rs | |||
@@ -67,6 +67,13 @@ pub use crate::{ | |||
67 | // fn test_name() {} | 67 | // fn test_name() {} |
68 | // } | 68 | // } |
69 | // ``` | 69 | // ``` |
70 | // | ||
71 | // There are also experimental completions, enabled with the `rust-analyzer.completion.enableExperimental` setting. | ||
72 | // This flag enables or disables: | ||
73 | // | ||
74 | // - Auto import: additional completion options that automatically insert a `use` statement, drawn from all importable items in the project that match the input | ||
75 | // | ||
76 | // Experimental completions might cause performance issues and clutter the completion list. | ||
70 | 77 | ||
71 | /// Main entry point for completion. We run completion as a two-phase process. | 78 | /// Main entry point for completion. We run completion as a two-phase process. |
72 | /// | 79 | /// |
diff --git a/crates/completion/src/render.rs b/crates/completion/src/render.rs index e892d4de8..bce02f577 100644 --- a/crates/completion/src/render.rs +++ b/crates/completion/src/render.rs | |||
@@ -150,6 +150,7 @@ impl<'a> Render<'a> { | |||
150 | import_data: Option<(ModPath, ImportScope, Option<MergeBehaviour>)>, | 150 | import_data: Option<(ModPath, ImportScope, Option<MergeBehaviour>)>, |
151 | resolution: &ScopeDef, | 151 | resolution: &ScopeDef, |
152 | ) -> Option<CompletionItem> { | 152 | ) -> Option<CompletionItem> { |
153 | let _p = profile::span("render_resolution"); | ||
153 | use hir::ModuleDef::*; | 154 | use hir::ModuleDef::*; |
154 | 155 | ||
155 | let completion_kind = match resolution { | 156 | let completion_kind = match resolution { |
diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml index 44499bc79..3d9436d69 100644 --- a/crates/flycheck/Cargo.toml +++ b/crates/flycheck/Cargo.toml | |||
@@ -12,7 +12,7 @@ doctest = false | |||
12 | [dependencies] | 12 | [dependencies] |
13 | crossbeam-channel = "0.5.0" | 13 | crossbeam-channel = "0.5.0" |
14 | log = "0.4.8" | 14 | log = "0.4.8" |
15 | cargo_metadata = "0.12.0" | 15 | cargo_metadata = "=0.12.0" |
16 | serde_json = "1.0.48" | 16 | serde_json = "1.0.48" |
17 | jod-thread = "0.1.1" | 17 | jod-thread = "0.1.1" |
18 | 18 | ||
diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs index 07333c453..8c767b249 100644 --- a/crates/hir/src/db.rs +++ b/crates/hir/src/db.rs | |||
@@ -11,7 +11,7 @@ pub use hir_def::db::{ | |||
11 | }; | 11 | }; |
12 | pub use hir_expand::db::{ | 12 | pub use hir_expand::db::{ |
13 | AstDatabase, AstDatabaseStorage, AstIdMapQuery, InternEagerExpansionQuery, InternMacroQuery, | 13 | AstDatabase, AstDatabaseStorage, AstIdMapQuery, InternEagerExpansionQuery, InternMacroQuery, |
14 | MacroArgTextQuery, MacroDefQuery, MacroExpandQuery, ParseMacroQuery, | 14 | MacroArgTextQuery, MacroDefQuery, MacroExpandQuery, ParseMacroExpansionQuery, |
15 | }; | 15 | }; |
16 | pub use hir_ty::db::*; | 16 | pub use hir_ty::db::*; |
17 | 17 | ||
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index ed110329d..93bdb4472 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs | |||
@@ -57,7 +57,7 @@ pub use hir_def::{ | |||
57 | visibility::Visibility, | 57 | visibility::Visibility, |
58 | }; | 58 | }; |
59 | pub use hir_expand::{ | 59 | pub use hir_expand::{ |
60 | db::MacroResult, name::known, name::AsName, name::Name, HirFileId, InFile, MacroCallId, | 60 | name::known, name::AsName, name::Name, ExpandResult, HirFileId, InFile, MacroCallId, |
61 | MacroCallLoc, /* FIXME */ MacroDefId, MacroFile, Origin, | 61 | MacroCallLoc, /* FIXME */ MacroDefId, MacroFile, Origin, |
62 | }; | 62 | }; |
63 | pub use hir_ty::display::HirDisplay; | 63 | pub use hir_ty::display::HirDisplay; |
diff --git a/crates/hir_def/src/import_map.rs b/crates/hir_def/src/import_map.rs index 1e24f29a8..c0f108848 100644 --- a/crates/hir_def/src/import_map.rs +++ b/crates/hir_def/src/import_map.rs | |||
@@ -7,7 +7,7 @@ use fst::{self, Streamer}; | |||
7 | use hir_expand::name::Name; | 7 | use hir_expand::name::Name; |
8 | use indexmap::{map::Entry, IndexMap}; | 8 | use indexmap::{map::Entry, IndexMap}; |
9 | use itertools::Itertools; | 9 | use itertools::Itertools; |
10 | use rustc_hash::{FxHashMap, FxHasher}; | 10 | use rustc_hash::{FxHashMap, FxHashSet, FxHasher}; |
11 | use smallvec::SmallVec; | 11 | use smallvec::SmallVec; |
12 | use syntax::SmolStr; | 12 | use syntax::SmolStr; |
13 | 13 | ||
@@ -225,6 +225,19 @@ fn cmp((_, lhs): &(&ItemInNs, &ImportInfo), (_, rhs): &(&ItemInNs, &ImportInfo)) | |||
225 | lhs_str.cmp(&rhs_str) | 225 | lhs_str.cmp(&rhs_str) |
226 | } | 226 | } |
227 | 227 | ||
228 | #[derive(Debug, Eq, PartialEq, Hash)] | ||
229 | pub enum ImportKind { | ||
230 | Module, | ||
231 | Function, | ||
232 | Adt, | ||
233 | EnumVariant, | ||
234 | Const, | ||
235 | Static, | ||
236 | Trait, | ||
237 | TypeAlias, | ||
238 | BuiltinType, | ||
239 | } | ||
240 | |||
228 | #[derive(Debug)] | 241 | #[derive(Debug)] |
229 | pub struct Query { | 242 | pub struct Query { |
230 | query: String, | 243 | query: String, |
@@ -232,6 +245,7 @@ pub struct Query { | |||
232 | anchor_end: bool, | 245 | anchor_end: bool, |
233 | case_sensitive: bool, | 246 | case_sensitive: bool, |
234 | limit: usize, | 247 | limit: usize, |
248 | exclude_import_kinds: FxHashSet<ImportKind>, | ||
235 | } | 249 | } |
236 | 250 | ||
237 | impl Query { | 251 | impl Query { |
@@ -242,6 +256,7 @@ impl Query { | |||
242 | anchor_end: false, | 256 | anchor_end: false, |
243 | case_sensitive: false, | 257 | case_sensitive: false, |
244 | limit: usize::max_value(), | 258 | limit: usize::max_value(), |
259 | exclude_import_kinds: FxHashSet::default(), | ||
245 | } | 260 | } |
246 | } | 261 | } |
247 | 262 | ||
@@ -260,6 +275,12 @@ impl Query { | |||
260 | pub fn case_sensitive(self) -> Self { | 275 | pub fn case_sensitive(self) -> Self { |
261 | Self { case_sensitive: true, ..self } | 276 | Self { case_sensitive: true, ..self } |
262 | } | 277 | } |
278 | |||
279 | /// Do not include imports of the specified kind in the search results. | ||
280 | pub fn exclude_import_kind(mut self, import_kind: ImportKind) -> Self { | ||
281 | self.exclude_import_kinds.insert(import_kind); | ||
282 | self | ||
283 | } | ||
263 | } | 284 | } |
264 | 285 | ||
265 | /// Searches dependencies of `krate` for an importable path matching `query`. | 286 | /// Searches dependencies of `krate` for an importable path matching `query`. |
@@ -303,10 +324,17 @@ pub fn search_dependencies<'a>( | |||
303 | 324 | ||
304 | // Add the items from this `ModPath` group. Those are all subsequent items in | 325 | // Add the items from this `ModPath` group. Those are all subsequent items in |
305 | // `importables` whose paths match `path`. | 326 | // `importables` whose paths match `path`. |
306 | let iter = importables.iter().copied().take_while(|item| { | 327 | let iter = importables |
307 | let item_path = &import_map.map[item].path; | 328 | .iter() |
308 | fst_path(item_path) == fst_path(path) | 329 | .copied() |
309 | }); | 330 | .take_while(|item| { |
331 | let item_path = &import_map.map[item].path; | ||
332 | fst_path(item_path) == fst_path(path) | ||
333 | }) | ||
334 | .filter(|&item| match item_import_kind(item) { | ||
335 | Some(import_kind) => !query.exclude_import_kinds.contains(&import_kind), | ||
336 | None => true, | ||
337 | }); | ||
310 | 338 | ||
311 | if query.case_sensitive { | 339 | if query.case_sensitive { |
312 | // FIXME: This does not do a subsequence match. | 340 | // FIXME: This does not do a subsequence match. |
@@ -341,6 +369,20 @@ pub fn search_dependencies<'a>( | |||
341 | res | 369 | res |
342 | } | 370 | } |
343 | 371 | ||
372 | fn item_import_kind(item: ItemInNs) -> Option<ImportKind> { | ||
373 | Some(match item.as_module_def_id()? { | ||
374 | ModuleDefId::ModuleId(_) => ImportKind::Module, | ||
375 | ModuleDefId::FunctionId(_) => ImportKind::Function, | ||
376 | ModuleDefId::AdtId(_) => ImportKind::Adt, | ||
377 | ModuleDefId::EnumVariantId(_) => ImportKind::EnumVariant, | ||
378 | ModuleDefId::ConstId(_) => ImportKind::Const, | ||
379 | ModuleDefId::StaticId(_) => ImportKind::Static, | ||
380 | ModuleDefId::TraitId(_) => ImportKind::Trait, | ||
381 | ModuleDefId::TypeAliasId(_) => ImportKind::TypeAlias, | ||
382 | ModuleDefId::BuiltinType(_) => ImportKind::BuiltinType, | ||
383 | }) | ||
384 | } | ||
385 | |||
344 | #[cfg(test)] | 386 | #[cfg(test)] |
345 | mod tests { | 387 | mod tests { |
346 | use base_db::{fixture::WithFixture, SourceDatabase, Upcast}; | 388 | use base_db::{fixture::WithFixture, SourceDatabase, Upcast}; |
@@ -758,4 +800,34 @@ mod tests { | |||
758 | "#]], | 800 | "#]], |
759 | ); | 801 | ); |
760 | } | 802 | } |
803 | |||
804 | #[test] | ||
805 | fn search_exclusions() { | ||
806 | let ra_fixture = r#" | ||
807 | //- /main.rs crate:main deps:dep | ||
808 | //- /dep.rs crate:dep | ||
809 | |||
810 | pub struct fmt; | ||
811 | pub struct FMT; | ||
812 | "#; | ||
813 | |||
814 | check_search( | ||
815 | ra_fixture, | ||
816 | "main", | ||
817 | Query::new("FMT"), | ||
818 | expect![[r#" | ||
819 | dep::fmt (t) | ||
820 | dep::fmt (v) | ||
821 | dep::FMT (t) | ||
822 | dep::FMT (v) | ||
823 | "#]], | ||
824 | ); | ||
825 | |||
826 | check_search( | ||
827 | ra_fixture, | ||
828 | "main", | ||
829 | Query::new("FMT").exclude_import_kind(ImportKind::Adt), | ||
830 | expect![[r#""#]], | ||
831 | ); | ||
832 | } | ||
761 | } | 833 | } |
diff --git a/crates/hir_def/src/nameres.rs b/crates/hir_def/src/nameres.rs index eb41d324e..202a7dcb6 100644 --- a/crates/hir_def/src/nameres.rs +++ b/crates/hir_def/src/nameres.rs | |||
@@ -287,7 +287,7 @@ mod diagnostics { | |||
287 | use hir_expand::diagnostics::DiagnosticSink; | 287 | use hir_expand::diagnostics::DiagnosticSink; |
288 | use hir_expand::hygiene::Hygiene; | 288 | use hir_expand::hygiene::Hygiene; |
289 | use hir_expand::InFile; | 289 | use hir_expand::InFile; |
290 | use syntax::{ast, AstPtr, SyntaxNodePtr}; | 290 | use syntax::{ast, AstPtr}; |
291 | 291 | ||
292 | use crate::path::ModPath; | 292 | use crate::path::ModPath; |
293 | use crate::{db::DefDatabase, diagnostics::*, nameres::LocalModuleId, AstId}; | 293 | use crate::{db::DefDatabase, diagnostics::*, nameres::LocalModuleId, AstId}; |
@@ -300,7 +300,7 @@ mod diagnostics { | |||
300 | 300 | ||
301 | UnresolvedImport { ast: AstId<ast::Use>, index: usize }, | 301 | UnresolvedImport { ast: AstId<ast::Use>, index: usize }, |
302 | 302 | ||
303 | UnconfiguredCode { ast: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions }, | 303 | UnconfiguredCode { ast: AstId<ast::Item>, cfg: CfgExpr, opts: CfgOptions }, |
304 | } | 304 | } |
305 | 305 | ||
306 | #[derive(Debug, PartialEq, Eq)] | 306 | #[derive(Debug, PartialEq, Eq)] |
@@ -341,7 +341,7 @@ mod diagnostics { | |||
341 | 341 | ||
342 | pub(super) fn unconfigured_code( | 342 | pub(super) fn unconfigured_code( |
343 | container: LocalModuleId, | 343 | container: LocalModuleId, |
344 | ast: InFile<SyntaxNodePtr>, | 344 | ast: AstId<ast::Item>, |
345 | cfg: CfgExpr, | 345 | cfg: CfgExpr, |
346 | opts: CfgOptions, | 346 | opts: CfgOptions, |
347 | ) -> Self { | 347 | ) -> Self { |
@@ -399,9 +399,10 @@ mod diagnostics { | |||
399 | } | 399 | } |
400 | 400 | ||
401 | DiagnosticKind::UnconfiguredCode { ast, cfg, opts } => { | 401 | DiagnosticKind::UnconfiguredCode { ast, cfg, opts } => { |
402 | let item = ast.to_node(db.upcast()); | ||
402 | sink.push(InactiveCode { | 403 | sink.push(InactiveCode { |
403 | file: ast.file_id, | 404 | file: ast.file_id, |
404 | node: ast.value.clone(), | 405 | node: AstPtr::new(&item).into(), |
405 | cfg: cfg.clone(), | 406 | cfg: cfg.clone(), |
406 | opts: opts.clone(), | 407 | opts: opts.clone(), |
407 | }); | 408 | }); |
diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs index 386287518..5ed9073e0 100644 --- a/crates/hir_def/src/nameres/collector.rs +++ b/crates/hir_def/src/nameres/collector.rs | |||
@@ -1336,13 +1336,11 @@ impl ModCollector<'_, '_> { | |||
1336 | 1336 | ||
1337 | fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) { | 1337 | fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) { |
1338 | let ast_id = item.ast_id(self.item_tree); | 1338 | let ast_id = item.ast_id(self.item_tree); |
1339 | let id_map = self.def_collector.db.ast_id_map(self.file_id); | ||
1340 | let syntax_ptr = id_map.get(ast_id).syntax_node_ptr(); | ||
1341 | 1339 | ||
1342 | let ast_node = InFile::new(self.file_id, syntax_ptr); | 1340 | let ast_id = InFile::new(self.file_id, ast_id); |
1343 | self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code( | 1341 | self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code( |
1344 | self.module_id, | 1342 | self.module_id, |
1345 | ast_node, | 1343 | ast_id, |
1346 | cfg.clone(), | 1344 | cfg.clone(), |
1347 | self.def_collector.cfg_options.clone(), | 1345 | self.def_collector.cfg_options.clone(), |
1348 | )); | 1346 | )); |
diff --git a/crates/hir_def/src/nameres/tests/incremental.rs b/crates/hir_def/src/nameres/tests/incremental.rs index cfbc62cc4..8981fa7c9 100644 --- a/crates/hir_def/src/nameres/tests/incremental.rs +++ b/crates/hir_def/src/nameres/tests/incremental.rs | |||
@@ -38,6 +38,9 @@ fn typing_inside_a_function_should_not_invalidate_def_map() { | |||
38 | fn foo() -> i32 { | 38 | fn foo() -> i32 { |
39 | 1 + 1 | 39 | 1 + 1 |
40 | } | 40 | } |
41 | |||
42 | #[cfg(never)] | ||
43 | fn no() {} | ||
41 | //- /foo/mod.rs | 44 | //- /foo/mod.rs |
42 | pub mod bar; | 45 | pub mod bar; |
43 | 46 | ||
@@ -53,6 +56,9 @@ fn typing_inside_a_function_should_not_invalidate_def_map() { | |||
53 | use E::*; | 56 | use E::*; |
54 | 57 | ||
55 | fn foo() -> i32 { 92 } | 58 | fn foo() -> i32 { 92 } |
59 | |||
60 | #[cfg(never)] | ||
61 | fn no() {} | ||
56 | ", | 62 | ", |
57 | ); | 63 | ); |
58 | } | 64 | } |
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs index fc512517c..46ebdbc74 100644 --- a/crates/hir_expand/src/db.rs +++ b/crates/hir_expand/src/db.rs | |||
@@ -13,19 +13,6 @@ use crate::{ | |||
13 | MacroFile, ProcMacroExpander, | 13 | MacroFile, ProcMacroExpander, |
14 | }; | 14 | }; |
15 | 15 | ||
16 | /// A result of some macro expansion. | ||
17 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
18 | pub struct MacroResult<T> { | ||
19 | /// The result of the expansion. Might be `None` when error recovery was impossible and no | ||
20 | /// usable result was produced. | ||
21 | pub value: Option<T>, | ||
22 | |||
23 | /// The error that occurred during expansion or processing. | ||
24 | /// | ||
25 | /// Since we do error recovery, getting an error here does not mean that `value` will be absent. | ||
26 | pub error: Option<String>, | ||
27 | } | ||
28 | |||
29 | #[derive(Debug, Clone, Eq, PartialEq)] | 16 | #[derive(Debug, Clone, Eq, PartialEq)] |
30 | pub enum TokenExpander { | 17 | pub enum TokenExpander { |
31 | MacroRules(mbe::MacroRules), | 18 | MacroRules(mbe::MacroRules), |
@@ -88,11 +75,11 @@ pub trait AstDatabase: SourceDatabase { | |||
88 | #[salsa::transparent] | 75 | #[salsa::transparent] |
89 | fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>; | 76 | fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>; |
90 | fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>; | 77 | fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>; |
91 | fn parse_macro( | 78 | fn parse_macro_expansion( |
92 | &self, | 79 | &self, |
93 | macro_file: MacroFile, | 80 | macro_file: MacroFile, |
94 | ) -> MacroResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>; | 81 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>; |
95 | fn macro_expand(&self, macro_call: MacroCallId) -> MacroResult<Arc<tt::Subtree>>; | 82 | fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>; |
96 | 83 | ||
97 | #[salsa::interned] | 84 | #[salsa::interned] |
98 | fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; | 85 | fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; |
@@ -100,20 +87,6 @@ pub trait AstDatabase: SourceDatabase { | |||
100 | fn expand_proc_macro(&self, call: MacroCallId) -> Result<tt::Subtree, mbe::ExpandError>; | 87 | fn expand_proc_macro(&self, call: MacroCallId) -> Result<tt::Subtree, mbe::ExpandError>; |
101 | } | 88 | } |
102 | 89 | ||
103 | impl<T> MacroResult<T> { | ||
104 | fn error(message: String) -> Self { | ||
105 | Self { value: None, error: Some(message) } | ||
106 | } | ||
107 | |||
108 | fn map<U>(self, f: impl FnOnce(T) -> U) -> MacroResult<U> { | ||
109 | MacroResult { value: self.value.map(f), error: self.error } | ||
110 | } | ||
111 | |||
112 | fn drop_value<U>(self) -> MacroResult<U> { | ||
113 | MacroResult { value: None, error: self.error } | ||
114 | } | ||
115 | } | ||
116 | |||
117 | /// This expands the given macro call, but with different arguments. This is | 90 | /// This expands the given macro call, but with different arguments. This is |
118 | /// used for completion, where we want to see what 'would happen' if we insert a | 91 | /// used for completion, where we want to see what 'would happen' if we insert a |
119 | /// token. The `token_to_map` mapped down into the expansion, with the mapped | 92 | /// token. The `token_to_map` mapped down into the expansion, with the mapped |
@@ -138,16 +111,13 @@ pub fn expand_hypothetical( | |||
138 | Some((node.syntax_node(), token)) | 111 | Some((node.syntax_node(), token)) |
139 | } | 112 | } |
140 | 113 | ||
141 | pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { | 114 | fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { |
142 | let map = | 115 | let map = |
143 | db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it)); | 116 | db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it)); |
144 | Arc::new(map) | 117 | Arc::new(map) |
145 | } | 118 | } |
146 | 119 | ||
147 | pub(crate) fn macro_def( | 120 | fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { |
148 | db: &dyn AstDatabase, | ||
149 | id: MacroDefId, | ||
150 | ) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { | ||
151 | match id.kind { | 121 | match id.kind { |
152 | MacroDefKind::Declarative => { | 122 | MacroDefKind::Declarative => { |
153 | let macro_call = id.ast_id?.to_node(db); | 123 | let macro_call = id.ast_id?.to_node(db); |
@@ -178,7 +148,7 @@ pub(crate) fn macro_def( | |||
178 | } | 148 | } |
179 | } | 149 | } |
180 | 150 | ||
181 | pub(crate) fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> { | 151 | fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> { |
182 | let id = match id { | 152 | let id = match id { |
183 | MacroCallId::LazyMacro(id) => id, | 153 | MacroCallId::LazyMacro(id) => id, |
184 | MacroCallId::EagerMacro(_id) => { | 154 | MacroCallId::EagerMacro(_id) => { |
@@ -191,16 +161,13 @@ pub(crate) fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<Gr | |||
191 | Some(arg.green().clone()) | 161 | Some(arg.green().clone()) |
192 | } | 162 | } |
193 | 163 | ||
194 | pub(crate) fn macro_arg( | 164 | fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { |
195 | db: &dyn AstDatabase, | ||
196 | id: MacroCallId, | ||
197 | ) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { | ||
198 | let arg = db.macro_arg_text(id)?; | 165 | let arg = db.macro_arg_text(id)?; |
199 | let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg))?; | 166 | let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg))?; |
200 | Some(Arc::new((tt, tmap))) | 167 | Some(Arc::new((tt, tmap))) |
201 | } | 168 | } |
202 | 169 | ||
203 | pub(crate) fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> MacroResult<Arc<tt::Subtree>> { | 170 | fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> { |
204 | macro_expand_with_arg(db, id, None) | 171 | macro_expand_with_arg(db, id, None) |
205 | } | 172 | } |
206 | 173 | ||
@@ -221,18 +188,18 @@ fn macro_expand_with_arg( | |||
221 | db: &dyn AstDatabase, | 188 | db: &dyn AstDatabase, |
222 | id: MacroCallId, | 189 | id: MacroCallId, |
223 | arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>, | 190 | arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>, |
224 | ) -> MacroResult<Arc<tt::Subtree>> { | 191 | ) -> ExpandResult<Option<Arc<tt::Subtree>>> { |
225 | let lazy_id = match id { | 192 | let lazy_id = match id { |
226 | MacroCallId::LazyMacro(id) => id, | 193 | MacroCallId::LazyMacro(id) => id, |
227 | MacroCallId::EagerMacro(id) => { | 194 | MacroCallId::EagerMacro(id) => { |
228 | if arg.is_some() { | 195 | if arg.is_some() { |
229 | return MacroResult::error( | 196 | return ExpandResult::str_err( |
230 | "hypothetical macro expansion not implemented for eager macro".to_owned(), | 197 | "hypothetical macro expansion not implemented for eager macro".to_owned(), |
231 | ); | 198 | ); |
232 | } else { | 199 | } else { |
233 | return MacroResult { | 200 | return ExpandResult { |
234 | value: Some(db.lookup_intern_eager_expansion(id).subtree), | 201 | value: Some(db.lookup_intern_eager_expansion(id).subtree), |
235 | error: None, | 202 | err: None, |
236 | }; | 203 | }; |
237 | } | 204 | } |
238 | } | 205 | } |
@@ -241,24 +208,27 @@ fn macro_expand_with_arg( | |||
241 | let loc = db.lookup_intern_macro(lazy_id); | 208 | let loc = db.lookup_intern_macro(lazy_id); |
242 | let macro_arg = match arg.or_else(|| db.macro_arg(id)) { | 209 | let macro_arg = match arg.or_else(|| db.macro_arg(id)) { |
243 | Some(it) => it, | 210 | Some(it) => it, |
244 | None => return MacroResult::error("Fail to args in to tt::TokenTree".into()), | 211 | None => return ExpandResult::str_err("Fail to args in to tt::TokenTree".into()), |
245 | }; | 212 | }; |
246 | 213 | ||
247 | let macro_rules = match db.macro_def(loc.def) { | 214 | let macro_rules = match db.macro_def(loc.def) { |
248 | Some(it) => it, | 215 | Some(it) => it, |
249 | None => return MacroResult::error("Fail to find macro definition".into()), | 216 | None => return ExpandResult::str_err("Fail to find macro definition".into()), |
250 | }; | 217 | }; |
251 | let ExpandResult(tt, err) = macro_rules.0.expand(db, lazy_id, ¯o_arg.0); | 218 | let ExpandResult { value: tt, err } = macro_rules.0.expand(db, lazy_id, ¯o_arg.0); |
252 | // Set a hard limit for the expanded tt | 219 | // Set a hard limit for the expanded tt |
253 | let count = tt.count(); | 220 | let count = tt.count(); |
254 | if count > 262144 { | 221 | if count > 262144 { |
255 | return MacroResult::error(format!("Total tokens count exceed limit : count = {}", count)); | 222 | return ExpandResult::str_err(format!( |
223 | "Total tokens count exceed limit : count = {}", | ||
224 | count | ||
225 | )); | ||
256 | } | 226 | } |
257 | 227 | ||
258 | MacroResult { value: Some(Arc::new(tt)), error: err.map(|e| format!("{:?}", e)) } | 228 | ExpandResult { value: Some(Arc::new(tt)), err } |
259 | } | 229 | } |
260 | 230 | ||
261 | pub(crate) fn expand_proc_macro( | 231 | fn expand_proc_macro( |
262 | db: &dyn AstDatabase, | 232 | db: &dyn AstDatabase, |
263 | id: MacroCallId, | 233 | id: MacroCallId, |
264 | ) -> Result<tt::Subtree, mbe::ExpandError> { | 234 | ) -> Result<tt::Subtree, mbe::ExpandError> { |
@@ -285,27 +255,27 @@ pub(crate) fn expand_proc_macro( | |||
285 | expander.expand(db, lazy_id, ¯o_arg.0) | 255 | expander.expand(db, lazy_id, ¯o_arg.0) |
286 | } | 256 | } |
287 | 257 | ||
288 | pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> { | 258 | fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> { |
289 | match file_id.0 { | 259 | match file_id.0 { |
290 | HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), | 260 | HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), |
291 | HirFileIdRepr::MacroFile(macro_file) => { | 261 | HirFileIdRepr::MacroFile(macro_file) => { |
292 | db.parse_macro(macro_file).map(|(it, _)| it.syntax_node()).value | 262 | db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node()) |
293 | } | 263 | } |
294 | } | 264 | } |
295 | } | 265 | } |
296 | 266 | ||
297 | pub(crate) fn parse_macro( | 267 | fn parse_macro_expansion( |
298 | db: &dyn AstDatabase, | 268 | db: &dyn AstDatabase, |
299 | macro_file: MacroFile, | 269 | macro_file: MacroFile, |
300 | ) -> MacroResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> { | 270 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> { |
301 | parse_macro_with_arg(db, macro_file, None) | 271 | parse_macro_with_arg(db, macro_file, None) |
302 | } | 272 | } |
303 | 273 | ||
304 | pub fn parse_macro_with_arg( | 274 | fn parse_macro_with_arg( |
305 | db: &dyn AstDatabase, | 275 | db: &dyn AstDatabase, |
306 | macro_file: MacroFile, | 276 | macro_file: MacroFile, |
307 | arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>, | 277 | arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>, |
308 | ) -> MacroResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> { | 278 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> { |
309 | let _p = profile::span("parse_macro_query"); | 279 | let _p = profile::span("parse_macro_query"); |
310 | 280 | ||
311 | let macro_call_id = macro_file.macro_call_id; | 281 | let macro_call_id = macro_file.macro_call_id; |
@@ -314,7 +284,7 @@ pub fn parse_macro_with_arg( | |||
314 | } else { | 284 | } else { |
315 | db.macro_expand(macro_call_id) | 285 | db.macro_expand(macro_call_id) |
316 | }; | 286 | }; |
317 | if let Some(err) = &result.error { | 287 | if let Some(err) = &result.err { |
318 | // Note: | 288 | // Note: |
319 | // The final goal we would like to make all parse_macro success, | 289 | // The final goal we would like to make all parse_macro success, |
320 | // such that the following log will not call anyway. | 290 | // such that the following log will not call anyway. |
@@ -332,20 +302,20 @@ pub fn parse_macro_with_arg( | |||
332 | .join("\n"); | 302 | .join("\n"); |
333 | 303 | ||
334 | log::warn!( | 304 | log::warn!( |
335 | "fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}", | 305 | "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}", |
336 | err, | 306 | err, |
337 | node.value, | 307 | node.value, |
338 | parents | 308 | parents |
339 | ); | 309 | ); |
340 | } | 310 | } |
341 | _ => { | 311 | _ => { |
342 | log::warn!("fail on macro_parse: (reason: {})", err); | 312 | log::warn!("fail on macro_parse: (reason: {:?})", err); |
343 | } | 313 | } |
344 | } | 314 | } |
345 | } | 315 | } |
346 | let tt = match result.value { | 316 | let tt = match result.value { |
347 | Some(tt) => tt, | 317 | Some(tt) => tt, |
348 | None => return result.drop_value(), | 318 | None => return ExpandResult { value: None, err: result.err }, |
349 | }; | 319 | }; |
350 | 320 | ||
351 | let fragment_kind = to_fragment_kind(db, macro_call_id); | 321 | let fragment_kind = to_fragment_kind(db, macro_call_id); |
@@ -353,33 +323,29 @@ pub fn parse_macro_with_arg( | |||
353 | let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) { | 323 | let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) { |
354 | Ok(it) => it, | 324 | Ok(it) => it, |
355 | Err(err) => { | 325 | Err(err) => { |
356 | return MacroResult::error(format!("{:?}", err)); | 326 | return ExpandResult::only_err(err); |
357 | } | 327 | } |
358 | }; | 328 | }; |
359 | 329 | ||
360 | match result.error { | 330 | match result.err { |
361 | Some(error) => { | 331 | Some(err) => { |
362 | // FIXME: | 332 | // Safety check for recursive identity macro. |
363 | // In future, we should propagate the actual error with recovery information | ||
364 | // instead of ignore the error here. | ||
365 | |||
366 | // Safe check for recurisve identity macro | ||
367 | let node = parse.syntax_node(); | 333 | let node = parse.syntax_node(); |
368 | let file: HirFileId = macro_file.into(); | 334 | let file: HirFileId = macro_file.into(); |
369 | let call_node = match file.call_node(db) { | 335 | let call_node = match file.call_node(db) { |
370 | Some(it) => it, | 336 | Some(it) => it, |
371 | None => { | 337 | None => { |
372 | return MacroResult::error(error); | 338 | return ExpandResult::only_err(err); |
373 | } | 339 | } |
374 | }; | 340 | }; |
375 | 341 | ||
376 | if !diff(&node, &call_node.value).is_empty() { | 342 | if !diff(&node, &call_node.value).is_empty() { |
377 | MacroResult { value: Some((parse, Arc::new(rev_token_map))), error: None } | 343 | ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) } |
378 | } else { | 344 | } else { |
379 | return MacroResult::error(error); | 345 | return ExpandResult::only_err(err); |
380 | } | 346 | } |
381 | } | 347 | } |
382 | None => MacroResult { value: Some((parse, Arc::new(rev_token_map))), error: None }, | 348 | None => ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None }, |
383 | } | 349 | } |
384 | } | 350 | } |
385 | 351 | ||
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index 9fc697d6f..d5ba691b7 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs | |||
@@ -15,6 +15,8 @@ pub mod proc_macro; | |||
15 | pub mod quote; | 15 | pub mod quote; |
16 | pub mod eager; | 16 | pub mod eager; |
17 | 17 | ||
18 | pub use mbe::{ExpandError, ExpandResult}; | ||
19 | |||
18 | use std::hash::Hash; | 20 | use std::hash::Hash; |
19 | use std::sync::Arc; | 21 | use std::sync::Arc; |
20 | 22 | ||
@@ -144,7 +146,7 @@ impl HirFileId { | |||
144 | let def_tt = loc.def.ast_id?.to_node(db).token_tree()?; | 146 | let def_tt = loc.def.ast_id?.to_node(db).token_tree()?; |
145 | 147 | ||
146 | let macro_def = db.macro_def(loc.def)?; | 148 | let macro_def = db.macro_def(loc.def)?; |
147 | let (parse, exp_map) = db.parse_macro(macro_file).value?; | 149 | let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?; |
148 | let macro_arg = db.macro_arg(macro_file.macro_call_id)?; | 150 | let macro_arg = db.macro_arg(macro_file.macro_call_id)?; |
149 | 151 | ||
150 | Some(ExpansionInfo { | 152 | Some(ExpansionInfo { |
diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index 8b4a1652e..e10d7c3a4 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs | |||
@@ -1,6 +1,6 @@ | |||
1 | use std::{fmt, iter::FromIterator, sync::Arc}; | 1 | use std::{fmt, iter::FromIterator, sync::Arc}; |
2 | 2 | ||
3 | use hir::{MacroFile, MacroResult}; | 3 | use hir::{ExpandResult, MacroFile}; |
4 | use ide_db::base_db::{ | 4 | use ide_db::base_db::{ |
5 | salsa::debug::{DebugQueryTable, TableEntry}, | 5 | salsa::debug::{DebugQueryTable, TableEntry}, |
6 | CrateId, FileId, FileTextQuery, SourceDatabase, SourceRootId, | 6 | CrateId, FileId, FileTextQuery, SourceDatabase, SourceRootId, |
@@ -19,7 +19,7 @@ fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { | |||
19 | ide_db::base_db::ParseQuery.in_db(db).entries::<SyntaxTreeStats>() | 19 | ide_db::base_db::ParseQuery.in_db(db).entries::<SyntaxTreeStats>() |
20 | } | 20 | } |
21 | fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { | 21 | fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { |
22 | hir::db::ParseMacroQuery.in_db(db).entries::<SyntaxTreeStats>() | 22 | hir::db::ParseMacroExpansionQuery.in_db(db).entries::<SyntaxTreeStats>() |
23 | } | 23 | } |
24 | 24 | ||
25 | // Feature: Status | 25 | // Feature: Status |
@@ -115,12 +115,12 @@ impl FromIterator<TableEntry<FileId, Parse<ast::SourceFile>>> for SyntaxTreeStat | |||
115 | } | 115 | } |
116 | } | 116 | } |
117 | 117 | ||
118 | impl<M> FromIterator<TableEntry<MacroFile, MacroResult<(Parse<SyntaxNode>, M)>>> | 118 | impl<M> FromIterator<TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>> |
119 | for SyntaxTreeStats | 119 | for SyntaxTreeStats |
120 | { | 120 | { |
121 | fn from_iter<T>(iter: T) -> SyntaxTreeStats | 121 | fn from_iter<T>(iter: T) -> SyntaxTreeStats |
122 | where | 122 | where |
123 | T: IntoIterator<Item = TableEntry<MacroFile, MacroResult<(Parse<SyntaxNode>, M)>>>, | 123 | T: IntoIterator<Item = TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>>, |
124 | { | 124 | { |
125 | let mut res = SyntaxTreeStats::default(); | 125 | let mut res = SyntaxTreeStats::default(); |
126 | for entry in iter { | 126 | for entry in iter { |
diff --git a/crates/ide_db/src/apply_change.rs b/crates/ide_db/src/apply_change.rs index da16fa21d..987191fe3 100644 --- a/crates/ide_db/src/apply_change.rs +++ b/crates/ide_db/src/apply_change.rs | |||
@@ -76,7 +76,7 @@ impl RootDatabase { | |||
76 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); | 76 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); |
77 | 77 | ||
78 | base_db::ParseQuery.in_db(self).sweep(sweep); | 78 | base_db::ParseQuery.in_db(self).sweep(sweep); |
79 | hir::db::ParseMacroQuery.in_db(self).sweep(sweep); | 79 | hir::db::ParseMacroExpansionQuery.in_db(self).sweep(sweep); |
80 | 80 | ||
81 | // Macros do take significant space, but less then the syntax trees | 81 | // Macros do take significant space, but less then the syntax trees |
82 | // self.query(hir::db::MacroDefQuery).sweep(sweep); | 82 | // self.query(hir::db::MacroDefQuery).sweep(sweep); |
@@ -143,7 +143,7 @@ impl RootDatabase { | |||
143 | hir::db::AstIdMapQuery | 143 | hir::db::AstIdMapQuery |
144 | hir::db::MacroArgTextQuery | 144 | hir::db::MacroArgTextQuery |
145 | hir::db::MacroDefQuery | 145 | hir::db::MacroDefQuery |
146 | hir::db::ParseMacroQuery | 146 | hir::db::ParseMacroExpansionQuery |
147 | hir::db::MacroExpandQuery | 147 | hir::db::MacroExpandQuery |
148 | 148 | ||
149 | // DefDatabase | 149 | // DefDatabase |
diff --git a/crates/ide_db/src/imports_locator.rs b/crates/ide_db/src/imports_locator.rs index 9d8ea7368..09046d3c3 100644 --- a/crates/ide_db/src/imports_locator.rs +++ b/crates/ide_db/src/imports_locator.rs | |||
@@ -36,8 +36,15 @@ pub fn find_similar_imports<'a>( | |||
36 | krate: Crate, | 36 | krate: Crate, |
37 | name_to_import: &str, | 37 | name_to_import: &str, |
38 | limit: usize, | 38 | limit: usize, |
39 | ignore_modules: bool, | ||
39 | ) -> impl Iterator<Item = Either<ModuleDef, MacroDef>> { | 40 | ) -> impl Iterator<Item = Either<ModuleDef, MacroDef>> { |
40 | let _p = profile::span("find_similar_imports"); | 41 | let _p = profile::span("find_similar_imports"); |
42 | |||
43 | let mut external_query = import_map::Query::new(name_to_import).limit(limit); | ||
44 | if ignore_modules { | ||
45 | external_query = external_query.exclude_import_kind(import_map::ImportKind::Module); | ||
46 | } | ||
47 | |||
41 | find_imports( | 48 | find_imports( |
42 | sema, | 49 | sema, |
43 | krate, | 50 | krate, |
@@ -46,7 +53,7 @@ pub fn find_similar_imports<'a>( | |||
46 | local_query.limit(limit); | 53 | local_query.limit(limit); |
47 | local_query | 54 | local_query |
48 | }, | 55 | }, |
49 | import_map::Query::new(name_to_import).limit(limit), | 56 | external_query, |
50 | ) | 57 | ) |
51 | } | 58 | } |
52 | 59 | ||
diff --git a/crates/ide_db/src/lib.rs b/crates/ide_db/src/lib.rs index 38ebdbf79..05139a651 100644 --- a/crates/ide_db/src/lib.rs +++ b/crates/ide_db/src/lib.rs | |||
@@ -113,7 +113,7 @@ impl RootDatabase { | |||
113 | pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) { | 113 | pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) { |
114 | let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_LRU_CAP); | 114 | let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_LRU_CAP); |
115 | base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); | 115 | base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); |
116 | hir::db::ParseMacroQuery.in_db_mut(self).set_lru_capacity(lru_capacity); | 116 | hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(lru_capacity); |
117 | hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity); | 117 | hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity); |
118 | } | 118 | } |
119 | } | 119 | } |
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index f854ca09a..2d0763c47 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs | |||
@@ -12,6 +12,8 @@ mod subtree_source; | |||
12 | #[cfg(test)] | 12 | #[cfg(test)] |
13 | mod tests; | 13 | mod tests; |
14 | 14 | ||
15 | use std::fmt; | ||
16 | |||
15 | pub use tt::{Delimiter, Punct}; | 17 | pub use tt::{Delimiter, Punct}; |
16 | 18 | ||
17 | use crate::{ | 19 | use crate::{ |
@@ -33,6 +35,7 @@ pub enum ExpandError { | |||
33 | ConversionError, | 35 | ConversionError, |
34 | InvalidRepeat, | 36 | InvalidRepeat, |
35 | ProcMacroError(tt::ExpansionError), | 37 | ProcMacroError(tt::ExpansionError), |
38 | Other(String), | ||
36 | } | 39 | } |
37 | 40 | ||
38 | impl From<tt::ExpansionError> for ExpandError { | 41 | impl From<tt::ExpansionError> for ExpandError { |
@@ -41,6 +44,20 @@ impl From<tt::ExpansionError> for ExpandError { | |||
41 | } | 44 | } |
42 | } | 45 | } |
43 | 46 | ||
47 | impl fmt::Display for ExpandError { | ||
48 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
49 | match self { | ||
50 | ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"), | ||
51 | ExpandError::UnexpectedToken => f.write_str("unexpected token in input"), | ||
52 | ExpandError::BindingError(e) => f.write_str(e), | ||
53 | ExpandError::ConversionError => f.write_str("could not convert tokens"), | ||
54 | ExpandError::InvalidRepeat => f.write_str("invalid repeat expression"), | ||
55 | ExpandError::ProcMacroError(e) => e.fmt(f), | ||
56 | ExpandError::Other(e) => f.write_str(e), | ||
57 | } | ||
58 | } | ||
59 | } | ||
60 | |||
44 | pub use crate::syntax_bridge::{ | 61 | pub use crate::syntax_bridge::{ |
45 | ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, | 62 | ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, |
46 | TokenMap, | 63 | TokenMap, |
@@ -246,33 +263,42 @@ fn validate(pattern: &tt::Subtree) -> Result<(), ParseError> { | |||
246 | Ok(()) | 263 | Ok(()) |
247 | } | 264 | } |
248 | 265 | ||
249 | #[derive(Debug)] | 266 | #[derive(Debug, Clone, Eq, PartialEq)] |
250 | pub struct ExpandResult<T>(pub T, pub Option<ExpandError>); | 267 | pub struct ExpandResult<T> { |
268 | pub value: T, | ||
269 | pub err: Option<ExpandError>, | ||
270 | } | ||
251 | 271 | ||
252 | impl<T> ExpandResult<T> { | 272 | impl<T> ExpandResult<T> { |
253 | pub fn ok(t: T) -> ExpandResult<T> { | 273 | pub fn ok(value: T) -> Self { |
254 | ExpandResult(t, None) | 274 | Self { value, err: None } |
275 | } | ||
276 | |||
277 | pub fn only_err(err: ExpandError) -> Self | ||
278 | where | ||
279 | T: Default, | ||
280 | { | ||
281 | Self { value: Default::default(), err: Some(err) } | ||
255 | } | 282 | } |
256 | 283 | ||
257 | pub fn only_err(err: ExpandError) -> ExpandResult<T> | 284 | pub fn str_err(err: String) -> Self |
258 | where | 285 | where |
259 | T: Default, | 286 | T: Default, |
260 | { | 287 | { |
261 | ExpandResult(Default::default(), Some(err)) | 288 | Self::only_err(ExpandError::Other(err)) |
262 | } | 289 | } |
263 | 290 | ||
264 | pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> { | 291 | pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> { |
265 | ExpandResult(f(self.0), self.1) | 292 | ExpandResult { value: f(self.value), err: self.err } |
266 | } | 293 | } |
267 | 294 | ||
268 | pub fn result(self) -> Result<T, ExpandError> { | 295 | pub fn result(self) -> Result<T, ExpandError> { |
269 | self.1.map(Err).unwrap_or(Ok(self.0)) | 296 | self.err.map(Err).unwrap_or(Ok(self.value)) |
270 | } | 297 | } |
271 | } | 298 | } |
272 | 299 | ||
273 | impl<T: Default> From<Result<T, ExpandError>> for ExpandResult<T> { | 300 | impl<T: Default> From<Result<T, ExpandError>> for ExpandResult<T> { |
274 | fn from(result: Result<T, ExpandError>) -> ExpandResult<T> { | 301 | fn from(result: Result<T, ExpandError>) -> Self { |
275 | result | 302 | result.map_or_else(|e| Self::only_err(e), |it| Self::ok(it)) |
276 | .map_or_else(|e| ExpandResult(Default::default(), Some(e)), |it| ExpandResult(it, None)) | ||
277 | } | 303 | } |
278 | } | 304 | } |
diff --git a/crates/mbe/src/mbe_expander.rs b/crates/mbe/src/mbe_expander.rs index 1ad8b9f8a..97bce0536 100644 --- a/crates/mbe/src/mbe_expander.rs +++ b/crates/mbe/src/mbe_expander.rs | |||
@@ -28,10 +28,10 @@ fn expand_rules(rules: &[crate::Rule], input: &tt::Subtree) -> ExpandResult<tt:: | |||
28 | // If we find a rule that applies without errors, we're done. | 28 | // If we find a rule that applies without errors, we're done. |
29 | // Unconditionally returning the transcription here makes the | 29 | // Unconditionally returning the transcription here makes the |
30 | // `test_repeat_bad_var` test fail. | 30 | // `test_repeat_bad_var` test fail. |
31 | let ExpandResult(res, transcribe_err) = | 31 | let ExpandResult { value, err: transcribe_err } = |
32 | transcriber::transcribe(&rule.rhs, &new_match.bindings); | 32 | transcriber::transcribe(&rule.rhs, &new_match.bindings); |
33 | if transcribe_err.is_none() { | 33 | if transcribe_err.is_none() { |
34 | return ExpandResult::ok(res); | 34 | return ExpandResult::ok(value); |
35 | } | 35 | } |
36 | } | 36 | } |
37 | // Use the rule if we matched more tokens, or had fewer errors | 37 | // Use the rule if we matched more tokens, or had fewer errors |
@@ -47,11 +47,11 @@ fn expand_rules(rules: &[crate::Rule], input: &tt::Subtree) -> ExpandResult<tt:: | |||
47 | } | 47 | } |
48 | if let Some((match_, rule)) = match_ { | 48 | if let Some((match_, rule)) = match_ { |
49 | // if we got here, there was no match without errors | 49 | // if we got here, there was no match without errors |
50 | let ExpandResult(result, transcribe_err) = | 50 | let ExpandResult { value, err: transcribe_err } = |
51 | transcriber::transcribe(&rule.rhs, &match_.bindings); | 51 | transcriber::transcribe(&rule.rhs, &match_.bindings); |
52 | ExpandResult(result, match_.err.or(transcribe_err)) | 52 | ExpandResult { value, err: match_.err.or(transcribe_err) } |
53 | } else { | 53 | } else { |
54 | ExpandResult(tt::Subtree::default(), Some(ExpandError::NoMatchingRule)) | 54 | ExpandResult::only_err(ExpandError::NoMatchingRule) |
55 | } | 55 | } |
56 | } | 56 | } |
57 | 57 | ||
@@ -143,7 +143,10 @@ mod tests { | |||
143 | } | 143 | } |
144 | 144 | ||
145 | fn assert_err(macro_body: &str, invocation: &str, err: ExpandError) { | 145 | fn assert_err(macro_body: &str, invocation: &str, err: ExpandError) { |
146 | assert_eq!(expand_first(&create_rules(&format_macro(macro_body)), invocation).1, Some(err)); | 146 | assert_eq!( |
147 | expand_first(&create_rules(&format_macro(macro_body)), invocation).err, | ||
148 | Some(err) | ||
149 | ); | ||
147 | } | 150 | } |
148 | 151 | ||
149 | fn format_macro(macro_body: &str) -> String { | 152 | fn format_macro(macro_body: &str) -> String { |
diff --git a/crates/mbe/src/mbe_expander/matcher.rs b/crates/mbe/src/mbe_expander/matcher.rs index 39a8eefbd..3f8445897 100644 --- a/crates/mbe/src/mbe_expander/matcher.rs +++ b/crates/mbe/src/mbe_expander/matcher.rs | |||
@@ -158,7 +158,8 @@ fn match_subtree( | |||
158 | continue; | 158 | continue; |
159 | } | 159 | } |
160 | }; | 160 | }; |
161 | let ExpandResult(matched, match_err) = match_meta_var(kind.as_str(), src); | 161 | let ExpandResult { value: matched, err: match_err } = |
162 | match_meta_var(kind.as_str(), src); | ||
162 | match matched { | 163 | match matched { |
163 | Some(fragment) => { | 164 | Some(fragment) => { |
164 | res.bindings.inner.insert(name.clone(), Binding::Fragment(fragment)); | 165 | res.bindings.inner.insert(name.clone(), Binding::Fragment(fragment)); |
@@ -342,17 +343,17 @@ impl<'a> TtIter<'a> { | |||
342 | token_trees: res.into_iter().cloned().collect(), | 343 | token_trees: res.into_iter().cloned().collect(), |
343 | })), | 344 | })), |
344 | }; | 345 | }; |
345 | ExpandResult(res, err) | 346 | ExpandResult { value: res, err } |
346 | } | 347 | } |
347 | 348 | ||
348 | pub(crate) fn eat_vis(&mut self) -> Option<tt::TokenTree> { | 349 | pub(crate) fn eat_vis(&mut self) -> Option<tt::TokenTree> { |
349 | let mut fork = self.clone(); | 350 | let mut fork = self.clone(); |
350 | match fork.expect_fragment(Visibility) { | 351 | match fork.expect_fragment(Visibility) { |
351 | ExpandResult(tt, None) => { | 352 | ExpandResult { value: tt, err: None } => { |
352 | *self = fork; | 353 | *self = fork; |
353 | tt | 354 | tt |
354 | } | 355 | } |
355 | ExpandResult(_, Some(_)) => None, | 356 | ExpandResult { value: _, err: Some(_) } => None, |
356 | } | 357 | } |
357 | } | 358 | } |
358 | } | 359 | } |
diff --git a/crates/mbe/src/mbe_expander/transcriber.rs b/crates/mbe/src/mbe_expander/transcriber.rs index c9525c5bf..616119ba9 100644 --- a/crates/mbe/src/mbe_expander/transcriber.rs +++ b/crates/mbe/src/mbe_expander/transcriber.rs | |||
@@ -93,17 +93,18 @@ fn expand_subtree( | |||
93 | match op { | 93 | match op { |
94 | Op::TokenTree(tt @ tt::TokenTree::Leaf(..)) => arena.push(tt.clone()), | 94 | Op::TokenTree(tt @ tt::TokenTree::Leaf(..)) => arena.push(tt.clone()), |
95 | Op::TokenTree(tt::TokenTree::Subtree(tt)) => { | 95 | Op::TokenTree(tt::TokenTree::Subtree(tt)) => { |
96 | let ExpandResult(tt, e) = expand_subtree(ctx, tt, arena); | 96 | let ExpandResult { value: tt, err: e } = expand_subtree(ctx, tt, arena); |
97 | err = err.or(e); | 97 | err = err.or(e); |
98 | arena.push(tt.into()); | 98 | arena.push(tt.into()); |
99 | } | 99 | } |
100 | Op::Var { name, kind: _ } => { | 100 | Op::Var { name, kind: _ } => { |
101 | let ExpandResult(fragment, e) = expand_var(ctx, name); | 101 | let ExpandResult { value: fragment, err: e } = expand_var(ctx, name); |
102 | err = err.or(e); | 102 | err = err.or(e); |
103 | push_fragment(arena, fragment); | 103 | push_fragment(arena, fragment); |
104 | } | 104 | } |
105 | Op::Repeat { subtree, kind, separator } => { | 105 | Op::Repeat { subtree, kind, separator } => { |
106 | let ExpandResult(fragment, e) = expand_repeat(ctx, subtree, kind, separator, arena); | 106 | let ExpandResult { value: fragment, err: e } = |
107 | expand_repeat(ctx, subtree, kind, separator, arena); | ||
107 | err = err.or(e); | 108 | err = err.or(e); |
108 | push_fragment(arena, fragment) | 109 | push_fragment(arena, fragment) |
109 | } | 110 | } |
@@ -111,7 +112,7 @@ fn expand_subtree( | |||
111 | } | 112 | } |
112 | // drain the elements added in this instance of expand_subtree | 113 | // drain the elements added in this instance of expand_subtree |
113 | let tts = arena.drain(start_elements..arena.len()).collect(); | 114 | let tts = arena.drain(start_elements..arena.len()).collect(); |
114 | ExpandResult(tt::Subtree { delimiter: template.delimiter, token_trees: tts }, err) | 115 | ExpandResult { value: tt::Subtree { delimiter: template.delimiter, token_trees: tts }, err } |
115 | } | 116 | } |
116 | 117 | ||
117 | fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult<Fragment> { | 118 | fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult<Fragment> { |
@@ -152,7 +153,7 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult<Fragment> { | |||
152 | ExpandResult::ok(Fragment::Tokens(tt)) | 153 | ExpandResult::ok(Fragment::Tokens(tt)) |
153 | } else { | 154 | } else { |
154 | ctx.bindings.get(&v, &mut ctx.nesting).map_or_else( | 155 | ctx.bindings.get(&v, &mut ctx.nesting).map_or_else( |
155 | |e| ExpandResult(Fragment::Tokens(tt::TokenTree::empty()), Some(e)), | 156 | |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) }, |
156 | |b| ExpandResult::ok(b.clone()), | 157 | |b| ExpandResult::ok(b.clone()), |
157 | ) | 158 | ) |
158 | } | 159 | } |
@@ -174,7 +175,7 @@ fn expand_repeat( | |||
174 | let mut counter = 0; | 175 | let mut counter = 0; |
175 | 176 | ||
176 | loop { | 177 | loop { |
177 | let ExpandResult(mut t, e) = expand_subtree(ctx, template, arena); | 178 | let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, arena); |
178 | let nesting_state = ctx.nesting.last_mut().unwrap(); | 179 | let nesting_state = ctx.nesting.last_mut().unwrap(); |
179 | if nesting_state.at_end || !nesting_state.hit { | 180 | if nesting_state.at_end || !nesting_state.hit { |
180 | break; | 181 | break; |
@@ -234,7 +235,10 @@ fn expand_repeat( | |||
234 | let tt = tt::Subtree { delimiter: None, token_trees: buf }.into(); | 235 | let tt = tt::Subtree { delimiter: None, token_trees: buf }.into(); |
235 | 236 | ||
236 | if RepeatKind::OneOrMore == kind && counter == 0 { | 237 | if RepeatKind::OneOrMore == kind && counter == 0 { |
237 | return ExpandResult(Fragment::Tokens(tt), Some(ExpandError::UnexpectedToken)); | 238 | return ExpandResult { |
239 | value: Fragment::Tokens(tt), | ||
240 | err: Some(ExpandError::UnexpectedToken), | ||
241 | }; | ||
238 | } | 242 | } |
239 | ExpandResult::ok(Fragment::Tokens(tt)) | 243 | ExpandResult::ok(Fragment::Tokens(tt)) |
240 | } | 244 | } |
diff --git a/crates/proc_macro_srv/Cargo.toml b/crates/proc_macro_srv/Cargo.toml index 048b32186..729372968 100644 --- a/crates/proc_macro_srv/Cargo.toml +++ b/crates/proc_macro_srv/Cargo.toml | |||
@@ -20,7 +20,7 @@ proc_macro_api = { path = "../proc_macro_api", version = "0.0.0" } | |||
20 | test_utils = { path = "../test_utils", version = "0.0.0" } | 20 | test_utils = { path = "../test_utils", version = "0.0.0" } |
21 | 21 | ||
22 | [dev-dependencies] | 22 | [dev-dependencies] |
23 | cargo_metadata = "0.12.0" | 23 | cargo_metadata = "=0.12.0" |
24 | difference = "2.0.0" | 24 | difference = "2.0.0" |
25 | 25 | ||
26 | # used as proc macro test targets | 26 | # used as proc macro test targets |
diff --git a/crates/project_model/Cargo.toml b/crates/project_model/Cargo.toml index 2d53bcbcc..e0c591603 100644 --- a/crates/project_model/Cargo.toml +++ b/crates/project_model/Cargo.toml | |||
@@ -12,7 +12,7 @@ doctest = false | |||
12 | [dependencies] | 12 | [dependencies] |
13 | log = "0.4.8" | 13 | log = "0.4.8" |
14 | rustc-hash = "1.1.0" | 14 | rustc-hash = "1.1.0" |
15 | cargo_metadata = "0.12.0" | 15 | cargo_metadata = "=0.12.0" |
16 | serde = { version = "1.0.106", features = ["derive"] } | 16 | serde = { version = "1.0.106", features = ["derive"] } |
17 | serde_json = "1.0.48" | 17 | serde_json = "1.0.48" |
18 | anyhow = "1.0.26" | 18 | anyhow = "1.0.26" |
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 5fc6800cf..a334cdb11 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs | |||
@@ -184,6 +184,7 @@ impl Config { | |||
184 | }, | 184 | }, |
185 | completion: CompletionConfig { | 185 | completion: CompletionConfig { |
186 | enable_postfix_completions: true, | 186 | enable_postfix_completions: true, |
187 | enable_experimental_completions: true, | ||
187 | add_call_parenthesis: true, | 188 | add_call_parenthesis: true, |
188 | add_call_argument_snippets: true, | 189 | add_call_argument_snippets: true, |
189 | ..CompletionConfig::default() | 190 | ..CompletionConfig::default() |
@@ -306,6 +307,7 @@ impl Config { | |||
306 | }; | 307 | }; |
307 | 308 | ||
308 | self.completion.enable_postfix_completions = data.completion_postfix_enable; | 309 | self.completion.enable_postfix_completions = data.completion_postfix_enable; |
310 | self.completion.enable_experimental_completions = data.completion_enableExperimental; | ||
309 | self.completion.add_call_parenthesis = data.completion_addCallParenthesis; | 311 | self.completion.add_call_parenthesis = data.completion_addCallParenthesis; |
310 | self.completion.add_call_argument_snippets = data.completion_addCallArgumentSnippets; | 312 | self.completion.add_call_argument_snippets = data.completion_addCallArgumentSnippets; |
311 | self.completion.merge = self.assist.insert_use.merge; | 313 | self.completion.merge = self.assist.insert_use.merge; |
@@ -506,6 +508,7 @@ config_data! { | |||
506 | completion_addCallArgumentSnippets: bool = true, | 508 | completion_addCallArgumentSnippets: bool = true, |
507 | completion_addCallParenthesis: bool = true, | 509 | completion_addCallParenthesis: bool = true, |
508 | completion_postfix_enable: bool = true, | 510 | completion_postfix_enable: bool = true, |
511 | completion_enableExperimental: bool = true, | ||
509 | 512 | ||
510 | diagnostics_enable: bool = true, | 513 | diagnostics_enable: bool = true, |
511 | diagnostics_enableExperimental: bool = true, | 514 | diagnostics_enableExperimental: bool = true, |
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index b34ff092d..866d1d176 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! The main loop of `rust-analyzer` responsible for dispatching LSP | 1 | //! The main loop of `rust-analyzer` responsible for dispatching LSP |
2 | //! requests/replies and notifications back to the client. | 2 | //! requests/replies and notifications back to the client. |
3 | use std::{ | 3 | use std::{ |
4 | env, fmt, panic, | 4 | env, fmt, |
5 | time::{Duration, Instant}, | 5 | time::{Duration, Instant}, |
6 | }; | 6 | }; |
7 | 7 | ||
@@ -348,13 +348,7 @@ impl GlobalState { | |||
348 | } | 348 | } |
349 | 349 | ||
350 | if self.status == Status::Ready && (state_changed || prev_status == Status::Loading) { | 350 | if self.status == Status::Ready && (state_changed || prev_status == Status::Loading) { |
351 | let subscriptions = self | 351 | self.update_file_notifications_on_threadpool(); |
352 | .mem_docs | ||
353 | .keys() | ||
354 | .map(|path| self.vfs.read().0.file_id(&path).unwrap()) | ||
355 | .collect::<Vec<_>>(); | ||
356 | |||
357 | self.update_file_notifications_on_threadpool(subscriptions); | ||
358 | 352 | ||
359 | // Refresh semantic tokens if the client supports it. | 353 | // Refresh semantic tokens if the client supports it. |
360 | if self.config.semantic_tokens_refresh { | 354 | if self.config.semantic_tokens_refresh { |
@@ -498,6 +492,7 @@ impl GlobalState { | |||
498 | .write() | 492 | .write() |
499 | .0 | 493 | .0 |
500 | .set_file_contents(path, Some(params.text_document.text.into_bytes())); | 494 | .set_file_contents(path, Some(params.text_document.text.into_bytes())); |
495 | this.update_file_notifications_on_threadpool(); | ||
501 | } | 496 | } |
502 | Ok(()) | 497 | Ok(()) |
503 | })? | 498 | })? |
@@ -606,7 +601,13 @@ impl GlobalState { | |||
606 | .finish(); | 601 | .finish(); |
607 | Ok(()) | 602 | Ok(()) |
608 | } | 603 | } |
609 | fn update_file_notifications_on_threadpool(&mut self, subscriptions: Vec<FileId>) { | 604 | fn update_file_notifications_on_threadpool(&mut self) { |
605 | let subscriptions = self | ||
606 | .mem_docs | ||
607 | .keys() | ||
608 | .map(|path| self.vfs.read().0.file_id(&path).unwrap()) | ||
609 | .collect::<Vec<_>>(); | ||
610 | |||
610 | log::trace!("updating notifications for {:?}", subscriptions); | 611 | log::trace!("updating notifications for {:?}", subscriptions); |
611 | if self.config.publish_diagnostics { | 612 | if self.config.publish_diagnostics { |
612 | let snapshot = self.snapshot(); | 613 | let snapshot = self.snapshot(); |
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 20c3f5eab..6c1bf8d09 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs | |||
@@ -1,10 +1,7 @@ | |||
1 | //! `tt` crate defines a `TokenTree` data structure: this is the interface (both | 1 | //! `tt` crate defines a `TokenTree` data structure: this is the interface (both |
2 | //! input and output) of macros. It closely mirrors `proc_macro` crate's | 2 | //! input and output) of macros. It closely mirrors `proc_macro` crate's |
3 | //! `TokenTree`. | 3 | //! `TokenTree`. |
4 | use std::{ | 4 | use std::{fmt, panic::RefUnwindSafe}; |
5 | fmt::{self, Debug}, | ||
6 | panic::RefUnwindSafe, | ||
7 | }; | ||
8 | 5 | ||
9 | use stdx::impl_from; | 6 | use stdx::impl_from; |
10 | 7 | ||
@@ -139,7 +136,7 @@ fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) | |||
139 | Ok(()) | 136 | Ok(()) |
140 | } | 137 | } |
141 | 138 | ||
142 | impl Debug for Subtree { | 139 | impl fmt::Debug for Subtree { |
143 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | 140 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
144 | print_debug_subtree(f, self, 0) | 141 | print_debug_subtree(f, self, 0) |
145 | } | 142 | } |
@@ -240,7 +237,18 @@ pub enum ExpansionError { | |||
240 | ExpansionError(String), | 237 | ExpansionError(String), |
241 | } | 238 | } |
242 | 239 | ||
243 | pub trait TokenExpander: Debug + Send + Sync + RefUnwindSafe { | 240 | impl fmt::Display for ExpansionError { |
241 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
242 | match self { | ||
243 | ExpansionError::IOError(e) => write!(f, "I/O error: {}", e), | ||
244 | ExpansionError::JsonError(e) => write!(f, "JSON decoding error: {}", e), | ||
245 | ExpansionError::Unknown(e) => e.fmt(f), | ||
246 | ExpansionError::ExpansionError(e) => write!(f, "proc macro returned error: {}", e), | ||
247 | } | ||
248 | } | ||
249 | } | ||
250 | |||
251 | pub trait TokenExpander: fmt::Debug + Send + Sync + RefUnwindSafe { | ||
244 | fn expand(&self, subtree: &Subtree, attrs: Option<&Subtree>) | 252 | fn expand(&self, subtree: &Subtree, attrs: Option<&Subtree>) |
245 | -> Result<Subtree, ExpansionError>; | 253 | -> Result<Subtree, ExpansionError>; |
246 | } | 254 | } |
diff --git a/editors/code/package.json b/editors/code/package.json index a2d6b1148..c3f1a0d8d 100644 --- a/editors/code/package.json +++ b/editors/code/package.json | |||
@@ -460,6 +460,11 @@ | |||
460 | "default": true, | 460 | "default": true, |
461 | "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc." | 461 | "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc." |
462 | }, | 462 | }, |
463 | "rust-analyzer.completion.enableExperimental": { | ||
464 | "type": "boolean", | ||
465 | "default": true, | ||
466 | "markdownDescription": "Display additional completions with potential false positives and performance issues" | ||
467 | }, | ||
463 | "rust-analyzer.callInfo.full": { | 468 | "rust-analyzer.callInfo.full": { |
464 | "type": "boolean", | 469 | "type": "boolean", |
465 | "default": true, | 470 | "default": true, |
@@ -952,9 +957,6 @@ | |||
952 | { | 957 | { |
953 | "language": "rust", | 958 | "language": "rust", |
954 | "scopes": { | 959 | "scopes": { |
955 | "macro": [ | ||
956 | "entity.name.function.macro.rust" | ||
957 | ], | ||
958 | "attribute": [ | 960 | "attribute": [ |
959 | "meta.attribute.rust" | 961 | "meta.attribute.rust" |
960 | ], | 962 | ], |