diff options
Diffstat (limited to 'crates')
58 files changed, 1961 insertions, 1115 deletions
diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs index 048478662..a710edce8 100644 --- a/crates/ra_assists/src/ast_editor.rs +++ b/crates/ra_assists/src/ast_editor.rs | |||
@@ -297,6 +297,11 @@ impl AstBuilder<ast::Path> { | |||
297 | ast_node_from_file_text(text) | 297 | ast_node_from_file_text(text) |
298 | } | 298 | } |
299 | 299 | ||
300 | pub fn from_name(name: ast::Name) -> ast::Path { | ||
301 | let name = name.syntax().to_string(); | ||
302 | Self::from_text(name.as_str()) | ||
303 | } | ||
304 | |||
300 | pub fn from_pieces(enum_name: ast::Name, var_name: ast::Name) -> ast::Path { | 305 | pub fn from_pieces(enum_name: ast::Name, var_name: ast::Name) -> ast::Path { |
301 | Self::from_text(&format!("{}::{}", enum_name.syntax(), var_name.syntax())) | 306 | Self::from_text(&format!("{}::{}", enum_name.syntax(), var_name.syntax())) |
302 | } | 307 | } |
@@ -380,6 +385,31 @@ impl AstBuilder<ast::MatchArmList> { | |||
380 | } | 385 | } |
381 | } | 386 | } |
382 | 387 | ||
388 | impl AstBuilder<ast::WherePred> { | ||
389 | fn from_text(text: &str) -> ast::WherePred { | ||
390 | ast_node_from_file_text(&format!("fn f() where {} {{ }}", text)) | ||
391 | } | ||
392 | |||
393 | pub fn from_pieces( | ||
394 | path: ast::Path, | ||
395 | bounds: impl Iterator<Item = ast::TypeBound>, | ||
396 | ) -> ast::WherePred { | ||
397 | let bounds = bounds.map(|b| b.syntax().to_string()).collect::<Vec<_>>().join(" + "); | ||
398 | Self::from_text(&format!("{}: {}", path.syntax(), bounds)) | ||
399 | } | ||
400 | } | ||
401 | |||
402 | impl AstBuilder<ast::WhereClause> { | ||
403 | fn from_text(text: &str) -> ast::WhereClause { | ||
404 | ast_node_from_file_text(&format!("fn f() where {} {{ }}", text)) | ||
405 | } | ||
406 | |||
407 | pub fn from_predicates(preds: impl Iterator<Item = ast::WherePred>) -> ast::WhereClause { | ||
408 | let preds = preds.map(|p| p.syntax().to_string()).collect::<Vec<_>>().join(", "); | ||
409 | Self::from_text(preds.as_str()) | ||
410 | } | ||
411 | } | ||
412 | |||
383 | fn ast_node_from_file_text<N: AstNode>(text: &str) -> N { | 413 | fn ast_node_from_file_text<N: AstNode>(text: &str) -> N { |
384 | let parse = SourceFile::parse(text); | 414 | let parse = SourceFile::parse(text); |
385 | let res = parse.tree().syntax().descendants().find_map(N::cast).unwrap(); | 415 | let res = parse.tree().syntax().descendants().find_map(N::cast).unwrap(); |
diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs index 03eec73ad..10ccc345c 100644 --- a/crates/ra_assists/src/lib.rs +++ b/crates/ra_assists/src/lib.rs | |||
@@ -102,6 +102,7 @@ mod remove_dbg; | |||
102 | pub mod auto_import; | 102 | pub mod auto_import; |
103 | mod add_missing_impl_members; | 103 | mod add_missing_impl_members; |
104 | mod move_guard; | 104 | mod move_guard; |
105 | mod move_bounds; | ||
105 | 106 | ||
106 | fn all_assists<DB: HirDatabase>() -> &'static [fn(AssistCtx<DB>) -> Option<Assist>] { | 107 | fn all_assists<DB: HirDatabase>() -> &'static [fn(AssistCtx<DB>) -> Option<Assist>] { |
107 | &[ | 108 | &[ |
@@ -123,6 +124,7 @@ fn all_assists<DB: HirDatabase>() -> &'static [fn(AssistCtx<DB>) -> Option<Assis | |||
123 | inline_local_variable::inline_local_varialbe, | 124 | inline_local_variable::inline_local_varialbe, |
124 | move_guard::move_guard_to_arm_body, | 125 | move_guard::move_guard_to_arm_body, |
125 | move_guard::move_arm_cond_to_match_guard, | 126 | move_guard::move_arm_cond_to_match_guard, |
127 | move_bounds::move_bounds_to_where_clause, | ||
126 | ] | 128 | ] |
127 | } | 129 | } |
128 | 130 | ||
diff --git a/crates/ra_assists/src/move_bounds.rs b/crates/ra_assists/src/move_bounds.rs new file mode 100644 index 000000000..526de1d98 --- /dev/null +++ b/crates/ra_assists/src/move_bounds.rs | |||
@@ -0,0 +1,135 @@ | |||
1 | use hir::db::HirDatabase; | ||
2 | use ra_syntax::{ | ||
3 | ast::{self, AstNode, NameOwner, TypeBoundsOwner}, | ||
4 | SyntaxElement, | ||
5 | SyntaxKind::*, | ||
6 | TextRange, | ||
7 | }; | ||
8 | |||
9 | use crate::{ast_editor::AstBuilder, Assist, AssistCtx, AssistId}; | ||
10 | |||
11 | pub(crate) fn move_bounds_to_where_clause(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | ||
12 | let type_param_list = ctx.node_at_offset::<ast::TypeParamList>()?; | ||
13 | |||
14 | let mut type_params = type_param_list.type_params(); | ||
15 | if type_params.all(|p| p.type_bound_list().is_none()) { | ||
16 | return None; | ||
17 | } | ||
18 | |||
19 | let parent = type_param_list.syntax().parent()?; | ||
20 | if parent.children_with_tokens().find(|it| it.kind() == WHERE_CLAUSE).is_some() { | ||
21 | return None; | ||
22 | } | ||
23 | |||
24 | let anchor: SyntaxElement = match parent.kind() { | ||
25 | FN_DEF => ast::FnDef::cast(parent)?.body()?.syntax().clone().into(), | ||
26 | TRAIT_DEF => ast::TraitDef::cast(parent)?.item_list()?.syntax().clone().into(), | ||
27 | IMPL_BLOCK => ast::ImplBlock::cast(parent)?.item_list()?.syntax().clone().into(), | ||
28 | ENUM_DEF => ast::EnumDef::cast(parent)?.variant_list()?.syntax().clone().into(), | ||
29 | STRUCT_DEF => parent | ||
30 | .children_with_tokens() | ||
31 | .find(|it| it.kind() == RECORD_FIELD_DEF_LIST || it.kind() == SEMI)?, | ||
32 | _ => return None, | ||
33 | }; | ||
34 | |||
35 | ctx.add_action( | ||
36 | AssistId("move_bounds_to_where_clause"), | ||
37 | "move_bounds_to_where_clause", | ||
38 | |edit| { | ||
39 | let type_params = type_param_list.type_params().collect::<Vec<_>>(); | ||
40 | |||
41 | for param in &type_params { | ||
42 | if let Some(bounds) = param.type_bound_list() { | ||
43 | let colon = param | ||
44 | .syntax() | ||
45 | .children_with_tokens() | ||
46 | .find(|it| it.kind() == COLON) | ||
47 | .unwrap(); | ||
48 | let start = colon.text_range().start(); | ||
49 | let end = bounds.syntax().text_range().end(); | ||
50 | edit.delete(TextRange::from_to(start, end)); | ||
51 | } | ||
52 | } | ||
53 | |||
54 | let predicates = type_params.iter().filter_map(build_predicate); | ||
55 | let where_clause = AstBuilder::<ast::WhereClause>::from_predicates(predicates); | ||
56 | |||
57 | let to_insert = match anchor.prev_sibling_or_token() { | ||
58 | Some(ref elem) if elem.kind() == WHITESPACE => { | ||
59 | format!("{} ", where_clause.syntax()) | ||
60 | } | ||
61 | _ => format!(" {}", where_clause.syntax()), | ||
62 | }; | ||
63 | edit.insert(anchor.text_range().start(), to_insert); | ||
64 | edit.target(type_param_list.syntax().text_range()); | ||
65 | }, | ||
66 | ); | ||
67 | |||
68 | ctx.build() | ||
69 | } | ||
70 | |||
71 | fn build_predicate(param: &ast::TypeParam) -> Option<ast::WherePred> { | ||
72 | let path = AstBuilder::<ast::Path>::from_name(param.name()?); | ||
73 | let predicate = | ||
74 | AstBuilder::<ast::WherePred>::from_pieces(path, param.type_bound_list()?.bounds()); | ||
75 | Some(predicate) | ||
76 | } | ||
77 | |||
78 | #[cfg(test)] | ||
79 | mod tests { | ||
80 | use super::*; | ||
81 | |||
82 | use crate::helpers::check_assist; | ||
83 | |||
84 | #[test] | ||
85 | fn move_bounds_to_where_clause_fn() { | ||
86 | check_assist( | ||
87 | move_bounds_to_where_clause, | ||
88 | r#" | ||
89 | fn foo<T: u32, <|>F: FnOnce(T) -> T>() {} | ||
90 | "#, | ||
91 | r#" | ||
92 | fn foo<T, <|>F>() where T: u32, F: FnOnce(T) -> T {} | ||
93 | "#, | ||
94 | ); | ||
95 | } | ||
96 | |||
97 | #[test] | ||
98 | fn move_bounds_to_where_clause_impl() { | ||
99 | check_assist( | ||
100 | move_bounds_to_where_clause, | ||
101 | r#" | ||
102 | impl<U: u32, <|>T> A<U, T> {} | ||
103 | "#, | ||
104 | r#" | ||
105 | impl<U, <|>T> A<U, T> where U: u32 {} | ||
106 | "#, | ||
107 | ); | ||
108 | } | ||
109 | |||
110 | #[test] | ||
111 | fn move_bounds_to_where_clause_struct() { | ||
112 | check_assist( | ||
113 | move_bounds_to_where_clause, | ||
114 | r#" | ||
115 | struct A<<|>T: Iterator<Item = u32>> {} | ||
116 | "#, | ||
117 | r#" | ||
118 | struct A<<|>T> where T: Iterator<Item = u32> {} | ||
119 | "#, | ||
120 | ); | ||
121 | } | ||
122 | |||
123 | #[test] | ||
124 | fn move_bounds_to_where_clause_tuple_struct() { | ||
125 | check_assist( | ||
126 | move_bounds_to_where_clause, | ||
127 | r#" | ||
128 | struct Pair<<|>T: u32>(T, T); | ||
129 | "#, | ||
130 | r#" | ||
131 | struct Pair<<|>T>(T, T) where T: u32; | ||
132 | "#, | ||
133 | ); | ||
134 | } | ||
135 | } | ||
diff --git a/crates/ra_batch/Cargo.toml b/crates/ra_batch/Cargo.toml index 5fc2703ee..62850746f 100644 --- a/crates/ra_batch/Cargo.toml +++ b/crates/ra_batch/Cargo.toml | |||
@@ -9,7 +9,7 @@ log = "0.4.5" | |||
9 | rustc-hash = "1.0" | 9 | rustc-hash = "1.0" |
10 | crossbeam-channel = "0.3.5" | 10 | crossbeam-channel = "0.3.5" |
11 | 11 | ||
12 | ra_vfs = "0.3.0" | 12 | ra_vfs = "0.4.0" |
13 | ra_vfs_glob = { path = "../ra_vfs_glob" } | 13 | ra_vfs_glob = { path = "../ra_vfs_glob" } |
14 | ra_db = { path = "../ra_db" } | 14 | ra_db = { path = "../ra_db" } |
15 | ra_ide_api = { path = "../ra_ide_api" } | 15 | ra_ide_api = { path = "../ra_ide_api" } |
diff --git a/crates/ra_batch/src/lib.rs b/crates/ra_batch/src/lib.rs index 4e5bad044..07a7e0c86 100644 --- a/crates/ra_batch/src/lib.rs +++ b/crates/ra_batch/src/lib.rs | |||
@@ -6,7 +6,7 @@ use crossbeam_channel::{unbounded, Receiver}; | |||
6 | use ra_db::{CrateGraph, FileId, SourceRootId}; | 6 | use ra_db::{CrateGraph, FileId, SourceRootId}; |
7 | use ra_ide_api::{AnalysisChange, AnalysisHost, FeatureFlags}; | 7 | use ra_ide_api::{AnalysisChange, AnalysisHost, FeatureFlags}; |
8 | use ra_project_model::{PackageRoot, ProjectWorkspace}; | 8 | use ra_project_model::{PackageRoot, ProjectWorkspace}; |
9 | use ra_vfs::{RootEntry, Vfs, VfsChange, VfsTask}; | 9 | use ra_vfs::{RootEntry, Vfs, VfsChange, VfsTask, Watch}; |
10 | use ra_vfs_glob::RustPackageFilterBuilder; | 10 | use ra_vfs_glob::RustPackageFilterBuilder; |
11 | 11 | ||
12 | type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>; | 12 | type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>; |
@@ -37,6 +37,7 @@ pub fn load_cargo(root: &Path) -> Result<(AnalysisHost, FxHashMap<SourceRootId, | |||
37 | }) | 37 | }) |
38 | .collect(), | 38 | .collect(), |
39 | sender, | 39 | sender, |
40 | Watch(false), | ||
40 | ); | 41 | ); |
41 | let crate_graph = ws.to_crate_graph(&mut |path: &Path| { | 42 | let crate_graph = ws.to_crate_graph(&mut |path: &Path| { |
42 | let vfs_file = vfs.load(path); | 43 | let vfs_file = vfs.load(path); |
diff --git a/crates/ra_cli/src/analysis_bench.rs b/crates/ra_cli/src/analysis_bench.rs index 9e76bcebf..01b96ec58 100644 --- a/crates/ra_cli/src/analysis_bench.rs +++ b/crates/ra_cli/src/analysis_bench.rs | |||
@@ -34,10 +34,11 @@ pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> { | |||
34 | .iter() | 34 | .iter() |
35 | .find_map(|(source_root_id, project_root)| { | 35 | .find_map(|(source_root_id, project_root)| { |
36 | if project_root.is_member() { | 36 | if project_root.is_member() { |
37 | for (rel_path, file_id) in &db.source_root(*source_root_id).files { | 37 | for file_id in db.source_root(*source_root_id).walk() { |
38 | let rel_path = db.file_relative_path(file_id); | ||
38 | let abs_path = rel_path.to_path(project_root.path()); | 39 | let abs_path = rel_path.to_path(project_root.path()); |
39 | if abs_path == path { | 40 | if abs_path == path { |
40 | return Some(*file_id); | 41 | return Some(file_id); |
41 | } | 42 | } |
42 | } | 43 | } |
43 | } | 44 | } |
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs index d355fa2e8..1fad5b233 100644 --- a/crates/ra_cli/src/analysis_stats.rs +++ b/crates/ra_cli/src/analysis_stats.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | use std::{collections::HashSet, fmt::Write, path::Path, time::Instant}; | 1 | use std::{collections::HashSet, fmt::Write, path::Path, time::Instant}; |
2 | 2 | ||
3 | use ra_db::SourceDatabase; | 3 | use ra_db::SourceDatabase; |
4 | use ra_hir::{Crate, HasBodySource, HasSource, HirDisplay, ImplItem, ModuleDef, Ty}; | 4 | use ra_hir::{Crate, HasBodySource, HasSource, HirDisplay, ImplItem, ModuleDef, Ty, TypeWalk}; |
5 | use ra_syntax::AstNode; | 5 | use ra_syntax::AstNode; |
6 | 6 | ||
7 | use crate::Result; | 7 | use crate::Result; |
@@ -110,9 +110,12 @@ pub fn run(verbose: bool, memory_usage: bool, path: &Path, only: Option<&str>) - | |||
110 | let original_file = src.file_id.original_file(db); | 110 | let original_file = src.file_id.original_file(db); |
111 | let path = db.file_relative_path(original_file); | 111 | let path = db.file_relative_path(original_file); |
112 | let line_index = host.analysis().file_line_index(original_file).unwrap(); | 112 | let line_index = host.analysis().file_line_index(original_file).unwrap(); |
113 | let text_range = src | ||
114 | .ast | ||
115 | .either(|it| it.syntax().text_range(), |it| it.syntax().text_range()); | ||
113 | let (start, end) = ( | 116 | let (start, end) = ( |
114 | line_index.line_col(src.ast.syntax().text_range().start()), | 117 | line_index.line_col(text_range.start()), |
115 | line_index.line_col(src.ast.syntax().text_range().end()), | 118 | line_index.line_col(text_range.end()), |
116 | ); | 119 | ); |
117 | bar.println(format!( | 120 | bar.println(format!( |
118 | "{} {}:{}-{}:{}: Expected {}, got {}", | 121 | "{} {}:{}-{}:{}: Expected {}, got {}", |
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs index ad8e10c52..d1ee3c036 100644 --- a/crates/ra_db/src/input.rs +++ b/crates/ra_db/src/input.rs | |||
@@ -5,7 +5,7 @@ | |||
5 | /// Note that neither this module, nor any other part of the analyzer's core do | 5 | /// Note that neither this module, nor any other part of the analyzer's core do |
6 | /// actual IO. See `vfs` and `project_model` in the `ra_lsp_server` crate for how | 6 | /// actual IO. See `vfs` and `project_model` in the `ra_lsp_server` crate for how |
7 | /// actual IO is done and lowered to input. | 7 | /// actual IO is done and lowered to input. |
8 | use relative_path::RelativePathBuf; | 8 | use relative_path::{RelativePath, RelativePathBuf}; |
9 | use rustc_hash::FxHashMap; | 9 | use rustc_hash::FxHashMap; |
10 | 10 | ||
11 | use ra_syntax::SmolStr; | 11 | use ra_syntax::SmolStr; |
@@ -36,7 +36,7 @@ pub struct SourceRoot { | |||
36 | /// Libraries are considered mostly immutable, this assumption is used to | 36 | /// Libraries are considered mostly immutable, this assumption is used to |
37 | /// optimize salsa's query structure | 37 | /// optimize salsa's query structure |
38 | pub is_library: bool, | 38 | pub is_library: bool, |
39 | pub files: FxHashMap<RelativePathBuf, FileId>, | 39 | files: FxHashMap<RelativePathBuf, FileId>, |
40 | } | 40 | } |
41 | 41 | ||
42 | impl SourceRoot { | 42 | impl SourceRoot { |
@@ -46,6 +46,18 @@ impl SourceRoot { | |||
46 | pub fn new_library() -> SourceRoot { | 46 | pub fn new_library() -> SourceRoot { |
47 | SourceRoot { is_library: true, ..SourceRoot::new() } | 47 | SourceRoot { is_library: true, ..SourceRoot::new() } |
48 | } | 48 | } |
49 | pub fn file_by_relative_path(&self, path: &RelativePath) -> Option<FileId> { | ||
50 | self.files.get(path).copied() | ||
51 | } | ||
52 | pub fn insert_file(&mut self, path: RelativePathBuf, file_id: FileId) { | ||
53 | self.files.insert(path, file_id); | ||
54 | } | ||
55 | pub fn remove_file(&mut self, path: &RelativePath) { | ||
56 | self.files.remove(path); | ||
57 | } | ||
58 | pub fn walk(&self) -> impl Iterator<Item = FileId> + '_ { | ||
59 | self.files.values().copied() | ||
60 | } | ||
49 | } | 61 | } |
50 | 62 | ||
51 | /// `CrateGraph` is a bit of information which turns a set of text files into a | 63 | /// `CrateGraph` is a bit of information which turns a set of text files into a |
diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs index b82d1bda0..c54791b7a 100644 --- a/crates/ra_db/src/lib.rs +++ b/crates/ra_db/src/lib.rs | |||
@@ -12,7 +12,7 @@ pub use crate::{ | |||
12 | cancellation::Canceled, | 12 | cancellation::Canceled, |
13 | input::{CrateGraph, CrateId, Dependency, Edition, FileId, SourceRoot, SourceRootId}, | 13 | input::{CrateGraph, CrateId, Dependency, Edition, FileId, SourceRoot, SourceRootId}, |
14 | }; | 14 | }; |
15 | pub use ::salsa; | 15 | pub use salsa; |
16 | 16 | ||
17 | pub trait CheckCanceled { | 17 | pub trait CheckCanceled { |
18 | /// Aborts current query if there are pending changes. | 18 | /// Aborts current query if there are pending changes. |
@@ -93,8 +93,7 @@ pub trait SourceDatabase: CheckCanceled + std::fmt::Debug { | |||
93 | fn source_root_crates(db: &impl SourceDatabase, id: SourceRootId) -> Arc<Vec<CrateId>> { | 93 | fn source_root_crates(db: &impl SourceDatabase, id: SourceRootId) -> Arc<Vec<CrateId>> { |
94 | let root = db.source_root(id); | 94 | let root = db.source_root(id); |
95 | let graph = db.crate_graph(); | 95 | let graph = db.crate_graph(); |
96 | let res = | 96 | let res = root.walk().filter_map(|it| graph.crate_id_for_crate_root(it)).collect::<Vec<_>>(); |
97 | root.files.values().filter_map(|&it| graph.crate_id_for_crate_root(it)).collect::<Vec<_>>(); | ||
98 | Arc::new(res) | 97 | Arc::new(res) |
99 | } | 98 | } |
100 | 99 | ||
diff --git a/crates/ra_hir/src/code_model/src.rs b/crates/ra_hir/src/code_model/src.rs index 7c9454c0b..b9ffb0c7a 100644 --- a/crates/ra_hir/src/code_model/src.rs +++ b/crates/ra_hir/src/code_model/src.rs | |||
@@ -1,11 +1,15 @@ | |||
1 | use ra_syntax::ast::{self, AstNode}; | 1 | use ra_syntax::{ |
2 | ast::{self, AstNode}, | ||
3 | SyntaxNode, | ||
4 | }; | ||
2 | 5 | ||
3 | use crate::{ | 6 | use crate::{ |
4 | ids::AstItemDef, AstDatabase, Const, DefDatabase, Enum, EnumVariant, FieldSource, Function, | 7 | ids::AstItemDef, AstDatabase, Const, DefDatabase, Either, Enum, EnumVariant, FieldSource, |
5 | HasBody, HirDatabase, HirFileId, MacroDef, Module, ModuleSource, Static, Struct, StructField, | 8 | Function, HasBody, HirDatabase, HirFileId, MacroDef, Module, ModuleSource, Static, Struct, |
6 | Trait, TypeAlias, Union, | 9 | StructField, Trait, TypeAlias, Union, |
7 | }; | 10 | }; |
8 | 11 | ||
12 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] | ||
9 | pub struct Source<T> { | 13 | pub struct Source<T> { |
10 | pub file_id: HirFileId, | 14 | pub file_id: HirFileId, |
11 | pub ast: T, | 15 | pub ast: T, |
@@ -16,6 +20,15 @@ pub trait HasSource { | |||
16 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<Self::Ast>; | 20 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<Self::Ast>; |
17 | } | 21 | } |
18 | 22 | ||
23 | impl<T> Source<T> { | ||
24 | pub(crate) fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> { | ||
25 | Source { file_id: self.file_id, ast: f(self.ast) } | ||
26 | } | ||
27 | pub(crate) fn file_syntax(&self, db: &impl AstDatabase) -> SyntaxNode { | ||
28 | db.parse_or_expand(self.file_id).expect("source created from invalid file") | ||
29 | } | ||
30 | } | ||
31 | |||
19 | /// NB: Module is !HasSource, because it has two source nodes at the same time: | 32 | /// NB: Module is !HasSource, because it has two source nodes at the same time: |
20 | /// definition and declaration. | 33 | /// definition and declaration. |
21 | impl Module { | 34 | impl Module { |
@@ -117,12 +130,12 @@ where | |||
117 | self, | 130 | self, |
118 | db: &impl HirDatabase, | 131 | db: &impl HirDatabase, |
119 | expr_id: crate::expr::ExprId, | 132 | expr_id: crate::expr::ExprId, |
120 | ) -> Option<Source<ast::Expr>> { | 133 | ) -> Option<Source<Either<ast::Expr, ast::RecordField>>> { |
121 | let source_map = self.body_source_map(db); | 134 | let source_map = self.body_source_map(db); |
122 | let expr_syntax = source_map.expr_syntax(expr_id)?.a()?; | 135 | let source_ptr = source_map.expr_syntax(expr_id)?; |
123 | let source = self.source(db); | 136 | let root = source_ptr.file_syntax(db); |
124 | let ast = expr_syntax.to_node(&source.ast.syntax()); | 137 | let source = source_ptr.map(|ast| ast.map(|it| it.to_node(&root), |it| it.to_node(&root))); |
125 | Some(Source { file_id: source.file_id, ast }) | 138 | Some(source) |
126 | } | 139 | } |
127 | } | 140 | } |
128 | 141 | ||
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs index f669ab969..7b7974f5b 100644 --- a/crates/ra_hir/src/db.rs +++ b/crates/ra_hir/src/db.rs | |||
@@ -169,13 +169,13 @@ pub trait HirDatabase: DefDatabase + AstDatabase { | |||
169 | #[salsa::invoke(crate::ty::generic_defaults_query)] | 169 | #[salsa::invoke(crate::ty::generic_defaults_query)] |
170 | fn generic_defaults(&self, def: GenericDef) -> Substs; | 170 | fn generic_defaults(&self, def: GenericDef) -> Substs; |
171 | 171 | ||
172 | #[salsa::invoke(crate::expr::lower::body_with_source_map_query)] | 172 | #[salsa::invoke(crate::expr::body_with_source_map_query)] |
173 | fn body_with_source_map( | 173 | fn body_with_source_map( |
174 | &self, | 174 | &self, |
175 | def: DefWithBody, | 175 | def: DefWithBody, |
176 | ) -> (Arc<crate::expr::Body>, Arc<crate::expr::BodySourceMap>); | 176 | ) -> (Arc<crate::expr::Body>, Arc<crate::expr::BodySourceMap>); |
177 | 177 | ||
178 | #[salsa::invoke(crate::expr::lower::body_hir_query)] | 178 | #[salsa::invoke(crate::expr::body_hir_query)] |
179 | fn body_hir(&self, def: DefWithBody) -> Arc<crate::expr::Body>; | 179 | fn body_hir(&self, def: DefWithBody) -> Arc<crate::expr::Body>; |
180 | 180 | ||
181 | #[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)] | 181 | #[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)] |
diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs index bfd250f38..fc21e269f 100644 --- a/crates/ra_hir/src/expr.rs +++ b/crates/ra_hir/src/expr.rs | |||
@@ -12,7 +12,7 @@ use crate::{ | |||
12 | path::GenericArgs, | 12 | path::GenericArgs, |
13 | ty::primitive::{UncertainFloatTy, UncertainIntTy}, | 13 | ty::primitive::{UncertainFloatTy, UncertainIntTy}, |
14 | type_ref::{Mutability, TypeRef}, | 14 | type_ref::{Mutability, TypeRef}, |
15 | DefWithBody, Either, HirDatabase, Name, Path, Resolver, | 15 | DefWithBody, Either, HasSource, HirDatabase, Name, Path, Resolver, Source, |
16 | }; | 16 | }; |
17 | 17 | ||
18 | pub use self::scope::ExprScopes; | 18 | pub use self::scope::ExprScopes; |
@@ -43,23 +43,32 @@ pub struct Body { | |||
43 | body_expr: ExprId, | 43 | body_expr: ExprId, |
44 | } | 44 | } |
45 | 45 | ||
46 | type ExprPtr = Either<AstPtr<ast::Expr>, AstPtr<ast::RecordField>>; | ||
47 | type ExprSource = Source<ExprPtr>; | ||
48 | |||
49 | type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>; | ||
50 | type PatSource = Source<PatPtr>; | ||
51 | |||
46 | /// An item body together with the mapping from syntax nodes to HIR expression | 52 | /// An item body together with the mapping from syntax nodes to HIR expression |
47 | /// IDs. This is needed to go from e.g. a position in a file to the HIR | 53 | /// IDs. This is needed to go from e.g. a position in a file to the HIR |
48 | /// expression containing it; but for type inference etc., we want to operate on | 54 | /// expression containing it; but for type inference etc., we want to operate on |
49 | /// a structure that is agnostic to the actual positions of expressions in the | 55 | /// a structure that is agnostic to the actual positions of expressions in the |
50 | /// file, so that we don't recompute types whenever some whitespace is typed. | 56 | /// file, so that we don't recompute types whenever some whitespace is typed. |
57 | /// | ||
58 | /// One complication here is that, due to macro expansion, a single `Body` might | ||
59 | /// be spread across several files. So, for each ExprId and PatId, we record | ||
60 | /// both the HirFileId and the position inside the file. However, we only store | ||
61 | /// AST -> ExprId mapping for non-macro files, as it is not clear how to handle | ||
62 | /// this properly for macros. | ||
51 | #[derive(Default, Debug, Eq, PartialEq)] | 63 | #[derive(Default, Debug, Eq, PartialEq)] |
52 | pub struct BodySourceMap { | 64 | pub struct BodySourceMap { |
53 | expr_map: FxHashMap<ExprPtr, ExprId>, | 65 | expr_map: FxHashMap<ExprPtr, ExprId>, |
54 | expr_map_back: ArenaMap<ExprId, ExprPtr>, | 66 | expr_map_back: ArenaMap<ExprId, ExprSource>, |
55 | pat_map: FxHashMap<PatPtr, PatId>, | 67 | pat_map: FxHashMap<PatPtr, PatId>, |
56 | pat_map_back: ArenaMap<PatId, PatPtr>, | 68 | pat_map_back: ArenaMap<PatId, PatSource>, |
57 | field_map: FxHashMap<(ExprId, usize), AstPtr<ast::RecordField>>, | 69 | field_map: FxHashMap<(ExprId, usize), AstPtr<ast::RecordField>>, |
58 | } | 70 | } |
59 | 71 | ||
60 | type ExprPtr = Either<AstPtr<ast::Expr>, AstPtr<ast::RecordField>>; | ||
61 | type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>; | ||
62 | |||
63 | impl Body { | 72 | impl Body { |
64 | pub fn params(&self) -> &[PatId] { | 73 | pub fn params(&self) -> &[PatId] { |
65 | &self.params | 74 | &self.params |
@@ -123,16 +132,16 @@ impl Index<PatId> for Body { | |||
123 | } | 132 | } |
124 | 133 | ||
125 | impl BodySourceMap { | 134 | impl BodySourceMap { |
126 | pub(crate) fn expr_syntax(&self, expr: ExprId) -> Option<ExprPtr> { | 135 | pub(crate) fn expr_syntax(&self, expr: ExprId) -> Option<ExprSource> { |
127 | self.expr_map_back.get(expr).cloned() | 136 | self.expr_map_back.get(expr).copied() |
128 | } | 137 | } |
129 | 138 | ||
130 | pub(crate) fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> { | 139 | pub(crate) fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> { |
131 | self.expr_map.get(&Either::A(AstPtr::new(node))).cloned() | 140 | self.expr_map.get(&Either::A(AstPtr::new(node))).cloned() |
132 | } | 141 | } |
133 | 142 | ||
134 | pub(crate) fn pat_syntax(&self, pat: PatId) -> Option<PatPtr> { | 143 | pub(crate) fn pat_syntax(&self, pat: PatId) -> Option<PatSource> { |
135 | self.pat_map_back.get(pat).cloned() | 144 | self.pat_map_back.get(pat).copied() |
136 | } | 145 | } |
137 | 146 | ||
138 | pub(crate) fn node_pat(&self, node: &ast::Pat) -> Option<PatId> { | 147 | pub(crate) fn node_pat(&self, node: &ast::Pat) -> Option<PatId> { |
@@ -524,3 +533,34 @@ impl Pat { | |||
524 | } | 533 | } |
525 | } | 534 | } |
526 | } | 535 | } |
536 | |||
537 | // Queries | ||
538 | pub(crate) fn body_with_source_map_query( | ||
539 | db: &impl HirDatabase, | ||
540 | def: DefWithBody, | ||
541 | ) -> (Arc<Body>, Arc<BodySourceMap>) { | ||
542 | let mut params = None; | ||
543 | |||
544 | let (file_id, body) = match def { | ||
545 | DefWithBody::Function(f) => { | ||
546 | let src = f.source(db); | ||
547 | params = src.ast.param_list(); | ||
548 | (src.file_id, src.ast.body().map(ast::Expr::from)) | ||
549 | } | ||
550 | DefWithBody::Const(c) => { | ||
551 | let src = c.source(db); | ||
552 | (src.file_id, src.ast.body()) | ||
553 | } | ||
554 | DefWithBody::Static(s) => { | ||
555 | let src = s.source(db); | ||
556 | (src.file_id, src.ast.body()) | ||
557 | } | ||
558 | }; | ||
559 | |||
560 | let (body, source_map) = lower::lower(db, def.resolver(db), file_id, def, params, body); | ||
561 | (Arc::new(body), Arc::new(source_map)) | ||
562 | } | ||
563 | |||
564 | pub(crate) fn body_hir_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<Body> { | ||
565 | db.body_with_source_map(def).0 | ||
566 | } | ||
diff --git a/crates/ra_hir/src/expr/lower.rs b/crates/ra_hir/src/expr/lower.rs index f6a75a379..6afd80989 100644 --- a/crates/ra_hir/src/expr/lower.rs +++ b/crates/ra_hir/src/expr/lower.rs | |||
@@ -1,5 +1,3 @@ | |||
1 | use std::sync::Arc; | ||
2 | |||
3 | use ra_arena::Arena; | 1 | use ra_arena::Arena; |
4 | use ra_syntax::{ | 2 | use ra_syntax::{ |
5 | ast::{ | 3 | ast::{ |
@@ -15,8 +13,8 @@ use crate::{ | |||
15 | path::GenericArgs, | 13 | path::GenericArgs, |
16 | ty::primitive::{FloatTy, IntTy, UncertainFloatTy, UncertainIntTy}, | 14 | ty::primitive::{FloatTy, IntTy, UncertainFloatTy, UncertainIntTy}, |
17 | type_ref::TypeRef, | 15 | type_ref::TypeRef, |
18 | DefWithBody, Either, HasSource, HirDatabase, HirFileId, MacroCallLoc, MacroFileKind, | 16 | DefWithBody, Either, HirDatabase, HirFileId, MacroCallLoc, MacroFileKind, Mutability, Path, |
19 | Mutability, Path, Resolver, | 17 | Resolver, Source, |
20 | }; | 18 | }; |
21 | 19 | ||
22 | use super::{ | 20 | use super::{ |
@@ -24,14 +22,33 @@ use super::{ | |||
24 | LogicOp, MatchArm, Ordering, Pat, PatId, PatPtr, RecordFieldPat, RecordLitField, Statement, | 22 | LogicOp, MatchArm, Ordering, Pat, PatId, PatPtr, RecordFieldPat, RecordLitField, Statement, |
25 | }; | 23 | }; |
26 | 24 | ||
27 | pub(crate) struct ExprCollector<DB> { | 25 | pub(super) fn lower( |
28 | db: DB, | 26 | db: &impl HirDatabase, |
27 | resolver: Resolver, | ||
28 | file_id: HirFileId, | ||
29 | owner: DefWithBody, | 29 | owner: DefWithBody, |
30 | exprs: Arena<ExprId, Expr>, | 30 | params: Option<ast::ParamList>, |
31 | pats: Arena<PatId, Pat>, | 31 | body: Option<ast::Expr>, |
32 | source_map: BodySourceMap, | 32 | ) -> (Body, BodySourceMap) { |
33 | params: Vec<PatId>, | 33 | ExprCollector { |
34 | body_expr: Option<ExprId>, | 34 | resolver, |
35 | db, | ||
36 | original_file_id: file_id, | ||
37 | current_file_id: file_id, | ||
38 | source_map: BodySourceMap::default(), | ||
39 | body: Body { | ||
40 | owner, | ||
41 | exprs: Arena::default(), | ||
42 | pats: Arena::default(), | ||
43 | params: Vec::new(), | ||
44 | body_expr: ExprId((!0).into()), | ||
45 | }, | ||
46 | } | ||
47 | .collect(params, body) | ||
48 | } | ||
49 | |||
50 | struct ExprCollector<DB> { | ||
51 | db: DB, | ||
35 | resolver: Resolver, | 52 | resolver: Resolver, |
36 | // Expr collector expands macros along the way. original points to the file | 53 | // Expr collector expands macros along the way. original points to the file |
37 | // we started with, current points to the current macro expansion. source | 54 | // we started with, current points to the current macro expansion. source |
@@ -39,50 +56,95 @@ pub(crate) struct ExprCollector<DB> { | |||
39 | // current == original (see #1196) | 56 | // current == original (see #1196) |
40 | original_file_id: HirFileId, | 57 | original_file_id: HirFileId, |
41 | current_file_id: HirFileId, | 58 | current_file_id: HirFileId, |
59 | |||
60 | body: Body, | ||
61 | source_map: BodySourceMap, | ||
42 | } | 62 | } |
43 | 63 | ||
44 | impl<'a, DB> ExprCollector<&'a DB> | 64 | impl<'a, DB> ExprCollector<&'a DB> |
45 | where | 65 | where |
46 | DB: HirDatabase, | 66 | DB: HirDatabase, |
47 | { | 67 | { |
48 | fn new(owner: DefWithBody, file_id: HirFileId, resolver: Resolver, db: &'a DB) -> Self { | 68 | fn collect( |
49 | ExprCollector { | 69 | mut self, |
50 | owner, | 70 | param_list: Option<ast::ParamList>, |
51 | resolver, | 71 | body: Option<ast::Expr>, |
52 | db, | 72 | ) -> (Body, BodySourceMap) { |
53 | exprs: Arena::default(), | 73 | if let Some(param_list) = param_list { |
54 | pats: Arena::default(), | 74 | if let Some(self_param) = param_list.self_param() { |
55 | source_map: BodySourceMap::default(), | 75 | let ptr = AstPtr::new(&self_param); |
56 | params: Vec::new(), | 76 | let param_pat = self.alloc_pat( |
57 | body_expr: None, | 77 | Pat::Bind { |
58 | original_file_id: file_id, | 78 | name: SELF_PARAM, |
59 | current_file_id: file_id, | 79 | mode: BindingAnnotation::Unannotated, |
60 | } | 80 | subpat: None, |
81 | }, | ||
82 | Either::B(ptr), | ||
83 | ); | ||
84 | self.body.params.push(param_pat); | ||
85 | } | ||
86 | |||
87 | for param in param_list.params() { | ||
88 | let pat = match param.pat() { | ||
89 | None => continue, | ||
90 | Some(pat) => pat, | ||
91 | }; | ||
92 | let param_pat = self.collect_pat(pat); | ||
93 | self.body.params.push(param_pat); | ||
94 | } | ||
95 | }; | ||
96 | |||
97 | self.body.body_expr = self.collect_expr_opt(body); | ||
98 | (self.body, self.source_map) | ||
61 | } | 99 | } |
100 | |||
62 | fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr<ast::Expr>) -> ExprId { | 101 | fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr<ast::Expr>) -> ExprId { |
63 | let ptr = Either::A(ptr); | 102 | let ptr = Either::A(ptr); |
64 | let id = self.exprs.alloc(expr); | 103 | let id = self.body.exprs.alloc(expr); |
65 | if self.current_file_id == self.original_file_id { | 104 | if self.current_file_id == self.original_file_id { |
66 | self.source_map.expr_map.insert(ptr, id); | 105 | self.source_map.expr_map.insert(ptr, id); |
67 | self.source_map.expr_map_back.insert(id, ptr); | ||
68 | } | 106 | } |
107 | self.source_map | ||
108 | .expr_map_back | ||
109 | .insert(id, Source { file_id: self.current_file_id, ast: ptr }); | ||
110 | id | ||
111 | } | ||
112 | // desugared exprs don't have ptr, that's wrong and should be fixed | ||
113 | // somehow. | ||
114 | fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId { | ||
115 | self.body.exprs.alloc(expr) | ||
116 | } | ||
117 | fn alloc_expr_field_shorthand(&mut self, expr: Expr, ptr: AstPtr<ast::RecordField>) -> ExprId { | ||
118 | let ptr = Either::B(ptr); | ||
119 | let id = self.body.exprs.alloc(expr); | ||
120 | if self.current_file_id == self.original_file_id { | ||
121 | self.source_map.expr_map.insert(ptr, id); | ||
122 | } | ||
123 | self.source_map | ||
124 | .expr_map_back | ||
125 | .insert(id, Source { file_id: self.current_file_id, ast: ptr }); | ||
69 | id | 126 | id |
70 | } | 127 | } |
71 | |||
72 | fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { | 128 | fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { |
73 | let id = self.pats.alloc(pat); | 129 | let id = self.body.pats.alloc(pat); |
74 | |||
75 | if self.current_file_id == self.original_file_id { | 130 | if self.current_file_id == self.original_file_id { |
76 | self.source_map.pat_map.insert(ptr, id); | 131 | self.source_map.pat_map.insert(ptr, id); |
77 | self.source_map.pat_map_back.insert(id, ptr); | ||
78 | } | 132 | } |
79 | 133 | self.source_map.pat_map_back.insert(id, Source { file_id: self.current_file_id, ast: ptr }); | |
80 | id | 134 | id |
81 | } | 135 | } |
82 | 136 | ||
83 | fn empty_block(&mut self) -> ExprId { | 137 | fn empty_block(&mut self) -> ExprId { |
84 | let block = Expr::Block { statements: Vec::new(), tail: None }; | 138 | let block = Expr::Block { statements: Vec::new(), tail: None }; |
85 | self.exprs.alloc(block) | 139 | self.body.exprs.alloc(block) |
140 | } | ||
141 | |||
142 | fn missing_expr(&mut self) -> ExprId { | ||
143 | self.body.exprs.alloc(Expr::Missing) | ||
144 | } | ||
145 | |||
146 | fn missing_pat(&mut self) -> PatId { | ||
147 | self.body.pats.alloc(Pat::Missing) | ||
86 | } | 148 | } |
87 | 149 | ||
88 | fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { | 150 | fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { |
@@ -100,14 +162,14 @@ where | |||
100 | }); | 162 | }); |
101 | 163 | ||
102 | let condition = match e.condition() { | 164 | let condition = match e.condition() { |
103 | None => self.exprs.alloc(Expr::Missing), | 165 | None => self.missing_expr(), |
104 | Some(condition) => match condition.pat() { | 166 | Some(condition) => match condition.pat() { |
105 | None => self.collect_expr_opt(condition.expr()), | 167 | None => self.collect_expr_opt(condition.expr()), |
106 | // if let -- desugar to match | 168 | // if let -- desugar to match |
107 | Some(pat) => { | 169 | Some(pat) => { |
108 | let pat = self.collect_pat(pat); | 170 | let pat = self.collect_pat(pat); |
109 | let match_expr = self.collect_expr_opt(condition.expr()); | 171 | let match_expr = self.collect_expr_opt(condition.expr()); |
110 | let placeholder_pat = self.pats.alloc(Pat::Missing); | 172 | let placeholder_pat = self.missing_pat(); |
111 | let arms = vec![ | 173 | let arms = vec![ |
112 | MatchArm { pats: vec![pat], expr: then_branch, guard: None }, | 174 | MatchArm { pats: vec![pat], expr: then_branch, guard: None }, |
113 | MatchArm { | 175 | MatchArm { |
@@ -137,7 +199,7 @@ where | |||
137 | let body = self.collect_block_opt(e.loop_body()); | 199 | let body = self.collect_block_opt(e.loop_body()); |
138 | 200 | ||
139 | let condition = match e.condition() { | 201 | let condition = match e.condition() { |
140 | None => self.exprs.alloc(Expr::Missing), | 202 | None => self.missing_expr(), |
141 | Some(condition) => match condition.pat() { | 203 | Some(condition) => match condition.pat() { |
142 | None => self.collect_expr_opt(condition.expr()), | 204 | None => self.collect_expr_opt(condition.expr()), |
143 | // if let -- desugar to match | 205 | // if let -- desugar to match |
@@ -145,14 +207,14 @@ where | |||
145 | tested_by!(infer_while_let); | 207 | tested_by!(infer_while_let); |
146 | let pat = self.collect_pat(pat); | 208 | let pat = self.collect_pat(pat); |
147 | let match_expr = self.collect_expr_opt(condition.expr()); | 209 | let match_expr = self.collect_expr_opt(condition.expr()); |
148 | let placeholder_pat = self.pats.alloc(Pat::Missing); | 210 | let placeholder_pat = self.missing_pat(); |
149 | let break_ = self.exprs.alloc(Expr::Break { expr: None }); | 211 | let break_ = self.alloc_expr_desugared(Expr::Break { expr: None }); |
150 | let arms = vec![ | 212 | let arms = vec![ |
151 | MatchArm { pats: vec![pat], expr: body, guard: None }, | 213 | MatchArm { pats: vec![pat], expr: body, guard: None }, |
152 | MatchArm { pats: vec![placeholder_pat], expr: break_, guard: None }, | 214 | MatchArm { pats: vec![placeholder_pat], expr: break_, guard: None }, |
153 | ]; | 215 | ]; |
154 | let match_expr = | 216 | let match_expr = |
155 | self.exprs.alloc(Expr::Match { expr: match_expr, arms }); | 217 | self.alloc_expr_desugared(Expr::Match { expr: match_expr, arms }); |
156 | return self.alloc_expr(Expr::Loop { body: match_expr }, syntax_ptr); | 218 | return self.alloc_expr(Expr::Loop { body: match_expr }, syntax_ptr); |
157 | } | 219 | } |
158 | }, | 220 | }, |
@@ -247,13 +309,12 @@ where | |||
247 | self.collect_expr(e) | 309 | self.collect_expr(e) |
248 | } else if let Some(nr) = field.name_ref() { | 310 | } else if let Some(nr) = field.name_ref() { |
249 | // field shorthand | 311 | // field shorthand |
250 | let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(&nr))); | 312 | self.alloc_expr_field_shorthand( |
251 | let ptr = Either::B(AstPtr::new(&field)); | 313 | Expr::Path(Path::from_name_ref(&nr)), |
252 | self.source_map.expr_map.insert(ptr, id); | 314 | AstPtr::new(&field), |
253 | self.source_map.expr_map_back.insert(id, ptr); | 315 | ) |
254 | id | ||
255 | } else { | 316 | } else { |
256 | self.exprs.alloc(Expr::Missing) | 317 | self.missing_expr() |
257 | }, | 318 | }, |
258 | }) | 319 | }) |
259 | .collect(); | 320 | .collect(); |
@@ -420,7 +481,7 @@ where | |||
420 | if let Some(expr) = expr { | 481 | if let Some(expr) = expr { |
421 | self.collect_expr(expr) | 482 | self.collect_expr(expr) |
422 | } else { | 483 | } else { |
423 | self.exprs.alloc(Expr::Missing) | 484 | self.missing_expr() |
424 | } | 485 | } |
425 | } | 486 | } |
426 | 487 | ||
@@ -450,7 +511,7 @@ where | |||
450 | if let Some(block) = expr { | 511 | if let Some(block) = expr { |
451 | self.collect_block(block) | 512 | self.collect_block(block) |
452 | } else { | 513 | } else { |
453 | self.exprs.alloc(Expr::Missing) | 514 | self.missing_expr() |
454 | } | 515 | } |
455 | } | 516 | } |
456 | 517 | ||
@@ -519,60 +580,9 @@ where | |||
519 | if let Some(pat) = pat { | 580 | if let Some(pat) = pat { |
520 | self.collect_pat(pat) | 581 | self.collect_pat(pat) |
521 | } else { | 582 | } else { |
522 | self.pats.alloc(Pat::Missing) | 583 | self.missing_pat() |
523 | } | 584 | } |
524 | } | 585 | } |
525 | |||
526 | fn collect_const_body(&mut self, node: ast::ConstDef) { | ||
527 | let body = self.collect_expr_opt(node.body()); | ||
528 | self.body_expr = Some(body); | ||
529 | } | ||
530 | |||
531 | fn collect_static_body(&mut self, node: ast::StaticDef) { | ||
532 | let body = self.collect_expr_opt(node.body()); | ||
533 | self.body_expr = Some(body); | ||
534 | } | ||
535 | |||
536 | fn collect_fn_body(&mut self, node: ast::FnDef) { | ||
537 | if let Some(param_list) = node.param_list() { | ||
538 | if let Some(self_param) = param_list.self_param() { | ||
539 | let ptr = AstPtr::new(&self_param); | ||
540 | let param_pat = self.alloc_pat( | ||
541 | Pat::Bind { | ||
542 | name: SELF_PARAM, | ||
543 | mode: BindingAnnotation::Unannotated, | ||
544 | subpat: None, | ||
545 | }, | ||
546 | Either::B(ptr), | ||
547 | ); | ||
548 | self.params.push(param_pat); | ||
549 | } | ||
550 | |||
551 | for param in param_list.params() { | ||
552 | let pat = if let Some(pat) = param.pat() { | ||
553 | pat | ||
554 | } else { | ||
555 | continue; | ||
556 | }; | ||
557 | let param_pat = self.collect_pat(pat); | ||
558 | self.params.push(param_pat); | ||
559 | } | ||
560 | }; | ||
561 | |||
562 | let body = self.collect_block_opt(node.body()); | ||
563 | self.body_expr = Some(body); | ||
564 | } | ||
565 | |||
566 | fn finish(self) -> (Body, BodySourceMap) { | ||
567 | let body = Body { | ||
568 | owner: self.owner, | ||
569 | exprs: self.exprs, | ||
570 | pats: self.pats, | ||
571 | params: self.params, | ||
572 | body_expr: self.body_expr.expect("A body should have been collected"), | ||
573 | }; | ||
574 | (body, self.source_map) | ||
575 | } | ||
576 | } | 586 | } |
577 | 587 | ||
578 | impl From<ast::BinOp> for BinaryOp { | 588 | impl From<ast::BinOp> for BinaryOp { |
@@ -618,35 +628,3 @@ impl From<ast::BinOp> for BinaryOp { | |||
618 | } | 628 | } |
619 | } | 629 | } |
620 | } | 630 | } |
621 | |||
622 | pub(crate) fn body_with_source_map_query( | ||
623 | db: &impl HirDatabase, | ||
624 | def: DefWithBody, | ||
625 | ) -> (Arc<Body>, Arc<BodySourceMap>) { | ||
626 | let mut collector; | ||
627 | |||
628 | match def { | ||
629 | DefWithBody::Const(ref c) => { | ||
630 | let src = c.source(db); | ||
631 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); | ||
632 | collector.collect_const_body(src.ast) | ||
633 | } | ||
634 | DefWithBody::Function(ref f) => { | ||
635 | let src = f.source(db); | ||
636 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); | ||
637 | collector.collect_fn_body(src.ast) | ||
638 | } | ||
639 | DefWithBody::Static(ref s) => { | ||
640 | let src = s.source(db); | ||
641 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); | ||
642 | collector.collect_static_body(src.ast) | ||
643 | } | ||
644 | } | ||
645 | |||
646 | let (body, source_map) = collector.finish(); | ||
647 | (Arc::new(body), Arc::new(source_map)) | ||
648 | } | ||
649 | |||
650 | pub(crate) fn body_hir_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<Body> { | ||
651 | db.body_with_source_map(def).0 | ||
652 | } | ||
diff --git a/crates/ra_hir/src/expr/validation.rs b/crates/ra_hir/src/expr/validation.rs index 6fdaf1fce..1202913e2 100644 --- a/crates/ra_hir/src/expr/validation.rs +++ b/crates/ra_hir/src/expr/validation.rs | |||
@@ -1,9 +1,8 @@ | |||
1 | use std::sync::Arc; | 1 | use std::sync::Arc; |
2 | 2 | ||
3 | use ra_syntax::ast::{self, AstNode}; | 3 | use ra_syntax::ast; |
4 | use rustc_hash::FxHashSet; | 4 | use rustc_hash::FxHashSet; |
5 | 5 | ||
6 | use super::{Expr, ExprId, RecordLitField}; | ||
7 | use crate::{ | 6 | use crate::{ |
8 | adt::AdtDef, | 7 | adt::AdtDef, |
9 | diagnostics::{DiagnosticSink, MissingFields, MissingOkInTailExpr}, | 8 | diagnostics::{DiagnosticSink, MissingFields, MissingOkInTailExpr}, |
@@ -11,9 +10,11 @@ use crate::{ | |||
11 | name, | 10 | name, |
12 | path::{PathKind, PathSegment}, | 11 | path::{PathKind, PathSegment}, |
13 | ty::{ApplicationTy, InferenceResult, Ty, TypeCtor}, | 12 | ty::{ApplicationTy, InferenceResult, Ty, TypeCtor}, |
14 | Function, HasSource, HirDatabase, ModuleDef, Name, Path, PerNs, Resolution, | 13 | Function, HirDatabase, ModuleDef, Name, Path, PerNs, Resolution, |
15 | }; | 14 | }; |
16 | 15 | ||
16 | use super::{Expr, ExprId, RecordLitField}; | ||
17 | |||
17 | pub(crate) struct ExprValidator<'a, 'b: 'a> { | 18 | pub(crate) struct ExprValidator<'a, 'b: 'a> { |
18 | func: Function, | 19 | func: Function, |
19 | infer: Arc<InferenceResult>, | 20 | infer: Arc<InferenceResult>, |
@@ -78,25 +79,20 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
78 | return; | 79 | return; |
79 | } | 80 | } |
80 | let source_map = self.func.body_source_map(db); | 81 | let source_map = self.func.body_source_map(db); |
81 | let file_id = self.func.source(db).file_id; | 82 | |
82 | let parse = db.parse(file_id.original_file(db)); | 83 | if let Some(source_ptr) = source_map.expr_syntax(id) { |
83 | let source_file = parse.tree(); | 84 | if let Some(expr) = source_ptr.ast.a() { |
84 | if let Some(field_list_node) = source_map | 85 | let root = source_ptr.file_syntax(db); |
85 | .expr_syntax(id) | 86 | if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) { |
86 | .and_then(|ptr| ptr.a()) | 87 | if let Some(field_list) = record_lit.record_field_list() { |
87 | .map(|ptr| ptr.to_node(source_file.syntax())) | 88 | self.sink.push(MissingFields { |
88 | .and_then(|expr| match expr { | 89 | file: source_ptr.file_id, |
89 | ast::Expr::RecordLit(it) => Some(it), | 90 | field_list: AstPtr::new(&field_list), |
90 | _ => None, | 91 | missed_fields, |
91 | }) | 92 | }) |
92 | .and_then(|lit| lit.record_field_list()) | 93 | } |
93 | { | 94 | } |
94 | let field_list_ptr = AstPtr::new(&field_list_node); | 95 | } |
95 | self.sink.push(MissingFields { | ||
96 | file: file_id, | ||
97 | field_list: field_list_ptr, | ||
98 | missed_fields, | ||
99 | }) | ||
100 | } | 96 | } |
101 | } | 97 | } |
102 | 98 | ||
@@ -136,10 +132,11 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
136 | 132 | ||
137 | if params.len() == 2 && ¶ms[0] == &mismatch.actual { | 133 | if params.len() == 2 && ¶ms[0] == &mismatch.actual { |
138 | let source_map = self.func.body_source_map(db); | 134 | let source_map = self.func.body_source_map(db); |
139 | let file_id = self.func.source(db).file_id; | ||
140 | 135 | ||
141 | if let Some(expr) = source_map.expr_syntax(id).and_then(|n| n.a()) { | 136 | if let Some(source_ptr) = source_map.expr_syntax(id) { |
142 | self.sink.push(MissingOkInTailExpr { file: file_id, expr }); | 137 | if let Some(expr) = source_ptr.ast.a() { |
138 | self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr }); | ||
139 | } | ||
143 | } | 140 | } |
144 | } | 141 | } |
145 | } | 142 | } |
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs index 752653ad7..c3e589921 100644 --- a/crates/ra_hir/src/lib.rs +++ b/crates/ra_hir/src/lib.rs | |||
@@ -69,7 +69,9 @@ pub use self::{ | |||
69 | resolve::Resolution, | 69 | resolve::Resolution, |
70 | source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer}, | 70 | source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer}, |
71 | source_id::{AstIdMap, ErasedFileAstId}, | 71 | source_id::{AstIdMap, ErasedFileAstId}, |
72 | ty::{display::HirDisplay, ApplicationTy, CallableDef, Substs, TraitRef, Ty, TypeCtor}, | 72 | ty::{ |
73 | display::HirDisplay, ApplicationTy, CallableDef, Substs, TraitRef, Ty, TypeCtor, TypeWalk, | ||
74 | }, | ||
73 | type_ref::Mutability, | 75 | type_ref::Mutability, |
74 | }; | 76 | }; |
75 | 77 | ||
diff --git a/crates/ra_hir/src/marks.rs b/crates/ra_hir/src/marks.rs index 5b15eee90..fe119b97c 100644 --- a/crates/ra_hir/src/marks.rs +++ b/crates/ra_hir/src/marks.rs | |||
@@ -11,4 +11,6 @@ test_utils::marks!( | |||
11 | match_ergonomics_ref | 11 | match_ergonomics_ref |
12 | trait_resolution_on_fn_type | 12 | trait_resolution_on_fn_type |
13 | infer_while_let | 13 | infer_while_let |
14 | macro_rules_from_other_crates_are_visible_with_macro_use | ||
15 | prelude_is_macro_use | ||
14 | ); | 16 | ); |
diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs index 77a44a275..972f0ece5 100644 --- a/crates/ra_hir/src/mock.rs +++ b/crates/ra_hir/src/mock.rs | |||
@@ -157,7 +157,7 @@ impl MockDatabase { | |||
157 | self.set_file_text(file_id, text); | 157 | self.set_file_text(file_id, text); |
158 | self.set_file_relative_path(file_id, rel_path.clone()); | 158 | self.set_file_relative_path(file_id, rel_path.clone()); |
159 | self.set_file_source_root(file_id, source_root_id); | 159 | self.set_file_source_root(file_id, source_root_id); |
160 | source_root.files.insert(rel_path, file_id); | 160 | source_root.insert_file(rel_path, file_id); |
161 | 161 | ||
162 | if is_crate_root { | 162 | if is_crate_root { |
163 | let mut crate_graph = CrateGraph::default(); | 163 | let mut crate_graph = CrateGraph::default(); |
diff --git a/crates/ra_hir/src/nameres.rs b/crates/ra_hir/src/nameres.rs index bbdc606cd..fe90879b6 100644 --- a/crates/ra_hir/src/nameres.rs +++ b/crates/ra_hir/src/nameres.rs | |||
@@ -1,55 +1,56 @@ | |||
1 | /// This module implements import-resolution/macro expansion algorithm. | 1 | //! This module implements import-resolution/macro expansion algorithm. |
2 | /// | 2 | //! |
3 | /// The result of this module is `CrateDefMap`: a data structure which contains: | 3 | //! The result of this module is `CrateDefMap`: a data structure which contains: |
4 | /// | 4 | //! |
5 | /// * a tree of modules for the crate | 5 | //! * a tree of modules for the crate |
6 | /// * for each module, a set of items visible in the module (directly declared | 6 | //! * for each module, a set of items visible in the module (directly declared |
7 | /// or imported) | 7 | //! or imported) |
8 | /// | 8 | //! |
9 | /// Note that `CrateDefMap` contains fully macro expanded code. | 9 | //! Note that `CrateDefMap` contains fully macro expanded code. |
10 | /// | 10 | //! |
11 | /// Computing `CrateDefMap` can be partitioned into several logically | 11 | //! Computing `CrateDefMap` can be partitioned into several logically |
12 | /// independent "phases". The phases are mutually recursive though, there's no | 12 | //! independent "phases". The phases are mutually recursive though, there's no |
13 | /// strict ordering. | 13 | //! strict ordering. |
14 | /// | 14 | //! |
15 | /// ## Collecting RawItems | 15 | //! ## Collecting RawItems |
16 | /// | 16 | //! |
17 | /// This happens in the `raw` module, which parses a single source file into a | 17 | //! This happens in the `raw` module, which parses a single source file into a |
18 | /// set of top-level items. Nested imports are desugared to flat imports in | 18 | //! set of top-level items. Nested imports are desugared to flat imports in |
19 | /// this phase. Macro calls are represented as a triple of (Path, Option<Name>, | 19 | //! this phase. Macro calls are represented as a triple of (Path, Option<Name>, |
20 | /// TokenTree). | 20 | //! TokenTree). |
21 | /// | 21 | //! |
22 | /// ## Collecting Modules | 22 | //! ## Collecting Modules |
23 | /// | 23 | //! |
24 | /// This happens in the `collector` module. In this phase, we recursively walk | 24 | //! This happens in the `collector` module. In this phase, we recursively walk |
25 | /// tree of modules, collect raw items from submodules, populate module scopes | 25 | //! tree of modules, collect raw items from submodules, populate module scopes |
26 | /// with defined items (so, we assign item ids in this phase) and record the set | 26 | //! with defined items (so, we assign item ids in this phase) and record the set |
27 | /// of unresolved imports and macros. | 27 | //! of unresolved imports and macros. |
28 | /// | 28 | //! |
29 | /// While we walk tree of modules, we also record macro_rules definitions and | 29 | //! While we walk tree of modules, we also record macro_rules definitions and |
30 | /// expand calls to macro_rules defined macros. | 30 | //! expand calls to macro_rules defined macros. |
31 | /// | 31 | //! |
32 | /// ## Resolving Imports | 32 | //! ## Resolving Imports |
33 | /// | 33 | //! |
34 | /// We maintain a list of currently unresolved imports. On every iteration, we | 34 | //! We maintain a list of currently unresolved imports. On every iteration, we |
35 | /// try to resolve some imports from this list. If the import is resolved, we | 35 | //! try to resolve some imports from this list. If the import is resolved, we |
36 | /// record it, by adding an item to current module scope and, if necessary, by | 36 | //! record it, by adding an item to current module scope and, if necessary, by |
37 | /// recursively populating glob imports. | 37 | //! recursively populating glob imports. |
38 | /// | 38 | //! |
39 | /// ## Resolving Macros | 39 | //! ## Resolving Macros |
40 | /// | 40 | //! |
41 | /// macro_rules from the same crate use a global mutable namespace. We expand | 41 | //! macro_rules from the same crate use a global mutable namespace. We expand |
42 | /// them immediately, when we collect modules. | 42 | //! them immediately, when we collect modules. |
43 | /// | 43 | //! |
44 | /// Macros from other crates (including proc-macros) can be used with | 44 | //! Macros from other crates (including proc-macros) can be used with |
45 | /// `foo::bar!` syntax. We handle them similarly to imports. There's a list of | 45 | //! `foo::bar!` syntax. We handle them similarly to imports. There's a list of |
46 | /// unexpanded macros. On every iteration, we try to resolve each macro call | 46 | //! unexpanded macros. On every iteration, we try to resolve each macro call |
47 | /// path and, upon success, we run macro expansion and "collect module" phase | 47 | //! path and, upon success, we run macro expansion and "collect module" phase |
48 | /// on the result | 48 | //! on the result |
49 | 49 | ||
50 | mod per_ns; | 50 | mod per_ns; |
51 | mod raw; | 51 | mod raw; |
52 | mod collector; | 52 | mod collector; |
53 | mod mod_resolution; | ||
53 | #[cfg(test)] | 54 | #[cfg(test)] |
54 | mod tests; | 55 | mod tests; |
55 | 56 | ||
@@ -101,6 +102,8 @@ pub struct CrateDefMap { | |||
101 | /// However, do we want to put it as a global variable? | 102 | /// However, do we want to put it as a global variable? |
102 | poison_macros: FxHashSet<MacroDefId>, | 103 | poison_macros: FxHashSet<MacroDefId>, |
103 | 104 | ||
105 | exported_macros: FxHashMap<Name, MacroDefId>, | ||
106 | |||
104 | diagnostics: Vec<DefDiagnostic>, | 107 | diagnostics: Vec<DefDiagnostic>, |
105 | } | 108 | } |
106 | 109 | ||
@@ -245,6 +248,7 @@ impl CrateDefMap { | |||
245 | root, | 248 | root, |
246 | modules, | 249 | modules, |
247 | poison_macros: FxHashSet::default(), | 250 | poison_macros: FxHashSet::default(), |
251 | exported_macros: FxHashMap::default(), | ||
248 | diagnostics: Vec::new(), | 252 | diagnostics: Vec::new(), |
249 | } | 253 | } |
250 | }; | 254 | }; |
diff --git a/crates/ra_hir/src/nameres/collector.rs b/crates/ra_hir/src/nameres/collector.rs index 7da2dcdff..5af26f953 100644 --- a/crates/ra_hir/src/nameres/collector.rs +++ b/crates/ra_hir/src/nameres/collector.rs | |||
@@ -1,9 +1,5 @@ | |||
1 | use std::borrow::Cow; | 1 | use ra_db::FileId; |
2 | use std::sync::Arc; | 2 | use ra_syntax::ast; |
3 | |||
4 | use ra_db::{FileId, SourceRoot}; | ||
5 | use ra_syntax::{ast, SmolStr}; | ||
6 | use relative_path::RelativePathBuf; | ||
7 | use rustc_hash::FxHashMap; | 3 | use rustc_hash::FxHashMap; |
8 | use test_utils::tested_by; | 4 | use test_utils::tested_by; |
9 | 5 | ||
@@ -12,8 +8,10 @@ use crate::{ | |||
12 | ids::{AstItemDef, LocationCtx, MacroCallId, MacroCallLoc, MacroDefId, MacroFileKind}, | 8 | ids::{AstItemDef, LocationCtx, MacroCallId, MacroCallLoc, MacroDefId, MacroFileKind}, |
13 | name::MACRO_RULES, | 9 | name::MACRO_RULES, |
14 | nameres::{ | 10 | nameres::{ |
15 | diagnostics::DefDiagnostic, raw, CrateDefMap, CrateModuleId, ItemOrMacro, ModuleData, | 11 | diagnostics::DefDiagnostic, |
16 | ModuleDef, PerNs, ReachedFixedPoint, Resolution, ResolveMode, | 12 | mod_resolution::{resolve_submodule, ParentModule}, |
13 | raw, CrateDefMap, CrateModuleId, ItemOrMacro, ModuleData, ModuleDef, PerNs, | ||
14 | ReachedFixedPoint, Resolution, ResolveMode, | ||
17 | }, | 15 | }, |
18 | AstId, Const, DefDatabase, Enum, Function, HirFileId, MacroDef, Module, Name, Path, Static, | 16 | AstId, Const, DefDatabase, Enum, Function, HirFileId, MacroDef, Module, Name, Path, Static, |
19 | Struct, Trait, TypeAlias, Union, | 17 | Struct, Trait, TypeAlias, Union, |
@@ -157,11 +155,45 @@ where | |||
157 | // crate root, even if the parent modules is **not** visible. | 155 | // crate root, even if the parent modules is **not** visible. |
158 | if export { | 156 | if export { |
159 | self.update(self.def_map.root, None, &[(name.clone(), def.clone())]); | 157 | self.update(self.def_map.root, None, &[(name.clone(), def.clone())]); |
158 | |||
159 | // Exported macros are collected in crate level ready for | ||
160 | // glob import with `#[macro_use]`. | ||
161 | self.def_map.exported_macros.insert(name.clone(), macro_id); | ||
160 | } | 162 | } |
161 | self.update(module_id, None, &[(name.clone(), def)]); | 163 | self.update(module_id, None, &[(name.clone(), def)]); |
162 | self.global_macro_scope.insert(name, macro_id); | 164 | self.global_macro_scope.insert(name, macro_id); |
163 | } | 165 | } |
164 | 166 | ||
167 | /// Import macros from `#[macro_use] extern crate`. | ||
168 | /// | ||
169 | /// They are non-scoped, and will only be inserted into mutable `global_macro_scope`. | ||
170 | fn import_macros_from_extern_crate(&mut self, import: &raw::ImportData) { | ||
171 | log::debug!( | ||
172 | "importing macros from extern crate: {:?} ({:?})", | ||
173 | import, | ||
174 | self.def_map.edition, | ||
175 | ); | ||
176 | |||
177 | let res = self.def_map.resolve_name_in_extern_prelude( | ||
178 | &import | ||
179 | .path | ||
180 | .as_ident() | ||
181 | .expect("extern crate should have been desugared to one-element path"), | ||
182 | ); | ||
183 | |||
184 | if let Some(ModuleDef::Module(m)) = res.take_types() { | ||
185 | tested_by!(macro_rules_from_other_crates_are_visible_with_macro_use); | ||
186 | self.import_all_macros_exported(m); | ||
187 | } | ||
188 | } | ||
189 | |||
190 | fn import_all_macros_exported(&mut self, module: Module) { | ||
191 | let item_map = self.db.crate_def_map(module.krate); | ||
192 | for (name, ¯o_id) in &item_map.exported_macros { | ||
193 | self.global_macro_scope.insert(name.clone(), macro_id); | ||
194 | } | ||
195 | } | ||
196 | |||
165 | fn resolve_imports(&mut self) -> ReachedFixedPoint { | 197 | fn resolve_imports(&mut self) -> ReachedFixedPoint { |
166 | let mut imports = std::mem::replace(&mut self.unresolved_imports, Vec::new()); | 198 | let mut imports = std::mem::replace(&mut self.unresolved_imports, Vec::new()); |
167 | let mut resolved = Vec::new(); | 199 | let mut resolved = Vec::new(); |
@@ -491,13 +523,31 @@ where | |||
491 | DB: DefDatabase, | 523 | DB: DefDatabase, |
492 | { | 524 | { |
493 | fn collect(&mut self, items: &[raw::RawItem]) { | 525 | fn collect(&mut self, items: &[raw::RawItem]) { |
526 | // Prelude module is always considered to be `#[macro_use]`. | ||
527 | if let Some(prelude_module) = self.def_collector.def_map.prelude { | ||
528 | tested_by!(prelude_is_macro_use); | ||
529 | self.def_collector.import_all_macros_exported(prelude_module); | ||
530 | } | ||
531 | |||
532 | // This should be processed eagerly instead of deferred to resolving. | ||
533 | // `#[macro_use] extern crate` is hoisted to imports macros before collecting | ||
534 | // any other items. | ||
535 | for item in items { | ||
536 | if let raw::RawItem::Import(import_id) = *item { | ||
537 | let import = self.raw_items[import_id].clone(); | ||
538 | if import.is_extern_crate && import.is_macro_use { | ||
539 | self.def_collector.import_macros_from_extern_crate(&import); | ||
540 | } | ||
541 | } | ||
542 | } | ||
543 | |||
494 | for item in items { | 544 | for item in items { |
495 | match *item { | 545 | match *item { |
496 | raw::RawItem::Module(m) => self.collect_module(&self.raw_items[m]), | 546 | raw::RawItem::Module(m) => self.collect_module(&self.raw_items[m]), |
497 | raw::RawItem::Import(import) => self.def_collector.unresolved_imports.push(( | 547 | raw::RawItem::Import(import_id) => self.def_collector.unresolved_imports.push(( |
498 | self.module_id, | 548 | self.module_id, |
499 | import, | 549 | import_id, |
500 | self.raw_items[import].clone(), | 550 | self.raw_items[import_id].clone(), |
501 | )), | 551 | )), |
502 | raw::RawItem::Def(def) => self.define_def(&self.raw_items[def]), | 552 | raw::RawItem::Def(def) => self.define_def(&self.raw_items[def]), |
503 | raw::RawItem::Macro(mac) => self.collect_macro(&self.raw_items[mac]), | 553 | raw::RawItem::Macro(mac) => self.collect_macro(&self.raw_items[mac]), |
@@ -531,7 +581,7 @@ where | |||
531 | name, | 581 | name, |
532 | is_root, | 582 | is_root, |
533 | attr_path.as_ref(), | 583 | attr_path.as_ref(), |
534 | self.parent_module.as_ref(), | 584 | self.parent_module, |
535 | ) { | 585 | ) { |
536 | Ok(file_id) => { | 586 | Ok(file_id) => { |
537 | let module_id = self.push_child_module(name.clone(), ast_id, Some(file_id)); | 587 | let module_id = self.push_child_module(name.clone(), ast_id, Some(file_id)); |
@@ -642,180 +692,6 @@ fn is_macro_rules(path: &Path) -> bool { | |||
642 | path.as_ident() == Some(&MACRO_RULES) | 692 | path.as_ident() == Some(&MACRO_RULES) |
643 | } | 693 | } |
644 | 694 | ||
645 | fn resolve_submodule( | ||
646 | db: &impl DefDatabase, | ||
647 | file_id: HirFileId, | ||
648 | name: &Name, | ||
649 | is_root: bool, | ||
650 | attr_path: Option<&SmolStr>, | ||
651 | parent_module: Option<&ParentModule>, | ||
652 | ) -> Result<FileId, RelativePathBuf> { | ||
653 | let file_id = file_id.original_file(db); | ||
654 | let source_root_id = db.file_source_root(file_id); | ||
655 | let path = db.file_relative_path(file_id); | ||
656 | let root = RelativePathBuf::default(); | ||
657 | let dir_path = path.parent().unwrap_or(&root); | ||
658 | let mod_name = path.file_stem().unwrap_or("unknown"); | ||
659 | |||
660 | let resolve_mode = match (attr_path.filter(|p| !p.is_empty()), parent_module) { | ||
661 | (Some(file_path), Some(parent_module)) => { | ||
662 | let file_path = normalize_attribute_path(file_path); | ||
663 | match parent_module.attribute_path() { | ||
664 | Some(parent_module_attr_path) => { | ||
665 | let path = dir_path | ||
666 | .join(format!( | ||
667 | "{}/{}", | ||
668 | normalize_attribute_path(parent_module_attr_path), | ||
669 | file_path | ||
670 | )) | ||
671 | .normalize(); | ||
672 | ResolutionMode::InlineModuleWithAttributePath( | ||
673 | InsideInlineModuleMode::WithAttributePath(path), | ||
674 | ) | ||
675 | } | ||
676 | None => { | ||
677 | let path = | ||
678 | dir_path.join(format!("{}/{}", parent_module.name, file_path)).normalize(); | ||
679 | ResolutionMode::InsideInlineModule(InsideInlineModuleMode::WithAttributePath( | ||
680 | path, | ||
681 | )) | ||
682 | } | ||
683 | } | ||
684 | } | ||
685 | (None, Some(parent_module)) => match parent_module.attribute_path() { | ||
686 | Some(parent_module_attr_path) => { | ||
687 | let path = dir_path.join(format!( | ||
688 | "{}/{}.rs", | ||
689 | normalize_attribute_path(parent_module_attr_path), | ||
690 | name | ||
691 | )); | ||
692 | ResolutionMode::InlineModuleWithAttributePath(InsideInlineModuleMode::File(path)) | ||
693 | } | ||
694 | None => { | ||
695 | let path = dir_path.join(format!("{}/{}.rs", parent_module.name, name)); | ||
696 | ResolutionMode::InsideInlineModule(InsideInlineModuleMode::File(path)) | ||
697 | } | ||
698 | }, | ||
699 | (Some(file_path), None) => { | ||
700 | let file_path = normalize_attribute_path(file_path); | ||
701 | let path = dir_path.join(file_path.as_ref()).normalize(); | ||
702 | ResolutionMode::OutOfLine(OutOfLineMode::WithAttributePath(path)) | ||
703 | } | ||
704 | _ => { | ||
705 | let is_dir_owner = is_root || mod_name == "mod"; | ||
706 | if is_dir_owner { | ||
707 | let file_mod = dir_path.join(format!("{}.rs", name)); | ||
708 | let dir_mod = dir_path.join(format!("{}/mod.rs", name)); | ||
709 | ResolutionMode::OutOfLine(OutOfLineMode::RootOrModRs { | ||
710 | file: file_mod, | ||
711 | directory: dir_mod, | ||
712 | }) | ||
713 | } else { | ||
714 | let path = dir_path.join(format!("{}/{}.rs", mod_name, name)); | ||
715 | ResolutionMode::OutOfLine(OutOfLineMode::FileInDirectory(path)) | ||
716 | } | ||
717 | } | ||
718 | }; | ||
719 | |||
720 | resolve_mode.resolve(db.source_root(source_root_id)) | ||
721 | } | ||
722 | |||
723 | fn normalize_attribute_path(file_path: &SmolStr) -> Cow<str> { | ||
724 | let current_dir = "./"; | ||
725 | let windows_path_separator = r#"\"#; | ||
726 | let current_dir_normalize = if file_path.starts_with(current_dir) { | ||
727 | &file_path[current_dir.len()..] | ||
728 | } else { | ||
729 | file_path.as_str() | ||
730 | }; | ||
731 | if current_dir_normalize.contains(windows_path_separator) { | ||
732 | Cow::Owned(current_dir_normalize.replace(windows_path_separator, "/")) | ||
733 | } else { | ||
734 | Cow::Borrowed(current_dir_normalize) | ||
735 | } | ||
736 | } | ||
737 | |||
738 | enum OutOfLineMode { | ||
739 | RootOrModRs { file: RelativePathBuf, directory: RelativePathBuf }, | ||
740 | FileInDirectory(RelativePathBuf), | ||
741 | WithAttributePath(RelativePathBuf), | ||
742 | } | ||
743 | |||
744 | impl OutOfLineMode { | ||
745 | pub fn resolve(&self, source_root: Arc<SourceRoot>) -> Result<FileId, RelativePathBuf> { | ||
746 | match self { | ||
747 | OutOfLineMode::RootOrModRs { file, directory } => match source_root.files.get(file) { | ||
748 | None => resolve_simple_path(source_root, directory).map_err(|_| file.clone()), | ||
749 | file_id => resolve_find_result(file_id, file), | ||
750 | }, | ||
751 | OutOfLineMode::FileInDirectory(path) => resolve_simple_path(source_root, path), | ||
752 | OutOfLineMode::WithAttributePath(path) => resolve_simple_path(source_root, path), | ||
753 | } | ||
754 | } | ||
755 | } | ||
756 | |||
757 | enum InsideInlineModuleMode { | ||
758 | File(RelativePathBuf), | ||
759 | WithAttributePath(RelativePathBuf), | ||
760 | } | ||
761 | |||
762 | impl InsideInlineModuleMode { | ||
763 | pub fn resolve(&self, source_root: Arc<SourceRoot>) -> Result<FileId, RelativePathBuf> { | ||
764 | match self { | ||
765 | InsideInlineModuleMode::File(path) => resolve_simple_path(source_root, path), | ||
766 | InsideInlineModuleMode::WithAttributePath(path) => { | ||
767 | resolve_simple_path(source_root, path) | ||
768 | } | ||
769 | } | ||
770 | } | ||
771 | } | ||
772 | |||
773 | enum ResolutionMode { | ||
774 | OutOfLine(OutOfLineMode), | ||
775 | InsideInlineModule(InsideInlineModuleMode), | ||
776 | InlineModuleWithAttributePath(InsideInlineModuleMode), | ||
777 | } | ||
778 | |||
779 | impl ResolutionMode { | ||
780 | pub fn resolve(&self, source_root: Arc<SourceRoot>) -> Result<FileId, RelativePathBuf> { | ||
781 | use self::ResolutionMode::*; | ||
782 | |||
783 | match self { | ||
784 | OutOfLine(mode) => mode.resolve(source_root), | ||
785 | InsideInlineModule(mode) => mode.resolve(source_root), | ||
786 | InlineModuleWithAttributePath(mode) => mode.resolve(source_root), | ||
787 | } | ||
788 | } | ||
789 | } | ||
790 | |||
791 | fn resolve_simple_path( | ||
792 | source_root: Arc<SourceRoot>, | ||
793 | path: &RelativePathBuf, | ||
794 | ) -> Result<FileId, RelativePathBuf> { | ||
795 | resolve_find_result(source_root.files.get(path), path) | ||
796 | } | ||
797 | |||
798 | fn resolve_find_result( | ||
799 | file_id: Option<&FileId>, | ||
800 | path: &RelativePathBuf, | ||
801 | ) -> Result<FileId, RelativePathBuf> { | ||
802 | match file_id { | ||
803 | Some(file_id) => Ok(file_id.clone()), | ||
804 | None => Err(path.clone()), | ||
805 | } | ||
806 | } | ||
807 | |||
808 | struct ParentModule<'a> { | ||
809 | name: &'a Name, | ||
810 | attr_path: Option<&'a SmolStr>, | ||
811 | } | ||
812 | |||
813 | impl<'a> ParentModule<'a> { | ||
814 | pub fn attribute_path(&self) -> Option<&SmolStr> { | ||
815 | self.attr_path.filter(|p| !p.is_empty()) | ||
816 | } | ||
817 | } | ||
818 | |||
819 | #[cfg(test)] | 695 | #[cfg(test)] |
820 | mod tests { | 696 | mod tests { |
821 | use ra_db::SourceDatabase; | 697 | use ra_db::SourceDatabase; |
@@ -860,6 +736,7 @@ mod tests { | |||
860 | root, | 736 | root, |
861 | modules, | 737 | modules, |
862 | poison_macros: FxHashSet::default(), | 738 | poison_macros: FxHashSet::default(), |
739 | exported_macros: FxHashMap::default(), | ||
863 | diagnostics: Vec::new(), | 740 | diagnostics: Vec::new(), |
864 | } | 741 | } |
865 | }; | 742 | }; |
diff --git a/crates/ra_hir/src/nameres/mod_resolution.rs b/crates/ra_hir/src/nameres/mod_resolution.rs new file mode 100644 index 000000000..918c9591f --- /dev/null +++ b/crates/ra_hir/src/nameres/mod_resolution.rs | |||
@@ -0,0 +1,186 @@ | |||
1 | //! This module resolves `mod foo;` declarations to files. | ||
2 | |||
3 | use std::{borrow::Cow, sync::Arc}; | ||
4 | |||
5 | use ra_db::{FileId, SourceRoot}; | ||
6 | use ra_syntax::SmolStr; | ||
7 | use relative_path::RelativePathBuf; | ||
8 | |||
9 | use crate::{DefDatabase, HirFileId, Name}; | ||
10 | |||
11 | #[derive(Clone, Copy)] | ||
12 | pub(super) struct ParentModule<'a> { | ||
13 | pub(super) name: &'a Name, | ||
14 | pub(super) attr_path: Option<&'a SmolStr>, | ||
15 | } | ||
16 | |||
17 | impl<'a> ParentModule<'a> { | ||
18 | fn attribute_path(&self) -> Option<&SmolStr> { | ||
19 | self.attr_path.filter(|p| !p.is_empty()) | ||
20 | } | ||
21 | } | ||
22 | |||
23 | pub(super) fn resolve_submodule( | ||
24 | db: &impl DefDatabase, | ||
25 | file_id: HirFileId, | ||
26 | name: &Name, | ||
27 | is_root: bool, | ||
28 | attr_path: Option<&SmolStr>, | ||
29 | parent_module: Option<ParentModule<'_>>, | ||
30 | ) -> Result<FileId, RelativePathBuf> { | ||
31 | let file_id = file_id.original_file(db); | ||
32 | let source_root_id = db.file_source_root(file_id); | ||
33 | let path = db.file_relative_path(file_id); | ||
34 | let root = RelativePathBuf::default(); | ||
35 | let dir_path = path.parent().unwrap_or(&root); | ||
36 | let mod_name = path.file_stem().unwrap_or("unknown"); | ||
37 | |||
38 | let resolve_mode = match (attr_path.filter(|p| !p.is_empty()), parent_module) { | ||
39 | (Some(file_path), Some(parent_module)) => { | ||
40 | let file_path = normalize_attribute_path(file_path); | ||
41 | match parent_module.attribute_path() { | ||
42 | Some(parent_module_attr_path) => { | ||
43 | let path = dir_path | ||
44 | .join(format!( | ||
45 | "{}/{}", | ||
46 | normalize_attribute_path(parent_module_attr_path), | ||
47 | file_path | ||
48 | )) | ||
49 | .normalize(); | ||
50 | ResolutionMode::InlineModuleWithAttributePath( | ||
51 | InsideInlineModuleMode::WithAttributePath(path), | ||
52 | ) | ||
53 | } | ||
54 | None => { | ||
55 | let path = | ||
56 | dir_path.join(format!("{}/{}", parent_module.name, file_path)).normalize(); | ||
57 | ResolutionMode::InsideInlineModule(InsideInlineModuleMode::WithAttributePath( | ||
58 | path, | ||
59 | )) | ||
60 | } | ||
61 | } | ||
62 | } | ||
63 | (None, Some(parent_module)) => match parent_module.attribute_path() { | ||
64 | Some(parent_module_attr_path) => { | ||
65 | let path = dir_path.join(format!( | ||
66 | "{}/{}.rs", | ||
67 | normalize_attribute_path(parent_module_attr_path), | ||
68 | name | ||
69 | )); | ||
70 | ResolutionMode::InlineModuleWithAttributePath(InsideInlineModuleMode::File(path)) | ||
71 | } | ||
72 | None => { | ||
73 | let path = dir_path.join(format!("{}/{}.rs", parent_module.name, name)); | ||
74 | ResolutionMode::InsideInlineModule(InsideInlineModuleMode::File(path)) | ||
75 | } | ||
76 | }, | ||
77 | (Some(file_path), None) => { | ||
78 | let file_path = normalize_attribute_path(file_path); | ||
79 | let path = dir_path.join(file_path.as_ref()).normalize(); | ||
80 | ResolutionMode::OutOfLine(OutOfLineMode::WithAttributePath(path)) | ||
81 | } | ||
82 | (None, None) => { | ||
83 | let is_dir_owner = is_root || mod_name == "mod"; | ||
84 | if is_dir_owner { | ||
85 | let file_mod = dir_path.join(format!("{}.rs", name)); | ||
86 | let dir_mod = dir_path.join(format!("{}/mod.rs", name)); | ||
87 | ResolutionMode::OutOfLine(OutOfLineMode::RootOrModRs { | ||
88 | file: file_mod, | ||
89 | directory: dir_mod, | ||
90 | }) | ||
91 | } else { | ||
92 | let path = dir_path.join(format!("{}/{}.rs", mod_name, name)); | ||
93 | ResolutionMode::OutOfLine(OutOfLineMode::FileInDirectory(path)) | ||
94 | } | ||
95 | } | ||
96 | }; | ||
97 | |||
98 | resolve_mode.resolve(db.source_root(source_root_id)) | ||
99 | } | ||
100 | |||
101 | fn normalize_attribute_path(file_path: &SmolStr) -> Cow<str> { | ||
102 | let current_dir = "./"; | ||
103 | let windows_path_separator = r#"\"#; | ||
104 | let current_dir_normalize = if file_path.starts_with(current_dir) { | ||
105 | &file_path[current_dir.len()..] | ||
106 | } else { | ||
107 | file_path.as_str() | ||
108 | }; | ||
109 | if current_dir_normalize.contains(windows_path_separator) { | ||
110 | Cow::Owned(current_dir_normalize.replace(windows_path_separator, "/")) | ||
111 | } else { | ||
112 | Cow::Borrowed(current_dir_normalize) | ||
113 | } | ||
114 | } | ||
115 | |||
116 | enum OutOfLineMode { | ||
117 | RootOrModRs { file: RelativePathBuf, directory: RelativePathBuf }, | ||
118 | FileInDirectory(RelativePathBuf), | ||
119 | WithAttributePath(RelativePathBuf), | ||
120 | } | ||
121 | |||
122 | impl OutOfLineMode { | ||
123 | pub fn resolve(&self, source_root: Arc<SourceRoot>) -> Result<FileId, RelativePathBuf> { | ||
124 | match self { | ||
125 | OutOfLineMode::RootOrModRs { file, directory } => { | ||
126 | match source_root.file_by_relative_path(file) { | ||
127 | None => resolve_simple_path(source_root, directory).map_err(|_| file.clone()), | ||
128 | file_id => resolve_find_result(file_id, file), | ||
129 | } | ||
130 | } | ||
131 | OutOfLineMode::FileInDirectory(path) => resolve_simple_path(source_root, path), | ||
132 | OutOfLineMode::WithAttributePath(path) => resolve_simple_path(source_root, path), | ||
133 | } | ||
134 | } | ||
135 | } | ||
136 | |||
137 | enum InsideInlineModuleMode { | ||
138 | File(RelativePathBuf), | ||
139 | WithAttributePath(RelativePathBuf), | ||
140 | } | ||
141 | |||
142 | impl InsideInlineModuleMode { | ||
143 | pub fn resolve(&self, source_root: Arc<SourceRoot>) -> Result<FileId, RelativePathBuf> { | ||
144 | match self { | ||
145 | InsideInlineModuleMode::File(path) => resolve_simple_path(source_root, path), | ||
146 | InsideInlineModuleMode::WithAttributePath(path) => { | ||
147 | resolve_simple_path(source_root, path) | ||
148 | } | ||
149 | } | ||
150 | } | ||
151 | } | ||
152 | |||
153 | enum ResolutionMode { | ||
154 | OutOfLine(OutOfLineMode), | ||
155 | InsideInlineModule(InsideInlineModuleMode), | ||
156 | InlineModuleWithAttributePath(InsideInlineModuleMode), | ||
157 | } | ||
158 | |||
159 | impl ResolutionMode { | ||
160 | pub fn resolve(&self, source_root: Arc<SourceRoot>) -> Result<FileId, RelativePathBuf> { | ||
161 | use self::ResolutionMode::*; | ||
162 | |||
163 | match self { | ||
164 | OutOfLine(mode) => mode.resolve(source_root), | ||
165 | InsideInlineModule(mode) => mode.resolve(source_root), | ||
166 | InlineModuleWithAttributePath(mode) => mode.resolve(source_root), | ||
167 | } | ||
168 | } | ||
169 | } | ||
170 | |||
171 | fn resolve_simple_path( | ||
172 | source_root: Arc<SourceRoot>, | ||
173 | path: &RelativePathBuf, | ||
174 | ) -> Result<FileId, RelativePathBuf> { | ||
175 | resolve_find_result(source_root.file_by_relative_path(path), path) | ||
176 | } | ||
177 | |||
178 | fn resolve_find_result( | ||
179 | file_id: Option<FileId>, | ||
180 | path: &RelativePathBuf, | ||
181 | ) -> Result<FileId, RelativePathBuf> { | ||
182 | match file_id { | ||
183 | Some(file_id) => Ok(file_id.clone()), | ||
184 | None => Err(path.clone()), | ||
185 | } | ||
186 | } | ||
diff --git a/crates/ra_hir/src/nameres/raw.rs b/crates/ra_hir/src/nameres/raw.rs index 2f973359f..129b047eb 100644 --- a/crates/ra_hir/src/nameres/raw.rs +++ b/crates/ra_hir/src/nameres/raw.rs | |||
@@ -154,6 +154,7 @@ pub struct ImportData { | |||
154 | pub(super) is_glob: bool, | 154 | pub(super) is_glob: bool, |
155 | pub(super) is_prelude: bool, | 155 | pub(super) is_prelude: bool, |
156 | pub(super) is_extern_crate: bool, | 156 | pub(super) is_extern_crate: bool, |
157 | pub(super) is_macro_use: bool, | ||
157 | } | 158 | } |
158 | 159 | ||
159 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 160 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
@@ -293,8 +294,14 @@ impl RawItemsCollector { | |||
293 | let is_prelude = use_item.has_atom_attr("prelude_import"); | 294 | let is_prelude = use_item.has_atom_attr("prelude_import"); |
294 | 295 | ||
295 | Path::expand_use_item(&use_item, |path, use_tree, is_glob, alias| { | 296 | Path::expand_use_item(&use_item, |path, use_tree, is_glob, alias| { |
296 | let import_data = | 297 | let import_data = ImportData { |
297 | ImportData { path, alias, is_glob, is_prelude, is_extern_crate: false }; | 298 | path, |
299 | alias, | ||
300 | is_glob, | ||
301 | is_prelude, | ||
302 | is_extern_crate: false, | ||
303 | is_macro_use: false, | ||
304 | }; | ||
298 | self.push_import(current_module, import_data, Either::A(AstPtr::new(use_tree))); | 305 | self.push_import(current_module, import_data, Either::A(AstPtr::new(use_tree))); |
299 | }) | 306 | }) |
300 | } | 307 | } |
@@ -307,12 +314,14 @@ impl RawItemsCollector { | |||
307 | if let Some(name_ref) = extern_crate.name_ref() { | 314 | if let Some(name_ref) = extern_crate.name_ref() { |
308 | let path = Path::from_name_ref(&name_ref); | 315 | let path = Path::from_name_ref(&name_ref); |
309 | let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name()); | 316 | let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name()); |
317 | let is_macro_use = extern_crate.has_atom_attr("macro_use"); | ||
310 | let import_data = ImportData { | 318 | let import_data = ImportData { |
311 | path, | 319 | path, |
312 | alias, | 320 | alias, |
313 | is_glob: false, | 321 | is_glob: false, |
314 | is_prelude: false, | 322 | is_prelude: false, |
315 | is_extern_crate: true, | 323 | is_extern_crate: true, |
324 | is_macro_use, | ||
316 | }; | 325 | }; |
317 | self.push_import(current_module, import_data, Either::B(AstPtr::new(&extern_crate))); | 326 | self.push_import(current_module, import_data, Either::B(AstPtr::new(&extern_crate))); |
318 | } | 327 | } |
diff --git a/crates/ra_hir/src/nameres/tests.rs b/crates/ra_hir/src/nameres/tests.rs index c1dbad283..4ff897ca5 100644 --- a/crates/ra_hir/src/nameres/tests.rs +++ b/crates/ra_hir/src/nameres/tests.rs | |||
@@ -2,7 +2,7 @@ mod macros; | |||
2 | mod globs; | 2 | mod globs; |
3 | mod incremental; | 3 | mod incremental; |
4 | mod primitives; | 4 | mod primitives; |
5 | mod mods; | 5 | mod mod_resolution; |
6 | 6 | ||
7 | use std::sync::Arc; | 7 | use std::sync::Arc; |
8 | 8 | ||
diff --git a/crates/ra_hir/src/nameres/tests/macros.rs b/crates/ra_hir/src/nameres/tests/macros.rs index 631df2cef..aece1515b 100644 --- a/crates/ra_hir/src/nameres/tests/macros.rs +++ b/crates/ra_hir/src/nameres/tests/macros.rs | |||
@@ -99,14 +99,14 @@ fn macro_rules_from_other_crates_are_visible() { | |||
99 | fn unexpanded_macro_should_expand_by_fixedpoint_loop() { | 99 | fn unexpanded_macro_should_expand_by_fixedpoint_loop() { |
100 | let map = def_map_with_crate_graph( | 100 | let map = def_map_with_crate_graph( |
101 | " | 101 | " |
102 | //- /main.rs | 102 | //- /main.rs |
103 | macro_rules! baz { | 103 | macro_rules! baz { |
104 | () => { | 104 | () => { |
105 | use foo::bar; | 105 | use foo::bar; |
106 | } | 106 | } |
107 | } | 107 | } |
108 | 108 | ||
109 | foo!(); | 109 | foo!(); |
110 | bar!(); | 110 | bar!(); |
111 | baz!(); | 111 | baz!(); |
112 | 112 | ||
@@ -114,7 +114,7 @@ fn unexpanded_macro_should_expand_by_fixedpoint_loop() { | |||
114 | #[macro_export] | 114 | #[macro_export] |
115 | macro_rules! foo { | 115 | macro_rules! foo { |
116 | () => { | 116 | () => { |
117 | struct Foo { field: u32 } | 117 | struct Foo { field: u32 } |
118 | } | 118 | } |
119 | } | 119 | } |
120 | #[macro_export] | 120 | #[macro_export] |
@@ -137,3 +137,114 @@ fn unexpanded_macro_should_expand_by_fixedpoint_loop() { | |||
137 | â‹®foo: m | 137 | â‹®foo: m |
138 | "###); | 138 | "###); |
139 | } | 139 | } |
140 | |||
141 | #[test] | ||
142 | fn macro_rules_from_other_crates_are_visible_with_macro_use() { | ||
143 | covers!(macro_rules_from_other_crates_are_visible_with_macro_use); | ||
144 | let map = def_map_with_crate_graph( | ||
145 | " | ||
146 | //- /main.rs | ||
147 | structs!(Foo); | ||
148 | structs_priv!(Bar); | ||
149 | structs_not_exported!(MacroNotResolved1); | ||
150 | crate::structs!(MacroNotResolved2); | ||
151 | |||
152 | mod bar; | ||
153 | |||
154 | #[macro_use] | ||
155 | extern crate foo; | ||
156 | |||
157 | //- /bar.rs | ||
158 | structs!(Baz); | ||
159 | crate::structs!(MacroNotResolved3); | ||
160 | |||
161 | //- /lib.rs | ||
162 | #[macro_export] | ||
163 | macro_rules! structs { | ||
164 | ($i:ident) => { struct $i; } | ||
165 | } | ||
166 | |||
167 | macro_rules! structs_not_exported { | ||
168 | ($i:ident) => { struct $i; } | ||
169 | } | ||
170 | |||
171 | mod priv_mod { | ||
172 | #[macro_export] | ||
173 | macro_rules! structs_priv { | ||
174 | ($i:ident) => { struct $i; } | ||
175 | } | ||
176 | } | ||
177 | ", | ||
178 | crate_graph! { | ||
179 | "main": ("/main.rs", ["foo"]), | ||
180 | "foo": ("/lib.rs", []), | ||
181 | }, | ||
182 | ); | ||
183 | assert_snapshot!(map, @r###" | ||
184 | â‹®crate | ||
185 | â‹®Bar: t v | ||
186 | â‹®Foo: t v | ||
187 | â‹®bar: t | ||
188 | â‹®foo: t | ||
189 | â‹® | ||
190 | â‹®crate::bar | ||
191 | â‹®Baz: t v | ||
192 | "###); | ||
193 | } | ||
194 | |||
195 | #[test] | ||
196 | fn prelude_is_macro_use() { | ||
197 | covers!(prelude_is_macro_use); | ||
198 | let map = def_map_with_crate_graph( | ||
199 | " | ||
200 | //- /main.rs | ||
201 | structs!(Foo); | ||
202 | structs_priv!(Bar); | ||
203 | structs_outside!(Out); | ||
204 | crate::structs!(MacroNotResolved2); | ||
205 | |||
206 | mod bar; | ||
207 | |||
208 | //- /bar.rs | ||
209 | structs!(Baz); | ||
210 | crate::structs!(MacroNotResolved3); | ||
211 | |||
212 | //- /lib.rs | ||
213 | #[prelude_import] | ||
214 | use self::prelude::*; | ||
215 | |||
216 | mod prelude { | ||
217 | #[macro_export] | ||
218 | macro_rules! structs { | ||
219 | ($i:ident) => { struct $i; } | ||
220 | } | ||
221 | |||
222 | mod priv_mod { | ||
223 | #[macro_export] | ||
224 | macro_rules! structs_priv { | ||
225 | ($i:ident) => { struct $i; } | ||
226 | } | ||
227 | } | ||
228 | } | ||
229 | |||
230 | #[macro_export] | ||
231 | macro_rules! structs_outside { | ||
232 | ($i:ident) => { struct $i; } | ||
233 | } | ||
234 | ", | ||
235 | crate_graph! { | ||
236 | "main": ("/main.rs", ["foo"]), | ||
237 | "foo": ("/lib.rs", []), | ||
238 | }, | ||
239 | ); | ||
240 | assert_snapshot!(map, @r###" | ||
241 | â‹®crate | ||
242 | â‹®Bar: t v | ||
243 | â‹®Foo: t v | ||
244 | â‹®Out: t v | ||
245 | â‹®bar: t | ||
246 | â‹® | ||
247 | â‹®crate::bar | ||
248 | â‹®Baz: t v | ||
249 | "###); | ||
250 | } | ||
diff --git a/crates/ra_hir/src/nameres/tests/mods.rs b/crates/ra_hir/src/nameres/tests/mod_resolution.rs index 4f8398460..4f8398460 100644 --- a/crates/ra_hir/src/nameres/tests/mods.rs +++ b/crates/ra_hir/src/nameres/tests/mod_resolution.rs | |||
diff --git a/crates/ra_hir/src/path.rs b/crates/ra_hir/src/path.rs index 5ee71e421..24316fc91 100644 --- a/crates/ra_hir/src/path.rs +++ b/crates/ra_hir/src/path.rs | |||
@@ -31,7 +31,8 @@ pub struct GenericArgs { | |||
31 | /// Self type. Otherwise, when we have a path `Trait<X, Y>`, the Self type | 31 | /// Self type. Otherwise, when we have a path `Trait<X, Y>`, the Self type |
32 | /// is left out. | 32 | /// is left out. |
33 | pub has_self_type: bool, | 33 | pub has_self_type: bool, |
34 | // someday also bindings | 34 | /// Associated type bindings like in `Iterator<Item = T>`. |
35 | pub bindings: Vec<(Name, TypeRef)>, | ||
35 | } | 36 | } |
36 | 37 | ||
37 | /// A single generic argument. | 38 | /// A single generic argument. |
@@ -170,16 +171,24 @@ impl GenericArgs { | |||
170 | let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); | 171 | let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); |
171 | args.push(GenericArg::Type(type_ref)); | 172 | args.push(GenericArg::Type(type_ref)); |
172 | } | 173 | } |
173 | // lifetimes and assoc type args ignored for now | 174 | // lifetimes ignored for now |
174 | if !args.is_empty() { | 175 | let mut bindings = Vec::new(); |
175 | Some(GenericArgs { args, has_self_type: false }) | 176 | for assoc_type_arg in node.assoc_type_args() { |
176 | } else { | 177 | if let Some(name_ref) = assoc_type_arg.name_ref() { |
178 | let name = name_ref.as_name(); | ||
179 | let type_ref = TypeRef::from_ast_opt(assoc_type_arg.type_ref()); | ||
180 | bindings.push((name, type_ref)); | ||
181 | } | ||
182 | } | ||
183 | if args.is_empty() && bindings.is_empty() { | ||
177 | None | 184 | None |
185 | } else { | ||
186 | Some(GenericArgs { args, has_self_type: false, bindings }) | ||
178 | } | 187 | } |
179 | } | 188 | } |
180 | 189 | ||
181 | pub(crate) fn empty() -> GenericArgs { | 190 | pub(crate) fn empty() -> GenericArgs { |
182 | GenericArgs { args: Vec::new(), has_self_type: false } | 191 | GenericArgs { args: Vec::new(), has_self_type: false, bindings: Vec::new() } |
183 | } | 192 | } |
184 | } | 193 | } |
185 | 194 | ||
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs index e5f4d11a6..fdbe5e8b0 100644 --- a/crates/ra_hir/src/source_binder.rs +++ b/crates/ra_hir/src/source_binder.rs | |||
@@ -228,7 +228,7 @@ impl SourceAnalyzer { | |||
228 | let scopes = db.expr_scopes(def); | 228 | let scopes = db.expr_scopes(def); |
229 | let scope = match offset { | 229 | let scope = match offset { |
230 | None => scope_for(&scopes, &source_map, &node), | 230 | None => scope_for(&scopes, &source_map, &node), |
231 | Some(offset) => scope_for_offset(&scopes, &source_map, offset), | 231 | Some(offset) => scope_for_offset(&scopes, &source_map, file_id.into(), offset), |
232 | }; | 232 | }; |
233 | let resolver = expr::resolver_for_scope(def.body(db), db, scope); | 233 | let resolver = expr::resolver_for_scope(def.body(db), db, scope); |
234 | SourceAnalyzer { | 234 | SourceAnalyzer { |
@@ -330,6 +330,7 @@ impl SourceAnalyzer { | |||
330 | .body_source_map | 330 | .body_source_map |
331 | .as_ref()? | 331 | .as_ref()? |
332 | .pat_syntax(it)? | 332 | .pat_syntax(it)? |
333 | .ast // FIXME: ignoring file_id here is definitely wrong | ||
333 | .map_a(|ptr| ptr.cast::<ast::BindPat>().unwrap()); | 334 | .map_a(|ptr| ptr.cast::<ast::BindPat>().unwrap()); |
334 | PathResolution::LocalBinding(pat_ptr) | 335 | PathResolution::LocalBinding(pat_ptr) |
335 | } | 336 | } |
@@ -354,7 +355,7 @@ impl SourceAnalyzer { | |||
354 | ret.and_then(|entry| { | 355 | ret.and_then(|entry| { |
355 | Some(ScopeEntryWithSyntax { | 356 | Some(ScopeEntryWithSyntax { |
356 | name: entry.name().clone(), | 357 | name: entry.name().clone(), |
357 | ptr: source_map.pat_syntax(entry.pat())?, | 358 | ptr: source_map.pat_syntax(entry.pat())?.ast, |
358 | }) | 359 | }) |
359 | }) | 360 | }) |
360 | } | 361 | } |
@@ -470,20 +471,27 @@ fn scope_for( | |||
470 | fn scope_for_offset( | 471 | fn scope_for_offset( |
471 | scopes: &ExprScopes, | 472 | scopes: &ExprScopes, |
472 | source_map: &BodySourceMap, | 473 | source_map: &BodySourceMap, |
474 | file_id: HirFileId, | ||
473 | offset: TextUnit, | 475 | offset: TextUnit, |
474 | ) -> Option<ScopeId> { | 476 | ) -> Option<ScopeId> { |
475 | scopes | 477 | scopes |
476 | .scope_by_expr() | 478 | .scope_by_expr() |
477 | .iter() | 479 | .iter() |
478 | .filter_map(|(id, scope)| { | 480 | .filter_map(|(id, scope)| { |
479 | let ast_ptr = source_map.expr_syntax(*id)?.a()?; | 481 | let source = source_map.expr_syntax(*id)?; |
480 | Some((ast_ptr.syntax_node_ptr(), scope)) | 482 | // FIXME: correctly handle macro expansion |
483 | if source.file_id != file_id { | ||
484 | return None; | ||
485 | } | ||
486 | let syntax_node_ptr = | ||
487 | source.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); | ||
488 | Some((syntax_node_ptr, scope)) | ||
481 | }) | 489 | }) |
482 | // find containing scope | 490 | // find containing scope |
483 | .min_by_key(|(ptr, _scope)| { | 491 | .min_by_key(|(ptr, _scope)| { |
484 | (!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len()) | 492 | (!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len()) |
485 | }) | 493 | }) |
486 | .map(|(ptr, scope)| adjust(scopes, source_map, ptr, offset).unwrap_or(*scope)) | 494 | .map(|(ptr, scope)| adjust(scopes, source_map, ptr, file_id, offset).unwrap_or(*scope)) |
487 | } | 495 | } |
488 | 496 | ||
489 | // XXX: during completion, cursor might be outside of any particular | 497 | // XXX: during completion, cursor might be outside of any particular |
@@ -492,6 +500,7 @@ fn adjust( | |||
492 | scopes: &ExprScopes, | 500 | scopes: &ExprScopes, |
493 | source_map: &BodySourceMap, | 501 | source_map: &BodySourceMap, |
494 | ptr: SyntaxNodePtr, | 502 | ptr: SyntaxNodePtr, |
503 | file_id: HirFileId, | ||
495 | offset: TextUnit, | 504 | offset: TextUnit, |
496 | ) -> Option<ScopeId> { | 505 | ) -> Option<ScopeId> { |
497 | let r = ptr.range(); | 506 | let r = ptr.range(); |
@@ -499,8 +508,14 @@ fn adjust( | |||
499 | .scope_by_expr() | 508 | .scope_by_expr() |
500 | .iter() | 509 | .iter() |
501 | .filter_map(|(id, scope)| { | 510 | .filter_map(|(id, scope)| { |
502 | let ast_ptr = source_map.expr_syntax(*id)?.a()?; | 511 | let source = source_map.expr_syntax(*id)?; |
503 | Some((ast_ptr.syntax_node_ptr(), scope)) | 512 | // FIXME: correctly handle macro expansion |
513 | if source.file_id != file_id { | ||
514 | return None; | ||
515 | } | ||
516 | let syntax_node_ptr = | ||
517 | source.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); | ||
518 | Some((syntax_node_ptr, scope)) | ||
504 | }) | 519 | }) |
505 | .map(|(ptr, scope)| (ptr.range(), scope)) | 520 | .map(|(ptr, scope)| (ptr.range(), scope)) |
506 | .filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r); | 521 | .filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r); |
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index b54c80318..a3df08827 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs | |||
@@ -120,12 +120,44 @@ pub struct ProjectionTy { | |||
120 | pub parameters: Substs, | 120 | pub parameters: Substs, |
121 | } | 121 | } |
122 | 122 | ||
123 | impl ProjectionTy { | ||
124 | pub fn trait_ref(&self, db: &impl HirDatabase) -> TraitRef { | ||
125 | TraitRef { | ||
126 | trait_: self | ||
127 | .associated_ty | ||
128 | .parent_trait(db) | ||
129 | .expect("projection ty without parent trait"), | ||
130 | substs: self.parameters.clone(), | ||
131 | } | ||
132 | } | ||
133 | } | ||
134 | |||
135 | impl TypeWalk for ProjectionTy { | ||
136 | fn walk(&self, f: &mut impl FnMut(&Ty)) { | ||
137 | self.parameters.walk(f); | ||
138 | } | ||
139 | |||
140 | fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { | ||
141 | self.parameters.walk_mut(f); | ||
142 | } | ||
143 | } | ||
144 | |||
123 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] | 145 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] |
124 | pub struct UnselectedProjectionTy { | 146 | pub struct UnselectedProjectionTy { |
125 | pub type_name: Name, | 147 | pub type_name: Name, |
126 | pub parameters: Substs, | 148 | pub parameters: Substs, |
127 | } | 149 | } |
128 | 150 | ||
151 | impl TypeWalk for UnselectedProjectionTy { | ||
152 | fn walk(&self, f: &mut impl FnMut(&Ty)) { | ||
153 | self.parameters.walk(f); | ||
154 | } | ||
155 | |||
156 | fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { | ||
157 | self.parameters.walk_mut(f); | ||
158 | } | ||
159 | } | ||
160 | |||
129 | /// A type. | 161 | /// A type. |
130 | /// | 162 | /// |
131 | /// See also the `TyKind` enum in rustc (librustc/ty/sty.rs), which represents | 163 | /// See also the `TyKind` enum in rustc (librustc/ty/sty.rs), which represents |
@@ -282,20 +314,14 @@ impl TraitRef { | |||
282 | pub fn self_ty(&self) -> &Ty { | 314 | pub fn self_ty(&self) -> &Ty { |
283 | &self.substs[0] | 315 | &self.substs[0] |
284 | } | 316 | } |
317 | } | ||
285 | 318 | ||
286 | pub fn subst(mut self, substs: &Substs) -> TraitRef { | 319 | impl TypeWalk for TraitRef { |
287 | self.substs.walk_mut(&mut |ty_mut| { | 320 | fn walk(&self, f: &mut impl FnMut(&Ty)) { |
288 | let ty = mem::replace(ty_mut, Ty::Unknown); | ||
289 | *ty_mut = ty.subst(substs); | ||
290 | }); | ||
291 | self | ||
292 | } | ||
293 | |||
294 | pub fn walk(&self, f: &mut impl FnMut(&Ty)) { | ||
295 | self.substs.walk(f); | 321 | self.substs.walk(f); |
296 | } | 322 | } |
297 | 323 | ||
298 | pub fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { | 324 | fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { |
299 | self.substs.walk_mut(f); | 325 | self.substs.walk_mut(f); |
300 | } | 326 | } |
301 | } | 327 | } |
@@ -306,6 +332,8 @@ impl TraitRef { | |||
306 | pub enum GenericPredicate { | 332 | pub enum GenericPredicate { |
307 | /// The given trait needs to be implemented for its type parameters. | 333 | /// The given trait needs to be implemented for its type parameters. |
308 | Implemented(TraitRef), | 334 | Implemented(TraitRef), |
335 | /// An associated type bindings like in `Iterator<Item = T>`. | ||
336 | Projection(ProjectionPredicate), | ||
309 | /// We couldn't resolve the trait reference. (If some type parameters can't | 337 | /// We couldn't resolve the trait reference. (If some type parameters can't |
310 | /// be resolved, they will just be Unknown). | 338 | /// be resolved, they will just be Unknown). |
311 | Error, | 339 | Error, |
@@ -319,25 +347,35 @@ impl GenericPredicate { | |||
319 | } | 347 | } |
320 | } | 348 | } |
321 | 349 | ||
322 | pub fn subst(self, substs: &Substs) -> GenericPredicate { | 350 | pub fn is_implemented(&self) -> bool { |
323 | match self { | 351 | match self { |
324 | GenericPredicate::Implemented(trait_ref) => { | 352 | GenericPredicate::Implemented(_) => true, |
325 | GenericPredicate::Implemented(trait_ref.subst(substs)) | 353 | _ => false, |
326 | } | ||
327 | GenericPredicate::Error => self, | ||
328 | } | 354 | } |
329 | } | 355 | } |
330 | 356 | ||
331 | pub fn walk(&self, f: &mut impl FnMut(&Ty)) { | 357 | pub fn trait_ref(&self, db: &impl HirDatabase) -> Option<TraitRef> { |
358 | match self { | ||
359 | GenericPredicate::Implemented(tr) => Some(tr.clone()), | ||
360 | GenericPredicate::Projection(proj) => Some(proj.projection_ty.trait_ref(db)), | ||
361 | GenericPredicate::Error => None, | ||
362 | } | ||
363 | } | ||
364 | } | ||
365 | |||
366 | impl TypeWalk for GenericPredicate { | ||
367 | fn walk(&self, f: &mut impl FnMut(&Ty)) { | ||
332 | match self { | 368 | match self { |
333 | GenericPredicate::Implemented(trait_ref) => trait_ref.walk(f), | 369 | GenericPredicate::Implemented(trait_ref) => trait_ref.walk(f), |
370 | GenericPredicate::Projection(projection_pred) => projection_pred.walk(f), | ||
334 | GenericPredicate::Error => {} | 371 | GenericPredicate::Error => {} |
335 | } | 372 | } |
336 | } | 373 | } |
337 | 374 | ||
338 | pub fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { | 375 | fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { |
339 | match self { | 376 | match self { |
340 | GenericPredicate::Implemented(trait_ref) => trait_ref.walk_mut(f), | 377 | GenericPredicate::Implemented(trait_ref) => trait_ref.walk_mut(f), |
378 | GenericPredicate::Projection(projection_pred) => projection_pred.walk_mut(f), | ||
341 | GenericPredicate::Error => {} | 379 | GenericPredicate::Error => {} |
342 | } | 380 | } |
343 | } | 381 | } |
@@ -378,16 +416,16 @@ impl FnSig { | |||
378 | pub fn ret(&self) -> &Ty { | 416 | pub fn ret(&self) -> &Ty { |
379 | &self.params_and_return[self.params_and_return.len() - 1] | 417 | &self.params_and_return[self.params_and_return.len() - 1] |
380 | } | 418 | } |
419 | } | ||
381 | 420 | ||
382 | /// Applies the given substitutions to all types in this signature and | 421 | impl TypeWalk for FnSig { |
383 | /// returns the result. | 422 | fn walk(&self, f: &mut impl FnMut(&Ty)) { |
384 | pub fn subst(&self, substs: &Substs) -> FnSig { | 423 | for t in self.params_and_return.iter() { |
385 | let result: Vec<_> = | 424 | t.walk(f); |
386 | self.params_and_return.iter().map(|ty| ty.clone().subst(substs)).collect(); | 425 | } |
387 | FnSig { params_and_return: result.into() } | ||
388 | } | 426 | } |
389 | 427 | ||
390 | pub fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { | 428 | fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { |
391 | // Without an Arc::make_mut_slice, we can't avoid the clone here: | 429 | // Without an Arc::make_mut_slice, we can't avoid the clone here: |
392 | let mut v: Vec<_> = self.params_and_return.iter().cloned().collect(); | 430 | let mut v: Vec<_> = self.params_and_return.iter().cloned().collect(); |
393 | for t in &mut v { | 431 | for t in &mut v { |
@@ -411,64 +449,6 @@ impl Ty { | |||
411 | Ty::apply(TypeCtor::Tuple { cardinality: 0 }, Substs::empty()) | 449 | Ty::apply(TypeCtor::Tuple { cardinality: 0 }, Substs::empty()) |
412 | } | 450 | } |
413 | 451 | ||
414 | pub fn walk(&self, f: &mut impl FnMut(&Ty)) { | ||
415 | match self { | ||
416 | Ty::Apply(a_ty) => { | ||
417 | for t in a_ty.parameters.iter() { | ||
418 | t.walk(f); | ||
419 | } | ||
420 | } | ||
421 | Ty::Projection(p_ty) => { | ||
422 | for t in p_ty.parameters.iter() { | ||
423 | t.walk(f); | ||
424 | } | ||
425 | } | ||
426 | Ty::UnselectedProjection(p_ty) => { | ||
427 | for t in p_ty.parameters.iter() { | ||
428 | t.walk(f); | ||
429 | } | ||
430 | } | ||
431 | Ty::Dyn(predicates) | Ty::Opaque(predicates) => { | ||
432 | for p in predicates.iter() { | ||
433 | p.walk(f); | ||
434 | } | ||
435 | } | ||
436 | Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} | ||
437 | } | ||
438 | f(self); | ||
439 | } | ||
440 | |||
441 | fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { | ||
442 | match self { | ||
443 | Ty::Apply(a_ty) => { | ||
444 | a_ty.parameters.walk_mut(f); | ||
445 | } | ||
446 | Ty::Projection(p_ty) => { | ||
447 | p_ty.parameters.walk_mut(f); | ||
448 | } | ||
449 | Ty::UnselectedProjection(p_ty) => { | ||
450 | p_ty.parameters.walk_mut(f); | ||
451 | } | ||
452 | Ty::Dyn(predicates) | Ty::Opaque(predicates) => { | ||
453 | let mut v: Vec<_> = predicates.iter().cloned().collect(); | ||
454 | for p in &mut v { | ||
455 | p.walk_mut(f); | ||
456 | } | ||
457 | *predicates = v.into(); | ||
458 | } | ||
459 | Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} | ||
460 | } | ||
461 | f(self); | ||
462 | } | ||
463 | |||
464 | fn fold(mut self, f: &mut impl FnMut(Ty) -> Ty) -> Ty { | ||
465 | self.walk_mut(&mut |ty_mut| { | ||
466 | let ty = mem::replace(ty_mut, Ty::Unknown); | ||
467 | *ty_mut = f(ty); | ||
468 | }); | ||
469 | self | ||
470 | } | ||
471 | |||
472 | pub fn as_reference(&self) -> Option<(&Ty, Mutability)> { | 452 | pub fn as_reference(&self) -> Option<(&Ty, Mutability)> { |
473 | match self { | 453 | match self { |
474 | Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(mutability), parameters }) => { | 454 | Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(mutability), parameters }) => { |
@@ -544,10 +524,53 @@ impl Ty { | |||
544 | } | 524 | } |
545 | } | 525 | } |
546 | 526 | ||
527 | /// Returns the type parameters of this type if it has some (i.e. is an ADT | ||
528 | /// or function); so if `self` is `Option<u32>`, this returns the `u32`. | ||
529 | pub fn substs(&self) -> Option<Substs> { | ||
530 | match self { | ||
531 | Ty::Apply(ApplicationTy { parameters, .. }) => Some(parameters.clone()), | ||
532 | _ => None, | ||
533 | } | ||
534 | } | ||
535 | |||
536 | /// If this is an `impl Trait` or `dyn Trait`, returns that trait. | ||
537 | pub fn inherent_trait(&self) -> Option<Trait> { | ||
538 | match self { | ||
539 | Ty::Dyn(predicates) | Ty::Opaque(predicates) => { | ||
540 | predicates.iter().find_map(|pred| match pred { | ||
541 | GenericPredicate::Implemented(tr) => Some(tr.trait_), | ||
542 | _ => None, | ||
543 | }) | ||
544 | } | ||
545 | _ => None, | ||
546 | } | ||
547 | } | ||
548 | } | ||
549 | |||
550 | /// This allows walking structures that contain types to do something with those | ||
551 | /// types, similar to Chalk's `Fold` trait. | ||
552 | pub trait TypeWalk { | ||
553 | fn walk(&self, f: &mut impl FnMut(&Ty)); | ||
554 | fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)); | ||
555 | |||
556 | fn fold(mut self, f: &mut impl FnMut(Ty) -> Ty) -> Self | ||
557 | where | ||
558 | Self: Sized, | ||
559 | { | ||
560 | self.walk_mut(&mut |ty_mut| { | ||
561 | let ty = mem::replace(ty_mut, Ty::Unknown); | ||
562 | *ty_mut = f(ty); | ||
563 | }); | ||
564 | self | ||
565 | } | ||
566 | |||
547 | /// Replaces type parameters in this type using the given `Substs`. (So e.g. | 567 | /// Replaces type parameters in this type using the given `Substs`. (So e.g. |
548 | /// if `self` is `&[T]`, where type parameter T has index 0, and the | 568 | /// if `self` is `&[T]`, where type parameter T has index 0, and the |
549 | /// `Substs` contain `u32` at index 0, we'll have `&[u32]` afterwards.) | 569 | /// `Substs` contain `u32` at index 0, we'll have `&[u32]` afterwards.) |
550 | pub fn subst(self, substs: &Substs) -> Ty { | 570 | fn subst(self, substs: &Substs) -> Self |
571 | where | ||
572 | Self: Sized, | ||
573 | { | ||
551 | self.fold(&mut |ty| match ty { | 574 | self.fold(&mut |ty| match ty { |
552 | Ty::Param { idx, name } => { | 575 | Ty::Param { idx, name } => { |
553 | substs.get(idx as usize).cloned().unwrap_or(Ty::Param { idx, name }) | 576 | substs.get(idx as usize).cloned().unwrap_or(Ty::Param { idx, name }) |
@@ -557,24 +580,21 @@ impl Ty { | |||
557 | } | 580 | } |
558 | 581 | ||
559 | /// Substitutes `Ty::Bound` vars (as opposed to type parameters). | 582 | /// Substitutes `Ty::Bound` vars (as opposed to type parameters). |
560 | pub fn subst_bound_vars(self, substs: &Substs) -> Ty { | 583 | fn subst_bound_vars(self, substs: &Substs) -> Self |
584 | where | ||
585 | Self: Sized, | ||
586 | { | ||
561 | self.fold(&mut |ty| match ty { | 587 | self.fold(&mut |ty| match ty { |
562 | Ty::Bound(idx) => substs.get(idx as usize).cloned().unwrap_or_else(|| Ty::Bound(idx)), | 588 | Ty::Bound(idx) => substs.get(idx as usize).cloned().unwrap_or_else(|| Ty::Bound(idx)), |
563 | ty => ty, | 589 | ty => ty, |
564 | }) | 590 | }) |
565 | } | 591 | } |
566 | 592 | ||
567 | /// Returns the type parameters of this type if it has some (i.e. is an ADT | ||
568 | /// or function); so if `self` is `Option<u32>`, this returns the `u32`. | ||
569 | pub fn substs(&self) -> Option<Substs> { | ||
570 | match self { | ||
571 | Ty::Apply(ApplicationTy { parameters, .. }) => Some(parameters.clone()), | ||
572 | _ => None, | ||
573 | } | ||
574 | } | ||
575 | |||
576 | /// Shifts up `Ty::Bound` vars by `n`. | 593 | /// Shifts up `Ty::Bound` vars by `n`. |
577 | pub fn shift_bound_vars(self, n: i32) -> Ty { | 594 | fn shift_bound_vars(self, n: i32) -> Self |
595 | where | ||
596 | Self: Sized, | ||
597 | { | ||
578 | self.fold(&mut |ty| match ty { | 598 | self.fold(&mut |ty| match ty { |
579 | Ty::Bound(idx) => { | 599 | Ty::Bound(idx) => { |
580 | assert!(idx as i32 >= -n); | 600 | assert!(idx as i32 >= -n); |
@@ -583,18 +603,57 @@ impl Ty { | |||
583 | ty => ty, | 603 | ty => ty, |
584 | }) | 604 | }) |
585 | } | 605 | } |
606 | } | ||
586 | 607 | ||
587 | /// If this is an `impl Trait` or `dyn Trait`, returns that trait. | 608 | impl TypeWalk for Ty { |
588 | pub fn inherent_trait(&self) -> Option<Trait> { | 609 | fn walk(&self, f: &mut impl FnMut(&Ty)) { |
610 | match self { | ||
611 | Ty::Apply(a_ty) => { | ||
612 | for t in a_ty.parameters.iter() { | ||
613 | t.walk(f); | ||
614 | } | ||
615 | } | ||
616 | Ty::Projection(p_ty) => { | ||
617 | for t in p_ty.parameters.iter() { | ||
618 | t.walk(f); | ||
619 | } | ||
620 | } | ||
621 | Ty::UnselectedProjection(p_ty) => { | ||
622 | for t in p_ty.parameters.iter() { | ||
623 | t.walk(f); | ||
624 | } | ||
625 | } | ||
626 | Ty::Dyn(predicates) | Ty::Opaque(predicates) => { | ||
627 | for p in predicates.iter() { | ||
628 | p.walk(f); | ||
629 | } | ||
630 | } | ||
631 | Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} | ||
632 | } | ||
633 | f(self); | ||
634 | } | ||
635 | |||
636 | fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { | ||
589 | match self { | 637 | match self { |
638 | Ty::Apply(a_ty) => { | ||
639 | a_ty.parameters.walk_mut(f); | ||
640 | } | ||
641 | Ty::Projection(p_ty) => { | ||
642 | p_ty.parameters.walk_mut(f); | ||
643 | } | ||
644 | Ty::UnselectedProjection(p_ty) => { | ||
645 | p_ty.parameters.walk_mut(f); | ||
646 | } | ||
590 | Ty::Dyn(predicates) | Ty::Opaque(predicates) => { | 647 | Ty::Dyn(predicates) | Ty::Opaque(predicates) => { |
591 | predicates.iter().find_map(|pred| match pred { | 648 | let mut v: Vec<_> = predicates.iter().cloned().collect(); |
592 | GenericPredicate::Implemented(tr) => Some(tr.trait_), | 649 | for p in &mut v { |
593 | _ => None, | 650 | p.walk_mut(f); |
594 | }) | 651 | } |
652 | *predicates = v.into(); | ||
595 | } | 653 | } |
596 | _ => None, | 654 | Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} |
597 | } | 655 | } |
656 | f(self); | ||
598 | } | 657 | } |
599 | } | 658 | } |
600 | 659 | ||
@@ -742,20 +801,66 @@ impl HirDisplay for Ty { | |||
742 | Ty::Opaque(_) => write!(f, "impl ")?, | 801 | Ty::Opaque(_) => write!(f, "impl ")?, |
743 | _ => unreachable!(), | 802 | _ => unreachable!(), |
744 | }; | 803 | }; |
745 | // looping by hand here just to format the bounds in a slightly nicer way | 804 | // Note: This code is written to produce nice results (i.e. |
805 | // corresponding to surface Rust) for types that can occur in | ||
806 | // actual Rust. It will have weird results if the predicates | ||
807 | // aren't as expected (i.e. self types = $0, projection | ||
808 | // predicates for a certain trait come after the Implemented | ||
809 | // predicate for that trait). | ||
746 | let mut first = true; | 810 | let mut first = true; |
811 | let mut angle_open = false; | ||
747 | for p in predicates.iter() { | 812 | for p in predicates.iter() { |
748 | if !first { | ||
749 | write!(f, " + ")?; | ||
750 | } | ||
751 | first = false; | ||
752 | match p { | 813 | match p { |
753 | // don't show the $0 self type | ||
754 | GenericPredicate::Implemented(trait_ref) => { | 814 | GenericPredicate::Implemented(trait_ref) => { |
755 | trait_ref.hir_fmt_ext(f, false)? | 815 | if angle_open { |
816 | write!(f, ">")?; | ||
817 | } | ||
818 | if !first { | ||
819 | write!(f, " + ")?; | ||
820 | } | ||
821 | // We assume that the self type is $0 (i.e. the | ||
822 | // existential) here, which is the only thing that's | ||
823 | // possible in actual Rust, and hence don't print it | ||
824 | write!( | ||
825 | f, | ||
826 | "{}", | ||
827 | trait_ref.trait_.name(f.db).unwrap_or_else(Name::missing) | ||
828 | )?; | ||
829 | if trait_ref.substs.len() > 1 { | ||
830 | write!(f, "<")?; | ||
831 | f.write_joined(&trait_ref.substs[1..], ", ")?; | ||
832 | // there might be assoc type bindings, so we leave the angle brackets open | ||
833 | angle_open = true; | ||
834 | } | ||
835 | } | ||
836 | GenericPredicate::Projection(projection_pred) => { | ||
837 | // in types in actual Rust, these will always come | ||
838 | // after the corresponding Implemented predicate | ||
839 | if angle_open { | ||
840 | write!(f, ", ")?; | ||
841 | } else { | ||
842 | write!(f, "<")?; | ||
843 | angle_open = true; | ||
844 | } | ||
845 | let name = projection_pred.projection_ty.associated_ty.name(f.db); | ||
846 | write!(f, "{} = ", name)?; | ||
847 | projection_pred.ty.hir_fmt(f)?; | ||
848 | } | ||
849 | GenericPredicate::Error => { | ||
850 | if angle_open { | ||
851 | // impl Trait<X, {error}> | ||
852 | write!(f, ", ")?; | ||
853 | } else if !first { | ||
854 | // impl Trait + {error} | ||
855 | write!(f, " + ")?; | ||
856 | } | ||
857 | p.hir_fmt(f)?; | ||
756 | } | 858 | } |
757 | GenericPredicate::Error => p.hir_fmt(f)?, | ||
758 | } | 859 | } |
860 | first = false; | ||
861 | } | ||
862 | if angle_open { | ||
863 | write!(f, ">")?; | ||
759 | } | 864 | } |
760 | } | 865 | } |
761 | Ty::Unknown => write!(f, "{{unknown}}")?, | 866 | Ty::Unknown => write!(f, "{{unknown}}")?, |
@@ -766,13 +871,12 @@ impl HirDisplay for Ty { | |||
766 | } | 871 | } |
767 | 872 | ||
768 | impl TraitRef { | 873 | impl TraitRef { |
769 | fn hir_fmt_ext( | 874 | fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result { |
770 | &self, | 875 | self.substs[0].hir_fmt(f)?; |
771 | f: &mut HirFormatter<impl HirDatabase>, | 876 | if use_as { |
772 | with_self_ty: bool, | 877 | write!(f, " as ")?; |
773 | ) -> fmt::Result { | 878 | } else { |
774 | if with_self_ty { | 879 | write!(f, ": ")?; |
775 | write!(f, "{}: ", self.substs[0].display(f.db),)?; | ||
776 | } | 880 | } |
777 | write!(f, "{}", self.trait_.name(f.db).unwrap_or_else(Name::missing))?; | 881 | write!(f, "{}", self.trait_.name(f.db).unwrap_or_else(Name::missing))?; |
778 | if self.substs.len() > 1 { | 882 | if self.substs.len() > 1 { |
@@ -786,7 +890,7 @@ impl TraitRef { | |||
786 | 890 | ||
787 | impl HirDisplay for TraitRef { | 891 | impl HirDisplay for TraitRef { |
788 | fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { | 892 | fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { |
789 | self.hir_fmt_ext(f, true) | 893 | self.hir_fmt_ext(f, false) |
790 | } | 894 | } |
791 | } | 895 | } |
792 | 896 | ||
@@ -800,6 +904,16 @@ impl HirDisplay for GenericPredicate { | |||
800 | fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { | 904 | fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { |
801 | match self { | 905 | match self { |
802 | GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?, | 906 | GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?, |
907 | GenericPredicate::Projection(projection_pred) => { | ||
908 | write!(f, "<")?; | ||
909 | projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?; | ||
910 | write!( | ||
911 | f, | ||
912 | ">::{} = {}", | ||
913 | projection_pred.projection_ty.associated_ty.name(f.db), | ||
914 | projection_pred.ty.display(f.db) | ||
915 | )?; | ||
916 | } | ||
803 | GenericPredicate::Error => write!(f, "{{error}}")?, | 917 | GenericPredicate::Error => write!(f, "{{error}}")?, |
804 | } | 918 | } |
805 | Ok(()) | 919 | Ok(()) |
diff --git a/crates/ra_hir/src/ty/autoderef.rs b/crates/ra_hir/src/ty/autoderef.rs index 2535d4ae7..08f52a53b 100644 --- a/crates/ra_hir/src/ty/autoderef.rs +++ b/crates/ra_hir/src/ty/autoderef.rs | |||
@@ -7,7 +7,7 @@ use std::iter::successors; | |||
7 | 7 | ||
8 | use log::{info, warn}; | 8 | use log::{info, warn}; |
9 | 9 | ||
10 | use super::{traits::Solution, Canonical, Ty}; | 10 | use super::{traits::Solution, Canonical, Ty, TypeWalk}; |
11 | use crate::{HasGenericParams, HirDatabase, Name, Resolver}; | 11 | use crate::{HasGenericParams, HirDatabase, Name, Resolver}; |
12 | 12 | ||
13 | const AUTODEREF_RECURSION_LIMIT: usize = 10; | 13 | const AUTODEREF_RECURSION_LIMIT: usize = 10; |
diff --git a/crates/ra_hir/src/ty/infer.rs b/crates/ra_hir/src/ty/infer.rs index b89a40b4b..ec3b7ffef 100644 --- a/crates/ra_hir/src/ty/infer.rs +++ b/crates/ra_hir/src/ty/infer.rs | |||
@@ -30,7 +30,7 @@ use super::{ | |||
30 | autoderef, lower, method_resolution, op, primitive, | 30 | autoderef, lower, method_resolution, op, primitive, |
31 | traits::{Guidance, Obligation, ProjectionPredicate, Solution}, | 31 | traits::{Guidance, Obligation, ProjectionPredicate, Solution}, |
32 | ApplicationTy, CallableDef, InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, | 32 | ApplicationTy, CallableDef, InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, |
33 | Ty, TypableDef, TypeCtor, | 33 | Ty, TypableDef, TypeCtor, TypeWalk, |
34 | }; | 34 | }; |
35 | use crate::{ | 35 | use crate::{ |
36 | adt::VariantDef, | 36 | adt::VariantDef, |
diff --git a/crates/ra_hir/src/ty/infer/unify.rs b/crates/ra_hir/src/ty/infer/unify.rs index e7e8825d1..9a0d2d8f9 100644 --- a/crates/ra_hir/src/ty/infer/unify.rs +++ b/crates/ra_hir/src/ty/infer/unify.rs | |||
@@ -3,7 +3,7 @@ | |||
3 | use super::{InferenceContext, Obligation}; | 3 | use super::{InferenceContext, Obligation}; |
4 | use crate::db::HirDatabase; | 4 | use crate::db::HirDatabase; |
5 | use crate::ty::{ | 5 | use crate::ty::{ |
6 | Canonical, InEnvironment, InferTy, ProjectionPredicate, ProjectionTy, TraitRef, Ty, | 6 | Canonical, InEnvironment, InferTy, ProjectionPredicate, ProjectionTy, TraitRef, Ty, TypeWalk, |
7 | }; | 7 | }; |
8 | 8 | ||
9 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { | 9 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { |
diff --git a/crates/ra_hir/src/ty/lower.rs b/crates/ra_hir/src/ty/lower.rs index 47d161277..f6f0137cf 100644 --- a/crates/ra_hir/src/ty/lower.rs +++ b/crates/ra_hir/src/ty/lower.rs | |||
@@ -8,7 +8,10 @@ | |||
8 | use std::iter; | 8 | use std::iter; |
9 | use std::sync::Arc; | 9 | use std::sync::Arc; |
10 | 10 | ||
11 | use super::{FnSig, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor}; | 11 | use super::{ |
12 | FnSig, GenericPredicate, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, | ||
13 | TypeWalk, | ||
14 | }; | ||
12 | use crate::{ | 15 | use crate::{ |
13 | adt::VariantDef, | 16 | adt::VariantDef, |
14 | generics::HasGenericParams, | 17 | generics::HasGenericParams, |
@@ -62,7 +65,9 @@ impl Ty { | |||
62 | let self_ty = Ty::Bound(0); | 65 | let self_ty = Ty::Bound(0); |
63 | let predicates = bounds | 66 | let predicates = bounds |
64 | .iter() | 67 | .iter() |
65 | .map(|b| GenericPredicate::from_type_bound(db, resolver, b, self_ty.clone())) | 68 | .flat_map(|b| { |
69 | GenericPredicate::from_type_bound(db, resolver, b, self_ty.clone()) | ||
70 | }) | ||
66 | .collect::<Vec<_>>(); | 71 | .collect::<Vec<_>>(); |
67 | Ty::Dyn(predicates.into()) | 72 | Ty::Dyn(predicates.into()) |
68 | } | 73 | } |
@@ -70,7 +75,9 @@ impl Ty { | |||
70 | let self_ty = Ty::Bound(0); | 75 | let self_ty = Ty::Bound(0); |
71 | let predicates = bounds | 76 | let predicates = bounds |
72 | .iter() | 77 | .iter() |
73 | .map(|b| GenericPredicate::from_type_bound(db, resolver, b, self_ty.clone())) | 78 | .flat_map(|b| { |
79 | GenericPredicate::from_type_bound(db, resolver, b, self_ty.clone()) | ||
80 | }) | ||
74 | .collect::<Vec<_>>(); | 81 | .collect::<Vec<_>>(); |
75 | Ty::Opaque(predicates.into()) | 82 | Ty::Opaque(predicates.into()) |
76 | } | 83 | } |
@@ -326,15 +333,6 @@ impl TraitRef { | |||
326 | TraitRef { trait_, substs } | 333 | TraitRef { trait_, substs } |
327 | } | 334 | } |
328 | 335 | ||
329 | pub(crate) fn from_where_predicate( | ||
330 | db: &impl HirDatabase, | ||
331 | resolver: &Resolver, | ||
332 | pred: &WherePredicate, | ||
333 | ) -> Option<TraitRef> { | ||
334 | let self_ty = Ty::from_hir(db, resolver, &pred.type_ref); | ||
335 | TraitRef::from_type_bound(db, resolver, &pred.bound, self_ty) | ||
336 | } | ||
337 | |||
338 | pub(crate) fn from_type_bound( | 336 | pub(crate) fn from_type_bound( |
339 | db: &impl HirDatabase, | 337 | db: &impl HirDatabase, |
340 | resolver: &Resolver, | 338 | resolver: &Resolver, |
@@ -349,26 +347,58 @@ impl TraitRef { | |||
349 | } | 347 | } |
350 | 348 | ||
351 | impl GenericPredicate { | 349 | impl GenericPredicate { |
352 | pub(crate) fn from_where_predicate( | 350 | pub(crate) fn from_where_predicate<'a>( |
353 | db: &impl HirDatabase, | 351 | db: &'a impl HirDatabase, |
354 | resolver: &Resolver, | 352 | resolver: &'a Resolver, |
355 | where_predicate: &WherePredicate, | 353 | where_predicate: &'a WherePredicate, |
356 | ) -> GenericPredicate { | 354 | ) -> impl Iterator<Item = GenericPredicate> + 'a { |
357 | TraitRef::from_where_predicate(db, &resolver, where_predicate) | 355 | let self_ty = Ty::from_hir(db, resolver, &where_predicate.type_ref); |
358 | .map_or(GenericPredicate::Error, GenericPredicate::Implemented) | 356 | GenericPredicate::from_type_bound(db, resolver, &where_predicate.bound, self_ty) |
359 | } | 357 | } |
360 | 358 | ||
361 | pub(crate) fn from_type_bound( | 359 | pub(crate) fn from_type_bound<'a>( |
362 | db: &impl HirDatabase, | 360 | db: &'a impl HirDatabase, |
363 | resolver: &Resolver, | 361 | resolver: &'a Resolver, |
364 | bound: &TypeBound, | 362 | bound: &'a TypeBound, |
365 | self_ty: Ty, | 363 | self_ty: Ty, |
366 | ) -> GenericPredicate { | 364 | ) -> impl Iterator<Item = GenericPredicate> + 'a { |
367 | TraitRef::from_type_bound(db, &resolver, bound, self_ty) | 365 | let trait_ref = TraitRef::from_type_bound(db, &resolver, bound, self_ty); |
368 | .map_or(GenericPredicate::Error, GenericPredicate::Implemented) | 366 | iter::once(trait_ref.clone().map_or(GenericPredicate::Error, GenericPredicate::Implemented)) |
367 | .chain( | ||
368 | trait_ref.into_iter().flat_map(move |tr| { | ||
369 | assoc_type_bindings_from_type_bound(db, resolver, bound, tr) | ||
370 | }), | ||
371 | ) | ||
369 | } | 372 | } |
370 | } | 373 | } |
371 | 374 | ||
375 | fn assoc_type_bindings_from_type_bound<'a>( | ||
376 | db: &'a impl HirDatabase, | ||
377 | resolver: &'a Resolver, | ||
378 | bound: &'a TypeBound, | ||
379 | trait_ref: TraitRef, | ||
380 | ) -> impl Iterator<Item = GenericPredicate> + 'a { | ||
381 | let last_segment = match bound { | ||
382 | TypeBound::Path(path) => path.segments.last(), | ||
383 | TypeBound::Error => None, | ||
384 | }; | ||
385 | last_segment | ||
386 | .into_iter() | ||
387 | .flat_map(|segment| segment.args_and_bindings.iter()) | ||
388 | .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) | ||
389 | .map(move |(name, type_ref)| { | ||
390 | let associated_ty = match trait_ref.trait_.associated_type_by_name(db, name.clone()) { | ||
391 | None => return GenericPredicate::Error, | ||
392 | Some(t) => t, | ||
393 | }; | ||
394 | let projection_ty = | ||
395 | ProjectionTy { associated_ty, parameters: trait_ref.substs.clone() }; | ||
396 | let ty = Ty::from_hir(db, resolver, type_ref); | ||
397 | let projection_predicate = ProjectionPredicate { projection_ty, ty }; | ||
398 | GenericPredicate::Projection(projection_predicate) | ||
399 | }) | ||
400 | } | ||
401 | |||
372 | /// Build the declared type of an item. This depends on the namespace; e.g. for | 402 | /// Build the declared type of an item. This depends on the namespace; e.g. for |
373 | /// `struct Foo(usize)`, we have two types: The type of the struct itself, and | 403 | /// `struct Foo(usize)`, we have two types: The type of the struct itself, and |
374 | /// the constructor function `(usize) -> Foo` which lives in the values | 404 | /// the constructor function `(usize) -> Foo` which lives in the values |
@@ -425,7 +455,7 @@ pub(crate) fn trait_env( | |||
425 | ) -> Arc<super::TraitEnvironment> { | 455 | ) -> Arc<super::TraitEnvironment> { |
426 | let predicates = resolver | 456 | let predicates = resolver |
427 | .where_predicates_in_scope() | 457 | .where_predicates_in_scope() |
428 | .map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) | 458 | .flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) |
429 | .collect::<Vec<_>>(); | 459 | .collect::<Vec<_>>(); |
430 | 460 | ||
431 | Arc::new(super::TraitEnvironment { predicates }) | 461 | Arc::new(super::TraitEnvironment { predicates }) |
@@ -439,7 +469,7 @@ pub(crate) fn generic_predicates_query( | |||
439 | let resolver = def.resolver(db); | 469 | let resolver = def.resolver(db); |
440 | let predicates = resolver | 470 | let predicates = resolver |
441 | .where_predicates_in_scope() | 471 | .where_predicates_in_scope() |
442 | .map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) | 472 | .flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) |
443 | .collect::<Vec<_>>(); | 473 | .collect::<Vec<_>>(); |
444 | predicates.into() | 474 | predicates.into() |
445 | } | 475 | } |
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index d344ab12e..d92d4659b 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs | |||
@@ -2793,6 +2793,10 @@ fn main() { | |||
2793 | } | 2793 | } |
2794 | "#), | 2794 | "#), |
2795 | @r###" | 2795 | @r###" |
2796 | ![0; 17) '{Foo(v...,2,])}': Foo | ||
2797 | ![1; 4) 'Foo': Foo({unknown}) -> Foo | ||
2798 | ![1; 16) 'Foo(vec![1,2,])': Foo | ||
2799 | ![5; 15) 'vec![1,2,]': {unknown} | ||
2796 | [156; 182) '{ ...,2); }': () | 2800 | [156; 182) '{ ...,2); }': () |
2797 | [166; 167) 'x': Foo | 2801 | [166; 167) 'x': Foo |
2798 | "### | 2802 | "### |
@@ -3548,6 +3552,97 @@ fn test() { | |||
3548 | ); | 3552 | ); |
3549 | } | 3553 | } |
3550 | 3554 | ||
3555 | #[test] | ||
3556 | fn assoc_type_bindings() { | ||
3557 | assert_snapshot!( | ||
3558 | infer(r#" | ||
3559 | trait Trait { | ||
3560 | type Type; | ||
3561 | } | ||
3562 | |||
3563 | fn get<T: Trait>(t: T) -> <T as Trait>::Type {} | ||
3564 | fn get2<U, T: Trait<Type = U>>(t: T) -> U {} | ||
3565 | fn set<T: Trait<Type = u64>>(t: T) -> T {t} | ||
3566 | |||
3567 | struct S<T>; | ||
3568 | impl<T> Trait for S<T> { type Type = T; } | ||
3569 | |||
3570 | fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) { | ||
3571 | get(x); | ||
3572 | get2(x); | ||
3573 | get(y); | ||
3574 | get2(y); | ||
3575 | get(set(S)); | ||
3576 | get2(set(S)); | ||
3577 | get2(S::<str>); | ||
3578 | } | ||
3579 | "#), | ||
3580 | @r###" | ||
3581 | [50; 51) 't': T | ||
3582 | [78; 80) '{}': () | ||
3583 | [112; 113) 't': T | ||
3584 | [123; 125) '{}': () | ||
3585 | [155; 156) 't': T | ||
3586 | [166; 169) '{t}': T | ||
3587 | [167; 168) 't': T | ||
3588 | [257; 258) 'x': T | ||
3589 | [263; 264) 'y': impl Trait<Type = i64> | ||
3590 | [290; 398) '{ ...r>); }': () | ||
3591 | [296; 299) 'get': fn get<T>(T) -> <T as Trait>::Type | ||
3592 | [296; 302) 'get(x)': {unknown} | ||
3593 | [300; 301) 'x': T | ||
3594 | [308; 312) 'get2': fn get2<{unknown}, S<{unknown}>>(T) -> U | ||
3595 | [308; 315) 'get2(x)': {unknown} | ||
3596 | [313; 314) 'x': T | ||
3597 | [321; 324) 'get': fn get<impl Trait<Type = i64>>(T) -> <T as Trait>::Type | ||
3598 | [321; 327) 'get(y)': {unknown} | ||
3599 | [325; 326) 'y': impl Trait<Type = i64> | ||
3600 | [333; 337) 'get2': fn get2<{unknown}, S<{unknown}>>(T) -> U | ||
3601 | [333; 340) 'get2(y)': {unknown} | ||
3602 | [338; 339) 'y': impl Trait<Type = i64> | ||
3603 | [346; 349) 'get': fn get<S<u64>>(T) -> <T as Trait>::Type | ||
3604 | [346; 357) 'get(set(S))': u64 | ||
3605 | [350; 353) 'set': fn set<S<u64>>(T) -> T | ||
3606 | [350; 356) 'set(S)': S<u64> | ||
3607 | [354; 355) 'S': S<u64> | ||
3608 | [363; 367) 'get2': fn get2<u64, S<u64>>(T) -> U | ||
3609 | [363; 375) 'get2(set(S))': u64 | ||
3610 | [368; 371) 'set': fn set<S<u64>>(T) -> T | ||
3611 | [368; 374) 'set(S)': S<u64> | ||
3612 | [372; 373) 'S': S<u64> | ||
3613 | [381; 385) 'get2': fn get2<str, S<str>>(T) -> U | ||
3614 | [381; 395) 'get2(S::<str>)': str | ||
3615 | [386; 394) 'S::<str>': S<str> | ||
3616 | "### | ||
3617 | ); | ||
3618 | } | ||
3619 | |||
3620 | #[test] | ||
3621 | fn projection_eq_within_chalk() { | ||
3622 | // std::env::set_var("CHALK_DEBUG", "1"); | ||
3623 | assert_snapshot!( | ||
3624 | infer(r#" | ||
3625 | trait Trait1 { | ||
3626 | type Type; | ||
3627 | } | ||
3628 | trait Trait2<T> { | ||
3629 | fn foo(self) -> T; | ||
3630 | } | ||
3631 | impl<T, U> Trait2<T> for U where U: Trait1<Type = T> {} | ||
3632 | |||
3633 | fn test<T: Trait1<Type = u32>>(x: T) { | ||
3634 | x.foo(); | ||
3635 | } | ||
3636 | "#), | ||
3637 | @r###" | ||
3638 | [62; 66) 'self': Self | ||
3639 | [164; 165) 'x': T | ||
3640 | [170; 186) '{ ...o(); }': () | ||
3641 | [176; 177) 'x': T | ||
3642 | [176; 183) 'x.foo()': {unknown} | ||
3643 | "### | ||
3644 | ); | ||
3645 | } | ||
3551 | fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String { | 3646 | fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String { |
3552 | let file = db.parse(pos.file_id).ok().unwrap(); | 3647 | let file = db.parse(pos.file_id).ok().unwrap(); |
3553 | let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap(); | 3648 | let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap(); |
@@ -3566,7 +3661,6 @@ fn infer(content: &str) -> String { | |||
3566 | let source_file = db.parse(file_id).ok().unwrap(); | 3661 | let source_file = db.parse(file_id).ok().unwrap(); |
3567 | 3662 | ||
3568 | let mut acc = String::new(); | 3663 | let mut acc = String::new(); |
3569 | // acc.push_str("\n"); | ||
3570 | 3664 | ||
3571 | let mut infer_def = |inference_result: Arc<InferenceResult>, | 3665 | let mut infer_def = |inference_result: Arc<InferenceResult>, |
3572 | body_source_map: Arc<BodySourceMap>| { | 3666 | body_source_map: Arc<BodySourceMap>| { |
@@ -3574,7 +3668,9 @@ fn infer(content: &str) -> String { | |||
3574 | 3668 | ||
3575 | for (pat, ty) in inference_result.type_of_pat.iter() { | 3669 | for (pat, ty) in inference_result.type_of_pat.iter() { |
3576 | let syntax_ptr = match body_source_map.pat_syntax(pat) { | 3670 | let syntax_ptr = match body_source_map.pat_syntax(pat) { |
3577 | Some(sp) => sp.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()), | 3671 | Some(sp) => { |
3672 | sp.map(|ast| ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr())) | ||
3673 | } | ||
3578 | None => continue, | 3674 | None => continue, |
3579 | }; | 3675 | }; |
3580 | types.push((syntax_ptr, ty)); | 3676 | types.push((syntax_ptr, ty)); |
@@ -3582,22 +3678,34 @@ fn infer(content: &str) -> String { | |||
3582 | 3678 | ||
3583 | for (expr, ty) in inference_result.type_of_expr.iter() { | 3679 | for (expr, ty) in inference_result.type_of_expr.iter() { |
3584 | let syntax_ptr = match body_source_map.expr_syntax(expr) { | 3680 | let syntax_ptr = match body_source_map.expr_syntax(expr) { |
3585 | Some(sp) => sp.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()), | 3681 | Some(sp) => { |
3682 | sp.map(|ast| ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr())) | ||
3683 | } | ||
3586 | None => continue, | 3684 | None => continue, |
3587 | }; | 3685 | }; |
3588 | types.push((syntax_ptr, ty)); | 3686 | types.push((syntax_ptr, ty)); |
3589 | } | 3687 | } |
3590 | 3688 | ||
3591 | // sort ranges for consistency | 3689 | // sort ranges for consistency |
3592 | types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end())); | 3690 | types.sort_by_key(|(src_ptr, _)| (src_ptr.ast.range().start(), src_ptr.ast.range().end())); |
3593 | for (syntax_ptr, ty) in &types { | 3691 | for (src_ptr, ty) in &types { |
3594 | let node = syntax_ptr.to_node(source_file.syntax()); | 3692 | let node = src_ptr.ast.to_node(&src_ptr.file_syntax(&db)); |
3693 | |||
3595 | let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) { | 3694 | let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) { |
3596 | (self_param.self_kw_token().text_range(), "self".to_string()) | 3695 | (self_param.self_kw_token().text_range(), "self".to_string()) |
3597 | } else { | 3696 | } else { |
3598 | (syntax_ptr.range(), node.text().to_string().replace("\n", " ")) | 3697 | (src_ptr.ast.range(), node.text().to_string().replace("\n", " ")) |
3599 | }; | 3698 | }; |
3600 | write!(acc, "{} '{}': {}\n", range, ellipsize(text, 15), ty.display(&db)).unwrap(); | 3699 | let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" }; |
3700 | write!( | ||
3701 | acc, | ||
3702 | "{}{} '{}': {}\n", | ||
3703 | macro_prefix, | ||
3704 | range, | ||
3705 | ellipsize(text, 15), | ||
3706 | ty.display(&db) | ||
3707 | ) | ||
3708 | .unwrap(); | ||
3601 | } | 3709 | } |
3602 | }; | 3710 | }; |
3603 | 3711 | ||
diff --git a/crates/ra_hir/src/ty/traits.rs b/crates/ra_hir/src/ty/traits.rs index b634f0b79..6e0271a96 100644 --- a/crates/ra_hir/src/ty/traits.rs +++ b/crates/ra_hir/src/ty/traits.rs | |||
@@ -8,7 +8,7 @@ use ra_db::salsa; | |||
8 | use ra_prof::profile; | 8 | use ra_prof::profile; |
9 | use rustc_hash::FxHashSet; | 9 | use rustc_hash::FxHashSet; |
10 | 10 | ||
11 | use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty}; | 11 | use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk}; |
12 | use crate::{db::HirDatabase, Crate, ImplBlock, Trait}; | 12 | use crate::{db::HirDatabase, Crate, ImplBlock, Trait}; |
13 | 13 | ||
14 | use self::chalk::{from_chalk, ToChalk}; | 14 | use self::chalk::{from_chalk, ToChalk}; |
@@ -124,6 +124,9 @@ impl Obligation { | |||
124 | pub fn from_predicate(predicate: GenericPredicate) -> Option<Obligation> { | 124 | pub fn from_predicate(predicate: GenericPredicate) -> Option<Obligation> { |
125 | match predicate { | 125 | match predicate { |
126 | GenericPredicate::Implemented(trait_ref) => Some(Obligation::Trait(trait_ref)), | 126 | GenericPredicate::Implemented(trait_ref) => Some(Obligation::Trait(trait_ref)), |
127 | GenericPredicate::Projection(projection_pred) => { | ||
128 | Some(Obligation::Projection(projection_pred)) | ||
129 | } | ||
127 | GenericPredicate::Error => None, | 130 | GenericPredicate::Error => None, |
128 | } | 131 | } |
129 | } | 132 | } |
@@ -135,6 +138,18 @@ pub struct ProjectionPredicate { | |||
135 | pub ty: Ty, | 138 | pub ty: Ty, |
136 | } | 139 | } |
137 | 140 | ||
141 | impl TypeWalk for ProjectionPredicate { | ||
142 | fn walk(&self, f: &mut impl FnMut(&Ty)) { | ||
143 | self.projection_ty.walk(f); | ||
144 | self.ty.walk(f); | ||
145 | } | ||
146 | |||
147 | fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { | ||
148 | self.projection_ty.walk_mut(f); | ||
149 | self.ty.walk_mut(f); | ||
150 | } | ||
151 | } | ||
152 | |||
138 | /// Solve a trait goal using Chalk. | 153 | /// Solve a trait goal using Chalk. |
139 | pub(crate) fn trait_solve_query( | 154 | pub(crate) fn trait_solve_query( |
140 | db: &impl HirDatabase, | 155 | db: &impl HirDatabase, |
diff --git a/crates/ra_hir/src/ty/traits/chalk.rs b/crates/ra_hir/src/ty/traits/chalk.rs index 2ebc06135..c201c5e50 100644 --- a/crates/ra_hir/src/ty/traits/chalk.rs +++ b/crates/ra_hir/src/ty/traits/chalk.rs | |||
@@ -19,6 +19,7 @@ use crate::{ | |||
19 | ty::display::HirDisplay, | 19 | ty::display::HirDisplay, |
20 | ty::{ | 20 | ty::{ |
21 | ApplicationTy, CallableDef, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, | 21 | ApplicationTy, CallableDef, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, |
22 | TypeWalk, | ||
22 | }, | 23 | }, |
23 | Crate, HasGenericParams, ImplBlock, ImplItem, Trait, TypeAlias, | 24 | Crate, HasGenericParams, ImplBlock, ImplItem, Trait, TypeAlias, |
24 | }; | 25 | }; |
@@ -211,6 +212,13 @@ impl ToChalk for GenericPredicate { | |||
211 | GenericPredicate::Implemented(trait_ref) => { | 212 | GenericPredicate::Implemented(trait_ref) => { |
212 | make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0) | 213 | make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0) |
213 | } | 214 | } |
215 | GenericPredicate::Projection(projection_pred) => make_binders( | ||
216 | chalk_ir::WhereClause::ProjectionEq(chalk_ir::ProjectionEq { | ||
217 | projection: projection_pred.projection_ty.to_chalk(db), | ||
218 | ty: projection_pred.ty.to_chalk(db), | ||
219 | }), | ||
220 | 0, | ||
221 | ), | ||
214 | GenericPredicate::Error => { | 222 | GenericPredicate::Error => { |
215 | let impossible_trait_ref = chalk_ir::TraitRef { | 223 | let impossible_trait_ref = chalk_ir::TraitRef { |
216 | trait_id: UNKNOWN_TRAIT, | 224 | trait_id: UNKNOWN_TRAIT, |
diff --git a/crates/ra_ide_api/src/change.rs b/crates/ra_ide_api/src/change.rs index 0234c572d..89631935a 100644 --- a/crates/ra_ide_api/src/change.rs +++ b/crates/ra_ide_api/src/change.rs | |||
@@ -213,11 +213,11 @@ impl RootDatabase { | |||
213 | durability, | 213 | durability, |
214 | ); | 214 | ); |
215 | self.set_file_source_root_with_durability(add_file.file_id, root_id, durability); | 215 | self.set_file_source_root_with_durability(add_file.file_id, root_id, durability); |
216 | source_root.files.insert(add_file.path, add_file.file_id); | 216 | source_root.insert_file(add_file.path, add_file.file_id); |
217 | } | 217 | } |
218 | for remove_file in root_change.removed { | 218 | for remove_file in root_change.removed { |
219 | self.set_file_text_with_durability(remove_file.file_id, Default::default(), durability); | 219 | self.set_file_text_with_durability(remove_file.file_id, Default::default(), durability); |
220 | source_root.files.remove(&remove_file.path); | 220 | source_root.remove_file(&remove_file.path); |
221 | } | 221 | } |
222 | self.set_source_root_with_durability(root_id, Arc::new(source_root), durability); | 222 | self.set_source_root_with_durability(root_id, Arc::new(source_root), durability); |
223 | } | 223 | } |
diff --git a/crates/ra_ide_api/src/completion/presentation.rs b/crates/ra_ide_api/src/completion/presentation.rs index f19eec9b7..db7e8348e 100644 --- a/crates/ra_ide_api/src/completion/presentation.rs +++ b/crates/ra_ide_api/src/completion/presentation.rs | |||
@@ -1,5 +1,5 @@ | |||
1 | //! This modules takes care of rendering various defenitions as completion items. | 1 | //! This modules takes care of rendering various defenitions as completion items. |
2 | use hir::{Docs, HasSource, HirDisplay, PerNs, Resolution, Ty}; | 2 | use hir::{Docs, HasSource, HirDisplay, PerNs, Resolution, Ty, TypeWalk}; |
3 | use join_to_string::join; | 3 | use join_to_string::join; |
4 | use ra_syntax::ast::NameOwner; | 4 | use ra_syntax::ast::NameOwner; |
5 | use test_utils::tested_by; | 5 | use test_utils::tested_by; |
diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs index 514dcaf96..e90fbd428 100644 --- a/crates/ra_ide_api/src/lib.rs +++ b/crates/ra_ide_api/src/lib.rs | |||
@@ -466,7 +466,7 @@ impl Analysis { | |||
466 | &self, | 466 | &self, |
467 | position: FilePosition, | 467 | position: FilePosition, |
468 | ) -> Cancelable<Option<ReferenceSearchResult>> { | 468 | ) -> Cancelable<Option<ReferenceSearchResult>> { |
469 | self.with_db(|db| references::find_all_refs(db, position)) | 469 | self.with_db(|db| references::find_all_refs(db, position).map(|it| it.info)) |
470 | } | 470 | } |
471 | 471 | ||
472 | /// Returns a short text describing element at position. | 472 | /// Returns a short text describing element at position. |
@@ -536,7 +536,7 @@ impl Analysis { | |||
536 | &self, | 536 | &self, |
537 | position: FilePosition, | 537 | position: FilePosition, |
538 | new_name: &str, | 538 | new_name: &str, |
539 | ) -> Cancelable<Option<SourceChange>> { | 539 | ) -> Cancelable<Option<RangeInfo<SourceChange>>> { |
540 | self.with_db(|db| references::rename(db, position, new_name)) | 540 | self.with_db(|db| references::rename(db, position, new_name)) |
541 | } | 541 | } |
542 | 542 | ||
diff --git a/crates/ra_ide_api/src/references.rs b/crates/ra_ide_api/src/references.rs index d8a00067f..5f1f0efc3 100644 --- a/crates/ra_ide_api/src/references.rs +++ b/crates/ra_ide_api/src/references.rs | |||
@@ -4,7 +4,7 @@ use ra_syntax::{algo::find_node_at_offset, ast, AstNode, SourceFile, SyntaxNode} | |||
4 | use relative_path::{RelativePath, RelativePathBuf}; | 4 | use relative_path::{RelativePath, RelativePathBuf}; |
5 | 5 | ||
6 | use crate::{ | 6 | use crate::{ |
7 | db::RootDatabase, FileId, FilePosition, FileRange, FileSystemEdit, NavigationTarget, | 7 | db::RootDatabase, FileId, FilePosition, FileRange, FileSystemEdit, NavigationTarget, RangeInfo, |
8 | SourceChange, SourceFileEdit, TextRange, | 8 | SourceChange, SourceFileEdit, TextRange, |
9 | }; | 9 | }; |
10 | 10 | ||
@@ -48,9 +48,9 @@ impl IntoIterator for ReferenceSearchResult { | |||
48 | pub(crate) fn find_all_refs( | 48 | pub(crate) fn find_all_refs( |
49 | db: &RootDatabase, | 49 | db: &RootDatabase, |
50 | position: FilePosition, | 50 | position: FilePosition, |
51 | ) -> Option<ReferenceSearchResult> { | 51 | ) -> Option<RangeInfo<ReferenceSearchResult>> { |
52 | let parse = db.parse(position.file_id); | 52 | let parse = db.parse(position.file_id); |
53 | let (binding, analyzer) = find_binding(db, &parse.tree(), position)?; | 53 | let RangeInfo { range, info: (binding, analyzer) } = find_binding(db, &parse.tree(), position)?; |
54 | let declaration = NavigationTarget::from_bind_pat(position.file_id, &binding); | 54 | let declaration = NavigationTarget::from_bind_pat(position.file_id, &binding); |
55 | 55 | ||
56 | let references = analyzer | 56 | let references = analyzer |
@@ -59,24 +59,26 @@ pub(crate) fn find_all_refs( | |||
59 | .map(move |ref_desc| FileRange { file_id: position.file_id, range: ref_desc.range }) | 59 | .map(move |ref_desc| FileRange { file_id: position.file_id, range: ref_desc.range }) |
60 | .collect::<Vec<_>>(); | 60 | .collect::<Vec<_>>(); |
61 | 61 | ||
62 | return Some(ReferenceSearchResult { declaration, references }); | 62 | return Some(RangeInfo::new(range, ReferenceSearchResult { declaration, references })); |
63 | 63 | ||
64 | fn find_binding<'a>( | 64 | fn find_binding<'a>( |
65 | db: &RootDatabase, | 65 | db: &RootDatabase, |
66 | source_file: &SourceFile, | 66 | source_file: &SourceFile, |
67 | position: FilePosition, | 67 | position: FilePosition, |
68 | ) -> Option<(ast::BindPat, hir::SourceAnalyzer)> { | 68 | ) -> Option<RangeInfo<(ast::BindPat, hir::SourceAnalyzer)>> { |
69 | let syntax = source_file.syntax(); | 69 | let syntax = source_file.syntax(); |
70 | if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) { | 70 | if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) { |
71 | let range = binding.syntax().text_range(); | ||
71 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, binding.syntax(), None); | 72 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, binding.syntax(), None); |
72 | return Some((binding, analyzer)); | 73 | return Some(RangeInfo::new(range, (binding, analyzer))); |
73 | }; | 74 | }; |
74 | let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?; | 75 | let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?; |
76 | let range = name_ref.syntax().text_range(); | ||
75 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); | 77 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); |
76 | let resolved = analyzer.resolve_local_name(&name_ref)?; | 78 | let resolved = analyzer.resolve_local_name(&name_ref)?; |
77 | if let Either::A(ptr) = resolved.ptr() { | 79 | if let Either::A(ptr) = resolved.ptr() { |
78 | if let ast::Pat::BindPat(binding) = ptr.to_node(source_file.syntax()) { | 80 | if let ast::Pat::BindPat(binding) = ptr.to_node(source_file.syntax()) { |
79 | return Some((binding, analyzer)); | 81 | return Some(RangeInfo::new(range, (binding, analyzer))); |
80 | } | 82 | } |
81 | } | 83 | } |
82 | None | 84 | None |
@@ -87,12 +89,14 @@ pub(crate) fn rename( | |||
87 | db: &RootDatabase, | 89 | db: &RootDatabase, |
88 | position: FilePosition, | 90 | position: FilePosition, |
89 | new_name: &str, | 91 | new_name: &str, |
90 | ) -> Option<SourceChange> { | 92 | ) -> Option<RangeInfo<SourceChange>> { |
91 | let parse = db.parse(position.file_id); | 93 | let parse = db.parse(position.file_id); |
92 | if let Some((ast_name, ast_module)) = | 94 | if let Some((ast_name, ast_module)) = |
93 | find_name_and_module_at_offset(parse.tree().syntax(), position) | 95 | find_name_and_module_at_offset(parse.tree().syntax(), position) |
94 | { | 96 | { |
97 | let range = ast_name.syntax().text_range(); | ||
95 | rename_mod(db, &ast_name, &ast_module, position, new_name) | 98 | rename_mod(db, &ast_name, &ast_module, position, new_name) |
99 | .map(|info| RangeInfo::new(range, info)) | ||
96 | } else { | 100 | } else { |
97 | rename_reference(db, position, new_name) | 101 | rename_reference(db, position, new_name) |
98 | } | 102 | } |
@@ -107,7 +111,7 @@ fn find_name_and_module_at_offset( | |||
107 | Some((ast_name, ast_module)) | 111 | Some((ast_name, ast_module)) |
108 | } | 112 | } |
109 | 113 | ||
110 | fn source_edit_from_fileid_range( | 114 | fn source_edit_from_file_id_range( |
111 | file_id: FileId, | 115 | file_id: FileId, |
112 | range: TextRange, | 116 | range: TextRange, |
113 | new_name: &str, | 117 | new_name: &str, |
@@ -179,19 +183,19 @@ fn rename_reference( | |||
179 | db: &RootDatabase, | 183 | db: &RootDatabase, |
180 | position: FilePosition, | 184 | position: FilePosition, |
181 | new_name: &str, | 185 | new_name: &str, |
182 | ) -> Option<SourceChange> { | 186 | ) -> Option<RangeInfo<SourceChange>> { |
183 | let refs = find_all_refs(db, position)?; | 187 | let RangeInfo { range, info: refs } = find_all_refs(db, position)?; |
184 | 188 | ||
185 | let edit = refs | 189 | let edit = refs |
186 | .into_iter() | 190 | .into_iter() |
187 | .map(|range| source_edit_from_fileid_range(range.file_id, range.range, new_name)) | 191 | .map(|range| source_edit_from_file_id_range(range.file_id, range.range, new_name)) |
188 | .collect::<Vec<_>>(); | 192 | .collect::<Vec<_>>(); |
189 | 193 | ||
190 | if edit.is_empty() { | 194 | if edit.is_empty() { |
191 | return None; | 195 | return None; |
192 | } | 196 | } |
193 | 197 | ||
194 | Some(SourceChange::source_file_edits("rename", edit)) | 198 | Some(RangeInfo::new(range, SourceChange::source_file_edits("rename", edit))) |
195 | } | 199 | } |
196 | 200 | ||
197 | #[cfg(test)] | 201 | #[cfg(test)] |
@@ -342,38 +346,43 @@ mod tests { | |||
342 | let new_name = "foo2"; | 346 | let new_name = "foo2"; |
343 | let source_change = analysis.rename(position, new_name).unwrap(); | 347 | let source_change = analysis.rename(position, new_name).unwrap(); |
344 | assert_debug_snapshot!(&source_change, | 348 | assert_debug_snapshot!(&source_change, |
345 | @r#"Some( | 349 | @r###" |
346 | SourceChange { | 350 | Some( |
347 | label: "rename", | 351 | RangeInfo { |
348 | source_file_edits: [ | 352 | range: [4; 7), |
349 | SourceFileEdit { | 353 | info: SourceChange { |
350 | file_id: FileId( | 354 | label: "rename", |
351 | 2, | 355 | source_file_edits: [ |
352 | ), | 356 | SourceFileEdit { |
353 | edit: TextEdit { | 357 | file_id: FileId( |
354 | atoms: [ | 358 | 2, |
355 | AtomTextEdit { | 359 | ), |
356 | delete: [4; 7), | 360 | edit: TextEdit { |
357 | insert: "foo2", | 361 | atoms: [ |
362 | AtomTextEdit { | ||
363 | delete: [4; 7), | ||
364 | insert: "foo2", | ||
365 | }, | ||
366 | ], | ||
367 | }, | ||
368 | }, | ||
369 | ], | ||
370 | file_system_edits: [ | ||
371 | MoveFile { | ||
372 | src: FileId( | ||
373 | 3, | ||
374 | ), | ||
375 | dst_source_root: SourceRootId( | ||
376 | 0, | ||
377 | ), | ||
378 | dst_path: "bar/foo2.rs", | ||
358 | }, | 379 | }, |
359 | ], | 380 | ], |
381 | cursor_position: None, | ||
360 | }, | 382 | }, |
361 | }, | 383 | }, |
362 | ], | 384 | ) |
363 | file_system_edits: [ | 385 | "###); |
364 | MoveFile { | ||
365 | src: FileId( | ||
366 | 3, | ||
367 | ), | ||
368 | dst_source_root: SourceRootId( | ||
369 | 0, | ||
370 | ), | ||
371 | dst_path: "bar/foo2.rs", | ||
372 | }, | ||
373 | ], | ||
374 | cursor_position: None, | ||
375 | }, | ||
376 | )"#); | ||
377 | } | 386 | } |
378 | 387 | ||
379 | #[test] | 388 | #[test] |
@@ -389,38 +398,43 @@ mod tests { | |||
389 | let new_name = "foo2"; | 398 | let new_name = "foo2"; |
390 | let source_change = analysis.rename(position, new_name).unwrap(); | 399 | let source_change = analysis.rename(position, new_name).unwrap(); |
391 | assert_debug_snapshot!(&source_change, | 400 | assert_debug_snapshot!(&source_change, |
392 | @r###"Some( | 401 | @r###" |
393 | SourceChange { | 402 | Some( |
394 | label: "rename", | 403 | RangeInfo { |
395 | source_file_edits: [ | 404 | range: [4; 7), |
396 | SourceFileEdit { | 405 | info: SourceChange { |
397 | file_id: FileId( | 406 | label: "rename", |
398 | 1, | 407 | source_file_edits: [ |
399 | ), | 408 | SourceFileEdit { |
400 | edit: TextEdit { | 409 | file_id: FileId( |
401 | atoms: [ | 410 | 1, |
402 | AtomTextEdit { | 411 | ), |
403 | delete: [4; 7), | 412 | edit: TextEdit { |
404 | insert: "foo2", | 413 | atoms: [ |
414 | AtomTextEdit { | ||
415 | delete: [4; 7), | ||
416 | insert: "foo2", | ||
417 | }, | ||
418 | ], | ||
419 | }, | ||
420 | }, | ||
421 | ], | ||
422 | file_system_edits: [ | ||
423 | MoveFile { | ||
424 | src: FileId( | ||
425 | 2, | ||
426 | ), | ||
427 | dst_source_root: SourceRootId( | ||
428 | 0, | ||
429 | ), | ||
430 | dst_path: "foo2/mod.rs", | ||
405 | }, | 431 | }, |
406 | ], | 432 | ], |
433 | cursor_position: None, | ||
407 | }, | 434 | }, |
408 | }, | 435 | }, |
409 | ], | 436 | ) |
410 | file_system_edits: [ | 437 | "### |
411 | MoveFile { | ||
412 | src: FileId( | ||
413 | 2, | ||
414 | ), | ||
415 | dst_source_root: SourceRootId( | ||
416 | 0, | ||
417 | ), | ||
418 | dst_path: "foo2/mod.rs", | ||
419 | }, | ||
420 | ], | ||
421 | cursor_position: None, | ||
422 | }, | ||
423 | )"### | ||
424 | ); | 438 | ); |
425 | } | 439 | } |
426 | 440 | ||
@@ -430,7 +444,7 @@ mod tests { | |||
430 | let mut text_edit_builder = ra_text_edit::TextEditBuilder::default(); | 444 | let mut text_edit_builder = ra_text_edit::TextEditBuilder::default(); |
431 | let mut file_id: Option<FileId> = None; | 445 | let mut file_id: Option<FileId> = None; |
432 | if let Some(change) = source_change { | 446 | if let Some(change) = source_change { |
433 | for edit in change.source_file_edits { | 447 | for edit in change.info.source_file_edits { |
434 | file_id = Some(edit.file_id); | 448 | file_id = Some(edit.file_id); |
435 | for atom in edit.edit.as_atoms() { | 449 | for atom in edit.edit.as_atoms() { |
436 | text_edit_builder.replace(atom.delete, atom.insert.clone()); | 450 | text_edit_builder.replace(atom.delete, atom.insert.clone()); |
diff --git a/crates/ra_ide_api/src/symbol_index.rs b/crates/ra_ide_api/src/symbol_index.rs index d4afddab4..a5729c368 100644 --- a/crates/ra_ide_api/src/symbol_index.rs +++ b/crates/ra_ide_api/src/symbol_index.rs | |||
@@ -87,7 +87,7 @@ pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> | |||
87 | let mut files = Vec::new(); | 87 | let mut files = Vec::new(); |
88 | for &root in db.local_roots().iter() { | 88 | for &root in db.local_roots().iter() { |
89 | let sr = db.source_root(root); | 89 | let sr = db.source_root(root); |
90 | files.extend(sr.files.values().copied()) | 90 | files.extend(sr.walk()) |
91 | } | 91 | } |
92 | 92 | ||
93 | let snap = Snap(db.snapshot()); | 93 | let snap = Snap(db.snapshot()); |
diff --git a/crates/ra_ide_api/src/syntax_tree.rs b/crates/ra_ide_api/src/syntax_tree.rs index 914759709..e2bb120b4 100644 --- a/crates/ra_ide_api/src/syntax_tree.rs +++ b/crates/ra_ide_api/src/syntax_tree.rs | |||
@@ -1,357 +1,357 @@ | |||
1 | use crate::db::RootDatabase; | 1 | use crate::db::RootDatabase; |
2 | use ra_db::SourceDatabase; | 2 | use ra_db::SourceDatabase; |
3 | use ra_syntax::{ | 3 | use ra_syntax::{ |
4 | algo, AstNode, NodeOrToken, SourceFile, | 4 | algo, AstNode, NodeOrToken, SourceFile, |
5 | SyntaxKind::{RAW_STRING, STRING}, | 5 | SyntaxKind::{RAW_STRING, STRING}, |
6 | SyntaxToken, TextRange, | 6 | SyntaxToken, TextRange, |
7 | }; | 7 | }; |
8 | 8 | ||
9 | pub use ra_db::FileId; | 9 | pub use ra_db::FileId; |
10 | 10 | ||
11 | pub(crate) fn syntax_tree( | 11 | pub(crate) fn syntax_tree( |
12 | db: &RootDatabase, | 12 | db: &RootDatabase, |
13 | file_id: FileId, | 13 | file_id: FileId, |
14 | text_range: Option<TextRange>, | 14 | text_range: Option<TextRange>, |
15 | ) -> String { | 15 | ) -> String { |
16 | let parse = db.parse(file_id); | 16 | let parse = db.parse(file_id); |
17 | if let Some(text_range) = text_range { | 17 | if let Some(text_range) = text_range { |
18 | let node = match algo::find_covering_element(parse.tree().syntax(), text_range) { | 18 | let node = match algo::find_covering_element(parse.tree().syntax(), text_range) { |
19 | NodeOrToken::Node(node) => node, | 19 | NodeOrToken::Node(node) => node, |
20 | NodeOrToken::Token(token) => { | 20 | NodeOrToken::Token(token) => { |
21 | if let Some(tree) = syntax_tree_for_string(&token, text_range) { | 21 | if let Some(tree) = syntax_tree_for_string(&token, text_range) { |
22 | return tree; | 22 | return tree; |
23 | } | 23 | } |
24 | token.parent() | 24 | token.parent() |
25 | } | 25 | } |
26 | }; | 26 | }; |
27 | 27 | ||
28 | format!("{:#?}", node) | 28 | format!("{:#?}", node) |
29 | } else { | 29 | } else { |
30 | format!("{:#?}", parse.tree().syntax()) | 30 | format!("{:#?}", parse.tree().syntax()) |
31 | } | 31 | } |
32 | } | 32 | } |
33 | 33 | ||
34 | /// Attempts parsing the selected contents of a string literal | 34 | /// Attempts parsing the selected contents of a string literal |
35 | /// as rust syntax and returns its syntax tree | 35 | /// as rust syntax and returns its syntax tree |
36 | fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<String> { | 36 | fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<String> { |
37 | // When the range is inside a string | 37 | // When the range is inside a string |
38 | // we'll attempt parsing it as rust syntax | 38 | // we'll attempt parsing it as rust syntax |
39 | // to provide the syntax tree of the contents of the string | 39 | // to provide the syntax tree of the contents of the string |
40 | match token.kind() { | 40 | match token.kind() { |
41 | STRING | RAW_STRING => syntax_tree_for_token(token, text_range), | 41 | STRING | RAW_STRING => syntax_tree_for_token(token, text_range), |
42 | _ => None, | 42 | _ => None, |
43 | } | 43 | } |
44 | } | 44 | } |
45 | 45 | ||
46 | fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> { | 46 | fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> { |
47 | // Range of the full node | 47 | // Range of the full node |
48 | let node_range = node.text_range(); | 48 | let node_range = node.text_range(); |
49 | let text = node.text().to_string(); | 49 | let text = node.text().to_string(); |
50 | 50 | ||
51 | // We start at some point inside the node | 51 | // We start at some point inside the node |
52 | // Either we have selected the whole string | 52 | // Either we have selected the whole string |
53 | // or our selection is inside it | 53 | // or our selection is inside it |
54 | let start = text_range.start() - node_range.start(); | 54 | let start = text_range.start() - node_range.start(); |
55 | 55 | ||
56 | // how many characters we have selected | 56 | // how many characters we have selected |
57 | let len = text_range.len().to_usize(); | 57 | let len = text_range.len().to_usize(); |
58 | 58 | ||
59 | let node_len = node_range.len().to_usize(); | 59 | let node_len = node_range.len().to_usize(); |
60 | 60 | ||
61 | let start = start.to_usize(); | 61 | let start = start.to_usize(); |
62 | 62 | ||
63 | // We want to cap our length | 63 | // We want to cap our length |
64 | let len = len.min(node_len); | 64 | let len = len.min(node_len); |
65 | 65 | ||
66 | // Ensure our slice is inside the actual string | 66 | // Ensure our slice is inside the actual string |
67 | let end = if start + len < text.len() { start + len } else { text.len() - start }; | 67 | let end = if start + len < text.len() { start + len } else { text.len() - start }; |
68 | 68 | ||
69 | let text = &text[start..end]; | 69 | let text = &text[start..end]; |
70 | 70 | ||
71 | // Remove possible extra string quotes from the start | 71 | // Remove possible extra string quotes from the start |
72 | // and the end of the string | 72 | // and the end of the string |
73 | let text = text | 73 | let text = text |
74 | .trim_start_matches('r') | 74 | .trim_start_matches('r') |
75 | .trim_start_matches('#') | 75 | .trim_start_matches('#') |
76 | .trim_start_matches('"') | 76 | .trim_start_matches('"') |
77 | .trim_end_matches('#') | 77 | .trim_end_matches('#') |
78 | .trim_end_matches('"') | 78 | .trim_end_matches('"') |
79 | .trim() | 79 | .trim() |
80 | // Remove custom markers | 80 | // Remove custom markers |
81 | .replace("<|>", ""); | 81 | .replace("<|>", ""); |
82 | 82 | ||
83 | let parsed = SourceFile::parse(&text); | 83 | let parsed = SourceFile::parse(&text); |
84 | 84 | ||
85 | // If the "file" parsed without errors, | 85 | // If the "file" parsed without errors, |
86 | // return its syntax | 86 | // return its syntax |
87 | if parsed.errors().is_empty() { | 87 | if parsed.errors().is_empty() { |
88 | return Some(format!("{:#?}", parsed.tree().syntax())); | 88 | return Some(format!("{:#?}", parsed.tree().syntax())); |
89 | } | 89 | } |
90 | 90 | ||
91 | None | 91 | None |
92 | } | 92 | } |
93 | 93 | ||
94 | #[cfg(test)] | 94 | #[cfg(test)] |
95 | mod tests { | 95 | mod tests { |
96 | use test_utils::assert_eq_text; | 96 | use test_utils::assert_eq_text; |
97 | 97 | ||
98 | use crate::mock_analysis::{single_file, single_file_with_range}; | 98 | use crate::mock_analysis::{single_file, single_file_with_range}; |
99 | 99 | ||
100 | #[test] | 100 | #[test] |
101 | fn test_syntax_tree_without_range() { | 101 | fn test_syntax_tree_without_range() { |
102 | // Basic syntax | 102 | // Basic syntax |
103 | let (analysis, file_id) = single_file(r#"fn foo() {}"#); | 103 | let (analysis, file_id) = single_file(r#"fn foo() {}"#); |
104 | let syn = analysis.syntax_tree(file_id, None).unwrap(); | 104 | let syn = analysis.syntax_tree(file_id, None).unwrap(); |
105 | 105 | ||
106 | assert_eq_text!( | 106 | assert_eq_text!( |
107 | syn.trim(), | 107 | syn.trim(), |
108 | r#" | 108 | r#" |
109 | SOURCE_FILE@[0; 11) | 109 | SOURCE_FILE@[0; 11) |
110 | FN_DEF@[0; 11) | 110 | FN_DEF@[0; 11) |
111 | FN_KW@[0; 2) "fn" | 111 | FN_KW@[0; 2) "fn" |
112 | WHITESPACE@[2; 3) " " | 112 | WHITESPACE@[2; 3) " " |
113 | NAME@[3; 6) | 113 | NAME@[3; 6) |
114 | IDENT@[3; 6) "foo" | 114 | IDENT@[3; 6) "foo" |
115 | PARAM_LIST@[6; 8) | 115 | PARAM_LIST@[6; 8) |
116 | L_PAREN@[6; 7) "(" | 116 | L_PAREN@[6; 7) "(" |
117 | R_PAREN@[7; 8) ")" | 117 | R_PAREN@[7; 8) ")" |
118 | WHITESPACE@[8; 9) " " | 118 | WHITESPACE@[8; 9) " " |
119 | BLOCK_EXPR@[9; 11) | 119 | BLOCK_EXPR@[9; 11) |
120 | BLOCK@[9; 11) | 120 | BLOCK@[9; 11) |
121 | L_CURLY@[9; 10) "{" | 121 | L_CURLY@[9; 10) "{" |
122 | R_CURLY@[10; 11) "}" | 122 | R_CURLY@[10; 11) "}" |
123 | "# | 123 | "# |
124 | .trim() | 124 | .trim() |
125 | ); | 125 | ); |
126 | 126 | ||
127 | let (analysis, file_id) = single_file( | 127 | let (analysis, file_id) = single_file( |
128 | r#" | 128 | r#" |
129 | fn test() { | 129 | fn test() { |
130 | assert!(" | 130 | assert!(" |
131 | fn foo() { | 131 | fn foo() { |
132 | } | 132 | } |
133 | ", ""); | 133 | ", ""); |
134 | }"# | 134 | }"# |
135 | .trim(), | 135 | .trim(), |
136 | ); | 136 | ); |
137 | let syn = analysis.syntax_tree(file_id, None).unwrap(); | 137 | let syn = analysis.syntax_tree(file_id, None).unwrap(); |
138 | 138 | ||
139 | assert_eq_text!( | 139 | assert_eq_text!( |
140 | syn.trim(), | 140 | syn.trim(), |
141 | r#" | 141 | r#" |
142 | SOURCE_FILE@[0; 60) | 142 | SOURCE_FILE@[0; 60) |
143 | FN_DEF@[0; 60) | 143 | FN_DEF@[0; 60) |
144 | FN_KW@[0; 2) "fn" | 144 | FN_KW@[0; 2) "fn" |
145 | WHITESPACE@[2; 3) " " | 145 | WHITESPACE@[2; 3) " " |
146 | NAME@[3; 7) | 146 | NAME@[3; 7) |
147 | IDENT@[3; 7) "test" | 147 | IDENT@[3; 7) "test" |
148 | PARAM_LIST@[7; 9) | 148 | PARAM_LIST@[7; 9) |
149 | L_PAREN@[7; 8) "(" | 149 | L_PAREN@[7; 8) "(" |
150 | R_PAREN@[8; 9) ")" | 150 | R_PAREN@[8; 9) ")" |
151 | WHITESPACE@[9; 10) " " | 151 | WHITESPACE@[9; 10) " " |
152 | BLOCK_EXPR@[10; 60) | 152 | BLOCK_EXPR@[10; 60) |
153 | BLOCK@[10; 60) | 153 | BLOCK@[10; 60) |
154 | L_CURLY@[10; 11) "{" | 154 | L_CURLY@[10; 11) "{" |
155 | WHITESPACE@[11; 16) "\n " | 155 | WHITESPACE@[11; 16) "\n " |
156 | EXPR_STMT@[16; 58) | 156 | EXPR_STMT@[16; 58) |
157 | MACRO_CALL@[16; 57) | 157 | MACRO_CALL@[16; 57) |
158 | PATH@[16; 22) | 158 | PATH@[16; 22) |
159 | PATH_SEGMENT@[16; 22) | 159 | PATH_SEGMENT@[16; 22) |
160 | NAME_REF@[16; 22) | 160 | NAME_REF@[16; 22) |
161 | IDENT@[16; 22) "assert" | 161 | IDENT@[16; 22) "assert" |
162 | EXCL@[22; 23) "!" | 162 | EXCL@[22; 23) "!" |
163 | TOKEN_TREE@[23; 57) | 163 | TOKEN_TREE@[23; 57) |
164 | L_PAREN@[23; 24) "(" | 164 | L_PAREN@[23; 24) "(" |
165 | STRING@[24; 52) "\"\n fn foo() {\n ..." | 165 | STRING@[24; 52) "\"\n fn foo() {\n ..." |
166 | COMMA@[52; 53) "," | 166 | COMMA@[52; 53) "," |
167 | WHITESPACE@[53; 54) " " | 167 | WHITESPACE@[53; 54) " " |
168 | STRING@[54; 56) "\"\"" | 168 | STRING@[54; 56) "\"\"" |
169 | R_PAREN@[56; 57) ")" | 169 | R_PAREN@[56; 57) ")" |
170 | SEMI@[57; 58) ";" | 170 | SEMI@[57; 58) ";" |
171 | WHITESPACE@[58; 59) "\n" | 171 | WHITESPACE@[58; 59) "\n" |
172 | R_CURLY@[59; 60) "}" | 172 | R_CURLY@[59; 60) "}" |
173 | "# | 173 | "# |
174 | .trim() | 174 | .trim() |
175 | ); | 175 | ); |
176 | } | 176 | } |
177 | 177 | ||
178 | #[test] | 178 | #[test] |
179 | fn test_syntax_tree_with_range() { | 179 | fn test_syntax_tree_with_range() { |
180 | let (analysis, range) = single_file_with_range(r#"<|>fn foo() {}<|>"#.trim()); | 180 | let (analysis, range) = single_file_with_range(r#"<|>fn foo() {}<|>"#.trim()); |
181 | let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); | 181 | let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); |
182 | 182 | ||
183 | assert_eq_text!( | 183 | assert_eq_text!( |
184 | syn.trim(), | 184 | syn.trim(), |
185 | r#" | 185 | r#" |
186 | FN_DEF@[0; 11) | 186 | FN_DEF@[0; 11) |
187 | FN_KW@[0; 2) "fn" | 187 | FN_KW@[0; 2) "fn" |
188 | WHITESPACE@[2; 3) " " | 188 | WHITESPACE@[2; 3) " " |
189 | NAME@[3; 6) | 189 | NAME@[3; 6) |
190 | IDENT@[3; 6) "foo" | 190 | IDENT@[3; 6) "foo" |
191 | PARAM_LIST@[6; 8) | 191 | PARAM_LIST@[6; 8) |
192 | L_PAREN@[6; 7) "(" | 192 | L_PAREN@[6; 7) "(" |
193 | R_PAREN@[7; 8) ")" | 193 | R_PAREN@[7; 8) ")" |
194 | WHITESPACE@[8; 9) " " | 194 | WHITESPACE@[8; 9) " " |
195 | BLOCK_EXPR@[9; 11) | 195 | BLOCK_EXPR@[9; 11) |
196 | BLOCK@[9; 11) | 196 | BLOCK@[9; 11) |
197 | L_CURLY@[9; 10) "{" | 197 | L_CURLY@[9; 10) "{" |
198 | R_CURLY@[10; 11) "}" | 198 | R_CURLY@[10; 11) "}" |
199 | "# | 199 | "# |
200 | .trim() | 200 | .trim() |
201 | ); | 201 | ); |
202 | 202 | ||
203 | let (analysis, range) = single_file_with_range( | 203 | let (analysis, range) = single_file_with_range( |
204 | r#"fn test() { | 204 | r#"fn test() { |
205 | <|>assert!(" | 205 | <|>assert!(" |
206 | fn foo() { | 206 | fn foo() { |
207 | } | 207 | } |
208 | ", "");<|> | 208 | ", "");<|> |
209 | }"# | 209 | }"# |
210 | .trim(), | 210 | .trim(), |
211 | ); | 211 | ); |
212 | let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); | 212 | let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); |
213 | 213 | ||
214 | assert_eq_text!( | 214 | assert_eq_text!( |
215 | syn.trim(), | 215 | syn.trim(), |
216 | r#" | 216 | r#" |
217 | EXPR_STMT@[16; 58) | 217 | EXPR_STMT@[16; 58) |
218 | MACRO_CALL@[16; 57) | 218 | MACRO_CALL@[16; 57) |
219 | PATH@[16; 22) | 219 | PATH@[16; 22) |
220 | PATH_SEGMENT@[16; 22) | 220 | PATH_SEGMENT@[16; 22) |
221 | NAME_REF@[16; 22) | 221 | NAME_REF@[16; 22) |
222 | IDENT@[16; 22) "assert" | 222 | IDENT@[16; 22) "assert" |
223 | EXCL@[22; 23) "!" | 223 | EXCL@[22; 23) "!" |
224 | TOKEN_TREE@[23; 57) | 224 | TOKEN_TREE@[23; 57) |
225 | L_PAREN@[23; 24) "(" | 225 | L_PAREN@[23; 24) "(" |
226 | STRING@[24; 52) "\"\n fn foo() {\n ..." | 226 | STRING@[24; 52) "\"\n fn foo() {\n ..." |
227 | COMMA@[52; 53) "," | 227 | COMMA@[52; 53) "," |
228 | WHITESPACE@[53; 54) " " | 228 | WHITESPACE@[53; 54) " " |
229 | STRING@[54; 56) "\"\"" | 229 | STRING@[54; 56) "\"\"" |
230 | R_PAREN@[56; 57) ")" | 230 | R_PAREN@[56; 57) ")" |
231 | SEMI@[57; 58) ";" | 231 | SEMI@[57; 58) ";" |
232 | "# | 232 | "# |
233 | .trim() | 233 | .trim() |
234 | ); | 234 | ); |
235 | } | 235 | } |
236 | 236 | ||
237 | #[test] | 237 | #[test] |
238 | fn test_syntax_tree_inside_string() { | 238 | fn test_syntax_tree_inside_string() { |
239 | let (analysis, range) = single_file_with_range( | 239 | let (analysis, range) = single_file_with_range( |
240 | r#"fn test() { | 240 | r#"fn test() { |
241 | assert!(" | 241 | assert!(" |
242 | <|>fn foo() { | 242 | <|>fn foo() { |
243 | }<|> | 243 | }<|> |
244 | fn bar() { | 244 | fn bar() { |
245 | } | 245 | } |
246 | ", ""); | 246 | ", ""); |
247 | }"# | 247 | }"# |
248 | .trim(), | 248 | .trim(), |
249 | ); | 249 | ); |
250 | let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); | 250 | let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); |
251 | assert_eq_text!( | 251 | assert_eq_text!( |
252 | syn.trim(), | 252 | syn.trim(), |
253 | r#" | 253 | r#" |
254 | SOURCE_FILE@[0; 12) | 254 | SOURCE_FILE@[0; 12) |
255 | FN_DEF@[0; 12) | 255 | FN_DEF@[0; 12) |
256 | FN_KW@[0; 2) "fn" | 256 | FN_KW@[0; 2) "fn" |
257 | WHITESPACE@[2; 3) " " | 257 | WHITESPACE@[2; 3) " " |
258 | NAME@[3; 6) | 258 | NAME@[3; 6) |
259 | IDENT@[3; 6) "foo" | 259 | IDENT@[3; 6) "foo" |
260 | PARAM_LIST@[6; 8) | 260 | PARAM_LIST@[6; 8) |
261 | L_PAREN@[6; 7) "(" | 261 | L_PAREN@[6; 7) "(" |
262 | R_PAREN@[7; 8) ")" | 262 | R_PAREN@[7; 8) ")" |
263 | WHITESPACE@[8; 9) " " | 263 | WHITESPACE@[8; 9) " " |
264 | BLOCK_EXPR@[9; 12) | 264 | BLOCK_EXPR@[9; 12) |
265 | BLOCK@[9; 12) | 265 | BLOCK@[9; 12) |
266 | L_CURLY@[9; 10) "{" | 266 | L_CURLY@[9; 10) "{" |
267 | WHITESPACE@[10; 11) "\n" | 267 | WHITESPACE@[10; 11) "\n" |
268 | R_CURLY@[11; 12) "}" | 268 | R_CURLY@[11; 12) "}" |
269 | "# | 269 | "# |
270 | .trim() | 270 | .trim() |
271 | ); | 271 | ); |
272 | 272 | ||
273 | // With a raw string | 273 | // With a raw string |
274 | let (analysis, range) = single_file_with_range( | 274 | let (analysis, range) = single_file_with_range( |
275 | r###"fn test() { | 275 | r###"fn test() { |
276 | assert!(r#" | 276 | assert!(r#" |
277 | <|>fn foo() { | 277 | <|>fn foo() { |
278 | }<|> | 278 | }<|> |
279 | fn bar() { | 279 | fn bar() { |
280 | } | 280 | } |
281 | "#, ""); | 281 | "#, ""); |
282 | }"### | 282 | }"### |
283 | .trim(), | 283 | .trim(), |
284 | ); | 284 | ); |
285 | let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); | 285 | let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); |
286 | assert_eq_text!( | 286 | assert_eq_text!( |
287 | syn.trim(), | 287 | syn.trim(), |
288 | r#" | 288 | r#" |
289 | SOURCE_FILE@[0; 12) | 289 | SOURCE_FILE@[0; 12) |
290 | FN_DEF@[0; 12) | 290 | FN_DEF@[0; 12) |
291 | FN_KW@[0; 2) "fn" | 291 | FN_KW@[0; 2) "fn" |
292 | WHITESPACE@[2; 3) " " | 292 | WHITESPACE@[2; 3) " " |
293 | NAME@[3; 6) | 293 | NAME@[3; 6) |
294 | IDENT@[3; 6) "foo" | 294 | IDENT@[3; 6) "foo" |
295 | PARAM_LIST@[6; 8) | 295 | PARAM_LIST@[6; 8) |
296 | L_PAREN@[6; 7) "(" | 296 | L_PAREN@[6; 7) "(" |
297 | R_PAREN@[7; 8) ")" | 297 | R_PAREN@[7; 8) ")" |
298 | WHITESPACE@[8; 9) " " | 298 | WHITESPACE@[8; 9) " " |
299 | BLOCK_EXPR@[9; 12) | 299 | BLOCK_EXPR@[9; 12) |
300 | BLOCK@[9; 12) | 300 | BLOCK@[9; 12) |
301 | L_CURLY@[9; 10) "{" | 301 | L_CURLY@[9; 10) "{" |
302 | WHITESPACE@[10; 11) "\n" | 302 | WHITESPACE@[10; 11) "\n" |
303 | R_CURLY@[11; 12) "}" | 303 | R_CURLY@[11; 12) "}" |
304 | "# | 304 | "# |
305 | .trim() | 305 | .trim() |
306 | ); | 306 | ); |
307 | 307 | ||
308 | // With a raw string | 308 | // With a raw string |
309 | let (analysis, range) = single_file_with_range( | 309 | let (analysis, range) = single_file_with_range( |
310 | r###"fn test() { | 310 | r###"fn test() { |
311 | assert!(r<|>#" | 311 | assert!(r<|>#" |
312 | fn foo() { | 312 | fn foo() { |
313 | } | 313 | } |
314 | fn bar() { | 314 | fn bar() { |
315 | }"<|>#, ""); | 315 | }"<|>#, ""); |
316 | }"### | 316 | }"### |
317 | .trim(), | 317 | .trim(), |
318 | ); | 318 | ); |
319 | let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); | 319 | let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); |
320 | assert_eq_text!( | 320 | assert_eq_text!( |
321 | syn.trim(), | 321 | syn.trim(), |
322 | r#" | 322 | r#" |
323 | SOURCE_FILE@[0; 25) | 323 | SOURCE_FILE@[0; 25) |
324 | FN_DEF@[0; 12) | 324 | FN_DEF@[0; 12) |
325 | FN_KW@[0; 2) "fn" | 325 | FN_KW@[0; 2) "fn" |
326 | WHITESPACE@[2; 3) " " | 326 | WHITESPACE@[2; 3) " " |
327 | NAME@[3; 6) | 327 | NAME@[3; 6) |
328 | IDENT@[3; 6) "foo" | 328 | IDENT@[3; 6) "foo" |
329 | PARAM_LIST@[6; 8) | 329 | PARAM_LIST@[6; 8) |
330 | L_PAREN@[6; 7) "(" | 330 | L_PAREN@[6; 7) "(" |
331 | R_PAREN@[7; 8) ")" | 331 | R_PAREN@[7; 8) ")" |
332 | WHITESPACE@[8; 9) " " | 332 | WHITESPACE@[8; 9) " " |
333 | BLOCK_EXPR@[9; 12) | 333 | BLOCK_EXPR@[9; 12) |
334 | BLOCK@[9; 12) | 334 | BLOCK@[9; 12) |
335 | L_CURLY@[9; 10) "{" | 335 | L_CURLY@[9; 10) "{" |
336 | WHITESPACE@[10; 11) "\n" | 336 | WHITESPACE@[10; 11) "\n" |
337 | R_CURLY@[11; 12) "}" | 337 | R_CURLY@[11; 12) "}" |
338 | WHITESPACE@[12; 13) "\n" | 338 | WHITESPACE@[12; 13) "\n" |
339 | FN_DEF@[13; 25) | 339 | FN_DEF@[13; 25) |
340 | FN_KW@[13; 15) "fn" | 340 | FN_KW@[13; 15) "fn" |
341 | WHITESPACE@[15; 16) " " | 341 | WHITESPACE@[15; 16) " " |
342 | NAME@[16; 19) | 342 | NAME@[16; 19) |
343 | IDENT@[16; 19) "bar" | 343 | IDENT@[16; 19) "bar" |
344 | PARAM_LIST@[19; 21) | 344 | PARAM_LIST@[19; 21) |
345 | L_PAREN@[19; 20) "(" | 345 | L_PAREN@[19; 20) "(" |
346 | R_PAREN@[20; 21) ")" | 346 | R_PAREN@[20; 21) ")" |
347 | WHITESPACE@[21; 22) " " | 347 | WHITESPACE@[21; 22) " " |
348 | BLOCK_EXPR@[22; 25) | 348 | BLOCK_EXPR@[22; 25) |
349 | BLOCK@[22; 25) | 349 | BLOCK@[22; 25) |
350 | L_CURLY@[22; 23) "{" | 350 | L_CURLY@[22; 23) "{" |
351 | WHITESPACE@[23; 24) "\n" | 351 | WHITESPACE@[23; 24) "\n" |
352 | R_CURLY@[24; 25) "}" | 352 | R_CURLY@[24; 25) "}" |
353 | "# | 353 | "# |
354 | .trim() | 354 | .trim() |
355 | ); | 355 | ); |
356 | } | 356 | } |
357 | } | 357 | } |
diff --git a/crates/ra_lsp_server/Cargo.toml b/crates/ra_lsp_server/Cargo.toml index eb4812633..46a0f958c 100644 --- a/crates/ra_lsp_server/Cargo.toml +++ b/crates/ra_lsp_server/Cargo.toml | |||
@@ -12,11 +12,11 @@ serde = { version = "1.0.83", features = ["derive"] } | |||
12 | crossbeam-channel = "0.3.5" | 12 | crossbeam-channel = "0.3.5" |
13 | flexi_logger = "0.14.0" | 13 | flexi_logger = "0.14.0" |
14 | log = "0.4.3" | 14 | log = "0.4.3" |
15 | lsp-types = { version = "0.60.0", features = ["proposed"] } | 15 | lsp-types = { version = "0.61.0", features = ["proposed"] } |
16 | rustc-hash = "1.0" | 16 | rustc-hash = "1.0" |
17 | parking_lot = "0.9.0" | 17 | parking_lot = "0.9.0" |
18 | jod-thread = "0.1.0" | 18 | jod-thread = "0.1.0" |
19 | ra_vfs = "0.3.0" | 19 | ra_vfs = "0.4.0" |
20 | ra_syntax = { path = "../ra_syntax" } | 20 | ra_syntax = { path = "../ra_syntax" } |
21 | ra_text_edit = { path = "../ra_text_edit" } | 21 | ra_text_edit = { path = "../ra_text_edit" } |
22 | ra_ide_api = { path = "../ra_ide_api" } | 22 | ra_ide_api = { path = "../ra_ide_api" } |
diff --git a/crates/ra_lsp_server/src/caps.rs b/crates/ra_lsp_server/src/caps.rs index bb9205aed..22fc97a97 100644 --- a/crates/ra_lsp_server/src/caps.rs +++ b/crates/ra_lsp_server/src/caps.rs | |||
@@ -43,6 +43,7 @@ pub fn server_capabilities() -> ServerCapabilities { | |||
43 | rename_provider: Some(RenameProviderCapability::Options(RenameOptions { | 43 | rename_provider: Some(RenameProviderCapability::Options(RenameOptions { |
44 | prepare_provider: Some(true), | 44 | prepare_provider: Some(true), |
45 | })), | 45 | })), |
46 | document_link_provider: None, | ||
46 | color_provider: None, | 47 | color_provider: None, |
47 | execute_command_provider: None, | 48 | execute_command_provider: None, |
48 | workspace: None, | 49 | workspace: None, |
diff --git a/crates/ra_lsp_server/src/config.rs b/crates/ra_lsp_server/src/config.rs index 5c5ae3e18..cf53e7c4c 100644 --- a/crates/ra_lsp_server/src/config.rs +++ b/crates/ra_lsp_server/src/config.rs | |||
@@ -15,6 +15,8 @@ pub struct ServerConfig { | |||
15 | pub publish_decorations: bool, | 15 | pub publish_decorations: bool, |
16 | 16 | ||
17 | pub exclude_globs: Vec<String>, | 17 | pub exclude_globs: Vec<String>, |
18 | #[serde(deserialize_with = "nullable_bool_false")] | ||
19 | pub use_client_watching: bool, | ||
18 | 20 | ||
19 | pub lru_capacity: Option<usize>, | 21 | pub lru_capacity: Option<usize>, |
20 | 22 | ||
@@ -31,6 +33,7 @@ impl Default for ServerConfig { | |||
31 | ServerConfig { | 33 | ServerConfig { |
32 | publish_decorations: false, | 34 | publish_decorations: false, |
33 | exclude_globs: Vec::new(), | 35 | exclude_globs: Vec::new(), |
36 | use_client_watching: false, | ||
34 | lru_capacity: None, | 37 | lru_capacity: None, |
35 | with_sysroot: true, | 38 | with_sysroot: true, |
36 | feature_flags: FxHashMap::default(), | 39 | feature_flags: FxHashMap::default(), |
diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs index 80f0216e8..25fa51b8a 100644 --- a/crates/ra_lsp_server/src/main_loop.rs +++ b/crates/ra_lsp_server/src/main_loop.rs | |||
@@ -9,8 +9,9 @@ use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestI | |||
9 | use lsp_types::{ClientCapabilities, NumberOrString}; | 9 | use lsp_types::{ClientCapabilities, NumberOrString}; |
10 | use ra_ide_api::{Canceled, FeatureFlags, FileId, LibraryData, SourceRootId}; | 10 | use ra_ide_api::{Canceled, FeatureFlags, FileId, LibraryData, SourceRootId}; |
11 | use ra_prof::profile; | 11 | use ra_prof::profile; |
12 | use ra_vfs::VfsTask; | 12 | use ra_vfs::{VfsTask, Watch}; |
13 | use relative_path::RelativePathBuf; | 13 | use relative_path::RelativePathBuf; |
14 | use rustc_hash::FxHashSet; | ||
14 | use serde::{de::DeserializeOwned, Serialize}; | 15 | use serde::{de::DeserializeOwned, Serialize}; |
15 | use threadpool::ThreadPool; | 16 | use threadpool::ThreadPool; |
16 | 17 | ||
@@ -55,72 +56,96 @@ pub fn main_loop( | |||
55 | ) -> Result<()> { | 56 | ) -> Result<()> { |
56 | log::info!("server_config: {:#?}", config); | 57 | log::info!("server_config: {:#?}", config); |
57 | 58 | ||
58 | // FIXME: support dynamic workspace loading. | 59 | let mut loop_state = LoopState::default(); |
59 | let workspaces = { | 60 | let mut world_state = { |
60 | let mut loaded_workspaces = Vec::new(); | 61 | // FIXME: support dynamic workspace loading. |
61 | for ws_root in &ws_roots { | 62 | let workspaces = { |
62 | let workspace = ra_project_model::ProjectWorkspace::discover_with_sysroot( | 63 | let mut loaded_workspaces = Vec::new(); |
63 | ws_root.as_path(), | 64 | for ws_root in &ws_roots { |
64 | config.with_sysroot, | 65 | let workspace = ra_project_model::ProjectWorkspace::discover_with_sysroot( |
65 | ); | 66 | ws_root.as_path(), |
66 | match workspace { | 67 | config.with_sysroot, |
67 | Ok(workspace) => loaded_workspaces.push(workspace), | 68 | ); |
68 | Err(e) => { | 69 | match workspace { |
69 | log::error!("loading workspace failed: {}", e); | 70 | Ok(workspace) => loaded_workspaces.push(workspace), |
71 | Err(e) => { | ||
72 | log::error!("loading workspace failed: {}", e); | ||
73 | |||
74 | show_message( | ||
75 | req::MessageType::Error, | ||
76 | format!("rust-analyzer failed to load workspace: {}", e), | ||
77 | &connection.sender, | ||
78 | ); | ||
79 | } | ||
80 | } | ||
81 | } | ||
82 | loaded_workspaces | ||
83 | }; | ||
70 | 84 | ||
85 | let globs = config | ||
86 | .exclude_globs | ||
87 | .iter() | ||
88 | .map(|glob| ra_vfs_glob::Glob::new(glob)) | ||
89 | .collect::<std::result::Result<Vec<_>, _>>()?; | ||
90 | |||
91 | if config.use_client_watching { | ||
92 | let registration_options = req::DidChangeWatchedFilesRegistrationOptions { | ||
93 | watchers: workspaces | ||
94 | .iter() | ||
95 | .flat_map(|ws| ws.to_roots()) | ||
96 | .filter(|root| root.is_member()) | ||
97 | .map(|root| format!("{}/**/*.rs", root.path().display())) | ||
98 | .map(|glob_pattern| req::FileSystemWatcher { glob_pattern, kind: None }) | ||
99 | .collect(), | ||
100 | }; | ||
101 | let registration = req::Registration { | ||
102 | id: "file-watcher".to_string(), | ||
103 | method: "workspace/didChangeWatchedFiles".to_string(), | ||
104 | register_options: Some(serde_json::to_value(registration_options).unwrap()), | ||
105 | }; | ||
106 | let params = req::RegistrationParams { registrations: vec![registration] }; | ||
107 | let request = | ||
108 | request_new::<req::RegisterCapability>(loop_state.next_request_id(), params); | ||
109 | connection.sender.send(request.into()).unwrap(); | ||
110 | } | ||
111 | |||
112 | let feature_flags = { | ||
113 | let mut ff = FeatureFlags::default(); | ||
114 | for (flag, value) in config.feature_flags { | ||
115 | if let Err(_) = ff.set(flag.as_str(), value) { | ||
116 | log::error!("unknown feature flag: {:?}", flag); | ||
71 | show_message( | 117 | show_message( |
72 | req::MessageType::Error, | 118 | req::MessageType::Error, |
73 | format!("rust-analyzer failed to load workspace: {}", e), | 119 | format!("unknown feature flag: {:?}", flag), |
74 | &connection.sender, | 120 | &connection.sender, |
75 | ); | 121 | ); |
76 | } | 122 | } |
77 | } | 123 | } |
78 | } | 124 | ff |
79 | loaded_workspaces | 125 | }; |
80 | }; | 126 | log::info!("feature_flags: {:#?}", feature_flags); |
81 | 127 | ||
82 | let globs = config | 128 | WorldState::new( |
83 | .exclude_globs | 129 | ws_roots, |
84 | .iter() | 130 | workspaces, |
85 | .map(|glob| ra_vfs_glob::Glob::new(glob)) | 131 | config.lru_capacity, |
86 | .collect::<std::result::Result<Vec<_>, _>>()?; | 132 | &globs, |
87 | 133 | Watch(!config.use_client_watching), | |
88 | let feature_flags = { | 134 | Options { |
89 | let mut ff = FeatureFlags::default(); | 135 | publish_decorations: config.publish_decorations, |
90 | for (flag, value) in config.feature_flags { | 136 | supports_location_link: client_caps |
91 | if let Err(_) = ff.set(flag.as_str(), value) { | 137 | .text_document |
92 | log::error!("unknown feature flag: {:?}", flag); | 138 | .and_then(|it| it.definition) |
93 | show_message( | 139 | .and_then(|it| it.link_support) |
94 | req::MessageType::Error, | 140 | .unwrap_or(false), |
95 | format!("unknown feature flag: {:?}", flag), | 141 | }, |
96 | &connection.sender, | 142 | feature_flags, |
97 | ); | 143 | ) |
98 | } | ||
99 | } | ||
100 | ff | ||
101 | }; | 144 | }; |
102 | log::info!("feature_flags: {:#?}", feature_flags); | ||
103 | |||
104 | let mut world_state = WorldState::new( | ||
105 | ws_roots, | ||
106 | workspaces, | ||
107 | config.lru_capacity, | ||
108 | &globs, | ||
109 | Options { | ||
110 | publish_decorations: config.publish_decorations, | ||
111 | supports_location_link: client_caps | ||
112 | .text_document | ||
113 | .and_then(|it| it.definition) | ||
114 | .and_then(|it| it.link_support) | ||
115 | .unwrap_or(false), | ||
116 | }, | ||
117 | feature_flags, | ||
118 | ); | ||
119 | 145 | ||
120 | let pool = ThreadPool::new(THREADPOOL_SIZE); | 146 | let pool = ThreadPool::new(THREADPOOL_SIZE); |
121 | let (task_sender, task_receiver) = unbounded::<Task>(); | 147 | let (task_sender, task_receiver) = unbounded::<Task>(); |
122 | let (libdata_sender, libdata_receiver) = unbounded::<LibraryData>(); | 148 | let (libdata_sender, libdata_receiver) = unbounded::<LibraryData>(); |
123 | let mut loop_state = LoopState::default(); | ||
124 | 149 | ||
125 | log::info!("server initialized, serving requests"); | 150 | log::info!("server initialized, serving requests"); |
126 | { | 151 | { |
@@ -227,6 +252,8 @@ impl fmt::Debug for Event { | |||
227 | 252 | ||
228 | #[derive(Debug, Default)] | 253 | #[derive(Debug, Default)] |
229 | struct LoopState { | 254 | struct LoopState { |
255 | next_request_id: u64, | ||
256 | pending_responses: FxHashSet<RequestId>, | ||
230 | pending_requests: PendingRequests, | 257 | pending_requests: PendingRequests, |
231 | subscriptions: Subscriptions, | 258 | subscriptions: Subscriptions, |
232 | // We try not to index more than MAX_IN_FLIGHT_LIBS libraries at the same | 259 | // We try not to index more than MAX_IN_FLIGHT_LIBS libraries at the same |
@@ -236,6 +263,16 @@ struct LoopState { | |||
236 | workspace_loaded: bool, | 263 | workspace_loaded: bool, |
237 | } | 264 | } |
238 | 265 | ||
266 | impl LoopState { | ||
267 | fn next_request_id(&mut self) -> RequestId { | ||
268 | self.next_request_id += 1; | ||
269 | let res: RequestId = self.next_request_id.into(); | ||
270 | let inserted = self.pending_responses.insert(res.clone()); | ||
271 | assert!(inserted); | ||
272 | res | ||
273 | } | ||
274 | } | ||
275 | |||
239 | fn loop_turn( | 276 | fn loop_turn( |
240 | pool: &ThreadPool, | 277 | pool: &ThreadPool, |
241 | task_sender: &Sender<Task>, | 278 | task_sender: &Sender<Task>, |
@@ -290,7 +327,12 @@ fn loop_turn( | |||
290 | )?; | 327 | )?; |
291 | state_changed = true; | 328 | state_changed = true; |
292 | } | 329 | } |
293 | Message::Response(resp) => log::error!("unexpected response: {:?}", resp), | 330 | Message::Response(resp) => { |
331 | let removed = loop_state.pending_responses.remove(&resp.id); | ||
332 | if !removed { | ||
333 | log::error!("unexpected response: {:?}", resp) | ||
334 | } | ||
335 | } | ||
294 | }, | 336 | }, |
295 | }; | 337 | }; |
296 | 338 | ||
@@ -479,6 +521,18 @@ fn on_notification( | |||
479 | } | 521 | } |
480 | Err(not) => not, | 522 | Err(not) => not, |
481 | }; | 523 | }; |
524 | let not = match notification_cast::<req::DidChangeWatchedFiles>(not) { | ||
525 | Ok(params) => { | ||
526 | let mut vfs = state.vfs.write(); | ||
527 | for change in params.changes { | ||
528 | let uri = change.uri; | ||
529 | let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; | ||
530 | vfs.notify_changed(path) | ||
531 | } | ||
532 | return Ok(()); | ||
533 | } | ||
534 | Err(not) => not, | ||
535 | }; | ||
482 | log::error!("unhandled notification: {:?}", not); | 536 | log::error!("unhandled notification: {:?}", not); |
483 | Ok(()) | 537 | Ok(()) |
484 | } | 538 | } |
@@ -682,3 +736,11 @@ where | |||
682 | { | 736 | { |
683 | Notification::new(N::METHOD.to_string(), params) | 737 | Notification::new(N::METHOD.to_string(), params) |
684 | } | 738 | } |
739 | |||
740 | fn request_new<R>(id: RequestId, params: R::Params) -> Request | ||
741 | where | ||
742 | R: lsp_types::request::Request, | ||
743 | R::Params: Serialize, | ||
744 | { | ||
745 | Request::new(id, R::METHOD.to_string(), params) | ||
746 | } | ||
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs index eb805a6d3..948d543ea 100644 --- a/crates/ra_lsp_server/src/main_loop/handlers.rs +++ b/crates/ra_lsp_server/src/main_loop/handlers.rs | |||
@@ -460,18 +460,16 @@ pub fn handle_prepare_rename( | |||
460 | 460 | ||
461 | // We support renaming references like handle_rename does. | 461 | // We support renaming references like handle_rename does. |
462 | // In the future we may want to reject the renaming of things like keywords here too. | 462 | // In the future we may want to reject the renaming of things like keywords here too. |
463 | let refs = match world.analysis().find_all_refs(position)? { | 463 | let optional_change = world.analysis().rename(position, "dummy")?; |
464 | let range = match optional_change { | ||
464 | None => return Ok(None), | 465 | None => return Ok(None), |
465 | Some(refs) => refs, | 466 | Some(it) => it.range, |
466 | }; | 467 | }; |
467 | 468 | ||
468 | // Refs should always have a declaration | ||
469 | let r = refs.declaration(); | ||
470 | let file_id = params.text_document.try_conv_with(&world)?; | 469 | let file_id = params.text_document.try_conv_with(&world)?; |
471 | let line_index = world.analysis().file_line_index(file_id)?; | 470 | let line_index = world.analysis().file_line_index(file_id)?; |
472 | let loc = to_location(r.file_id(), r.range(), &world, &line_index)?; | 471 | let range = range.conv_with(&line_index); |
473 | 472 | Ok(Some(PrepareRenameResponse::Range(range))) | |
474 | Ok(Some(PrepareRenameResponse::Range(loc.range))) | ||
475 | } | 473 | } |
476 | 474 | ||
477 | pub fn handle_rename(world: WorldSnapshot, params: RenameParams) -> Result<Option<WorkspaceEdit>> { | 475 | pub fn handle_rename(world: WorldSnapshot, params: RenameParams) -> Result<Option<WorkspaceEdit>> { |
@@ -488,7 +486,7 @@ pub fn handle_rename(world: WorldSnapshot, params: RenameParams) -> Result<Optio | |||
488 | let optional_change = world.analysis().rename(position, &*params.new_name)?; | 486 | let optional_change = world.analysis().rename(position, &*params.new_name)?; |
489 | let change = match optional_change { | 487 | let change = match optional_change { |
490 | None => return Ok(None), | 488 | None => return Ok(None), |
491 | Some(it) => it, | 489 | Some(it) => it.info, |
492 | }; | 490 | }; |
493 | 491 | ||
494 | let source_change_req = change.try_conv_with(&world)?; | 492 | let source_change_req = change.try_conv_with(&world)?; |
diff --git a/crates/ra_lsp_server/src/req.rs b/crates/ra_lsp_server/src/req.rs index 1b23f0c3d..0540f166e 100644 --- a/crates/ra_lsp_server/src/req.rs +++ b/crates/ra_lsp_server/src/req.rs | |||
@@ -5,10 +5,11 @@ use serde::{Deserialize, Serialize}; | |||
5 | pub use lsp_types::{ | 5 | pub use lsp_types::{ |
6 | notification::*, request::*, ApplyWorkspaceEditParams, CodeActionParams, CodeLens, | 6 | notification::*, request::*, ApplyWorkspaceEditParams, CodeActionParams, CodeLens, |
7 | CodeLensParams, CompletionParams, CompletionResponse, DidChangeConfigurationParams, | 7 | CodeLensParams, CompletionParams, CompletionResponse, DidChangeConfigurationParams, |
8 | DocumentOnTypeFormattingParams, DocumentSymbolParams, DocumentSymbolResponse, Hover, | 8 | DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, |
9 | InitializeResult, MessageType, PublishDiagnosticsParams, ReferenceParams, ShowMessageParams, | 9 | DocumentOnTypeFormattingParams, DocumentSymbolParams, DocumentSymbolResponse, |
10 | SignatureHelp, TextDocumentEdit, TextDocumentPositionParams, TextEdit, WorkspaceEdit, | 10 | FileSystemWatcher, Hover, InitializeResult, MessageType, PublishDiagnosticsParams, |
11 | WorkspaceSymbolParams, | 11 | ReferenceParams, Registration, RegistrationParams, ShowMessageParams, SignatureHelp, |
12 | TextDocumentEdit, TextDocumentPositionParams, TextEdit, WorkspaceEdit, WorkspaceSymbolParams, | ||
12 | }; | 13 | }; |
13 | 14 | ||
14 | pub enum AnalyzerStatus {} | 15 | pub enum AnalyzerStatus {} |
diff --git a/crates/ra_lsp_server/src/world.rs b/crates/ra_lsp_server/src/world.rs index e1c5c3343..086ecd587 100644 --- a/crates/ra_lsp_server/src/world.rs +++ b/crates/ra_lsp_server/src/world.rs | |||
@@ -12,7 +12,7 @@ use ra_ide_api::{ | |||
12 | SourceRootId, | 12 | SourceRootId, |
13 | }; | 13 | }; |
14 | use ra_project_model::ProjectWorkspace; | 14 | use ra_project_model::ProjectWorkspace; |
15 | use ra_vfs::{LineEndings, RootEntry, Vfs, VfsChange, VfsFile, VfsRoot, VfsTask}; | 15 | use ra_vfs::{LineEndings, RootEntry, Vfs, VfsChange, VfsFile, VfsRoot, VfsTask, Watch}; |
16 | use ra_vfs_glob::{Glob, RustPackageFilterBuilder}; | 16 | use ra_vfs_glob::{Glob, RustPackageFilterBuilder}; |
17 | use relative_path::RelativePathBuf; | 17 | use relative_path::RelativePathBuf; |
18 | 18 | ||
@@ -60,6 +60,7 @@ impl WorldState { | |||
60 | workspaces: Vec<ProjectWorkspace>, | 60 | workspaces: Vec<ProjectWorkspace>, |
61 | lru_capacity: Option<usize>, | 61 | lru_capacity: Option<usize>, |
62 | exclude_globs: &[Glob], | 62 | exclude_globs: &[Glob], |
63 | watch: Watch, | ||
63 | options: Options, | 64 | options: Options, |
64 | feature_flags: FeatureFlags, | 65 | feature_flags: FeatureFlags, |
65 | ) -> WorldState { | 66 | ) -> WorldState { |
@@ -85,7 +86,7 @@ impl WorldState { | |||
85 | } | 86 | } |
86 | let (task_sender, task_receiver) = unbounded(); | 87 | let (task_sender, task_receiver) = unbounded(); |
87 | let task_sender = Box::new(move |t| task_sender.send(t).unwrap()); | 88 | let task_sender = Box::new(move |t| task_sender.send(t).unwrap()); |
88 | let (mut vfs, vfs_roots) = Vfs::new(roots, task_sender); | 89 | let (mut vfs, vfs_roots) = Vfs::new(roots, task_sender, watch); |
89 | let roots_to_scan = vfs_roots.len(); | 90 | let roots_to_scan = vfs_roots.len(); |
90 | for r in vfs_roots { | 91 | for r in vfs_roots { |
91 | let vfs_root_path = vfs.root2path(r); | 92 | let vfs_root_path = vfs.root2path(r); |
diff --git a/crates/ra_parser/src/grammar/items.rs b/crates/ra_parser/src/grammar/items.rs index 6d426206e..b4327b78f 100644 --- a/crates/ra_parser/src/grammar/items.rs +++ b/crates/ra_parser/src/grammar/items.rs | |||
@@ -31,7 +31,7 @@ pub(super) enum ItemFlavor { | |||
31 | 31 | ||
32 | pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![ | 32 | pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![ |
33 | FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, MOD_KW, PUB_KW, | 33 | FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, MOD_KW, PUB_KW, |
34 | CRATE_KW | 34 | CRATE_KW, USE_KW |
35 | ]; | 35 | ]; |
36 | 36 | ||
37 | pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemFlavor) { | 37 | pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemFlavor) { |
diff --git a/crates/ra_parser/src/grammar/items/use_item.rs b/crates/ra_parser/src/grammar/items/use_item.rs index c0c7d0ec6..83a65e226 100644 --- a/crates/ra_parser/src/grammar/items/use_item.rs +++ b/crates/ra_parser/src/grammar/items/use_item.rs | |||
@@ -101,7 +101,10 @@ fn use_tree(p: &mut Parser) { | |||
101 | } | 101 | } |
102 | _ => { | 102 | _ => { |
103 | m.abandon(p); | 103 | m.abandon(p); |
104 | p.err_and_bump("expected one of `*`, `::`, `{`, `self`, `super` or an indentifier"); | 104 | p.err_recover( |
105 | "expected one of `*`, `::`, `{`, `self`, `super` or an identifier", | ||
106 | ITEM_RECOVERY_SET, | ||
107 | ); | ||
105 | return; | 108 | return; |
106 | } | 109 | } |
107 | } | 110 | } |
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs index 676dc4941..9b2f534e7 100644 --- a/crates/ra_project_model/src/lib.rs +++ b/crates/ra_project_model/src/lib.rs | |||
@@ -153,7 +153,7 @@ impl ProjectWorkspace { | |||
153 | if let Some(file_id) = load(krate.root(&sysroot)) { | 153 | if let Some(file_id) = load(krate.root(&sysroot)) { |
154 | sysroot_crates.insert( | 154 | sysroot_crates.insert( |
155 | krate, | 155 | krate, |
156 | crate_graph.add_crate_root(file_id, Edition::Edition2015), | 156 | crate_graph.add_crate_root(file_id, Edition::Edition2018), |
157 | ); | 157 | ); |
158 | } | 158 | } |
159 | } | 159 | } |
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml index 0ead277b2..d3a8b516a 100644 --- a/crates/ra_syntax/Cargo.toml +++ b/crates/ra_syntax/Cargo.toml | |||
@@ -10,7 +10,7 @@ repository = "https://github.com/rust-analyzer/rust-analyzer" | |||
10 | [dependencies] | 10 | [dependencies] |
11 | itertools = "0.8.0" | 11 | itertools = "0.8.0" |
12 | rowan = "0.6.1" | 12 | rowan = "0.6.1" |
13 | ra_rustc_lexer = { version = "0.1.0-pre.3", features = ["unicode-xid"] } | 13 | rustc_lexer = "0.1.0" |
14 | 14 | ||
15 | # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here | 15 | # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here |
16 | # to reduce number of compilations | 16 | # to reduce number of compilations |
diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs index e2a92ae60..bcf753f78 100644 --- a/crates/ra_syntax/src/ast/generated.rs +++ b/crates/ra_syntax/src/ast/generated.rs | |||
@@ -934,6 +934,7 @@ impl AstNode for ExternCrateItem { | |||
934 | &self.syntax | 934 | &self.syntax |
935 | } | 935 | } |
936 | } | 936 | } |
937 | impl ast::AttrsOwner for ExternCrateItem {} | ||
937 | impl ExternCrateItem { | 938 | impl ExternCrateItem { |
938 | pub fn name_ref(&self) -> Option<NameRef> { | 939 | pub fn name_ref(&self) -> Option<NameRef> { |
939 | AstChildren::new(&self.syntax).next() | 940 | AstChildren::new(&self.syntax).next() |
diff --git a/crates/ra_syntax/src/grammar.ron b/crates/ra_syntax/src/grammar.ron index c14ee0e85..3e6c2d3f3 100644 --- a/crates/ra_syntax/src/grammar.ron +++ b/crates/ra_syntax/src/grammar.ron | |||
@@ -669,6 +669,7 @@ Grammar( | |||
669 | collections: [("use_trees", "UseTree")] | 669 | collections: [("use_trees", "UseTree")] |
670 | ), | 670 | ), |
671 | "ExternCrateItem": ( | 671 | "ExternCrateItem": ( |
672 | traits: ["AttrsOwner"], | ||
672 | options: ["NameRef", "Alias"], | 673 | options: ["NameRef", "Alias"], |
673 | ), | 674 | ), |
674 | "ArgList": ( | 675 | "ArgList": ( |
diff --git a/crates/ra_syntax/src/ptr.rs b/crates/ra_syntax/src/ptr.rs index 80e55d2aa..992034ef0 100644 --- a/crates/ra_syntax/src/ptr.rs +++ b/crates/ra_syntax/src/ptr.rs | |||
@@ -15,8 +15,9 @@ impl SyntaxNodePtr { | |||
15 | SyntaxNodePtr { range: node.text_range(), kind: node.kind() } | 15 | SyntaxNodePtr { range: node.text_range(), kind: node.kind() } |
16 | } | 16 | } |
17 | 17 | ||
18 | pub fn to_node(self, parent: &SyntaxNode) -> SyntaxNode { | 18 | pub fn to_node(self, root: &SyntaxNode) -> SyntaxNode { |
19 | successors(Some(parent.clone()), |node| { | 19 | assert!(root.parent().is_none()); |
20 | successors(Some(root.clone()), |node| { | ||
20 | node.children().find(|it| self.range.is_subrange(&it.text_range())) | 21 | node.children().find(|it| self.range.is_subrange(&it.text_range())) |
21 | }) | 22 | }) |
22 | .find(|it| it.text_range() == self.range && it.kind() == self.kind) | 23 | .find(|it| it.text_range() == self.range && it.kind() == self.kind) |
diff --git a/crates/ra_syntax/test_data/parser/err/0002_duplicate_shebang.txt b/crates/ra_syntax/test_data/parser/err/0002_duplicate_shebang.txt index 84867026f..bdb5fa6c5 100644 --- a/crates/ra_syntax/test_data/parser/err/0002_duplicate_shebang.txt +++ b/crates/ra_syntax/test_data/parser/err/0002_duplicate_shebang.txt | |||
@@ -28,7 +28,7 @@ SOURCE_FILE@[0; 42) | |||
28 | WHITESPACE@[41; 42) "\n" | 28 | WHITESPACE@[41; 42) "\n" |
29 | error 23: expected `[` | 29 | error 23: expected `[` |
30 | error 23: expected an item | 30 | error 23: expected an item |
31 | error 27: expected one of `*`, `::`, `{`, `self`, `super` or an indentifier | 31 | error 27: expected one of `*`, `::`, `{`, `self`, `super` or an identifier |
32 | error 28: expected SEMI | 32 | error 28: expected SEMI |
33 | error 31: expected EXCL | 33 | error 31: expected EXCL |
34 | error 31: expected `{`, `[`, `(` | 34 | error 31: expected `{`, `[`, `(` |
diff --git a/crates/ra_syntax/test_data/parser/err/0035_use_recover.rs b/crates/ra_syntax/test_data/parser/err/0035_use_recover.rs new file mode 100644 index 000000000..4a2668126 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/err/0035_use_recover.rs | |||
@@ -0,0 +1,5 @@ | |||
1 | use foo::bar; | ||
2 | use | ||
3 | use crate::baz; | ||
4 | use | ||
5 | fn f() {} | ||
diff --git a/crates/ra_syntax/test_data/parser/err/0035_use_recover.txt b/crates/ra_syntax/test_data/parser/err/0035_use_recover.txt new file mode 100644 index 000000000..636840828 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/err/0035_use_recover.txt | |||
@@ -0,0 +1,54 @@ | |||
1 | SOURCE_FILE@[0; 48) | ||
2 | USE_ITEM@[0; 13) | ||
3 | USE_KW@[0; 3) "use" | ||
4 | WHITESPACE@[3; 4) " " | ||
5 | USE_TREE@[4; 12) | ||
6 | PATH@[4; 12) | ||
7 | PATH@[4; 7) | ||
8 | PATH_SEGMENT@[4; 7) | ||
9 | NAME_REF@[4; 7) | ||
10 | IDENT@[4; 7) "foo" | ||
11 | COLONCOLON@[7; 9) "::" | ||
12 | PATH_SEGMENT@[9; 12) | ||
13 | NAME_REF@[9; 12) | ||
14 | IDENT@[9; 12) "bar" | ||
15 | SEMI@[12; 13) ";" | ||
16 | WHITESPACE@[13; 14) "\n" | ||
17 | USE_ITEM@[14; 17) | ||
18 | USE_KW@[14; 17) "use" | ||
19 | WHITESPACE@[17; 18) "\n" | ||
20 | USE_ITEM@[18; 33) | ||
21 | USE_KW@[18; 21) "use" | ||
22 | WHITESPACE@[21; 22) " " | ||
23 | USE_TREE@[22; 32) | ||
24 | PATH@[22; 32) | ||
25 | PATH@[22; 27) | ||
26 | PATH_SEGMENT@[22; 27) | ||
27 | CRATE_KW@[22; 27) "crate" | ||
28 | COLONCOLON@[27; 29) "::" | ||
29 | PATH_SEGMENT@[29; 32) | ||
30 | NAME_REF@[29; 32) | ||
31 | IDENT@[29; 32) "baz" | ||
32 | SEMI@[32; 33) ";" | ||
33 | WHITESPACE@[33; 34) "\n" | ||
34 | USE_ITEM@[34; 37) | ||
35 | USE_KW@[34; 37) "use" | ||
36 | WHITESPACE@[37; 38) "\n" | ||
37 | FN_DEF@[38; 47) | ||
38 | FN_KW@[38; 40) "fn" | ||
39 | WHITESPACE@[40; 41) " " | ||
40 | NAME@[41; 42) | ||
41 | IDENT@[41; 42) "f" | ||
42 | PARAM_LIST@[42; 44) | ||
43 | L_PAREN@[42; 43) "(" | ||
44 | R_PAREN@[43; 44) ")" | ||
45 | WHITESPACE@[44; 45) " " | ||
46 | BLOCK_EXPR@[45; 47) | ||
47 | BLOCK@[45; 47) | ||
48 | L_CURLY@[45; 46) "{" | ||
49 | R_CURLY@[46; 47) "}" | ||
50 | WHITESPACE@[47; 48) "\n" | ||
51 | error 17: expected one of `*`, `::`, `{`, `self`, `super` or an identifier | ||
52 | error 17: expected SEMI | ||
53 | error 37: expected one of `*`, `::`, `{`, `self`, `super` or an identifier | ||
54 | error 37: expected SEMI | ||
diff --git a/crates/ra_vfs_glob/Cargo.toml b/crates/ra_vfs_glob/Cargo.toml index 09ba3d3bf..d1073b2be 100644 --- a/crates/ra_vfs_glob/Cargo.toml +++ b/crates/ra_vfs_glob/Cargo.toml | |||
@@ -5,5 +5,5 @@ version = "0.1.0" | |||
5 | authors = ["rust-analyzer developers"] | 5 | authors = ["rust-analyzer developers"] |
6 | 6 | ||
7 | [dependencies] | 7 | [dependencies] |
8 | ra_vfs = "0.3.0" | 8 | ra_vfs = "0.4.0" |
9 | globset = "0.4.4" | 9 | globset = "0.4.4" |