author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2019-07-19 12:15:55 +0100
committer  bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2019-07-19 12:15:55 +0100
commit     f209843e31af7f0e0212aa28ffec2efad2a70c6f (patch)
tree       548227da78a3bea644f57714d075410c0bdf7469
parent     58d4983ba5745975446d60f2886d96f8d2adf0f2 (diff)
parent     d4a66166c002f0a49e41d856a49cb5685ac93202 (diff)
Merge #1545
1545: migrate ra_syntax to the new rowan API r=matklad a=matklad
Co-authored-by: Aleksey Kladov <[email protected]>
92 files changed, 1984 insertions, 3100 deletions
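Most of the patch below is mechanical: the old rowan API handed out syntax nodes as lifetime-bound borrows (`&'a ast::Path`, `SyntaxToken<'a>`) and used `TreeArc<N>` for ownership, while the new API makes every node an owned, cheaply clonable handle. Call sites therefore switch from passing references to cloning handles, and types such as `ImportAction` lose their lifetime parameters. The following is a minimal standalone sketch of that before/after shape, using toy stand-in types rather than the real rowan/ra_syntax API:

```rust
use std::rc::Rc;

// Toy stand-in for a syntax node: an Rc-backed handle, so cloning is
// cheap and the value is not tied to the lifetime of the parsed file.
// (Illustrative only; the real types live in rowan/ra_syntax.)
#[derive(Clone, Debug)]
struct SyntaxNode(Rc<String>);

// Old shape: queries returned borrows tied to the source file's lifetime.
fn node_at_offset_old(file: &[SyntaxNode], idx: usize) -> Option<&SyntaxNode> {
    file.get(idx)
}

// New shape: queries return owned handles; callers clone when they need
// to store the node (e.g. in an enum like ImportAction, now lifetime-free).
fn node_at_offset_new(file: &[SyntaxNode], idx: usize) -> Option<SyntaxNode> {
    file.get(idx).cloned()
}

fn main() {
    let file = vec![SyntaxNode(Rc::new("fn f() {}".to_owned()))];

    // Old: the borrow keeps `file` borrowed for as long as `node` lives.
    let node = node_at_offset_old(&file, 0).unwrap();
    println!("borrowed: {:?}", node);

    // New: an owned handle can be cloned and stored without lifetimes.
    let owned = node_at_offset_new(&file, 0).unwrap();
    let stored = vec![owned.clone(), owned];
    println!("owned handles stored: {}", stored.len());
}
```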
diff --git a/Cargo.lock b/Cargo.lock
index 03f5be16b..141cc6088 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1313,7 +1313,7 @@ dependencies = [
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_parser 0.1.0",
  "ra_text_edit 0.1.0",
- "rowan 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "test_utils 0.1.0",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1584,11 +1584,11 @@ dependencies = [
 
 [[package]]
 name = "rowan"
-version = "0.5.5"
+version = "0.5.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "parking_lot 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2275,7 +2275,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c"
 "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
 "checksum ron 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "17f52a24414403f81528b67488cf8edc4eda977d3af1646bb6b106a600ead78f"
-"checksum rowan 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "500ba7550373d42593a5228085bad391517378fa31ad2a84defe100dd8259fef"
+"checksum rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0c433ffe99ac9b96fa9882805d05eee5d750c9202fb42d0546c556e5d70d54be"
 "checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af"
 "checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
 "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
diff --git a/crates/ra_assists/src/add_derive.rs b/crates/ra_assists/src/add_derive.rs
index bf7d55d6d..f19196f53 100644
--- a/crates/ra_assists/src/add_derive.rs
+++ b/crates/ra_assists/src/add_derive.rs
@@ -9,7 +9,7 @@ use crate::{Assist, AssistCtx, AssistId};
 
 pub(crate) fn add_derive(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let nominal = ctx.node_at_offset::<ast::NominalDef>()?;
-    let node_start = derive_insertion_offset(nominal)?;
+    let node_start = derive_insertion_offset(&nominal)?;
     ctx.add_action(AssistId("add_derive"), "add `#[derive]`", |edit| {
         let derive_attr = nominal
             .attrs()
diff --git a/crates/ra_assists/src/add_explicit_type.rs b/crates/ra_assists/src/add_explicit_type.rs
index bb47a32f0..a69cfc8e3 100644
--- a/crates/ra_assists/src/add_explicit_type.rs
+++ b/crates/ra_assists/src/add_explicit_type.rs
@@ -27,7 +27,7 @@ pub(crate) fn add_explicit_type(mut ctx: AssistCtx<impl HirDatabase>) -> Option<
     // Infer type
     let db = ctx.db;
     let analyzer = hir::SourceAnalyzer::new(db, ctx.frange.file_id, stmt.syntax(), None);
-    let ty = analyzer.type_of(db, expr)?;
+    let ty = analyzer.type_of(db, &expr)?;
     // Assist not applicable if the type is unknown
     if is_unknown(&ty) {
         return None;
diff --git a/crates/ra_assists/src/add_impl.rs b/crates/ra_assists/src/add_impl.rs
index b81922c1d..cebc19539 100644
--- a/crates/ra_assists/src/add_impl.rs
+++ b/crates/ra_assists/src/add_impl.rs
@@ -16,7 +16,7 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let start_offset = nominal.syntax().range().end();
     let mut buf = String::new();
     buf.push_str("\n\nimpl");
-    if let Some(type_params) = type_params {
+    if let Some(type_params) = &type_params {
         type_params.syntax().text().push_to(&mut buf);
     }
     buf.push_str(" ");
@@ -25,9 +25,9 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
         let lifetime_params = type_params
             .lifetime_params()
             .filter_map(|it| it.lifetime_token())
-            .map(|it| it.text());
+            .map(|it| it.text().clone());
         let type_params =
-            type_params.type_params().filter_map(|it| it.name()).map(|it| it.text());
+            type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
         join(lifetime_params.chain(type_params)).surround_with("<", ">").to_buf(&mut buf);
     }
     buf.push_str(" {\n");
diff --git a/crates/ra_assists/src/add_missing_impl_members.rs b/crates/ra_assists/src/add_missing_impl_members.rs
index 6ffdad0b1..b992a4dc8 100644
--- a/crates/ra_assists/src/add_missing_impl_members.rs
+++ b/crates/ra_assists/src/add_missing_impl_members.rs
@@ -5,8 +5,8 @@ use crate::{
 
 use hir::{db::HirDatabase, HasSource};
 use ra_db::FilePosition;
-use ra_syntax::ast::{self, AstNode, ImplItem, ImplItemKind, NameOwner};
-use ra_syntax::{SmolStr, TreeArc};
+use ra_syntax::ast::{self, AstNode, ImplItemKind, NameOwner};
+use ra_syntax::SmolStr;
 
 #[derive(PartialEq)]
 enum AddMissingImplMembersMode {
@@ -46,16 +46,16 @@ fn add_missing_impl_members_inner(
         let position = FilePosition { file_id, offset: impl_node.syntax().range().start() };
         let analyzer = hir::SourceAnalyzer::new(ctx.db, position.file_id, impl_node.syntax(), None);
 
-        resolve_target_trait_def(ctx.db, &analyzer, impl_node)?
+        resolve_target_trait_def(ctx.db, &analyzer, &impl_node)?
     };
 
-    let def_name = |kind| -> Option<&SmolStr> {
+    let def_name = |kind| -> Option<SmolStr> {
         match kind {
-            ImplItemKind::FnDef(def) => def.name(),
-            ImplItemKind::TypeAliasDef(def) => def.name(),
-            ImplItemKind::ConstDef(def) => def.name(),
+            ast::ImplItemKind::FnDef(def) => def.name(),
+            ast::ImplItemKind::TypeAliasDef(def) => def.name(),
+            ast::ImplItemKind::ConstDef(def) => def.name(),
         }
-        .map(ast::Name::text)
+        .map(|it| it.text().clone())
     };
 
     let trait_items = trait_def.item_list()?.impl_items();
@@ -78,18 +78,13 @@ fn add_missing_impl_members_inner(
 
     ctx.add_action(AssistId(assist_id), label, |edit| {
         let n_existing_items = impl_item_list.impl_items().count();
-        let items: Vec<_> = missing_items
-            .into_iter()
-            .map(|it| match it.kind() {
-                ImplItemKind::FnDef(def) => {
-                    strip_docstring(ImplItem::cast(add_body(def).syntax()).unwrap())
-                }
-                _ => strip_docstring(it),
-            })
-            .collect();
+        let items = missing_items.into_iter().map(|it| match it.kind() {
+            ImplItemKind::FnDef(def) => strip_docstring(add_body(def).into()),
+            _ => strip_docstring(it),
+        });
         let mut ast_editor = AstEditor::new(impl_item_list);
 
-        ast_editor.append_items(items.iter().map(|it| &**it));
+        ast_editor.append_items(items);
 
         let first_new_item = ast_editor.ast().impl_items().nth(n_existing_items).unwrap();
         let cursor_position = first_new_item.syntax().range().start();
@@ -101,14 +96,14 @@ fn add_missing_impl_members_inner(
     ctx.build()
 }
 
-fn strip_docstring(item: &ast::ImplItem) -> TreeArc<ast::ImplItem> {
+fn strip_docstring(item: ast::ImplItem) -> ast::ImplItem {
     let mut ast_editor = AstEditor::new(item);
     ast_editor.strip_attrs_and_docs();
     ast_editor.ast().to_owned()
 }
 
-fn add_body(fn_def: &ast::FnDef) -> TreeArc<ast::FnDef> {
-    let mut ast_editor = AstEditor::new(fn_def);
+fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
+    let mut ast_editor = AstEditor::new(fn_def.clone());
     if fn_def.body().is_none() {
         ast_editor.set_body(&AstBuilder::<ast::Block>::single_expr(
             &AstBuilder::<ast::Expr>::unimplemented(),
@@ -123,9 +118,12 @@ fn resolve_target_trait_def(
     db: &impl HirDatabase,
     analyzer: &hir::SourceAnalyzer,
     impl_block: &ast::ImplBlock,
-) -> Option<TreeArc<ast::TraitDef>> {
-    let ast_path =
-        impl_block.target_trait().map(AstNode::syntax).and_then(ast::PathType::cast)?.path()?;
+) -> Option<ast::TraitDef> {
+    let ast_path = impl_block
+        .target_trait()
+        .map(|it| it.syntax().clone())
+        .and_then(ast::PathType::cast)?
+        .path()?;
 
     match analyzer.resolve_path(db, &ast_path) {
         Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).ast),
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 34b207154..e52085f85 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -49,7 +49,7 @@ pub(crate) enum Assist {
 pub(crate) struct AssistCtx<'a, DB> {
     pub(crate) db: &'a DB,
     pub(crate) frange: FileRange,
-    source_file: &'a SourceFile,
+    source_file: SourceFile,
     should_compute_edit: bool,
     assist: Assist,
 }
@@ -59,7 +59,7 @@ impl<'a, DB> Clone for AssistCtx<'a, DB> {
         AssistCtx {
             db: self.db,
             frange: self.frange,
-            source_file: self.source_file,
+            source_file: self.source_file.clone(),
             should_compute_edit: self.should_compute_edit,
             assist: self.assist.clone(),
         }
@@ -104,18 +104,18 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
         Some(self.assist)
     }
 
-    pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken<'a>> {
+    pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
         find_token_at_offset(self.source_file.syntax(), self.frange.range.start())
     }
 
-    pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<&'a N> {
+    pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<N> {
         find_node_at_offset(self.source_file.syntax(), self.frange.range.start())
     }
-    pub(crate) fn covering_element(&self) -> SyntaxElement<'a> {
+    pub(crate) fn covering_element(&self) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), self.frange.range)
     }
 
-    pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement<'a> {
+    pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), range)
     }
 }
@@ -139,7 +139,7 @@ impl AssistBuilder {
     ) {
         let mut replace_with = replace_with.into();
         if let Some(indent) = leading_indent(node) {
-            replace_with = reindent(&replace_with, indent)
+            replace_with = reindent(&replace_with, &indent)
         }
         self.replace(node.range(), replace_with)
     }
diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs
index 7b743c9f0..5fbcadfee 100644
--- a/crates/ra_assists/src/ast_editor.rs
+++ b/crates/ra_assists/src/ast_editor.rs
@@ -4,18 +4,18 @@ use arrayvec::ArrayVec;
 use hir::Name;
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, TreeArc, T,
+    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, T,
 };
 use ra_text_edit::TextEditBuilder;
 
 pub struct AstEditor<N: AstNode> {
-    original_ast: TreeArc<N>,
-    ast: TreeArc<N>,
+    original_ast: N,
+    ast: N,
 }
 
 impl<N: AstNode> AstEditor<N> {
-    pub fn new(node: &N) -> AstEditor<N> {
-        AstEditor { original_ast: node.to_owned(), ast: node.to_owned() }
+    pub fn new(node: N) -> AstEditor<N> {
+        AstEditor { original_ast: node.clone(), ast: node }
     }
 
     pub fn into_text_edit(self, builder: &mut TextEditBuilder) {
@@ -26,27 +26,27 @@ impl<N: AstNode> AstEditor<N> {
     }
 
     pub fn ast(&self) -> &N {
-        &*self.ast
+        &self.ast
     }
 
     #[must_use]
-    fn insert_children<'a>(
+    fn insert_children(
         &self,
-        position: InsertPosition<SyntaxElement<'_>>,
-        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
-    ) -> TreeArc<N> {
+        position: InsertPosition<SyntaxElement>,
+        to_insert: impl Iterator<Item = SyntaxElement>,
+    ) -> N {
         let new_syntax = self.ast().syntax().insert_children(position, to_insert);
-        N::cast(&new_syntax).unwrap().to_owned()
+        N::cast(new_syntax).unwrap()
     }
 
     #[must_use]
-    fn replace_children<'a>(
+    fn replace_children(
         &self,
-        to_delete: RangeInclusive<SyntaxElement<'_>>,
-        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
-    ) -> TreeArc<N> {
+        to_delete: RangeInclusive<SyntaxElement>,
+        to_insert: impl Iterator<Item = SyntaxElement>,
+    ) -> N {
         let new_syntax = self.ast().syntax().replace_children(to_delete, to_insert);
-        N::cast(&new_syntax).unwrap().to_owned()
+        N::cast(new_syntax).unwrap()
     }
 
     fn do_make_multiline(&mut self) {
@@ -66,16 +66,18 @@ impl<N: AstNode> AstEditor<N> {
                 if ws.text().contains('\n') {
                     return;
                 }
-                Some(ws)
+                Some(ws.clone())
             }
         };
 
-        let indent = leading_indent(self.ast().syntax()).unwrap_or("");
+        let indent = leading_indent(self.ast().syntax()).unwrap_or("".into());
         let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
         let to_insert = iter::once(ws.ws().into());
         self.ast = match existing_ws {
             None => self.insert_children(InsertPosition::After(l_curly), to_insert),
-            Some(ws) => self.replace_children(RangeInclusive::new(ws.into(), ws.into()), to_insert),
+            Some(ws) => {
+                self.replace_children(RangeInclusive::new(ws.clone().into(), ws.into()), to_insert)
+            }
         };
     }
 }
@@ -95,7 +97,7 @@ impl AstEditor<ast::NamedFieldList> {
         let space = if is_multiline {
             ws = tokens::WsBuilder::new(&format!(
                 "\n{} ",
-                leading_indent(self.ast().syntax()).unwrap_or("")
+                leading_indent(self.ast().syntax()).unwrap_or("".into())
             ));
             ws.ws()
         } else {
@@ -104,7 +106,7 @@ impl AstEditor<ast::NamedFieldList> {
 
         let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new();
         to_insert.push(space.into());
-        to_insert.push(field.syntax().into());
+        to_insert.push(field.syntax().clone().into());
         to_insert.push(tokens::comma().into());
 
         macro_rules! after_l_curly {
@@ -127,7 +129,7 @@ impl AstEditor<ast::NamedFieldList> {
                     InsertPosition::After(comma)
                 } else {
                     to_insert.insert(0, tokens::comma().into());
-                    InsertPosition::After($anchor.syntax().into())
+                    InsertPosition::After($anchor.syntax().clone().into())
                 }
             };
         };
@@ -144,7 +146,9 @@ impl AstEditor<ast::NamedFieldList> {
                     None => after_l_curly!(),
                 }
             }
-            InsertPosition::Before(anchor) => InsertPosition::Before(anchor.syntax().into()),
+            InsertPosition::Before(anchor) => {
+                InsertPosition::Before(anchor.syntax().clone().into())
+            }
             InsertPosition::After(anchor) => after_field!(anchor),
         };
 
@@ -157,7 +161,7 @@ impl AstEditor<ast::NamedFieldList> {
 }
 
 impl AstEditor<ast::ItemList> {
-    pub fn append_items<'a>(&mut self, items: impl Iterator<Item = &'a ast::ImplItem>) {
+    pub fn append_items(&mut self, items: impl Iterator<Item = ast::ImplItem>) {
         let n_existing_items = self.ast().impl_items().count();
         if n_existing_items == 0 {
             self.do_make_multiline();
@@ -165,22 +169,23 @@ impl AstEditor<ast::ItemList> {
         items.for_each(|it| self.append_item(it));
     }
 
-    pub fn append_item(&mut self, item: &ast::ImplItem) {
+    pub fn append_item(&mut self, item: ast::ImplItem) {
         let (indent, position) = match self.ast().impl_items().last() {
             Some(it) => (
-                leading_indent(it.syntax()).unwrap_or("").to_string(),
-                InsertPosition::After(it.syntax().into()),
+                leading_indent(it.syntax()).unwrap_or_default().to_string(),
+                InsertPosition::After(it.syntax().clone().into()),
             ),
             None => match self.l_curly() {
                 Some(it) => (
-                    " ".to_string() + leading_indent(self.ast().syntax()).unwrap_or(""),
+                    " ".to_string() + &leading_indent(self.ast().syntax()).unwrap_or_default(),
                     InsertPosition::After(it),
                 ),
                 None => return,
             },
         };
         let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
-        let to_insert: ArrayVec<[SyntaxElement; 2]> = [ws.ws().into(), item.syntax().into()].into();
+        let to_insert: ArrayVec<[SyntaxElement; 2]> =
+            [ws.ws().into(), item.syntax().clone().into()].into();
         self.ast = self.insert_children(position, to_insert.into_iter());
     }
 
@@ -197,9 +202,9 @@ impl AstEditor<ast::ImplItem> {
             .children_with_tokens()
             .find(|it| it.kind() == ATTR || it.kind() == COMMENT)
         {
-            let end = match start.next_sibling_or_token() {
-                Some(el) if el.kind() == WHITESPACE => el,
-                Some(_) | None => start,
+            let end = match &start.next_sibling_or_token() {
+                Some(el) if el.kind() == WHITESPACE => el.clone(),
+                Some(_) | None => start.clone(),
             };
             self.ast = self.replace_children(RangeInclusive::new(start, end), iter::empty());
         }
@@ -210,18 +215,18 @@ impl AstEditor<ast::FnDef> {
     pub fn set_body(&mut self, body: &ast::Block) {
         let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
         let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.ast().body() {
-            old_body.syntax().into()
+            old_body.syntax().clone().into()
         } else if let Some(semi) = self.ast().semicolon_token() {
             to_insert.push(tokens::single_space().into());
             semi.into()
         } else {
             to_insert.push(tokens::single_space().into());
-            to_insert.push(body.syntax().into());
+            to_insert.push(body.syntax().clone().into());
             self.ast = self.insert_children(InsertPosition::Last, to_insert.into_iter());
             return;
         };
-        to_insert.push(body.syntax().into());
-        let replace_range = RangeInclusive::new(old_body_or_semi, old_body_or_semi);
+        to_insert.push(body.syntax().clone().into());
+        let replace_range = RangeInclusive::new(old_body_or_semi.clone(), old_body_or_semi);
         self.ast = self.replace_children(replace_range, to_insert.into_iter())
     }
 }
@@ -231,15 +236,15 @@ pub struct AstBuilder<N: AstNode> {
 }
 
 impl AstBuilder<ast::NamedField> {
-    pub fn from_name(name: &Name) -> TreeArc<ast::NamedField> {
+    pub fn from_name(name: &Name) -> ast::NamedField {
         ast_node_from_file_text(&format!("fn f() {{ S {{ {}: (), }} }}", name))
     }
 
-    fn from_text(text: &str) -> TreeArc<ast::NamedField> {
+    fn from_text(text: &str) -> ast::NamedField {
         ast_node_from_file_text(&format!("fn f() {{ S {{ {}, }} }}", text))
     }
 
-    pub fn from_pieces(name: &ast::NameRef, expr: Option<&ast::Expr>) -> TreeArc<ast::NamedField> {
+    pub fn from_pieces(name: &ast::NameRef, expr: Option<&ast::Expr>) -> ast::NamedField {
         match expr {
             Some(expr) => Self::from_text(&format!("{}: {}", name.syntax(), expr.syntax())),
             None => Self::from_text(&name.syntax().to_string()),
@@ -248,36 +253,36 @@ impl AstBuilder<ast::NamedField> {
 }
 
 impl AstBuilder<ast::Block> {
-    fn from_text(text: &str) -> TreeArc<ast::Block> {
+    fn from_text(text: &str) -> ast::Block {
         ast_node_from_file_text(&format!("fn f() {}", text))
     }
 
-    pub fn single_expr(e: &ast::Expr) -> TreeArc<ast::Block> {
+    pub fn single_expr(e: &ast::Expr) -> ast::Block {
         Self::from_text(&format!("{{ {} }}", e.syntax()))
     }
 }
 
 impl AstBuilder<ast::Expr> {
-    fn from_text(text: &str) -> TreeArc<ast::Expr> {
+    fn from_text(text: &str) -> ast::Expr {
         ast_node_from_file_text(&format!("fn f() {{ {}; }}", text))
     }
 
-    pub fn unit() -> TreeArc<ast::Expr> {
+    pub fn unit() -> ast::Expr {
         Self::from_text("()")
     }
 
-    pub fn unimplemented() -> TreeArc<ast::Expr> {
+    pub fn unimplemented() -> ast::Expr {
         Self::from_text("unimplemented!()")
     }
 }
 
 impl AstBuilder<ast::NameRef> {
-    pub fn new(text: &str) -> TreeArc<ast::NameRef> {
+    pub fn new(text: &str) -> ast::NameRef {
         ast_node_from_file_text(&format!("fn f() {{ {}; }}", text))
     }
 }
 
-fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> {
+fn ast_node_from_file_text<N: AstNode>(text: &str) -> N {
     let parse = SourceFile::parse(text);
     let res = parse.tree().syntax().descendants().find_map(N::cast).unwrap().to_owned();
     res
@@ -285,47 +290,49 @@ fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> {
 
 mod tokens {
     use once_cell::sync::Lazy;
-    use ra_syntax::{AstNode, SourceFile, SyntaxKind::*, SyntaxToken, TreeArc, T};
+    use ra_syntax::{AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken, T};
 
-    static SOURCE_FILE: Lazy<TreeArc<SourceFile>> =
-        Lazy::new(|| SourceFile::parse(",\n; ;").tree().to_owned());
+    static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;"));
 
-    pub(crate) fn comma() -> SyntaxToken<'static> {
+    pub(crate) fn comma() -> SyntaxToken {
         SOURCE_FILE
+            .tree()
             .syntax()
             .descendants_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == T![,])
             .unwrap()
     }
 
-    pub(crate) fn single_space() -> SyntaxToken<'static> {
+    pub(crate) fn single_space() -> SyntaxToken {
         SOURCE_FILE
+            .tree()
            .syntax()
            .descendants_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
            .unwrap()
    }

    #[allow(unused)]
-    pub(crate) fn single_newline() -> SyntaxToken<'static> {
+    pub(crate) fn single_newline() -> SyntaxToken {
        SOURCE_FILE
+            .tree()
            .syntax()
            .descendants_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
            .unwrap()
    }

-    pub(crate) struct WsBuilder(TreeArc<SourceFile>);
+    pub(crate) struct WsBuilder(SourceFile);

    impl WsBuilder {
        pub(crate) fn new(text: &str) -> WsBuilder {
            WsBuilder(SourceFile::parse(text).ok().unwrap())
        }
-        pub(crate) fn ws(&self) -> SyntaxToken<'_> {
-            self.0.syntax().first_child_or_token().unwrap().as_token().unwrap()
+        pub(crate) fn ws(&self) -> SyntaxToken {
+            self.0.syntax().first_child_or_token().unwrap().as_token().cloned().unwrap()
        }
    }
 
diff --git a/crates/ra_assists/src/auto_import.rs b/crates/ra_assists/src/auto_import.rs index f8f37e852..0eb4bdb62 100644 --- a/crates/ra_assists/src/auto_import.rs +++ b/crates/ra_assists/src/auto_import.rs | |||
@@ -12,25 +12,25 @@ use ra_syntax::{ | |||
12 | SyntaxNode, TextRange, T, | 12 | SyntaxNode, TextRange, T, |
13 | }; | 13 | }; |
14 | 14 | ||
15 | fn collect_path_segments_raw<'a>( | 15 | fn collect_path_segments_raw( |
16 | segments: &mut Vec<&'a ast::PathSegment>, | 16 | segments: &mut Vec<ast::PathSegment>, |
17 | mut path: &'a ast::Path, | 17 | mut path: ast::Path, |
18 | ) -> Option<usize> { | 18 | ) -> Option<usize> { |
19 | let oldlen = segments.len(); | 19 | let oldlen = segments.len(); |
20 | loop { | 20 | loop { |
21 | let mut children = path.syntax().children_with_tokens(); | 21 | let mut children = path.syntax().children_with_tokens(); |
22 | let (first, second, third) = ( | 22 | let (first, second, third) = ( |
23 | children.next().map(|n| (n, n.kind())), | 23 | children.next().map(|n| (n.clone(), n.kind())), |
24 | children.next().map(|n| (n, n.kind())), | 24 | children.next().map(|n| (n.clone(), n.kind())), |
25 | children.next().map(|n| (n, n.kind())), | 25 | children.next().map(|n| (n.clone(), n.kind())), |
26 | ); | 26 | ); |
27 | match (first, second, third) { | 27 | match (first, second, third) { |
28 | (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => { | 28 | (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => { |
29 | path = ast::Path::cast(subpath.as_node()?)?; | 29 | path = ast::Path::cast(subpath.as_node()?.clone())?; |
30 | segments.push(ast::PathSegment::cast(segment.as_node()?)?); | 30 | segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?); |
31 | } | 31 | } |
32 | (Some((segment, PATH_SEGMENT)), _, _) => { | 32 | (Some((segment, PATH_SEGMENT)), _, _) => { |
33 | segments.push(ast::PathSegment::cast(segment.as_node()?)?); | 33 | segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?); |
34 | break; | 34 | break; |
35 | } | 35 | } |
36 | (_, _, _) => return None, | 36 | (_, _, _) => return None, |
@@ -60,7 +60,7 @@ fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) { | |||
60 | } | 60 | } |
61 | 61 | ||
62 | // Returns the numeber of common segments. | 62 | // Returns the numeber of common segments. |
63 | fn compare_path_segments(left: &[SmolStr], right: &[&ast::PathSegment]) -> usize { | 63 | fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize { |
64 | left.iter().zip(right).filter(|(l, r)| compare_path_segment(l, r)).count() | 64 | left.iter().zip(right).filter(|(l, r)| compare_path_segment(l, r)).count() |
65 | } | 65 | } |
66 | 66 | ||
@@ -81,12 +81,12 @@ fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool { | |||
81 | a == b.text() | 81 | a == b.text() |
82 | } | 82 | } |
83 | 83 | ||
84 | #[derive(Copy, Clone)] | 84 | #[derive(Clone)] |
85 | enum ImportAction<'a> { | 85 | enum ImportAction { |
86 | Nothing, | 86 | Nothing, |
87 | // Add a brand new use statement. | 87 | // Add a brand new use statement. |
88 | AddNewUse { | 88 | AddNewUse { |
89 | anchor: Option<&'a SyntaxNode>, // anchor node | 89 | anchor: Option<SyntaxNode>, // anchor node |
90 | add_after_anchor: bool, | 90 | add_after_anchor: bool, |
91 | }, | 91 | }, |
92 | 92 | ||
@@ -94,9 +94,9 @@ enum ImportAction<'a> { | |||
94 | AddNestedImport { | 94 | AddNestedImport { |
95 | // how may segments matched with the target path | 95 | // how may segments matched with the target path |
96 | common_segments: usize, | 96 | common_segments: usize, |
97 | path_to_split: &'a ast::Path, | 97 | path_to_split: ast::Path, |
98 | // the first segment of path_to_split we want to add into the new nested list | 98 | // the first segment of path_to_split we want to add into the new nested list |
99 | first_segment_to_split: Option<&'a ast::PathSegment>, | 99 | first_segment_to_split: Option<ast::PathSegment>, |
100 | // Wether to add 'self' in addition to the target path | 100 | // Wether to add 'self' in addition to the target path |
101 | add_self: bool, | 101 | add_self: bool, |
102 | }, | 102 | }, |
@@ -104,20 +104,20 @@ enum ImportAction<'a> { | |||
104 | AddInTreeList { | 104 | AddInTreeList { |
105 | common_segments: usize, | 105 | common_segments: usize, |
106 | // The UseTreeList where to add the target path | 106 | // The UseTreeList where to add the target path |
107 | tree_list: &'a ast::UseTreeList, | 107 | tree_list: ast::UseTreeList, |
108 | add_self: bool, | 108 | add_self: bool, |
109 | }, | 109 | }, |
110 | } | 110 | } |
111 | 111 | ||
112 | impl<'a> ImportAction<'a> { | 112 | impl ImportAction { |
113 | fn add_new_use(anchor: Option<&'a SyntaxNode>, add_after_anchor: bool) -> Self { | 113 | fn add_new_use(anchor: Option<SyntaxNode>, add_after_anchor: bool) -> Self { |
114 | ImportAction::AddNewUse { anchor, add_after_anchor } | 114 | ImportAction::AddNewUse { anchor, add_after_anchor } |
115 | } | 115 | } |
116 | 116 | ||
117 | fn add_nested_import( | 117 | fn add_nested_import( |
118 | common_segments: usize, | 118 | common_segments: usize, |
119 | path_to_split: &'a ast::Path, | 119 | path_to_split: ast::Path, |
120 | first_segment_to_split: Option<&'a ast::PathSegment>, | 120 | first_segment_to_split: Option<ast::PathSegment>, |
121 | add_self: bool, | 121 | add_self: bool, |
122 | ) -> Self { | 122 | ) -> Self { |
123 | ImportAction::AddNestedImport { | 123 | ImportAction::AddNestedImport { |
@@ -130,14 +130,14 @@ impl<'a> ImportAction<'a> { | |||
130 | 130 | ||
131 | fn add_in_tree_list( | 131 | fn add_in_tree_list( |
132 | common_segments: usize, | 132 | common_segments: usize, |
133 | tree_list: &'a ast::UseTreeList, | 133 | tree_list: ast::UseTreeList, |
134 | add_self: bool, | 134 | add_self: bool, |
135 | ) -> Self { | 135 | ) -> Self { |
136 | ImportAction::AddInTreeList { common_segments, tree_list, add_self } | 136 | ImportAction::AddInTreeList { common_segments, tree_list, add_self } |
137 | } | 137 | } |
138 | 138 | ||
139 | fn better<'b>(left: &'b ImportAction<'a>, right: &'b ImportAction<'a>) -> &'b ImportAction<'a> { | 139 | fn better(left: ImportAction, right: ImportAction) -> ImportAction { |
140 | if left.is_better(right) { | 140 | if left.is_better(&right) { |
141 | left | 141 | left |
142 | } else { | 142 | } else { |
143 | right | 143 | right |
@@ -166,12 +166,12 @@ impl<'a> ImportAction<'a> { | |||
166 | 166 | ||
167 | // Find out the best ImportAction to import target path against current_use_tree. | 167 | // Find out the best ImportAction to import target path against current_use_tree. |
168 | // If current_use_tree has a nested import the function gets called recursively on every UseTree inside a UseTreeList. | 168 | // If current_use_tree has a nested import the function gets called recursively on every UseTree inside a UseTreeList. |
169 | fn walk_use_tree_for_best_action<'a>( | 169 | fn walk_use_tree_for_best_action( |
170 | current_path_segments: &mut Vec<&'a ast::PathSegment>, // buffer containing path segments | 170 | current_path_segments: &mut Vec<ast::PathSegment>, // buffer containing path segments |
171 | current_parent_use_tree_list: Option<&'a ast::UseTreeList>, // will be Some value if we are in a nested import | 171 | current_parent_use_tree_list: Option<ast::UseTreeList>, // will be Some value if we are in a nested import |
172 | current_use_tree: &'a ast::UseTree, // the use tree we are currently examinating | 172 | current_use_tree: ast::UseTree, // the use tree we are currently examinating |
173 | target: &[SmolStr], // the path we want to import | 173 | target: &[SmolStr], // the path we want to import |
174 | ) -> ImportAction<'a> { | 174 | ) -> ImportAction { |
175 | // We save the number of segments in the buffer so we can restore the correct segments | 175 | // We save the number of segments in the buffer so we can restore the correct segments |
176 | // before returning. Recursive call will add segments so we need to delete them. | 176 | // before returning. Recursive call will add segments so we need to delete them. |
177 | let prev_len = current_path_segments.len(); | 177 | let prev_len = current_path_segments.len(); |
@@ -188,32 +188,36 @@ fn walk_use_tree_for_best_action<'a>( | |||
188 | .syntax() | 188 | .syntax() |
189 | .ancestors() | 189 | .ancestors() |
190 | .find_map(ast::UseItem::cast) | 190 | .find_map(ast::UseItem::cast) |
191 | .map(AstNode::syntax), | 191 | .map(|it| it.syntax().clone()), |
192 | true, | 192 | true, |
193 | ); | 193 | ); |
194 | } | 194 | } |
195 | }; | 195 | }; |
196 | 196 | ||
197 | // This can happen only if current_use_tree is a direct child of a UseItem | 197 | // This can happen only if current_use_tree is a direct child of a UseItem |
198 | if let Some(name) = alias.and_then(ast::NameOwner::name) { | 198 | if let Some(name) = alias.and_then(|it| it.name()) { |
199 | if compare_path_segment_with_name(&target[0], name) { | 199 | if compare_path_segment_with_name(&target[0], &name) { |
200 | return ImportAction::Nothing; | 200 | return ImportAction::Nothing; |
201 | } | 201 | } |
202 | } | 202 | } |
203 | 203 | ||
204 | collect_path_segments_raw(current_path_segments, path); | 204 | collect_path_segments_raw(current_path_segments, path.clone()); |
205 | 205 | ||
206 | // We compare only the new segments added in the line just above. | 206 | // We compare only the new segments added in the line just above. |
207 | // The first prev_len segments were already compared in 'parent' recursive calls. | 207 | // The first prev_len segments were already compared in 'parent' recursive calls. |
208 | let left = target.split_at(prev_len).1; | 208 | let left = target.split_at(prev_len).1; |
209 | let right = current_path_segments.split_at(prev_len).1; | 209 | let right = current_path_segments.split_at(prev_len).1; |
210 | let common = compare_path_segments(left, right); | 210 | let common = compare_path_segments(left, &right); |
211 | let mut action = match common { | 211 | let mut action = match common { |
212 | 0 => ImportAction::add_new_use( | 212 | 0 => ImportAction::add_new_use( |
213 | // e.g: target is std::fmt and we can have | 213 | // e.g: target is std::fmt and we can have |
214 | // use foo::bar | 214 | // use foo::bar |
215 | // We add a brand new use statement | 215 | // We add a brand new use statement |
216 | current_use_tree.syntax().ancestors().find_map(ast::UseItem::cast).map(AstNode::syntax), | 216 | current_use_tree |
217 | .syntax() | ||
218 | .ancestors() | ||
219 | .find_map(ast::UseItem::cast) | ||
220 | .map(|it| it.syntax().clone()), | ||
217 | true, | 221 | true, |
218 | ), | 222 | ), |
219 | common if common == left.len() && left.len() == right.len() => { | 223 | common if common == left.len() && left.len() == right.len() => { |
@@ -223,9 +227,9 @@ fn walk_use_tree_for_best_action<'a>( | |||
223 | if let Some(list) = tree_list { | 227 | if let Some(list) = tree_list { |
224 | // In case 2 we need to add self to the nested list | 228 | // In case 2 we need to add self to the nested list |
225 | // unless it's already there | 229 | // unless it's already there |
226 | let has_self = list.use_trees().map(ast::UseTree::path).any(|p| { | 230 | let has_self = list.use_trees().map(|it| it.path()).any(|p| { |
227 | p.and_then(ast::Path::segment) | 231 | p.and_then(|it| it.segment()) |
228 | .and_then(ast::PathSegment::kind) | 232 | .and_then(|it| it.kind()) |
229 | .filter(|k| *k == ast::PathSegmentKind::SelfKw) | 233 | .filter(|k| *k == ast::PathSegmentKind::SelfKw) |
230 | .is_some() | 234 | .is_some() |
231 | }); | 235 | }); |
@@ -248,7 +252,7 @@ fn walk_use_tree_for_best_action<'a>( | |||
248 | ImportAction::add_nested_import( | 252 | ImportAction::add_nested_import( |
249 | prev_len + common, | 253 | prev_len + common, |
250 | path, | 254 | path, |
251 | Some(segments_to_split[0]), | 255 | Some(segments_to_split[0].clone()), |
252 | false, | 256 | false, |
253 | ) | 257 | ) |
254 | } | 258 | } |
@@ -263,14 +267,18 @@ fn walk_use_tree_for_best_action<'a>( | |||
263 | .syntax() | 267 | .syntax() |
264 | .ancestors() | 268 | .ancestors() |
265 | .find_map(ast::UseItem::cast) | 269 | .find_map(ast::UseItem::cast) |
266 | .map(AstNode::syntax), | 270 | .map(|it| it.syntax().clone()), |
267 | true, | 271 | true, |
268 | ); | 272 | ); |
269 | if let Some(list) = tree_list { | 273 | if let Some(list) = tree_list { |
270 | // Case 2, check recursively if the path is already imported in the nested list | 274 | // Case 2, check recursively if the path is already imported in the nested list |
271 | for u in list.use_trees() { | 275 | for u in list.use_trees() { |
272 | let child_action = | 276 | let child_action = walk_use_tree_for_best_action( |
273 | walk_use_tree_for_best_action(current_path_segments, Some(list), u, target); | 277 | current_path_segments, |
278 | Some(list.clone()), | ||
279 | u, | ||
280 | target, | ||
281 | ); | ||
274 | if child_action.is_better(&better_action) { | 282 | if child_action.is_better(&better_action) { |
275 | better_action = child_action; | 283 | better_action = child_action; |
276 | if let ImportAction::Nothing = better_action { | 284 | if let ImportAction::Nothing = better_action { |
@@ -291,7 +299,7 @@ fn walk_use_tree_for_best_action<'a>( | |||
291 | ImportAction::add_nested_import( | 299 | ImportAction::add_nested_import( |
292 | prev_len + common, | 300 | prev_len + common, |
293 | path, | 301 | path, |
294 | Some(segments_to_split[0]), | 302 | Some(segments_to_split[0].clone()), |
295 | true, | 303 | true, |
296 | ) | 304 | ) |
297 | } | 305 | } |
@@ -302,7 +310,7 @@ fn walk_use_tree_for_best_action<'a>( | |||
302 | ImportAction::add_nested_import( | 310 | ImportAction::add_nested_import( |
303 | prev_len + common, | 311 | prev_len + common, |
304 | path, | 312 | path, |
305 | Some(segments_to_split[0]), | 313 | Some(segments_to_split[0].clone()), |
306 | false, | 314 | false, |
307 | ) | 315 | ) |
308 | } | 316 | } |
@@ -311,7 +319,7 @@ fn walk_use_tree_for_best_action<'a>( | |||
311 | 319 | ||
312 | // If we are inside a UseTreeList adding a use statement become adding to the existing | 320 | // If we are inside a UseTreeList adding a use statement become adding to the existing |
313 | // tree list. | 321 | // tree list. |
314 | action = match (current_parent_use_tree_list, action) { | 322 | action = match (current_parent_use_tree_list, action.clone()) { |
315 | (Some(use_tree_list), ImportAction::AddNewUse { .. }) => { | 323 | (Some(use_tree_list), ImportAction::AddNewUse { .. }) => { |
316 | ImportAction::add_in_tree_list(prev_len, use_tree_list, false) | 324 | ImportAction::add_in_tree_list(prev_len, use_tree_list, false) |
317 | } | 325 | } |
@@ -323,19 +331,20 @@ fn walk_use_tree_for_best_action<'a>( | |||
323 | action | 331 | action |
324 | } | 332 | } |
325 | 333 | ||
326 | fn best_action_for_target<'b, 'a: 'b>( | 334 | fn best_action_for_target( |
327 | container: &'a SyntaxNode, | 335 | container: SyntaxNode, |
328 | anchor: &'a SyntaxNode, | 336 | anchor: SyntaxNode, |
329 | target: &'b [SmolStr], | 337 | target: &[SmolStr], |
330 | ) -> ImportAction<'a> { | 338 | ) -> ImportAction { |
331 | let mut storage = Vec::with_capacity(16); // this should be the only allocation | 339 | let mut storage = Vec::with_capacity(16); // this should be the only allocation |
332 | let best_action = container | 340 | let best_action = container |
333 | .children() | 341 | .children() |
334 | .filter_map(ast::UseItem::cast) | 342 | .filter_map(ast::UseItem::cast) |
335 | .filter_map(ast::UseItem::use_tree) | 343 | .filter_map(|it| it.use_tree()) |
336 | .map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target)) | 344 | .map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target)) |
337 | .fold(None, |best, a| { | 345 | .fold(None, |best, a| match best { |
338 | best.and_then(|best| Some(*ImportAction::better(&best, &a))).or_else(|| Some(a)) | 346 | Some(best) => Some(ImportAction::better(best, a)), |
347 | None => Some(a), | ||
339 | }); | 348 | }); |
340 | 349 | ||
341 | match best_action { | 350 | match best_action { |
@@ -386,7 +395,7 @@ fn make_assist(action: &ImportAction, target: &[SmolStr], edit: &mut TextEditBui | |||
386 | } | 395 | } |
387 | 396 | ||
388 | fn make_assist_add_new_use( | 397 | fn make_assist_add_new_use( |
389 | anchor: &Option<&SyntaxNode>, | 398 | anchor: &Option<SyntaxNode>, |
390 | after: bool, | 399 | after: bool, |
391 | target: &[SmolStr], | 400 | target: &[SmolStr], |
392 | edit: &mut TextEditBuilder, | 401 | edit: &mut TextEditBuilder, |
@@ -396,7 +405,7 @@ fn make_assist_add_new_use( | |||
396 | let mut buf = String::new(); | 405 | let mut buf = String::new(); |
397 | if after { | 406 | if after { |
398 | buf.push_str("\n"); | 407 | buf.push_str("\n"); |
399 | if let Some(spaces) = indent { | 408 | if let Some(spaces) = &indent { |
400 | buf.push_str(spaces); | 409 | buf.push_str(spaces); |
401 | } | 410 | } |
402 | } | 411 | } |
@@ -405,8 +414,8 @@ fn make_assist_add_new_use( | |||
405 | buf.push_str(";"); | 414 | buf.push_str(";"); |
406 | if !after { | 415 | if !after { |
407 | buf.push_str("\n\n"); | 416 | buf.push_str("\n\n"); |
408 | if let Some(spaces) = indent { | 417 | if let Some(spaces) = &indent { |
409 | buf.push_str(spaces); | 418 | buf.push_str(&spaces); |
410 | } | 419 | } |
411 | } | 420 | } |
412 | let position = if after { anchor.range().end() } else { anchor.range().start() }; | 421 | let position = if after { anchor.range().end() } else { anchor.range().start() }; |
@@ -444,7 +453,7 @@ fn make_assist_add_in_tree_list( | |||
444 | 453 | ||
445 | fn make_assist_add_nested_import( | 454 | fn make_assist_add_nested_import( |
446 | path: &ast::Path, | 455 | path: &ast::Path, |
447 | first_segment_to_split: &Option<&ast::PathSegment>, | 456 | first_segment_to_split: &Option<ast::PathSegment>, |
448 | target: &[SmolStr], | 457 | target: &[SmolStr], |
449 | add_self: bool, | 458 | add_self: bool, |
450 | edit: &mut TextEditBuilder, | 459 | edit: &mut TextEditBuilder, |
@@ -482,7 +491,7 @@ fn apply_auto_import( | |||
482 | target: &[SmolStr], | 491 | target: &[SmolStr], |
483 | edit: &mut TextEditBuilder, | 492 | edit: &mut TextEditBuilder, |
484 | ) { | 493 | ) { |
485 | let action = best_action_for_target(container, path.syntax(), target); | 494 | let action = best_action_for_target(container.clone(), path.syntax().clone(), target); |
486 | make_assist(&action, target, edit); | 495 | make_assist(&action, target, edit); |
487 | if let Some(last) = path.segment() { | 496 | if let Some(last) = path.segment() { |
488 | // Here we are assuming the assist will provide a correct use statement | 497 | // Here we are assuming the assist will provide a correct use statement |
@@ -522,26 +531,26 @@ pub fn auto_import_text_edit( | |||
522 | edit: &mut TextEditBuilder, | 531 | edit: &mut TextEditBuilder, |
523 | ) { | 532 | ) { |
524 | let container = position.ancestors().find_map(|n| { | 533 | let container = position.ancestors().find_map(|n| { |
525 | if let Some(module) = ast::Module::cast(n) { | 534 | if let Some(module) = ast::Module::cast(n.clone()) { |
526 | return module.item_list().map(ast::AstNode::syntax); | 535 | return module.item_list().map(|it| it.syntax().clone()); |
527 | } | 536 | } |
528 | ast::SourceFile::cast(n).map(ast::AstNode::syntax) | 537 | ast::SourceFile::cast(n).map(|it| it.syntax().clone()) |
529 | }); | 538 | }); |
530 | 539 | ||
531 | if let Some(container) = container { | 540 | if let Some(container) = container { |
532 | let action = best_action_for_target(container, anchor, target); | 541 | let action = best_action_for_target(container, anchor.clone(), target); |
533 | make_assist(&action, target, edit); | 542 | make_assist(&action, target, edit); |
534 | } | 543 | } |
535 | } | 544 | } |
536 | 545 | ||
537 | pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 546 | pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
538 | let path: &ast::Path = ctx.node_at_offset()?; | 547 | let path: ast::Path = ctx.node_at_offset()?; |
539 | // We don't want to mess with use statements | 548 | // We don't want to mess with use statements |
540 | if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() { | 549 | if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() { |
541 | return None; | 550 | return None; |
542 | } | 551 | } |
543 | 552 | ||
544 | let hir_path = hir::Path::from_ast(path)?; | 553 | let hir_path = hir::Path::from_ast(path.clone())?; |
545 | let segments = collect_hir_path_segments(&hir_path); | 554 | let segments = collect_hir_path_segments(&hir_path); |
546 | if segments.len() < 2 { | 555 | if segments.len() < 2 { |
547 | return None; | 556 | return None; |
@@ -554,7 +563,7 @@ pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist | |||
554 | format!("import {} in mod {}", fmt_segments(&segments), name.text()), | 563 | format!("import {} in mod {}", fmt_segments(&segments), name.text()), |
555 | |edit| { | 564 | |edit| { |
556 | let mut text_edit = TextEditBuilder::default(); | 565 | let mut text_edit = TextEditBuilder::default(); |
557 | apply_auto_import(item_list.syntax(), path, &segments, &mut text_edit); | 566 | apply_auto_import(item_list.syntax(), &path, &segments, &mut text_edit); |
558 | edit.set_edit_builder(text_edit); | 567 | edit.set_edit_builder(text_edit); |
559 | }, | 568 | }, |
560 | ); | 569 | ); |
@@ -566,7 +575,7 @@ pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist | |||
566 | format!("import {} in the current file", fmt_segments(&segments)), | 575 | format!("import {} in the current file", fmt_segments(&segments)), |
567 | |edit| { | 576 | |edit| { |
568 | let mut text_edit = TextEditBuilder::default(); | 577 | let mut text_edit = TextEditBuilder::default(); |
569 | apply_auto_import(current_file.syntax(), path, &segments, &mut text_edit); | 578 | apply_auto_import(current_file.syntax(), &path, &segments, &mut text_edit); |
570 | edit.set_edit_builder(text_edit); | 579 | edit.set_edit_builder(text_edit); |
571 | }, | 580 | }, |
572 | ); | 581 | ); |
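The hunks above show the pattern that runs through this whole migration: with the new rowan API, an AST node such as ast::Path is an owned, cheaply cloneable handle into the syntax tree rather than a `&`-borrow handed out by a TreeArc-owned tree, so call sites clone handles (`container.clone()`, `path.syntax().clone()`) where they previously passed references. A minimal sketch of that ownership model, using hypothetical stand-in types and a simplified, made-up signature, not code from this commit:

    use std::rc::Rc;

    // Stand-in for rowan's SyntaxNode: cloning only bumps a reference count.
    #[derive(Clone, PartialEq, Eq, Debug)]
    struct SyntaxNode(Rc<str>);

    // In the new style, helpers take nodes by value instead of `&SyntaxNode`.
    fn best_action_for_target(container: SyntaxNode, anchor: SyntaxNode) -> (SyntaxNode, SyntaxNode) {
        (container, anchor)
    }

    fn main() {
        let container = SyntaxNode(Rc::from("mod m { }"));
        let anchor = SyntaxNode(Rc::from("foo::bar"));
        // The caller keeps its own handles by cloning them, mirroring the
        // `container.clone()` / `anchor.clone()` calls in the diff above.
        let _ = best_action_for_target(container.clone(), anchor.clone());
        assert_eq!(container, SyntaxNode(Rc::from("mod m { }")));
    }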
diff --git a/crates/ra_assists/src/change_visibility.rs b/crates/ra_assists/src/change_visibility.rs index 6cabba3e3..ab10d2aa4 100644 --- a/crates/ra_assists/src/change_visibility.rs +++ b/crates/ra_assists/src/change_visibility.rs | |||
@@ -35,7 +35,7 @@ fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | |||
35 | if parent.children().any(|child| child.kind() == VISIBILITY) { | 35 | if parent.children().any(|child| child.kind() == VISIBILITY) { |
36 | return None; | 36 | return None; |
37 | } | 37 | } |
38 | (vis_offset(parent), keyword.range()) | 38 | (vis_offset(&parent), keyword.range()) |
39 | } else { | 39 | } else { |
40 | let ident = ctx.token_at_offset().find(|leaf| leaf.kind() == IDENT)?; | 40 | let ident = ctx.token_at_offset().find(|leaf| leaf.kind() == IDENT)?; |
41 | let field = ident.parent().ancestors().find_map(ast::NamedFieldDef::cast)?; | 41 | let field = ident.parent().ancestors().find_map(ast::NamedFieldDef::cast)?; |
@@ -65,7 +65,7 @@ fn vis_offset(node: &SyntaxNode) -> TextUnit { | |||
65 | .unwrap_or_else(|| node.range().start()) | 65 | .unwrap_or_else(|| node.range().start()) |
66 | } | 66 | } |
67 | 67 | ||
68 | fn change_vis(mut ctx: AssistCtx<impl HirDatabase>, vis: &ast::Visibility) -> Option<Assist> { | 68 | fn change_vis(mut ctx: AssistCtx<impl HirDatabase>, vis: ast::Visibility) -> Option<Assist> { |
69 | if vis.syntax().text() == "pub" { | 69 | if vis.syntax().text() == "pub" { |
70 | ctx.add_action(AssistId("change_visibility"), "change to pub(crate)", |edit| { | 70 | ctx.add_action(AssistId("change_visibility"), "change to pub(crate)", |edit| { |
71 | edit.target(vis.syntax().range()); | 71 | edit.target(vis.syntax().range()); |
diff --git a/crates/ra_assists/src/fill_match_arms.rs b/crates/ra_assists/src/fill_match_arms.rs index deef166b5..b96806ac6 100644 --- a/crates/ra_assists/src/fill_match_arms.rs +++ b/crates/ra_assists/src/fill_match_arms.rs | |||
@@ -27,7 +27,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As | |||
27 | let mut arm_iter = arm_list.arms(); | 27 | let mut arm_iter = arm_list.arms(); |
28 | let first = arm_iter.next(); | 28 | let first = arm_iter.next(); |
29 | 29 | ||
30 | match first { | 30 | match &first { |
31 | // If the arm list is empty or there is only one trivial arm, then proceed. | 31 | // If the arm list is empty or there is only one trivial arm, then proceed. |
32 | Some(arm) if is_trivial_arm(arm) => { | 32 | Some(arm) if is_trivial_arm(arm) => { |
33 | if arm_iter.next() != None { | 33 | if arm_iter.next() != None { |
@@ -44,7 +44,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As | |||
44 | 44 | ||
45 | let expr = match_expr.expr()?; | 45 | let expr = match_expr.expr()?; |
46 | let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, expr.syntax(), None); | 46 | let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, expr.syntax(), None); |
47 | let match_expr_ty = analyzer.type_of(ctx.db, expr)?; | 47 | let match_expr_ty = analyzer.type_of(ctx.db, &expr)?; |
48 | let enum_def = analyzer.autoderef(ctx.db, match_expr_ty).find_map(|ty| match ty.as_adt() { | 48 | let enum_def = analyzer.autoderef(ctx.db, match_expr_ty).find_map(|ty| match ty.as_adt() { |
49 | Some((AdtDef::Enum(e), _)) => Some(e), | 49 | Some((AdtDef::Enum(e), _)) => Some(e), |
50 | _ => None, | 50 | _ => None, |
diff --git a/crates/ra_assists/src/flip_binexpr.rs b/crates/ra_assists/src/flip_binexpr.rs index 5e41f9346..2e591ad3b 100644 --- a/crates/ra_assists/src/flip_binexpr.rs +++ b/crates/ra_assists/src/flip_binexpr.rs | |||
@@ -6,8 +6,8 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
6 | /// Flip binary expression assist. | 6 | /// Flip binary expression assist. |
7 | pub(crate) fn flip_binexpr(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 7 | pub(crate) fn flip_binexpr(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
8 | let expr = ctx.node_at_offset::<BinExpr>()?; | 8 | let expr = ctx.node_at_offset::<BinExpr>()?; |
9 | let lhs = expr.lhs()?.syntax(); | 9 | let lhs = expr.lhs()?.syntax().clone(); |
10 | let rhs = expr.rhs()?.syntax(); | 10 | let rhs = expr.rhs()?.syntax().clone(); |
11 | let op_range = expr.op_token()?.range(); | 11 | let op_range = expr.op_token()?.range(); |
12 | // The assist should be applied only if the cursor is on the operator | 12 | // The assist should be applied only if the cursor is on the operator |
13 | let cursor_in_range = ctx.frange.range.is_subrange(&op_range); | 13 | let cursor_in_range = ctx.frange.range.is_subrange(&op_range); |
diff --git a/crates/ra_assists/src/flip_comma.rs b/crates/ra_assists/src/flip_comma.rs index d8dba779f..13016ae06 100644 --- a/crates/ra_assists/src/flip_comma.rs +++ b/crates/ra_assists/src/flip_comma.rs | |||
@@ -5,8 +5,8 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
5 | 5 | ||
6 | pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 6 | pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
7 | let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == T![,])?; | 7 | let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == T![,])?; |
8 | let prev = non_trivia_sibling(comma.into(), Direction::Prev)?; | 8 | let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; |
9 | let next = non_trivia_sibling(comma.into(), Direction::Next)?; | 9 | let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; |
10 | ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| { | 10 | ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| { |
11 | edit.target(comma.range()); | 11 | edit.target(comma.range()); |
12 | edit.replace(prev.range(), next.to_string()); | 12 | edit.replace(prev.range(), next.to_string()); |
diff --git a/crates/ra_assists/src/inline_local_variable.rs b/crates/ra_assists/src/inline_local_variable.rs index 554de8b46..3c17089de 100644 --- a/crates/ra_assists/src/inline_local_variable.rs +++ b/crates/ra_assists/src/inline_local_variable.rs | |||
@@ -16,18 +16,18 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt | |||
16 | if bind_pat.is_mutable() { | 16 | if bind_pat.is_mutable() { |
17 | return None; | 17 | return None; |
18 | } | 18 | } |
19 | let initializer_expr = let_stmt.initializer(); | 19 | let initializer_expr = let_stmt.initializer()?; |
20 | let delete_range = if let Some(whitespace) = let_stmt | 20 | let delete_range = if let Some(whitespace) = let_stmt |
21 | .syntax() | 21 | .syntax() |
22 | .next_sibling_or_token() | 22 | .next_sibling_or_token() |
23 | .and_then(|it| ast::Whitespace::cast(it.as_token()?)) | 23 | .and_then(|it| ast::Whitespace::cast(it.as_token()?.clone())) |
24 | { | 24 | { |
25 | TextRange::from_to(let_stmt.syntax().range().start(), whitespace.syntax().range().end()) | 25 | TextRange::from_to(let_stmt.syntax().range().start(), whitespace.syntax().range().end()) |
26 | } else { | 26 | } else { |
27 | let_stmt.syntax().range() | 27 | let_stmt.syntax().range() |
28 | }; | 28 | }; |
29 | let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, bind_pat.syntax(), None); | 29 | let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, bind_pat.syntax(), None); |
30 | let refs = analyzer.find_all_refs(bind_pat); | 30 | let refs = analyzer.find_all_refs(&bind_pat); |
31 | 31 | ||
32 | let mut wrap_in_parens = vec![true; refs.len()]; | 32 | let mut wrap_in_parens = vec![true; refs.len()]; |
33 | 33 | ||
@@ -45,7 +45,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt | |||
45 | } | 45 | } |
46 | }; | 46 | }; |
47 | 47 | ||
48 | wrap_in_parens[i] = match (initializer_expr?.kind(), usage_parent.kind()) { | 48 | wrap_in_parens[i] = match (initializer_expr.kind(), usage_parent.kind()) { |
49 | (ExprKind::CallExpr(_), _) | 49 | (ExprKind::CallExpr(_), _) |
50 | | (ExprKind::IndexExpr(_), _) | 50 | | (ExprKind::IndexExpr(_), _) |
51 | | (ExprKind::MethodCallExpr(_), _) | 51 | | (ExprKind::MethodCallExpr(_), _) |
@@ -71,7 +71,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt | |||
71 | }; | 71 | }; |
72 | } | 72 | } |
73 | 73 | ||
74 | let init_str = initializer_expr?.syntax().text().to_string(); | 74 | let init_str = initializer_expr.syntax().text().to_string(); |
75 | let init_in_paren = format!("({})", &init_str); | 75 | let init_in_paren = format!("({})", &init_str); |
76 | 76 | ||
77 | ctx.add_action( | 77 | ctx.add_action( |
diff --git a/crates/ra_assists/src/introduce_variable.rs b/crates/ra_assists/src/introduce_variable.rs index f7f5ccafa..ce28132c9 100644 --- a/crates/ra_assists/src/introduce_variable.rs +++ b/crates/ra_assists/src/introduce_variable.rs | |||
@@ -20,8 +20,8 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option | |||
20 | return None; | 20 | return None; |
21 | } | 21 | } |
22 | let expr = node.ancestors().find_map(valid_target_expr)?; | 22 | let expr = node.ancestors().find_map(valid_target_expr)?; |
23 | let (anchor_stmt, wrap_in_block) = anchor_stmt(expr)?; | 23 | let (anchor_stmt, wrap_in_block) = anchor_stmt(expr.clone())?; |
24 | let indent = anchor_stmt.prev_sibling_or_token()?.as_token()?; | 24 | let indent = anchor_stmt.prev_sibling_or_token()?.as_token()?.clone(); |
25 | if indent.kind() != WHITESPACE { | 25 | if indent.kind() != WHITESPACE { |
26 | return None; | 26 | return None; |
27 | } | 27 | } |
@@ -37,9 +37,9 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option | |||
37 | }; | 37 | }; |
38 | 38 | ||
39 | expr.syntax().text().push_to(&mut buf); | 39 | expr.syntax().text().push_to(&mut buf); |
40 | let full_stmt = ast::ExprStmt::cast(anchor_stmt); | 40 | let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone()); |
41 | let is_full_stmt = if let Some(expr_stmt) = full_stmt { | 41 | let is_full_stmt = if let Some(expr_stmt) = &full_stmt { |
42 | Some(expr.syntax()) == expr_stmt.expr().map(|e| e.syntax()) | 42 | Some(expr.syntax().clone()) == expr_stmt.expr().map(|e| e.syntax().clone()) |
43 | } else { | 43 | } else { |
44 | false | 44 | false |
45 | }; | 45 | }; |
@@ -81,7 +81,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option | |||
81 | 81 | ||
82 | /// Check whether the node is a valid expression which can be extracted to a variable. | 82 | /// Check whether the node is a valid expression which can be extracted to a variable. |
83 | /// In general that's true for any expression, but in some cases that would produce invalid code. | 83 | /// In general that's true for any expression, but in some cases that would produce invalid code. |
84 | fn valid_target_expr(node: &SyntaxNode) -> Option<&ast::Expr> { | 84 | fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> { |
85 | match node.kind() { | 85 | match node.kind() { |
86 | PATH_EXPR => None, | 86 | PATH_EXPR => None, |
87 | BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()), | 87 | BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()), |
@@ -96,14 +96,10 @@ fn valid_target_expr(node: &SyntaxNode) -> Option<&ast::Expr> { | |||
96 | /// to produce correct code. | 96 | /// to produce correct code. |
97 | /// It can be a statement, the last in a block expression or a wanna be block | 97 | /// It can be a statement, the last in a block expression or a wanna be block |
98 | /// expression like a lambda or match arm. | 98 | /// expression like a lambda or match arm. |
99 | fn anchor_stmt(expr: &ast::Expr) -> Option<(&SyntaxNode, bool)> { | 99 | fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> { |
100 | expr.syntax().ancestors().find_map(|node| { | 100 | expr.syntax().ancestors().find_map(|node| { |
101 | if ast::Stmt::cast(node).is_some() { | ||
102 | return Some((node, false)); | ||
103 | } | ||
104 | |||
105 | if let Some(expr) = node.parent().and_then(ast::Block::cast).and_then(|it| it.expr()) { | 101 | if let Some(expr) = node.parent().and_then(ast::Block::cast).and_then(|it| it.expr()) { |
106 | if expr.syntax() == node { | 102 | if expr.syntax() == &node { |
107 | tested_by!(test_introduce_var_last_expr); | 103 | tested_by!(test_introduce_var_last_expr); |
108 | return Some((node, false)); | 104 | return Some((node, false)); |
109 | } | 105 | } |
@@ -115,6 +111,10 @@ fn anchor_stmt(expr: &ast::Expr) -> Option<(&SyntaxNode, bool)> { | |||
115 | } | 111 | } |
116 | } | 112 | } |
117 | 113 | ||
114 | if ast::Stmt::cast(node.clone()).is_some() { | ||
115 | return Some((node, false)); | ||
116 | } | ||
117 | |||
118 | None | 118 | None |
119 | }) | 119 | }) |
120 | } | 120 | } |
diff --git a/crates/ra_assists/src/move_guard.rs b/crates/ra_assists/src/move_guard.rs index e1ce86a33..313c9ad18 100644 --- a/crates/ra_assists/src/move_guard.rs +++ b/crates/ra_assists/src/move_guard.rs | |||
@@ -18,9 +18,9 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op | |||
18 | 18 | ||
19 | ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| { | 19 | ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| { |
20 | edit.target(guard.syntax().range()); | 20 | edit.target(guard.syntax().range()); |
21 | let offseting_amount = match space_before_guard { | 21 | let offseting_amount = match &space_before_guard { |
22 | Some(SyntaxElement::Token(tok)) => { | 22 | Some(SyntaxElement::Token(tok)) => { |
23 | if let Some(_) = ast::Whitespace::cast(tok) { | 23 | if let Some(_) = ast::Whitespace::cast(tok.clone()) { |
24 | let ele = space_before_guard.unwrap().range(); | 24 | let ele = space_before_guard.unwrap().range(); |
25 | edit.delete(ele); | 25 | edit.delete(ele); |
26 | ele.len() | 26 | ele.len() |
@@ -39,11 +39,11 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op | |||
39 | } | 39 | } |
40 | 40 | ||
41 | pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 41 | pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
42 | let match_arm: &MatchArm = ctx.node_at_offset::<MatchArm>()?; | 42 | let match_arm: MatchArm = ctx.node_at_offset::<MatchArm>()?; |
43 | let last_match_pat = match_arm.pats().last()?; | 43 | let last_match_pat = match_arm.pats().last()?; |
44 | 44 | ||
45 | let arm_body = match_arm.expr()?; | 45 | let arm_body = match_arm.expr()?; |
46 | let if_expr: &IfExpr = IfExpr::cast(arm_body.syntax())?; | 46 | let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone())?; |
47 | let cond = if_expr.condition()?; | 47 | let cond = if_expr.condition()?; |
48 | let then_block = if_expr.then_branch()?; | 48 | let then_block = if_expr.then_branch()?; |
49 | 49 | ||
@@ -65,7 +65,7 @@ pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>) | |||
65 | edit.target(if_expr.syntax().range()); | 65 | edit.target(if_expr.syntax().range()); |
66 | let then_only_expr = then_block.statements().next().is_none(); | 66 | let then_only_expr = then_block.statements().next().is_none(); |
67 | 67 | ||
68 | match then_block.expr() { | 68 | match &then_block.expr() { |
69 | Some(then_expr) if then_only_expr => { | 69 | Some(then_expr) if then_only_expr => { |
70 | edit.replace(if_expr.syntax().range(), then_expr.syntax().text()) | 70 | edit.replace(if_expr.syntax().range(), then_expr.syntax().text()) |
71 | } | 71 | } |
diff --git a/crates/ra_assists/src/remove_dbg.rs b/crates/ra_assists/src/remove_dbg.rs index 5680f76ca..c330bc827 100644 --- a/crates/ra_assists/src/remove_dbg.rs +++ b/crates/ra_assists/src/remove_dbg.rs | |||
@@ -8,7 +8,7 @@ use ra_syntax::{ | |||
8 | pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 8 | pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
9 | let macro_call = ctx.node_at_offset::<ast::MacroCall>()?; | 9 | let macro_call = ctx.node_at_offset::<ast::MacroCall>()?; |
10 | 10 | ||
11 | if !is_valid_macrocall(macro_call, "dbg")? { | 11 | if !is_valid_macrocall(&macro_call, "dbg")? { |
12 | return None; | 12 | return None; |
13 | } | 13 | } |
14 | 14 | ||
@@ -35,7 +35,7 @@ pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> | |||
35 | }; | 35 | }; |
36 | 36 | ||
37 | let macro_content = { | 37 | let macro_content = { |
38 | let macro_args = macro_call.token_tree()?.syntax(); | 38 | let macro_args = macro_call.token_tree()?.syntax().clone(); |
39 | let range = macro_args.range(); | 39 | let range = macro_args.range(); |
40 | let start = range.start() + TextUnit::of_char('('); | 40 | let start = range.start() + TextUnit::of_char('('); |
41 | let end = range.end() - TextUnit::of_char(')'); | 41 | let end = range.end() - TextUnit::of_char(')'); |
@@ -65,7 +65,7 @@ fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option<b | |||
65 | return None; | 65 | return None; |
66 | } | 66 | } |
67 | 67 | ||
68 | let node = macro_call.token_tree()?.syntax(); | 68 | let node = macro_call.token_tree()?.syntax().clone(); |
69 | let first_child = node.first_child_or_token()?; | 69 | let first_child = node.first_child_or_token()?; |
70 | let last_child = node.last_child_or_token()?; | 70 | let last_child = node.last_child_or_token()?; |
71 | 71 | ||
diff --git a/crates/ra_assists/src/replace_if_let_with_match.rs b/crates/ra_assists/src/replace_if_let_with_match.rs index c2c7cf70b..5de6aa266 100644 --- a/crates/ra_assists/src/replace_if_let_with_match.rs +++ b/crates/ra_assists/src/replace_if_let_with_match.rs | |||
@@ -5,7 +5,7 @@ use ra_syntax::{ast, AstNode}; | |||
5 | use crate::{Assist, AssistCtx, AssistId}; | 5 | use crate::{Assist, AssistCtx, AssistId}; |
6 | 6 | ||
7 | pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 7 | pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
8 | let if_expr: &ast::IfExpr = ctx.node_at_offset()?; | 8 | let if_expr: ast::IfExpr = ctx.node_at_offset()?; |
9 | let cond = if_expr.condition()?; | 9 | let cond = if_expr.condition()?; |
10 | let pat = cond.pat()?; | 10 | let pat = cond.pat()?; |
11 | let expr = cond.expr()?; | 11 | let expr = cond.expr()?; |
@@ -25,16 +25,11 @@ pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) -> | |||
25 | ctx.build() | 25 | ctx.build() |
26 | } | 26 | } |
27 | 27 | ||
28 | fn build_match_expr( | 28 | fn build_match_expr(expr: ast::Expr, pat1: ast::Pat, arm1: ast::Block, arm2: ast::Block) -> String { |
29 | expr: &ast::Expr, | ||
30 | pat1: &ast::Pat, | ||
31 | arm1: &ast::Block, | ||
32 | arm2: &ast::Block, | ||
33 | ) -> String { | ||
34 | let mut buf = String::new(); | 29 | let mut buf = String::new(); |
35 | buf.push_str(&format!("match {} {{\n", expr.syntax().text())); | 30 | buf.push_str(&format!("match {} {{\n", expr.syntax().text())); |
36 | buf.push_str(&format!(" {} => {}\n", pat1.syntax().text(), format_arm(arm1))); | 31 | buf.push_str(&format!(" {} => {}\n", pat1.syntax().text(), format_arm(&arm1))); |
37 | buf.push_str(&format!(" _ => {}\n", format_arm(arm2))); | 32 | buf.push_str(&format!(" _ => {}\n", format_arm(&arm2))); |
38 | buf.push_str("}"); | 33 | buf.push_str("}"); |
39 | buf | 34 | buf |
40 | } | 35 | } |
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs index b063193cf..375e2f508 100644 --- a/crates/ra_cli/src/main.rs +++ b/crates/ra_cli/src/main.rs | |||
@@ -7,7 +7,7 @@ use clap::{App, Arg, SubCommand}; | |||
7 | use flexi_logger::Logger; | 7 | use flexi_logger::Logger; |
8 | use ra_ide_api::{file_structure, Analysis}; | 8 | use ra_ide_api::{file_structure, Analysis}; |
9 | use ra_prof::profile; | 9 | use ra_prof::profile; |
10 | use ra_syntax::{AstNode, SourceFile, TreeArc}; | 10 | use ra_syntax::{AstNode, SourceFile}; |
11 | 11 | ||
12 | type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>; | 12 | type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>; |
13 | 13 | ||
@@ -100,9 +100,9 @@ fn main() -> Result<()> { | |||
100 | Ok(()) | 100 | Ok(()) |
101 | } | 101 | } |
102 | 102 | ||
103 | fn file() -> Result<TreeArc<SourceFile>> { | 103 | fn file() -> Result<SourceFile> { |
104 | let text = read_stdin()?; | 104 | let text = read_stdin()?; |
105 | Ok(SourceFile::parse(&text).tree().to_owned()) | 105 | Ok(SourceFile::parse(&text).tree()) |
106 | } | 106 | } |
107 | 107 | ||
108 | fn read_stdin() -> Result<String> { | 108 | fn read_stdin() -> Result<String> { |
diff --git a/crates/ra_fmt/src/lib.rs b/crates/ra_fmt/src/lib.rs index 1c2c04ad2..d6e895729 100644 --- a/crates/ra_fmt/src/lib.rs +++ b/crates/ra_fmt/src/lib.rs | |||
@@ -3,7 +3,7 @@ | |||
3 | use itertools::Itertools; | 3 | use itertools::Itertools; |
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | ast::{self, AstNode, AstToken}, | 5 | ast::{self, AstNode, AstToken}, |
6 | SyntaxKind, | 6 | SmolStr, SyntaxKind, |
7 | SyntaxKind::*, | 7 | SyntaxKind::*, |
8 | SyntaxNode, SyntaxToken, T, | 8 | SyntaxNode, SyntaxToken, T, |
9 | }; | 9 | }; |
@@ -15,12 +15,12 @@ pub fn reindent(text: &str, indent: &str) -> String { | |||
15 | } | 15 | } |
16 | 16 | ||
17 | /// If the node is on the beginning of the line, calculate indent. | 17 | /// If the node is on the beginning of the line, calculate indent. |
18 | pub fn leading_indent(node: &SyntaxNode) -> Option<&str> { | 18 | pub fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> { |
19 | for token in prev_tokens(node.first_token()?) { | 19 | for token in prev_tokens(node.first_token()?) { |
20 | if let Some(ws) = ast::Whitespace::cast(token) { | 20 | if let Some(ws) = ast::Whitespace::cast(token.clone()) { |
21 | let ws_text = ws.text(); | 21 | let ws_text = ws.text(); |
22 | if let Some(pos) = ws_text.rfind('\n') { | 22 | if let Some(pos) = ws_text.rfind('\n') { |
23 | return Some(&ws_text[pos + 1..]); | 23 | return Some(ws_text[pos + 1..].into()); |
24 | } | 24 | } |
25 | } | 25 | } |
26 | if token.text().contains('\n') { | 26 | if token.text().contains('\n') { |
@@ -31,17 +31,17 @@ pub fn leading_indent(node: &SyntaxNode) -> Option<&str> { | |||
31 | } | 31 | } |
32 | 32 | ||
33 | fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { | 33 | fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { |
34 | successors(token.prev_token(), |&token| token.prev_token()) | 34 | successors(token.prev_token(), |token| token.prev_token()) |
35 | } | 35 | } |
36 | 36 | ||
37 | pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> { | 37 | pub fn extract_trivial_expression(block: &ast::Block) -> Option<ast::Expr> { |
38 | let expr = block.expr()?; | 38 | let expr = block.expr()?; |
39 | if expr.syntax().text().contains('\n') { | 39 | if expr.syntax().text().contains('\n') { |
40 | return None; | 40 | return None; |
41 | } | 41 | } |
42 | let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { | 42 | let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { |
43 | WHITESPACE | T!['{'] | T!['}'] => false, | 43 | WHITESPACE | T!['{'] | T!['}'] => false, |
44 | _ => it != &expr.syntax(), | 44 | _ => it != expr.syntax(), |
45 | }); | 45 | }); |
46 | if non_trivial_children.count() > 0 { | 46 | if non_trivial_children.count() > 0 { |
47 | return None; | 47 | return None; |
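In the ra_fmt hunk above, leading_indent changes its return type from Option<&str> to Option<SmolStr>: the whitespace token it walks over is now an owned handle created inside the loop, so the function can no longer return a slice borrowing from it and instead copies the indent into an owned SmolStr (the small-string type from the smol_str crate). A standalone sketch of just that copy, with a made-up helper name:

    use smol_str::SmolStr;

    // Return the indentation that follows the last newline as an owned string,
    // mirroring the `ws_text[pos + 1..].into()` conversion in the diff above.
    fn indent_after_newline(ws_text: &str) -> Option<SmolStr> {
        let pos = ws_text.rfind('\n')?;
        Some(ws_text[pos + 1..].into())
    }

    fn main() {
        assert_eq!(indent_after_newline("let x = 1;\n    ").as_deref(), Some("    "));
        assert_eq!(indent_after_newline("no newline here"), None);
    }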
diff --git a/crates/ra_hir/src/adt.rs b/crates/ra_hir/src/adt.rs index 8afdac801..c65446df4 100644 --- a/crates/ra_hir/src/adt.rs +++ b/crates/ra_hir/src/adt.rs | |||
@@ -4,10 +4,7 @@ | |||
4 | use std::sync::Arc; | 4 | use std::sync::Arc; |
5 | 5 | ||
6 | use ra_arena::{impl_arena_id, Arena, RawId}; | 6 | use ra_arena::{impl_arena_id, Arena, RawId}; |
7 | use ra_syntax::{ | 7 | use ra_syntax::ast::{self, NameOwner, StructKind, TypeAscriptionOwner}; |
8 | ast::{self, NameOwner, StructKind, TypeAscriptionOwner}, | ||
9 | TreeArc, | ||
10 | }; | ||
11 | 8 | ||
12 | use crate::{ | 9 | use crate::{ |
13 | type_ref::TypeRef, AsName, AstDatabase, Crate, DefDatabase, Enum, EnumVariant, FieldSource, | 10 | type_ref::TypeRef, AsName, AstDatabase, Crate, DefDatabase, Enum, EnumVariant, FieldSource, |
@@ -59,11 +56,11 @@ impl StructData { | |||
59 | struct_: Struct, | 56 | struct_: Struct, |
60 | ) -> Arc<StructData> { | 57 | ) -> Arc<StructData> { |
61 | let src = struct_.source(db); | 58 | let src = struct_.source(db); |
62 | Arc::new(StructData::new(&*src.ast)) | 59 | Arc::new(StructData::new(&src.ast)) |
63 | } | 60 | } |
64 | } | 61 | } |
65 | 62 | ||
66 | fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = &ast::EnumVariant> { | 63 | fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = ast::EnumVariant> { |
67 | enum_def.variant_list().into_iter().flat_map(|it| it.variants()) | 64 | enum_def.variant_list().into_iter().flat_map(|it| it.variants()) |
68 | } | 65 | } |
69 | 66 | ||
@@ -71,9 +68,9 @@ impl EnumVariant { | |||
71 | pub(crate) fn source_impl( | 68 | pub(crate) fn source_impl( |
72 | self, | 69 | self, |
73 | db: &(impl DefDatabase + AstDatabase), | 70 | db: &(impl DefDatabase + AstDatabase), |
74 | ) -> Source<TreeArc<ast::EnumVariant>> { | 71 | ) -> Source<ast::EnumVariant> { |
75 | let src = self.parent.source(db); | 72 | let src = self.parent.source(db); |
76 | let ast = variants(&*src.ast) | 73 | let ast = variants(&src.ast) |
77 | .zip(db.enum_data(self.parent).variants.iter()) | 74 | .zip(db.enum_data(self.parent).variants.iter()) |
78 | .find(|(_syntax, (id, _))| *id == self.id) | 75 | .find(|(_syntax, (id, _))| *id == self.id) |
79 | .unwrap() | 76 | .unwrap() |
@@ -96,7 +93,7 @@ impl EnumData { | |||
96 | pub(crate) fn enum_data_query(db: &(impl DefDatabase + AstDatabase), e: Enum) -> Arc<EnumData> { | 93 | pub(crate) fn enum_data_query(db: &(impl DefDatabase + AstDatabase), e: Enum) -> Arc<EnumData> { |
97 | let src = e.source(db); | 94 | let src = e.source(db); |
98 | let name = src.ast.name().map(|n| n.as_name()); | 95 | let name = src.ast.name().map(|n| n.as_name()); |
99 | let variants = variants(&*src.ast) | 96 | let variants = variants(&src.ast) |
100 | .map(|var| EnumVariantData { | 97 | .map(|var| EnumVariantData { |
101 | name: var.name().map(|it| it.as_name()), | 98 | name: var.name().map(|it| it.as_name()), |
102 | variant_data: Arc::new(VariantData::new(var.kind())), | 99 | variant_data: Arc::new(VariantData::new(var.kind())), |
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs index 4fb5844f4..779764590 100644 --- a/crates/ra_hir/src/code_model.rs +++ b/crates/ra_hir/src/code_model.rs | |||
@@ -4,10 +4,7 @@ pub(crate) mod docs; | |||
4 | use std::sync::Arc; | 4 | use std::sync::Arc; |
5 | 5 | ||
6 | use ra_db::{CrateId, Edition, FileId, SourceRootId}; | 6 | use ra_db::{CrateId, Edition, FileId, SourceRootId}; |
7 | use ra_syntax::{ | 7 | use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner}; |
8 | ast::{self, NameOwner, TypeAscriptionOwner}, | ||
9 | TreeArc, | ||
10 | }; | ||
11 | 8 | ||
12 | use crate::{ | 9 | use crate::{ |
13 | adt::{EnumVariantId, StructFieldId, VariantDef}, | 10 | adt::{EnumVariantId, StructFieldId, VariantDef}, |
@@ -155,8 +152,8 @@ impl_froms!( | |||
155 | ); | 152 | ); |
156 | 153 | ||
157 | pub enum ModuleSource { | 154 | pub enum ModuleSource { |
158 | SourceFile(TreeArc<ast::SourceFile>), | 155 | SourceFile(ast::SourceFile), |
159 | Module(TreeArc<ast::Module>), | 156 | Module(ast::Module), |
160 | } | 157 | } |
161 | 158 | ||
162 | impl ModuleSource { | 159 | impl ModuleSource { |
@@ -199,7 +196,7 @@ impl Module { | |||
199 | self, | 196 | self, |
200 | db: &impl HirDatabase, | 197 | db: &impl HirDatabase, |
201 | import: ImportId, | 198 | import: ImportId, |
202 | ) -> Either<TreeArc<ast::UseTree>, TreeArc<ast::ExternCrateItem>> { | 199 | ) -> Either<ast::UseTree, ast::ExternCrateItem> { |
203 | let src = self.definition_source(db); | 200 | let src = self.definition_source(db); |
204 | let (_, source_map) = db.raw_items_with_source_map(src.file_id); | 201 | let (_, source_map) = db.raw_items_with_source_map(src.file_id); |
205 | source_map.get(&src.ast, import) | 202 | source_map.get(&src.ast, import) |
@@ -321,8 +318,8 @@ pub struct StructField { | |||
321 | 318 | ||
322 | #[derive(Debug)] | 319 | #[derive(Debug)] |
323 | pub enum FieldSource { | 320 | pub enum FieldSource { |
324 | Named(TreeArc<ast::NamedFieldDef>), | 321 | Named(ast::NamedFieldDef), |
325 | Pos(TreeArc<ast::PosFieldDef>), | 322 | Pos(ast::PosFieldDef), |
326 | } | 323 | } |
327 | 324 | ||
328 | impl StructField { | 325 | impl StructField { |
@@ -736,7 +733,7 @@ impl ConstData { | |||
736 | konst: Const, | 733 | konst: Const, |
737 | ) -> Arc<ConstData> { | 734 | ) -> Arc<ConstData> { |
738 | let node = konst.source(db).ast; | 735 | let node = konst.source(db).ast; |
739 | const_data_for(&*node) | 736 | const_data_for(&node) |
740 | } | 737 | } |
741 | 738 | ||
742 | pub(crate) fn static_data_query( | 739 | pub(crate) fn static_data_query( |
@@ -744,7 +741,7 @@ impl ConstData { | |||
744 | konst: Static, | 741 | konst: Static, |
745 | ) -> Arc<ConstData> { | 742 | ) -> Arc<ConstData> { |
746 | let node = konst.source(db).ast; | 743 | let node = konst.source(db).ast; |
747 | const_data_for(&*node) | 744 | const_data_for(&node) |
748 | } | 745 | } |
749 | } | 746 | } |
750 | 747 | ||
diff --git a/crates/ra_hir/src/code_model/docs.rs b/crates/ra_hir/src/code_model/docs.rs index 007ef315d..a2b4d8e97 100644 --- a/crates/ra_hir/src/code_model/docs.rs +++ b/crates/ra_hir/src/code_model/docs.rs | |||
@@ -71,21 +71,21 @@ pub(crate) fn documentation_query( | |||
71 | def: DocDef, | 71 | def: DocDef, |
72 | ) -> Option<Documentation> { | 72 | ) -> Option<Documentation> { |
73 | match def { | 73 | match def { |
74 | DocDef::Module(it) => docs_from_ast(&*it.declaration_source(db)?.ast), | 74 | DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.ast), |
75 | DocDef::StructField(it) => match it.source(db).ast { | 75 | DocDef::StructField(it) => match it.source(db).ast { |
76 | FieldSource::Named(named) => docs_from_ast(&*named), | 76 | FieldSource::Named(named) => docs_from_ast(&named), |
77 | FieldSource::Pos(..) => None, | 77 | FieldSource::Pos(..) => None, |
78 | }, | 78 | }, |
79 | DocDef::Struct(it) => docs_from_ast(&*it.source(db).ast), | 79 | DocDef::Struct(it) => docs_from_ast(&it.source(db).ast), |
80 | DocDef::Enum(it) => docs_from_ast(&*it.source(db).ast), | 80 | DocDef::Enum(it) => docs_from_ast(&it.source(db).ast), |
81 | DocDef::EnumVariant(it) => docs_from_ast(&*it.source(db).ast), | 81 | DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).ast), |
82 | DocDef::Static(it) => docs_from_ast(&*it.source(db).ast), | 82 | DocDef::Static(it) => docs_from_ast(&it.source(db).ast), |
83 | DocDef::Const(it) => docs_from_ast(&*it.source(db).ast), | 83 | DocDef::Const(it) => docs_from_ast(&it.source(db).ast), |
84 | DocDef::Function(it) => docs_from_ast(&*it.source(db).ast), | 84 | DocDef::Function(it) => docs_from_ast(&it.source(db).ast), |
85 | DocDef::Union(it) => docs_from_ast(&*it.source(db).ast), | 85 | DocDef::Union(it) => docs_from_ast(&it.source(db).ast), |
86 | DocDef::Trait(it) => docs_from_ast(&*it.source(db).ast), | 86 | DocDef::Trait(it) => docs_from_ast(&it.source(db).ast), |
87 | DocDef::TypeAlias(it) => docs_from_ast(&*it.source(db).ast), | 87 | DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).ast), |
88 | DocDef::MacroDef(it) => docs_from_ast(&*it.source(db).ast), | 88 | DocDef::MacroDef(it) => docs_from_ast(&it.source(db).ast), |
89 | } | 89 | } |
90 | } | 90 | } |
91 | 91 | ||
diff --git a/crates/ra_hir/src/code_model/src.rs b/crates/ra_hir/src/code_model/src.rs index 72451e0e7..32bd9c661 100644 --- a/crates/ra_hir/src/code_model/src.rs +++ b/crates/ra_hir/src/code_model/src.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | use ra_syntax::{ast, TreeArc}; | 1 | use ra_syntax::ast; |
2 | 2 | ||
3 | use crate::{ | 3 | use crate::{ |
4 | ids::AstItemDef, AstDatabase, Const, DefDatabase, Enum, EnumVariant, FieldSource, Function, | 4 | ids::AstItemDef, AstDatabase, Const, DefDatabase, Enum, EnumVariant, FieldSource, Function, |
@@ -34,7 +34,7 @@ impl Module { | |||
34 | pub fn declaration_source( | 34 | pub fn declaration_source( |
35 | self, | 35 | self, |
36 | db: &(impl DefDatabase + AstDatabase), | 36 | db: &(impl DefDatabase + AstDatabase), |
37 | ) -> Option<Source<TreeArc<ast::Module>>> { | 37 | ) -> Option<Source<ast::Module>> { |
38 | let def_map = db.crate_def_map(self.krate); | 38 | let def_map = db.crate_def_map(self.krate); |
39 | let decl = def_map[self.module_id].declaration?; | 39 | let decl = def_map[self.module_id].declaration?; |
40 | let ast = decl.to_node(db); | 40 | let ast = decl.to_node(db); |
@@ -49,62 +49,62 @@ impl HasSource for StructField { | |||
49 | } | 49 | } |
50 | } | 50 | } |
51 | impl HasSource for Struct { | 51 | impl HasSource for Struct { |
52 | type Ast = TreeArc<ast::StructDef>; | 52 | type Ast = ast::StructDef; |
53 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StructDef>> { | 53 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> { |
54 | self.id.source(db) | 54 | self.id.source(db) |
55 | } | 55 | } |
56 | } | 56 | } |
57 | impl HasSource for Union { | 57 | impl HasSource for Union { |
58 | type Ast = TreeArc<ast::StructDef>; | 58 | type Ast = ast::StructDef; |
59 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StructDef>> { | 59 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> { |
60 | self.id.source(db) | 60 | self.id.source(db) |
61 | } | 61 | } |
62 | } | 62 | } |
63 | impl HasSource for Enum { | 63 | impl HasSource for Enum { |
64 | type Ast = TreeArc<ast::EnumDef>; | 64 | type Ast = ast::EnumDef; |
65 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::EnumDef>> { | 65 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumDef> { |
66 | self.id.source(db) | 66 | self.id.source(db) |
67 | } | 67 | } |
68 | } | 68 | } |
69 | impl HasSource for EnumVariant { | 69 | impl HasSource for EnumVariant { |
70 | type Ast = TreeArc<ast::EnumVariant>; | 70 | type Ast = ast::EnumVariant; |
71 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::EnumVariant>> { | 71 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumVariant> { |
72 | self.source_impl(db) | 72 | self.source_impl(db) |
73 | } | 73 | } |
74 | } | 74 | } |
75 | impl HasSource for Function { | 75 | impl HasSource for Function { |
76 | type Ast = TreeArc<ast::FnDef>; | 76 | type Ast = ast::FnDef; |
77 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::FnDef>> { | 77 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::FnDef> { |
78 | self.id.source(db) | 78 | self.id.source(db) |
79 | } | 79 | } |
80 | } | 80 | } |
81 | impl HasSource for Const { | 81 | impl HasSource for Const { |
82 | type Ast = TreeArc<ast::ConstDef>; | 82 | type Ast = ast::ConstDef; |
83 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::ConstDef>> { | 83 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ConstDef> { |
84 | self.id.source(db) | 84 | self.id.source(db) |
85 | } | 85 | } |
86 | } | 86 | } |
87 | impl HasSource for Static { | 87 | impl HasSource for Static { |
88 | type Ast = TreeArc<ast::StaticDef>; | 88 | type Ast = ast::StaticDef; |
89 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StaticDef>> { | 89 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StaticDef> { |
90 | self.id.source(db) | 90 | self.id.source(db) |
91 | } | 91 | } |
92 | } | 92 | } |
93 | impl HasSource for Trait { | 93 | impl HasSource for Trait { |
94 | type Ast = TreeArc<ast::TraitDef>; | 94 | type Ast = ast::TraitDef; |
95 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::TraitDef>> { | 95 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TraitDef> { |
96 | self.id.source(db) | 96 | self.id.source(db) |
97 | } | 97 | } |
98 | } | 98 | } |
99 | impl HasSource for TypeAlias { | 99 | impl HasSource for TypeAlias { |
100 | type Ast = TreeArc<ast::TypeAliasDef>; | 100 | type Ast = ast::TypeAliasDef; |
101 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::TypeAliasDef>> { | 101 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TypeAliasDef> { |
102 | self.id.source(db) | 102 | self.id.source(db) |
103 | } | 103 | } |
104 | } | 104 | } |
105 | impl HasSource for MacroDef { | 105 | impl HasSource for MacroDef { |
106 | type Ast = TreeArc<ast::MacroCall>; | 106 | type Ast = ast::MacroCall; |
107 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::MacroCall>> { | 107 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::MacroCall> { |
108 | Source { file_id: self.id.0.file_id(), ast: self.id.0.to_node(db) } | 108 | Source { file_id: self.id.0.file_id(), ast: self.id.0.to_node(db) } |
109 | } | 109 | } |
110 | } | 110 | } |
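Every HasSource impl above drops its TreeArc wrapper: since a node in the new API already holds a counted reference to its tree, the associated `type Ast` can be the node type itself. A reduced sketch of that trait shape with hypothetical stand-in types (the real definitions live in ra_hir and ra_syntax):

    // Hypothetical stand-ins, not the real hir types.
    #[derive(Debug)]
    struct FnDefNode(String);

    struct Db;

    struct Source<T> {
        file_id: u32,
        ast: T,
    }

    trait HasSource {
        // Previously this would have been a TreeArc-wrapped node; now it is
        // the owned node type directly.
        type Ast;
        fn source(self, db: &Db) -> Source<Self::Ast>;
    }

    struct Function;

    impl HasSource for Function {
        type Ast = FnDefNode;
        fn source(self, _db: &Db) -> Source<FnDefNode> {
            Source { file_id: 0, ast: FnDefNode("fn foo() {}".to_string()) }
        }
    }

    fn main() {
        let src = Function.source(&Db);
        println!("file {} -> {:?}", src.file_id, src.ast);
    }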
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs index da9f3e32d..358365176 100644 --- a/crates/ra_hir/src/db.rs +++ b/crates/ra_hir/src/db.rs | |||
@@ -2,7 +2,7 @@ use std::sync::Arc; | |||
2 | 2 | ||
3 | use parking_lot::Mutex; | 3 | use parking_lot::Mutex; |
4 | use ra_db::{salsa, SourceDatabase}; | 4 | use ra_db::{salsa, SourceDatabase}; |
5 | use ra_syntax::{ast, Parse, SmolStr, SyntaxNode, TreeArc}; | 5 | use ra_syntax::{ast, Parse, SmolStr, SyntaxNode}; |
6 | 6 | ||
7 | use crate::{ | 7 | use crate::{ |
8 | adt::{EnumData, StructData}, | 8 | adt::{EnumData, StructData}, |
@@ -62,11 +62,11 @@ pub trait AstDatabase: InternDatabase { | |||
62 | 62 | ||
63 | #[salsa::transparent] | 63 | #[salsa::transparent] |
64 | #[salsa::invoke(crate::source_id::AstIdMap::file_item_query)] | 64 | #[salsa::invoke(crate::source_id::AstIdMap::file_item_query)] |
65 | fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> TreeArc<SyntaxNode>; | 65 | fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> SyntaxNode; |
66 | 66 | ||
67 | #[salsa::transparent] | 67 | #[salsa::transparent] |
68 | #[salsa::invoke(crate::ids::HirFileId::parse_or_expand_query)] | 68 | #[salsa::invoke(crate::ids::HirFileId::parse_or_expand_query)] |
69 | fn parse_or_expand(&self, file_id: HirFileId) -> Option<TreeArc<SyntaxNode>>; | 69 | fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>; |
70 | 70 | ||
71 | #[salsa::invoke(crate::ids::HirFileId::parse_macro_query)] | 71 | #[salsa::invoke(crate::ids::HirFileId::parse_macro_query)] |
72 | fn parse_macro(&self, macro_file: ids::MacroFile) -> Option<Parse<SyntaxNode>>; | 72 | fn parse_macro(&self, macro_file: ids::MacroFile) -> Option<Parse<SyntaxNode>>; |
diff --git a/crates/ra_hir/src/diagnostics.rs b/crates/ra_hir/src/diagnostics.rs index c97f0656d..0290483b3 100644 --- a/crates/ra_hir/src/diagnostics.rs +++ b/crates/ra_hir/src/diagnostics.rs | |||
@@ -1,6 +1,6 @@ | |||
1 | use std::{any::Any, fmt}; | 1 | use std::{any::Any, fmt}; |
2 | 2 | ||
3 | use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, TreeArc}; | 3 | use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange}; |
4 | use relative_path::RelativePathBuf; | 4 | use relative_path::RelativePathBuf; |
5 | 5 | ||
6 | use crate::{HirDatabase, HirFileId, Name}; | 6 | use crate::{HirDatabase, HirFileId, Name}; |
@@ -33,9 +33,9 @@ pub trait AstDiagnostic { | |||
33 | } | 33 | } |
34 | 34 | ||
35 | impl dyn Diagnostic { | 35 | impl dyn Diagnostic { |
36 | pub fn syntax_node(&self, db: &impl HirDatabase) -> TreeArc<SyntaxNode> { | 36 | pub fn syntax_node(&self, db: &impl HirDatabase) -> SyntaxNode { |
37 | let node = db.parse_or_expand(self.file()).unwrap(); | 37 | let node = db.parse_or_expand(self.file()).unwrap(); |
38 | self.syntax_node_ptr().to_node(&*node).to_owned() | 38 | self.syntax_node_ptr().to_node(&node) |
39 | } | 39 | } |
40 | 40 | ||
41 | pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> { | 41 | pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> { |
@@ -143,11 +143,11 @@ impl Diagnostic for MissingFields { | |||
143 | } | 143 | } |
144 | 144 | ||
145 | impl AstDiagnostic for MissingFields { | 145 | impl AstDiagnostic for MissingFields { |
146 | type AST = TreeArc<ast::NamedFieldList>; | 146 | type AST = ast::NamedFieldList; |
147 | 147 | ||
148 | fn ast(&self, db: &impl HirDatabase) -> Self::AST { | 148 | fn ast(&self, db: &impl HirDatabase) -> Self::AST { |
149 | let root = db.parse_or_expand(self.file()).unwrap(); | 149 | let root = db.parse_or_expand(self.file()).unwrap(); |
150 | let node = self.syntax_node_ptr().to_node(&*root); | 150 | let node = self.syntax_node_ptr().to_node(&root); |
151 | ast::NamedFieldList::cast(&node).unwrap().to_owned() | 151 | ast::NamedFieldList::cast(node).unwrap() |
152 | } | 152 | } |
153 | } | 153 | } |
diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs index 3a97d97ce..70af3f119 100644 --- a/crates/ra_hir/src/expr.rs +++ b/crates/ra_hir/src/expr.rs | |||
@@ -550,7 +550,7 @@ where | |||
550 | self.exprs.alloc(block) | 550 | self.exprs.alloc(block) |
551 | } | 551 | } |
552 | 552 | ||
553 | fn collect_expr(&mut self, expr: &ast::Expr) -> ExprId { | 553 | fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { |
554 | let syntax_ptr = SyntaxNodePtr::new(expr.syntax()); | 554 | let syntax_ptr = SyntaxNodePtr::new(expr.syntax()); |
555 | match expr.kind() { | 555 | match expr.kind() { |
556 | ast::ExprKind::IfExpr(e) => { | 556 | ast::ExprKind::IfExpr(e) => { |
@@ -565,7 +565,8 @@ where | |||
565 | .map(|b| match b { | 565 | .map(|b| match b { |
566 | ast::ElseBranch::Block(it) => self.collect_block(it), | 566 | ast::ElseBranch::Block(it) => self.collect_block(it), |
567 | ast::ElseBranch::IfExpr(elif) => { | 567 | ast::ElseBranch::IfExpr(elif) => { |
568 | let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap(); | 568 | let expr: ast::Expr = |
569 | ast::Expr::cast(elif.syntax().clone()).unwrap(); | ||
569 | self.collect_expr(expr) | 570 | self.collect_expr(expr) |
570 | } | 571 | } |
571 | }) | 572 | }) |
@@ -582,7 +583,7 @@ where | |||
582 | let else_branch = e.else_branch().map(|b| match b { | 583 | let else_branch = e.else_branch().map(|b| match b { |
583 | ast::ElseBranch::Block(it) => self.collect_block(it), | 584 | ast::ElseBranch::Block(it) => self.collect_block(it), |
584 | ast::ElseBranch::IfExpr(elif) => { | 585 | ast::ElseBranch::IfExpr(elif) => { |
585 | let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap(); | 586 | let expr: ast::Expr = ast::Expr::cast(elif.syntax().clone()).unwrap(); |
586 | self.collect_expr(expr) | 587 | self.collect_expr(expr) |
587 | } | 588 | } |
588 | }); | 589 | }); |
@@ -689,7 +690,7 @@ where | |||
689 | let struct_lit = if let Some(nfl) = e.named_field_list() { | 690 | let struct_lit = if let Some(nfl) = e.named_field_list() { |
690 | let fields = nfl | 691 | let fields = nfl |
691 | .fields() | 692 | .fields() |
692 | .inspect(|field| field_ptrs.push(AstPtr::new(*field))) | 693 | .inspect(|field| field_ptrs.push(AstPtr::new(field))) |
693 | .map(|field| StructLitField { | 694 | .map(|field| StructLitField { |
694 | name: field | 695 | name: field |
695 | .name_ref() | 696 | .name_ref() |
@@ -699,7 +700,7 @@ where | |||
699 | self.collect_expr(e) | 700 | self.collect_expr(e) |
700 | } else if let Some(nr) = field.name_ref() { | 701 | } else if let Some(nr) = field.name_ref() { |
701 | // field shorthand | 702 | // field shorthand |
702 | let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(nr))); | 703 | let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(&nr))); |
703 | self.source_map | 704 | self.source_map |
704 | .expr_map | 705 | .expr_map |
705 | .insert(SyntaxNodePtr::new(nr.syntax()), id); | 706 | .insert(SyntaxNodePtr::new(nr.syntax()), id); |
@@ -837,7 +838,7 @@ where | |||
837 | let ast_id = self | 838 | let ast_id = self |
838 | .db | 839 | .db |
839 | .ast_id_map(self.current_file_id) | 840 | .ast_id_map(self.current_file_id) |
840 | .ast_id(e) | 841 | .ast_id(&e) |
841 | .with_file_id(self.current_file_id); | 842 | .with_file_id(self.current_file_id); |
842 | 843 | ||
843 | if let Some(path) = e.path().and_then(Path::from_ast) { | 844 | if let Some(path) = e.path().and_then(Path::from_ast) { |
@@ -845,11 +846,11 @@ where | |||
845 | let call_id = MacroCallLoc { def: def.id, ast_id }.id(self.db); | 846 | let call_id = MacroCallLoc { def: def.id, ast_id }.id(self.db); |
846 | let file_id = call_id.as_file(MacroFileKind::Expr); | 847 | let file_id = call_id.as_file(MacroFileKind::Expr); |
847 | if let Some(node) = self.db.parse_or_expand(file_id) { | 848 | if let Some(node) = self.db.parse_or_expand(file_id) { |
848 | if let Some(expr) = ast::Expr::cast(&*node) { | 849 | if let Some(expr) = ast::Expr::cast(node) { |
849 | log::debug!("macro expansion {}", expr.syntax().debug_dump()); | 850 | log::debug!("macro expansion {}", expr.syntax().debug_dump()); |
850 | let old_file_id = | 851 | let old_file_id = |
851 | std::mem::replace(&mut self.current_file_id, file_id); | 852 | std::mem::replace(&mut self.current_file_id, file_id); |
852 | let id = self.collect_expr(&expr); | 853 | let id = self.collect_expr(expr); |
853 | self.current_file_id = old_file_id; | 854 | self.current_file_id = old_file_id; |
854 | return id; | 855 | return id; |
855 | } | 856 | } |
@@ -863,7 +864,7 @@ where | |||
863 | } | 864 | } |
864 | } | 865 | } |
865 | 866 | ||
866 | fn collect_expr_opt(&mut self, expr: Option<&ast::Expr>) -> ExprId { | 867 | fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId { |
867 | if let Some(expr) = expr { | 868 | if let Some(expr) = expr { |
868 | self.collect_expr(expr) | 869 | self.collect_expr(expr) |
869 | } else { | 870 | } else { |
@@ -871,7 +872,7 @@ where | |||
871 | } | 872 | } |
872 | } | 873 | } |
873 | 874 | ||
874 | fn collect_block(&mut self, block: &ast::Block) -> ExprId { | 875 | fn collect_block(&mut self, block: ast::Block) -> ExprId { |
875 | let statements = block | 876 | let statements = block |
876 | .statements() | 877 | .statements() |
877 | .map(|s| match s.kind() { | 878 | .map(|s| match s.kind() { |
@@ -890,7 +891,7 @@ where | |||
890 | self.alloc_expr(Expr::Block { statements, tail }, SyntaxNodePtr::new(block.syntax())) | 891 | self.alloc_expr(Expr::Block { statements, tail }, SyntaxNodePtr::new(block.syntax())) |
891 | } | 892 | } |
892 | 893 | ||
893 | fn collect_block_opt(&mut self, block: Option<&ast::Block>) -> ExprId { | 894 | fn collect_block_opt(&mut self, block: Option<ast::Block>) -> ExprId { |
894 | if let Some(block) = block { | 895 | if let Some(block) = block { |
895 | self.collect_block(block) | 896 | self.collect_block(block) |
896 | } else { | 897 | } else { |
@@ -898,7 +899,7 @@ where | |||
898 | } | 899 | } |
899 | } | 900 | } |
900 | 901 | ||
901 | fn collect_pat(&mut self, pat: &ast::Pat) -> PatId { | 902 | fn collect_pat(&mut self, pat: ast::Pat) -> PatId { |
902 | let pattern = match pat.kind() { | 903 | let pattern = match pat.kind() { |
903 | ast::PatKind::BindPat(bp) => { | 904 | ast::PatKind::BindPat(bp) => { |
904 | let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); | 905 | let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); |
@@ -932,7 +933,8 @@ where | |||
932 | let mut fields: Vec<_> = field_pat_list | 933 | let mut fields: Vec<_> = field_pat_list |
933 | .bind_pats() | 934 | .bind_pats() |
934 | .filter_map(|bind_pat| { | 935 | .filter_map(|bind_pat| { |
935 | let ast_pat = ast::Pat::cast(bind_pat.syntax()).expect("bind pat is a pat"); | 936 | let ast_pat = |
937 | ast::Pat::cast(bind_pat.syntax().clone()).expect("bind pat is a pat"); | ||
936 | let pat = self.collect_pat(ast_pat); | 938 | let pat = self.collect_pat(ast_pat); |
937 | let name = bind_pat.name()?.as_name(); | 939 | let name = bind_pat.name()?.as_name(); |
938 | Some(FieldPat { name, pat }) | 940 | Some(FieldPat { name, pat }) |
@@ -953,11 +955,11 @@ where | |||
953 | ast::PatKind::LiteralPat(_) => Pat::Missing, | 955 | ast::PatKind::LiteralPat(_) => Pat::Missing, |
954 | ast::PatKind::SlicePat(_) | ast::PatKind::RangePat(_) => Pat::Missing, | 956 | ast::PatKind::SlicePat(_) | ast::PatKind::RangePat(_) => Pat::Missing, |
955 | }; | 957 | }; |
956 | let ptr = AstPtr::new(pat); | 958 | let ptr = AstPtr::new(&pat); |
957 | self.alloc_pat(pattern, Either::A(ptr)) | 959 | self.alloc_pat(pattern, Either::A(ptr)) |
958 | } | 960 | } |
959 | 961 | ||
960 | fn collect_pat_opt(&mut self, pat: Option<&ast::Pat>) -> PatId { | 962 | fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId { |
961 | if let Some(pat) = pat { | 963 | if let Some(pat) = pat { |
962 | self.collect_pat(pat) | 964 | self.collect_pat(pat) |
963 | } else { | 965 | } else { |
@@ -965,20 +967,20 @@ where | |||
965 | } | 967 | } |
966 | } | 968 | } |
967 | 969 | ||
968 | fn collect_const_body(&mut self, node: &ast::ConstDef) { | 970 | fn collect_const_body(&mut self, node: ast::ConstDef) { |
969 | let body = self.collect_expr_opt(node.body()); | 971 | let body = self.collect_expr_opt(node.body()); |
970 | self.body_expr = Some(body); | 972 | self.body_expr = Some(body); |
971 | } | 973 | } |
972 | 974 | ||
973 | fn collect_static_body(&mut self, node: &ast::StaticDef) { | 975 | fn collect_static_body(&mut self, node: ast::StaticDef) { |
974 | let body = self.collect_expr_opt(node.body()); | 976 | let body = self.collect_expr_opt(node.body()); |
975 | self.body_expr = Some(body); | 977 | self.body_expr = Some(body); |
976 | } | 978 | } |
977 | 979 | ||
978 | fn collect_fn_body(&mut self, node: &ast::FnDef) { | 980 | fn collect_fn_body(&mut self, node: ast::FnDef) { |
979 | if let Some(param_list) = node.param_list() { | 981 | if let Some(param_list) = node.param_list() { |
980 | if let Some(self_param) = param_list.self_param() { | 982 | if let Some(self_param) = param_list.self_param() { |
981 | let ptr = AstPtr::new(self_param); | 983 | let ptr = AstPtr::new(&self_param); |
982 | let param_pat = self.alloc_pat( | 984 | let param_pat = self.alloc_pat( |
983 | Pat::Bind { | 985 | Pat::Bind { |
984 | name: SELF_PARAM, | 986 | name: SELF_PARAM, |
@@ -1027,17 +1029,17 @@ pub(crate) fn body_with_source_map_query( | |||
1027 | DefWithBody::Const(ref c) => { | 1029 | DefWithBody::Const(ref c) => { |
1028 | let src = c.source(db); | 1030 | let src = c.source(db); |
1029 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); | 1031 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); |
1030 | collector.collect_const_body(&src.ast) | 1032 | collector.collect_const_body(src.ast) |
1031 | } | 1033 | } |
1032 | DefWithBody::Function(ref f) => { | 1034 | DefWithBody::Function(ref f) => { |
1033 | let src = f.source(db); | 1035 | let src = f.source(db); |
1034 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); | 1036 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); |
1035 | collector.collect_fn_body(&src.ast) | 1037 | collector.collect_fn_body(src.ast) |
1036 | } | 1038 | } |
1037 | DefWithBody::Static(ref s) => { | 1039 | DefWithBody::Static(ref s) => { |
1038 | let src = s.source(db); | 1040 | let src = s.source(db); |
1039 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); | 1041 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); |
1040 | collector.collect_static_body(&src.ast) | 1042 | collector.collect_static_body(src.ast) |
1041 | } | 1043 | } |
1042 | } | 1044 | } |
1043 | 1045 | ||
diff --git a/crates/ra_hir/src/expr/scope.rs b/crates/ra_hir/src/expr/scope.rs index 28fd52684..6589b782c 100644 --- a/crates/ra_hir/src/expr/scope.rs +++ b/crates/ra_hir/src/expr/scope.rs | |||
@@ -190,7 +190,7 @@ mod tests { | |||
190 | 190 | ||
191 | let (db, _source_root, file_id) = MockDatabase::with_single_file(&code); | 191 | let (db, _source_root, file_id) = MockDatabase::with_single_file(&code); |
192 | let file = db.parse(file_id).ok().unwrap(); | 192 | let file = db.parse(file_id).ok().unwrap(); |
193 | let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); | 193 | let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); |
194 | let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None); | 194 | let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None); |
195 | 195 | ||
196 | let scopes = analyzer.scopes(); | 196 | let scopes = analyzer.scopes(); |
@@ -290,10 +290,10 @@ mod tests { | |||
290 | let file = db.parse(file_id).ok().unwrap(); | 290 | let file = db.parse(file_id).ok().unwrap(); |
291 | let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()) | 291 | let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()) |
292 | .expect("failed to find a name at the target offset"); | 292 | .expect("failed to find a name at the target offset"); |
293 | let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); | 293 | let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); |
294 | let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None); | 294 | let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None); |
295 | 295 | ||
296 | let local_name_entry = analyzer.resolve_local_name(name_ref).unwrap(); | 296 | let local_name_entry = analyzer.resolve_local_name(&name_ref).unwrap(); |
297 | let local_name = | 297 | let local_name = |
298 | local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); | 298 | local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); |
299 | assert_eq!(local_name.range(), expected_name.syntax().range()); | 299 | assert_eq!(local_name.range(), expected_name.syntax().range()); |
diff --git a/crates/ra_hir/src/expr/validation.rs b/crates/ra_hir/src/expr/validation.rs index c2a10a0b5..82a06ca25 100644 --- a/crates/ra_hir/src/expr/validation.rs +++ b/crates/ra_hir/src/expr/validation.rs | |||
@@ -79,7 +79,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
79 | .and_then(StructLit::cast) | 79 | .and_then(StructLit::cast) |
80 | .and_then(|lit| lit.named_field_list()) | 80 | .and_then(|lit| lit.named_field_list()) |
81 | { | 81 | { |
82 | let field_list_ptr = AstPtr::new(field_list_node); | 82 | let field_list_ptr = AstPtr::new(&field_list_node); |
83 | self.sink.push(MissingFields { | 83 | self.sink.push(MissingFields { |
84 | file: file_id, | 84 | file: file_id, |
85 | field_list: field_list_ptr, | 85 | field_list: field_list_ptr, |
diff --git a/crates/ra_hir/src/generics.rs b/crates/ra_hir/src/generics.rs index 07a59193f..bcbb4988d 100644 --- a/crates/ra_hir/src/generics.rs +++ b/crates/ra_hir/src/generics.rs | |||
@@ -76,17 +76,17 @@ impl GenericParams { | |||
76 | generics.parent_params = parent.map(|p| db.generic_params(p)); | 76 | generics.parent_params = parent.map(|p| db.generic_params(p)); |
77 | let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32; | 77 | let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32; |
78 | match def { | 78 | match def { |
79 | GenericDef::Function(it) => generics.fill(&*it.source(db).ast, start), | 79 | GenericDef::Function(it) => generics.fill(&it.source(db).ast, start), |
80 | GenericDef::Struct(it) => generics.fill(&*it.source(db).ast, start), | 80 | GenericDef::Struct(it) => generics.fill(&it.source(db).ast, start), |
81 | GenericDef::Union(it) => generics.fill(&*it.source(db).ast, start), | 81 | GenericDef::Union(it) => generics.fill(&it.source(db).ast, start), |
82 | GenericDef::Enum(it) => generics.fill(&*it.source(db).ast, start), | 82 | GenericDef::Enum(it) => generics.fill(&it.source(db).ast, start), |
83 | GenericDef::Trait(it) => { | 83 | GenericDef::Trait(it) => { |
84 | // traits get the Self type as an implicit first type parameter | 84 | // traits get the Self type as an implicit first type parameter |
85 | generics.params.push(GenericParam { idx: start, name: SELF_TYPE, default: None }); | 85 | generics.params.push(GenericParam { idx: start, name: SELF_TYPE, default: None }); |
86 | generics.fill(&*it.source(db).ast, start + 1); | 86 | generics.fill(&it.source(db).ast, start + 1); |
87 | } | 87 | } |
88 | GenericDef::TypeAlias(it) => generics.fill(&*it.source(db).ast, start), | 88 | GenericDef::TypeAlias(it) => generics.fill(&it.source(db).ast, start), |
89 | GenericDef::ImplBlock(it) => generics.fill(&*it.source(db).ast, start), | 89 | GenericDef::ImplBlock(it) => generics.fill(&it.source(db).ast, start), |
90 | GenericDef::EnumVariant(_) => {} | 90 | GenericDef::EnumVariant(_) => {} |
91 | } | 91 | } |
92 | 92 | ||
@@ -102,9 +102,9 @@ impl GenericParams { | |||
102 | } | 102 | } |
103 | } | 103 | } |
104 | 104 | ||
105 | fn fill_params(&mut self, params: &ast::TypeParamList, start: u32) { | 105 | fn fill_params(&mut self, params: ast::TypeParamList, start: u32) { |
106 | for (idx, type_param) in params.type_params().enumerate() { | 106 | for (idx, type_param) in params.type_params().enumerate() { |
107 | let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing); | 107 | let name = type_param.name().map_or_else(Name::missing, |it| it.as_name()); |
108 | let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast); | 108 | let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast); |
109 | 109 | ||
110 | let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default }; | 110 | let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default }; |
@@ -121,7 +121,7 @@ impl GenericParams { | |||
121 | } | 121 | } |
122 | } | 122 | } |
123 | 123 | ||
124 | fn fill_where_predicates(&mut self, where_clause: &ast::WhereClause) { | 124 | fn fill_where_predicates(&mut self, where_clause: ast::WhereClause) { |
125 | for pred in where_clause.predicates() { | 125 | for pred in where_clause.predicates() { |
126 | let type_ref = match pred.type_ref() { | 126 | let type_ref = match pred.type_ref() { |
127 | Some(type_ref) => type_ref, | 127 | Some(type_ref) => type_ref, |
@@ -134,7 +134,7 @@ impl GenericParams { | |||
134 | } | 134 | } |
135 | } | 135 | } |
136 | 136 | ||
137 | fn add_where_predicate_from_bound(&mut self, bound: &ast::TypeBound, type_ref: TypeRef) { | 137 | fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) { |
138 | let path = bound | 138 | let path = bound |
139 | .type_ref() | 139 | .type_ref() |
140 | .and_then(|tr| match tr.kind() { | 140 | .and_then(|tr| match tr.kind() { |
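Besides switching the helpers to take `ast::TypeParamList`, `ast::WhereClause`, and `ast::TypeBound` by value, the hunk at line 107 also rewrites the name fallback as a single `map_or_else`. A small equivalent of the two spellings, using hypothetical stand-ins for `ast::Name` and hir's `Name`:

    #[derive(Clone, Debug, PartialEq)]
    struct Name(String);

    impl Name {
        fn missing() -> Name { Name("[missing name]".into()) }
    }

    // Hypothetical stand-in for ast::Name, with the as_name() conversion.
    #[derive(Clone, Debug)]
    struct AstName(String);

    impl AstName {
        fn as_name(&self) -> Name { Name(self.0.clone()) }
    }

    #[derive(Clone, Debug)]
    struct TypeParam { name: Option<AstName> }

    impl TypeParam {
        fn name(&self) -> Option<AstName> { self.name.clone() }
    }

    fn main() {
        let named = TypeParam { name: Some(AstName("T".into())) };
        let unnamed = TypeParam { name: None };

        // New spelling, as on line 107: one combinator handles both cases.
        let n1 = named.name().map_or_else(Name::missing, |it| it.as_name());
        // Old spelling, equivalent but in two steps.
        let n2 = unnamed.name().map(|it| it.as_name()).unwrap_or_else(Name::missing);

        assert_eq!(n1, Name("T".into()));
        assert_eq!(n2, Name("[missing name]".into()));
    }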
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs index 83f5c3f39..05a18eb56 100644 --- a/crates/ra_hir/src/ids.rs +++ b/crates/ra_hir/src/ids.rs | |||
@@ -6,7 +6,7 @@ use std::{ | |||
6 | use mbe::MacroRules; | 6 | use mbe::MacroRules; |
7 | use ra_db::{salsa, FileId}; | 7 | use ra_db::{salsa, FileId}; |
8 | use ra_prof::profile; | 8 | use ra_prof::profile; |
9 | use ra_syntax::{ast, AstNode, Parse, SyntaxNode, TreeArc}; | 9 | use ra_syntax::{ast, AstNode, Parse, SyntaxNode}; |
10 | 10 | ||
11 | use crate::{AstDatabase, AstId, DefDatabase, FileAstId, InternDatabase, Module, Source}; | 11 | use crate::{AstDatabase, AstId, DefDatabase, FileAstId, InternDatabase, Module, Source}; |
12 | 12 | ||
@@ -58,11 +58,11 @@ impl HirFileId { | |||
58 | pub(crate) fn parse_or_expand_query( | 58 | pub(crate) fn parse_or_expand_query( |
59 | db: &impl AstDatabase, | 59 | db: &impl AstDatabase, |
60 | file_id: HirFileId, | 60 | file_id: HirFileId, |
61 | ) -> Option<TreeArc<SyntaxNode>> { | 61 | ) -> Option<SyntaxNode> { |
62 | match file_id.0 { | 62 | match file_id.0 { |
63 | HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().to_owned()), | 63 | HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().clone()), |
64 | HirFileIdRepr::Macro(macro_file) => { | 64 | HirFileIdRepr::Macro(macro_file) => { |
65 | db.parse_macro(macro_file).map(|it| it.tree().to_owned()) | 65 | db.parse_macro(macro_file).map(|it| it.syntax_node()) |
66 | } | 66 | } |
67 | } | 67 | } |
68 | } | 68 | } |
@@ -123,7 +123,7 @@ pub struct MacroDefId(pub(crate) AstId<ast::MacroCall>); | |||
123 | pub(crate) fn macro_def_query(db: &impl AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> { | 123 | pub(crate) fn macro_def_query(db: &impl AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> { |
124 | let macro_call = id.0.to_node(db); | 124 | let macro_call = id.0.to_node(db); |
125 | let arg = macro_call.token_tree()?; | 125 | let arg = macro_call.token_tree()?; |
126 | let (tt, _) = mbe::ast_to_token_tree(arg).or_else(|| { | 126 | let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| { |
127 | log::warn!("fail on macro_def to token tree: {:#?}", arg); | 127 | log::warn!("fail on macro_def to token tree: {:#?}", arg); |
128 | None | 128 | None |
129 | })?; | 129 | })?; |
@@ -138,7 +138,7 @@ pub(crate) fn macro_arg_query(db: &impl AstDatabase, id: MacroCallId) -> Option< | |||
138 | let loc = id.loc(db); | 138 | let loc = id.loc(db); |
139 | let macro_call = loc.ast_id.to_node(db); | 139 | let macro_call = loc.ast_id.to_node(db); |
140 | let arg = macro_call.token_tree()?; | 140 | let arg = macro_call.token_tree()?; |
141 | let (tt, _) = mbe::ast_to_token_tree(arg)?; | 141 | let (tt, _) = mbe::ast_to_token_tree(&arg)?; |
142 | Some(Arc::new(tt)) | 142 | Some(Arc::new(tt)) |
143 | } | 143 | } |
144 | 144 | ||
@@ -262,7 +262,7 @@ pub(crate) trait AstItemDef<N: AstNode>: salsa::InternKey + Clone { | |||
262 | let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) }; | 262 | let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) }; |
263 | Self::intern(ctx.db, loc) | 263 | Self::intern(ctx.db, loc) |
264 | } | 264 | } |
265 | fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source<TreeArc<N>> { | 265 | fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source<N> { |
266 | let loc = self.lookup_intern(db); | 266 | let loc = self.lookup_intern(db); |
267 | let ast = loc.ast_id.to_node(db); | 267 | let ast = loc.ast_id.to_node(db); |
268 | Source { file_id: loc.ast_id.file_id(), ast } | 268 | Source { file_id: loc.ast_id.file_id(), ast } |
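The `ids.rs` hunks show the core type change: query results drop the `TreeArc<...>` wrapper because the new `SyntaxNode` is itself a reference-counted handle, so `.to_owned()` becomes a plain `.clone()`. A simplified model of why that clone is cheap (not the real rowan internals, which share a green tree):

    use std::sync::Arc;

    // Simplified model: the node holds an Arc to the underlying tree data,
    // so cloning only bumps a reference count and copies no text.
    #[derive(Clone, Debug)]
    struct SyntaxNode {
        data: Arc<String>,
    }

    // Old signature: fn parse_or_expand(...) -> Option<TreeArc<SyntaxNode>>.
    // New signature: the owned handle is returned directly.
    fn parse_or_expand(text: Option<&str>) -> Option<SyntaxNode> {
        text.map(|t| SyntaxNode { data: Arc::new(t.to_string()) })
    }

    fn main() {
        let node = parse_or_expand(Some("fn main() {}")).unwrap();
        let copy = node.clone(); // plays the role of the old .to_owned()
        // Both handles point at the same shared data.
        assert!(Arc::ptr_eq(&node.data, &copy.data));
        assert_eq!(Arc::strong_count(&node.data), 2);
    }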
diff --git a/crates/ra_hir/src/impl_block.rs b/crates/ra_hir/src/impl_block.rs index ce134b27a..8e62cf66d 100644 --- a/crates/ra_hir/src/impl_block.rs +++ b/crates/ra_hir/src/impl_block.rs | |||
@@ -4,7 +4,7 @@ use std::sync::Arc; | |||
4 | use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; | 4 | use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; |
5 | use ra_syntax::{ | 5 | use ra_syntax::{ |
6 | ast::{self, AstNode}, | 6 | ast::{self, AstNode}, |
7 | AstPtr, SourceFile, TreeArc, | 7 | AstPtr, SourceFile, |
8 | }; | 8 | }; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{ |
@@ -28,9 +28,9 @@ impl ImplSourceMap { | |||
28 | self.map.insert(impl_id, AstPtr::new(impl_block)) | 28 | self.map.insert(impl_id, AstPtr::new(impl_block)) |
29 | } | 29 | } |
30 | 30 | ||
31 | pub fn get(&self, source: &ModuleSource, impl_id: ImplId) -> TreeArc<ast::ImplBlock> { | 31 | pub fn get(&self, source: &ModuleSource, impl_id: ImplId) -> ast::ImplBlock { |
32 | let file = match source { | 32 | let file = match source { |
33 | ModuleSource::SourceFile(file) => &*file, | 33 | ModuleSource::SourceFile(file) => file.clone(), |
34 | ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), | 34 | ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), |
35 | }; | 35 | }; |
36 | 36 | ||
@@ -45,8 +45,8 @@ pub struct ImplBlock { | |||
45 | } | 45 | } |
46 | 46 | ||
47 | impl HasSource for ImplBlock { | 47 | impl HasSource for ImplBlock { |
48 | type Ast = TreeArc<ast::ImplBlock>; | 48 | type Ast = ast::ImplBlock; |
49 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::ImplBlock>> { | 49 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ImplBlock> { |
50 | let source_map = db.impls_in_module_with_source_map(self.module).1; | 50 | let source_map = db.impls_in_module_with_source_map(self.module).1; |
51 | let src = self.module.definition_source(db); | 51 | let src = self.module.definition_source(db); |
52 | Source { file_id: src.file_id, ast: source_map.get(&src.ast, self.impl_id) } | 52 | Source { file_id: src.file_id, ast: source_map.get(&src.ast, self.impl_id) } |
@@ -132,9 +132,9 @@ impl ImplData { | |||
132 | item_list | 132 | item_list |
133 | .impl_items() | 133 | .impl_items() |
134 | .map(|item_node| match item_node.kind() { | 134 | .map(|item_node| match item_node.kind() { |
135 | ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(), | 135 | ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(&it) }.into(), |
136 | ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(), | 136 | ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(), |
137 | ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(), | 137 | ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(), |
138 | }) | 138 | }) |
139 | .collect() | 139 | .collect() |
140 | } else { | 140 | } else { |
@@ -202,20 +202,20 @@ impl ModuleImplBlocks { | |||
202 | 202 | ||
203 | let src = m.module.definition_source(db); | 203 | let src = m.module.definition_source(db); |
204 | let node = match &src.ast { | 204 | let node = match &src.ast { |
205 | ModuleSource::SourceFile(node) => node.syntax(), | 205 | ModuleSource::SourceFile(node) => node.syntax().clone(), |
206 | ModuleSource::Module(node) => { | 206 | ModuleSource::Module(node) => { |
207 | node.item_list().expect("inline module should have item list").syntax() | 207 | node.item_list().expect("inline module should have item list").syntax().clone() |
208 | } | 208 | } |
209 | }; | 209 | }; |
210 | 210 | ||
211 | for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) { | 211 | for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) { |
212 | let impl_block = ImplData::from_ast(db, src.file_id, m.module, impl_block_ast); | 212 | let impl_block = ImplData::from_ast(db, src.file_id, m.module, &impl_block_ast); |
213 | let id = m.impls.alloc(impl_block); | 213 | let id = m.impls.alloc(impl_block); |
214 | for &impl_item in &m.impls[id].items { | 214 | for &impl_item in &m.impls[id].items { |
215 | m.impls_by_def.insert(impl_item, id); | 215 | m.impls_by_def.insert(impl_item, id); |
216 | } | 216 | } |
217 | 217 | ||
218 | source_map.insert(id, impl_block_ast); | 218 | source_map.insert(id, &impl_block_ast); |
219 | } | 219 | } |
220 | 220 | ||
221 | m | 221 | m |
diff --git a/crates/ra_hir/src/lang_item.rs b/crates/ra_hir/src/lang_item.rs index 0443d4d9a..fd6609fb8 100644 --- a/crates/ra_hir/src/lang_item.rs +++ b/crates/ra_hir/src/lang_item.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | use rustc_hash::FxHashMap; | 1 | use rustc_hash::FxHashMap; |
2 | use std::sync::Arc; | 2 | use std::sync::Arc; |
3 | 3 | ||
4 | use ra_syntax::{ast::AttrsOwner, SmolStr, TreeArc}; | 4 | use ra_syntax::{ast::AttrsOwner, SmolStr}; |
5 | 5 | ||
6 | use crate::{ | 6 | use crate::{ |
7 | AstDatabase, Crate, DefDatabase, Enum, Function, HasSource, HirDatabase, ImplBlock, Module, | 7 | AstDatabase, Crate, DefDatabase, Enum, Function, HasSource, HirDatabase, ImplBlock, Module, |
@@ -95,7 +95,7 @@ impl LangItems { | |||
95 | // Look for impl targets | 95 | // Look for impl targets |
96 | for impl_block in module.impl_blocks(db) { | 96 | for impl_block in module.impl_blocks(db) { |
97 | let src = impl_block.source(db); | 97 | let src = impl_block.source(db); |
98 | if let Some(lang_item_name) = lang_item_name(&*src.ast) { | 98 | if let Some(lang_item_name) = lang_item_name(&src.ast) { |
99 | self.items | 99 | self.items |
100 | .entry(lang_item_name) | 100 | .entry(lang_item_name) |
101 | .or_insert_with(|| LangItemTarget::ImplBlock(impl_block)); | 101 | .or_insert_with(|| LangItemTarget::ImplBlock(impl_block)); |
@@ -137,11 +137,11 @@ impl LangItems { | |||
137 | item: T, | 137 | item: T, |
138 | constructor: fn(T) -> LangItemTarget, | 138 | constructor: fn(T) -> LangItemTarget, |
139 | ) where | 139 | ) where |
140 | T: Copy + HasSource<Ast = TreeArc<N>>, | 140 | T: Copy + HasSource<Ast = N>, |
141 | N: AttrsOwner, | 141 | N: AttrsOwner, |
142 | { | 142 | { |
143 | let node = item.source(db).ast; | 143 | let node = item.source(db).ast; |
144 | if let Some(lang_item_name) = lang_item_name(&*node) { | 144 | if let Some(lang_item_name) = lang_item_name(&node) { |
145 | self.items.entry(lang_item_name).or_insert_with(|| constructor(item)); | 145 | self.items.entry(lang_item_name).or_insert_with(|| constructor(item)); |
146 | } | 146 | } |
147 | } | 147 | } |
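With owned nodes, the `HasSource` associated type in `lang_item.rs` (and in `impl_block.rs` above) becomes simply `Ast = N` rather than `Ast = TreeArc<N>`, and the generic bound `T: Copy + HasSource<Ast = N>` follows suit. A reduced sketch of that trait shape, with made-up `Function`/`FnDef` types and no database parameter, standing in for the real ones:

    // Stand-in for a typed AST node that can carry attributes.
    #[derive(Clone, Debug)]
    struct FnDef { lang_attr: Option<String> }

    trait AttrsOwner {
        fn lang_item_name(&self) -> Option<String>;
    }

    impl AttrsOwner for FnDef {
        fn lang_item_name(&self) -> Option<String> {
            self.lang_attr.clone()
        }
    }

    // The associated type is the owned node itself, with no TreeArc wrapper.
    trait HasSource {
        type Ast;
        fn source(self) -> Self::Ast;
    }

    #[derive(Clone, Copy, Debug)]
    struct Function;

    impl HasSource for Function {
        type Ast = FnDef;
        fn source(self) -> FnDef {
            FnDef { lang_attr: Some("owned_box".to_string()) }
        }
    }

    // Mirrors the bound used when collecting lang items:
    // T: Copy + HasSource<Ast = N>, N: AttrsOwner.
    fn collect_lang_item<T, N>(item: T) -> Option<String>
    where
        T: Copy + HasSource<Ast = N>,
        N: AttrsOwner,
    {
        let node = item.source();
        node.lang_item_name()
    }

    fn main() {
        assert_eq!(collect_lang_item(Function), Some("owned_box".to_string()));
    }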
diff --git a/crates/ra_hir/src/name.rs b/crates/ra_hir/src/name.rs index 40c9d6002..c589f8aba 100644 --- a/crates/ra_hir/src/name.rs +++ b/crates/ra_hir/src/name.rs | |||
@@ -75,7 +75,7 @@ impl AsName for ast::Name { | |||
75 | } | 75 | } |
76 | } | 76 | } |
77 | 77 | ||
78 | impl<'a> AsName for ast::FieldKind<'a> { | 78 | impl AsName for ast::FieldKind { |
79 | fn as_name(&self) -> Name { | 79 | fn as_name(&self) -> Name { |
80 | match self { | 80 | match self { |
81 | ast::FieldKind::Name(nr) => nr.as_name(), | 81 | ast::FieldKind::Name(nr) => nr.as_name(), |
diff --git a/crates/ra_hir/src/nameres/raw.rs b/crates/ra_hir/src/nameres/raw.rs index 46b2bef5b..8517f3c43 100644 --- a/crates/ra_hir/src/nameres/raw.rs +++ b/crates/ra_hir/src/nameres/raw.rs | |||
@@ -3,7 +3,7 @@ use std::{ops::Index, sync::Arc}; | |||
3 | use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; | 3 | use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; |
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | ast::{self, AttrsOwner, NameOwner}, | 5 | ast::{self, AttrsOwner, NameOwner}, |
6 | AstNode, AstPtr, SmolStr, SourceFile, TreeArc, | 6 | AstNode, AstPtr, SmolStr, SourceFile, |
7 | }; | 7 | }; |
8 | use test_utils::tested_by; | 8 | use test_utils::tested_by; |
9 | 9 | ||
@@ -32,7 +32,7 @@ pub struct ImportSourceMap { | |||
32 | } | 32 | } |
33 | 33 | ||
34 | type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>; | 34 | type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>; |
35 | type ImportSource = Either<TreeArc<ast::UseTree>, TreeArc<ast::ExternCrateItem>>; | 35 | type ImportSource = Either<ast::UseTree, ast::ExternCrateItem>; |
36 | 36 | ||
37 | impl ImportSourcePtr { | 37 | impl ImportSourcePtr { |
38 | fn to_node(self, file: &SourceFile) -> ImportSource { | 38 | fn to_node(self, file: &SourceFile) -> ImportSource { |
@@ -50,11 +50,11 @@ impl ImportSourceMap { | |||
50 | 50 | ||
51 | pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource { | 51 | pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource { |
52 | let file = match source { | 52 | let file = match source { |
53 | ModuleSource::SourceFile(file) => &*file, | 53 | ModuleSource::SourceFile(file) => file.clone(), |
54 | ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), | 54 | ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), |
55 | }; | 55 | }; |
56 | 56 | ||
57 | self.map[import].to_node(file) | 57 | self.map[import].to_node(&file) |
58 | } | 58 | } |
59 | } | 59 | } |
60 | 60 | ||
@@ -76,8 +76,8 @@ impl RawItems { | |||
76 | source_map: ImportSourceMap::default(), | 76 | source_map: ImportSourceMap::default(), |
77 | }; | 77 | }; |
78 | if let Some(node) = db.parse_or_expand(file_id) { | 78 | if let Some(node) = db.parse_or_expand(file_id) { |
79 | if let Some(source_file) = ast::SourceFile::cast(&node) { | 79 | if let Some(source_file) = ast::SourceFile::cast(node) { |
80 | collector.process_module(None, &*source_file); | 80 | collector.process_module(None, source_file); |
81 | } | 81 | } |
82 | } | 82 | } |
83 | (Arc::new(collector.raw_items), Arc::new(collector.source_map)) | 83 | (Arc::new(collector.raw_items), Arc::new(collector.source_map)) |
@@ -188,7 +188,7 @@ struct RawItemsCollector { | |||
188 | } | 188 | } |
189 | 189 | ||
190 | impl RawItemsCollector { | 190 | impl RawItemsCollector { |
191 | fn process_module(&mut self, current_module: Option<Module>, body: &impl ast::ModuleItemOwner) { | 191 | fn process_module(&mut self, current_module: Option<Module>, body: impl ast::ModuleItemOwner) { |
192 | for item_or_macro in body.items_with_macros() { | 192 | for item_or_macro in body.items_with_macros() { |
193 | match item_or_macro { | 193 | match item_or_macro { |
194 | ast::ItemOrMacro::Macro(m) => self.add_macro(current_module, m), | 194 | ast::ItemOrMacro::Macro(m) => self.add_macro(current_module, m), |
@@ -197,7 +197,7 @@ impl RawItemsCollector { | |||
197 | } | 197 | } |
198 | } | 198 | } |
199 | 199 | ||
200 | fn add_item(&mut self, current_module: Option<Module>, item: &ast::ModuleItem) { | 200 | fn add_item(&mut self, current_module: Option<Module>, item: ast::ModuleItem) { |
201 | let (kind, name) = match item.kind() { | 201 | let (kind, name) = match item.kind() { |
202 | ast::ModuleItemKind::Module(module) => { | 202 | ast::ModuleItemKind::Module(module) => { |
203 | self.add_module(current_module, module); | 203 | self.add_module(current_module, module); |
@@ -216,7 +216,7 @@ impl RawItemsCollector { | |||
216 | return; | 216 | return; |
217 | } | 217 | } |
218 | ast::ModuleItemKind::StructDef(it) => { | 218 | ast::ModuleItemKind::StructDef(it) => { |
219 | let id = self.source_ast_id_map.ast_id(it); | 219 | let id = self.source_ast_id_map.ast_id(&it); |
220 | let name = it.name(); | 220 | let name = it.name(); |
221 | if it.is_union() { | 221 | if it.is_union() { |
222 | (DefKind::Union(id), name) | 222 | (DefKind::Union(id), name) |
@@ -225,22 +225,22 @@ impl RawItemsCollector { | |||
225 | } | 225 | } |
226 | } | 226 | } |
227 | ast::ModuleItemKind::EnumDef(it) => { | 227 | ast::ModuleItemKind::EnumDef(it) => { |
228 | (DefKind::Enum(self.source_ast_id_map.ast_id(it)), it.name()) | 228 | (DefKind::Enum(self.source_ast_id_map.ast_id(&it)), it.name()) |
229 | } | 229 | } |
230 | ast::ModuleItemKind::FnDef(it) => { | 230 | ast::ModuleItemKind::FnDef(it) => { |
231 | (DefKind::Function(self.source_ast_id_map.ast_id(it)), it.name()) | 231 | (DefKind::Function(self.source_ast_id_map.ast_id(&it)), it.name()) |
232 | } | 232 | } |
233 | ast::ModuleItemKind::TraitDef(it) => { | 233 | ast::ModuleItemKind::TraitDef(it) => { |
234 | (DefKind::Trait(self.source_ast_id_map.ast_id(it)), it.name()) | 234 | (DefKind::Trait(self.source_ast_id_map.ast_id(&it)), it.name()) |
235 | } | 235 | } |
236 | ast::ModuleItemKind::TypeAliasDef(it) => { | 236 | ast::ModuleItemKind::TypeAliasDef(it) => { |
237 | (DefKind::TypeAlias(self.source_ast_id_map.ast_id(it)), it.name()) | 237 | (DefKind::TypeAlias(self.source_ast_id_map.ast_id(&it)), it.name()) |
238 | } | 238 | } |
239 | ast::ModuleItemKind::ConstDef(it) => { | 239 | ast::ModuleItemKind::ConstDef(it) => { |
240 | (DefKind::Const(self.source_ast_id_map.ast_id(it)), it.name()) | 240 | (DefKind::Const(self.source_ast_id_map.ast_id(&it)), it.name()) |
241 | } | 241 | } |
242 | ast::ModuleItemKind::StaticDef(it) => { | 242 | ast::ModuleItemKind::StaticDef(it) => { |
243 | (DefKind::Static(self.source_ast_id_map.ast_id(it)), it.name()) | 243 | (DefKind::Static(self.source_ast_id_map.ast_id(&it)), it.name()) |
244 | } | 244 | } |
245 | }; | 245 | }; |
246 | if let Some(name) = name { | 246 | if let Some(name) = name { |
@@ -250,14 +250,14 @@ impl RawItemsCollector { | |||
250 | } | 250 | } |
251 | } | 251 | } |
252 | 252 | ||
253 | fn add_module(&mut self, current_module: Option<Module>, module: &ast::Module) { | 253 | fn add_module(&mut self, current_module: Option<Module>, module: ast::Module) { |
254 | let name = match module.name() { | 254 | let name = match module.name() { |
255 | Some(it) => it.as_name(), | 255 | Some(it) => it.as_name(), |
256 | None => return, | 256 | None => return, |
257 | }; | 257 | }; |
258 | 258 | ||
259 | let attr_path = extract_mod_path_attribute(module); | 259 | let attr_path = extract_mod_path_attribute(&module); |
260 | let ast_id = self.source_ast_id_map.ast_id(module); | 260 | let ast_id = self.source_ast_id_map.ast_id(&module); |
261 | if module.has_semi() { | 261 | if module.has_semi() { |
262 | let item = | 262 | let item = |
263 | self.raw_items.modules.alloc(ModuleData::Declaration { name, ast_id, attr_path }); | 263 | self.raw_items.modules.alloc(ModuleData::Declaration { name, ast_id, attr_path }); |
@@ -278,10 +278,10 @@ impl RawItemsCollector { | |||
278 | tested_by!(name_res_works_for_broken_modules); | 278 | tested_by!(name_res_works_for_broken_modules); |
279 | } | 279 | } |
280 | 280 | ||
281 | fn add_use_item(&mut self, current_module: Option<Module>, use_item: &ast::UseItem) { | 281 | fn add_use_item(&mut self, current_module: Option<Module>, use_item: ast::UseItem) { |
282 | let is_prelude = use_item.has_atom_attr("prelude_import"); | 282 | let is_prelude = use_item.has_atom_attr("prelude_import"); |
283 | 283 | ||
284 | Path::expand_use_item(use_item, |path, use_tree, is_glob, alias| { | 284 | Path::expand_use_item(&use_item, |path, use_tree, is_glob, alias| { |
285 | let import_data = | 285 | let import_data = |
286 | ImportData { path, alias, is_glob, is_prelude, is_extern_crate: false }; | 286 | ImportData { path, alias, is_glob, is_prelude, is_extern_crate: false }; |
287 | self.push_import(current_module, import_data, Either::A(AstPtr::new(use_tree))); | 287 | self.push_import(current_module, import_data, Either::A(AstPtr::new(use_tree))); |
@@ -291,11 +291,11 @@ impl RawItemsCollector { | |||
291 | fn add_extern_crate_item( | 291 | fn add_extern_crate_item( |
292 | &mut self, | 292 | &mut self, |
293 | current_module: Option<Module>, | 293 | current_module: Option<Module>, |
294 | extern_crate: &ast::ExternCrateItem, | 294 | extern_crate: ast::ExternCrateItem, |
295 | ) { | 295 | ) { |
296 | if let Some(name_ref) = extern_crate.name_ref() { | 296 | if let Some(name_ref) = extern_crate.name_ref() { |
297 | let path = Path::from_name_ref(name_ref); | 297 | let path = Path::from_name_ref(&name_ref); |
298 | let alias = extern_crate.alias().and_then(|a| a.name()).map(AsName::as_name); | 298 | let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name()); |
299 | let import_data = ImportData { | 299 | let import_data = ImportData { |
300 | path, | 300 | path, |
301 | alias, | 301 | alias, |
@@ -303,18 +303,18 @@ impl RawItemsCollector { | |||
303 | is_prelude: false, | 303 | is_prelude: false, |
304 | is_extern_crate: true, | 304 | is_extern_crate: true, |
305 | }; | 305 | }; |
306 | self.push_import(current_module, import_data, Either::B(AstPtr::new(extern_crate))); | 306 | self.push_import(current_module, import_data, Either::B(AstPtr::new(&extern_crate))); |
307 | } | 307 | } |
308 | } | 308 | } |
309 | 309 | ||
310 | fn add_macro(&mut self, current_module: Option<Module>, m: &ast::MacroCall) { | 310 | fn add_macro(&mut self, current_module: Option<Module>, m: ast::MacroCall) { |
311 | let path = match m.path().and_then(Path::from_ast) { | 311 | let path = match m.path().and_then(Path::from_ast) { |
312 | Some(it) => it, | 312 | Some(it) => it, |
313 | _ => return, | 313 | _ => return, |
314 | }; | 314 | }; |
315 | 315 | ||
316 | let name = m.name().map(|it| it.as_name()); | 316 | let name = m.name().map(|it| it.as_name()); |
317 | let ast_id = self.source_ast_id_map.ast_id(m); | 317 | let ast_id = self.source_ast_id_map.ast_id(&m); |
318 | let export = m.has_atom_attr("macro_export"); | 318 | let export = m.has_atom_attr("macro_export"); |
319 | let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export }); | 319 | let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export }); |
320 | self.push_item(current_module, RawItem::Macro(m)); | 320 | self.push_item(current_module, RawItem::Macro(m)); |
diff --git a/crates/ra_hir/src/path.rs b/crates/ra_hir/src/path.rs index bce9d2d4b..882db7681 100644 --- a/crates/ra_hir/src/path.rs +++ b/crates/ra_hir/src/path.rs | |||
@@ -47,9 +47,9 @@ pub enum PathKind { | |||
47 | 47 | ||
48 | impl Path { | 48 | impl Path { |
49 | /// Calls `cb` with all paths, represented by this use item. | 49 | /// Calls `cb` with all paths, represented by this use item. |
50 | pub fn expand_use_item<'a>( | 50 | pub fn expand_use_item( |
51 | item: &'a ast::UseItem, | 51 | item: &ast::UseItem, |
52 | mut cb: impl FnMut(Path, &'a ast::UseTree, bool, Option<Name>), | 52 | mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>), |
53 | ) { | 53 | ) { |
54 | if let Some(tree) = item.use_tree() { | 54 | if let Some(tree) = item.use_tree() { |
55 | expand_use_tree(None, tree, &mut cb); | 55 | expand_use_tree(None, tree, &mut cb); |
@@ -57,7 +57,7 @@ impl Path { | |||
57 | } | 57 | } |
58 | 58 | ||
59 | /// Converts an `ast::Path` to `Path`. Works with use trees. | 59 | /// Converts an `ast::Path` to `Path`. Works with use trees. |
60 | pub fn from_ast(mut path: &ast::Path) -> Option<Path> { | 60 | pub fn from_ast(mut path: ast::Path) -> Option<Path> { |
61 | let mut kind = PathKind::Plain; | 61 | let mut kind = PathKind::Plain; |
62 | let mut segments = Vec::new(); | 62 | let mut segments = Vec::new(); |
63 | loop { | 63 | loop { |
@@ -87,7 +87,7 @@ impl Path { | |||
87 | break; | 87 | break; |
88 | } | 88 | } |
89 | } | 89 | } |
90 | path = match qualifier(path) { | 90 | path = match qualifier(&path) { |
91 | Some(it) => it, | 91 | Some(it) => it, |
92 | None => break, | 92 | None => break, |
93 | }; | 93 | }; |
@@ -95,7 +95,7 @@ impl Path { | |||
95 | segments.reverse(); | 95 | segments.reverse(); |
96 | return Some(Path { kind, segments }); | 96 | return Some(Path { kind, segments }); |
97 | 97 | ||
98 | fn qualifier(path: &ast::Path) -> Option<&ast::Path> { | 98 | fn qualifier(path: &ast::Path) -> Option<ast::Path> { |
99 | if let Some(q) = path.qualifier() { | 99 | if let Some(q) = path.qualifier() { |
100 | return Some(q); | 100 | return Some(q); |
101 | } | 101 | } |
@@ -136,7 +136,7 @@ impl Path { | |||
136 | } | 136 | } |
137 | 137 | ||
138 | impl GenericArgs { | 138 | impl GenericArgs { |
139 | pub(crate) fn from_ast(node: &ast::TypeArgList) -> Option<GenericArgs> { | 139 | pub(crate) fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> { |
140 | let mut args = Vec::new(); | 140 | let mut args = Vec::new(); |
141 | for type_arg in node.type_args() { | 141 | for type_arg in node.type_args() { |
142 | let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); | 142 | let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); |
@@ -160,10 +160,10 @@ impl From<Name> for Path { | |||
160 | } | 160 | } |
161 | } | 161 | } |
162 | 162 | ||
163 | fn expand_use_tree<'a>( | 163 | fn expand_use_tree( |
164 | prefix: Option<Path>, | 164 | prefix: Option<Path>, |
165 | tree: &'a ast::UseTree, | 165 | tree: ast::UseTree, |
166 | cb: &mut impl FnMut(Path, &'a ast::UseTree, bool, Option<Name>), | 166 | cb: &mut impl FnMut(Path, &ast::UseTree, bool, Option<Name>), |
167 | ) { | 167 | ) { |
168 | if let Some(use_tree_list) = tree.use_tree_list() { | 168 | if let Some(use_tree_list) = tree.use_tree_list() { |
169 | let prefix = match tree.path() { | 169 | let prefix = match tree.path() { |
@@ -188,7 +188,7 @@ fn expand_use_tree<'a>( | |||
188 | if let Some(segment) = ast_path.segment() { | 188 | if let Some(segment) = ast_path.segment() { |
189 | if segment.kind() == Some(ast::PathSegmentKind::SelfKw) { | 189 | if segment.kind() == Some(ast::PathSegmentKind::SelfKw) { |
190 | if let Some(prefix) = prefix { | 190 | if let Some(prefix) = prefix { |
191 | cb(prefix, tree, false, alias); | 191 | cb(prefix, &tree, false, alias); |
192 | return; | 192 | return; |
193 | } | 193 | } |
194 | } | 194 | } |
@@ -196,7 +196,7 @@ fn expand_use_tree<'a>( | |||
196 | } | 196 | } |
197 | if let Some(path) = convert_path(prefix, ast_path) { | 197 | if let Some(path) = convert_path(prefix, ast_path) { |
198 | let is_glob = tree.has_star(); | 198 | let is_glob = tree.has_star(); |
199 | cb(path, tree, is_glob, alias) | 199 | cb(path, &tree, is_glob, alias) |
200 | } | 200 | } |
201 | // FIXME: report errors somewhere | 201 | // FIXME: report errors somewhere |
202 | // We get here if we do | 202 | // We get here if we do |
@@ -204,7 +204,7 @@ fn expand_use_tree<'a>( | |||
204 | } | 204 | } |
205 | } | 205 | } |
206 | 206 | ||
207 | fn convert_path(prefix: Option<Path>, path: &ast::Path) -> Option<Path> { | 207 | fn convert_path(prefix: Option<Path>, path: ast::Path) -> Option<Path> { |
208 | let prefix = | 208 | let prefix = |
209 | if let Some(qual) = path.qualifier() { Some(convert_path(prefix, qual)?) } else { prefix }; | 209 | if let Some(qual) = path.qualifier() { Some(convert_path(prefix, qual)?) } else { prefix }; |
210 | let segment = path.segment()?; | 210 | let segment = path.segment()?; |
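In `path.rs`, `Path::from_ast` now consumes an `ast::Path` and walks qualifiers through owned values: `qualifier(&path)` returns `Option<ast::Path>` and the loop rebinds `path` to it. A compact sketch of that owned-value loop over a toy nested path type (hypothetical, not the real ast::Path):

    // Toy nested path: `a::b::c` is segment "c" with qualifier `a::b`.
    #[derive(Clone, Debug)]
    struct Path {
        segment: String,
        qualifier: Option<Box<Path>>,
    }

    // Returns an owned Path, like the updated fn qualifier(&ast::Path) -> Option<ast::Path>.
    fn qualifier(path: &Path) -> Option<Path> {
        path.qualifier.as_deref().cloned()
    }

    // Collect segments innermost-first, then reverse, as Path::from_ast does.
    fn segments(mut path: Path) -> Vec<String> {
        let mut out = Vec::new();
        loop {
            out.push(path.segment.clone());
            path = match qualifier(&path) {
                Some(it) => it,
                None => break,
            };
        }
        out.reverse();
        out
    }

    fn main() {
        let p = Path {
            segment: "c".into(),
            qualifier: Some(Box::new(Path {
                segment: "b".into(),
                qualifier: Some(Box::new(Path { segment: "a".into(), qualifier: None })),
            })),
        };
        assert_eq!(segments(p), vec!["a".to_string(), "b".into(), "c".into()]);
    }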
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs index 071c1bb18..e7bc4df97 100644 --- a/crates/ra_hir/src/source_binder.rs +++ b/crates/ra_hir/src/source_binder.rs | |||
@@ -37,7 +37,7 @@ pub fn module_from_file_id(db: &impl HirDatabase, file_id: FileId) -> Option<Mod | |||
37 | pub fn module_from_declaration( | 37 | pub fn module_from_declaration( |
38 | db: &impl HirDatabase, | 38 | db: &impl HirDatabase, |
39 | file_id: FileId, | 39 | file_id: FileId, |
40 | decl: &ast::Module, | 40 | decl: ast::Module, |
41 | ) -> Option<Module> { | 41 | ) -> Option<Module> { |
42 | let parent_module = module_from_file_id(db, file_id); | 42 | let parent_module = module_from_file_id(db, file_id); |
43 | let child_name = decl.name(); | 43 | let child_name = decl.name(); |
@@ -50,8 +50,8 @@ pub fn module_from_declaration( | |||
50 | /// Locates the module by position in the source code. | 50 | /// Locates the module by position in the source code. |
51 | pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> { | 51 | pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> { |
52 | let parse = db.parse(position.file_id); | 52 | let parse = db.parse(position.file_id); |
53 | match find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) { | 53 | match &find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) { |
54 | Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m), | 54 | Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m.clone()), |
55 | _ => module_from_file_id(db, position.file_id), | 55 | _ => module_from_file_id(db, position.file_id), |
56 | } | 56 | } |
57 | } | 57 | } |
@@ -59,12 +59,12 @@ pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Op | |||
59 | fn module_from_inline( | 59 | fn module_from_inline( |
60 | db: &impl HirDatabase, | 60 | db: &impl HirDatabase, |
61 | file_id: FileId, | 61 | file_id: FileId, |
62 | module: &ast::Module, | 62 | module: ast::Module, |
63 | ) -> Option<Module> { | 63 | ) -> Option<Module> { |
64 | assert!(!module.has_semi()); | 64 | assert!(!module.has_semi()); |
65 | let file_id = file_id.into(); | 65 | let file_id = file_id.into(); |
66 | let ast_id_map = db.ast_id_map(file_id); | 66 | let ast_id_map = db.ast_id_map(file_id); |
67 | let item_id = ast_id_map.ast_id(module).with_file_id(file_id); | 67 | let item_id = ast_id_map.ast_id(&module).with_file_id(file_id); |
68 | module_from_source(db, file_id, Some(item_id)) | 68 | module_from_source(db, file_id, Some(item_id)) |
69 | } | 69 | } |
70 | 70 | ||
@@ -127,16 +127,16 @@ fn try_get_resolver_for_node( | |||
127 | file_id: FileId, | 127 | file_id: FileId, |
128 | node: &SyntaxNode, | 128 | node: &SyntaxNode, |
129 | ) -> Option<Resolver> { | 129 | ) -> Option<Resolver> { |
130 | if let Some(module) = ast::Module::cast(node) { | 130 | if let Some(module) = ast::Module::cast(node.clone()) { |
131 | Some(module_from_declaration(db, file_id, module)?.resolver(db)) | 131 | Some(module_from_declaration(db, file_id, module)?.resolver(db)) |
132 | } else if let Some(_) = ast::SourceFile::cast(node) { | 132 | } else if let Some(_) = ast::SourceFile::cast(node.clone()) { |
133 | Some(module_from_source(db, file_id.into(), None)?.resolver(db)) | 133 | Some(module_from_source(db, file_id.into(), None)?.resolver(db)) |
134 | } else if let Some(s) = ast::StructDef::cast(node) { | 134 | } else if let Some(s) = ast::StructDef::cast(node.clone()) { |
135 | let module = module_from_child_node(db, file_id, s.syntax())?; | 135 | let module = module_from_child_node(db, file_id, s.syntax())?; |
136 | Some(struct_from_module(db, module, s).resolver(db)) | 136 | Some(struct_from_module(db, module, &s).resolver(db)) |
137 | } else if let Some(e) = ast::EnumDef::cast(node) { | 137 | } else if let Some(e) = ast::EnumDef::cast(node.clone()) { |
138 | let module = module_from_child_node(db, file_id, e.syntax())?; | 138 | let module = module_from_child_node(db, file_id, e.syntax())?; |
139 | Some(enum_from_module(db, module, e).resolver(db)) | 139 | Some(enum_from_module(db, module, &e).resolver(db)) |
140 | } else if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { | 140 | } else if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { |
141 | Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db)) | 141 | Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db)) |
142 | } else { | 142 | } else { |
@@ -153,14 +153,14 @@ fn def_with_body_from_child_node( | |||
153 | let module = module_from_child_node(db, file_id, node)?; | 153 | let module = module_from_child_node(db, file_id, node)?; |
154 | let ctx = LocationCtx::new(db, module, file_id.into()); | 154 | let ctx = LocationCtx::new(db, module, file_id.into()); |
155 | node.ancestors().find_map(|node| { | 155 | node.ancestors().find_map(|node| { |
156 | if let Some(def) = ast::FnDef::cast(node) { | 156 | if let Some(def) = ast::FnDef::cast(node.clone()) { |
157 | return Some(Function { id: ctx.to_def(def) }.into()); | 157 | return Some(Function { id: ctx.to_def(&def) }.into()); |
158 | } | 158 | } |
159 | if let Some(def) = ast::ConstDef::cast(node) { | 159 | if let Some(def) = ast::ConstDef::cast(node.clone()) { |
160 | return Some(Const { id: ctx.to_def(def) }.into()); | 160 | return Some(Const { id: ctx.to_def(&def) }.into()); |
161 | } | 161 | } |
162 | if let Some(def) = ast::StaticDef::cast(node) { | 162 | if let Some(def) = ast::StaticDef::cast(node.clone()) { |
163 | return Some(Static { id: ctx.to_def(def) }.into()); | 163 | return Some(Static { id: ctx.to_def(&def) }.into()); |
164 | } | 164 | } |
165 | None | 165 | None |
166 | }) | 166 | }) |
@@ -237,7 +237,7 @@ impl SourceAnalyzer { | |||
237 | SourceAnalyzer { | 237 | SourceAnalyzer { |
238 | resolver: node | 238 | resolver: node |
239 | .ancestors() | 239 | .ancestors() |
240 | .find_map(|node| try_get_resolver_for_node(db, file_id, node)) | 240 | .find_map(|node| try_get_resolver_for_node(db, file_id, &node)) |
241 | .unwrap_or_default(), | 241 | .unwrap_or_default(), |
242 | body_source_map: None, | 242 | body_source_map: None, |
243 | infer: None, | 243 | infer: None, |
@@ -257,17 +257,17 @@ impl SourceAnalyzer { | |||
257 | } | 257 | } |
258 | 258 | ||
259 | pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { | 259 | pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { |
260 | let expr_id = self.body_source_map.as_ref()?.node_expr(call.into())?; | 260 | let expr_id = self.body_source_map.as_ref()?.node_expr(&call.clone().into())?; |
261 | self.infer.as_ref()?.method_resolution(expr_id) | 261 | self.infer.as_ref()?.method_resolution(expr_id) |
262 | } | 262 | } |
263 | 263 | ||
264 | pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> { | 264 | pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> { |
265 | let expr_id = self.body_source_map.as_ref()?.node_expr(field.into())?; | 265 | let expr_id = self.body_source_map.as_ref()?.node_expr(&field.clone().into())?; |
266 | self.infer.as_ref()?.field_resolution(expr_id) | 266 | self.infer.as_ref()?.field_resolution(expr_id) |
267 | } | 267 | } |
268 | 268 | ||
269 | pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> { | 269 | pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> { |
270 | let expr_id = self.body_source_map.as_ref()?.node_expr(struct_lit.into())?; | 270 | let expr_id = self.body_source_map.as_ref()?.node_expr(&struct_lit.clone().into())?; |
271 | self.infer.as_ref()?.variant_resolution(expr_id) | 271 | self.infer.as_ref()?.variant_resolution(expr_id) |
272 | } | 272 | } |
273 | 273 | ||
@@ -290,18 +290,18 @@ impl SourceAnalyzer { | |||
290 | 290 | ||
291 | pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> { | 291 | pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> { |
292 | if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { | 292 | if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { |
293 | let expr_id = self.body_source_map.as_ref()?.node_expr(path_expr.into())?; | 293 | let expr_id = self.body_source_map.as_ref()?.node_expr(&path_expr.into())?; |
294 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { | 294 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { |
295 | return Some(PathResolution::AssocItem(assoc)); | 295 | return Some(PathResolution::AssocItem(assoc)); |
296 | } | 296 | } |
297 | } | 297 | } |
298 | if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) { | 298 | if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) { |
299 | let pat_id = self.body_source_map.as_ref()?.node_pat(path_pat.into())?; | 299 | let pat_id = self.body_source_map.as_ref()?.node_pat(&path_pat.into())?; |
300 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) { | 300 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) { |
301 | return Some(PathResolution::AssocItem(assoc)); | 301 | return Some(PathResolution::AssocItem(assoc)); |
302 | } | 302 | } |
303 | } | 303 | } |
304 | let hir_path = crate::Path::from_ast(path)?; | 304 | let hir_path = crate::Path::from_ast(path.clone())?; |
305 | let res = self.resolver.resolve_path_without_assoc_items(db, &hir_path); | 305 | let res = self.resolver.resolve_path_without_assoc_items(db, &hir_path); |
306 | let res = res.clone().take_types().or_else(|| res.take_values())?; | 306 | let res = res.clone().take_types().or_else(|| res.take_values())?; |
307 | let res = match res { | 307 | let res = match res { |
@@ -343,12 +343,12 @@ impl SourceAnalyzer { | |||
343 | // FIXME: at least, this should work with any DefWithBody, but ideally | 343 | // FIXME: at least, this should work with any DefWithBody, but ideally |
344 | // this should be hir-based altogether | 344 | // this should be hir-based altogether |
345 | let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); | 345 | let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); |
346 | let ptr = Either::A(AstPtr::new(pat.into())); | 346 | let ptr = Either::A(AstPtr::new(&ast::Pat::from(pat.clone()))); |
347 | fn_def | 347 | fn_def |
348 | .syntax() | 348 | .syntax() |
349 | .descendants() | 349 | .descendants() |
350 | .filter_map(ast::NameRef::cast) | 350 | .filter_map(ast::NameRef::cast) |
351 | .filter(|name_ref| match self.resolve_local_name(*name_ref) { | 351 | .filter(|name_ref| match self.resolve_local_name(&name_ref) { |
352 | None => false, | 352 | None => false, |
353 | Some(entry) => entry.ptr() == ptr, | 353 | Some(entry) => entry.ptr() == ptr, |
354 | }) | 354 | }) |
@@ -411,7 +411,7 @@ fn scope_for( | |||
411 | node: &SyntaxNode, | 411 | node: &SyntaxNode, |
412 | ) -> Option<ScopeId> { | 412 | ) -> Option<ScopeId> { |
413 | node.ancestors() | 413 | node.ancestors() |
414 | .map(SyntaxNodePtr::new) | 414 | .map(|it| SyntaxNodePtr::new(&it)) |
415 | .filter_map(|ptr| source_map.syntax_expr(ptr)) | 415 | .filter_map(|ptr| source_map.syntax_expr(ptr)) |
416 | .find_map(|it| scopes.scope_for(it)) | 416 | .find_map(|it| scopes.scope_for(it)) |
417 | } | 417 | } |
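`AstNode::cast` now takes the `SyntaxNode` by value, so code like `try_get_resolver_for_node` that tries several casts in a row clones the handle before each attempt; the clone is cheap for the same reason as above. A small sketch of that try-each-cast pattern with stand-in types:

    use std::rc::Rc;

    #[derive(Clone, Debug)]
    struct SyntaxNode {
        kind: &'static str,
        data: Rc<String>,
    }

    #[derive(Debug)]
    struct Module(SyntaxNode);
    #[derive(Debug)]
    struct StructDef(SyntaxNode);

    impl Module {
        // cast consumes the node, returning it inside the wrapper on success.
        fn cast(node: SyntaxNode) -> Option<Module> {
            if node.kind == "MODULE" { Some(Module(node)) } else { None }
        }
    }

    impl StructDef {
        fn cast(node: SyntaxNode) -> Option<StructDef> {
            if node.kind == "STRUCT_DEF" { Some(StructDef(node)) } else { None }
        }
    }

    fn describe(node: &SyntaxNode) -> &'static str {
        // Clone before each attempt, as the updated source_binder code does.
        if let Some(_m) = Module::cast(node.clone()) {
            "module"
        } else if let Some(_s) = StructDef::cast(node.clone()) {
            "struct"
        } else {
            "other"
        }
    }

    fn main() {
        let node = SyntaxNode { kind: "STRUCT_DEF", data: Rc::new("struct S;".into()) };
        assert_eq!(describe(&node), "struct");
        assert_eq!(node.data.len(), 9);
    }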
diff --git a/crates/ra_hir/src/source_id.rs b/crates/ra_hir/src/source_id.rs index 6cdb90141..51cd65dda 100644 --- a/crates/ra_hir/src/source_id.rs +++ b/crates/ra_hir/src/source_id.rs | |||
@@ -5,7 +5,7 @@ use std::{ | |||
5 | }; | 5 | }; |
6 | 6 | ||
7 | use ra_arena::{impl_arena_id, Arena, RawId}; | 7 | use ra_arena::{impl_arena_id, Arena, RawId}; |
8 | use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr, TreeArc}; | 8 | use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr}; |
9 | 9 | ||
10 | use crate::{AstDatabase, HirFileId}; | 10 | use crate::{AstDatabase, HirFileId}; |
11 | 11 | ||
@@ -42,9 +42,9 @@ impl<N: AstNode> AstId<N> { | |||
42 | self.file_id | 42 | self.file_id |
43 | } | 43 | } |
44 | 44 | ||
45 | pub(crate) fn to_node(&self, db: &impl AstDatabase) -> TreeArc<N> { | 45 | pub(crate) fn to_node(&self, db: &impl AstDatabase) -> N { |
46 | let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw); | 46 | let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw); |
47 | N::cast(&syntax_node).unwrap().to_owned() | 47 | N::cast(syntax_node).unwrap() |
48 | } | 48 | } |
49 | } | 49 | } |
50 | 50 | ||
@@ -93,7 +93,7 @@ pub struct AstIdMap { | |||
93 | impl AstIdMap { | 93 | impl AstIdMap { |
94 | pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { | 94 | pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { |
95 | let map = if let Some(node) = db.parse_or_expand(file_id) { | 95 | let map = if let Some(node) = db.parse_or_expand(file_id) { |
96 | AstIdMap::from_source(&*node) | 96 | AstIdMap::from_source(&node) |
97 | } else { | 97 | } else { |
98 | AstIdMap::default() | 98 | AstIdMap::default() |
99 | }; | 99 | }; |
@@ -104,9 +104,9 @@ impl AstIdMap { | |||
104 | db: &impl AstDatabase, | 104 | db: &impl AstDatabase, |
105 | file_id: HirFileId, | 105 | file_id: HirFileId, |
106 | ast_id: ErasedFileAstId, | 106 | ast_id: ErasedFileAstId, |
107 | ) -> TreeArc<SyntaxNode> { | 107 | ) -> SyntaxNode { |
108 | let node = db.parse_or_expand(file_id).unwrap(); | 108 | let node = db.parse_or_expand(file_id).unwrap(); |
109 | db.ast_id_map(file_id).arena[ast_id].to_node(&*node).to_owned() | 109 | db.ast_id_map(file_id).arena[ast_id].to_node(&node) |
110 | } | 110 | } |
111 | 111 | ||
112 | pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> { | 112 | pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> { |
@@ -131,7 +131,7 @@ impl AstIdMap { | |||
131 | // change parent's id. This means that, say, adding a new function to a | 131 | // change parent's id. This means that, say, adding a new function to a |
132 | // trait does not change ids of top-level items, which helps caching. | 132 | // trait does not change ids of top-level items, which helps caching. |
133 | bfs(node, |it| { | 133 | bfs(node, |it| { |
134 | if let Some(module_item) = ast::ModuleItem::cast(it) { | 134 | if let Some(module_item) = ast::ModuleItem::cast(it.clone()) { |
135 | res.alloc(module_item.syntax()); | 135 | res.alloc(module_item.syntax()); |
136 | } else if let Some(macro_call) = ast::MacroCall::cast(it) { | 136 | } else if let Some(macro_call) = ast::MacroCall::cast(it) { |
137 | res.alloc(macro_call.syntax()); | 137 | res.alloc(macro_call.syntax()); |
@@ -146,8 +146,8 @@ impl AstIdMap { | |||
146 | } | 146 | } |
147 | 147 | ||
148 | /// Walks the subtree in bfs order, calling `f` for each node. | 148 | /// Walks the subtree in bfs order, calling `f` for each node. |
149 | fn bfs(node: &SyntaxNode, mut f: impl FnMut(&SyntaxNode)) { | 149 | fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) { |
150 | let mut curr_layer = vec![node]; | 150 | let mut curr_layer = vec![node.clone()]; |
151 | let mut next_layer = vec![]; | 151 | let mut next_layer = vec![]; |
152 | while !curr_layer.is_empty() { | 152 | while !curr_layer.is_empty() { |
153 | curr_layer.drain(..).for_each(|node| { | 153 | curr_layer.drain(..).for_each(|node| { |
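`bfs` in `source_id.rs` now hands each visited node to the callback by value (`FnMut(SyntaxNode)`) and seeds the queue with `node.clone()`. A self-contained sketch of the same breadth-first walk over a toy tree of clonable nodes (the tree type is made up; only the traversal shape mirrors the diff):

    use std::rc::Rc;

    // Toy clonable node: shared text plus owned child handles.
    #[derive(Clone, Debug)]
    struct SyntaxNode {
        text: Rc<String>,
        children: Vec<SyntaxNode>,
    }

    impl SyntaxNode {
        fn leaf(text: &str) -> SyntaxNode {
            SyntaxNode { text: Rc::new(text.to_string()), children: Vec::new() }
        }
    }

    /// Walks the subtree in bfs order, calling `f` for each node by value.
    fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) {
        let mut curr_layer = vec![node.clone()];
        let mut next_layer = vec![];
        while !curr_layer.is_empty() {
            curr_layer.drain(..).for_each(|node| {
                next_layer.extend(node.children.iter().cloned());
                f(node);
            });
            std::mem::swap(&mut curr_layer, &mut next_layer);
        }
    }

    fn main() {
        let tree = SyntaxNode {
            text: Rc::new("root".to_string()),
            children: vec![SyntaxNode::leaf("a"), SyntaxNode::leaf("b")],
        };
        let mut visited = Vec::new();
        bfs(&tree, |node| visited.push(node.text.as_str().to_string()));
        assert_eq!(visited, vec!["root".to_string(), "a".into(), "b".into()]);
    }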
diff --git a/crates/ra_hir/src/traits.rs b/crates/ra_hir/src/traits.rs index fc0368303..de26f1a68 100644 --- a/crates/ra_hir/src/traits.rs +++ b/crates/ra_hir/src/traits.rs | |||
@@ -31,9 +31,9 @@ impl TraitData { | |||
31 | item_list | 31 | item_list |
32 | .impl_items() | 32 | .impl_items() |
33 | .map(|item_node| match item_node.kind() { | 33 | .map(|item_node| match item_node.kind() { |
34 | ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(), | 34 | ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(&it) }.into(), |
35 | ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(), | 35 | ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(), |
36 | ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(), | 36 | ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(), |
37 | }) | 37 | }) |
38 | .collect() | 38 | .collect() |
39 | } else { | 39 | } else { |
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index 2410602a6..265740e54 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs | |||
@@ -3086,7 +3086,7 @@ fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String { | |||
3086 | let file = db.parse(pos.file_id).ok().unwrap(); | 3086 | let file = db.parse(pos.file_id).ok().unwrap(); |
3087 | let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap(); | 3087 | let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap(); |
3088 | let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset)); | 3088 | let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset)); |
3089 | let ty = analyzer.type_of(db, expr).unwrap(); | 3089 | let ty = analyzer.type_of(db, &expr).unwrap(); |
3090 | ty.display(db).to_string() | 3090 | ty.display(db).to_string() |
3091 | } | 3091 | } |
3092 | 3092 | ||
@@ -3126,7 +3126,7 @@ fn infer(content: &str) -> String { | |||
3126 | types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end())); | 3126 | types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end())); |
3127 | for (syntax_ptr, ty) in &types { | 3127 | for (syntax_ptr, ty) in &types { |
3128 | let node = syntax_ptr.to_node(source_file.syntax()); | 3128 | let node = syntax_ptr.to_node(source_file.syntax()); |
3129 | let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node) { | 3129 | let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) { |
3130 | (self_param.self_kw_token().range(), "self".to_string()) | 3130 | (self_param.self_kw_token().range(), "self".to_string()) |
3131 | } else { | 3131 | } else { |
3132 | (syntax_ptr.range(), node.text().to_string().replace("\n", " ")) | 3132 | (syntax_ptr.range(), node.text().to_string().replace("\n", " ")) |
@@ -3137,7 +3137,7 @@ fn infer(content: &str) -> String { | |||
3137 | 3137 | ||
3138 | for node in source_file.syntax().descendants() { | 3138 | for node in source_file.syntax().descendants() { |
3139 | if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { | 3139 | if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { |
3140 | let analyzer = SourceAnalyzer::new(&db, file_id, node, None); | 3140 | let analyzer = SourceAnalyzer::new(&db, file_id, &node, None); |
3141 | infer_def(analyzer.inference_result(), analyzer.body_source_map()); | 3141 | infer_def(analyzer.inference_result(), analyzer.body_source_map()); |
3142 | } | 3142 | } |
3143 | } | 3143 | } |
@@ -3179,7 +3179,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { | |||
3179 | let node = | 3179 | let node = |
3180 | algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); | 3180 | algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); |
3181 | let events = db.log_executed(|| { | 3181 | let events = db.log_executed(|| { |
3182 | SourceAnalyzer::new(&db, pos.file_id, node, None); | 3182 | SourceAnalyzer::new(&db, pos.file_id, &node, None); |
3183 | }); | 3183 | }); |
3184 | assert!(format!("{:?}", events).contains("infer")) | 3184 | assert!(format!("{:?}", events).contains("infer")) |
3185 | } | 3185 | } |
@@ -3200,7 +3200,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { | |||
3200 | let node = | 3200 | let node = |
3201 | algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); | 3201 | algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); |
3202 | let events = db.log_executed(|| { | 3202 | let events = db.log_executed(|| { |
3203 | SourceAnalyzer::new(&db, pos.file_id, node, None); | 3203 | SourceAnalyzer::new(&db, pos.file_id, &node, None); |
3204 | }); | 3204 | }); |
3205 | assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) | 3205 | assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) |
3206 | } | 3206 | } |
diff --git a/crates/ra_hir/src/type_ref.rs b/crates/ra_hir/src/type_ref.rs index 8aa807648..8536ae44a 100644 --- a/crates/ra_hir/src/type_ref.rs +++ b/crates/ra_hir/src/type_ref.rs | |||
@@ -56,7 +56,7 @@ pub enum TypeRef { | |||
56 | 56 | ||
57 | impl TypeRef { | 57 | impl TypeRef { |
58 | /// Converts an `ast::TypeRef` to a `hir::TypeRef`. | 58 | /// Converts an `ast::TypeRef` to a `hir::TypeRef`. |
59 | pub(crate) fn from_ast(node: &ast::TypeRef) -> Self { | 59 | pub(crate) fn from_ast(node: ast::TypeRef) -> Self { |
60 | use ra_syntax::ast::TypeRefKind::*; | 60 | use ra_syntax::ast::TypeRefKind::*; |
61 | match node.kind() { | 61 | match node.kind() { |
62 | ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()), | 62 | ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()), |
@@ -95,7 +95,7 @@ impl TypeRef { | |||
95 | } | 95 | } |
96 | } | 96 | } |
97 | 97 | ||
98 | pub(crate) fn from_ast_opt(node: Option<&ast::TypeRef>) -> Self { | 98 | pub(crate) fn from_ast_opt(node: Option<ast::TypeRef>) -> Self { |
99 | if let Some(node) = node { | 99 | if let Some(node) = node { |
100 | TypeRef::from_ast(node) | 100 | TypeRef::from_ast(node) |
101 | } else { | 101 | } else { |
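`TypeRef::from_ast_opt` likewise switches from `Option<&ast::TypeRef>` to `Option<ast::TypeRef>`, so optional child accessors can be passed straight through without re-borrowing. A two-function sketch of that by-value option flow, with a placeholder type standing in for ast::TypeRef:

    // Placeholder for ast::TypeRef.
    #[derive(Clone, Debug)]
    struct AstTypeRef { text: String }

    #[derive(Debug, PartialEq)]
    enum TypeRef {
        Named(String),
        Error,
    }

    impl TypeRef {
        fn from_ast(node: AstTypeRef) -> TypeRef {
            TypeRef::Named(node.text)
        }

        // Takes the Option by value, mirroring the updated signature.
        fn from_ast_opt(node: Option<AstTypeRef>) -> TypeRef {
            match node {
                Some(node) => TypeRef::from_ast(node),
                None => TypeRef::Error,
            }
        }
    }

    fn main() {
        let present = Some(AstTypeRef { text: "u32".to_string() });
        assert_eq!(TypeRef::from_ast_opt(present), TypeRef::Named("u32".to_string()));
        assert_eq!(TypeRef::from_ast_opt(None), TypeRef::Error);
    }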
diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs index 11dea7c14..270499612 100644 --- a/crates/ra_ide_api/src/call_info.rs +++ b/crates/ra_ide_api/src/call_info.rs | |||
@@ -11,24 +11,24 @@ use crate::{db::RootDatabase, CallInfo, FilePosition, FunctionSignature}; | |||
11 | /// Computes parameter information for the given call expression. | 11 | /// Computes parameter information for the given call expression. |
12 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { | 12 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { |
13 | let parse = db.parse(position.file_id); | 13 | let parse = db.parse(position.file_id); |
14 | let syntax = parse.tree().syntax(); | 14 | let syntax = parse.tree().syntax().clone(); |
15 | 15 | ||
16 | // Find the calling expression and its NameRef | 16 | // Find the calling expression and its NameRef |
17 | let calling_node = FnCallNode::with_node(syntax, position.offset)?; | 17 | let calling_node = FnCallNode::with_node(&syntax, position.offset)?; |
18 | let name_ref = calling_node.name_ref()?; | 18 | let name_ref = calling_node.name_ref()?; |
19 | 19 | ||
20 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); | 20 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); |
21 | let function = match calling_node { | 21 | let function = match &calling_node { |
22 | FnCallNode::CallExpr(expr) => { | 22 | FnCallNode::CallExpr(expr) => { |
23 | //FIXME: apply subst | 23 | //FIXME: apply subst |
24 | let (callable_def, _subst) = analyzer.type_of(db, expr.expr()?)?.as_callable()?; | 24 | let (callable_def, _subst) = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; |
25 | match callable_def { | 25 | match callable_def { |
26 | hir::CallableDef::Function(it) => it, | 26 | hir::CallableDef::Function(it) => it, |
27 | //FIXME: handle other callables | 27 | //FIXME: handle other callables |
28 | _ => return None, | 28 | _ => return None, |
29 | } | 29 | } |
30 | } | 30 | } |
31 | FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(expr)?, | 31 | FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(&expr)?, |
32 | }; | 32 | }; |
33 | 33 | ||
34 | let mut call_info = CallInfo::new(db, function); | 34 | let mut call_info = CallInfo::new(db, function); |
@@ -73,13 +73,13 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal | |||
73 | Some(call_info) | 73 | Some(call_info) |
74 | } | 74 | } |
75 | 75 | ||
76 | enum FnCallNode<'a> { | 76 | enum FnCallNode { |
77 | CallExpr(&'a ast::CallExpr), | 77 | CallExpr(ast::CallExpr), |
78 | MethodCallExpr(&'a ast::MethodCallExpr), | 78 | MethodCallExpr(ast::MethodCallExpr), |
79 | } | 79 | } |
80 | 80 | ||
81 | impl<'a> FnCallNode<'a> { | 81 | impl FnCallNode { |
82 | fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option<FnCallNode<'a>> { | 82 | fn with_node(syntax: &SyntaxNode, offset: TextUnit) -> Option<FnCallNode> { |
83 | if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) { | 83 | if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) { |
84 | return Some(FnCallNode::CallExpr(expr)); | 84 | return Some(FnCallNode::CallExpr(expr)); |
85 | } | 85 | } |
@@ -89,8 +89,8 @@ impl<'a> FnCallNode<'a> { | |||
89 | None | 89 | None |
90 | } | 90 | } |
91 | 91 | ||
92 | fn name_ref(&self) -> Option<&'a ast::NameRef> { | 92 | fn name_ref(&self) -> Option<ast::NameRef> { |
93 | match *self { | 93 | match self { |
94 | FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() { | 94 | FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() { |
95 | ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, | 95 | ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, |
96 | _ => return None, | 96 | _ => return None, |
@@ -102,8 +102,8 @@ impl<'a> FnCallNode<'a> { | |||
102 | } | 102 | } |
103 | } | 103 | } |
104 | 104 | ||
105 | fn arg_list(&self) -> Option<&'a ast::ArgList> { | 105 | fn arg_list(&self) -> Option<ast::ArgList> { |
106 | match *self { | 106 | match self { |
107 | FnCallNode::CallExpr(expr) => expr.arg_list(), | 107 | FnCallNode::CallExpr(expr) => expr.arg_list(), |
108 | FnCallNode::MethodCallExpr(expr) => expr.arg_list(), | 108 | FnCallNode::MethodCallExpr(expr) => expr.arg_list(), |
109 | } | 109 | } |
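Because its variants now store owned nodes, `FnCallNode` loses its lifetime parameter, and the accessors return `Option<ast::NameRef>` and `Option<ast::ArgList>` by value while matching on `&self`. A stripped-down version of that enum shape, with placeholder payload types instead of the real ast::CallExpr and ast::MethodCallExpr:

    #[derive(Clone, Debug)]
    struct CallExpr { name: String }
    #[derive(Clone, Debug)]
    struct MethodCallExpr { name: String }

    // No lifetime parameter once the payloads are owned.
    enum FnCallNode {
        CallExpr(CallExpr),
        MethodCallExpr(MethodCallExpr),
    }

    impl FnCallNode {
        // Match on `self` by reference and clone the small owned piece out.
        fn name(&self) -> Option<String> {
            match self {
                FnCallNode::CallExpr(expr) => Some(expr.name.clone()),
                FnCallNode::MethodCallExpr(expr) => Some(expr.name.clone()),
            }
        }
    }

    fn main() {
        let call = FnCallNode::CallExpr(CallExpr { name: "frobnicate".to_string() });
        let method = FnCallNode::MethodCallExpr(MethodCallExpr { name: "push".to_string() });
        assert_eq!(call.name(), Some("frobnicate".to_string()));
        assert_eq!(method.name(), Some("push".to_string()));
    }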
diff --git a/crates/ra_ide_api/src/completion/complete_dot.rs b/crates/ra_ide_api/src/completion/complete_dot.rs index a5f071442..536ba36df 100644 --- a/crates/ra_ide_api/src/completion/complete_dot.rs +++ b/crates/ra_ide_api/src/completion/complete_dot.rs | |||
@@ -5,10 +5,11 @@ use rustc_hash::FxHashSet; | |||
5 | 5 | ||
6 | /// Complete dot accesses, i.e. fields or methods (currently only fields). | 6 | /// Complete dot accesses, i.e. fields or methods (currently only fields). |
7 | pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { | 7 | pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { |
8 | let receiver_ty = match ctx.dot_receiver.and_then(|it| ctx.analyzer.type_of(ctx.db, it)) { | 8 | let receiver_ty = |
9 | Some(it) => it, | 9 | match ctx.dot_receiver.as_ref().and_then(|it| ctx.analyzer.type_of(ctx.db, it)) { |
10 | None => return, | 10 | Some(it) => it, |
11 | }; | 11 | None => return, |
12 | }; | ||
12 | if !ctx.is_call { | 13 | if !ctx.is_call { |
13 | complete_fields(acc, ctx, receiver_ty.clone()); | 14 | complete_fields(acc, ctx, receiver_ty.clone()); |
14 | } | 15 | } |
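Since `ctx.dot_receiver` now holds an owned `Option<ast::Expr>`, the completion code calls `.as_ref()` before `and_then`, so the closure borrows the expression instead of moving it out of the context. A tiny sketch of that `Option::as_ref` pattern with a hypothetical context struct:

    #[derive(Clone, Debug)]
    struct Expr { text: String }

    struct CompletionContext {
        dot_receiver: Option<Expr>,
    }

    impl CompletionContext {
        fn type_of(&self, expr: &Expr) -> Option<String> {
            Some(format!("type of `{}`", expr.text))
        }
    }

    fn receiver_ty(ctx: &CompletionContext) -> Option<String> {
        // `.as_ref()` turns &Option<Expr> into Option<&Expr>, so `it` is a borrow
        // and `ctx.dot_receiver` stays intact for later use.
        ctx.dot_receiver.as_ref().and_then(|it| ctx.type_of(it))
    }

    fn main() {
        let ctx = CompletionContext { dot_receiver: Some(Expr { text: "foo".to_string() }) };
        assert_eq!(receiver_ty(&ctx), Some("type of `foo`".to_string()));
        // The receiver was only borrowed, so it is still available here.
        assert!(ctx.dot_receiver.is_some());
    }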
diff --git a/crates/ra_ide_api/src/completion/complete_fn_param.rs b/crates/ra_ide_api/src/completion/complete_fn_param.rs index 5a117c485..0887ef1f6 100644 --- a/crates/ra_ide_api/src/completion/complete_fn_param.rs +++ b/crates/ra_ide_api/src/completion/complete_fn_param.rs | |||
@@ -20,7 +20,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) | |||
20 | let _ = visitor_ctx(&mut params) | 20 | let _ = visitor_ctx(&mut params) |
21 | .visit::<ast::SourceFile, _>(process) | 21 | .visit::<ast::SourceFile, _>(process) |
22 | .visit::<ast::ItemList, _>(process) | 22 | .visit::<ast::ItemList, _>(process) |
23 | .accept(node); | 23 | .accept(&node); |
24 | } | 24 | } |
25 | params | 25 | params |
26 | .into_iter() | 26 | .into_iter() |
@@ -38,10 +38,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) | |||
38 | .add_to(acc) | 38 | .add_to(acc) |
39 | }); | 39 | }); |
40 | 40 | ||
41 | fn process<'a, N: ast::FnDefOwner>( | 41 | fn process<N: ast::FnDefOwner>(node: N, params: &mut FxHashMap<String, (u32, ast::Param)>) { |
42 | node: &'a N, | ||
43 | params: &mut FxHashMap<String, (u32, &'a ast::Param)>, | ||
44 |