-rw-r--r--  Cargo.lock | 8
-rw-r--r--  crates/ra_assists/src/add_derive.rs | 2
-rw-r--r--  crates/ra_assists/src/add_explicit_type.rs | 2
-rw-r--r--  crates/ra_assists/src/add_impl.rs | 6
-rw-r--r--  crates/ra_assists/src/add_missing_impl_members.rs | 46
-rw-r--r--  crates/ra_assists/src/assist_ctx.rs | 14
-rw-r--r--  crates/ra_assists/src/ast_editor.rs | 123
-rw-r--r--  crates/ra_assists/src/auto_import.rs | 143
-rw-r--r--  crates/ra_assists/src/change_visibility.rs | 4
-rw-r--r--  crates/ra_assists/src/fill_match_arms.rs | 4
-rw-r--r--  crates/ra_assists/src/flip_binexpr.rs | 4
-rw-r--r--  crates/ra_assists/src/flip_comma.rs | 4
-rw-r--r--  crates/ra_assists/src/inline_local_variable.rs | 10
-rw-r--r--  crates/ra_assists/src/introduce_variable.rs | 24
-rw-r--r--  crates/ra_assists/src/move_guard.rs | 10
-rw-r--r--  crates/ra_assists/src/remove_dbg.rs | 6
-rw-r--r--  crates/ra_assists/src/replace_if_let_with_match.rs | 13
-rw-r--r--  crates/ra_cli/src/main.rs | 6
-rw-r--r--  crates/ra_fmt/src/lib.rs | 14
-rw-r--r--  crates/ra_hir/src/adt.rs | 15
-rw-r--r--  crates/ra_hir/src/code_model.rs | 19
-rw-r--r--  crates/ra_hir/src/code_model/docs.rs | 24
-rw-r--r--  crates/ra_hir/src/code_model/src.rs | 44
-rw-r--r--  crates/ra_hir/src/db.rs | 6
-rw-r--r--  crates/ra_hir/src/diagnostics.rs | 12
-rw-r--r--  crates/ra_hir/src/expr.rs | 46
-rw-r--r--  crates/ra_hir/src/expr/scope.rs | 6
-rw-r--r--  crates/ra_hir/src/expr/validation.rs | 2
-rw-r--r--  crates/ra_hir/src/generics.rs | 22
-rw-r--r--  crates/ra_hir/src/ids.rs | 14
-rw-r--r--  crates/ra_hir/src/impl_block.rs | 24
-rw-r--r--  crates/ra_hir/src/lang_item.rs | 8
-rw-r--r--  crates/ra_hir/src/name.rs | 2
-rw-r--r--  crates/ra_hir/src/nameres/raw.rs | 52
-rw-r--r--  crates/ra_hir/src/path.rs | 26
-rw-r--r--  crates/ra_hir/src/source_binder.rs | 54
-rw-r--r--  crates/ra_hir/src/source_id.rs | 18
-rw-r--r--  crates/ra_hir/src/traits.rs | 6
-rw-r--r--  crates/ra_hir/src/ty/tests.rs | 10
-rw-r--r--  crates/ra_hir/src/type_ref.rs | 4
-rw-r--r--  crates/ra_ide_api/src/call_info.rs | 28
-rw-r--r--  crates/ra_ide_api/src/completion/complete_dot.rs | 9
-rw-r--r--  crates/ra_ide_api/src/completion/complete_fn_param.rs | 7
-rw-r--r--  crates/ra_ide_api/src/completion/complete_keyword.rs | 16
-rw-r--r--  crates/ra_ide_api/src/completion/complete_postfix.rs | 7
-rw-r--r--  crates/ra_ide_api/src/completion/complete_scope.rs | 4
-rw-r--r--  crates/ra_ide_api/src/completion/complete_struct_literal.rs | 4
-rw-r--r--  crates/ra_ide_api/src/completion/completion_context.rs | 26
-rw-r--r--  crates/ra_ide_api/src/diagnostics.rs | 16
-rw-r--r--  crates/ra_ide_api/src/display/function_signature.rs | 2
-rw-r--r--  crates/ra_ide_api/src/display/navigation_target.rs | 56
-rw-r--r--  crates/ra_ide_api/src/display/structure.rs | 50
-rw-r--r--  crates/ra_ide_api/src/extend_selection.rs | 28
-rw-r--r--  crates/ra_ide_api/src/folding_ranges.rs | 48
-rw-r--r--  crates/ra_ide_api/src/goto_definition.rs | 106
-rw-r--r--  crates/ra_ide_api/src/goto_type_definition.rs | 10
-rw-r--r--  crates/ra_ide_api/src/hover.rs | 51
-rw-r--r--  crates/ra_ide_api/src/impls.rs | 10
-rw-r--r--  crates/ra_ide_api/src/join_lines.rs | 23
-rw-r--r--  crates/ra_ide_api/src/lib.rs | 12
-rw-r--r--  crates/ra_ide_api/src/matching_brace.rs | 2
-rw-r--r--  crates/ra_ide_api/src/name_ref_kind.rs | 10
-rw-r--r--  crates/ra_ide_api/src/references.rs | 35
-rw-r--r--  crates/ra_ide_api/src/runnables.rs | 10
-rw-r--r--  crates/ra_ide_api/src/status.rs | 2
-rw-r--r--  crates/ra_ide_api/src/symbol_index.rs | 10
-rw-r--r--  crates/ra_ide_api/src/syntax_highlighting.rs | 25
-rw-r--r--  crates/ra_ide_api/src/syntax_tree.rs | 6
-rw-r--r--  crates/ra_ide_api/src/typing.rs | 19
-rw-r--r--  crates/ra_mbe/src/mbe_expander.rs | 6
-rw-r--r--  crates/ra_mbe/src/mbe_parser.rs | 3
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs | 11
-rw-r--r--  crates/ra_mbe/src/tests.rs | 43
-rw-r--r--  crates/ra_syntax/Cargo.toml | 2
-rw-r--r--  crates/ra_syntax/src/algo.rs | 10
-rw-r--r--  crates/ra_syntax/src/algo/visit.rs | 12
-rw-r--r--  crates/ra_syntax/src/ast.rs | 36
-rw-r--r--  crates/ra_syntax/src/ast/expr_extensions.rs | 30
-rw-r--r--  crates/ra_syntax/src/ast/extensions.rs | 64
-rw-r--r--  crates/ra_syntax/src/ast/generated.rs | 2720
-rw-r--r--  crates/ra_syntax/src/ast/generated.rs.tera | 51
-rw-r--r--  crates/ra_syntax/src/ast/tokens.rs | 26
-rw-r--r--  crates/ra_syntax/src/ast/traits.rs | 50
-rw-r--r--  crates/ra_syntax/src/fuzz.rs | 14
-rw-r--r--  crates/ra_syntax/src/lib.rs | 122
-rw-r--r--  crates/ra_syntax/src/parsing/reparsing.rs | 20
-rw-r--r--  crates/ra_syntax/src/ptr.rs | 11
-rw-r--r--  crates/ra_syntax/src/syntax_node.rs | 331
-rw-r--r--  crates/ra_syntax/src/syntax_text.rs | 25
-rw-r--r--  crates/ra_syntax/src/validation.rs | 4
-rw-r--r--  crates/ra_syntax/src/validation/block.rs | 2
-rw-r--r--  crates/ra_syntax/src/validation/field_expr.rs | 2
92 files changed, 1984 insertions, 3100 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 03f5be16b..141cc6088 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1313,7 +1313,7 @@ dependencies = [
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_parser 0.1.0",
  "ra_text_edit 0.1.0",
- "rowan 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "test_utils 0.1.0",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1584,11 +1584,11 @@ dependencies = [
 
 [[package]]
 name = "rowan"
-version = "0.5.5"
+version = "0.5.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "parking_lot 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2275,7 +2275,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c"
 "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
 "checksum ron 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "17f52a24414403f81528b67488cf8edc4eda977d3af1646bb6b106a600ead78f"
-"checksum rowan 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "500ba7550373d42593a5228085bad391517378fa31ad2a84defe100dd8259fef"
+"checksum rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0c433ffe99ac9b96fa9882805d05eee5d750c9202fb42d0546c556e5d70d54be"
 "checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af"
 "checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
 "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
diff --git a/crates/ra_assists/src/add_derive.rs b/crates/ra_assists/src/add_derive.rs
index bf7d55d6d..f19196f53 100644
--- a/crates/ra_assists/src/add_derive.rs
+++ b/crates/ra_assists/src/add_derive.rs
@@ -9,7 +9,7 @@ use crate::{Assist, AssistCtx, AssistId};
 
 pub(crate) fn add_derive(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let nominal = ctx.node_at_offset::<ast::NominalDef>()?;
-    let node_start = derive_insertion_offset(nominal)?;
+    let node_start = derive_insertion_offset(&nominal)?;
     ctx.add_action(AssistId("add_derive"), "add `#[derive]`", |edit| {
         let derive_attr = nominal
             .attrs()
diff --git a/crates/ra_assists/src/add_explicit_type.rs b/crates/ra_assists/src/add_explicit_type.rs
index bb47a32f0..a69cfc8e3 100644
--- a/crates/ra_assists/src/add_explicit_type.rs
+++ b/crates/ra_assists/src/add_explicit_type.rs
@@ -27,7 +27,7 @@ pub(crate) fn add_explicit_type(mut ctx: AssistCtx<impl HirDatabase>) -> Option<
     // Infer type
     let db = ctx.db;
     let analyzer = hir::SourceAnalyzer::new(db, ctx.frange.file_id, stmt.syntax(), None);
-    let ty = analyzer.type_of(db, expr)?;
+    let ty = analyzer.type_of(db, &expr)?;
     // Assist not applicable if the type is unknown
     if is_unknown(&ty) {
         return None;
diff --git a/crates/ra_assists/src/add_impl.rs b/crates/ra_assists/src/add_impl.rs
index b81922c1d..cebc19539 100644
--- a/crates/ra_assists/src/add_impl.rs
+++ b/crates/ra_assists/src/add_impl.rs
@@ -16,7 +16,7 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let start_offset = nominal.syntax().range().end();
     let mut buf = String::new();
     buf.push_str("\n\nimpl");
-    if let Some(type_params) = type_params {
+    if let Some(type_params) = &type_params {
         type_params.syntax().text().push_to(&mut buf);
     }
     buf.push_str(" ");
@@ -25,9 +25,9 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
         let lifetime_params = type_params
             .lifetime_params()
             .filter_map(|it| it.lifetime_token())
-            .map(|it| it.text());
+            .map(|it| it.text().clone());
         let type_params =
-            type_params.type_params().filter_map(|it| it.name()).map(|it| it.text());
+            type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
         join(lifetime_params.chain(type_params)).surround_with("<", ">").to_buf(&mut buf);
     }
     buf.push_str(" {\n");
diff --git a/crates/ra_assists/src/add_missing_impl_members.rs b/crates/ra_assists/src/add_missing_impl_members.rs
index 6ffdad0b1..b992a4dc8 100644
--- a/crates/ra_assists/src/add_missing_impl_members.rs
+++ b/crates/ra_assists/src/add_missing_impl_members.rs
@@ -5,8 +5,8 @@ use crate::{
 
 use hir::{db::HirDatabase, HasSource};
 use ra_db::FilePosition;
-use ra_syntax::ast::{self, AstNode, ImplItem, ImplItemKind, NameOwner};
-use ra_syntax::{SmolStr, TreeArc};
+use ra_syntax::ast::{self, AstNode, ImplItemKind, NameOwner};
+use ra_syntax::SmolStr;
 
 #[derive(PartialEq)]
 enum AddMissingImplMembersMode {
@@ -46,16 +46,16 @@ fn add_missing_impl_members_inner(
         let position = FilePosition { file_id, offset: impl_node.syntax().range().start() };
         let analyzer = hir::SourceAnalyzer::new(ctx.db, position.file_id, impl_node.syntax(), None);
 
-        resolve_target_trait_def(ctx.db, &analyzer, impl_node)?
+        resolve_target_trait_def(ctx.db, &analyzer, &impl_node)?
     };
 
-    let def_name = |kind| -> Option<&SmolStr> {
+    let def_name = |kind| -> Option<SmolStr> {
         match kind {
-            ImplItemKind::FnDef(def) => def.name(),
-            ImplItemKind::TypeAliasDef(def) => def.name(),
-            ImplItemKind::ConstDef(def) => def.name(),
+            ast::ImplItemKind::FnDef(def) => def.name(),
+            ast::ImplItemKind::TypeAliasDef(def) => def.name(),
+            ast::ImplItemKind::ConstDef(def) => def.name(),
         }
-        .map(ast::Name::text)
+        .map(|it| it.text().clone())
     };
 
     let trait_items = trait_def.item_list()?.impl_items();
@@ -78,18 +78,13 @@ fn add_missing_impl_members_inner(
 
     ctx.add_action(AssistId(assist_id), label, |edit| {
         let n_existing_items = impl_item_list.impl_items().count();
-        let items: Vec<_> = missing_items
-            .into_iter()
-            .map(|it| match it.kind() {
-                ImplItemKind::FnDef(def) => {
-                    strip_docstring(ImplItem::cast(add_body(def).syntax()).unwrap())
-                }
-                _ => strip_docstring(it),
-            })
-            .collect();
+        let items = missing_items.into_iter().map(|it| match it.kind() {
+            ImplItemKind::FnDef(def) => strip_docstring(add_body(def).into()),
+            _ => strip_docstring(it),
+        });
         let mut ast_editor = AstEditor::new(impl_item_list);
 
-        ast_editor.append_items(items.iter().map(|it| &**it));
+        ast_editor.append_items(items);
 
         let first_new_item = ast_editor.ast().impl_items().nth(n_existing_items).unwrap();
         let cursor_position = first_new_item.syntax().range().start();
@@ -101,14 +96,14 @@ fn add_missing_impl_members_inner(
     ctx.build()
 }
 
-fn strip_docstring(item: &ast::ImplItem) -> TreeArc<ast::ImplItem> {
+fn strip_docstring(item: ast::ImplItem) -> ast::ImplItem {
     let mut ast_editor = AstEditor::new(item);
     ast_editor.strip_attrs_and_docs();
     ast_editor.ast().to_owned()
 }
 
-fn add_body(fn_def: &ast::FnDef) -> TreeArc<ast::FnDef> {
-    let mut ast_editor = AstEditor::new(fn_def);
+fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
+    let mut ast_editor = AstEditor::new(fn_def.clone());
     if fn_def.body().is_none() {
         ast_editor.set_body(&AstBuilder::<ast::Block>::single_expr(
             &AstBuilder::<ast::Expr>::unimplemented(),
@@ -123,9 +118,12 @@ fn resolve_target_trait_def(
     db: &impl HirDatabase,
     analyzer: &hir::SourceAnalyzer,
     impl_block: &ast::ImplBlock,
-) -> Option<TreeArc<ast::TraitDef>> {
-    let ast_path =
-        impl_block.target_trait().map(AstNode::syntax).and_then(ast::PathType::cast)?.path()?;
+) -> Option<ast::TraitDef> {
+    let ast_path = impl_block
+        .target_trait()
+        .map(|it| it.syntax().clone())
+        .and_then(ast::PathType::cast)?
+        .path()?;
 
     match analyzer.resolve_path(db, &ast_path) {
         Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).ast),
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 34b207154..e52085f85 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -49,7 +49,7 @@ pub(crate) enum Assist {
 pub(crate) struct AssistCtx<'a, DB> {
     pub(crate) db: &'a DB,
     pub(crate) frange: FileRange,
-    source_file: &'a SourceFile,
+    source_file: SourceFile,
     should_compute_edit: bool,
     assist: Assist,
 }
@@ -59,7 +59,7 @@ impl<'a, DB> Clone for AssistCtx<'a, DB> {
         AssistCtx {
             db: self.db,
             frange: self.frange,
-            source_file: self.source_file,
+            source_file: self.source_file.clone(),
             should_compute_edit: self.should_compute_edit,
             assist: self.assist.clone(),
         }
@@ -104,18 +104,18 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
         Some(self.assist)
     }
 
-    pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken<'a>> {
+    pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
         find_token_at_offset(self.source_file.syntax(), self.frange.range.start())
     }
 
-    pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<&'a N> {
+    pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<N> {
         find_node_at_offset(self.source_file.syntax(), self.frange.range.start())
     }
-    pub(crate) fn covering_element(&self) -> SyntaxElement<'a> {
+    pub(crate) fn covering_element(&self) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), self.frange.range)
     }
 
-    pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement<'a> {
+    pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), range)
     }
 }
@@ -139,7 +139,7 @@ impl AssistBuilder {
     ) {
         let mut replace_with = replace_with.into();
         if let Some(indent) = leading_indent(node) {
-            replace_with = reindent(&replace_with, indent)
+            replace_with = reindent(&replace_with, &indent)
         }
         self.replace(node.range(), replace_with)
     }
diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs
index 7b743c9f0..5fbcadfee 100644
--- a/crates/ra_assists/src/ast_editor.rs
+++ b/crates/ra_assists/src/ast_editor.rs
@@ -4,18 +4,18 @@ use arrayvec::ArrayVec;
 use hir::Name;
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, TreeArc, T,
+    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, T,
 };
 use ra_text_edit::TextEditBuilder;
 
 pub struct AstEditor<N: AstNode> {
-    original_ast: TreeArc<N>,
-    ast: TreeArc<N>,
+    original_ast: N,
+    ast: N,
 }
 
 impl<N: AstNode> AstEditor<N> {
-    pub fn new(node: &N) -> AstEditor<N> {
-        AstEditor { original_ast: node.to_owned(), ast: node.to_owned() }
+    pub fn new(node: N) -> AstEditor<N> {
+        AstEditor { original_ast: node.clone(), ast: node }
     }
 
     pub fn into_text_edit(self, builder: &mut TextEditBuilder) {
@@ -26,27 +26,27 @@ impl<N: AstNode> AstEditor<N> {
     }
 
     pub fn ast(&self) -> &N {
-        &*self.ast
+        &self.ast
     }
 
     #[must_use]
-    fn insert_children<'a>(
+    fn insert_children(
         &self,
-        position: InsertPosition<SyntaxElement<'_>>,
-        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
-    ) -> TreeArc<N> {
+        position: InsertPosition<SyntaxElement>,
+        to_insert: impl Iterator<Item = SyntaxElement>,
+    ) -> N {
         let new_syntax = self.ast().syntax().insert_children(position, to_insert);
-        N::cast(&new_syntax).unwrap().to_owned()
+        N::cast(new_syntax).unwrap()
     }
 
     #[must_use]
-    fn replace_children<'a>(
+    fn replace_children(
         &self,
-        to_delete: RangeInclusive<SyntaxElement<'_>>,
-        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
-    ) -> TreeArc<N> {
+        to_delete: RangeInclusive<SyntaxElement>,
+        to_insert: impl Iterator<Item = SyntaxElement>,
+    ) -> N {
         let new_syntax = self.ast().syntax().replace_children(to_delete, to_insert);
-        N::cast(&new_syntax).unwrap().to_owned()
+        N::cast(new_syntax).unwrap()
     }
 
     fn do_make_multiline(&mut self) {
@@ -66,16 +66,18 @@ impl<N: AstNode> AstEditor<N> {
                 if ws.text().contains('\n') {
                     return;
                 }
-                Some(ws)
+                Some(ws.clone())
             }
         };
 
-        let indent = leading_indent(self.ast().syntax()).unwrap_or("");
+        let indent = leading_indent(self.ast().syntax()).unwrap_or("".into());
         let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
         let to_insert = iter::once(ws.ws().into());
         self.ast = match existing_ws {
             None => self.insert_children(InsertPosition::After(l_curly), to_insert),
-            Some(ws) => self.replace_children(RangeInclusive::new(ws.into(), ws.into()), to_insert),
+            Some(ws) => {
+                self.replace_children(RangeInclusive::new(ws.clone().into(), ws.into()), to_insert)
+            }
         };
     }
 }
@@ -95,7 +97,7 @@ impl AstEditor<ast::NamedFieldList> {
         let space = if is_multiline {
             ws = tokens::WsBuilder::new(&format!(
                 "\n{} ",
-                leading_indent(self.ast().syntax()).unwrap_or("")
+                leading_indent(self.ast().syntax()).unwrap_or("".into())
             ));
             ws.ws()
         } else {
@@ -104,7 +106,7 @@ impl AstEditor<ast::NamedFieldList> {
 
         let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new();
         to_insert.push(space.into());
-        to_insert.push(field.syntax().into());
+        to_insert.push(field.syntax().clone().into());
         to_insert.push(tokens::comma().into());
 
         macro_rules! after_l_curly {
@@ -127,7 +129,7 @@ impl AstEditor<ast::NamedFieldList> {
                     InsertPosition::After(comma)
                 } else {
                     to_insert.insert(0, tokens::comma().into());
-                    InsertPosition::After($anchor.syntax().into())
+                    InsertPosition::After($anchor.syntax().clone().into())
                 }
             };
         };
@@ -144,7 +146,9 @@ impl AstEditor<ast::NamedFieldList> {
                     None => after_l_curly!(),
                 }
             }
-            InsertPosition::Before(anchor) => InsertPosition::Before(anchor.syntax().into()),
+            InsertPosition::Before(anchor) => {
+                InsertPosition::Before(anchor.syntax().clone().into())
+            }
             InsertPosition::After(anchor) => after_field!(anchor),
         };
 
@@ -157,7 +161,7 @@ impl AstEditor<ast::NamedFieldList> {
 }
 
 impl AstEditor<ast::ItemList> {
-    pub fn append_items<'a>(&mut self, items: impl Iterator<Item = &'a ast::ImplItem>) {
+    pub fn append_items(&mut self, items: impl Iterator<Item = ast::ImplItem>) {
         let n_existing_items = self.ast().impl_items().count();
         if n_existing_items == 0 {
             self.do_make_multiline();
@@ -165,22 +169,23 @@ impl AstEditor<ast::ItemList> {
         items.for_each(|it| self.append_item(it));
     }
 
-    pub fn append_item(&mut self, item: &ast::ImplItem) {
+    pub fn append_item(&mut self, item: ast::ImplItem) {
         let (indent, position) = match self.ast().impl_items().last() {
             Some(it) => (
-                leading_indent(it.syntax()).unwrap_or("").to_string(),
-                InsertPosition::After(it.syntax().into()),
+                leading_indent(it.syntax()).unwrap_or_default().to_string(),
+                InsertPosition::After(it.syntax().clone().into()),
             ),
             None => match self.l_curly() {
                 Some(it) => (
-                    " ".to_string() + leading_indent(self.ast().syntax()).unwrap_or(""),
+                    " ".to_string() + &leading_indent(self.ast().syntax()).unwrap_or_default(),
                     InsertPosition::After(it),
                 ),
                 None => return,
             },
         };
         let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
-        let to_insert: ArrayVec<[SyntaxElement; 2]> = [ws.ws().into(), item.syntax().into()].into();
+        let to_insert: ArrayVec<[SyntaxElement; 2]> =
+            [ws.ws().into(), item.syntax().clone().into()].into();
         self.ast = self.insert_children(position, to_insert.into_iter());
     }
 
@@ -197,9 +202,9 @@ impl AstEditor<ast::ImplItem> {
             .children_with_tokens()
             .find(|it| it.kind() == ATTR || it.kind() == COMMENT)
         {
-            let end = match start.next_sibling_or_token() {
-                Some(el) if el.kind() == WHITESPACE => el,
-                Some(_) | None => start,
+            let end = match &start.next_sibling_or_token() {
+                Some(el) if el.kind() == WHITESPACE => el.clone(),
+                Some(_) | None => start.clone(),
             };
             self.ast = self.replace_children(RangeInclusive::new(start, end), iter::empty());
         }
@@ -210,18 +215,18 @@ impl AstEditor<ast::FnDef> {
     pub fn set_body(&mut self, body: &ast::Block) {
         let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
         let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.ast().body() {
-            old_body.syntax().into()
+            old_body.syntax().clone().into()
         } else if let Some(semi) = self.ast().semicolon_token() {
             to_insert.push(tokens::single_space().into());
             semi.into()
         } else {
             to_insert.push(tokens::single_space().into());
-            to_insert.push(body.syntax().into());
+            to_insert.push(body.syntax().clone().into());
             self.ast = self.insert_children(InsertPosition::Last, to_insert.into_iter());
             return;
         };
-        to_insert.push(body.syntax().into());
-        let replace_range = RangeInclusive::new(old_body_or_semi, old_body_or_semi);
+        to_insert.push(body.syntax().clone().into());
+        let replace_range = RangeInclusive::new(old_body_or_semi.clone(), old_body_or_semi);
         self.ast = self.replace_children(replace_range, to_insert.into_iter())
     }
 }
@@ -231,15 +236,15 @@ pub struct AstBuilder<N: AstNode> {
 }
 
 impl AstBuilder<ast::NamedField> {
-    pub fn from_name(name: &Name) -> TreeArc<ast::NamedField> {
+    pub fn from_name(name: &Name) -> ast::NamedField {
         ast_node_from_file_text(&format!("fn f() {{ S {{ {}: (), }} }}", name))
     }
 
-    fn from_text(text: &str) -> TreeArc<ast::NamedField> {
+    fn from_text(text: &str) -> ast::NamedField {
         ast_node_from_file_text(&format!("fn f() {{ S {{ {}, }} }}", text))
     }
 
-    pub fn from_pieces(name: &ast::NameRef, expr: Option<&ast::Expr>) -> TreeArc<ast::NamedField> {
+    pub fn from_pieces(name: &ast::NameRef, expr: Option<&ast::Expr>) -> ast::NamedField {
         match expr {
             Some(expr) => Self::from_text(&format!("{}: {}", name.syntax(), expr.syntax())),
             None => Self::from_text(&name.syntax().to_string()),
@@ -248,36 +253,36 @@ impl AstBuilder<ast::NamedField> {
 }
 
 impl AstBuilder<ast::Block> {
-    fn from_text(text: &str) -> TreeArc<ast::Block> {
+    fn from_text(text: &str) -> ast::Block {
         ast_node_from_file_text(&format!("fn f() {}", text))
     }
 
-    pub fn single_expr(e: &ast::Expr) -> TreeArc<ast::Block> {
+    pub fn single_expr(e: &ast::Expr) -> ast::Block {
         Self::from_text(&format!("{{ {} }}", e.syntax()))
     }
 }
 
 impl AstBuilder<ast::Expr> {
-    fn from_text(text: &str) -> TreeArc<ast::Expr> {
+    fn from_text(text: &str) -> ast::Expr {
         ast_node_from_file_text(&format!("fn f() {{ {}; }}", text))
     }
 
-    pub fn unit() -> TreeArc<ast::Expr> {
+    pub fn unit() -> ast::Expr {
         Self::from_text("()")
     }
 
-    pub fn unimplemented() -> TreeArc<ast::Expr> {
+    pub fn unimplemented() -> ast::Expr {
         Self::from_text("unimplemented!()")
     }
 }
 
 impl AstBuilder<ast::NameRef> {
-    pub fn new(text: &str) -> TreeArc<ast::NameRef> {
+    pub fn new(text: &str) -> ast::NameRef {
         ast_node_from_file_text(&format!("fn f() {{ {}; }}", text))
     }
 }
 
-fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> {
+fn ast_node_from_file_text<N: AstNode>(text: &str) -> N {
     let parse = SourceFile::parse(text);
     let res = parse.tree().syntax().descendants().find_map(N::cast).unwrap().to_owned();
     res
@@ -285,47 +290,49 @@ fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> {
 
 mod tokens {
     use once_cell::sync::Lazy;
-    use ra_syntax::{AstNode, SourceFile, SyntaxKind::*, SyntaxToken, TreeArc, T};
+    use ra_syntax::{AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken, T};
 
-    static SOURCE_FILE: Lazy<TreeArc<SourceFile>> =
-        Lazy::new(|| SourceFile::parse(",\n; ;").tree().to_owned());
+    static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;"));
 
-    pub(crate) fn comma() -> SyntaxToken<'static> {
+    pub(crate) fn comma() -> SyntaxToken {
         SOURCE_FILE
+            .tree()
             .syntax()
             .descendants_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == T![,])
             .unwrap()
     }
 
-    pub(crate) fn single_space() -> SyntaxToken<'static> {
+    pub(crate) fn single_space() -> SyntaxToken {
         SOURCE_FILE
+            .tree()
             .syntax()
             .descendants_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
             .unwrap()
     }
 
     #[allow(unused)]
-    pub(crate) fn single_newline() -> SyntaxToken<'static> {
+    pub(crate) fn single_newline() -> SyntaxToken {
         SOURCE_FILE
+            .tree()
             .syntax()
             .descendants_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
             .unwrap()
     }
 
-    pub(crate) struct WsBuilder(TreeArc<SourceFile>);
+    pub(crate) struct WsBuilder(SourceFile);
 
     impl WsBuilder {
         pub(crate) fn new(text: &str) -> WsBuilder {
             WsBuilder(SourceFile::parse(text).ok().unwrap())
         }
-        pub(crate) fn ws(&self) -> SyntaxToken<'_> {
-            self.0.syntax().first_child_or_token().unwrap().as_token().unwrap()
+        pub(crate) fn ws(&self) -> SyntaxToken {
+            self.0.syntax().first_child_or_token().unwrap().as_token().cloned().unwrap()
         }
     }
 
diff --git a/crates/ra_assists/src/auto_import.rs b/crates/ra_assists/src/auto_import.rs
index f8f37e852..0eb4bdb62 100644
--- a/crates/ra_assists/src/auto_import.rs
+++ b/crates/ra_assists/src/auto_import.rs
@@ -12,25 +12,25 @@ use ra_syntax::{
     SyntaxNode, TextRange, T,
 };
 
-fn collect_path_segments_raw<'a>(
-    segments: &mut Vec<&'a ast::PathSegment>,
-    mut path: &'a ast::Path,
+fn collect_path_segments_raw(
+    segments: &mut Vec<ast::PathSegment>,
+    mut path: ast::Path,
 ) -> Option<usize> {
     let oldlen = segments.len();
     loop {
         let mut children = path.syntax().children_with_tokens();
         let (first, second, third) = (
-            children.next().map(|n| (n, n.kind())),
-            children.next().map(|n| (n, n.kind())),
-            children.next().map(|n| (n, n.kind())),
+            children.next().map(|n| (n.clone(), n.kind())),
+            children.next().map(|n| (n.clone(), n.kind())),
+            children.next().map(|n| (n.clone(), n.kind())),
         );
         match (first, second, third) {
             (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => {
-                path = ast::Path::cast(subpath.as_node()?)?;
-                segments.push(ast::PathSegment::cast(segment.as_node()?)?);
+                path = ast::Path::cast(subpath.as_node()?.clone())?;
+                segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
             }
             (Some((segment, PATH_SEGMENT)), _, _) => {
-                segments.push(ast::PathSegment::cast(segment.as_node()?)?);
+                segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
                 break;
             }
             (_, _, _) => return None,
@@ -60,7 +60,7 @@ fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) {
 }
 
 // Returns the numeber of common segments.
-fn compare_path_segments(left: &[SmolStr], right: &[&ast::PathSegment]) -> usize {
+fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize {
     left.iter().zip(right).filter(|(l, r)| compare_path_segment(l, r)).count()
 }
 
@@ -81,12 +81,12 @@ fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool {
     a == b.text()
 }
 
-#[derive(Copy, Clone)]
-enum ImportAction<'a> {
+#[derive(Clone)]
+enum ImportAction {
     Nothing,
     // Add a brand new use statement.
     AddNewUse {
-        anchor: Option<&'a SyntaxNode>, // anchor node
+        anchor: Option<SyntaxNode>, // anchor node
         add_after_anchor: bool,
     },
 
@@ -94,9 +94,9 @@ enum ImportAction<'a> {
     AddNestedImport {
         // how may segments matched with the target path
         common_segments: usize,
-        path_to_split: &'a ast::Path,
+        path_to_split: ast::Path,
         // the first segment of path_to_split we want to add into the new nested list
-        first_segment_to_split: Option<&'a ast::PathSegment>,
+        first_segment_to_split: Option<ast::PathSegment>,
         // Wether to add 'self' in addition to the target path
         add_self: bool,
     },
@@ -104,20 +104,20 @@ enum ImportAction<'a> {
     AddInTreeList {
         common_segments: usize,
         // The UseTreeList where to add the target path
-        tree_list: &'a ast::UseTreeList,
+        tree_list: ast::UseTreeList,
         add_self: bool,
     },
 }
 
-impl<'a> ImportAction<'a> {
-    fn add_new_use(anchor: Option<&'a SyntaxNode>, add_after_anchor: bool) -> Self {
+impl ImportAction {
+    fn add_new_use(anchor: Option<SyntaxNode>, add_after_anchor: bool) -> Self {
         ImportAction::AddNewUse { anchor, add_after_anchor }
     }
 
     fn add_nested_import(
         common_segments: usize,
-        path_to_split: &'a ast::Path,
-        first_segment_to_split: Option<&'a ast::PathSegment>,
+        path_to_split: ast::Path,
+        first_segment_to_split: Option<ast::PathSegment>,
         add_self: bool,
     ) -> Self {
         ImportAction::AddNestedImport {
@@ -130,14 +130,14 @@ impl<'a> ImportAction<'a> {
 
     fn add_in_tree_list(
         common_segments: usize,
-        tree_list: &'a ast::UseTreeList,
+        tree_list: ast::UseTreeList,
         add_self: bool,
     ) -> Self {
         ImportAction::AddInTreeList { common_segments, tree_list, add_self }
     }
 
-    fn better<'b>(left: &'b ImportAction<'a>, right: &'b ImportAction<'a>) -> &'b ImportAction<'a> {
-        if left.is_better(right) {
+    fn better(left: ImportAction, right: ImportAction) -> ImportAction {
+        if left.is_better(&right) {
             left
         } else {
             right
@@ -166,12 +166,12 @@ impl<'a> ImportAction<'a> {
 
 // Find out the best ImportAction to import target path against current_use_tree.
 // If current_use_tree has a nested import the function gets called recursively on every UseTree inside a UseTreeList.
-fn walk_use_tree_for_best_action<'a>(
-    current_path_segments: &mut Vec<&'a ast::PathSegment>, // buffer containing path segments
-    current_parent_use_tree_list: Option<&'a ast::UseTreeList>, // will be Some value if we are in a nested import
-    current_use_tree: &'a ast::UseTree, // the use tree we are currently examinating
-    target: &[SmolStr], // the path we want to import
-) -> ImportAction<'a> {
+fn walk_use_tree_for_best_action(
+    current_path_segments: &mut Vec<ast::PathSegment>, // buffer containing path segments
+    current_parent_use_tree_list: Option<ast::UseTreeList>, // will be Some value if we are in a nested import
+    current_use_tree: ast::UseTree, // the use tree we are currently examinating
+    target: &[SmolStr], // the path we want to import
+) -> ImportAction {
     // We save the number of segments in the buffer so we can restore the correct segments
     // before returning. Recursive call will add segments so we need to delete them.
     let prev_len = current_path_segments.len();
@@ -188,32 +188,36 @@ fn walk_use_tree_for_best_action<'a>(
                     .syntax()
                     .ancestors()
                     .find_map(ast::UseItem::cast)
-                    .map(AstNode::syntax),
+                    .map(|it| it.syntax().clone()),
                 true,
             );
         }
     };
 
     // This can happen only if current_use_tree is a direct child of a UseItem
-    if let Some(name) = alias.and_then(ast::NameOwner::name) {
-        if compare_path_segment_with_name(&target[0], name) {
+    if let Some(name) = alias.and_then(|it| it.name()) {
+        if compare_path_segment_with_name(&target[0], &name) {
             return ImportAction::Nothing;
         }
     }
 
-    collect_path_segments_raw(current_path_segments, path);
+    collect_path_segments_raw(current_path_segments, path.clone());
 
     // We compare only the new segments added in the line just above.
     // The first prev_len segments were already compared in 'parent' recursive calls.
     let left = target.split_at(prev_len).1;
     let right = current_path_segments.split_at(prev_len).1;
-    let common = compare_path_segments(left, right);
+    let common = compare_path_segments(left, &right);
     let mut action = match common {
         0 => ImportAction::add_new_use(
             // e.g: target is std::fmt and we can have
             // use foo::bar
             // We add a brand new use statement
-            current_use_tree.syntax().ancestors().find_map(ast::UseItem::cast).map(AstNode::syntax),
+            current_use_tree
+                .syntax()
+                .ancestors()
+                .find_map(ast::UseItem::cast)
+                .map(|it| it.syntax().clone()),
             true,
         ),
         common if common == left.len() && left.len() == right.len() => {
@@ -223,9 +227,9 @@ fn walk_use_tree_for_best_action<'a>(
             if let Some(list) = tree_list {
                 // In case 2 we need to add self to the nested list
                 // unless it's already there
-                let has_self = list.use_trees().map(ast::UseTree::path).any(|p| {
-                    p.and_then(ast::Path::segment)
-                        .and_then(ast::PathSegment::kind)
+                let has_self = list.use_trees().map(|it| it.path()).any(|p| {
+                    p.and_then(|it| it.segment())
+                        .and_then(|it| it.kind())
                         .filter(|k| *k == ast::PathSegmentKind::SelfKw)
                         .is_some()
                 });
@@ -248,7 +252,7 @@ fn walk_use_tree_for_best_action<'a>(
             ImportAction::add_nested_import(
                 prev_len + common,
                 path,
-                Some(segments_to_split[0]),
+                Some(segments_to_split[0].clone()),
                 false,
             )
         }
@@ -263,14 +267,18 @@ fn walk_use_tree_for_best_action<'a>(
                     .syntax()
                     .ancestors()
                     .find_map(ast::UseItem::cast)
-                    .map(AstNode::syntax),
+                    .map(|it| it.syntax().clone()),
                 true,
             );
             if let Some(list) = tree_list {
                 // Case 2, check recursively if the path is already imported in the nested list
                 for u in list.use_trees() {
-                    let child_action =
-                        walk_use_tree_for_best_action(current_path_segments, Some(list), u, target);
+                    let child_action = walk_use_tree_for_best_action(
+                        current_path_segments,
+                        Some(list.clone()),
+                        u,
+                        target,
+                    );
                     if child_action.is_better(&better_action) {
                         better_action = child_action;
                         if let ImportAction::Nothing = better_action {
@@ -291,7 +299,7 @@ fn walk_use_tree_for_best_action<'a>(
             ImportAction::add_nested_import(
                 prev_len + common,
                 path,
-                Some(segments_to_split[0]),
+                Some(segments_to_split[0].clone()),
                 true,
             )
         }
@@ -302,7 +310,7 @@ fn walk_use_tree_for_best_action<'a>(
             ImportAction::add_nested_import(
                 prev_len + common,
                 path,
-                Some(segments_to_split[0]),
+                Some(segments_to_split[0].clone()),
                 false,
             )
         }
@@ -311,7 +319,7 @@ fn walk_use_tree_for_best_action<'a>(
 
     // If we are inside a UseTreeList adding a use statement become adding to the existing
     // tree list.
-    action = match (current_parent_use_tree_list, action) {
+    action = match (current_parent_use_tree_list, action.clone()) {
         (Some(use_tree_list), ImportAction::AddNewUse { .. }) => {
             ImportAction::add_in_tree_list(prev_len, use_tree_list, false)
         }
@@ -323,19 +331,20 @@ fn walk_use_tree_for_best_action<'a>(
     action
 }
 
-fn best_action_for_target<'b, 'a: 'b>(
-    container: &'a SyntaxNode,
-    anchor: &'a SyntaxNode,
-    target: &'b [SmolStr],
-) -> ImportAction<'a> {
+fn best_action_for_target(
+    container: SyntaxNode,
+    anchor: SyntaxNode,
+    target: &[SmolStr],
+) -> ImportAction {
     let mut storage = Vec::with_capacity(16); // this should be the only allocation
     let best_action = container
         .children()
         .filter_map(ast::UseItem::cast)
-        .filter_map(ast::UseItem::use_tree)
+        .filter_map(|it| it.use_tree())
         .map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target))
-        .fold(None, |best, a| {
-            best.and_then(|best| Some(*ImportAction::better(&best, &a))).or_else(|| Some(a))
+        .fold(None, |best, a| match best {
+            Some(best) => Some(ImportAction::better(best, a)),
+            None => Some(a),
         });
 
     match best_action {
@@ -386,7 +395,7 @@ fn make_assist(action: &ImportAction, target: &[SmolStr], edit: &mut TextEditBui
 }
 
 fn make_assist_add_new_use(
-    anchor: &Option<&SyntaxNode>,
+    anchor: &Option<SyntaxNode>,
     after: bool,
     target: &[SmolStr],
     edit: &mut TextEditBuilder,
@@ -396,7 +405,7 @@ fn make_assist_add_new_use(
         let mut buf = String::new();
         if after {
             buf.push_str("\n");
-            if let Some(spaces) = indent {
+            if let Some(spaces) = &indent {
                 buf.push_str(spaces);
             }
         }
@@ -405,8 +414,8 @@ fn make_assist_add_new_use(
         buf.push_str(";");
         if !after {
             buf.push_str("\n\n");
-            if let Some(spaces) = indent {
-                buf.push_str(spaces);
+            if let Some(spaces) = &indent {
+                buf.push_str(&spaces);
             }
         }
         let position = if after { anchor.range().end() } else { anchor.range().start() };
@@ -444,7 +453,7 @@ fn make_assist_add_in_tree_list(
 
 fn make_assist_add_nested_import(
     path: &ast::Path,
-    first_segment_to_split: &Option<&ast::PathSegment>,
+    first_segment_to_split: &Option<ast::PathSegment>,
     target: &[SmolStr],
     add_self: bool,
     edit: &mut TextEditBuilder,
@@ -482,7 +491,7 @@ fn apply_auto_import(
     target: &[SmolStr],
     edit: &mut TextEditBuilder,
 ) {
-    let action = best_action_for_target(container, path.syntax(), target);
+    let action = best_action_for_target(container.clone(), path.syntax().clone(), target);
     make_assist(&action, target, edit);
     if let Some(last) = path.segment() {
         // Here we are assuming the assist will provide a correct use statement
@@ -522,26 +531,26 @@ pub fn auto_import_text_edit(
     edit: &mut TextEditBuilder,
 ) {
     let container = position.ancestors().find_map(|n| {
-        if let Some(module) = ast::Module::cast(n) {
-            return module.item_list().map(ast::AstNode::syntax);
+        if let Some(module) = ast::Module::cast(n.clone()) {
+            return module.item_list().map(|it| it.syntax().clone());
         }
-        ast::SourceFile::cast(n).map(ast::AstNode::syntax)
+        ast::SourceFile::cast(n).map(|it| it.syntax().clone())
     });
 
     if let Some(container) = container {
-        let action = best_action_for_target(container, anchor, target);
+        let action = best_action_for_target(container, anchor.clone(), target);
         make_assist(&action, target, edit);
     }
 }
 
 pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
-    let path: &ast::Path = ctx.node_at_offset()?;
+    let path: ast::Path = ctx.node_at_offset()?;
     // We don't want to mess with use statements
     if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() {
         return None;
     }
 
-    let hir_path = hir::Path::from_ast(path)?;
+    let hir_path = hir::Path::from_ast(path.clone())?;
     let segments = collect_hir_path_segments(&hir_path);
     if segments.len() < 2 {
         return None;
@@ -554,7 +563,7 @@ pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist
             format!("import {} in mod {}", fmt_segments(&segments), name.text()),
             |edit| {
                 let mut text_edit = TextEditBuilder::default();
-                apply_auto_import(item_list.syntax(), path, &segments, &mut text_edit);
+                apply_auto_import(item_list.syntax(), &path, &segments, &mut text_edit);
                 edit.set_edit_builder(text_edit);
             },
         );
@@ -566,7 +575,7 @@ pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist
         format!("import {} in the current file", fmt_segments(&segments)),
         |edit| {
             let mut text_edit = TextEditBuilder::default();
-            apply_auto_import(current_file.syntax(), path, &segments, &mut text_edit);
+            apply_auto_import(current_file.syntax(), &path, &segments, &mut text_edit);
             edit.set_edit_builder(text_edit);
         },
     );
diff --git a/crates/ra_assists/src/change_visibility.rs b/crates/ra_assists/src/change_visibility.rs
index 6cabba3e3..ab10d2aa4 100644
--- a/crates/ra_assists/src/change_visibility.rs
+++ b/crates/ra_assists/src/change_visibility.rs
@@ -35,7 +35,7 @@ fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
         if parent.children().any(|child| child.kind() == VISIBILITY) {
             return None;
         }
-        (vis_offset(parent), keyword.range())
+        (vis_offset(&parent), keyword.range())
     } else {
         let ident = ctx.token_at_offset().find(|leaf| leaf.kind() == IDENT)?;
         let field = ident.parent().ancestors().find_map(ast::NamedFieldDef::cast)?;
@@ -65,7 +65,7 @@ fn vis_offset(node: &SyntaxNode) -> TextUnit {
         .unwrap_or_else(|| node.range().start())
 }
 
-fn change_vis(mut ctx: AssistCtx<impl HirDatabase>, vis: &ast::Visibility) -> Option<Assist> {
+fn change_vis(mut ctx: AssistCtx<impl HirDatabase>, vis: ast::Visibility) -> Option<Assist> {
     if vis.syntax().text() == "pub" {
         ctx.add_action(AssistId("change_visibility"), "change to pub(crate)", |edit| {
             edit.target(vis.syntax().range());
diff --git a/crates/ra_assists/src/fill_match_arms.rs b/crates/ra_assists/src/fill_match_arms.rs
index deef166b5..b96806ac6 100644
--- a/crates/ra_assists/src/fill_match_arms.rs
+++ b/crates/ra_assists/src/fill_match_arms.rs
@@ -27,7 +27,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
27 let mut arm_iter = arm_list.arms(); 27 let mut arm_iter = arm_list.arms();
28 let first = arm_iter.next(); 28 let first = arm_iter.next();
29 29
30 match first { 30 match &first {
31 // If there arm list is empty or there is only one trivial arm, then proceed. 31 // If there arm list is empty or there is only one trivial arm, then proceed.
32 Some(arm) if is_trivial_arm(arm) => { 32 Some(arm) if is_trivial_arm(arm) => {
33 if arm_iter.next() != None { 33 if arm_iter.next() != None {
@@ -44,7 +44,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
44 44
45 let expr = match_expr.expr()?; 45 let expr = match_expr.expr()?;
46 let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, expr.syntax(), None); 46 let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, expr.syntax(), None);
47 let match_expr_ty = analyzer.type_of(ctx.db, expr)?; 47 let match_expr_ty = analyzer.type_of(ctx.db, &expr)?;
48 let enum_def = analyzer.autoderef(ctx.db, match_expr_ty).find_map(|ty| match ty.as_adt() { 48 let enum_def = analyzer.autoderef(ctx.db, match_expr_ty).find_map(|ty| match ty.as_adt() {
49 Some((AdtDef::Enum(e), _)) => Some(e), 49 Some((AdtDef::Enum(e), _)) => Some(e),
50 _ => None, 50 _ => None,
diff --git a/crates/ra_assists/src/flip_binexpr.rs b/crates/ra_assists/src/flip_binexpr.rs
index 5e41f9346..2e591ad3b 100644
--- a/crates/ra_assists/src/flip_binexpr.rs
+++ b/crates/ra_assists/src/flip_binexpr.rs
@@ -6,8 +6,8 @@ use crate::{Assist, AssistCtx, AssistId};
6/// Flip binary expression assist. 6/// Flip binary expression assist.
7pub(crate) fn flip_binexpr(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { 7pub(crate) fn flip_binexpr(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
8 let expr = ctx.node_at_offset::<BinExpr>()?; 8 let expr = ctx.node_at_offset::<BinExpr>()?;
9 let lhs = expr.lhs()?.syntax(); 9 let lhs = expr.lhs()?.syntax().clone();
10 let rhs = expr.rhs()?.syntax(); 10 let rhs = expr.rhs()?.syntax().clone();
11 let op_range = expr.op_token()?.range(); 11 let op_range = expr.op_token()?.range();
12 // The assist should be applied only if the cursor is on the operator 12 // The assist should be applied only if the cursor is on the operator
13 let cursor_in_range = ctx.frange.range.is_subrange(&op_range); 13 let cursor_in_range = ctx.frange.range.is_subrange(&op_range);
diff --git a/crates/ra_assists/src/flip_comma.rs b/crates/ra_assists/src/flip_comma.rs
index d8dba779f..13016ae06 100644
--- a/crates/ra_assists/src/flip_comma.rs
+++ b/crates/ra_assists/src/flip_comma.rs
@@ -5,8 +5,8 @@ use crate::{Assist, AssistCtx, AssistId};
5 5
6pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { 6pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
7 let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == T![,])?; 7 let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == T![,])?;
8 let prev = non_trivia_sibling(comma.into(), Direction::Prev)?; 8 let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?;
9 let next = non_trivia_sibling(comma.into(), Direction::Next)?; 9 let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?;
10 ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| { 10 ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| {
11 edit.target(comma.range()); 11 edit.target(comma.range());
12 edit.replace(prev.range(), next.to_string()); 12 edit.replace(prev.range(), next.to_string());
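
In flip_comma above, the same comma token is converted into a syntax element twice, and each conversion consumes the token, hence the two `comma.clone().into()` calls. A reduced sketch of that consuming-`From` pattern with toy `Token`/`Element` types; the `non_trivia_sibling` stub here is a placeholder, not the real helper:

#[derive(Clone, Debug)]
struct Token(String);

#[derive(Debug)]
enum Element {
    Token(Token),
}

// The conversion takes the token by value, so reusing the same token needs a clone.
impl From<Token> for Element {
    fn from(tok: Token) -> Element {
        Element::Token(tok)
    }
}

fn non_trivia_sibling(of: Element, direction: &str) -> Option<String> {
    // Placeholder body: a real implementation would walk the tree.
    match of {
        Element::Token(t) => Some(format!("{} sibling of {:?}", direction, t.0)),
    }
}

fn main() {
    let comma = Token(",".into());
    let prev = non_trivia_sibling(comma.clone().into(), "prev"); // first conversion consumes a clone
    let next = non_trivia_sibling(comma.clone().into(), "next"); // second conversion needs its own clone
    println!("{:?} / {:?} / comma still usable: {:?}", prev, next, comma);
}
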
diff --git a/crates/ra_assists/src/inline_local_variable.rs b/crates/ra_assists/src/inline_local_variable.rs
index 554de8b46..3c17089de 100644
--- a/crates/ra_assists/src/inline_local_variable.rs
+++ b/crates/ra_assists/src/inline_local_variable.rs
@@ -16,18 +16,18 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt
16 if bind_pat.is_mutable() { 16 if bind_pat.is_mutable() {
17 return None; 17 return None;
18 } 18 }
19 let initializer_expr = let_stmt.initializer(); 19 let initializer_expr = let_stmt.initializer()?;
20 let delete_range = if let Some(whitespace) = let_stmt 20 let delete_range = if let Some(whitespace) = let_stmt
21 .syntax() 21 .syntax()
22 .next_sibling_or_token() 22 .next_sibling_or_token()
23 .and_then(|it| ast::Whitespace::cast(it.as_token()?)) 23 .and_then(|it| ast::Whitespace::cast(it.as_token()?.clone()))
24 { 24 {
25 TextRange::from_to(let_stmt.syntax().range().start(), whitespace.syntax().range().end()) 25 TextRange::from_to(let_stmt.syntax().range().start(), whitespace.syntax().range().end())
26 } else { 26 } else {
27 let_stmt.syntax().range() 27 let_stmt.syntax().range()
28 }; 28 };
29 let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, bind_pat.syntax(), None); 29 let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, bind_pat.syntax(), None);
30 let refs = analyzer.find_all_refs(bind_pat); 30 let refs = analyzer.find_all_refs(&bind_pat);
31 31
32 let mut wrap_in_parens = vec![true; refs.len()]; 32 let mut wrap_in_parens = vec![true; refs.len()];
33 33
@@ -45,7 +45,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt
45 } 45 }
46 }; 46 };
47 47
48 wrap_in_parens[i] = match (initializer_expr?.kind(), usage_parent.kind()) { 48 wrap_in_parens[i] = match (initializer_expr.kind(), usage_parent.kind()) {
49 (ExprKind::CallExpr(_), _) 49 (ExprKind::CallExpr(_), _)
50 | (ExprKind::IndexExpr(_), _) 50 | (ExprKind::IndexExpr(_), _)
51 | (ExprKind::MethodCallExpr(_), _) 51 | (ExprKind::MethodCallExpr(_), _)
@@ -71,7 +71,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt
71 }; 71 };
72 } 72 }
73 73
74 let init_str = initializer_expr?.syntax().text().to_string(); 74 let init_str = initializer_expr.syntax().text().to_string();
75 let init_in_paren = format!("({})", &init_str); 75 let init_in_paren = format!("({})", &init_str);
76 76
77 ctx.add_action( 77 ctx.add_action(
diff --git a/crates/ra_assists/src/introduce_variable.rs b/crates/ra_assists/src/introduce_variable.rs
index f7f5ccafa..ce28132c9 100644
--- a/crates/ra_assists/src/introduce_variable.rs
+++ b/crates/ra_assists/src/introduce_variable.rs
@@ -20,8 +20,8 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option
20 return None; 20 return None;
21 } 21 }
22 let expr = node.ancestors().find_map(valid_target_expr)?; 22 let expr = node.ancestors().find_map(valid_target_expr)?;
23 let (anchor_stmt, wrap_in_block) = anchor_stmt(expr)?; 23 let (anchor_stmt, wrap_in_block) = anchor_stmt(expr.clone())?;
24 let indent = anchor_stmt.prev_sibling_or_token()?.as_token()?; 24 let indent = anchor_stmt.prev_sibling_or_token()?.as_token()?.clone();
25 if indent.kind() != WHITESPACE { 25 if indent.kind() != WHITESPACE {
26 return None; 26 return None;
27 } 27 }
@@ -37,9 +37,9 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option
37 }; 37 };
38 38
39 expr.syntax().text().push_to(&mut buf); 39 expr.syntax().text().push_to(&mut buf);
40 let full_stmt = ast::ExprStmt::cast(anchor_stmt); 40 let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone());
41 let is_full_stmt = if let Some(expr_stmt) = full_stmt { 41 let is_full_stmt = if let Some(expr_stmt) = &full_stmt {
42 Some(expr.syntax()) == expr_stmt.expr().map(|e| e.syntax()) 42 Some(expr.syntax().clone()) == expr_stmt.expr().map(|e| e.syntax().clone())
43 } else { 43 } else {
44 false 44 false
45 }; 45 };
@@ -81,7 +81,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option
81 81
82/// Check whether the node is a valid expression which can be extracted to a variable. 82/// Check whether the node is a valid expression which can be extracted to a variable.
83/// In general that's true for any expression, but in some cases that would produce invalid code. 83/// In general that's true for any expression, but in some cases that would produce invalid code.
84fn valid_target_expr(node: &SyntaxNode) -> Option<&ast::Expr> { 84fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> {
85 match node.kind() { 85 match node.kind() {
86 PATH_EXPR => None, 86 PATH_EXPR => None,
87 BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()), 87 BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()),
@@ -96,14 +96,10 @@ fn valid_target_expr(node: &SyntaxNode) -> Option<&ast::Expr> {
96/// to produce correct code. 96/// to produce correct code.
97/// It can be a statement, the last in a block expression or a wanna be block 97/// It can be a statement, the last in a block expression or a wanna be block
98/// expression like a lambda or match arm. 98/// expression like a lambda or match arm.
99fn anchor_stmt(expr: &ast::Expr) -> Option<(&SyntaxNode, bool)> { 99fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> {
100 expr.syntax().ancestors().find_map(|node| { 100 expr.syntax().ancestors().find_map(|node| {
101 if ast::Stmt::cast(node).is_some() {
102 return Some((node, false));
103 }
104
105 if let Some(expr) = node.parent().and_then(ast::Block::cast).and_then(|it| it.expr()) { 101 if let Some(expr) = node.parent().and_then(ast::Block::cast).and_then(|it| it.expr()) {
106 if expr.syntax() == node { 102 if expr.syntax() == &node {
107 tested_by!(test_introduce_var_last_expr); 103 tested_by!(test_introduce_var_last_expr);
108 return Some((node, false)); 104 return Some((node, false));
109 } 105 }
@@ -115,6 +111,10 @@ fn anchor_stmt(expr: &ast::Expr) -> Option<(&SyntaxNode, bool)> {
115 } 111 }
116 } 112 }
117 113
114 if ast::Stmt::cast(node.clone()).is_some() {
115 return Some((node, false));
116 }
117
118 None 118 None
119 }) 119 })
120} 120}
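
In introduce_variable above, `anchor_stmt` and `valid_target_expr` now work with owned `SyntaxNode`s, and `ast::Stmt::cast` consumes its argument, so the node is cloned before the cast in order to still be returned afterwards. A reduced sketch of that clone-before-consuming-cast step, with toy types standing in for the real syntax tree:

#[derive(Clone, Debug)]
struct SyntaxNode {
    kind: &'static str,
}

struct Stmt(SyntaxNode);

impl Stmt {
    // The cast consumes the node, like ast::Stmt::cast(node) in the new API.
    fn cast(node: SyntaxNode) -> Option<Stmt> {
        if node.kind == "STMT" {
            Some(Stmt(node))
        } else {
            None
        }
    }
}

fn anchor_stmt(ancestors: Vec<SyntaxNode>) -> Option<(SyntaxNode, bool)> {
    ancestors.into_iter().find_map(|node| {
        // Clone before the consuming cast so `node` can still be returned below.
        if Stmt::cast(node.clone()).is_some() {
            return Some((node, false));
        }
        None
    })
}

fn main() {
    let found = anchor_stmt(vec![SyntaxNode { kind: "EXPR" }, SyntaxNode { kind: "STMT" }]);
    println!("{:?}", found);
}
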
diff --git a/crates/ra_assists/src/move_guard.rs b/crates/ra_assists/src/move_guard.rs
index e1ce86a33..313c9ad18 100644
--- a/crates/ra_assists/src/move_guard.rs
+++ b/crates/ra_assists/src/move_guard.rs
@@ -18,9 +18,9 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op
18 18
19 ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| { 19 ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| {
20 edit.target(guard.syntax().range()); 20 edit.target(guard.syntax().range());
21 let offseting_amount = match space_before_guard { 21 let offseting_amount = match &space_before_guard {
22 Some(SyntaxElement::Token(tok)) => { 22 Some(SyntaxElement::Token(tok)) => {
23 if let Some(_) = ast::Whitespace::cast(tok) { 23 if let Some(_) = ast::Whitespace::cast(tok.clone()) {
24 let ele = space_before_guard.unwrap().range(); 24 let ele = space_before_guard.unwrap().range();
25 edit.delete(ele); 25 edit.delete(ele);
26 ele.len() 26 ele.len()
@@ -39,11 +39,11 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op
39} 39}
40 40
41pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { 41pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
42 let match_arm: &MatchArm = ctx.node_at_offset::<MatchArm>()?; 42 let match_arm: MatchArm = ctx.node_at_offset::<MatchArm>()?;
43 let last_match_pat = match_arm.pats().last()?; 43 let last_match_pat = match_arm.pats().last()?;
44 44
45 let arm_body = match_arm.expr()?; 45 let arm_body = match_arm.expr()?;
46 let if_expr: &IfExpr = IfExpr::cast(arm_body.syntax())?; 46 let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone())?;
47 let cond = if_expr.condition()?; 47 let cond = if_expr.condition()?;
48 let then_block = if_expr.then_branch()?; 48 let then_block = if_expr.then_branch()?;
49 49
@@ -65,7 +65,7 @@ pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>)
65 edit.target(if_expr.syntax().range()); 65 edit.target(if_expr.syntax().range());
66 let then_only_expr = then_block.statements().next().is_none(); 66 let then_only_expr = then_block.statements().next().is_none();
67 67
68 match then_block.expr() { 68 match &then_block.expr() {
69 Some(then_expr) if then_only_expr => { 69 Some(then_expr) if then_only_expr => {
70 edit.replace(if_expr.syntax().range(), then_expr.syntax().text()) 70 edit.replace(if_expr.syntax().range(), then_expr.syntax().text())
71 } 71 }
diff --git a/crates/ra_assists/src/remove_dbg.rs b/crates/ra_assists/src/remove_dbg.rs
index 5680f76ca..c330bc827 100644
--- a/crates/ra_assists/src/remove_dbg.rs
+++ b/crates/ra_assists/src/remove_dbg.rs
@@ -8,7 +8,7 @@ use ra_syntax::{
8pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { 8pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
9 let macro_call = ctx.node_at_offset::<ast::MacroCall>()?; 9 let macro_call = ctx.node_at_offset::<ast::MacroCall>()?;
10 10
11 if !is_valid_macrocall(macro_call, "dbg")? { 11 if !is_valid_macrocall(&macro_call, "dbg")? {
12 return None; 12 return None;
13 } 13 }
14 14
@@ -35,7 +35,7 @@ pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
35 }; 35 };
36 36
37 let macro_content = { 37 let macro_content = {
38 let macro_args = macro_call.token_tree()?.syntax(); 38 let macro_args = macro_call.token_tree()?.syntax().clone();
39 let range = macro_args.range(); 39 let range = macro_args.range();
40 let start = range.start() + TextUnit::of_char('('); 40 let start = range.start() + TextUnit::of_char('(');
41 let end = range.end() - TextUnit::of_char(')'); 41 let end = range.end() - TextUnit::of_char(')');
@@ -65,7 +65,7 @@ fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option<b
65 return None; 65 return None;
66 } 66 }
67 67
68 let node = macro_call.token_tree()?.syntax(); 68 let node = macro_call.token_tree()?.syntax().clone();
69 let first_child = node.first_child_or_token()?; 69 let first_child = node.first_child_or_token()?;
70 let last_child = node.last_child_or_token()?; 70 let last_child = node.last_child_or_token()?;
71 71
diff --git a/crates/ra_assists/src/replace_if_let_with_match.rs b/crates/ra_assists/src/replace_if_let_with_match.rs
index c2c7cf70b..5de6aa266 100644
--- a/crates/ra_assists/src/replace_if_let_with_match.rs
+++ b/crates/ra_assists/src/replace_if_let_with_match.rs
@@ -5,7 +5,7 @@ use ra_syntax::{ast, AstNode};
5use crate::{Assist, AssistCtx, AssistId}; 5use crate::{Assist, AssistCtx, AssistId};
6 6
7pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { 7pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
8 let if_expr: &ast::IfExpr = ctx.node_at_offset()?; 8 let if_expr: ast::IfExpr = ctx.node_at_offset()?;
9 let cond = if_expr.condition()?; 9 let cond = if_expr.condition()?;
10 let pat = cond.pat()?; 10 let pat = cond.pat()?;
11 let expr = cond.expr()?; 11 let expr = cond.expr()?;
@@ -25,16 +25,11 @@ pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) ->
25 ctx.build() 25 ctx.build()
26} 26}
27 27
28fn build_match_expr( 28fn build_match_expr(expr: ast::Expr, pat1: ast::Pat, arm1: ast::Block, arm2: ast::Block) -> String {
29 expr: &ast::Expr,
30 pat1: &ast::Pat,
31 arm1: &ast::Block,
32 arm2: &ast::Block,
33) -> String {
34 let mut buf = String::new(); 29 let mut buf = String::new();
35 buf.push_str(&format!("match {} {{\n", expr.syntax().text())); 30 buf.push_str(&format!("match {} {{\n", expr.syntax().text()));
36 buf.push_str(&format!(" {} => {}\n", pat1.syntax().text(), format_arm(arm1))); 31 buf.push_str(&format!(" {} => {}\n", pat1.syntax().text(), format_arm(&arm1)));
37 buf.push_str(&format!(" _ => {}\n", format_arm(arm2))); 32 buf.push_str(&format!(" _ => {}\n", format_arm(&arm2)));
38 buf.push_str("}"); 33 buf.push_str("}");
39 buf 34 buf
40} 35}
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs
index b063193cf..375e2f508 100644
--- a/crates/ra_cli/src/main.rs
+++ b/crates/ra_cli/src/main.rs
@@ -7,7 +7,7 @@ use clap::{App, Arg, SubCommand};
7use flexi_logger::Logger; 7use flexi_logger::Logger;
8use ra_ide_api::{file_structure, Analysis}; 8use ra_ide_api::{file_structure, Analysis};
9use ra_prof::profile; 9use ra_prof::profile;
10use ra_syntax::{AstNode, SourceFile, TreeArc}; 10use ra_syntax::{AstNode, SourceFile};
11 11
12type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>; 12type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>;
13 13
@@ -100,9 +100,9 @@ fn main() -> Result<()> {
100 Ok(()) 100 Ok(())
101} 101}
102 102
103fn file() -> Result<TreeArc<SourceFile>> { 103fn file() -> Result<SourceFile> {
104 let text = read_stdin()?; 104 let text = read_stdin()?;
105 Ok(SourceFile::parse(&text).tree().to_owned()) 105 Ok(SourceFile::parse(&text).tree())
106} 106}
107 107
108fn read_stdin() -> Result<String> { 108fn read_stdin() -> Result<String> {
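
The CLI change above drops `TreeArc`: `SourceFile::parse(&text).tree()` now hands back an owned tree, so the `.to_owned()` step disappears. A toy `Parse`/`SourceFile` pair with that shape; the `Rc<String>` payload is only an assumption used to keep the clone cheap and is not the real green-tree representation:

use std::rc::Rc;

#[derive(Clone, Debug)]
struct SourceFile {
    green: Rc<String>, // shared immutable data, so the owned value is cheap to copy
}

struct Parse {
    tree: SourceFile,
}

impl Parse {
    // Returning by value is fine because cloning is a refcount bump.
    fn tree(&self) -> SourceFile {
        self.tree.clone()
    }
}

fn parse(text: &str) -> Parse {
    Parse { tree: SourceFile { green: Rc::new(text.to_string()) } }
}

fn file(text: &str) -> Result<SourceFile, Box<dyn std::error::Error + Send + Sync>> {
    Ok(parse(text).tree())
}

fn main() {
    let tree = file("fn main() {}").unwrap();
    println!("{}", tree.green);
}
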
diff --git a/crates/ra_fmt/src/lib.rs b/crates/ra_fmt/src/lib.rs
index 1c2c04ad2..d6e895729 100644
--- a/crates/ra_fmt/src/lib.rs
+++ b/crates/ra_fmt/src/lib.rs
@@ -3,7 +3,7 @@
3use itertools::Itertools; 3use itertools::Itertools;
4use ra_syntax::{ 4use ra_syntax::{
5 ast::{self, AstNode, AstToken}, 5 ast::{self, AstNode, AstToken},
6 SyntaxKind, 6 SmolStr, SyntaxKind,
7 SyntaxKind::*, 7 SyntaxKind::*,
8 SyntaxNode, SyntaxToken, T, 8 SyntaxNode, SyntaxToken, T,
9}; 9};
@@ -15,12 +15,12 @@ pub fn reindent(text: &str, indent: &str) -> String {
15} 15}
16 16
17/// If the node is on the beginning of the line, calculate indent. 17/// If the node is on the beginning of the line, calculate indent.
18pub fn leading_indent(node: &SyntaxNode) -> Option<&str> { 18pub fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> {
19 for token in prev_tokens(node.first_token()?) { 19 for token in prev_tokens(node.first_token()?) {
20 if let Some(ws) = ast::Whitespace::cast(token) { 20 if let Some(ws) = ast::Whitespace::cast(token.clone()) {
21 let ws_text = ws.text(); 21 let ws_text = ws.text();
22 if let Some(pos) = ws_text.rfind('\n') { 22 if let Some(pos) = ws_text.rfind('\n') {
23 return Some(&ws_text[pos + 1..]); 23 return Some(ws_text[pos + 1..].into());
24 } 24 }
25 } 25 }
26 if token.text().contains('\n') { 26 if token.text().contains('\n') {
@@ -31,17 +31,17 @@ pub fn leading_indent(node: &SyntaxNode) -> Option<&str> {
31} 31}
32 32
33fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { 33fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
34 successors(token.prev_token(), |&token| token.prev_token()) 34 successors(token.prev_token(), |token| token.prev_token())
35} 35}
36 36
37pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> { 37pub fn extract_trivial_expression(block: &ast::Block) -> Option<ast::Expr> {
38 let expr = block.expr()?; 38 let expr = block.expr()?;
39 if expr.syntax().text().contains('\n') { 39 if expr.syntax().text().contains('\n') {
40 return None; 40 return None;
41 } 41 }
42 let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { 42 let non_trivial_children = block.syntax().children().filter(|it| match it.kind() {
43 WHITESPACE | T!['{'] | T!['}'] => false, 43 WHITESPACE | T!['{'] | T!['}'] => false,
44 _ => it != &expr.syntax(), 44 _ => it != expr.syntax(),
45 }); 45 });
46 if non_trivial_children.count() > 0 { 46 if non_trivial_children.count() > 0 {
47 return None; 47 return None;
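
`leading_indent` above now returns an owned `SmolStr` instead of `&str`: the whitespace token is an owned value local to the loop body, so a borrowed slice of its text could not outlive the iteration. A reduced sketch of that constraint, with `String` standing in for `SmolStr` and a toy `Token` type:

#[derive(Clone, Debug)]
struct Token {
    text: String,
}

fn leading_indent(prev_tokens: Vec<Token>) -> Option<String> {
    for token in prev_tokens {
        // `token` is an owned value that dies at the end of this iteration, so the
        // indent slice has to be copied out (`.into()`) instead of borrowed.
        if let Some(pos) = token.text.rfind('\n') {
            return Some(token.text[pos + 1..].into());
        }
    }
    None
}

fn main() {
    let tokens = vec![Token { text: "let x = 1;\n    ".into() }];
    assert_eq!(leading_indent(tokens).as_deref(), Some("    "));
}
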
diff --git a/crates/ra_hir/src/adt.rs b/crates/ra_hir/src/adt.rs
index 8afdac801..c65446df4 100644
--- a/crates/ra_hir/src/adt.rs
+++ b/crates/ra_hir/src/adt.rs
@@ -4,10 +4,7 @@
4use std::sync::Arc; 4use std::sync::Arc;
5 5
6use ra_arena::{impl_arena_id, Arena, RawId}; 6use ra_arena::{impl_arena_id, Arena, RawId};
7use ra_syntax::{ 7use ra_syntax::ast::{self, NameOwner, StructKind, TypeAscriptionOwner};
8 ast::{self, NameOwner, StructKind, TypeAscriptionOwner},
9 TreeArc,
10};
11 8
12use crate::{ 9use crate::{
13 type_ref::TypeRef, AsName, AstDatabase, Crate, DefDatabase, Enum, EnumVariant, FieldSource, 10 type_ref::TypeRef, AsName, AstDatabase, Crate, DefDatabase, Enum, EnumVariant, FieldSource,
@@ -59,11 +56,11 @@ impl StructData {
59 struct_: Struct, 56 struct_: Struct,
60 ) -> Arc<StructData> { 57 ) -> Arc<StructData> {
61 let src = struct_.source(db); 58 let src = struct_.source(db);
62 Arc::new(StructData::new(&*src.ast)) 59 Arc::new(StructData::new(&src.ast))
63 } 60 }
64} 61}
65 62
66fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = &ast::EnumVariant> { 63fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = ast::EnumVariant> {
67 enum_def.variant_list().into_iter().flat_map(|it| it.variants()) 64 enum_def.variant_list().into_iter().flat_map(|it| it.variants())
68} 65}
69 66
@@ -71,9 +68,9 @@ impl EnumVariant {
71 pub(crate) fn source_impl( 68 pub(crate) fn source_impl(
72 self, 69 self,
73 db: &(impl DefDatabase + AstDatabase), 70 db: &(impl DefDatabase + AstDatabase),
74 ) -> Source<TreeArc<ast::EnumVariant>> { 71 ) -> Source<ast::EnumVariant> {
75 let src = self.parent.source(db); 72 let src = self.parent.source(db);
76 let ast = variants(&*src.ast) 73 let ast = variants(&src.ast)
77 .zip(db.enum_data(self.parent).variants.iter()) 74 .zip(db.enum_data(self.parent).variants.iter())
78 .find(|(_syntax, (id, _))| *id == self.id) 75 .find(|(_syntax, (id, _))| *id == self.id)
79 .unwrap() 76 .unwrap()
@@ -96,7 +93,7 @@ impl EnumData {
96 pub(crate) fn enum_data_query(db: &(impl DefDatabase + AstDatabase), e: Enum) -> Arc<EnumData> { 93 pub(crate) fn enum_data_query(db: &(impl DefDatabase + AstDatabase), e: Enum) -> Arc<EnumData> {
97 let src = e.source(db); 94 let src = e.source(db);
98 let name = src.ast.name().map(|n| n.as_name()); 95 let name = src.ast.name().map(|n| n.as_name());
99 let variants = variants(&*src.ast) 96 let variants = variants(&src.ast)
100 .map(|var| EnumVariantData { 97 .map(|var| EnumVariantData {
101 name: var.name().map(|it| it.as_name()), 98 name: var.name().map(|it| it.as_name()),
102 variant_data: Arc::new(VariantData::new(var.kind())), 99 variant_data: Arc::new(VariantData::new(var.kind())),
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index 4fb5844f4..779764590 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -4,10 +4,7 @@ pub(crate) mod docs;
4use std::sync::Arc; 4use std::sync::Arc;
5 5
6use ra_db::{CrateId, Edition, FileId, SourceRootId}; 6use ra_db::{CrateId, Edition, FileId, SourceRootId};
7use ra_syntax::{ 7use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner};
8 ast::{self, NameOwner, TypeAscriptionOwner},
9 TreeArc,
10};
11 8
12use crate::{ 9use crate::{
13 adt::{EnumVariantId, StructFieldId, VariantDef}, 10 adt::{EnumVariantId, StructFieldId, VariantDef},
@@ -155,8 +152,8 @@ impl_froms!(
155); 152);
156 153
157pub enum ModuleSource { 154pub enum ModuleSource {
158 SourceFile(TreeArc<ast::SourceFile>), 155 SourceFile(ast::SourceFile),
159 Module(TreeArc<ast::Module>), 156 Module(ast::Module),
160} 157}
161 158
162impl ModuleSource { 159impl ModuleSource {
@@ -199,7 +196,7 @@ impl Module {
199 self, 196 self,
200 db: &impl HirDatabase, 197 db: &impl HirDatabase,
201 import: ImportId, 198 import: ImportId,
202 ) -> Either<TreeArc<ast::UseTree>, TreeArc<ast::ExternCrateItem>> { 199 ) -> Either<ast::UseTree, ast::ExternCrateItem> {
203 let src = self.definition_source(db); 200 let src = self.definition_source(db);
204 let (_, source_map) = db.raw_items_with_source_map(src.file_id); 201 let (_, source_map) = db.raw_items_with_source_map(src.file_id);
205 source_map.get(&src.ast, import) 202 source_map.get(&src.ast, import)
@@ -321,8 +318,8 @@ pub struct StructField {
321 318
322#[derive(Debug)] 319#[derive(Debug)]
323pub enum FieldSource { 320pub enum FieldSource {
324 Named(TreeArc<ast::NamedFieldDef>), 321 Named(ast::NamedFieldDef),
325 Pos(TreeArc<ast::PosFieldDef>), 322 Pos(ast::PosFieldDef),
326} 323}
327 324
328impl StructField { 325impl StructField {
@@ -736,7 +733,7 @@ impl ConstData {
736 konst: Const, 733 konst: Const,
737 ) -> Arc<ConstData> { 734 ) -> Arc<ConstData> {
738 let node = konst.source(db).ast; 735 let node = konst.source(db).ast;
739 const_data_for(&*node) 736 const_data_for(&node)
740 } 737 }
741 738
742 pub(crate) fn static_data_query( 739 pub(crate) fn static_data_query(
@@ -744,7 +741,7 @@ impl ConstData {
744 konst: Static, 741 konst: Static,
745 ) -> Arc<ConstData> { 742 ) -> Arc<ConstData> {
746 let node = konst.source(db).ast; 743 let node = konst.source(db).ast;
747 const_data_for(&*node) 744 const_data_for(&node)
748 } 745 }
749} 746}
750 747
diff --git a/crates/ra_hir/src/code_model/docs.rs b/crates/ra_hir/src/code_model/docs.rs
index 007ef315d..a2b4d8e97 100644
--- a/crates/ra_hir/src/code_model/docs.rs
+++ b/crates/ra_hir/src/code_model/docs.rs
@@ -71,21 +71,21 @@ pub(crate) fn documentation_query(
71 def: DocDef, 71 def: DocDef,
72) -> Option<Documentation> { 72) -> Option<Documentation> {
73 match def { 73 match def {
74 DocDef::Module(it) => docs_from_ast(&*it.declaration_source(db)?.ast), 74 DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.ast),
75 DocDef::StructField(it) => match it.source(db).ast { 75 DocDef::StructField(it) => match it.source(db).ast {
76 FieldSource::Named(named) => docs_from_ast(&*named), 76 FieldSource::Named(named) => docs_from_ast(&named),
77 FieldSource::Pos(..) => None, 77 FieldSource::Pos(..) => None,
78 }, 78 },
79 DocDef::Struct(it) => docs_from_ast(&*it.source(db).ast), 79 DocDef::Struct(it) => docs_from_ast(&it.source(db).ast),
80 DocDef::Enum(it) => docs_from_ast(&*it.source(db).ast), 80 DocDef::Enum(it) => docs_from_ast(&it.source(db).ast),
81 DocDef::EnumVariant(it) => docs_from_ast(&*it.source(db).ast), 81 DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).ast),
82 DocDef::Static(it) => docs_from_ast(&*it.source(db).ast), 82 DocDef::Static(it) => docs_from_ast(&it.source(db).ast),
83 DocDef::Const(it) => docs_from_ast(&*it.source(db).ast), 83 DocDef::Const(it) => docs_from_ast(&it.source(db).ast),
84 DocDef::Function(it) => docs_from_ast(&*it.source(db).ast), 84 DocDef::Function(it) => docs_from_ast(&it.source(db).ast),
85 DocDef::Union(it) => docs_from_ast(&*it.source(db).ast), 85 DocDef::Union(it) => docs_from_ast(&it.source(db).ast),
86 DocDef::Trait(it) => docs_from_ast(&*it.source(db).ast), 86 DocDef::Trait(it) => docs_from_ast(&it.source(db).ast),
87 DocDef::TypeAlias(it) => docs_from_ast(&*it.source(db).ast), 87 DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).ast),
88 DocDef::MacroDef(it) => docs_from_ast(&*it.source(db).ast), 88 DocDef::MacroDef(it) => docs_from_ast(&it.source(db).ast),
89 } 89 }
90} 90}
91 91
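
The repetitive `&*it.source(db).ast` to `&it.source(db).ast` churn above is purely mechanical: the `ast` field used to be a `TreeArc` smart pointer that had to be deref'd to reach the node and is now the node itself, so a plain borrow suffices. A toy `Source` wrapper illustrating the new call shape; the `FnDef` and `docs_from_ast` bodies are placeholders:

#[derive(Clone, Debug)]
struct FnDef {
    doc_comment: Option<String>,
}

struct Source<T> {
    file_id: u32,
    ast: T,
}

fn docs_from_ast(node: &FnDef) -> Option<String> {
    node.doc_comment.clone()
}

fn main() {
    let src = Source { file_id: 1, ast: FnDef { doc_comment: Some("/// Entry point".into()) } };
    // With an owned node a plain borrow is enough; there is no smart pointer to deref through.
    println!("{:?} (file {})", docs_from_ast(&src.ast), src.file_id);
}
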
diff --git a/crates/ra_hir/src/code_model/src.rs b/crates/ra_hir/src/code_model/src.rs
index 72451e0e7..32bd9c661 100644
--- a/crates/ra_hir/src/code_model/src.rs
+++ b/crates/ra_hir/src/code_model/src.rs
@@ -1,4 +1,4 @@
1use ra_syntax::{ast, TreeArc}; 1use ra_syntax::ast;
2 2
3use crate::{ 3use crate::{
4 ids::AstItemDef, AstDatabase, Const, DefDatabase, Enum, EnumVariant, FieldSource, Function, 4 ids::AstItemDef, AstDatabase, Const, DefDatabase, Enum, EnumVariant, FieldSource, Function,
@@ -34,7 +34,7 @@ impl Module {
34 pub fn declaration_source( 34 pub fn declaration_source(
35 self, 35 self,
36 db: &(impl DefDatabase + AstDatabase), 36 db: &(impl DefDatabase + AstDatabase),
37 ) -> Option<Source<TreeArc<ast::Module>>> { 37 ) -> Option<Source<ast::Module>> {
38 let def_map = db.crate_def_map(self.krate); 38 let def_map = db.crate_def_map(self.krate);
39 let decl = def_map[self.module_id].declaration?; 39 let decl = def_map[self.module_id].declaration?;
40 let ast = decl.to_node(db); 40 let ast = decl.to_node(db);
@@ -49,62 +49,62 @@ impl HasSource for StructField {
49 } 49 }
50} 50}
51impl HasSource for Struct { 51impl HasSource for Struct {
52 type Ast = TreeArc<ast::StructDef>; 52 type Ast = ast::StructDef;
53 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StructDef>> { 53 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> {
54 self.id.source(db) 54 self.id.source(db)
55 } 55 }
56} 56}
57impl HasSource for Union { 57impl HasSource for Union {
58 type Ast = TreeArc<ast::StructDef>; 58 type Ast = ast::StructDef;
59 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StructDef>> { 59 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> {
60 self.id.source(db) 60 self.id.source(db)
61 } 61 }
62} 62}
63impl HasSource for Enum { 63impl HasSource for Enum {
64 type Ast = TreeArc<ast::EnumDef>; 64 type Ast = ast::EnumDef;
65 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::EnumDef>> { 65 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumDef> {
66 self.id.source(db) 66 self.id.source(db)
67 } 67 }
68} 68}
69impl HasSource for EnumVariant { 69impl HasSource for EnumVariant {
70 type Ast = TreeArc<ast::EnumVariant>; 70 type Ast = ast::EnumVariant;
71 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::EnumVariant>> { 71 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumVariant> {
72 self.source_impl(db) 72 self.source_impl(db)
73 } 73 }
74} 74}
75impl HasSource for Function { 75impl HasSource for Function {
76 type Ast = TreeArc<ast::FnDef>; 76 type Ast = ast::FnDef;
77 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::FnDef>> { 77 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::FnDef> {
78 self.id.source(db) 78 self.id.source(db)
79 } 79 }
80} 80}
81impl HasSource for Const { 81impl HasSource for Const {
82 type Ast = TreeArc<ast::ConstDef>; 82 type Ast = ast::ConstDef;
83 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::ConstDef>> { 83 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ConstDef> {
84 self.id.source(db) 84 self.id.source(db)
85 } 85 }
86} 86}
87impl HasSource for Static { 87impl HasSource for Static {
88 type Ast = TreeArc<ast::StaticDef>; 88 type Ast = ast::StaticDef;
89 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StaticDef>> { 89 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StaticDef> {
90 self.id.source(db) 90 self.id.source(db)
91 } 91 }
92} 92}
93impl HasSource for Trait { 93impl HasSource for Trait {
94 type Ast = TreeArc<ast::TraitDef>; 94 type Ast = ast::TraitDef;
95 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::TraitDef>> { 95 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TraitDef> {
96 self.id.source(db) 96 self.id.source(db)
97 } 97 }
98} 98}
99impl HasSource for TypeAlias { 99impl HasSource for TypeAlias {
100 type Ast = TreeArc<ast::TypeAliasDef>; 100 type Ast = ast::TypeAliasDef;
101 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::TypeAliasDef>> { 101 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TypeAliasDef> {
102 self.id.source(db) 102 self.id.source(db)
103 } 103 }
104} 104}
105impl HasSource for MacroDef { 105impl HasSource for MacroDef {
106 type Ast = TreeArc<ast::MacroCall>; 106 type Ast = ast::MacroCall;
107 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::MacroCall>> { 107 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::MacroCall> {
108 Source { file_id: self.id.0.file_id(), ast: self.id.0.to_node(db) } 108 Source { file_id: self.id.0.file_id(), ast: self.id.0.to_node(db) }
109 } 109 }
110} 110}
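
Every `HasSource` impl above now sets `type Ast` to the AST node type directly instead of `TreeArc<...>`. A reduced sketch of that trait shape, with the `db` parameter dropped and toy `Struct`/`StructDef` types filling in for the real definitions:

#[derive(Clone, Debug)]
struct StructDef {
    name: String,
}

struct Source<T> {
    file_id: u32,
    ast: T,
}

trait HasSource {
    type Ast;
    fn source(self) -> Source<Self::Ast>;
}

#[derive(Clone, Copy)]
struct Struct {
    id: u32,
}

impl HasSource for Struct {
    // The associated type is now the node itself rather than a TreeArc around it.
    type Ast = StructDef;
    fn source(self) -> Source<StructDef> {
        Source { file_id: self.id, ast: StructDef { name: "S".to_string() } }
    }
}

fn main() {
    let src = Struct { id: 7 }.source();
    println!("{} comes from file {}", src.ast.name, src.file_id);
}
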
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index da9f3e32d..358365176 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -2,7 +2,7 @@ use std::sync::Arc;
2 2
3use parking_lot::Mutex; 3use parking_lot::Mutex;
4use ra_db::{salsa, SourceDatabase}; 4use ra_db::{salsa, SourceDatabase};
5use ra_syntax::{ast, Parse, SmolStr, SyntaxNode, TreeArc}; 5use ra_syntax::{ast, Parse, SmolStr, SyntaxNode};
6 6
7use crate::{ 7use crate::{
8 adt::{EnumData, StructData}, 8 adt::{EnumData, StructData},
@@ -62,11 +62,11 @@ pub trait AstDatabase: InternDatabase {
62 62
63 #[salsa::transparent] 63 #[salsa::transparent]
64 #[salsa::invoke(crate::source_id::AstIdMap::file_item_query)] 64 #[salsa::invoke(crate::source_id::AstIdMap::file_item_query)]
65 fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> TreeArc<SyntaxNode>; 65 fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> SyntaxNode;
66 66
67 #[salsa::transparent] 67 #[salsa::transparent]
68 #[salsa::invoke(crate::ids::HirFileId::parse_or_expand_query)] 68 #[salsa::invoke(crate::ids::HirFileId::parse_or_expand_query)]
69 fn parse_or_expand(&self, file_id: HirFileId) -> Option<TreeArc<SyntaxNode>>; 69 fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
70 70
71 #[salsa::invoke(crate::ids::HirFileId::parse_macro_query)] 71 #[salsa::invoke(crate::ids::HirFileId::parse_macro_query)]
72 fn parse_macro(&self, macro_file: ids::MacroFile) -> Option<Parse<SyntaxNode>>; 72 fn parse_macro(&self, macro_file: ids::MacroFile) -> Option<Parse<SyntaxNode>>;
diff --git a/crates/ra_hir/src/diagnostics.rs b/crates/ra_hir/src/diagnostics.rs
index c97f0656d..0290483b3 100644
--- a/crates/ra_hir/src/diagnostics.rs
+++ b/crates/ra_hir/src/diagnostics.rs
@@ -1,6 +1,6 @@
1use std::{any::Any, fmt}; 1use std::{any::Any, fmt};
2 2
3use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, TreeArc}; 3use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange};
4use relative_path::RelativePathBuf; 4use relative_path::RelativePathBuf;
5 5
6use crate::{HirDatabase, HirFileId, Name}; 6use crate::{HirDatabase, HirFileId, Name};
@@ -33,9 +33,9 @@ pub trait AstDiagnostic {
33} 33}
34 34
35impl dyn Diagnostic { 35impl dyn Diagnostic {
36 pub fn syntax_node(&self, db: &impl HirDatabase) -> TreeArc<SyntaxNode> { 36 pub fn syntax_node(&self, db: &impl HirDatabase) -> SyntaxNode {
37 let node = db.parse_or_expand(self.file()).unwrap(); 37 let node = db.parse_or_expand(self.file()).unwrap();
38 self.syntax_node_ptr().to_node(&*node).to_owned() 38 self.syntax_node_ptr().to_node(&node)
39 } 39 }
40 40
41 pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> { 41 pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> {
@@ -143,11 +143,11 @@ impl Diagnostic for MissingFields {
143} 143}
144 144
145impl AstDiagnostic for MissingFields { 145impl AstDiagnostic for MissingFields {
146 type AST = TreeArc<ast::NamedFieldList>; 146 type AST = ast::NamedFieldList;
147 147
148 fn ast(&self, db: &impl HirDatabase) -> Self::AST { 148 fn ast(&self, db: &impl HirDatabase) -> Self::AST {
149 let root = db.parse_or_expand(self.file()).unwrap(); 149 let root = db.parse_or_expand(self.file()).unwrap();
150 let node = self.syntax_node_ptr().to_node(&*root); 150 let node = self.syntax_node_ptr().to_node(&root);
151 ast::NamedFieldList::cast(&node).unwrap().to_owned() 151 ast::NamedFieldList::cast(node).unwrap()
152 } 152 }
153} 153}
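
In the diagnostics change above, the stable `SyntaxNodePtr` is resolved against a freshly parsed owned root and yields an owned node, and `ast::NamedFieldList::cast` now takes that node by value, so the trailing `.to_owned()` goes away. A toy pointer/root pair with the same resolve-to-owned shape; the range-and-text layout here is a stand-in, not the real rowan representation:

#[derive(Clone, Debug)]
struct SyntaxNode {
    range: (usize, usize),
    text: String,
}

struct SyntaxNodePtr {
    range: (usize, usize),
}

impl SyntaxNodePtr {
    // Re-find the node inside the freshly parsed root; the result is owned, not borrowed.
    fn to_node(&self, root: &SyntaxNode) -> SyntaxNode {
        SyntaxNode { range: self.range, text: root.text[self.range.0..self.range.1].to_string() }
    }
}

fn main() {
    let root = SyntaxNode { range: (0, 12), text: "fn main() {}".into() };
    let ptr = SyntaxNodePtr { range: (3, 7) };
    println!("{:?}", ptr.to_node(&root).text);
}
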
diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs
index 3a97d97ce..70af3f119 100644
--- a/crates/ra_hir/src/expr.rs
+++ b/crates/ra_hir/src/expr.rs
@@ -550,7 +550,7 @@ where
550 self.exprs.alloc(block) 550 self.exprs.alloc(block)
551 } 551 }
552 552
553 fn collect_expr(&mut self, expr: &ast::Expr) -> ExprId { 553 fn collect_expr(&mut self, expr: ast::Expr) -> ExprId {
554 let syntax_ptr = SyntaxNodePtr::new(expr.syntax()); 554 let syntax_ptr = SyntaxNodePtr::new(expr.syntax());
555 match expr.kind() { 555 match expr.kind() {
556 ast::ExprKind::IfExpr(e) => { 556 ast::ExprKind::IfExpr(e) => {
@@ -565,7 +565,8 @@ where
565 .map(|b| match b { 565 .map(|b| match b {
566 ast::ElseBranch::Block(it) => self.collect_block(it), 566 ast::ElseBranch::Block(it) => self.collect_block(it),
567 ast::ElseBranch::IfExpr(elif) => { 567 ast::ElseBranch::IfExpr(elif) => {
568 let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap(); 568 let expr: ast::Expr =
569 ast::Expr::cast(elif.syntax().clone()).unwrap();
569 self.collect_expr(expr) 570 self.collect_expr(expr)
570 } 571 }
571 }) 572 })
@@ -582,7 +583,7 @@ where
582 let else_branch = e.else_branch().map(|b| match b { 583 let else_branch = e.else_branch().map(|b| match b {
583 ast::ElseBranch::Block(it) => self.collect_block(it), 584 ast::ElseBranch::Block(it) => self.collect_block(it),
584 ast::ElseBranch::IfExpr(elif) => { 585 ast::ElseBranch::IfExpr(elif) => {
585 let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap(); 586 let expr: ast::Expr = ast::Expr::cast(elif.syntax().clone()).unwrap();
586 self.collect_expr(expr) 587 self.collect_expr(expr)
587 } 588 }
588 }); 589 });
@@ -689,7 +690,7 @@ where
689 let struct_lit = if let Some(nfl) = e.named_field_list() { 690 let struct_lit = if let Some(nfl) = e.named_field_list() {
690 let fields = nfl 691 let fields = nfl
691 .fields() 692 .fields()
692 .inspect(|field| field_ptrs.push(AstPtr::new(*field))) 693 .inspect(|field| field_ptrs.push(AstPtr::new(field)))
693 .map(|field| StructLitField { 694 .map(|field| StructLitField {
694 name: field 695 name: field
695 .name_ref() 696 .name_ref()
@@ -699,7 +700,7 @@ where
699 self.collect_expr(e) 700 self.collect_expr(e)
700 } else if let Some(nr) = field.name_ref() { 701 } else if let Some(nr) = field.name_ref() {
701 // field shorthand 702 // field shorthand
702 let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(nr))); 703 let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(&nr)));
703 self.source_map 704 self.source_map
704 .expr_map 705 .expr_map
705 .insert(SyntaxNodePtr::new(nr.syntax()), id); 706 .insert(SyntaxNodePtr::new(nr.syntax()), id);
@@ -837,7 +838,7 @@ where
837 let ast_id = self 838 let ast_id = self
838 .db 839 .db
839 .ast_id_map(self.current_file_id) 840 .ast_id_map(self.current_file_id)
840 .ast_id(e) 841 .ast_id(&e)
841 .with_file_id(self.current_file_id); 842 .with_file_id(self.current_file_id);
842 843
843 if let Some(path) = e.path().and_then(Path::from_ast) { 844 if let Some(path) = e.path().and_then(Path::from_ast) {
@@ -845,11 +846,11 @@ where
845 let call_id = MacroCallLoc { def: def.id, ast_id }.id(self.db); 846 let call_id = MacroCallLoc { def: def.id, ast_id }.id(self.db);
846 let file_id = call_id.as_file(MacroFileKind::Expr); 847 let file_id = call_id.as_file(MacroFileKind::Expr);
847 if let Some(node) = self.db.parse_or_expand(file_id) { 848 if let Some(node) = self.db.parse_or_expand(file_id) {
848 if let Some(expr) = ast::Expr::cast(&*node) { 849 if let Some(expr) = ast::Expr::cast(node) {
849 log::debug!("macro expansion {}", expr.syntax().debug_dump()); 850 log::debug!("macro expansion {}", expr.syntax().debug_dump());
850 let old_file_id = 851 let old_file_id =
851 std::mem::replace(&mut self.current_file_id, file_id); 852 std::mem::replace(&mut self.current_file_id, file_id);
852 let id = self.collect_expr(&expr); 853 let id = self.collect_expr(expr);
853 self.current_file_id = old_file_id; 854 self.current_file_id = old_file_id;
854 return id; 855 return id;
855 } 856 }
@@ -863,7 +864,7 @@ where
863 } 864 }
864 } 865 }
865 866
866 fn collect_expr_opt(&mut self, expr: Option<&ast::Expr>) -> ExprId { 867 fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId {
867 if let Some(expr) = expr { 868 if let Some(expr) = expr {
868 self.collect_expr(expr) 869 self.collect_expr(expr)
869 } else { 870 } else {
@@ -871,7 +872,7 @@ where
871 } 872 }
872 } 873 }
873 874
874 fn collect_block(&mut self, block: &ast::Block) -> ExprId { 875 fn collect_block(&mut self, block: ast::Block) -> ExprId {
875 let statements = block 876 let statements = block
876 .statements() 877 .statements()
877 .map(|s| match s.kind() { 878 .map(|s| match s.kind() {
@@ -890,7 +891,7 @@ where
890 self.alloc_expr(Expr::Block { statements, tail }, SyntaxNodePtr::new(block.syntax())) 891 self.alloc_expr(Expr::Block { statements, tail }, SyntaxNodePtr::new(block.syntax()))
891 } 892 }
892 893
893 fn collect_block_opt(&mut self, block: Option<&ast::Block>) -> ExprId { 894 fn collect_block_opt(&mut self, block: Option<ast::Block>) -> ExprId {
894 if let Some(block) = block { 895 if let Some(block) = block {
895 self.collect_block(block) 896 self.collect_block(block)
896 } else { 897 } else {
@@ -898,7 +899,7 @@ where
898 } 899 }
899 } 900 }
900 901
901 fn collect_pat(&mut self, pat: &ast::Pat) -> PatId { 902 fn collect_pat(&mut self, pat: ast::Pat) -> PatId {
902 let pattern = match pat.kind() { 903 let pattern = match pat.kind() {
903 ast::PatKind::BindPat(bp) => { 904 ast::PatKind::BindPat(bp) => {
904 let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); 905 let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
@@ -932,7 +933,8 @@ where
932 let mut fields: Vec<_> = field_pat_list 933 let mut fields: Vec<_> = field_pat_list
933 .bind_pats() 934 .bind_pats()
934 .filter_map(|bind_pat| { 935 .filter_map(|bind_pat| {
935 let ast_pat = ast::Pat::cast(bind_pat.syntax()).expect("bind pat is a pat"); 936 let ast_pat =
937 ast::Pat::cast(bind_pat.syntax().clone()).expect("bind pat is a pat");
936 let pat = self.collect_pat(ast_pat); 938 let pat = self.collect_pat(ast_pat);
937 let name = bind_pat.name()?.as_name(); 939 let name = bind_pat.name()?.as_name();
938 Some(FieldPat { name, pat }) 940 Some(FieldPat { name, pat })
@@ -953,11 +955,11 @@ where
953 ast::PatKind::LiteralPat(_) => Pat::Missing, 955 ast::PatKind::LiteralPat(_) => Pat::Missing,
954 ast::PatKind::SlicePat(_) | ast::PatKind::RangePat(_) => Pat::Missing, 956 ast::PatKind::SlicePat(_) | ast::PatKind::RangePat(_) => Pat::Missing,
955 }; 957 };
956 let ptr = AstPtr::new(pat); 958 let ptr = AstPtr::new(&pat);
957 self.alloc_pat(pattern, Either::A(ptr)) 959 self.alloc_pat(pattern, Either::A(ptr))
958 } 960 }
959 961
960 fn collect_pat_opt(&mut self, pat: Option<&ast::Pat>) -> PatId { 962 fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId {
961 if let Some(pat) = pat { 963 if let Some(pat) = pat {
962 self.collect_pat(pat) 964 self.collect_pat(pat)
963 } else { 965 } else {
@@ -965,20 +967,20 @@ where
965 } 967 }
966 } 968 }
967 969
968 fn collect_const_body(&mut self, node: &ast::ConstDef) { 970 fn collect_const_body(&mut self, node: ast::ConstDef) {
969 let body = self.collect_expr_opt(node.body()); 971 let body = self.collect_expr_opt(node.body());
970 self.body_expr = Some(body); 972 self.body_expr = Some(body);
971 } 973 }
972 974
973 fn collect_static_body(&mut self, node: &ast::StaticDef) { 975 fn collect_static_body(&mut self, node: ast::StaticDef) {
974 let body = self.collect_expr_opt(node.body()); 976 let body = self.collect_expr_opt(node.body());
975 self.body_expr = Some(body); 977 self.body_expr = Some(body);
976 } 978 }
977 979
978 fn collect_fn_body(&mut self, node: &ast::FnDef) { 980 fn collect_fn_body(&mut self, node: ast::FnDef) {
979 if let Some(param_list) = node.param_list() { 981 if let Some(param_list) = node.param_list() {
980 if let Some(self_param) = param_list.self_param() { 982 if let Some(self_param) = param_list.self_param() {
981 let ptr = AstPtr::new(self_param); 983 let ptr = AstPtr::new(&self_param);
982 let param_pat = self.alloc_pat( 984 let param_pat = self.alloc_pat(
983 Pat::Bind { 985 Pat::Bind {
984 name: SELF_PARAM, 986 name: SELF_PARAM,
@@ -1027,17 +1029,17 @@ pub(crate) fn body_with_source_map_query(
1027 DefWithBody::Const(ref c) => { 1029 DefWithBody::Const(ref c) => {
1028 let src = c.source(db); 1030 let src = c.source(db);
1029 collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); 1031 collector = ExprCollector::new(def, src.file_id, def.resolver(db), db);
1030 collector.collect_const_body(&src.ast) 1032 collector.collect_const_body(src.ast)
1031 } 1033 }
1032 DefWithBody::Function(ref f) => { 1034 DefWithBody::Function(ref f) => {
1033 let src = f.source(db); 1035 let src = f.source(db);
1034 collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); 1036 collector = ExprCollector::new(def, src.file_id, def.resolver(db), db);
1035 collector.collect_fn_body(&src.ast) 1037 collector.collect_fn_body(src.ast)
1036 } 1038 }
1037 DefWithBody::Static(ref s) => { 1039 DefWithBody::Static(ref s) => {
1038 let src = s.source(db); 1040 let src = s.source(db);
1039 collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); 1041 collector = ExprCollector::new(def, src.file_id, def.resolver(db), db);
1040 collector.collect_static_body(&src.ast) 1042 collector.collect_static_body(src.ast)
1041 } 1043 }
1042 } 1044 }
1043 1045
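
The ExprCollector hunks above switch every `collect_*` method from `&ast::Node` and `Option<&ast::Node>` parameters to by-value nodes, so `src.ast` is handed over directly at the call sites at the bottom of the file. A heavily reduced sketch of that by-value collector shape, with a toy `Expr` and a string table in place of the real arenas:

#[derive(Clone, Debug)]
struct Expr {
    text: String,
}

struct ExprCollector {
    exprs: Vec<String>,
}

impl ExprCollector {
    // Nodes are taken by value now; the caller hands over the owned node directly.
    fn collect_expr(&mut self, expr: Expr) -> usize {
        self.exprs.push(expr.text);
        self.exprs.len() - 1
    }

    // Mirrors `collect_expr_opt(&mut self, expr: Option<ast::Expr>)` above:
    // a missing expression still gets an id.
    fn collect_expr_opt(&mut self, expr: Option<Expr>) -> usize {
        match expr {
            Some(expr) => self.collect_expr(expr),
            None => self.collect_expr(Expr { text: "<missing>".into() }),
        }
    }
}

fn main() {
    let mut collector = ExprCollector { exprs: Vec::new() };
    let id = collector.collect_expr_opt(Some(Expr { text: "1 + 1".into() }));
    println!("expr #{} = {}", id, collector.exprs[id]);
}
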
diff --git a/crates/ra_hir/src/expr/scope.rs b/crates/ra_hir/src/expr/scope.rs
index 28fd52684..6589b782c 100644
--- a/crates/ra_hir/src/expr/scope.rs
+++ b/crates/ra_hir/src/expr/scope.rs
@@ -190,7 +190,7 @@ mod tests {
190 190
191 let (db, _source_root, file_id) = MockDatabase::with_single_file(&code); 191 let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
192 let file = db.parse(file_id).ok().unwrap(); 192 let file = db.parse(file_id).ok().unwrap();
193 let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); 193 let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
194 let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None); 194 let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);
195 195
196 let scopes = analyzer.scopes(); 196 let scopes = analyzer.scopes();
@@ -290,10 +290,10 @@ mod tests {
290 let file = db.parse(file_id).ok().unwrap(); 290 let file = db.parse(file_id).ok().unwrap();
291 let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()) 291 let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
292 .expect("failed to find a name at the target offset"); 292 .expect("failed to find a name at the target offset");
293 let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); 293 let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
294 let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None); 294 let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None);
295 295
296 let local_name_entry = analyzer.resolve_local_name(name_ref).unwrap(); 296 let local_name_entry = analyzer.resolve_local_name(&name_ref).unwrap();
297 let local_name = 297 let local_name =
298 local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); 298 local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
299 assert_eq!(local_name.range(), expected_name.syntax().range()); 299 assert_eq!(local_name.range(), expected_name.syntax().range());
diff --git a/crates/ra_hir/src/expr/validation.rs b/crates/ra_hir/src/expr/validation.rs
index c2a10a0b5..82a06ca25 100644
--- a/crates/ra_hir/src/expr/validation.rs
+++ b/crates/ra_hir/src/expr/validation.rs
@@ -79,7 +79,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
79 .and_then(StructLit::cast) 79 .and_then(StructLit::cast)
80 .and_then(|lit| lit.named_field_list()) 80 .and_then(|lit| lit.named_field_list())
81 { 81 {
82 let field_list_ptr = AstPtr::new(field_list_node); 82 let field_list_ptr = AstPtr::new(&field_list_node);
83 self.sink.push(MissingFields { 83 self.sink.push(MissingFields {
84 file: file_id, 84 file: file_id,
85 field_list: field_list_ptr, 85 field_list: field_list_ptr,
diff --git a/crates/ra_hir/src/generics.rs b/crates/ra_hir/src/generics.rs
index 07a59193f..bcbb4988d 100644
--- a/crates/ra_hir/src/generics.rs
+++ b/crates/ra_hir/src/generics.rs
@@ -76,17 +76,17 @@ impl GenericParams {
76 generics.parent_params = parent.map(|p| db.generic_params(p)); 76 generics.parent_params = parent.map(|p| db.generic_params(p));
77 let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32; 77 let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32;
78 match def { 78 match def {
79 GenericDef::Function(it) => generics.fill(&*it.source(db).ast, start), 79 GenericDef::Function(it) => generics.fill(&it.source(db).ast, start),
80 GenericDef::Struct(it) => generics.fill(&*it.source(db).ast, start), 80 GenericDef::Struct(it) => generics.fill(&it.source(db).ast, start),
81 GenericDef::Union(it) => generics.fill(&*it.source(db).ast, start), 81 GenericDef::Union(it) => generics.fill(&it.source(db).ast, start),
82 GenericDef::Enum(it) => generics.fill(&*it.source(db).ast, start), 82 GenericDef::Enum(it) => generics.fill(&it.source(db).ast, start),
83 GenericDef::Trait(it) => { 83 GenericDef::Trait(it) => {
84 // traits get the Self type as an implicit first type parameter 84 // traits get the Self type as an implicit first type parameter
85 generics.params.push(GenericParam { idx: start, name: SELF_TYPE, default: None }); 85 generics.params.push(GenericParam { idx: start, name: SELF_TYPE, default: None });
86 generics.fill(&*it.source(db).ast, start + 1); 86 generics.fill(&it.source(db).ast, start + 1);
87 } 87 }
88 GenericDef::TypeAlias(it) => generics.fill(&*it.source(db).ast, start), 88 GenericDef::TypeAlias(it) => generics.fill(&it.source(db).ast, start),
89 GenericDef::ImplBlock(it) => generics.fill(&*it.source(db).ast, start), 89 GenericDef::ImplBlock(it) => generics.fill(&it.source(db).ast, start),
90 GenericDef::EnumVariant(_) => {} 90 GenericDef::EnumVariant(_) => {}
91 } 91 }
92 92
@@ -102,9 +102,9 @@ impl GenericParams {
102 } 102 }
103 } 103 }
104 104
105 fn fill_params(&mut self, params: &ast::TypeParamList, start: u32) { 105 fn fill_params(&mut self, params: ast::TypeParamList, start: u32) {
106 for (idx, type_param) in params.type_params().enumerate() { 106 for (idx, type_param) in params.type_params().enumerate() {
107 let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing); 107 let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
108 let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast); 108 let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast);
109 109
110 let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default }; 110 let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default };
@@ -121,7 +121,7 @@ impl GenericParams {
121 } 121 }
122 } 122 }
123 123
124 fn fill_where_predicates(&mut self, where_clause: &ast::WhereClause) { 124 fn fill_where_predicates(&mut self, where_clause: ast::WhereClause) {
125 for pred in where_clause.predicates() { 125 for pred in where_clause.predicates() {
126 let type_ref = match pred.type_ref() { 126 let type_ref = match pred.type_ref() {
127 Some(type_ref) => type_ref, 127 Some(type_ref) => type_ref,
@@ -134,7 +134,7 @@ impl GenericParams {
134 } 134 }
135 } 135 }
136 136
137 fn add_where_predicate_from_bound(&mut self, bound: &ast::TypeBound, type_ref: TypeRef) { 137 fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) {
138 let path = bound 138 let path = bound
139 .type_ref() 139 .type_ref()
140 .and_then(|tr| match tr.kind() { 140 .and_then(|tr| match tr.kind() {
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index 83f5c3f39..05a18eb56 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -6,7 +6,7 @@ use std::{
6use mbe::MacroRules; 6use mbe::MacroRules;
7use ra_db::{salsa, FileId}; 7use ra_db::{salsa, FileId};
8use ra_prof::profile; 8use ra_prof::profile;
9use ra_syntax::{ast, AstNode, Parse, SyntaxNode, TreeArc}; 9use ra_syntax::{ast, AstNode, Parse, SyntaxNode};
10 10
11use crate::{AstDatabase, AstId, DefDatabase, FileAstId, InternDatabase, Module, Source}; 11use crate::{AstDatabase, AstId, DefDatabase, FileAstId, InternDatabase, Module, Source};
12 12
@@ -58,11 +58,11 @@ impl HirFileId {
58 pub(crate) fn parse_or_expand_query( 58 pub(crate) fn parse_or_expand_query(
59 db: &impl AstDatabase, 59 db: &impl AstDatabase,
60 file_id: HirFileId, 60 file_id: HirFileId,
61 ) -> Option<TreeArc<SyntaxNode>> { 61 ) -> Option<SyntaxNode> {
62 match file_id.0 { 62 match file_id.0 {
63 HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().to_owned()), 63 HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
64 HirFileIdRepr::Macro(macro_file) => { 64 HirFileIdRepr::Macro(macro_file) => {
65 db.parse_macro(macro_file).map(|it| it.tree().to_owned()) 65 db.parse_macro(macro_file).map(|it| it.syntax_node())
66 } 66 }
67 } 67 }
68 } 68 }
@@ -123,7 +123,7 @@ pub struct MacroDefId(pub(crate) AstId<ast::MacroCall>);
123pub(crate) fn macro_def_query(db: &impl AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> { 123pub(crate) fn macro_def_query(db: &impl AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> {
124 let macro_call = id.0.to_node(db); 124 let macro_call = id.0.to_node(db);
125 let arg = macro_call.token_tree()?; 125 let arg = macro_call.token_tree()?;
126 let (tt, _) = mbe::ast_to_token_tree(arg).or_else(|| { 126 let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| {
127 log::warn!("fail on macro_def to token tree: {:#?}", arg); 127 log::warn!("fail on macro_def to token tree: {:#?}", arg);
128 None 128 None
129 })?; 129 })?;
@@ -138,7 +138,7 @@ pub(crate) fn macro_arg_query(db: &impl AstDatabase, id: MacroCallId) -> Option<
138 let loc = id.loc(db); 138 let loc = id.loc(db);
139 let macro_call = loc.ast_id.to_node(db); 139 let macro_call = loc.ast_id.to_node(db);
140 let arg = macro_call.token_tree()?; 140 let arg = macro_call.token_tree()?;
141 let (tt, _) = mbe::ast_to_token_tree(arg)?; 141 let (tt, _) = mbe::ast_to_token_tree(&arg)?;
142 Some(Arc::new(tt)) 142 Some(Arc::new(tt))
143} 143}
144 144
@@ -262,7 +262,7 @@ pub(crate) trait AstItemDef<N: AstNode>: salsa::InternKey + Clone {
262 let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) }; 262 let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) };
263 Self::intern(ctx.db, loc) 263 Self::intern(ctx.db, loc)
264 } 264 }
265 fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source<TreeArc<N>> { 265 fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source<N> {
266 let loc = self.lookup_intern(db); 266 let loc = self.lookup_intern(db);
267 let ast = loc.ast_id.to_node(db); 267 let ast = loc.ast_id.to_node(db);
268 Source { file_id: loc.ast_id.file_id(), ast } 268 Source { file_id: loc.ast_id.file_id(), ast }
diff --git a/crates/ra_hir/src/impl_block.rs b/crates/ra_hir/src/impl_block.rs
index ce134b27a..8e62cf66d 100644
--- a/crates/ra_hir/src/impl_block.rs
+++ b/crates/ra_hir/src/impl_block.rs
@@ -4,7 +4,7 @@ use std::sync::Arc;
4use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; 4use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
5use ra_syntax::{ 5use ra_syntax::{
6 ast::{self, AstNode}, 6 ast::{self, AstNode},
7 AstPtr, SourceFile, TreeArc, 7 AstPtr, SourceFile,
8}; 8};
9 9
10use crate::{ 10use crate::{
@@ -28,9 +28,9 @@ impl ImplSourceMap {
28 self.map.insert(impl_id, AstPtr::new(impl_block)) 28 self.map.insert(impl_id, AstPtr::new(impl_block))
29 } 29 }
30 30
31 pub fn get(&self, source: &ModuleSource, impl_id: ImplId) -> TreeArc<ast::ImplBlock> { 31 pub fn get(&self, source: &ModuleSource, impl_id: ImplId) -> ast::ImplBlock {
32 let file = match source { 32 let file = match source {
33 ModuleSource::SourceFile(file) => &*file, 33 ModuleSource::SourceFile(file) => file.clone(),
34 ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), 34 ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(),
35 }; 35 };
36 36
@@ -45,8 +45,8 @@ pub struct ImplBlock {
45} 45}
46 46
47impl HasSource for ImplBlock { 47impl HasSource for ImplBlock {
48 type Ast = TreeArc<ast::ImplBlock>; 48 type Ast = ast::ImplBlock;
49 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::ImplBlock>> { 49 fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ImplBlock> {
50 let source_map = db.impls_in_module_with_source_map(self.module).1; 50 let source_map = db.impls_in_module_with_source_map(self.module).1;
51 let src = self.module.definition_source(db); 51 let src = self.module.definition_source(db);
52 Source { file_id: src.file_id, ast: source_map.get(&src.ast, self.impl_id) } 52 Source { file_id: src.file_id, ast: source_map.get(&src.ast, self.impl_id) }
@@ -132,9 +132,9 @@ impl ImplData {
132 item_list 132 item_list
133 .impl_items() 133 .impl_items()
134 .map(|item_node| match item_node.kind() { 134 .map(|item_node| match item_node.kind() {
135 ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(), 135 ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(&it) }.into(),
136 ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(), 136 ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(),
137 ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(), 137 ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(),
138 }) 138 })
139 .collect() 139 .collect()
140 } else { 140 } else {
@@ -202,20 +202,20 @@ impl ModuleImplBlocks {
202 202
203 let src = m.module.definition_source(db); 203 let src = m.module.definition_source(db);
204 let node = match &src.ast { 204 let node = match &src.ast {
205 ModuleSource::SourceFile(node) => node.syntax(), 205 ModuleSource::SourceFile(node) => node.syntax().clone(),
206 ModuleSource::Module(node) => { 206 ModuleSource::Module(node) => {
207 node.item_list().expect("inline module should have item list").syntax() 207 node.item_list().expect("inline module should have item list").syntax().clone()
208 } 208 }
209 }; 209 };
210 210
211 for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) { 211 for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) {
212 let impl_block = ImplData::from_ast(db, src.file_id, m.module, impl_block_ast); 212 let impl_block = ImplData::from_ast(db, src.file_id, m.module, &impl_block_ast);
213 let id = m.impls.alloc(impl_block); 213 let id = m.impls.alloc(impl_block);
214 for &impl_item in &m.impls[id].items { 214 for &impl_item in &m.impls[id].items {
215 m.impls_by_def.insert(impl_item, id); 215 m.impls_by_def.insert(impl_item, id);
216 } 216 }
217 217
218 source_map.insert(id, impl_block_ast); 218 source_map.insert(id, &impl_block_ast);
219 } 219 }
220 220
221 m 221 m
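Annotation: `ImplSourceMap::get` now returns an owned `ast::ImplBlock` and `insert` takes the node by reference. A rough sketch of that shape, assuming a source map that stores only a lightweight pointer and re-finds the node on lookup (the types and the offset-based lookup are hypothetical, not the real `AstPtr` machinery):

    use std::collections::HashMap;

    #[derive(Clone, Debug)]
    struct ImplBlockNode {
        offset: usize,
        text: String,
    }

    #[derive(Default)]
    struct ImplSourceMap {
        map: HashMap<u32, usize>, // ImplId -> node offset, playing the role of AstPtr
    }

    impl ImplSourceMap {
        fn insert(&mut self, impl_id: u32, node: &ImplBlockNode) {
            self.map.insert(impl_id, node.offset); // only needs a borrow
        }

        fn get(&self, file: &[ImplBlockNode], impl_id: u32) -> ImplBlockNode {
            let offset = self.map[&impl_id];
            // Old: returned TreeArc<ast::ImplBlock>; new: an owned, cheap node.
            file.iter().find(|n| n.offset == offset).unwrap().clone()
        }
    }

    fn main() {
        let file = vec![ImplBlockNode { offset: 10, text: "impl Foo {}".into() }];
        let mut map = ImplSourceMap::default();
        map.insert(0, &file[0]);
        println!("{:?}", map.get(&file, 0));
    }
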
diff --git a/crates/ra_hir/src/lang_item.rs b/crates/ra_hir/src/lang_item.rs
index 0443d4d9a..fd6609fb8 100644
--- a/crates/ra_hir/src/lang_item.rs
+++ b/crates/ra_hir/src/lang_item.rs
@@ -1,7 +1,7 @@
1use rustc_hash::FxHashMap; 1use rustc_hash::FxHashMap;
2use std::sync::Arc; 2use std::sync::Arc;
3 3
4use ra_syntax::{ast::AttrsOwner, SmolStr, TreeArc}; 4use ra_syntax::{ast::AttrsOwner, SmolStr};
5 5
6use crate::{ 6use crate::{
7 AstDatabase, Crate, DefDatabase, Enum, Function, HasSource, HirDatabase, ImplBlock, Module, 7 AstDatabase, Crate, DefDatabase, Enum, Function, HasSource, HirDatabase, ImplBlock, Module,
@@ -95,7 +95,7 @@ impl LangItems {
95 // Look for impl targets 95 // Look for impl targets
96 for impl_block in module.impl_blocks(db) { 96 for impl_block in module.impl_blocks(db) {
97 let src = impl_block.source(db); 97 let src = impl_block.source(db);
98 if let Some(lang_item_name) = lang_item_name(&*src.ast) { 98 if let Some(lang_item_name) = lang_item_name(&src.ast) {
99 self.items 99 self.items
100 .entry(lang_item_name) 100 .entry(lang_item_name)
101 .or_insert_with(|| LangItemTarget::ImplBlock(impl_block)); 101 .or_insert_with(|| LangItemTarget::ImplBlock(impl_block));
@@ -137,11 +137,11 @@ impl LangItems {
137 item: T, 137 item: T,
138 constructor: fn(T) -> LangItemTarget, 138 constructor: fn(T) -> LangItemTarget,
139 ) where 139 ) where
140 T: Copy + HasSource<Ast = TreeArc<N>>, 140 T: Copy + HasSource<Ast = N>,
141 N: AttrsOwner, 141 N: AttrsOwner,
142 { 142 {
143 let node = item.source(db).ast; 143 let node = item.source(db).ast;
144 if let Some(lang_item_name) = lang_item_name(&*node) { 144 if let Some(lang_item_name) = lang_item_name(&node) {
145 self.items.entry(lang_item_name).or_insert_with(|| constructor(item)); 145 self.items.entry(lang_item_name).or_insert_with(|| constructor(item));
146 } 146 }
147 } 147 }
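Annotation: in lang_item.rs the generic bound moves from `T: HasSource<Ast = TreeArc<N>>` to `T: HasSource<Ast = N>`, so the node can be used directly (`&node` instead of `&*node`). A compact sketch with illustrative trait and type names, not the real hir definitions:

    trait AttrsOwner {
        fn attr(&self) -> Option<&str>;
    }

    trait HasSource {
        type Ast;
        fn source_ast(&self) -> Self::Ast;
    }

    #[derive(Clone)]
    struct FnDefNode {
        lang_attr: Option<String>,
    }

    impl AttrsOwner for FnDefNode {
        fn attr(&self) -> Option<&str> {
            self.lang_attr.as_deref()
        }
    }

    struct Function {
        node: FnDefNode,
    }

    impl HasSource for Function {
        type Ast = FnDefNode;
        fn source_ast(&self) -> FnDefNode {
            self.node.clone()
        }
    }

    // Old bound (sketch): T: HasSource<Ast = TreeArc<N>>
    fn lang_item_name<T, N>(item: &T) -> Option<String>
    where
        T: HasSource<Ast = N>,
        N: AttrsOwner,
    {
        let node = item.source_ast();      // owned node, no wrapper to deref
        node.attr().map(|s| s.to_string()) // `&node` is enough
    }

    fn main() {
        let f = Function { node: FnDefNode { lang_attr: Some("owned_box".into()) } };
        println!("{:?}", lang_item_name(&f));
    }
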
diff --git a/crates/ra_hir/src/name.rs b/crates/ra_hir/src/name.rs
index 40c9d6002..c589f8aba 100644
--- a/crates/ra_hir/src/name.rs
+++ b/crates/ra_hir/src/name.rs
@@ -75,7 +75,7 @@ impl AsName for ast::Name {
75 } 75 }
76} 76}
77 77
78impl<'a> AsName for ast::FieldKind<'a> { 78impl AsName for ast::FieldKind {
79 fn as_name(&self) -> Name { 79 fn as_name(&self) -> Name {
80 match self { 80 match self {
81 ast::FieldKind::Name(nr) => nr.as_name(), 81 ast::FieldKind::Name(nr) => nr.as_name(),
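Annotation: `impl<'a> AsName for ast::FieldKind<'a>` becomes `impl AsName for ast::FieldKind` because the enum no longer borrows from the tree. A tiny sketch of the pattern with toy types:

    #[derive(Clone)]
    struct NameRef(String);

    // Old: enum FieldKind<'a> { Name(&'a NameRef), ... } forced a lifetime
    // parameter on every impl; owning the pieces removes it.
    #[derive(Clone)]
    enum FieldKind {
        Name(NameRef),
        Index(usize),
    }

    trait AsName {
        fn as_name(&self) -> String;
    }

    impl AsName for FieldKind {
        fn as_name(&self) -> String {
            match self {
                FieldKind::Name(nr) => nr.0.clone(),
                FieldKind::Index(i) => i.to_string(),
            }
        }
    }

    fn main() {
        println!("{}", FieldKind::Name(NameRef("x".into())).as_name());
    }
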
diff --git a/crates/ra_hir/src/nameres/raw.rs b/crates/ra_hir/src/nameres/raw.rs
index 46b2bef5b..8517f3c43 100644
--- a/crates/ra_hir/src/nameres/raw.rs
+++ b/crates/ra_hir/src/nameres/raw.rs
@@ -3,7 +3,7 @@ use std::{ops::Index, sync::Arc};
3use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; 3use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
4use ra_syntax::{ 4use ra_syntax::{
5 ast::{self, AttrsOwner, NameOwner}, 5 ast::{self, AttrsOwner, NameOwner},
6 AstNode, AstPtr, SmolStr, SourceFile, TreeArc, 6 AstNode, AstPtr, SmolStr, SourceFile,
7}; 7};
8use test_utils::tested_by; 8use test_utils::tested_by;
9 9
@@ -32,7 +32,7 @@ pub struct ImportSourceMap {
32} 32}
33 33
34type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>; 34type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>;
35type ImportSource = Either<TreeArc<ast::UseTree>, TreeArc<ast::ExternCrateItem>>; 35type ImportSource = Either<ast::UseTree, ast::ExternCrateItem>;
36 36
37impl ImportSourcePtr { 37impl ImportSourcePtr {
38 fn to_node(self, file: &SourceFile) -> ImportSource { 38 fn to_node(self, file: &SourceFile) -> ImportSource {
@@ -50,11 +50,11 @@ impl ImportSourceMap {
50 50
51 pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource { 51 pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource {
52 let file = match source { 52 let file = match source {
53 ModuleSource::SourceFile(file) => &*file, 53 ModuleSource::SourceFile(file) => file.clone(),
54 ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), 54 ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(),
55 }; 55 };
56 56
57 self.map[import].to_node(file) 57 self.map[import].to_node(&file)
58 } 58 }
59} 59}
60 60
@@ -76,8 +76,8 @@ impl RawItems {
76 source_map: ImportSourceMap::default(), 76 source_map: ImportSourceMap::default(),
77 }; 77 };
78 if let Some(node) = db.parse_or_expand(file_id) { 78 if let Some(node) = db.parse_or_expand(file_id) {
79 if let Some(source_file) = ast::SourceFile::cast(&node) { 79 if let Some(source_file) = ast::SourceFile::cast(node) {
80 collector.process_module(None, &*source_file); 80 collector.process_module(None, source_file);
81 } 81 }
82 } 82 }
83 (Arc::new(collector.raw_items), Arc::new(collector.source_map)) 83 (Arc::new(collector.raw_items), Arc::new(collector.source_map))
@@ -188,7 +188,7 @@ struct RawItemsCollector {
188} 188}
189 189
190impl RawItemsCollector { 190impl RawItemsCollector {
191 fn process_module(&mut self, current_module: Option<Module>, body: &impl ast::ModuleItemOwner) { 191 fn process_module(&mut self, current_module: Option<Module>, body: impl ast::ModuleItemOwner) {
192 for item_or_macro in body.items_with_macros() { 192 for item_or_macro in body.items_with_macros() {
193 match item_or_macro { 193 match item_or_macro {
194 ast::ItemOrMacro::Macro(m) => self.add_macro(current_module, m), 194 ast::ItemOrMacro::Macro(m) => self.add_macro(current_module, m),
@@ -197,7 +197,7 @@ impl RawItemsCollector {
197 } 197 }
198 } 198 }
199 199
200 fn add_item(&mut self, current_module: Option<Module>, item: &ast::ModuleItem) { 200 fn add_item(&mut self, current_module: Option<Module>, item: ast::ModuleItem) {
201 let (kind, name) = match item.kind() { 201 let (kind, name) = match item.kind() {
202 ast::ModuleItemKind::Module(module) => { 202 ast::ModuleItemKind::Module(module) => {
203 self.add_module(current_module, module); 203 self.add_module(current_module, module);
@@ -216,7 +216,7 @@ impl RawItemsCollector {
216 return; 216 return;
217 } 217 }
218 ast::ModuleItemKind::StructDef(it) => { 218 ast::ModuleItemKind::StructDef(it) => {
219 let id = self.source_ast_id_map.ast_id(it); 219 let id = self.source_ast_id_map.ast_id(&it);
220 let name = it.name(); 220 let name = it.name();
221 if it.is_union() { 221 if it.is_union() {
222 (DefKind::Union(id), name) 222 (DefKind::Union(id), name)
@@ -225,22 +225,22 @@ impl RawItemsCollector {
225 } 225 }
226 } 226 }
227 ast::ModuleItemKind::EnumDef(it) => { 227 ast::ModuleItemKind::EnumDef(it) => {
228 (DefKind::Enum(self.source_ast_id_map.ast_id(it)), it.name()) 228 (DefKind::Enum(self.source_ast_id_map.ast_id(&it)), it.name())
229 } 229 }
230 ast::ModuleItemKind::FnDef(it) => { 230 ast::ModuleItemKind::FnDef(it) => {
231 (DefKind::Function(self.source_ast_id_map.ast_id(it)), it.name()) 231 (DefKind::Function(self.source_ast_id_map.ast_id(&it)), it.name())
232 } 232 }
233 ast::ModuleItemKind::TraitDef(it) => { 233 ast::ModuleItemKind::TraitDef(it) => {
234 (DefKind::Trait(self.source_ast_id_map.ast_id(it)), it.name()) 234 (DefKind::Trait(self.source_ast_id_map.ast_id(&it)), it.name())
235 } 235 }
236 ast::ModuleItemKind::TypeAliasDef(it) => { 236 ast::ModuleItemKind::TypeAliasDef(it) => {
237 (DefKind::TypeAlias(self.source_ast_id_map.ast_id(it)), it.name()) 237 (DefKind::TypeAlias(self.source_ast_id_map.ast_id(&it)), it.name())
238 } 238 }
239 ast::ModuleItemKind::ConstDef(it) => { 239 ast::ModuleItemKind::ConstDef(it) => {
240 (DefKind::Const(self.source_ast_id_map.ast_id(it)), it.name()) 240 (DefKind::Const(self.source_ast_id_map.ast_id(&it)), it.name())
241 } 241 }
242 ast::ModuleItemKind::StaticDef(it) => { 242 ast::ModuleItemKind::StaticDef(it) => {
243 (DefKind::Static(self.source_ast_id_map.ast_id(it)), it.name()) 243 (DefKind::Static(self.source_ast_id_map.ast_id(&it)), it.name())
244 } 244 }
245 }; 245 };
246 if let Some(name) = name { 246 if let Some(name) = name {
@@ -250,14 +250,14 @@ impl RawItemsCollector {
250 } 250 }
251 } 251 }
252 252
253 fn add_module(&mut self, current_module: Option<Module>, module: &ast::Module) { 253 fn add_module(&mut self, current_module: Option<Module>, module: ast::Module) {
254 let name = match module.name() { 254 let name = match module.name() {
255 Some(it) => it.as_name(), 255 Some(it) => it.as_name(),
256 None => return, 256 None => return,
257 }; 257 };
258 258
259 let attr_path = extract_mod_path_attribute(module); 259 let attr_path = extract_mod_path_attribute(&module);
260 let ast_id = self.source_ast_id_map.ast_id(module); 260 let ast_id = self.source_ast_id_map.ast_id(&module);
261 if module.has_semi() { 261 if module.has_semi() {
262 let item = 262 let item =
263 self.raw_items.modules.alloc(ModuleData::Declaration { name, ast_id, attr_path }); 263 self.raw_items.modules.alloc(ModuleData::Declaration { name, ast_id, attr_path });
@@ -278,10 +278,10 @@ impl RawItemsCollector {
278 tested_by!(name_res_works_for_broken_modules); 278 tested_by!(name_res_works_for_broken_modules);
279 } 279 }
280 280
281 fn add_use_item(&mut self, current_module: Option<Module>, use_item: &ast::UseItem) { 281 fn add_use_item(&mut self, current_module: Option<Module>, use_item: ast::UseItem) {
282 let is_prelude = use_item.has_atom_attr("prelude_import"); 282 let is_prelude = use_item.has_atom_attr("prelude_import");
283 283
284 Path::expand_use_item(use_item, |path, use_tree, is_glob, alias| { 284 Path::expand_use_item(&use_item, |path, use_tree, is_glob, alias| {
285 let import_data = 285 let import_data =
286 ImportData { path, alias, is_glob, is_prelude, is_extern_crate: false }; 286 ImportData { path, alias, is_glob, is_prelude, is_extern_crate: false };
287 self.push_import(current_module, import_data, Either::A(AstPtr::new(use_tree))); 287 self.push_import(current_module, import_data, Either::A(AstPtr::new(use_tree)));
@@ -291,11 +291,11 @@ impl RawItemsCollector {
291 fn add_extern_crate_item( 291 fn add_extern_crate_item(
292 &mut self, 292 &mut self,
293 current_module: Option<Module>, 293 current_module: Option<Module>,
294 extern_crate: &ast::ExternCrateItem, 294 extern_crate: ast::ExternCrateItem,
295 ) { 295 ) {
296 if let Some(name_ref) = extern_crate.name_ref() { 296 if let Some(name_ref) = extern_crate.name_ref() {
297 let path = Path::from_name_ref(name_ref); 297 let path = Path::from_name_ref(&name_ref);
298 let alias = extern_crate.alias().and_then(|a| a.name()).map(AsName::as_name); 298 let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name());
299 let import_data = ImportData { 299 let import_data = ImportData {
300 path, 300 path,
301 alias, 301 alias,
@@ -303,18 +303,18 @@ impl RawItemsCollector {
303 is_prelude: false, 303 is_prelude: false,
304 is_extern_crate: true, 304 is_extern_crate: true,
305 }; 305 };
306 self.push_import(current_module, import_data, Either::B(AstPtr::new(extern_crate))); 306 self.push_import(current_module, import_data, Either::B(AstPtr::new(&extern_crate)));
307 } 307 }
308 } 308 }
309 309
310 fn add_macro(&mut self, current_module: Option<Module>, m: &ast::MacroCall) { 310 fn add_macro(&mut self, current_module: Option<Module>, m: ast::MacroCall) {
311 let path = match m.path().and_then(Path::from_ast) { 311 let path = match m.path().and_then(Path::from_ast) {
312 Some(it) => it, 312 Some(it) => it,
313 _ => return, 313 _ => return,
314 }; 314 };
315 315
316 let name = m.name().map(|it| it.as_name()); 316 let name = m.name().map(|it| it.as_name());
317 let ast_id = self.source_ast_id_map.ast_id(m); 317 let ast_id = self.source_ast_id_map.ast_id(&m);
318 let export = m.has_atom_attr("macro_export"); 318 let export = m.has_atom_attr("macro_export");
319 let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export }); 319 let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export });
320 self.push_item(current_module, RawItem::Macro(m)); 320 self.push_item(current_module, RawItem::Macro(m));
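Annotation: the raw-items collector now takes AST nodes by value (`item: ast::ModuleItem`, `module: ast::Module`, ...) and borrows them at call sites that only need to read (`ast_id(&it)`). A minimal sketch of that calling convention, with simplified stand-in types:

    #[derive(Clone)]
    struct ModuleItem {
        name: String,
    }

    struct AstIdMap {
        names: Vec<String>,
    }

    impl AstIdMap {
        // Only reads the node, so it keeps taking a reference.
        fn ast_id(&mut self, item: &ModuleItem) -> usize {
            self.names.push(item.name.clone());
            self.names.len() - 1
        }
    }

    struct Collector {
        map: AstIdMap,
        items: Vec<(usize, String)>,
    }

    impl Collector {
        // Old: fn add_item(&mut self, item: &ast::ModuleItem)
        // New: take the owned node; it is cheap to move or clone.
        fn add_item(&mut self, item: ModuleItem) {
            let id = self.map.ast_id(&item);  // borrow where a reference suffices
            self.items.push((id, item.name)); // then consume the owned node
        }
    }

    fn main() {
        let mut c = Collector { map: AstIdMap { names: vec![] }, items: vec![] };
        c.add_item(ModuleItem { name: "foo".into() });
        println!("{:?}", c.items);
    }
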
diff --git a/crates/ra_hir/src/path.rs b/crates/ra_hir/src/path.rs
index bce9d2d4b..882db7681 100644
--- a/crates/ra_hir/src/path.rs
+++ b/crates/ra_hir/src/path.rs
@@ -47,9 +47,9 @@ pub enum PathKind {
47 47
48impl Path { 48impl Path {
49 /// Calls `cb` with all paths, represented by this use item. 49 /// Calls `cb` with all paths, represented by this use item.
50 pub fn expand_use_item<'a>( 50 pub fn expand_use_item(
51 item: &'a ast::UseItem, 51 item: &ast::UseItem,
52 mut cb: impl FnMut(Path, &'a ast::UseTree, bool, Option<Name>), 52 mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>),
53 ) { 53 ) {
54 if let Some(tree) = item.use_tree() { 54 if let Some(tree) = item.use_tree() {
55 expand_use_tree(None, tree, &mut cb); 55 expand_use_tree(None, tree, &mut cb);
@@ -57,7 +57,7 @@ impl Path {
57 } 57 }
58 58
59 /// Converts an `ast::Path` to `Path`. Works with use trees. 59 /// Converts an `ast::Path` to `Path`. Works with use trees.
60 pub fn from_ast(mut path: &ast::Path) -> Option<Path> { 60 pub fn from_ast(mut path: ast::Path) -> Option<Path> {
61 let mut kind = PathKind::Plain; 61 let mut kind = PathKind::Plain;
62 let mut segments = Vec::new(); 62 let mut segments = Vec::new();
63 loop { 63 loop {
@@ -87,7 +87,7 @@ impl Path {
87 break; 87 break;
88 } 88 }
89 } 89 }
90 path = match qualifier(path) { 90 path = match qualifier(&path) {
91 Some(it) => it, 91 Some(it) => it,
92 None => break, 92 None => break,
93 }; 93 };
@@ -95,7 +95,7 @@ impl Path {
95 segments.reverse(); 95 segments.reverse();
96 return Some(Path { kind, segments }); 96 return Some(Path { kind, segments });
97 97
98 fn qualifier(path: &ast::Path) -> Option<&ast::Path> { 98 fn qualifier(path: &ast::Path) -> Option<ast::Path> {
99 if let Some(q) = path.qualifier() { 99 if let Some(q) = path.qualifier() {
100 return Some(q); 100 return Some(q);
101 } 101 }
@@ -136,7 +136,7 @@ impl Path {
136} 136}
137 137
138impl GenericArgs { 138impl GenericArgs {
139 pub(crate) fn from_ast(node: &ast::TypeArgList) -> Option<GenericArgs> { 139 pub(crate) fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> {
140 let mut args = Vec::new(); 140 let mut args = Vec::new();
141 for type_arg in node.type_args() { 141 for type_arg in node.type_args() {
142 let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); 142 let type_ref = TypeRef::from_ast_opt(type_arg.type_ref());
@@ -160,10 +160,10 @@ impl From<Name> for Path {
160 } 160 }
161} 161}
162 162
163fn expand_use_tree<'a>( 163fn expand_use_tree(
164 prefix: Option<Path>, 164 prefix: Option<Path>,
165 tree: &'a ast::UseTree, 165 tree: ast::UseTree,
166 cb: &mut impl FnMut(Path, &'a ast::UseTree, bool, Option<Name>), 166 cb: &mut impl FnMut(Path, &ast::UseTree, bool, Option<Name>),
167) { 167) {
168 if let Some(use_tree_list) = tree.use_tree_list() { 168 if let Some(use_tree_list) = tree.use_tree_list() {
169 let prefix = match tree.path() { 169 let prefix = match tree.path() {
@@ -188,7 +188,7 @@ fn expand_use_tree<'a>(
188 if let Some(segment) = ast_path.segment() { 188 if let Some(segment) = ast_path.segment() {
189 if segment.kind() == Some(ast::PathSegmentKind::SelfKw) { 189 if segment.kind() == Some(ast::PathSegmentKind::SelfKw) {
190 if let Some(prefix) = prefix { 190 if let Some(prefix) = prefix {
191 cb(prefix, tree, false, alias); 191 cb(prefix, &tree, false, alias);
192 return; 192 return;
193 } 193 }
194 } 194 }
@@ -196,7 +196,7 @@ fn expand_use_tree<'a>(
196 } 196 }
197 if let Some(path) = convert_path(prefix, ast_path) { 197 if let Some(path) = convert_path(prefix, ast_path) {
198 let is_glob = tree.has_star(); 198 let is_glob = tree.has_star();
199 cb(path, tree, is_glob, alias) 199 cb(path, &tree, is_glob, alias)
200 } 200 }
201 // FIXME: report errors somewhere 201 // FIXME: report errors somewhere
202 // We get here if we do 202 // We get here if we do
@@ -204,7 +204,7 @@ fn expand_use_tree<'a>(
204 } 204 }
205} 205}
206 206
207fn convert_path(prefix: Option<Path>, path: &ast::Path) -> Option<Path> { 207fn convert_path(prefix: Option<Path>, path: ast::Path) -> Option<Path> {
208 let prefix = 208 let prefix =
209 if let Some(qual) = path.qualifier() { Some(convert_path(prefix, qual)?) } else { prefix }; 209 if let Some(qual) = path.qualifier() { Some(convert_path(prefix, qual)?) } else { prefix };
210 let segment = path.segment()?; 210 let segment = path.segment()?;
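Annotation: `Path::from_ast` now takes `ast::Path` by value and its `qualifier` helper returns an owned node, so the loop reassigns `path` instead of narrowing a borrow. A toy sketch of that loop shape (the `Seg` type stands in for `ast::Path`):

    #[derive(Clone)]
    struct Seg {
        name: String,
        qualifier: Option<Box<Seg>>,
    }

    // Old signature returned Option<&Seg>; returning an owned clone keeps
    // the caller's loop free of borrow-lifetime gymnastics.
    fn qualifier(path: &Seg) -> Option<Seg> {
        path.qualifier.as_deref().cloned()
    }

    fn segments(mut path: Seg) -> Vec<String> {
        let mut out = Vec::new();
        loop {
            out.push(path.name.clone());
            path = match qualifier(&path) {
                Some(q) => q,
                None => break,
            };
        }
        out.reverse();
        out
    }

    fn main() {
        let p = Seg {
            name: "Vec".into(),
            qualifier: Some(Box::new(Seg { name: "std".into(), qualifier: None })),
        };
        println!("{:?}", segments(p)); // ["std", "Vec"]
    }
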
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index 071c1bb18..e7bc4df97 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -37,7 +37,7 @@ pub fn module_from_file_id(db: &impl HirDatabase, file_id: FileId) -> Option<Mod
37pub fn module_from_declaration( 37pub fn module_from_declaration(
38 db: &impl HirDatabase, 38 db: &impl HirDatabase,
39 file_id: FileId, 39 file_id: FileId,
40 decl: &ast::Module, 40 decl: ast::Module,
41) -> Option<Module> { 41) -> Option<Module> {
42 let parent_module = module_from_file_id(db, file_id); 42 let parent_module = module_from_file_id(db, file_id);
43 let child_name = decl.name(); 43 let child_name = decl.name();
@@ -50,8 +50,8 @@ pub fn module_from_declaration(
50/// Locates the module by position in the source code. 50/// Locates the module by position in the source code.
51pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> { 51pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> {
52 let parse = db.parse(position.file_id); 52 let parse = db.parse(position.file_id);
53 match find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) { 53 match &find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) {
54 Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m), 54 Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m.clone()),
55 _ => module_from_file_id(db, position.file_id), 55 _ => module_from_file_id(db, position.file_id),
56 } 56 }
57} 57}
@@ -59,12 +59,12 @@ pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Op
59fn module_from_inline( 59fn module_from_inline(
60 db: &impl HirDatabase, 60 db: &impl HirDatabase,
61 file_id: FileId, 61 file_id: FileId,
62 module: &ast::Module, 62 module: ast::Module,
63) -> Option<Module> { 63) -> Option<Module> {
64 assert!(!module.has_semi()); 64 assert!(!module.has_semi());
65 let file_id = file_id.into(); 65 let file_id = file_id.into();
66 let ast_id_map = db.ast_id_map(file_id); 66 let ast_id_map = db.ast_id_map(file_id);
67 let item_id = ast_id_map.ast_id(module).with_file_id(file_id); 67 let item_id = ast_id_map.ast_id(&module).with_file_id(file_id);
68 module_from_source(db, file_id, Some(item_id)) 68 module_from_source(db, file_id, Some(item_id))
69} 69}
70 70
@@ -127,16 +127,16 @@ fn try_get_resolver_for_node(
127 file_id: FileId, 127 file_id: FileId,
128 node: &SyntaxNode, 128 node: &SyntaxNode,
129) -> Option<Resolver> { 129) -> Option<Resolver> {
130 if let Some(module) = ast::Module::cast(node) { 130 if let Some(module) = ast::Module::cast(node.clone()) {
131 Some(module_from_declaration(db, file_id, module)?.resolver(db)) 131 Some(module_from_declaration(db, file_id, module)?.resolver(db))
132 } else if let Some(_) = ast::SourceFile::cast(node) { 132 } else if let Some(_) = ast::SourceFile::cast(node.clone()) {
133 Some(module_from_source(db, file_id.into(), None)?.resolver(db)) 133 Some(module_from_source(db, file_id.into(), None)?.resolver(db))
134 } else if let Some(s) = ast::StructDef::cast(node) { 134 } else if let Some(s) = ast::StructDef::cast(node.clone()) {
135 let module = module_from_child_node(db, file_id, s.syntax())?; 135 let module = module_from_child_node(db, file_id, s.syntax())?;
136 Some(struct_from_module(db, module, s).resolver(db)) 136 Some(struct_from_module(db, module, &s).resolver(db))
137 } else if let Some(e) = ast::EnumDef::cast(node) { 137 } else if let Some(e) = ast::EnumDef::cast(node.clone()) {
138 let module = module_from_child_node(db, file_id, e.syntax())?; 138 let module = module_from_child_node(db, file_id, e.syntax())?;
139 Some(enum_from_module(db, module, e).resolver(db)) 139 Some(enum_from_module(db, module, &e).resolver(db))
140 } else if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { 140 } else if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
141 Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db)) 141 Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db))
142 } else { 142 } else {
@@ -153,14 +153,14 @@ fn def_with_body_from_child_node(
153 let module = module_from_child_node(db, file_id, node)?; 153 let module = module_from_child_node(db, file_id, node)?;
154 let ctx = LocationCtx::new(db, module, file_id.into()); 154 let ctx = LocationCtx::new(db, module, file_id.into());
155 node.ancestors().find_map(|node| { 155 node.ancestors().find_map(|node| {
156 if let Some(def) = ast::FnDef::cast(node) { 156 if let Some(def) = ast::FnDef::cast(node.clone()) {
157 return Some(Function { id: ctx.to_def(def) }.into()); 157 return Some(Function { id: ctx.to_def(&def) }.into());
158 } 158 }
159 if let Some(def) = ast::ConstDef::cast(node) { 159 if let Some(def) = ast::ConstDef::cast(node.clone()) {
160 return Some(Const { id: ctx.to_def(def) }.into()); 160 return Some(Const { id: ctx.to_def(&def) }.into());
161 } 161 }
162 if let Some(def) = ast::StaticDef::cast(node) { 162 if let Some(def) = ast::StaticDef::cast(node.clone()) {
163 return Some(Static { id: ctx.to_def(def) }.into()); 163 return Some(Static { id: ctx.to_def(&def) }.into());
164 } 164 }
165 None 165 None
166 }) 166 })
@@ -237,7 +237,7 @@ impl SourceAnalyzer {
237 SourceAnalyzer { 237 SourceAnalyzer {
238 resolver: node 238 resolver: node
239 .ancestors() 239 .ancestors()
240 .find_map(|node| try_get_resolver_for_node(db, file_id, node)) 240 .find_map(|node| try_get_resolver_for_node(db, file_id, &node))
241 .unwrap_or_default(), 241 .unwrap_or_default(),
242 body_source_map: None, 242 body_source_map: None,
243 infer: None, 243 infer: None,
@@ -257,17 +257,17 @@ impl SourceAnalyzer {
257 } 257 }
258 258
259 pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { 259 pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
260 let expr_id = self.body_source_map.as_ref()?.node_expr(call.into())?; 260 let expr_id = self.body_source_map.as_ref()?.node_expr(&call.clone().into())?;
261 self.infer.as_ref()?.method_resolution(expr_id) 261 self.infer.as_ref()?.method_resolution(expr_id)
262 } 262 }
263 263
264 pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> { 264 pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
265 let expr_id = self.body_source_map.as_ref()?.node_expr(field.into())?; 265 let expr_id = self.body_source_map.as_ref()?.node_expr(&field.clone().into())?;
266 self.infer.as_ref()?.field_resolution(expr_id) 266 self.infer.as_ref()?.field_resolution(expr_id)
267 } 267 }
268 268
269 pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> { 269 pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> {
270 let expr_id = self.body_source_map.as_ref()?.node_expr(struct_lit.into())?; 270 let expr_id = self.body_source_map.as_ref()?.node_expr(&struct_lit.clone().into())?;
271 self.infer.as_ref()?.variant_resolution(expr_id) 271 self.infer.as_ref()?.variant_resolution(expr_id)
272 } 272 }
273 273
@@ -290,18 +290,18 @@ impl SourceAnalyzer {
290 290
291 pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> { 291 pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> {
292 if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { 292 if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) {
293 let expr_id = self.body_source_map.as_ref()?.node_expr(path_expr.into())?; 293 let expr_id = self.body_source_map.as_ref()?.node_expr(&path_expr.into())?;
294 if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { 294 if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) {
295 return Some(PathResolution::AssocItem(assoc)); 295 return Some(PathResolution::AssocItem(assoc));
296 } 296 }
297 } 297 }
298 if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) { 298 if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) {
299 let pat_id = self.body_source_map.as_ref()?.node_pat(path_pat.into())?; 299 let pat_id = self.body_source_map.as_ref()?.node_pat(&path_pat.into())?;
300 if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) { 300 if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
301 return Some(PathResolution::AssocItem(assoc)); 301 return Some(PathResolution::AssocItem(assoc));
302 } 302 }
303 } 303 }
304 let hir_path = crate::Path::from_ast(path)?; 304 let hir_path = crate::Path::from_ast(path.clone())?;
305 let res = self.resolver.resolve_path_without_assoc_items(db, &hir_path); 305 let res = self.resolver.resolve_path_without_assoc_items(db, &hir_path);
306 let res = res.clone().take_types().or_else(|| res.take_values())?; 306 let res = res.clone().take_types().or_else(|| res.take_values())?;
307 let res = match res { 307 let res = match res {
@@ -343,12 +343,12 @@ impl SourceAnalyzer {
343 // FIXME: at least, this should work with any DefWithBody, but ideally 343 // FIXME: at least, this should work with any DefWithBody, but ideally
344 // this should be hir-based altogether 344 // this should be hir-based altogether
345 let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); 345 let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
346 let ptr = Either::A(AstPtr::new(pat.into())); 346 let ptr = Either::A(AstPtr::new(&ast::Pat::from(pat.clone())));
347 fn_def 347 fn_def
348 .syntax() 348 .syntax()
349 .descendants() 349 .descendants()
350 .filter_map(ast::NameRef::cast) 350 .filter_map(ast::NameRef::cast)
351 .filter(|name_ref| match self.resolve_local_name(*name_ref) { 351 .filter(|name_ref| match self.resolve_local_name(&name_ref) {
352 None => false, 352 None => false,
353 Some(entry) => entry.ptr() == ptr, 353 Some(entry) => entry.ptr() == ptr,
354 }) 354 })
@@ -411,7 +411,7 @@ fn scope_for(
411 node: &SyntaxNode, 411 node: &SyntaxNode,
412) -> Option<ScopeId> { 412) -> Option<ScopeId> {
413 node.ancestors() 413 node.ancestors()
414 .map(SyntaxNodePtr::new) 414 .map(|it| SyntaxNodePtr::new(&it))
415 .filter_map(|ptr| source_map.syntax_expr(ptr)) 415 .filter_map(|ptr| source_map.syntax_expr(ptr))
416 .find_map(|it| scopes.scope_for(it)) 416 .find_map(|it| scopes.scope_for(it))
417} 417}
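Annotation: the recurring `ast::FnDef::cast(node.clone())` pattern above exists because `cast` now consumes its argument; trying several casts against the same node means cloning the cheap handle once per attempt. A self-contained sketch with toy node types:

    #[derive(Clone, Debug)]
    struct SyntaxNode {
        kind: &'static str,
    }

    #[derive(Debug)]
    struct FnDef(SyntaxNode);
    #[derive(Debug)]
    struct ConstDef(SyntaxNode);

    impl FnDef {
        fn cast(node: SyntaxNode) -> Option<FnDef> {
            if node.kind == "FN_DEF" { Some(FnDef(node)) } else { None }
        }
    }
    impl ConstDef {
        fn cast(node: SyntaxNode) -> Option<ConstDef> {
            if node.kind == "CONST_DEF" { Some(ConstDef(node)) } else { None }
        }
    }

    fn classify(node: &SyntaxNode) -> String {
        // Each cast takes ownership, so clone the handle per attempt.
        if let Some(def) = FnDef::cast(node.clone()) {
            return format!("function: {:?}", def);
        }
        if let Some(def) = ConstDef::cast(node.clone()) {
            return format!("const: {:?}", def);
        }
        "something else".to_string()
    }

    fn main() {
        println!("{}", classify(&SyntaxNode { kind: "CONST_DEF" }));
    }
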
diff --git a/crates/ra_hir/src/source_id.rs b/crates/ra_hir/src/source_id.rs
index 6cdb90141..51cd65dda 100644
--- a/crates/ra_hir/src/source_id.rs
+++ b/crates/ra_hir/src/source_id.rs
@@ -5,7 +5,7 @@ use std::{
5}; 5};
6 6
7use ra_arena::{impl_arena_id, Arena, RawId}; 7use ra_arena::{impl_arena_id, Arena, RawId};
8use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr, TreeArc}; 8use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr};
9 9
10use crate::{AstDatabase, HirFileId}; 10use crate::{AstDatabase, HirFileId};
11 11
@@ -42,9 +42,9 @@ impl<N: AstNode> AstId<N> {
42 self.file_id 42 self.file_id
43 } 43 }
44 44
45 pub(crate) fn to_node(&self, db: &impl AstDatabase) -> TreeArc<N> { 45 pub(crate) fn to_node(&self, db: &impl AstDatabase) -> N {
46 let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw); 46 let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw);
47 N::cast(&syntax_node).unwrap().to_owned() 47 N::cast(syntax_node).unwrap()
48 } 48 }
49} 49}
50 50
@@ -93,7 +93,7 @@ pub struct AstIdMap {
93impl AstIdMap { 93impl AstIdMap {
94 pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { 94 pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
95 let map = if let Some(node) = db.parse_or_expand(file_id) { 95 let map = if let Some(node) = db.parse_or_expand(file_id) {
96 AstIdMap::from_source(&*node) 96 AstIdMap::from_source(&node)
97 } else { 97 } else {
98 AstIdMap::default() 98 AstIdMap::default()
99 }; 99 };
@@ -104,9 +104,9 @@ impl AstIdMap {
104 db: &impl AstDatabase, 104 db: &impl AstDatabase,
105 file_id: HirFileId, 105 file_id: HirFileId,
106 ast_id: ErasedFileAstId, 106 ast_id: ErasedFileAstId,
107 ) -> TreeArc<SyntaxNode> { 107 ) -> SyntaxNode {
108 let node = db.parse_or_expand(file_id).unwrap(); 108 let node = db.parse_or_expand(file_id).unwrap();
109 db.ast_id_map(file_id).arena[ast_id].to_node(&*node).to_owned() 109 db.ast_id_map(file_id).arena[ast_id].to_node(&node)
110 } 110 }
111 111
112 pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> { 112 pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
@@ -131,7 +131,7 @@ impl AstIdMap {
131 // change parent's id. This means that, say, adding a new function to a 131 // change parent's id. This means that, say, adding a new function to a
132 // trait does not change ids of top-level items, which helps caching. 132 // trait does not change ids of top-level items, which helps caching.
133 bfs(node, |it| { 133 bfs(node, |it| {
134 if let Some(module_item) = ast::ModuleItem::cast(it) { 134 if let Some(module_item) = ast::ModuleItem::cast(it.clone()) {
135 res.alloc(module_item.syntax()); 135 res.alloc(module_item.syntax());
136 } else if let Some(macro_call) = ast::MacroCall::cast(it) { 136 } else if let Some(macro_call) = ast::MacroCall::cast(it) {
137 res.alloc(macro_call.syntax()); 137 res.alloc(macro_call.syntax());
@@ -146,8 +146,8 @@ impl AstIdMap {
146} 146}
147 147
148/// Walks the subtree in bfs order, calling `f` for each node. 148/// Walks the subtree in bfs order, calling `f` for each node.
149fn bfs(node: &SyntaxNode, mut f: impl FnMut(&SyntaxNode)) { 149fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) {
150 let mut curr_layer = vec![node]; 150 let mut curr_layer = vec![node.clone()];
151 let mut next_layer = vec![]; 151 let mut next_layer = vec![];
152 while !curr_layer.is_empty() { 152 while !curr_layer.is_empty() {
153 curr_layer.drain(..).for_each(|node| { 153 curr_layer.drain(..).for_each(|node| {
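Annotation: `bfs` now hands the callback owned nodes (`FnMut(SyntaxNode)`) and seeds its work list with `node.clone()`. A short sketch of the same traversal over a toy tree, assuming clones are cheap:

    use std::collections::VecDeque;

    #[derive(Clone)]
    struct Node {
        label: String,
        children: Vec<Node>,
    }

    // Old shape (sketch): fn bfs(node: &Node, mut f: impl FnMut(&Node))
    fn bfs(root: &Node, mut f: impl FnMut(Node)) {
        let mut queue: VecDeque<Node> = VecDeque::new();
        queue.push_back(root.clone()); // the work list stores owned handles
        while let Some(node) = queue.pop_front() {
            for child in &node.children {
                queue.push_back(child.clone());
            }
            f(node);
        }
    }

    fn main() {
        let tree = Node {
            label: "root".into(),
            children: vec![Node { label: "leaf".into(), children: vec![] }],
        };
        bfs(&tree, |n| println!("{}", n.label));
    }
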
diff --git a/crates/ra_hir/src/traits.rs b/crates/ra_hir/src/traits.rs
index fc0368303..de26f1a68 100644
--- a/crates/ra_hir/src/traits.rs
+++ b/crates/ra_hir/src/traits.rs
@@ -31,9 +31,9 @@ impl TraitData {
31 item_list 31 item_list
32 .impl_items() 32 .impl_items()
33 .map(|item_node| match item_node.kind() { 33 .map(|item_node| match item_node.kind() {
34 ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(), 34 ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(&it) }.into(),
35 ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(), 35 ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(),
36 ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(), 36 ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(),
37 }) 37 })
38 .collect() 38 .collect()
39 } else { 39 } else {
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index 2410602a6..265740e54 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -3086,7 +3086,7 @@ fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String {
3086 let file = db.parse(pos.file_id).ok().unwrap(); 3086 let file = db.parse(pos.file_id).ok().unwrap();
3087 let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap(); 3087 let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
3088 let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset)); 3088 let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset));
3089 let ty = analyzer.type_of(db, expr).unwrap(); 3089 let ty = analyzer.type_of(db, &expr).unwrap();
3090 ty.display(db).to_string() 3090 ty.display(db).to_string()
3091} 3091}
3092 3092
@@ -3126,7 +3126,7 @@ fn infer(content: &str) -> String {
3126 types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end())); 3126 types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end()));
3127 for (syntax_ptr, ty) in &types { 3127 for (syntax_ptr, ty) in &types {
3128 let node = syntax_ptr.to_node(source_file.syntax()); 3128 let node = syntax_ptr.to_node(source_file.syntax());
3129 let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node) { 3129 let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) {
3130 (self_param.self_kw_token().range(), "self".to_string()) 3130 (self_param.self_kw_token().range(), "self".to_string())
3131 } else { 3131 } else {
3132 (syntax_ptr.range(), node.text().to_string().replace("\n", " ")) 3132 (syntax_ptr.range(), node.text().to_string().replace("\n", " "))
@@ -3137,7 +3137,7 @@ fn infer(content: &str) -> String {
3137 3137
3138 for node in source_file.syntax().descendants() { 3138 for node in source_file.syntax().descendants() {
3139 if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { 3139 if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
3140 let analyzer = SourceAnalyzer::new(&db, file_id, node, None); 3140 let analyzer = SourceAnalyzer::new(&db, file_id, &node, None);
3141 infer_def(analyzer.inference_result(), analyzer.body_source_map()); 3141 infer_def(analyzer.inference_result(), analyzer.body_source_map());
3142 } 3142 }
3143 } 3143 }
@@ -3179,7 +3179,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
3179 let node = 3179 let node =
3180 algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); 3180 algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
3181 let events = db.log_executed(|| { 3181 let events = db.log_executed(|| {
3182 SourceAnalyzer::new(&db, pos.file_id, node, None); 3182 SourceAnalyzer::new(&db, pos.file_id, &node, None);
3183 }); 3183 });
3184 assert!(format!("{:?}", events).contains("infer")) 3184 assert!(format!("{:?}", events).contains("infer"))
3185 } 3185 }
@@ -3200,7 +3200,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
3200 let node = 3200 let node =
3201 algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); 3201 algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
3202 let events = db.log_executed(|| { 3202 let events = db.log_executed(|| {
3203 SourceAnalyzer::new(&db, pos.file_id, node, None); 3203 SourceAnalyzer::new(&db, pos.file_id, &node, None);
3204 }); 3204 });
3205 assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) 3205 assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
3206 } 3206 }
diff --git a/crates/ra_hir/src/type_ref.rs b/crates/ra_hir/src/type_ref.rs
index 8aa807648..8536ae44a 100644
--- a/crates/ra_hir/src/type_ref.rs
+++ b/crates/ra_hir/src/type_ref.rs
@@ -56,7 +56,7 @@ pub enum TypeRef {
56 56
57impl TypeRef { 57impl TypeRef {
58 /// Converts an `ast::TypeRef` to a `hir::TypeRef`. 58 /// Converts an `ast::TypeRef` to a `hir::TypeRef`.
59 pub(crate) fn from_ast(node: &ast::TypeRef) -> Self { 59 pub(crate) fn from_ast(node: ast::TypeRef) -> Self {
60 use ra_syntax::ast::TypeRefKind::*; 60 use ra_syntax::ast::TypeRefKind::*;
61 match node.kind() { 61 match node.kind() {
62 ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()), 62 ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()),
@@ -95,7 +95,7 @@ impl TypeRef {
95 } 95 }
96 } 96 }
97 97
98 pub(crate) fn from_ast_opt(node: Option<&ast::TypeRef>) -> Self { 98 pub(crate) fn from_ast_opt(node: Option<ast::TypeRef>) -> Self {
99 if let Some(node) = node { 99 if let Some(node) = node {
100 TypeRef::from_ast(node) 100 TypeRef::from_ast(node)
101 } else { 101 } else {
diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs
index 11dea7c14..270499612 100644
--- a/crates/ra_ide_api/src/call_info.rs
+++ b/crates/ra_ide_api/src/call_info.rs
@@ -11,24 +11,24 @@ use crate::{db::RootDatabase, CallInfo, FilePosition, FunctionSignature};
11/// Computes parameter information for the given call expression. 11/// Computes parameter information for the given call expression.
12pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { 12pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> {
13 let parse = db.parse(position.file_id); 13 let parse = db.parse(position.file_id);
14 let syntax = parse.tree().syntax(); 14 let syntax = parse.tree().syntax().clone();
15 15
16 // Find the calling expression and it's NameRef 16 // Find the calling expression and it's NameRef
17 let calling_node = FnCallNode::with_node(syntax, position.offset)?; 17 let calling_node = FnCallNode::with_node(&syntax, position.offset)?;
18 let name_ref = calling_node.name_ref()?; 18 let name_ref = calling_node.name_ref()?;
19 19
20 let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); 20 let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
21 let function = match calling_node { 21 let function = match &calling_node {
22 FnCallNode::CallExpr(expr) => { 22 FnCallNode::CallExpr(expr) => {
23 //FIXME: apply subst 23 //FIXME: apply subst
24 let (callable_def, _subst) = analyzer.type_of(db, expr.expr()?)?.as_callable()?; 24 let (callable_def, _subst) = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
25 match callable_def { 25 match callable_def {
26 hir::CallableDef::Function(it) => it, 26 hir::CallableDef::Function(it) => it,
27 //FIXME: handle other callables 27 //FIXME: handle other callables
28 _ => return None, 28 _ => return None,
29 } 29 }
30 } 30 }
31 FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(expr)?, 31 FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(&expr)?,
32 }; 32 };
33 33
34 let mut call_info = CallInfo::new(db, function); 34 let mut call_info = CallInfo::new(db, function);
@@ -73,13 +73,13 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
73 Some(call_info) 73 Some(call_info)
74} 74}
75 75
76enum FnCallNode<'a> { 76enum FnCallNode {
77 CallExpr(&'a ast::CallExpr), 77 CallExpr(ast::CallExpr),
78 MethodCallExpr(&'a ast::MethodCallExpr), 78 MethodCallExpr(ast::MethodCallExpr),
79} 79}
80 80
81impl<'a> FnCallNode<'a> { 81impl FnCallNode {
82 fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option<FnCallNode<'a>> { 82 fn with_node(syntax: &SyntaxNode, offset: TextUnit) -> Option<FnCallNode> {
83 if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) { 83 if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) {
84 return Some(FnCallNode::CallExpr(expr)); 84 return Some(FnCallNode::CallExpr(expr));
85 } 85 }
@@ -89,8 +89,8 @@ impl<'a> FnCallNode<'a> {
89 None 89 None
90 } 90 }
91 91
92 fn name_ref(&self) -> Option<&'a ast::NameRef> { 92 fn name_ref(&self) -> Option<ast::NameRef> {
93 match *self { 93 match self {
94 FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() { 94 FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() {
95 ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, 95 ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?,
96 _ => return None, 96 _ => return None,
@@ -102,8 +102,8 @@ impl<'a> FnCallNode<'a> {
102 } 102 }
103 } 103 }
104 104
105 fn arg_list(&self) -> Option<&'a ast::ArgList> { 105 fn arg_list(&self) -> Option<ast::ArgList> {
106 match *self { 106 match self {
107 FnCallNode::CallExpr(expr) => expr.arg_list(), 107 FnCallNode::CallExpr(expr) => expr.arg_list(),
108 FnCallNode::MethodCallExpr(expr) => expr.arg_list(), 108 FnCallNode::MethodCallExpr(expr) => expr.arg_list(),
109 } 109 }
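Annotation: `FnCallNode<'a>` loses its lifetime parameter and stores owned nodes; its accessors match on `&self` instead of `*self` and return owned values. A compact sketch with stand-in expression types:

    #[derive(Clone, Debug)]
    struct CallExpr {
        callee: String,
    }
    #[derive(Clone, Debug)]
    struct MethodCallExpr {
        method: String,
    }

    // Old: enum FnCallNode<'a> { CallExpr(&'a CallExpr), ... }
    enum FnCallNode {
        CallExpr(CallExpr),
        MethodCallExpr(MethodCallExpr),
    }

    impl FnCallNode {
        fn name(&self) -> Option<String> {
            // `match self` binds references into the enum and clones only
            // what it returns, mirroring the new accessors above.
            match self {
                FnCallNode::CallExpr(call) => Some(call.callee.clone()),
                FnCallNode::MethodCallExpr(call) => Some(call.method.clone()),
            }
        }
    }

    fn main() {
        let node = FnCallNode::MethodCallExpr(MethodCallExpr { method: "len".into() });
        println!("{:?}", node.name());
    }
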
diff --git a/crates/ra_ide_api/src/completion/complete_dot.rs b/crates/ra_ide_api/src/completion/complete_dot.rs
index a5f071442..536ba36df 100644
--- a/crates/ra_ide_api/src/completion/complete_dot.rs
+++ b/crates/ra_ide_api/src/completion/complete_dot.rs
@@ -5,10 +5,11 @@ use rustc_hash::FxHashSet;
5 5
6/// Complete dot accesses, i.e. fields or methods (currently only fields). 6/// Complete dot accesses, i.e. fields or methods (currently only fields).
7pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { 7pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
8 let receiver_ty = match ctx.dot_receiver.and_then(|it| ctx.analyzer.type_of(ctx.db, it)) { 8 let receiver_ty =
9 Some(it) => it, 9 match ctx.dot_receiver.as_ref().and_then(|it| ctx.analyzer.type_of(ctx.db, it)) {
10 None => return, 10 Some(it) => it,
11 }; 11 None => return,
12 };
12 if !ctx.is_call { 13 if !ctx.is_call {
13 complete_fields(acc, ctx, receiver_ty.clone()); 14 complete_fields(acc, ctx, receiver_ty.clone());
14 } 15 }
diff --git a/crates/ra_ide_api/src/completion/complete_fn_param.rs b/crates/ra_ide_api/src/completion/complete_fn_param.rs
index 5a117c485..0887ef1f6 100644
--- a/crates/ra_ide_api/src/completion/complete_fn_param.rs
+++ b/crates/ra_ide_api/src/completion/complete_fn_param.rs
@@ -20,7 +20,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
20 let _ = visitor_ctx(&mut params) 20 let _ = visitor_ctx(&mut params)
21 .visit::<ast::SourceFile, _>(process) 21 .visit::<ast::SourceFile, _>(process)
22 .visit::<ast::ItemList, _>(process) 22 .visit::<ast::ItemList, _>(process)
23 .accept(node); 23 .accept(&node);
24 } 24 }
25 params 25 params
26 .into_iter() 26 .into_iter()
@@ -38,10 +38,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
38 .add_to(acc) 38 .add_to(acc)
39 }); 39 });
40 40
41 fn process<'a, N: ast::FnDefOwner>( 41 fn process<N: ast::FnDefOwner>(node: N, params: &mut FxHashMap<String, (u32, ast::Param)>) {
42 node: &'a N,
43 params: &mut FxHashMap<String, (u32, &'a ast::Param)>,
44 ) {
45 node.functions().filter_map(|it| it.param_list()).flat_map(|it| it.params()).for_each( 42 node.functions().filter_map(|it| it.param_list()).flat_map(|it| it.params()).for_each(
46 |param| { 43 |param| {
47 let text = param.syntax().text().to_string(); 44 let text = param.syntax().text().to_string();
diff --git a/crates/ra_ide_api/src/completion/complete_keyword.rs b/crates/ra_ide_api/src/completion/complete_keyword.rs
index 034ed934d..4cf34eff8 100644
--- a/crates/ra_ide_api/src/completion/complete_keyword.rs
+++ b/crates/ra_ide_api/src/completion/complete_keyword.rs
@@ -52,7 +52,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
52 return; 52 return;
53 } 53 }
54 54
55 let fn_def = match ctx.function_syntax { 55 let fn_def = match &ctx.function_syntax {
56 Some(it) => it, 56 Some(it) => it,
57 None => return, 57 None => return,
58 }; 58 };
@@ -65,7 +65,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
65 acc.add(keyword(ctx, "else", "else {$0}")); 65 acc.add(keyword(ctx, "else", "else {$0}"));
66 acc.add(keyword(ctx, "else if", "else if $0 {}")); 66 acc.add(keyword(ctx, "else if", "else if $0 {}"));
67 } 67 }
68 if is_in_loop_body(ctx.token) { 68 if is_in_loop_body(&ctx.token) {
69 if ctx.can_be_stmt { 69 if ctx.can_be_stmt {
70 acc.add(keyword(ctx, "continue", "continue;")); 70 acc.add(keyword(ctx, "continue", "continue;"));
71 acc.add(keyword(ctx, "break", "break;")); 71 acc.add(keyword(ctx, "break", "break;"));
@@ -74,19 +74,19 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
74 acc.add(keyword(ctx, "break", "break")); 74 acc.add(keyword(ctx, "break", "break"));
75 } 75 }
76 } 76 }
77 acc.add_all(complete_return(ctx, fn_def, ctx.can_be_stmt)); 77 acc.add_all(complete_return(ctx, &fn_def, ctx.can_be_stmt));
78} 78}
79 79
80fn is_in_loop_body(leaf: SyntaxToken) -> bool { 80fn is_in_loop_body(leaf: &SyntaxToken) -> bool {
81 for node in leaf.parent().ancestors() { 81 for node in leaf.parent().ancestors() {
82 if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR { 82 if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
83 break; 83 break;
84 } 84 }
85 let loop_body = visitor() 85 let loop_body = visitor()
86 .visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body) 86 .visit::<ast::ForExpr, _>(|it| it.loop_body())
87 .visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body) 87 .visit::<ast::WhileExpr, _>(|it| it.loop_body())
88 .visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body) 88 .visit::<ast::LoopExpr, _>(|it| it.loop_body())
89 .accept(node); 89 .accept(&node);
90 if let Some(Some(body)) = loop_body { 90 if let Some(Some(body)) = loop_body {
91 if leaf.range().is_subrange(&body.syntax().range()) { 91 if leaf.range().is_subrange(&body.syntax().range()) {
92 return true; 92 return true;
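Annotation: the visitor arms switch from the method path `LoopBodyOwner::loop_body` to closures like `|it| it.loop_body()`. A plausible reading, sketched below with hypothetical types: the visitor now passes each arm an owned node, while `loop_body` still takes `&self`, so the closure adapts the calling convention by taking the node by value and borrowing it itself.

    struct WhileExpr {
        body: Option<String>,
    }

    impl WhileExpr {
        fn loop_body(&self) -> Option<String> {
            self.body.clone()
        }
    }

    // A visitor arm that expects a function over an *owned* node.
    fn visit<N, T>(node: N, f: impl FnOnce(N) -> T) -> T {
        f(node)
    }

    fn main() {
        let w = WhileExpr { body: Some("{ work() }".into()) };
        // `WhileExpr::loop_body` has type fn(&WhileExpr) -> _, which no longer
        // fits FnOnce(WhileExpr); the closure bridges the two.
        let body = visit(w, |it| it.loop_body());
        println!("{:?}", body);
    }
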
diff --git a/crates/ra_ide_api/src/completion/complete_postfix.rs b/crates/ra_ide_api/src/completion/complete_postfix.rs
index 4f5062214..c75b1c159 100644
--- a/crates/ra_ide_api/src/completion/complete_postfix.rs
+++ b/crates/ra_ide_api/src/completion/complete_postfix.rs
@@ -11,7 +11,8 @@ use ra_text_edit::TextEditBuilder;
11 11
12fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder { 12fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder {
13 let edit = { 13 let edit = {
14 let receiver_range = ctx.dot_receiver.expect("no receiver available").syntax().range(); 14 let receiver_range =
15 ctx.dot_receiver.as_ref().expect("no receiver available").syntax().range();
15 let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end()); 16 let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end());
16 let mut builder = TextEditBuilder::default(); 17 let mut builder = TextEditBuilder::default();
17 builder.replace(delete_range, snippet.to_string()); 18 builder.replace(delete_range, snippet.to_string());
@@ -38,9 +39,9 @@ fn is_bool_or_unknown(ty: Option<Ty>) -> bool {
38} 39}
39 40
40pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { 41pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
41 if let Some(dot_receiver) = ctx.dot_receiver { 42 if let Some(dot_receiver) = &ctx.dot_receiver {
42 let receiver_text = dot_receiver.syntax().text().to_string(); 43 let receiver_text = dot_receiver.syntax().text().to_string();
43 let receiver_ty = ctx.analyzer.type_of(ctx.db, dot_receiver); 44 let receiver_ty = ctx.analyzer.type_of(ctx.db, &dot_receiver);
44 if is_bool_or_unknown(receiver_ty) { 45 if is_bool_or_unknown(receiver_ty) {
45 postfix_snippet(ctx, "if", "if expr {}", &format!("if {} {{$0}}", receiver_text)) 46 postfix_snippet(ctx, "if", "if expr {}", &format!("if {} {{$0}}", receiver_text))
46 .add_to(acc); 47 .add_to(acc);
diff --git a/crates/ra_ide_api/src/completion/complete_scope.rs b/crates/ra_ide_api/src/completion/complete_scope.rs
index 1ba871257..f92034055 100644
--- a/crates/ra_ide_api/src/completion/complete_scope.rs
+++ b/crates/ra_ide_api/src/completion/complete_scope.rs
@@ -20,8 +20,8 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
20 let mut builder = TextEditBuilder::default(); 20 let mut builder = TextEditBuilder::default();
21 builder.replace(ctx.source_range(), name.to_string()); 21 builder.replace(ctx.source_range(), name.to_string());
22 auto_import::auto_import_text_edit( 22 auto_import::auto_import_text_edit(
23 ctx.token.parent(), 23 &ctx.token.parent(),
24 ctx.token.parent(), 24 &ctx.token.parent(),
25 &path, 25 &path,
26 &mut builder, 26 &mut builder,
27 ); 27 );
diff --git a/crates/ra_ide_api/src/completion/complete_struct_literal.rs b/crates/ra_ide_api/src/completion/complete_struct_literal.rs
index b6216f857..9410f740f 100644
--- a/crates/ra_ide_api/src/completion/complete_struct_literal.rs
+++ b/crates/ra_ide_api/src/completion/complete_struct_literal.rs
@@ -4,8 +4,8 @@ use crate::completion::{CompletionContext, Completions};
4 4
5/// Complete fields in fields literals. 5/// Complete fields in fields literals.
6pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) { 6pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) {
7 let (ty, variant) = match ctx.struct_lit_syntax.and_then(|it| { 7 let (ty, variant) = match ctx.struct_lit_syntax.as_ref().and_then(|it| {
8 Some((ctx.analyzer.type_of(ctx.db, it.into())?, ctx.analyzer.resolve_variant(it)?)) 8 Some((ctx.analyzer.type_of(ctx.db, &it.clone().into())?, ctx.analyzer.resolve_variant(it)?))
9 }) { 9 }) {
10 Some(it) => it, 10 Some(it) => it,
11 _ => return, 11 _ => return,
diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs
index 4aa84751f..b803271ab 100644
--- a/crates/ra_ide_api/src/completion/completion_context.rs
+++ b/crates/ra_ide_api/src/completion/completion_context.rs
@@ -16,11 +16,11 @@ pub(crate) struct CompletionContext<'a> {
16 pub(super) db: &'a db::RootDatabase, 16 pub(super) db: &'a db::RootDatabase,
17 pub(super) analyzer: hir::SourceAnalyzer, 17 pub(super) analyzer: hir::SourceAnalyzer,
18 pub(super) offset: TextUnit, 18 pub(super) offset: TextUnit,
19 pub(super) token: SyntaxToken<'a>, 19 pub(super) token: SyntaxToken,
20 pub(super) module: Option<hir::Module>, 20 pub(super) module: Option<hir::Module>,
21 pub(super) function_syntax: Option<&'a ast::FnDef>, 21 pub(super) function_syntax: Option<ast::FnDef>,
22 pub(super) use_item_syntax: Option<&'a ast::UseItem>, 22 pub(super) use_item_syntax: Option<ast::UseItem>,
23 pub(super) struct_lit_syntax: Option<&'a ast::StructLit>, 23 pub(super) struct_lit_syntax: Option<ast::StructLit>,
24 pub(super) is_param: bool, 24 pub(super) is_param: bool,
25 /// If a name-binding or reference to a const in a pattern. 25 /// If a name-binding or reference to a const in a pattern.
26 /// Irrefutable patterns (like let) are excluded. 26 /// Irrefutable patterns (like let) are excluded.
@@ -35,7 +35,7 @@ pub(crate) struct CompletionContext<'a> {
35 /// Something is typed at the "top" level, in module or impl/trait. 35 /// Something is typed at the "top" level, in module or impl/trait.
36 pub(super) is_new_item: bool, 36 pub(super) is_new_item: bool,
37 /// The receiver if this is a field or method access, i.e. writing something.<|> 37 /// The receiver if this is a field or method access, i.e. writing something.<|>
38 pub(super) dot_receiver: Option<&'a ast::Expr>, 38 pub(super) dot_receiver: Option<ast::Expr>,
39 /// If this is a call (method or function) in particular, i.e. the () are already there. 39 /// If this is a call (method or function) in particular, i.e. the () are already there.
40 pub(super) is_call: bool, 40 pub(super) is_call: bool,
41} 41}
@@ -50,7 +50,7 @@ impl<'a> CompletionContext<'a> {
50 let token = 50 let token =
51 find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?; 51 find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?;
52 let analyzer = 52 let analyzer =
53 hir::SourceAnalyzer::new(db, position.file_id, token.parent(), Some(position.offset)); 53 hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
54 let mut ctx = CompletionContext { 54 let mut ctx = CompletionContext {
55 db, 55 db,
56 analyzer, 56 analyzer,
@@ -109,7 +109,7 @@ impl<'a> CompletionContext<'a> {
109 if is_node::<ast::BindPat>(name.syntax()) { 109 if is_node::<ast::BindPat>(name.syntax()) {
110 let bind_pat = name.syntax().ancestors().find_map(ast::BindPat::cast).unwrap(); 110 let bind_pat = name.syntax().ancestors().find_map(ast::BindPat::cast).unwrap();
111 let parent = bind_pat.syntax().parent(); 111 let parent = bind_pat.syntax().parent();
112 if parent.and_then(ast::MatchArm::cast).is_some() 112 if parent.clone().and_then(ast::MatchArm::cast).is_some()
113 || parent.and_then(ast::Condition::cast).is_some() 113 || parent.and_then(ast::Condition::cast).is_some()
114 { 114 {
115 self.is_pat_binding = true; 115 self.is_pat_binding = true;
@@ -122,7 +122,7 @@ impl<'a> CompletionContext<'a> {
122 } 122 }
123 } 123 }
124 124
125 fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) { 125 fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) {
126 let name_range = name_ref.syntax().range(); 126 let name_range = name_ref.syntax().range();
127 if name_ref.syntax().parent().and_then(ast::NamedField::cast).is_some() { 127 if name_ref.syntax().parent().and_then(ast::NamedField::cast).is_some() {
128 self.struct_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset); 128 self.struct_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset);
@@ -153,7 +153,7 @@ impl<'a> CompletionContext<'a> {
153 None => return, 153 None => return,
154 }; 154 };
155 155
156 if let Some(segment) = ast::PathSegment::cast(parent) { 156 if let Some(segment) = ast::PathSegment::cast(parent.clone()) {
157 let path = segment.parent_path(); 157 let path = segment.parent_path();
158 self.is_call = path 158 self.is_call = path
159 .syntax() 159 .syntax()
@@ -162,7 +162,7 @@ impl<'a> CompletionContext<'a> {
162 .and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast)) 162 .and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast))
163 .is_some(); 163 .is_some();
164 164
165 if let Some(mut path) = hir::Path::from_ast(path) { 165 if let Some(mut path) = hir::Path::from_ast(path.clone()) {
166 if !path.is_ident() { 166 if !path.is_ident() {
167 path.segments.pop().unwrap(); 167 path.segments.pop().unwrap();
168 self.path_prefix = Some(path); 168 self.path_prefix = Some(path);
@@ -179,7 +179,7 @@ impl<'a> CompletionContext<'a> {
179 .syntax() 179 .syntax()
180 .ancestors() 180 .ancestors()
181 .find_map(|node| { 181 .find_map(|node| {
182 if let Some(stmt) = ast::ExprStmt::cast(node) { 182 if let Some(stmt) = ast::ExprStmt::cast(node.clone()) {
183 return Some(stmt.syntax().range() == name_ref.syntax().range()); 183 return Some(stmt.syntax().range() == name_ref.syntax().range());
184 } 184 }
185 if let Some(block) = ast::Block::cast(node) { 185 if let Some(block) = ast::Block::cast(node) {
@@ -203,7 +203,7 @@ impl<'a> CompletionContext<'a> {
203 } 203 }
204 } 204 }
205 } 205 }
206 if let Some(field_expr) = ast::FieldExpr::cast(parent) { 206 if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
207 // The receiver comes before the point of insertion of the fake 207 // The receiver comes before the point of insertion of the fake
208 // ident, so it should have the same range in the non-modified file 208 // ident, so it should have the same range in the non-modified file
209 self.dot_receiver = field_expr 209 self.dot_receiver = field_expr
@@ -222,7 +222,7 @@ impl<'a> CompletionContext<'a> {
222 } 222 }
223} 223}
224 224
225fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<&N> { 225fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
226 find_covering_element(syntax, range).ancestors().find_map(N::cast) 226 find_covering_element(syntax, range).ancestors().find_map(N::cast)
227} 227}
228 228
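Note on the hunks above: dropping the lifetime from `CompletionContext` (`Option<&'a ast::Expr>` becomes `Option<ast::Expr>`, `Option<&N>` becomes `Option<N>`) follows from the syntax nodes turning into owned, cheaply clonable handles instead of borrows into the tree. A minimal sketch of the resulting shape, using hypothetical `Expr`/`CompletionCtx` types rather than the real `ra_syntax` API:

    use std::rc::Rc;

    // Hypothetical stand-in for an owned syntax node: a cheap, clonable handle
    // onto shared tree data (the real nodes are similar only in spirit).
    #[derive(Clone, Debug)]
    struct Expr {
        text: Rc<String>,
    }

    // With owned nodes the context struct no longer needs a lifetime parameter
    // tied to the parsed tree; it simply stores the handle.
    struct CompletionCtx {
        dot_receiver: Option<Expr>,
        is_call: bool,
    }

    fn main() {
        let receiver = Expr { text: Rc::new("foo".to_string()) };
        // Cloning only bumps a reference count; the underlying data is shared.
        let ctx = CompletionCtx { dot_receiver: Some(receiver.clone()), is_call: false };
        assert_eq!(ctx.dot_receiver.as_ref().map(|e| e.text.as_str()), Some("foo"));
        println!("is_call = {}", ctx.is_call);
    }

The trade is explicit `.clone()` calls at use sites in exchange for structs and return types that no longer carry lifetimes.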
diff --git a/crates/ra_ide_api/src/diagnostics.rs b/crates/ra_ide_api/src/diagnostics.rs
index 3f5b9e0a0..affbad6cd 100644
--- a/crates/ra_ide_api/src/diagnostics.rs
+++ b/crates/ra_ide_api/src/diagnostics.rs
@@ -35,8 +35,8 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
35 })); 35 }));
36 36
37 for node in parse.tree().syntax().descendants() { 37 for node in parse.tree().syntax().descendants() {
38 check_unnecessary_braces_in_use_statement(&mut res, file_id, node); 38 check_unnecessary_braces_in_use_statement(&mut res, file_id, &node);
39 check_struct_shorthand_initialization(&mut res, file_id, node); 39 check_struct_shorthand_initialization(&mut res, file_id, &node);
40 } 40 }
41 let res = RefCell::new(res); 41 let res = RefCell::new(res);
42 let mut sink = DiagnosticSink::new(|d| { 42 let mut sink = DiagnosticSink::new(|d| {
@@ -60,7 +60,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
60 }) 60 })
61 .on::<hir::diagnostics::MissingFields, _>(|d| { 61 .on::<hir::diagnostics::MissingFields, _>(|d| {
62 let node = d.ast(db); 62 let node = d.ast(db);
63 let mut ast_editor = AstEditor::new(&*node); 63 let mut ast_editor = AstEditor::new(node);
64 for f in d.missed_fields.iter() { 64 for f in d.missed_fields.iter() {
65 ast_editor.append_field(&AstBuilder::<NamedField>::from_name(f)); 65 ast_editor.append_field(&AstBuilder::<NamedField>::from_name(f));
66 } 66 }
@@ -94,11 +94,11 @@ fn check_unnecessary_braces_in_use_statement(
94 file_id: FileId, 94 file_id: FileId,
95 node: &SyntaxNode, 95 node: &SyntaxNode,
96) -> Option<()> { 96) -> Option<()> {
97 let use_tree_list = ast::UseTreeList::cast(node)?; 97 let use_tree_list = ast::UseTreeList::cast(node.clone())?;
98 if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() { 98 if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
99 let range = use_tree_list.syntax().range(); 99 let range = use_tree_list.syntax().range();
100 let edit = 100 let edit =
101 text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(single_use_tree) 101 text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree)
102 .unwrap_or_else(|| { 102 .unwrap_or_else(|| {
103 let to_replace = single_use_tree.syntax().text().to_string(); 103 let to_replace = single_use_tree.syntax().text().to_string();
104 let mut edit_builder = TextEditBuilder::default(); 104 let mut edit_builder = TextEditBuilder::default();
@@ -141,7 +141,7 @@ fn check_struct_shorthand_initialization(
141 file_id: FileId, 141 file_id: FileId,
142 node: &SyntaxNode, 142 node: &SyntaxNode,
143) -> Option<()> { 143) -> Option<()> {
144 let struct_lit = ast::StructLit::cast(node)?; 144 let struct_lit = ast::StructLit::cast(node.clone())?;
145 let named_field_list = struct_lit.named_field_list()?; 145 let named_field_list = struct_lit.named_field_list()?;
146 for named_field in named_field_list.fields() { 146 for named_field in named_field_list.fields() {
147 if let (Some(name_ref), Some(expr)) = (named_field.name_ref(), named_field.expr()) { 147 if let (Some(name_ref), Some(expr)) = (named_field.name_ref(), named_field.expr()) {
@@ -184,7 +184,7 @@ mod tests {
184 let parse = SourceFile::parse(code); 184 let parse = SourceFile::parse(code);
185 let mut diagnostics = Vec::new(); 185 let mut diagnostics = Vec::new();
186 for node in parse.tree().syntax().descendants() { 186 for node in parse.tree().syntax().descendants() {
187 func(&mut diagnostics, FileId(0), node); 187 func(&mut diagnostics, FileId(0), &node);
188 } 188 }
189 assert!(diagnostics.is_empty()); 189 assert!(diagnostics.is_empty());
190 } 190 }
@@ -193,7 +193,7 @@ mod tests {
193 let parse = SourceFile::parse(before); 193 let parse = SourceFile::parse(before);
194 let mut diagnostics = Vec::new(); 194 let mut diagnostics = Vec::new();
195 for node in parse.tree().syntax().descendants() { 195 for node in parse.tree().syntax().descendants() {
196 func(&mut diagnostics, FileId(0), node); 196 func(&mut diagnostics, FileId(0), &node);
197 } 197 }
198 let diagnostic = 198 let diagnostic =
199 diagnostics.pop().unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before)); 199 diagnostics.pop().unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before));
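The `ast::UseTreeList::cast(node.clone())?` and `ast::StructLit::cast(node.clone())?` changes above reflect `cast` now taking the node by value and storing it inside the typed wrapper. A rough, self-contained sketch of that pattern with made-up types (not the real `ra_syntax` signatures):

    use std::rc::Rc;

    // Made-up owned node and typed wrapper, standing in for SyntaxNode / ast::UseTreeList.
    #[derive(Clone)]
    struct SyntaxNode {
        kind: &'static str,
        text: Rc<String>,
    }

    struct UseTreeList {
        syntax: SyntaxNode,
    }

    impl UseTreeList {
        // `cast` takes the node by value and keeps it inside the wrapper, so callers
        // that still need the untyped node afterwards clone the handle first.
        fn cast(node: SyntaxNode) -> Option<UseTreeList> {
            if node.kind == "USE_TREE_LIST" {
                Some(UseTreeList { syntax: node })
            } else {
                None
            }
        }
    }

    fn check(node: &SyntaxNode) -> Option<()> {
        // Mirrors `ast::UseTreeList::cast(node.clone())?` in the hunk above.
        let list = UseTreeList::cast(node.clone())?;
        println!("found {} with text {}", list.syntax.kind, list.syntax.text);
        Some(())
    }

    fn main() {
        let node = SyntaxNode { kind: "USE_TREE_LIST", text: Rc::new("{self}".into()) };
        check(&node).unwrap();
    }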
diff --git a/crates/ra_ide_api/src/display/function_signature.rs b/crates/ra_ide_api/src/display/function_signature.rs
index e7ad5a0d1..644a4532b 100644
--- a/crates/ra_ide_api/src/display/function_signature.rs
+++ b/crates/ra_ide_api/src/display/function_signature.rs
@@ -38,7 +38,7 @@ impl FunctionSignature {
38 pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self { 38 pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self {
39 let doc = function.docs(db); 39 let doc = function.docs(db);
40 let ast_node = function.source(db).ast; 40 let ast_node = function.source(db).ast;
41 FunctionSignature::from(&*ast_node).with_doc_opt(doc) 41 FunctionSignature::from(&ast_node).with_doc_opt(doc)
42 } 42 }
43} 43}
44 44
diff --git a/crates/ra_ide_api/src/display/navigation_target.rs b/crates/ra_ide_api/src/display/navigation_target.rs
index 20a8d418e..8cc853dd1 100644
--- a/crates/ra_ide_api/src/display/navigation_target.rs
+++ b/crates/ra_ide_api/src/display/navigation_target.rs
@@ -5,7 +5,7 @@ use ra_syntax::{
5 ast::{self, DocCommentsOwner}, 5 ast::{self, DocCommentsOwner},
6 AstNode, AstPtr, SmolStr, 6 AstNode, AstPtr, SmolStr,
7 SyntaxKind::{self, NAME}, 7 SyntaxKind::{self, NAME},
8 SyntaxNode, TextRange, TreeArc, 8 SyntaxNode, TextRange,
9}; 9};
10 10
11use super::short_label::ShortLabel; 11use super::short_label::ShortLabel;
@@ -169,7 +169,7 @@ impl NavigationTarget {
169 let file_id = src.file_id.original_file(db); 169 let file_id = src.file_id.original_file(db);
170 match src.ast { 170 match src.ast {
171 FieldSource::Named(it) => { 171 FieldSource::Named(it) => {
172 NavigationTarget::from_named(file_id, &*it, it.doc_comment_text(), it.short_label()) 172 NavigationTarget::from_named(file_id, &it, it.doc_comment_text(), it.short_label())
173 } 173 }
174 FieldSource::Pos(it) => { 174 FieldSource::Pos(it) => {
175 NavigationTarget::from_syntax(file_id, "".into(), None, it.syntax(), None, None) 175 NavigationTarget::from_syntax(file_id, "".into(), None, it.syntax(), None, None)
@@ -179,13 +179,13 @@ impl NavigationTarget {
179 179
180 pub(crate) fn from_def_source<A, D>(db: &RootDatabase, def: D) -> NavigationTarget 180 pub(crate) fn from_def_source<A, D>(db: &RootDatabase, def: D) -> NavigationTarget
181 where 181 where
182 D: HasSource<Ast = TreeArc<A>>, 182 D: HasSource<Ast = A>,
183 A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, 183 A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
184 { 184 {
185 let src = def.source(db); 185 let src = def.source(db);
186 NavigationTarget::from_named( 186 NavigationTarget::from_named(
187 src.file_id.original_file(db), 187 src.file_id.original_file(db),
188 &*src.ast, 188 &src.ast,
189 src.ast.doc_comment_text(), 189 src.ast.doc_comment_text(),
190 src.ast.short_label(), 190 src.ast.short_label(),
191 ) 191 )
@@ -249,7 +249,7 @@ impl NavigationTarget {
249 log::debug!("nav target {}", src.ast.syntax().debug_dump()); 249 log::debug!("nav target {}", src.ast.syntax().debug_dump());
250 NavigationTarget::from_named( 250 NavigationTarget::from_named(
251 src.file_id.original_file(db), 251 src.file_id.original_file(db),
252 &*src.ast, 252 &src.ast,
253 src.ast.doc_comment_text(), 253 src.ast.doc_comment_text(),
254 None, 254 None,
255 ) 255 )
@@ -318,22 +318,18 @@ pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option
318 let parse = db.parse(symbol.file_id); 318 let parse = db.parse(symbol.file_id);
319 let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned(); 319 let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned();
320 320
321 fn doc_comments<N: ast::DocCommentsOwner>(node: &N) -> Option<String> {
322 node.doc_comment_text()
323 }
324
325 visitor() 321 visitor()
326 .visit(doc_comments::<ast::FnDef>) 322 .visit(|it: ast::FnDef| it.doc_comment_text())
327 .visit(doc_comments::<ast::StructDef>) 323 .visit(|it: ast::StructDef| it.doc_comment_text())
328 .visit(doc_comments::<ast::EnumDef>) 324 .visit(|it: ast::EnumDef| it.doc_comment_text())
329 .visit(doc_comments::<ast::TraitDef>) 325 .visit(|it: ast::TraitDef| it.doc_comment_text())
330 .visit(doc_comments::<ast::Module>) 326 .visit(|it: ast::Module| it.doc_comment_text())
331 .visit(doc_comments::<ast::TypeAliasDef>) 327 .visit(|it: ast::TypeAliasDef| it.doc_comment_text())
332 .visit(doc_comments::<ast::ConstDef>) 328 .visit(|it: ast::ConstDef| it.doc_comment_text())
333 .visit(doc_comments::<ast::StaticDef>) 329 .visit(|it: ast::StaticDef| it.doc_comment_text())
334 .visit(doc_comments::<ast::NamedFieldDef>) 330 .visit(|it: ast::NamedFieldDef| it.doc_comment_text())
335 .visit(doc_comments::<ast::EnumVariant>) 331 .visit(|it: ast::EnumVariant| it.doc_comment_text())
336 .visit(doc_comments::<ast::MacroCall>) 332 .visit(|it: ast::MacroCall| it.doc_comment_text())
337 .accept(&node)? 333 .accept(&node)?
338} 334}
339 335
@@ -345,15 +341,15 @@ pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) ->
345 let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned(); 341 let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned();
346 342
347 visitor() 343 visitor()
348 .visit(|node: &ast::FnDef| node.short_label()) 344 .visit(|node: ast::FnDef| node.short_label())
349 .visit(|node: &ast::StructDef| node.short_label()) 345 .visit(|node: ast::StructDef| node.short_label())
350 .visit(|node: &ast::EnumDef| node.short_label()) 346 .visit(|node: ast::EnumDef| node.short_label())
351 .visit(|node: &ast::TraitDef| node.short_label()) 347 .visit(|node: ast::TraitDef| node.short_label())
352 .visit(|node: &ast::Module| node.short_label()) 348 .visit(|node: ast::Module| node.short_label())
353 .visit(|node: &ast::TypeAliasDef| node.short_label()) 349 .visit(|node: ast::TypeAliasDef| node.short_label())
354 .visit(|node: &ast::ConstDef| node.short_label()) 350 .visit(|node: ast::ConstDef| node.short_label())
355 .visit(|node: &ast::StaticDef| node.short_label()) 351 .visit(|node: ast::StaticDef| node.short_label())
356 .visit(|node: &ast::NamedFieldDef| node.short_label()) 352 .visit(|node: ast::NamedFieldDef| node.short_label())
357 .visit(|node: &ast::EnumVariant| node.short_label()) 353 .visit(|node: ast::EnumVariant| node.short_label())
358 .accept(&node)? 354 .accept(&node)?
359} 355}
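The visitor changes above replace the generic `doc_comments::<N>` helper and the `&ast::FnDef`-style closures with closures that take the typed node by value (`|it: ast::FnDef| it.doc_comment_text()`). A small sketch of that closure style with hypothetical node types:

    // Hypothetical typed nodes standing in for ast::FnDef / ast::StructDef.
    struct FnDef {
        doc: Option<String>,
    }
    struct StructDef {
        doc: Option<String>,
    }

    trait DocCommentsOwner {
        fn doc_comment_text(&self) -> Option<String>;
    }
    impl DocCommentsOwner for FnDef {
        fn doc_comment_text(&self) -> Option<String> {
            self.doc.clone()
        }
    }
    impl DocCommentsOwner for StructDef {
        fn doc_comment_text(&self) -> Option<String> {
            self.doc.clone()
        }
    }

    fn main() {
        // Before: a generic helper taking `&N` was registered as `doc_comments::<ast::FnDef>`.
        // After: a closure takes the owned node directly, as in the hunk above.
        let visit_fn = |it: FnDef| it.doc_comment_text();
        let visit_struct = |it: StructDef| it.doc_comment_text();

        let f = FnDef { doc: Some("/// adds two numbers".into()) };
        let s = StructDef { doc: None };
        assert_eq!(visit_fn(f), Some("/// adds two numbers".to_string()));
        assert_eq!(visit_struct(s), None);
    }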
diff --git a/crates/ra_ide_api/src/display/structure.rs b/crates/ra_ide_api/src/display/structure.rs
index 638484a9b..2e183d2f6 100644
--- a/crates/ra_ide_api/src/display/structure.rs
+++ b/crates/ra_ide_api/src/display/structure.rs
@@ -24,14 +24,14 @@ pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
24 for event in file.syntax().preorder() { 24 for event in file.syntax().preorder() {
25 match event { 25 match event {
26 WalkEvent::Enter(node) => { 26 WalkEvent::Enter(node) => {
27 if let Some(mut symbol) = structure_node(node) { 27 if let Some(mut symbol) = structure_node(&node) {
28 symbol.parent = stack.last().copied(); 28 symbol.parent = stack.last().copied();
29 stack.push(res.len()); 29 stack.push(res.len());
30 res.push(symbol); 30 res.push(symbol);
31 } 31 }
32 } 32 }
33 WalkEvent::Leave(node) => { 33 WalkEvent::Leave(node) => {
34 if structure_node(node).is_some() { 34 if structure_node(&node).is_some() {
35 stack.pop().unwrap(); 35 stack.pop().unwrap();
36 } 36 }
37 } 37 }
@@ -41,19 +41,20 @@ pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
41} 41}
42 42
43fn structure_node(node: &SyntaxNode) -> Option<StructureNode> { 43fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
44 fn decl<N: NameOwner + AttrsOwner>(node: &N) -> Option<StructureNode> { 44 fn decl<N: NameOwner + AttrsOwner>(node: N) -> Option<StructureNode> {
45 decl_with_detail(node, None) 45 decl_with_detail(node, None)
46 } 46 }
47 47
48 fn decl_with_ascription<N: NameOwner + AttrsOwner + TypeAscriptionOwner>( 48 fn decl_with_ascription<N: NameOwner + AttrsOwner + TypeAscriptionOwner>(
49 node: &N, 49 node: N,
50 ) -> Option<StructureNode> { 50 ) -> Option<StructureNode> {
51 decl_with_type_ref(node, node.ascribed_type()) 51 let ty = node.ascribed_type();
52 decl_with_type_ref(node, ty)
52 } 53 }
53 54
54 fn decl_with_type_ref<N: NameOwner + AttrsOwner>( 55 fn decl_with_type_ref<N: NameOwner + AttrsOwner>(
55 node: &N, 56 node: N,
56 type_ref: Option<&ast::TypeRef>, 57 type_ref: Option<ast::TypeRef>,
57 ) -> Option<StructureNode> { 58 ) -> Option<StructureNode> {
58 let detail = type_ref.map(|type_ref| { 59 let detail = type_ref.map(|type_ref| {
59 let mut detail = String::new(); 60 let mut detail = String::new();
@@ -64,7 +65,7 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
64 } 65 }
65 66
66 fn decl_with_detail<N: NameOwner + AttrsOwner>( 67 fn decl_with_detail<N: NameOwner + AttrsOwner>(
67 node: &N, 68 node: N,
68 detail: Option<String>, 69 detail: Option<String>,
69 ) -> Option<StructureNode> { 70 ) -> Option<StructureNode> {
70 let name = node.name()?; 71 let name = node.name()?;
@@ -82,22 +83,24 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
82 83
83 fn collapse_ws(node: &SyntaxNode, output: &mut String) { 84 fn collapse_ws(node: &SyntaxNode, output: &mut String) {
84 let mut can_insert_ws = false; 85 let mut can_insert_ws = false;
85 for line in node.text().chunks().flat_map(|chunk| chunk.lines()) { 86 for chunk in node.text().chunks() {
86 let line = line.trim(); 87 for line in chunk.lines() {
87 if line.is_empty() { 88 let line = line.trim();
88 if can_insert_ws { 89 if line.is_empty() {
89 output.push_str(" "); 90 if can_insert_ws {
90 can_insert_ws = false; 91 output.push_str(" ");
92 can_insert_ws = false;
93 }
94 } else {
95 output.push_str(line);
96 can_insert_ws = true;
91 } 97 }
92 } else {
93 output.push_str(line);
94 can_insert_ws = true;
95 } 98 }
96 } 99 }
97 } 100 }
98 101
99 visitor() 102 visitor()
100 .visit(|fn_def: &ast::FnDef| { 103 .visit(|fn_def: ast::FnDef| {
101 let mut detail = String::from("fn"); 104 let mut detail = String::from("fn");
102 if let Some(type_param_list) = fn_def.type_param_list() { 105 if let Some(type_param_list) = fn_def.type_param_list() {
103 collapse_ws(type_param_list.syntax(), &mut detail); 106 collapse_ws(type_param_list.syntax(), &mut detail);
@@ -117,11 +120,14 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
117 .visit(decl::<ast::EnumVariant>) 120 .visit(decl::<ast::EnumVariant>)
118 .visit(decl::<ast::TraitDef>) 121 .visit(decl::<ast::TraitDef>)
119 .visit(decl::<ast::Module>) 122 .visit(decl::<ast::Module>)
120 .visit(|td: &ast::TypeAliasDef| decl_with_type_ref(td, td.type_ref())) 123 .visit(|td: ast::TypeAliasDef| {
124 let ty = td.type_ref();
125 decl_with_type_ref(td, ty)
126 })
121 .visit(decl_with_ascription::<ast::NamedFieldDef>) 127 .visit(decl_with_ascription::<ast::NamedFieldDef>)
122 .visit(decl_with_ascription::<ast::ConstDef>) 128 .visit(decl_with_ascription::<ast::ConstDef>)
123 .visit(decl_with_ascription::<ast::StaticDef>) 129 .visit(decl_with_ascription::<ast::StaticDef>)
124 .visit(|im: &ast::ImplBlock| { 130 .visit(|im: ast::ImplBlock| {
125 let target_type = im.target_type()?; 131 let target_type = im.target_type()?;
126 let target_trait = im.target_trait(); 132 let target_trait = im.target_trait();
127 let label = match target_trait { 133 let label = match target_trait {
@@ -142,14 +148,14 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
142 }; 148 };
143 Some(node) 149 Some(node)
144 }) 150 })
145 .visit(|mc: &ast::MacroCall| { 151 .visit(|mc: ast::MacroCall| {
146 let first_token = mc.syntax().first_token().unwrap(); 152 let first_token = mc.syntax().first_token().unwrap();
147 if first_token.text().as_str() != "macro_rules" { 153 if first_token.text().as_str() != "macro_rules" {
148 return None; 154 return None;
149 } 155 }
150 decl(mc) 156 decl(mc)
151 }) 157 })
152 .accept(node)? 158 .accept(&node)?
153} 159}
154 160
155#[cfg(test)] 161#[cfg(test)]
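In `structure.rs` above, `decl_with_ascription` now reads the ascribed type into a local before calling `decl_with_type_ref(node, ty)`: with both parameters taken by value, `decl_with_type_ref(node, node.ascribed_type())` would move `node` as the first argument and then try to use it for the second. A compilable sketch of that detail with invented types:

    // Invented types; only the move/borrow interplay is the point here.
    #[derive(Clone)]
    struct TypeRef {
        text: String,
    }
    struct NamedFieldDef {
        name: String,
        ty: Option<TypeRef>,
    }

    impl NamedFieldDef {
        fn ascribed_type(&self) -> Option<TypeRef> {
            self.ty.clone()
        }
    }

    struct StructureNode {
        label: String,
        detail: Option<String>,
    }

    // Both helpers now take the node by value, as in the diff.
    fn decl_with_type_ref(node: NamedFieldDef, type_ref: Option<TypeRef>) -> Option<StructureNode> {
        Some(StructureNode { label: node.name, detail: type_ref.map(|t| t.text) })
    }

    fn decl_with_ascription(node: NamedFieldDef) -> Option<StructureNode> {
        // The type must be read *before* `node` is moved into the call:
        // `decl_with_type_ref(node, node.ascribed_type())` would use `node`
        // after it had already been moved as the first argument.
        let ty = node.ascribed_type();
        decl_with_type_ref(node, ty)
    }

    fn main() {
        let field =
            NamedFieldDef { name: "offset".into(), ty: Some(TypeRef { text: "TextUnit".into() }) };
        let node = decl_with_ascription(field).unwrap();
        println!("{}: {:?}", node.label, node.detail);
    }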
diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs
index 491b15702..8c49960f5 100644
--- a/crates/ra_ide_api/src/extend_selection.rs
+++ b/crates/ra_ide_api/src/extend_selection.rs
@@ -42,7 +42,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
42 TokenAtOffset::None => return None, 42 TokenAtOffset::None => return None,
43 TokenAtOffset::Single(l) => { 43 TokenAtOffset::Single(l) => {
44 if string_kinds.contains(&l.kind()) { 44 if string_kinds.contains(&l.kind()) {
45 extend_single_word_in_comment_or_string(l, offset).unwrap_or_else(|| l.range()) 45 extend_single_word_in_comment_or_string(&l, offset).unwrap_or_else(|| l.range())
46 } else { 46 } else {
47 l.range() 47 l.range()
48 } 48 }
@@ -56,7 +56,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
56 if token.range() != range { 56 if token.range() != range {
57 return Some(token.range()); 57 return Some(token.range());
58 } 58 }
59 if let Some(comment) = ast::Comment::cast(token) { 59 if let Some(comment) = ast::Comment::cast(token.clone()) {
60 if let Some(range) = extend_comments(comment) { 60 if let Some(range) = extend_comments(comment) {
61 return Some(range); 61 return Some(range);
62 } 62 }
@@ -73,7 +73,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
73 let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap(); 73 let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap();
74 74
75 if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { 75 if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
76 if let Some(range) = extend_list_item(node) { 76 if let Some(range) = extend_list_item(&node) {
77 return Some(range); 77 return Some(range);
78 } 78 }
79 } 79 }
@@ -82,7 +82,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
82} 82}
83 83
84fn extend_single_word_in_comment_or_string( 84fn extend_single_word_in_comment_or_string(
85 leaf: SyntaxToken, 85 leaf: &SyntaxToken,
86 offset: TextUnit, 86 offset: TextUnit,
87) -> Option<TextRange> { 87) -> Option<TextRange> {
88 let text: &str = leaf.text(); 88 let text: &str = leaf.text();
@@ -131,9 +131,9 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextUnit) -> TextRange
131 ws.range() 131 ws.range()
132} 132}
133 133
134fn pick_best<'a>(l: SyntaxToken<'a>, r: SyntaxToken<'a>) -> SyntaxToken<'a> { 134fn pick_best<'a>(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken {
135 return if priority(r) > priority(l) { r } else { l }; 135 return if priority(&r) > priority(&l) { r } else { l };
136 fn priority(n: SyntaxToken) -> usize { 136 fn priority(n: &SyntaxToken) -> usize {
137 match n.kind() { 137 match n.kind() {
138 WHITESPACE => 0, 138 WHITESPACE => 0,
139 IDENT | T![self] | T![super] | T![crate] | LIFETIME => 2, 139 IDENT | T![self] | T![super] | T![crate] | LIFETIME => 2,
@@ -156,7 +156,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
156 SyntaxElement::Token(it) => is_single_line_ws(it), 156 SyntaxElement::Token(it) => is_single_line_ws(it),
157 }) 157 })
158 .next() 158 .next()
159 .and_then(|it| it.as_token()) 159 .and_then(|it| it.as_token().cloned())
160 .filter(|node| node.kind() == T![,]) 160 .filter(|node| node.kind() == T![,])
161 } 161 }
162 162
@@ -167,7 +167,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
167 // Include any following whitespace when comma if after list item. 167 // Include any following whitespace when comma if after list item.
168 let final_node = comma_node 168 let final_node = comma_node
169 .next_sibling_or_token() 169 .next_sibling_or_token()
170 .and_then(|it| it.as_token()) 170 .and_then(|it| it.as_token().cloned())
171 .filter(|node| is_single_line_ws(node)) 171 .filter(|node| is_single_line_ws(node))
172 .unwrap_or(comma_node); 172 .unwrap_or(comma_node);
173 173
@@ -178,8 +178,8 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
178} 178}
179 179
180fn extend_comments(comment: ast::Comment) -> Option<TextRange> { 180fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
181 let prev = adj_comments(comment, Direction::Prev); 181 let prev = adj_comments(&comment, Direction::Prev);
182 let next = adj_comments(comment, Direction::Next); 182 let next = adj_comments(&comment, Direction::Next);
183 if prev != next { 183 if prev != next {
184 Some(TextRange::from_to(prev.syntax().range().start(), next.syntax().range().end())) 184 Some(TextRange::from_to(prev.syntax().range().start(), next.syntax().range().end()))
185 } else { 185 } else {
@@ -187,14 +187,14 @@ fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
187 } 187 }
188} 188}
189 189
190fn adj_comments(comment: ast::Comment, dir: Direction) -> ast::Comment { 190fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment {
191 let mut res = comment; 191 let mut res = comment.clone();
192 for element in comment.syntax().siblings_with_tokens(dir) { 192 for element in comment.syntax().siblings_with_tokens(dir) {
193 let token = match element.as_token() { 193 let token = match element.as_token() {
194 None => break, 194 None => break,
195 Some(token) => token, 195 Some(token) => token,
196 }; 196 };
197 if let Some(c) = ast::Comment::cast(token) { 197 if let Some(c) = ast::Comment::cast(token.clone()) {
198 res = c 198 res = c
199 } else if token.kind() != WHITESPACE || token.text().contains("\n\n") { 199 } else if token.kind() != WHITESPACE || token.text().contains("\n\n") {
200 break; 200 break;
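The `extend_selection.rs` hunks show tokens becoming owned values rather than `Copy` handles with a lifetime: helpers now take `&SyntaxToken`, and element accessors are used as `.as_token().cloned()` where an owned token is needed. A self-contained sketch of that accessor pattern, with a hypothetical `SyntaxElement`/`SyntaxToken` pair:

    use std::rc::Rc;

    // Hypothetical owned token handle; the real SyntaxToken is a cheap clone, not Copy.
    #[derive(Clone, Debug)]
    struct SyntaxToken {
        kind: &'static str,
        text: Rc<String>,
    }

    enum SyntaxElement {
        Token(SyntaxToken),
        Node, // node payload elided for the sketch
    }

    impl SyntaxElement {
        // The accessor hands out a reference, so call sites that need ownership
        // write `.as_token().cloned()`, as in the hunks above.
        fn as_token(&self) -> Option<&SyntaxToken> {
            match self {
                SyntaxElement::Token(t) => Some(t),
                SyntaxElement::Node => None,
            }
        }
    }

    fn main() {
        let elements = vec![
            SyntaxElement::Node,
            SyntaxElement::Token(SyntaxToken { kind: "COMMA", text: Rc::new(",".into()) }),
        ];
        let comma: Option<SyntaxToken> =
            elements.iter().find_map(|it| it.as_token().cloned()).filter(|t| t.kind == "COMMA");
        if let Some(t) = comma {
            println!("found {} token: {}", t.kind, t.text);
        }
    }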
diff --git a/crates/ra_ide_api/src/folding_ranges.rs b/crates/ra_ide_api/src/folding_ranges.rs
index 9d4855a64..c2b981aed 100644
--- a/crates/ra_ide_api/src/folding_ranges.rs
+++ b/crates/ra_ide_api/src/folding_ranges.rs
@@ -30,7 +30,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
30 for element in file.syntax().descendants_with_tokens() { 30 for element in file.syntax().descendants_with_tokens() {
31 // Fold items that span multiple lines 31 // Fold items that span multiple lines
32 if let Some(kind) = fold_kind(element.kind()) { 32 if let Some(kind) = fold_kind(element.kind()) {
33 let is_multiline = match element { 33 let is_multiline = match &element {
34 SyntaxElement::Node(node) => node.text().contains('\n'), 34 SyntaxElement::Node(node) => node.text().contains('\n'),
35 SyntaxElement::Token(token) => token.text().contains('\n'), 35 SyntaxElement::Token(token) => token.text().contains('\n'),
36 }; 36 };
@@ -56,7 +56,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
56 SyntaxElement::Node(node) => { 56 SyntaxElement::Node(node) => {
57 // Fold groups of imports 57 // Fold groups of imports
58 if node.kind() == USE_ITEM && !visited_imports.contains(&node) { 58 if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
59 if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) { 59 if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) {
60 res.push(Fold { range, kind: FoldKind::Imports }) 60 res.push(Fold { range, kind: FoldKind::Imports })
61 } 61 }
62 } 62 }
@@ -65,7 +65,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
65 if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node) 65 if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node)
66 { 66 {
67 if let Some(range) = 67 if let Some(range) =
68 contiguous_range_for_group_unless(node, has_visibility, &mut visited_mods) 68 contiguous_range_for_group_unless(&node, has_visibility, &mut visited_mods)
69 { 69 {
70 res.push(Fold { range, kind: FoldKind::Mods }) 70 res.push(Fold { range, kind: FoldKind::Mods })
71 } 71 }
@@ -88,24 +88,24 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
88} 88}
89 89
90fn has_visibility(node: &SyntaxNode) -> bool { 90fn has_visibility(node: &SyntaxNode) -> bool {
91 ast::Module::cast(node).and_then(|m| m.visibility()).is_some() 91 ast::Module::cast(node.clone()).and_then(|m| m.visibility()).is_some()
92} 92}
93 93
94fn contiguous_range_for_group<'a>( 94fn contiguous_range_for_group(
95 first: &'a SyntaxNode, 95 first: &SyntaxNode,
96 visited: &mut FxHashSet<&'a SyntaxNode>, 96 visited: &mut FxHashSet<SyntaxNode>,
97) -> Option<TextRange> { 97) -> Option<TextRange> {
98 contiguous_range_for_group_unless(first, |_| false, visited) 98 contiguous_range_for_group_unless(first, |_| false, visited)
99} 99}
100 100
101fn contiguous_range_for_group_unless<'a>( 101fn contiguous_range_for_group_unless(
102 first: &'a SyntaxNode, 102 first: &SyntaxNode,
103 unless: impl Fn(&'a SyntaxNode) -> bool, 103 unless: impl Fn(&SyntaxNode) -> bool,
104 visited: &mut FxHashSet<&'a SyntaxNode>, 104 visited: &mut FxHashSet<SyntaxNode>,
105) -> Option<TextRange> { 105) -> Option<TextRange> {
106 visited.insert(first); 106 visited.insert(first.clone());
107 107
108 let mut last = first; 108 let mut last = first.clone();
109 for element in first.siblings_with_tokens(Direction::Next) { 109 for element in first.siblings_with_tokens(Direction::Next) {
110 let node = match element { 110 let node = match element {
111 SyntaxElement::Token(token) => { 111 SyntaxElement::Token(token) => {
@@ -123,15 +123,15 @@ fn contiguous_range_for_group_unless<'a>(
123 }; 123 };
124 124
125 // Stop if we find a node that doesn't belong to the group 125 // Stop if we find a node that doesn't belong to the group
126 if node.kind() != first.kind() || unless(node) { 126 if node.kind() != first.kind() || unless(&node) {
127 break; 127 break;
128 } 128 }
129 129
130 visited.insert(node); 130 visited.insert(node.clone());
131 last = node; 131 last = node;
132 } 132 }
133 133
134 if first != last { 134 if first != &last {
135 Some(TextRange::from_to(first.range().start(), last.range().end())) 135 Some(TextRange::from_to(first.range().start(), last.range().end()))
136 } else { 136 } else {
137 // The group consists of only one element, therefore it cannot be folded 137 // The group consists of only one element, therefore it cannot be folded
@@ -139,11 +139,11 @@ fn contiguous_range_for_group_unless<'a>(
139 } 139 }
140} 140}
141 141
142fn contiguous_range_for_comment<'a>( 142fn contiguous_range_for_comment(
143 first: ast::Comment<'a>, 143 first: ast::Comment,
144 visited: &mut FxHashSet<ast::Comment<'a>>, 144 visited: &mut FxHashSet<ast::Comment>,
145) -> Option<TextRange> { 145) -> Option<TextRange> {
146 visited.insert(first); 146 visited.insert(first.clone());
147 147
148 // Only fold comments of the same flavor 148 // Only fold comments of the same flavor
149 let group_kind = first.kind(); 149 let group_kind = first.kind();
@@ -151,11 +151,11 @@ fn contiguous_range_for_comment<'a>(
151 return None; 151 return None;
152 } 152 }
153 153
154 let mut last = first; 154 let mut last = first.clone();
155 for element in first.syntax().siblings_with_tokens(Direction::Next) { 155 for element in first.syntax().siblings_with_tokens(Direction::Next) {
156 match element { 156 match element {
157 SyntaxElement::Token(token) => { 157 SyntaxElement::Token(token) => {
158 if let Some(ws) = ast::Whitespace::cast(token) { 158 if let Some(ws) = ast::Whitespace::cast(token.clone()) {
159 if !ws.spans_multiple_lines() { 159 if !ws.spans_multiple_lines() {
160 // Ignore whitespace without blank lines 160 // Ignore whitespace without blank lines
161 continue; 161 continue;
@@ -163,7 +163,7 @@ fn contiguous_range_for_comment<'a>(
163 } 163 }
164 if let Some(c) = ast::Comment::cast(token) { 164 if let Some(c) = ast::Comment::cast(token) {
165 if c.kind() == group_kind { 165 if c.kind() == group_kind {
166 visited.insert(c); 166 visited.insert(c.clone());
167 last = c; 167 last = c;
168 continue; 168 continue;
169 } 169 }
@@ -193,7 +193,7 @@ mod tests {
193 fn do_check(text: &str, fold_kinds: &[FoldKind]) { 193 fn do_check(text: &str, fold_kinds: &[FoldKind]) {
194 let (ranges, text) = extract_ranges(text, "fold"); 194 let (ranges, text) = extract_ranges(text, "fold");
195 let parse = SourceFile::parse(&text); 195 let parse = SourceFile::parse(&text);
196 let folds = folding_ranges(parse.tree()); 196 let folds = folding_ranges(&parse.tree());
197 197
198 assert_eq!( 198 assert_eq!(
199 folds.len(), 199 folds.len(),
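In `folding_ranges.rs` the visited sets change from `FxHashSet<&'a SyntaxNode>` to `FxHashSet<SyntaxNode>`, so the lifetime parameters on the grouping helpers disappear and entries are inserted with `.clone()`. A sketch of the same idea using `std::collections::HashSet` (the real code uses `rustc_hash::FxHashSet`) and a made-up node type:

    use std::collections::HashSet;
    use std::rc::Rc;

    // Made-up node handle. Because the handle is Hash + Eq, the visited set can own
    // its entries, which is what lets the diff drop the `&'a SyntaxNode` lifetimes.
    #[derive(Clone, PartialEq, Eq, Hash)]
    struct SyntaxNode {
        kind: &'static str,
        text: Rc<str>,
    }

    fn group_len(first: &SyntaxNode, siblings: &[SyntaxNode], visited: &mut HashSet<SyntaxNode>) -> usize {
        // Entries are inserted as clones instead of borrows.
        visited.insert(first.clone());
        let mut len = 1;
        for node in siblings {
            if node.kind != first.kind {
                break;
            }
            visited.insert(node.clone());
            len += 1;
        }
        len
    }

    fn main() {
        let use1 = SyntaxNode { kind: "USE_ITEM", text: Rc::from("use std::fmt;") };
        let use2 = SyntaxNode { kind: "USE_ITEM", text: Rc::from("use std::rc::Rc;") };
        let func = SyntaxNode { kind: "FN_DEF", text: Rc::from("fn main() {}") };

        let mut visited = HashSet::new();
        assert_eq!(group_len(&use1, &[use2, func], &mut visited), 2);
        println!("group starting at `{}` has {} visited nodes", use1.text, visited.len());
    }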
diff --git a/crates/ra_ide_api/src/goto_definition.rs b/crates/ra_ide_api/src/goto_definition.rs
index 1066bf155..82b5e3b5e 100644
--- a/crates/ra_ide_api/src/goto_definition.rs
+++ b/crates/ra_ide_api/src/goto_definition.rs
@@ -20,13 +20,13 @@ pub(crate) fn goto_definition(
20 position: FilePosition, 20 position: FilePosition,
21) -> Option<RangeInfo<Vec<NavigationTarget>>> { 21) -> Option<RangeInfo<Vec<NavigationTarget>>> {
22 let parse = db.parse(position.file_id); 22 let parse = db.parse(position.file_id);
23 let syntax = parse.tree().syntax(); 23 let syntax = parse.tree().syntax().clone();
24 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { 24 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&syntax, position.offset) {
25 let navs = reference_definition(db, position.file_id, name_ref).to_vec(); 25 let navs = reference_definition(db, position.file_id, &name_ref).to_vec();
26 return Some(RangeInfo::new(name_ref.syntax().range(), navs.to_vec())); 26 return Some(RangeInfo::new(name_ref.syntax().range(), navs.to_vec()));
27 } 27 }
28 if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) { 28 if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) {
29 let navs = name_definition(db, position.file_id, name)?; 29 let navs = name_definition(db, position.file_id, &name)?;
30 return Some(RangeInfo::new(name.syntax().range(), navs)); 30 return Some(RangeInfo::new(name.syntax().range(), navs));
31 } 31 }
32 None 32 None
@@ -94,7 +94,7 @@ pub(crate) fn name_definition(
94) -> Option<Vec<NavigationTarget>> { 94) -> Option<Vec<NavigationTarget>> {
95 let parent = name.syntax().parent()?; 95 let parent = name.syntax().parent()?;
96 96
97 if let Some(module) = ast::Module::cast(&parent) { 97 if let Some(module) = ast::Module::cast(parent.clone()) {
98 if module.has_semi() { 98 if module.has_semi() {
99 if let Some(child_module) = 99 if let Some(child_module) =
100 hir::source_binder::module_from_declaration(db, file_id, module) 100 hir::source_binder::module_from_declaration(db, file_id, module)
@@ -114,38 +114,88 @@ pub(crate) fn name_definition(
114 114
115fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> { 115fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> {
116 visitor() 116 visitor()
117 .visit(|node: &ast::StructDef| { 117 .visit(|node: ast::StructDef| {
118 NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) 118 NavigationTarget::from_named(
119 file_id,
120 &node,
121 node.doc_comment_text(),
122 node.short_label(),
123 )
119 }) 124 })
120 .visit(|node: &ast::EnumDef| { 125 .visit(|node: ast::EnumDef| {
121 NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) 126 NavigationTarget::from_named(
127 file_id,
128 &node,
129 node.doc_comment_text(),
130 node.short_label(),
131 )
122 }) 132 })
123 .visit(|node: &ast::EnumVariant| { 133 .visit(|node: ast::EnumVariant| {
124 NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) 134 NavigationTarget::from_named(
135 file_id,
136 &node,
137 node.doc_comment_text(),
138 node.short_label(),
139 )
125 }) 140 })
126 .visit(|node: &ast::FnDef| { 141 .visit(|node: ast::FnDef| {
127 NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) 142 NavigationTarget::from_named(
143 file_id,
144 &node,
145 node.doc_comment_text(),
146 node.short_label(),
147 )
128 }) 148 })
129 .visit(|node: &ast::TypeAliasDef| { 149 .visit(|node: ast::TypeAliasDef| {
130 NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) 150 NavigationTarget::from_named(
151 file_id,
152 &node,
153 node.doc_comment_text(),
154 node.short_label(),
155 )
131 }) 156 })
132 .visit(|node: &ast::ConstDef| { 157 .visit(|node: ast::ConstDef| {
133 NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) 158 NavigationTarget::from_named(
159 file_id,
160 &node,
161 node.doc_comment_text(),
162 node.short_label(),
163 )
134 }) 164 })
135 .visit(|node: &ast::StaticDef| { 165 .visit(|node: ast::StaticDef| {
136 NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) 166 NavigationTarget::from_named(
167 file_id,
168 &node,
169 node.doc_comment_text(),
170 node.short_label(),
171 )
137 }) 172 })
138 .visit(|node: &ast::TraitDef| { 173 .visit(|node: ast::TraitDef| {
139 NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) 174 NavigationTarget::from_named(
175 file_id,
176 &node,
177 node.doc_comment_text(),
178 node.short_label(),
179 )
140 }) 180 })
141 .visit(|node: &ast::NamedFieldDef| { 181 .visit(|node: ast::NamedFieldDef| {
142 NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) 182 NavigationTarget::from_named(
183 file_id,
184 &node,
185 node.doc_comment_text(),
186 node.short_label(),
187 )
143 }) 188 })
144 .visit(|node: &ast::Module| { 189 .visit(|node: ast::Module| {
145 NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) 190 NavigationTarget::from_named(
191 file_id,
192 &node,
193 node.doc_comment_text(),
194 node.short_label(),
195 )
146 }) 196 })
147 .visit(|node: &ast::MacroCall| { 197 .visit(|node: ast::MacroCall| {
148 NavigationTarget::from_named(file_id, node, node.doc_comment_text(), None) 198 NavigationTarget::from_named(file_id, &node, node.doc_comment_text(), None)
149 }) 199 })
150 .accept(node) 200 .accept(node)
151} 201}
diff --git a/crates/ra_ide_api/src/goto_type_definition.rs b/crates/ra_ide_api/src/goto_type_definition.rs
index 6ce5e214f..fc4b6e1af 100644
--- a/crates/ra_ide_api/src/goto_type_definition.rs
+++ b/crates/ra_ide_api/src/goto_type_definition.rs
@@ -13,15 +13,17 @@ pub(crate) fn goto_type_definition(
13 token 13 token
14 .parent() 14 .parent()
15 .ancestors() 15 .ancestors()
16 .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some()) 16 .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())
17 })?; 17 })?;
18 18
19 let analyzer = hir::SourceAnalyzer::new(db, position.file_id, node, None); 19 let analyzer = hir::SourceAnalyzer::new(db, position.file_id, &node, None);
20 20
21 let ty: hir::Ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) 21 let ty: hir::Ty = if let Some(ty) =
22 ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
22 { 23 {
23 ty 24 ty
24 } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) { 25 } else if let Some(ty) = ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p))
26 {
25 ty 27 ty
26 } else { 28 } else {
27 return None; 29 return None;
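The pattern in `goto_type_definition.rs` above, `ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))` with a `Pat` fallback, clones the node handle once per attempted cast because each cast consumes its argument. A miniature, self-contained version with invented `Expr`/`Pat`/`Analyzer` types:

    // Invented miniature of the flow above: view an untyped node as an expression,
    // else as a pattern, and ask an "analyzer" for its type.
    #[derive(Clone)]
    struct SyntaxNode {
        kind: &'static str,
        text: String,
    }

    struct Expr(SyntaxNode);
    struct Pat(SyntaxNode);

    impl Expr {
        fn cast(node: SyntaxNode) -> Option<Expr> {
            if node.kind == "EXPR" { Some(Expr(node)) } else { None }
        }
    }
    impl Pat {
        fn cast(node: SyntaxNode) -> Option<Pat> {
            if node.kind == "PAT" { Some(Pat(node)) } else { None }
        }
    }

    struct Analyzer;
    impl Analyzer {
        fn type_of(&self, expr: &Expr) -> Option<String> {
            Some(format!("typeof({})", expr.0.text))
        }
        fn type_of_pat(&self, pat: &Pat) -> Option<String> {
            Some(format!("typeof({})", pat.0.text))
        }
    }

    fn type_at(node: &SyntaxNode, analyzer: &Analyzer) -> Option<String> {
        // Each cast consumes its argument, so the shared handle is cloned per attempt,
        // mirroring `ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))`.
        if let Some(ty) = Expr::cast(node.clone()).and_then(|e| analyzer.type_of(&e)) {
            Some(ty)
        } else if let Some(ty) = Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(&p)) {
            Some(ty)
        } else {
            None
        }
    }

    fn main() {
        let node = SyntaxNode { kind: "EXPR", text: "1 + 1".into() };
        println!("{:?}", type_at(&node, &Analyzer));
    }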
diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs
index 253d21f48..e503bf6a9 100644
--- a/crates/ra_ide_api/src/hover.rs
+++ b/crates/ra_ide_api/src/hover.rs
@@ -6,7 +6,7 @@ use ra_syntax::{
6 visit::{visitor, Visitor}, 6 visit::{visitor, Visitor},
7 }, 7 },
8 ast::{self, DocCommentsOwner}, 8 ast::{self, DocCommentsOwner},
9 AstNode, TreeArc, 9 AstNode,
10}; 10};
11 11
12use crate::{ 12use crate::{
@@ -104,7 +104,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
104 104
105 let mut no_fallback = false; 105 let mut no_fallback = false;
106 106
107 match classify_name_ref(db, &analyzer, name_ref) { 107 match classify_name_ref(db, &analyzer, &name_ref) {
108 Some(Method(it)) => res.extend(from_def_source(db, it)), 108 Some(Method(it)) => res.extend(from_def_source(db, it)),
109 Some(Macro(it)) => { 109 Some(Macro(it)) => {
110 let src = it.source(db); 110 let src = it.source(db);
@@ -163,7 +163,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
163 163
164 if res.is_empty() && !no_fallback { 164 if res.is_empty() && !no_fallback {
165 // Fallback index based approach: 165 // Fallback index based approach:
166 let symbols = crate::symbol_index::index_resolve(db, name_ref); 166 let symbols = crate::symbol_index::index_resolve(db, &name_ref);
167 for sym in symbols { 167 for sym in symbols {
168 let docs = docs_from_symbol(db, &sym); 168 let docs = docs_from_symbol(db, &sym);
169 let desc = description_from_symbol(db, &sym); 169 let desc = description_from_symbol(db, &sym);
@@ -177,34 +177,32 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
177 } else if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) { 177 } else if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) {
178 if let Some(parent) = name.syntax().parent() { 178 if let Some(parent) = name.syntax().parent() {
179 let text = visitor() 179 let text = visitor()
180 .visit(|node: &ast::StructDef| { 180 .visit(|node: ast::StructDef| {
181 hover_text(node.doc_comment_text(), node.short_label()) 181 hover_text(node.doc_comment_text(), node.short_label())
182 }) 182 })
183 .visit(|node: &ast::EnumDef| { 183 .visit(|node: ast::EnumDef| hover_text(node.doc_comment_text(), node.short_label()))
184 .visit(|node: ast::EnumVariant| {
184 hover_text(node.doc_comment_text(), node.short_label()) 185 hover_text(node.doc_comment_text(), node.short_label())
185 }) 186 })
186 .visit(|node: &ast::EnumVariant| { 187 .visit(|node: ast::FnDef| hover_text(node.doc_comment_text(), node.short_label()))
188 .visit(|node: ast::TypeAliasDef| {
187 hover_text(node.doc_comment_text(), node.short_label()) 189 hover_text(node.doc_comment_text(), node.short_label())
188 }) 190 })
189 .visit(|node: &ast::FnDef| hover_text(node.doc_comment_text(), node.short_label())) 191 .visit(|node: ast::ConstDef| {
190 .visit(|node: &ast::TypeAliasDef| {
191 hover_text(node.doc_comment_text(), node.short_label()) 192 hover_text(node.doc_comment_text(), node.short_label())
192 }) 193 })
193 .visit(|node: &ast::ConstDef| { 194 .visit(|node: ast::StaticDef| {
194 hover_text(node.doc_comment_text(), node.short_label()) 195 hover_text(node.doc_comment_text(), node.short_label())
195 }) 196 })
196 .visit(|node: &ast::StaticDef| { 197 .visit(|node: ast::TraitDef| {
197 hover_text(node.doc_comment_text(), node.short_label()) 198 hover_text(node.doc_comment_text(), node.short_label())
198 }) 199 })
199 .visit(|node: &ast::TraitDef| { 200 .visit(|node: ast::NamedFieldDef| {
200 hover_text(node.doc_comment_text(), node.short_label()) 201 hover_text(node.doc_comment_text(), node.short_label())
201 }) 202 })
202 .visit(|node: &ast::NamedFieldDef| { 203 .visit(|node: ast::Module| hover_text(node.doc_comment_text(), node.short_label()))
203 hover_text(node.doc_comment_text(), node.short_label()) 204 .visit(|node: ast::MacroCall| hover_text(node.doc_comment_text(), None))
204 }) 205 .accept(&parent);
205 .visit(|node: &ast::Module| hover_text(node.doc_comment_text(), node.short_label()))
206 .visit(|node: &ast::MacroCall| hover_text(node.doc_comment_text(), None))
207 .accept(parent);
208 206
209 if let Some(text) = text { 207 if let Some(text) = text {
210 res.extend(text); 208 res.extend(text);
@@ -217,8 +215,9 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
217 } 215 }
218 216
219 if range.is_none() { 217 if range.is_none() {
220 let node = ancestors_at_offset(file.syntax(), position.offset) 218 let node = ancestors_at_offset(file.syntax(), position.offset).find(|n| {
221 .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())?; 219 ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()
220 })?;
222 let frange = FileRange { file_id: position.file_id, range: node.range() }; 221 let frange = FileRange { file_id: position.file_id, range: node.range() };
223 res.extend(type_of(db, frange).map(rust_code_markup)); 222 res.extend(type_of(db, frange).map(rust_code_markup));
224 range = Some(node.range()); 223 range = Some(node.range());
@@ -233,7 +232,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
233 232
234 fn from_def_source<A, D>(db: &RootDatabase, def: D) -> Option<String> 233 fn from_def_source<A, D>(db: &RootDatabase, def: D) -> Option<String>
235 where 234 where
236 D: HasSource<Ast = TreeArc<A>>, 235 D: HasSource<Ast = A>,
237 A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, 236 A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
238 { 237 {
239 let src = def.source(db); 238 let src = def.source(db);
@@ -243,17 +242,17 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
243 242
244pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { 243pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
245 let parse = db.parse(frange.file_id); 244 let parse = db.parse(frange.file_id);
246 let syntax = parse.tree().syntax(); 245 let leaf_node = find_covering_element(parse.tree().syntax(), frange.range);
247 let leaf_node = find_covering_element(syntax, frange.range);
248 // if we picked identifier, expand to pattern/expression 246 // if we picked identifier, expand to pattern/expression
249 let node = leaf_node 247 let node = leaf_node
250 .ancestors() 248 .ancestors()
251 .take_while(|it| it.range() == leaf_node.range()) 249 .take_while(|it| it.range() == leaf_node.range())
252 .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())?; 250 .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
253 let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, node, None); 251 let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, &node, None);
254 let ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) { 252 let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
253 {
255 ty 254 ty
256 } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) { 255 } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) {
257 ty 256 ty
258 } else { 257 } else {
259 return None; 258 return None;
diff --git a/crates/ra_ide_api/src/impls.rs b/crates/ra_ide_api/src/impls.rs
index 6d69f36aa..15999d433 100644
--- a/crates/ra_ide_api/src/impls.rs
+++ b/crates/ra_ide_api/src/impls.rs
@@ -9,19 +9,19 @@ pub(crate) fn goto_implementation(
9 position: FilePosition, 9 position: FilePosition,
10) -> Option<RangeInfo<Vec<NavigationTarget>>> { 10) -> Option<RangeInfo<Vec<NavigationTarget>>> {
11 let parse = db.parse(position.file_id); 11 let parse = db.parse(position.file_id);
12 let syntax = parse.tree().syntax(); 12 let syntax = parse.tree().syntax().clone();
13 13
14 let module = source_binder::module_from_position(db, position)?; 14 let module = source_binder::module_from_position(db, position)?;
15 15
16 if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(syntax, position.offset) { 16 if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) {
17 return Some(RangeInfo::new( 17 return Some(RangeInfo::new(
18 nominal_def.syntax().range(), 18 nominal_def.syntax().range(),
19 impls_for_def(db, nominal_def, module)?, 19 impls_for_def(db, &nominal_def, module)?,
20 )); 20 ));
21 } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(syntax, position.offset) { 21 } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) {
22 return Some(RangeInfo::new( 22 return Some(RangeInfo::new(
23 trait_def.syntax().range(), 23 trait_def.syntax().range(),
24 impls_for_trait(db, trait_def, module)?, 24 impls_for_trait(db, &trait_def, module)?,
25 )); 25 ));
26 } 26 }
27 27
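`impls.rs` and `goto_definition.rs` both switch to `let syntax = parse.tree().syntax().clone();`. Presumably `tree()` now returns the file by value, so borrowing its root with `.syntax()` alone would borrow a temporary; cloning the root handle keeps an owned node alive. A sketch of why the clone is needed, with stand-in types rather than the real `Parse`/`SourceFile` API:

    use std::rc::Rc;

    // Stand-in types; the point is the borrow of a temporary, not the real API.
    #[derive(Clone)]
    struct SyntaxNode {
        text: Rc<String>,
    }

    struct SourceFile {
        root: SyntaxNode,
    }
    impl SourceFile {
        fn syntax(&self) -> &SyntaxNode {
            &self.root
        }
    }

    struct Parse {
        file: SourceFile,
    }
    impl Parse {
        // Returns the tree by value, like the assumed new `Parse::tree`.
        fn tree(&self) -> SourceFile {
            SourceFile { root: self.file.root.clone() }
        }
    }

    fn main() {
        let parse = Parse {
            file: SourceFile { root: SyntaxNode { text: Rc::new("fn main() {}".into()) } },
        };
        // `let syntax = parse.tree().syntax();` would not compile: the SourceFile
        // returned by `tree()` is dropped at the end of the statement. Cloning the
        // root handle, as the diff does, keeps an owned node past that point.
        let syntax = parse.tree().syntax().clone();
        println!("{}", syntax.text);
    }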
diff --git a/crates/ra_ide_api/src/join_lines.rs b/crates/ra_ide_api/src/join_lines.rs
index e20cb1370..50bcfb5b7 100644
--- a/crates/ra_ide_api/src/join_lines.rs
+++ b/crates/ra_ide_api/src/join_lines.rs
@@ -27,7 +27,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
27 SyntaxElement::Token(token) => token.parent(), 27 SyntaxElement::Token(token) => token.parent(),
28 }; 28 };
29 let mut edit = TextEditBuilder::default(); 29 let mut edit = TextEditBuilder::default();
30 for token in node.descendants_with_tokens().filter_map(|it| it.as_token()) { 30 for token in node.descendants_with_tokens().filter_map(|it| it.as_token().cloned()) {
31 let range = match range.intersection(&token.range()) { 31 let range = match range.intersection(&token.range()) {
32 Some(range) => range, 32 Some(range) => range,
33 None => continue, 33 None => continue,
@@ -37,7 +37,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
37 let pos: TextUnit = (pos as u32).into(); 37 let pos: TextUnit = (pos as u32).into();
38 let off = token.range().start() + range.start() + pos; 38 let off = token.range().start() + range.start() + pos;
39 if !edit.invalidates_offset(off) { 39 if !edit.invalidates_offset(off) {
40 remove_newline(&mut edit, token, off); 40 remove_newline(&mut edit, &token, off);
41 } 41 }
42 } 42 }
43 } 43 }
@@ -45,7 +45,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
45 edit.finish() 45 edit.finish()
46} 46}
47 47
48fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUnit) { 48fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextUnit) {
49 if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { 49 if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
50 // The node is either the first or the last in the file 50 // The node is either the first or the last in the file
51 let suff = &token.text()[TextRange::from_to( 51 let suff = &token.text()[TextRange::from_to(
@@ -98,9 +98,10 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn
98 TextRange::from_to(prev.range().start(), token.range().end()), 98 TextRange::from_to(prev.range().start(), token.range().end()),
99 space.to_string(), 99 space.to_string(),
100 ); 100 );
101 } else if let (Some(_), Some(next)) = 101 } else if let (Some(_), Some(next)) = (
102 (prev.as_token().and_then(ast::Comment::cast), next.as_token().and_then(ast::Comment::cast)) 102 prev.as_token().cloned().and_then(ast::Comment::cast),
103 { 103 next.as_token().cloned().and_then(ast::Comment::cast),
104 ) {
104 // Removes: newline (incl. surrounding whitespace), start of the next comment 105 // Removes: newline (incl. surrounding whitespace), start of the next comment
105 edit.delete(TextRange::from_to( 106 edit.delete(TextRange::from_to(
106 token.range().start(), 107 token.range().start(),
@@ -113,16 +114,16 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn
113} 114}
114 115
115fn has_comma_after(node: &SyntaxNode) -> bool { 116fn has_comma_after(node: &SyntaxNode) -> bool {
116 match non_trivia_sibling(node.into(), Direction::Next) { 117 match non_trivia_sibling(node.clone().into(), Direction::Next) {
117 Some(n) => n.kind() == T![,], 118 Some(n) => n.kind() == T![,],
118 _ => false, 119 _ => false,
119 } 120 }
120} 121}
121 122
122fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> { 123fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
123 let block = ast::Block::cast(token.parent())?; 124 let block = ast::Block::cast(token.parent())?;
124 let block_expr