author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2019-07-19 12:15:55 +0100
committer  bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2019-07-19 12:15:55 +0100
commit     f209843e31af7f0e0212aa28ffec2efad2a70c6f
tree       548227da78a3bea644f57714d075410c0bdf7469 /crates
parent     58d4983ba5745975446d60f2886d96f8d2adf0f2
parent     d4a66166c002f0a49e41d856a49cb5685ac93202
Merge #1545
1545: migrate ra_syntax to the new rowan API r=matklad a=matklad
Co-authored-by: Aleksey Kladov <[email protected]>
Diffstat (limited to 'crates')
91 files changed, 1980 insertions, 3096 deletions
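Every change in this diff follows the same mechanical pattern: in the old rowan API an AST node was either borrowed from the tree (&'a N, SyntaxToken<'a>, SyntaxElement<'a>) or kept alive through an owning TreeArc<N>, so signatures carried lifetimes and call sites passed references. In the new API a node is an owned, cheaply clonable handle into the shared tree, so the lifetime parameters and TreeArc wrappers disappear, functions take and return plain N, and call sites add .clone() wherever a second handle is needed. The sketch below models that ownership shift with a hypothetical Node type built on Rc; it is an illustration of the pattern only, not rowan's actual API.

    use std::rc::Rc;

    // Hypothetical stand-in for a syntax node: an owned, cheap-to-clone,
    // reference-counted handle, mirroring the new node type used in this diff.
    #[derive(Clone, Debug)]
    struct Node(Rc<String>);

    // Old style (before this commit), roughly: borrow the node and return an
    // owning pointer, e.g. fn strip(item: &Node) -> TreeArc<Node>.
    // New style: take and return owned handles; callers clone when they still
    // need their own copy afterwards.
    fn strip(item: Node) -> Node {
        Node(Rc::new(item.0.trim().to_string()))
    }

    fn main() {
        let original = Node(Rc::new("  fn f() {}  ".to_string()));
        // Cloning only bumps a reference count, so passing by value stays cheap.
        let stripped = strip(original.clone());
        println!("{:?} -> {:?}", original, stripped);
    }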
diff --git a/crates/ra_assists/src/add_derive.rs b/crates/ra_assists/src/add_derive.rs
index bf7d55d6d..f19196f53 100644
--- a/crates/ra_assists/src/add_derive.rs
+++ b/crates/ra_assists/src/add_derive.rs
@@ -9,7 +9,7 @@ use crate::{Assist, AssistCtx, AssistId};
 
 pub(crate) fn add_derive(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let nominal = ctx.node_at_offset::<ast::NominalDef>()?;
-    let node_start = derive_insertion_offset(nominal)?;
+    let node_start = derive_insertion_offset(&nominal)?;
     ctx.add_action(AssistId("add_derive"), "add `#[derive]`", |edit| {
         let derive_attr = nominal
             .attrs()
diff --git a/crates/ra_assists/src/add_explicit_type.rs b/crates/ra_assists/src/add_explicit_type.rs
index bb47a32f0..a69cfc8e3 100644
--- a/crates/ra_assists/src/add_explicit_type.rs
+++ b/crates/ra_assists/src/add_explicit_type.rs
@@ -27,7 +27,7 @@ pub(crate) fn add_explicit_type(mut ctx: AssistCtx<impl HirDatabase>) -> Option<
     // Infer type
     let db = ctx.db;
     let analyzer = hir::SourceAnalyzer::new(db, ctx.frange.file_id, stmt.syntax(), None);
-    let ty = analyzer.type_of(db, expr)?;
+    let ty = analyzer.type_of(db, &expr)?;
     // Assist not applicable if the type is unknown
     if is_unknown(&ty) {
         return None;
diff --git a/crates/ra_assists/src/add_impl.rs b/crates/ra_assists/src/add_impl.rs
index b81922c1d..cebc19539 100644
--- a/crates/ra_assists/src/add_impl.rs
+++ b/crates/ra_assists/src/add_impl.rs
@@ -16,7 +16,7 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
         let start_offset = nominal.syntax().range().end();
         let mut buf = String::new();
         buf.push_str("\n\nimpl");
-        if let Some(type_params) = type_params {
+        if let Some(type_params) = &type_params {
             type_params.syntax().text().push_to(&mut buf);
         }
         buf.push_str(" ");
@@ -25,9 +25,9 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
             let lifetime_params = type_params
                 .lifetime_params()
                 .filter_map(|it| it.lifetime_token())
-                .map(|it| it.text());
+                .map(|it| it.text().clone());
             let type_params =
-                type_params.type_params().filter_map(|it| it.name()).map(|it| it.text());
+                type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
             join(lifetime_params.chain(type_params)).surround_with("<", ">").to_buf(&mut buf);
         }
         buf.push_str(" {\n");
diff --git a/crates/ra_assists/src/add_missing_impl_members.rs b/crates/ra_assists/src/add_missing_impl_members.rs
index 6ffdad0b1..b992a4dc8 100644
--- a/crates/ra_assists/src/add_missing_impl_members.rs
+++ b/crates/ra_assists/src/add_missing_impl_members.rs
@@ -5,8 +5,8 @@ use crate::{
 
 use hir::{db::HirDatabase, HasSource};
 use ra_db::FilePosition;
-use ra_syntax::ast::{self, AstNode, ImplItem, ImplItemKind, NameOwner};
-use ra_syntax::{SmolStr, TreeArc};
+use ra_syntax::ast::{self, AstNode, ImplItemKind, NameOwner};
+use ra_syntax::SmolStr;
 
 #[derive(PartialEq)]
 enum AddMissingImplMembersMode {
@@ -46,16 +46,16 @@ fn add_missing_impl_members_inner(
         let position = FilePosition { file_id, offset: impl_node.syntax().range().start() };
         let analyzer = hir::SourceAnalyzer::new(ctx.db, position.file_id, impl_node.syntax(), None);
 
-        resolve_target_trait_def(ctx.db, &analyzer, impl_node)?
+        resolve_target_trait_def(ctx.db, &analyzer, &impl_node)?
     };
 
-    let def_name = |kind| -> Option<&SmolStr> {
+    let def_name = |kind| -> Option<SmolStr> {
         match kind {
-            ImplItemKind::FnDef(def) => def.name(),
-            ImplItemKind::TypeAliasDef(def) => def.name(),
-            ImplItemKind::ConstDef(def) => def.name(),
+            ast::ImplItemKind::FnDef(def) => def.name(),
+            ast::ImplItemKind::TypeAliasDef(def) => def.name(),
+            ast::ImplItemKind::ConstDef(def) => def.name(),
         }
-        .map(ast::Name::text)
+        .map(|it| it.text().clone())
     };
 
     let trait_items = trait_def.item_list()?.impl_items();
@@ -78,18 +78,13 @@ fn add_missing_impl_members_inner(
 
     ctx.add_action(AssistId(assist_id), label, |edit| {
         let n_existing_items = impl_item_list.impl_items().count();
-        let items: Vec<_> = missing_items
-            .into_iter()
-            .map(|it| match it.kind() {
-                ImplItemKind::FnDef(def) => {
-                    strip_docstring(ImplItem::cast(add_body(def).syntax()).unwrap())
-                }
-                _ => strip_docstring(it),
-            })
-            .collect();
+        let items = missing_items.into_iter().map(|it| match it.kind() {
+            ImplItemKind::FnDef(def) => strip_docstring(add_body(def).into()),
+            _ => strip_docstring(it),
+        });
         let mut ast_editor = AstEditor::new(impl_item_list);
 
-        ast_editor.append_items(items.iter().map(|it| &**it));
+        ast_editor.append_items(items);
 
         let first_new_item = ast_editor.ast().impl_items().nth(n_existing_items).unwrap();
         let cursor_position = first_new_item.syntax().range().start();
@@ -101,14 +96,14 @@ fn add_missing_impl_members_inner(
     ctx.build()
 }
 
-fn strip_docstring(item: &ast::ImplItem) -> TreeArc<ast::ImplItem> {
+fn strip_docstring(item: ast::ImplItem) -> ast::ImplItem {
     let mut ast_editor = AstEditor::new(item);
     ast_editor.strip_attrs_and_docs();
     ast_editor.ast().to_owned()
 }
 
-fn add_body(fn_def: &ast::FnDef) -> TreeArc<ast::FnDef> {
-    let mut ast_editor = AstEditor::new(fn_def);
+fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
+    let mut ast_editor = AstEditor::new(fn_def.clone());
     if fn_def.body().is_none() {
         ast_editor.set_body(&AstBuilder::<ast::Block>::single_expr(
             &AstBuilder::<ast::Expr>::unimplemented(),
@@ -123,9 +118,12 @@ fn resolve_target_trait_def(
     db: &impl HirDatabase,
     analyzer: &hir::SourceAnalyzer,
     impl_block: &ast::ImplBlock,
-) -> Option<TreeArc<ast::TraitDef>> {
-    let ast_path =
-        impl_block.target_trait().map(AstNode::syntax).and_then(ast::PathType::cast)?.path()?;
+) -> Option<ast::TraitDef> {
+    let ast_path = impl_block
+        .target_trait()
+        .map(|it| it.syntax().clone())
+        .and_then(ast::PathType::cast)?
+        .path()?;
 
     match analyzer.resolve_path(db, &ast_path) {
         Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).ast),
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 34b207154..e52085f85 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -49,7 +49,7 @@ pub(crate) enum Assist {
 pub(crate) struct AssistCtx<'a, DB> {
     pub(crate) db: &'a DB,
     pub(crate) frange: FileRange,
-    source_file: &'a SourceFile,
+    source_file: SourceFile,
     should_compute_edit: bool,
     assist: Assist,
 }
@@ -59,7 +59,7 @@ impl<'a, DB> Clone for AssistCtx<'a, DB> {
         AssistCtx {
             db: self.db,
             frange: self.frange,
-            source_file: self.source_file,
+            source_file: self.source_file.clone(),
             should_compute_edit: self.should_compute_edit,
             assist: self.assist.clone(),
         }
@@ -104,18 +104,18 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
         Some(self.assist)
     }
 
-    pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken<'a>> {
+    pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
         find_token_at_offset(self.source_file.syntax(), self.frange.range.start())
     }
 
-    pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<&'a N> {
+    pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<N> {
         find_node_at_offset(self.source_file.syntax(), self.frange.range.start())
     }
-    pub(crate) fn covering_element(&self) -> SyntaxElement<'a> {
+    pub(crate) fn covering_element(&self) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), self.frange.range)
     }
 
-    pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement<'a> {
+    pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), range)
     }
 }
@@ -139,7 +139,7 @@ impl AssistBuilder {
     ) {
         let mut replace_with = replace_with.into();
         if let Some(indent) = leading_indent(node) {
-            replace_with = reindent(&replace_with, indent)
+            replace_with = reindent(&replace_with, &indent)
         }
         self.replace(node.range(), replace_with)
     }
diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs
index 7b743c9f0..5fbcadfee 100644
--- a/crates/ra_assists/src/ast_editor.rs
+++ b/crates/ra_assists/src/ast_editor.rs
@@ -4,18 +4,18 @@ use arrayvec::ArrayVec;
 use hir::Name;
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, TreeArc, T,
+    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, T,
 };
 use ra_text_edit::TextEditBuilder;
 
 pub struct AstEditor<N: AstNode> {
-    original_ast: TreeArc<N>,
-    ast: TreeArc<N>,
+    original_ast: N,
+    ast: N,
 }
 
 impl<N: AstNode> AstEditor<N> {
-    pub fn new(node: &N) -> AstEditor<N> {
-        AstEditor { original_ast: node.to_owned(), ast: node.to_owned() }
+    pub fn new(node: N) -> AstEditor<N> {
+        AstEditor { original_ast: node.clone(), ast: node }
     }
 
     pub fn into_text_edit(self, builder: &mut TextEditBuilder) {
@@ -26,27 +26,27 @@ impl<N: AstNode> AstEditor<N> {
     }
 
     pub fn ast(&self) -> &N {
-        &*self.ast
+        &self.ast
     }
 
     #[must_use]
-    fn insert_children<'a>(
+    fn insert_children(
         &self,
-        position: InsertPosition<SyntaxElement<'_>>,
-        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
-    ) -> TreeArc<N> {
+        position: InsertPosition<SyntaxElement>,
+        to_insert: impl Iterator<Item = SyntaxElement>,
+    ) -> N {
         let new_syntax = self.ast().syntax().insert_children(position, to_insert);
-        N::cast(&new_syntax).unwrap().to_owned()
+        N::cast(new_syntax).unwrap()
     }
 
     #[must_use]
-    fn replace_children<'a>(
+    fn replace_children(
         &self,
-        to_delete: RangeInclusive<SyntaxElement<'_>>,
-        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
-    ) -> TreeArc<N> {
+        to_delete: RangeInclusive<SyntaxElement>,
+        to_insert: impl Iterator<Item = SyntaxElement>,
+    ) -> N {
         let new_syntax = self.ast().syntax().replace_children(to_delete, to_insert);
-        N::cast(&new_syntax).unwrap().to_owned()
+        N::cast(new_syntax).unwrap()
     }
 
     fn do_make_multiline(&mut self) {
@@ -66,16 +66,18 @@ impl<N: AstNode> AstEditor<N> {
                 if ws.text().contains('\n') {
                     return;
                 }
-                Some(ws)
+                Some(ws.clone())
             }
         };
 
-        let indent = leading_indent(self.ast().syntax()).unwrap_or("");
+        let indent = leading_indent(self.ast().syntax()).unwrap_or("".into());
         let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
         let to_insert = iter::once(ws.ws().into());
         self.ast = match existing_ws {
             None => self.insert_children(InsertPosition::After(l_curly), to_insert),
-            Some(ws) => self.replace_children(RangeInclusive::new(ws.into(), ws.into()), to_insert),
+            Some(ws) => {
+                self.replace_children(RangeInclusive::new(ws.clone().into(), ws.into()), to_insert)
+            }
         };
     }
 }
@@ -95,7 +97,7 @@ impl AstEditor<ast::NamedFieldList> {
         let space = if is_multiline {
             ws = tokens::WsBuilder::new(&format!(
                 "\n{} ",
-                leading_indent(self.ast().syntax()).unwrap_or("")
+                leading_indent(self.ast().syntax()).unwrap_or("".into())
             ));
             ws.ws()
         } else {
@@ -104,7 +106,7 @@ impl AstEditor<ast::NamedFieldList> {
 
         let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new();
         to_insert.push(space.into());
-        to_insert.push(field.syntax().into());
+        to_insert.push(field.syntax().clone().into());
         to_insert.push(tokens::comma().into());
 
         macro_rules! after_l_curly {
@@ -127,7 +129,7 @@ impl AstEditor<ast::NamedFieldList> {
                     InsertPosition::After(comma)
                 } else {
                     to_insert.insert(0, tokens::comma().into());
-                    InsertPosition::After($anchor.syntax().into())
+                    InsertPosition::After($anchor.syntax().clone().into())
                 }
             };
         };
@@ -144,7 +146,9 @@ impl AstEditor<ast::NamedFieldList> {
                     None => after_l_curly!(),
                 }
             }
-            InsertPosition::Before(anchor) => InsertPosition::Before(anchor.syntax().into()),
+            InsertPosition::Before(anchor) => {
+                InsertPosition::Before(anchor.syntax().clone().into())
+            }
             InsertPosition::After(anchor) => after_field!(anchor),
         };
 
@@ -157,7 +161,7 @@ impl AstEditor<ast::NamedFieldList> {
 }
 
 impl AstEditor<ast::ItemList> {
-    pub fn append_items<'a>(&mut self, items: impl Iterator<Item = &'a ast::ImplItem>) {
+    pub fn append_items(&mut self, items: impl Iterator<Item = ast::ImplItem>) {
         let n_existing_items = self.ast().impl_items().count();
         if n_existing_items == 0 {
             self.do_make_multiline();
@@ -165,22 +169,23 @@ impl AstEditor<ast::ItemList> {
         items.for_each(|it| self.append_item(it));
     }
 
-    pub fn append_item(&mut self, item: &ast::ImplItem) {
+    pub fn append_item(&mut self, item: ast::ImplItem) {
         let (indent, position) = match self.ast().impl_items().last() {
             Some(it) => (
-                leading_indent(it.syntax()).unwrap_or("").to_string(),
-                InsertPosition::After(it.syntax().into()),
+                leading_indent(it.syntax()).unwrap_or_default().to_string(),
+                InsertPosition::After(it.syntax().clone().into()),
             ),
             None => match self.l_curly() {
                 Some(it) => (
-                    " ".to_string() + leading_indent(self.ast().syntax()).unwrap_or(""),
+                    " ".to_string() + &leading_indent(self.ast().syntax()).unwrap_or_default(),
                     InsertPosition::After(it),
                 ),
                 None => return,
             },
         };
         let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
-        let to_insert: ArrayVec<[SyntaxElement; 2]> = [ws.ws().into(), item.syntax().into()].into();
+        let to_insert: ArrayVec<[SyntaxElement; 2]> =
+            [ws.ws().into(), item.syntax().clone().into()].into();
         self.ast = self.insert_children(position, to_insert.into_iter());
     }
 
@@ -197,9 +202,9 @@ impl AstEditor<ast::ImplItem> {
             .children_with_tokens()
             .find(|it| it.kind() == ATTR || it.kind() == COMMENT)
         {
-            let end = match start.next_sibling_or_token() {
-                Some(el) if el.kind() == WHITESPACE => el,
-                Some(_) | None => start,
+            let end = match &start.next_sibling_or_token() {
+                Some(el) if el.kind() == WHITESPACE => el.clone(),
+                Some(_) | None => start.clone(),
             };
             self.ast = self.replace_children(RangeInclusive::new(start, end), iter::empty());
         }
@@ -210,18 +215,18 @@ impl AstEditor<ast::FnDef> {
     pub fn set_body(&mut self, body: &ast::Block) {
         let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
         let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.ast().body() {
-            old_body.syntax().into()
+            old_body.syntax().clone().into()
         } else if let Some(semi) = self.ast().semicolon_token() {
             to_insert.push(tokens::single_space().into());
             semi.into()
         } else {
             to_insert.push(tokens::single_space().into());
-            to_insert.push(body.syntax().into());
+            to_insert.push(body.syntax().clone().into());
             self.ast = self.insert_children(InsertPosition::Last, to_insert.into_iter());
             return;
         };
-        to_insert.push(body.syntax().into());
-        let replace_range = RangeInclusive::new(old_body_or_semi, old_body_or_semi);
+        to_insert.push(body.syntax().clone().into());
+        let replace_range = RangeInclusive::new(old_body_or_semi.clone(), old_body_or_semi);
         self.ast = self.replace_children(replace_range, to_insert.into_iter())
     }
 }
@@ -231,15 +236,15 @@ pub struct AstBuilder<N: AstNode> {
 }
 
 impl AstBuilder<ast::NamedField> {
-    pub fn from_name(name: &Name) -> TreeArc<ast::NamedField> {
+    pub fn from_name(name: &Name) -> ast::NamedField {
         ast_node_from_file_text(&format!("fn f() {{ S {{ {}: (), }} }}", name))
     }
 
-    fn from_text(text: &str) -> TreeArc<ast::NamedField> {
+    fn from_text(text: &str) -> ast::NamedField {
         ast_node_from_file_text(&format!("fn f() {{ S {{ {}, }} }}", text))
     }
 
-    pub fn from_pieces(name: &ast::NameRef, expr: Option<&ast::Expr>) -> TreeArc<ast::NamedField> {
+    pub fn from_pieces(name: &ast::NameRef, expr: Option<&ast::Expr>) -> ast::NamedField {
         match expr {
             Some(expr) => Self::from_text(&format!("{}: {}", name.syntax(), expr.syntax())),
             None => Self::from_text(&name.syntax().to_string()),
@@ -248,36 +253,36 @@ impl AstBuilder<ast::NamedField> {
 }
 
 impl AstBuilder<ast::Block> {
-    fn from_text(text: &str) -> TreeArc<ast::Block> {
+    fn from_text(text: &str) -> ast::Block {
         ast_node_from_file_text(&format!("fn f() {}", text))
     }
 
-    pub fn single_expr(e: &ast::Expr) -> TreeArc<ast::Block> {
+    pub fn single_expr(e: &ast::Expr) -> ast::Block {
         Self::from_text(&format!("{{ {} }}", e.syntax()))
     }
 }
 
 impl AstBuilder<ast::Expr> {
-    fn from_text(text: &str) -> TreeArc<ast::Expr> {
+    fn from_text(text: &str) -> ast::Expr {
         ast_node_from_file_text(&format!("fn f() {{ {}; }}", text))
     }
 
-    pub fn unit() -> TreeArc<ast::Expr> {
+    pub fn unit() -> ast::Expr {
         Self::from_text("()")
     }
 
-    pub fn unimplemented() -> TreeArc<ast::Expr> {
+    pub fn unimplemented() -> ast::Expr {
         Self::from_text("unimplemented!()")
     }
 }
 
 impl AstBuilder<ast::NameRef> {
-    pub fn new(text: &str) -> TreeArc<ast::NameRef> {
+    pub fn new(text: &str) -> ast::NameRef {
         ast_node_from_file_text(&format!("fn f() {{ {}; }}", text))
     }
 }
 
-fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> {
+fn ast_node_from_file_text<N: AstNode>(text: &str) -> N {
     let parse = SourceFile::parse(text);
     let res = parse.tree().syntax().descendants().find_map(N::cast).unwrap().to_owned();
     res
@@ -285,47 +290,49 @@ fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> {
 
 mod tokens {
     use once_cell::sync::Lazy;
-    use ra_syntax::{AstNode, SourceFile, SyntaxKind::*, SyntaxToken, TreeArc, T};
+    use ra_syntax::{AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken, T};
 
-    static SOURCE_FILE: Lazy<TreeArc<SourceFile>> =
-        Lazy::new(|| SourceFile::parse(",\n; ;").tree().to_owned());
+    static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;"));
 
-    pub(crate) fn comma() -> SyntaxToken<'static> {
+    pub(crate) fn comma() -> SyntaxToken {
         SOURCE_FILE
+            .tree()
             .syntax()
             .descendants_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == T![,])
             .unwrap()
     }
 
-    pub(crate) fn single_space() -> SyntaxToken<'static> {
+    pub(crate) fn single_space() -> SyntaxToken {
         SOURCE_FILE
+            .tree()
            .syntax()
            .descendants_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
             .unwrap()
     }
 
     #[allow(unused)]
-    pub(crate) fn single_newline() -> SyntaxToken<'static> {
+    pub(crate) fn single_newline() -> SyntaxToken {
         SOURCE_FILE
+            .tree()
             .syntax()
             .descendants_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
             .unwrap()
     }
 
-    pub(crate) struct WsBuilder(TreeArc<SourceFile>);
+    pub(crate) struct WsBuilder(SourceFile);
 
     impl WsBuilder {
         pub(crate) fn new(text: &str) -> WsBuilder {
             WsBuilder(SourceFile::parse(text).ok().unwrap())
         }
-        pub(crate) fn ws(&self) -> SyntaxToken<'_> {
-            self.0.syntax().first_child_or_token().unwrap().as_token().unwrap()
+        pub(crate) fn ws(&self) -> SyntaxToken {
+            self.0.syntax().first_child_or_token().unwrap().as_token().cloned().unwrap()
         }
     }
 
diff --git a/crates/ra_assists/src/auto_import.rs b/crates/ra_assists/src/auto_import.rs
index f8f37e852..0eb4bdb62 100644
--- a/crates/ra_assists/src/auto_import.rs
+++ b/crates/ra_assists/src/auto_import.rs
@@ -12,25 +12,25 @@ use ra_syntax::{
     SyntaxNode, TextRange, T,
 };
 
-fn collect_path_segments_raw<'a>(
-    segments: &mut Vec<&'a ast::PathSegment>,
-    mut path: &'a ast::Path,
+fn collect_path_segments_raw(
+    segments: &mut Vec<ast::PathSegment>,
+    mut path: ast::Path,
 ) -> Option<usize> {
     let oldlen = segments.len();
     loop {
         let mut children = path.syntax().children_with_tokens();
         let (first, second, third) = (
-            children.next().map(|n| (n, n.kind())),
-            children.next().map(|n| (n, n.kind())),
-            children.next().map(|n| (n, n.kind())),
+            children.next().map(|n| (n.clone(), n.kind())),
+            children.next().map(|n| (n.clone(), n.kind())),
+            children.next().map(|n| (n.clone(), n.kind())),
         );
         match (first, second, third) {
             (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => {
-                path = ast::Path::cast(subpath.as_node()?)?;
-                segments.push(ast::PathSegment::cast(segment.as_node()?)?);
+                path = ast::Path::cast(subpath.as_node()?.clone())?;
+                segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
             }
             (Some((segment, PATH_SEGMENT)), _, _) => {
-                segments.push(ast::PathSegment::cast(segment.as_node()?)?);
+                segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
                 break;
             }
             (_, _, _) => return None,
@@ -60,7 +60,7 @@ fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) {
 }
 
 // Returns the numeber of common segments.
-fn compare_path_segments(left: &[SmolStr], right: &[&ast::PathSegment]) -> usize {
+fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize {
     left.iter().zip(right).filter(|(l, r)| compare_path_segment(l, r)).count()
 }
 
@@ -81,12 +81,12 @@ fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool {
     a == b.text()
 }
 
-#[derive(Copy, Clone)]
-enum ImportAction<'a> {
+#[derive(Clone)]
+enum ImportAction {
     Nothing,
     // Add a brand new use statement.
     AddNewUse {
-        anchor: Option<&'a SyntaxNode>, // anchor node
+        anchor: Option<SyntaxNode>, // anchor node
         add_after_anchor: bool,
     },
 
@@ -94,9 +94,9 @@ enum ImportAction<'a> {
     AddNestedImport {
         // how may segments matched with the target path
        common_segments: usize,
-        path_to_split: &'a ast::Path,
+        path_to_split: ast::Path,
         // the first segment of path_to_split we want to add into the new nested list
-        first_segment_to_split: Option<&'a ast::PathSegment>,
+        first_segment_to_split: Option<ast::PathSegment>,
         // Wether to add 'self' in addition to the target path
         add_self: bool,
     },
@@ -104,20 +104,20 @@ enum ImportAction<'a> {
     AddInTreeList {
         common_segments: usize,
         // The UseTreeList where to add the target path
-        tree_list: &'a ast::UseTreeList,
+        tree_list: ast::UseTreeList,
         add_self: bool,
     },
 }
 
-impl<'a> ImportAction<'a> {
-    fn add_new_use(anchor: Option<&'a SyntaxNode>, add_after_anchor: bool) -> Self {
+impl ImportAction {
+    fn add_new_use(anchor: Option<SyntaxNode>, add_after_anchor: bool) -> Self {
         ImportAction::AddNewUse { anchor, add_after_anchor }
     }
 
     fn add_nested_import(
         common_segments: usize,
-        path_to_split: &'a ast::Path,
-        first_segment_to_split: Option<&'a ast::PathSegment>,
+        path_to_split: ast::Path,
+        first_segment_to_split: Option<ast::PathSegment>,
         add_self: bool,
     ) -> Self {
         ImportAction::AddNestedImport {
@@ -130,14 +130,14 @@ impl<'a> ImportAction<'a> {
 
     fn add_in_tree_list(
         common_segments: usize,
-        tree_list: &'a ast::UseTreeList,
+        tree_list: ast::UseTreeList,
         add_self: bool,
     ) -> Self {
         ImportAction::AddInTreeList { common_segments, tree_list, add_self }
     }
 
-    fn better<'b>(left: &'b ImportAction<'a>, right: &'b ImportAction<'a>) -> &'b ImportAction<'a> {
-        if left.is_better(right) {
+    fn better(left: ImportAction, right: ImportAction) -> ImportAction {
+        if left.is_better(&right) {
             left
         } else {
             right
@@ -166,12 +166,12 @@ impl<'a> ImportAction<'a> {
 
 // Find out the best ImportAction to import target path against current_use_tree.
 // If current_use_tree has a nested import the function gets called recursively on every UseTree inside a UseTreeList.
-fn walk_use_tree_for_best_action<'a>(
-    current_path_segments: &mut Vec<&'a ast::PathSegment>, // buffer containing path segments
-    current_parent_use_tree_list: Option<&'a ast::UseTreeList>, // will be Some value if we are in a nested import
-    current_use_tree: &'a ast::UseTree, // the use tree we are currently examinating
+fn walk_use_tree_for_best_action(
+    current_path_segments: &mut Vec<ast::PathSegment>, // buffer containing path segments
+    current_parent_use_tree_list: Option<ast::UseTreeList>, // will be Some value if we are in a nested import
+    current_use_tree: ast::UseTree, // the use tree we are currently examinating
     target: &[SmolStr], // the path we want to import
-) -> ImportAction<'a> {
+) -> ImportAction {
     // We save the number of segments in the buffer so we can restore the correct segments
     // before returning. Recursive call will add segments so we need to delete them.
     let prev_len = current_path_segments.len();
@@ -188,32 +188,36 @@ fn walk_use_tree_for_best_action<'a>(
                     .syntax()
                     .ancestors()
                     .find_map(ast::UseItem::cast)
-                    .map(AstNode::syntax),
+                    .map(|it| it.syntax().clone()),
                 true,
             );
         }
     };
 
     // This can happen only if current_use_tree is a direct child of a UseItem
-    if let Some(name) = alias.and_then(ast::NameOwner::name) {
-        if compare_path_segment_with_name(&target[0], name) {
+    if let Some(name) = alias.and_then(|it| it.name()) {
+        if compare_path_segment_with_name(&target[0], &name) {
             return ImportAction::Nothing;
         }
     }
 
-    collect_path_segments_raw(current_path_segments, path);
+    collect_path_segments_raw(current_path_segments, path.clone());
 
     // We compare only the new segments added in the line just above.
     // The first prev_len segments were already compared in 'parent' recursive calls.
     let left = target.split_at(prev_len).1;
     let right = current_path_segments.split_at(prev_len).1;
-    let common = compare_path_segments(left, right);
+    let common = compare_path_segments(left, &right);
     let mut action = match common {
         0 => ImportAction::add_new_use(
            // e.g: target is std::fmt and we can have
            // use foo::bar
            // We add a brand new use statement
-            current_use_tree.syntax().ancestors().find_map(ast::UseItem::cast).map(AstNode::syntax),
+            current_use_tree
+                .syntax()
+                .ancestors()
+                .find_map(ast::UseItem::cast)
+                .map(|it| it.syntax().clone()),
             true,
         ),
         common if common == left.len() && left.len() == right.len() => {
@@ -223,9 +227,9 @@ fn walk_use_tree_for_best_action<'a>(
             if let Some(list) = tree_list {
                 // In case 2 we need to add self to the nested list
                 // unless it's already there
-                let has_self = list.use_trees().map(ast::UseTree::path).any(|p| {
-                    p.and_then(ast::Path::segment)
-                        .and_then(ast::PathSegment::kind)
+                let has_self = list.use_trees().map(|it| it.path()).any(|p| {
+                    p.and_then(|it| it.segment())
+                        .and_then(|it| it.kind())
                         .filter(|k| *k == ast::PathSegmentKind::SelfKw)
                         .is_some()
                 });
@@ -248,7 +252,7 @@ fn walk_use_tree_for_best_action<'a>(
                 ImportAction::add_nested_import(
                     prev_len + common,
                     path,
-                    Some(segments_to_split[0]),
+                    Some(segments_to_split[0].clone()),
                     false,
                 )
             }
@@ -263,14 +267,18 @@ fn walk_use_tree_for_best_action<'a>(
                     .syntax()
                     .ancestors()
                     .find_map(ast::UseItem::cast)
-                    .map(AstNode::syntax),
+                    .map(|it| it.syntax().clone()),
                 true,
             );
             if let Some(list) = tree_list {
                 // Case 2, check recursively if the path is already imported in the nested list
                 for u in list.use_trees() {
-                    let child_action =
-                        walk_use_tree_for_best_action(current_path_segments, Some(list), u, target);
+                    let child_action = walk_use_tree_for_best_action(
+                        current_path_segments,
+                        Some(list.clone()),
+                        u,
+                        target,
+                    );
                     if child_action.is_better(&better_action) {
                         better_action = child_action;
                         if let ImportAction::Nothing = better_action {
@@ -291,7 +299,7 @@ fn walk_use_tree_for_best_action<'a>(
             ImportAction::add_nested_import(
                 prev_len + common,
                 path,
-                Some(segments_to_split[0]),
+                Some(segments_to_split[0].clone()),
                 true,
             )
         }
@@ -302,7 +310,7 @@ fn walk_use_tree_for_best_action<'a>(
             ImportAction::add_nested_import(
                 prev_len + common,
                 path,
-                Some(segments_to_split[0]),
+                Some(segments_to_split[0].clone()),
                 false,
             )
         }
@@ -311,7 +319,7 @@ fn walk_use_tree_for_best_action<'a>(
 
     // If we are inside a UseTreeList adding a use statement become adding to the existing
     // tree list.
-    action = match (current_parent_use_tree_list, action) {
+    action = match (current_parent_use_tree_list, action.clone()) {
         (Some(use_tree_list), ImportAction::AddNewUse { .. }) => {
             ImportAction::add_in_tree_list(prev_len, use_tree_list, false)
         }
@@ -323,19 +331,20 @@ fn walk_use_tree_for_best_action<'a>(
     action
 }
 
-fn best_action_for_target<'b, 'a: 'b>(
-    container: &'a SyntaxNode,
-    anchor: &'a SyntaxNode,
-    target: &'b [SmolStr],
-) -> ImportAction<'a> {
+fn best_action_for_target(
+    container: SyntaxNode,
+    anchor: SyntaxNode,
+    target: &[SmolStr],
+) -> ImportAction {
     let mut storage = Vec::with_capacity(16); // this should be the only allocation
     let best_action = container
         .children()
         .filter_map(ast::UseItem::cast)
-        .filter_map(ast::UseItem::use_tree)
+        .filter_map(|it| it.use_tree())
         .map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target))
-        .fold(None, |best, a| {
-            best.and_then(|best| Some(*ImportAction::better(&best, &a))).or_else(|| Some(a))
+        .fold(None, |best, a| match best {
+            Some(best) => Some(ImportAction::better(best, a)),
+            None => Some(a),
        });
 
     match best_action {
@@ -386,7 +395,7 @@ fn make_assist(action: &ImportAction, target: &[SmolStr], edit: &mut TextEditBui
 }
 
 fn make_assist_add_new_use(
-    anchor: &Option<&SyntaxNode>,
+    anchor: &Option<SyntaxNode>,
     after: bool,
     target: &[SmolStr],
     edit: &mut TextEditBuilder,
@@ -396,7 +405,7 @@ fn make_assist_add_new_use(
     let mut buf = String::new();
     if after {
         buf.push_str("\n");
-        if let Some(spaces) = indent {
+        if let Some(spaces) = &indent {
             buf.push_str(spaces);
         }
     }
@@ -405,8 +414,8 @@ fn make_assist_add_new_use(
     buf.push_str(";");
     if !after {
         buf.push_str("\n\n");
-        if let Some(spaces) = indent {
-            buf.push_str(spaces);
+        if let Some(spaces) = &indent {
+            buf.push_str(&spaces);
         }
     }
     let position = if after { anchor.range().end() } else { anchor.range().start() };
@@ -444,7 +453,7 @@ fn make_assist_add_in_tree_list(
 
 fn make_assist_add_nested_import(
     path: &ast::Path,
-    first_segment_to_split: &Option<&ast::PathSegment>,
+    first_segment_to_split: &Option<ast::PathSegment>,
     target: &[SmolStr],
     add_self: bool,
     edit: &mut TextEditBuilder,
@@ -482,7 +491,7 @@ fn apply_auto_import(
     target: &[SmolStr],
     edit: &mut TextEditBuilder,
 ) {
-    let action = best_action_for_target(container, path.syntax(), target);
+    let action = best_action_for_target(container.clone(), path.syntax().clone(), target);
     make_assist(&action, target, edit);
     if let Some(last) = path.segment() {
         // Here we are assuming the assist will provide a correct use statement
@@ -522,26 +531,26 @@ pub fn auto_import_text_edit(
     edit: &mut TextEditBuilder,
 ) {
     let container = position.ancestors().find_map(|n| {
-        if let Some(module) = ast::Module::cast(n) {
-            return module.item_list().map(ast::AstNode::syntax);
+        if let Some(module) = ast::Module::cast(n.clone()) {
+            return module.item_list().map(|it| it.syntax().clone());
         }
-        ast::SourceFile::cast(n).map(ast::AstNode::syntax)
+        ast::SourceFile::cast(n).map(|it| it.syntax().clone())
     });
 
     if let Some(container) = container {
-        let action = best_action_for_target(container, anchor, target);
+        let action = best_action_for_target(container, anchor.clone(), target);
         make_assist(&action, target, edit);
     }
 }
 
 pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
-    let path: &ast::Path = ctx.node_at_offset()?;
+    let path: ast::Path = ctx.node_at_offset()?;
     // We don't want to mess with use statements
     if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() {
         return None;
     }
 
-    let hir_path = hir::Path::from_ast(path)?;
+    let hir_path = hir::Path::from_ast(path.clone())?;
     let segments = collect_hir_path_segments(&hir_path);
     if segments.len() < 2 {
         return None;
@@ -554,7 +563,7 @@ pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist
             format!("import {} in mod {}", fmt_segments(&segments), name.text()),
             |edit| {
                 let mut text_edit = TextEditBuilder::default();
-                apply_auto_import(item_list.syntax(), path, &segments, &mut text_edit);
+                apply_auto_import(item_list.syntax(), &path, &segments, &mut text_edit);
                 edit.set_edit_builder(text_edit);
             },
         );
@@ -566,7 +575,7 @@ pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist
             format!("import {} in the current file", fmt_segments(&segments)),
             |edit| {
                 let mut text_edit = TextEditBuilder::default();
-                apply_auto_import(current_file.syntax(), path, &segments, &mut text_edit);
+                apply_auto_import(current_file.syntax(), &path, &segments, &mut text_edit);
                 edit.set_edit_builder(text_edit);
             },
         );
diff --git a/crates/ra_assists/src/change_visibility.rs b/crates/ra_assists/src/change_visibility.rs
index 6cabba3e3..ab10d2aa4 100644
--- a/crates/ra_assists/src/change_visibility.rs
+++ b/crates/ra_assists/src/change_visibility.rs
@@ -35,7 +35,7 @@ fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
         if parent.children().any(|child| child.kind() == VISIBILITY) {
             return None;
         }
-        (vis_offset(parent), keyword.range())
+        (vis_offset(&parent), keyword.range())
     } else {
         let ident = ctx.token_at_offset().find(|leaf| leaf.kind() == IDENT)?;
         let field = ident.parent().ancestors().find_map(ast::NamedFieldDef::cast)?;
@@ -65,7 +65,7 @@ fn vis_offset(node: &SyntaxNode) -> TextUnit {
         .unwrap_or_else(|| node.range().start())
 }
 
-fn change_vis(mut ctx: AssistCtx<impl HirDatabase>, vis: &ast::Visibility) -> Option<Assist> {
+fn change_vis(mut ctx: AssistCtx<impl HirDatabase>, vis: ast::Visibility) -> Option<Assist> {
     if vis.syntax().text() == "pub" {
         ctx.add_action(AssistId("change_visibility"), "change to pub(crate)", |edit| {
             edit.target(vis.syntax().range());
diff --git a/crates/ra_assists/src/fill_match_arms.rs b/crates/ra_assists/src/fill_match_arms.rs
index deef166b5..b96806ac6 100644
--- a/crates/ra_assists/src/fill_match_arms.rs
+++ b/crates/ra_assists/src/fill_match_arms.rs
@@ -27,7 +27,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
     let mut arm_iter = arm_list.arms();
     let first = arm_iter.next();
 
-    match first {
+    match &first {
         // If there arm list is empty or there is only one trivial arm, then proceed.
         Some(arm) if is_trivial_arm(arm) => {
             if arm_iter.next() != None {
@@ -44,7 +44,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
 
     let expr = match_expr.expr()?;
     let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, expr.syntax(), None);
-    let match_expr_ty = analyzer.type_of(ctx.db, expr)?;
+    let match_expr_ty = analyzer.type_of(ctx.db, &expr)?;
     let enum_def = analyzer.autoderef(ctx.db, match_expr_ty).find_map(|ty| match ty.as_adt() {
         Some((AdtDef::Enum(e), _)) => Some(e),
         _ => None,
diff --git a/crates/ra_assists/src/flip_binexpr.rs b/crates/ra_assists/src/flip_binexpr.rs
index 5e41f9346..2e591ad3b 100644
--- a/crates/ra_assists/src/flip_binexpr.rs
+++ b/crates/ra_assists/src/flip_binexpr.rs
@@ -6,8 +6,8 @@ use crate::{Assist, AssistCtx, AssistId};
 /// Flip binary expression assist.
 pub(crate) fn flip_binexpr(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let expr = ctx.node_at_offset::<BinExpr>()?;
-    let lhs = expr.lhs()?.syntax();
-    let rhs = expr.rhs()?.syntax();
+    let lhs = expr.lhs()?.syntax().clone();
+    let rhs = expr.rhs()?.syntax().clone();
     let op_range = expr.op_token()?.range();
     // The assist should be applied only if the cursor is on the operator
     let cursor_in_range = ctx.frange.range.is_subrange(&op_range);
diff --git a/crates/ra_assists/src/flip_comma.rs b/crates/ra_assists/src/flip_comma.rs index d8dba779f..13016ae06 100644 --- a/crates/ra_assists/src/flip_comma.rs +++ b/crates/ra_assists/src/flip_comma.rs | |||
@@ -5,8 +5,8 @@ use crate::{Assist, AssistCtx, AssistId}; | |||
5 | 5 | ||
6 | pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 6 | pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
7 | let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == T![,])?; | 7 | let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == T![,])?; |
8 | let prev = non_trivia_sibling(comma.into(), Direction::Prev)?; | 8 | let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; |
9 | let next = non_trivia_sibling(comma.into(), Direction::Next)?; | 9 | let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; |
10 | ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| { | 10 | ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| { |
11 | edit.target(comma.range()); | 11 | edit.target(comma.range()); |
12 | edit.replace(prev.range(), next.to_string()); | 12 | edit.replace(prev.range(), next.to_string()); |
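Tokens follow the same ownership rules: `SyntaxToken` is now an owned value, and converting it into a `SyntaxElement` with `.into()` consumes it, which is why both `non_trivia_sibling` calls clone `comma` first and the later `comma.range()` still compiles. A small sketch of that conversion (the wrapper function is made up):

    use ra_syntax::{SyntaxElement, SyntaxToken};

    // Clone the cheap token handle, then convert; the original token stays usable.
    fn as_element(token: &SyntaxToken) -> SyntaxElement {
        token.clone().into()
    }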
diff --git a/crates/ra_assists/src/inline_local_variable.rs b/crates/ra_assists/src/inline_local_variable.rs index 554de8b46..3c17089de 100644 --- a/crates/ra_assists/src/inline_local_variable.rs +++ b/crates/ra_assists/src/inline_local_variable.rs | |||
@@ -16,18 +16,18 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt | |||
16 | if bind_pat.is_mutable() { | 16 | if bind_pat.is_mutable() { |
17 | return None; | 17 | return None; |
18 | } | 18 | } |
19 | let initializer_expr = let_stmt.initializer(); | 19 | let initializer_expr = let_stmt.initializer()?; |
20 | let delete_range = if let Some(whitespace) = let_stmt | 20 | let delete_range = if let Some(whitespace) = let_stmt |
21 | .syntax() | 21 | .syntax() |
22 | .next_sibling_or_token() | 22 | .next_sibling_or_token() |
23 | .and_then(|it| ast::Whitespace::cast(it.as_token()?)) | 23 | .and_then(|it| ast::Whitespace::cast(it.as_token()?.clone())) |
24 | { | 24 | { |
25 | TextRange::from_to(let_stmt.syntax().range().start(), whitespace.syntax().range().end()) | 25 | TextRange::from_to(let_stmt.syntax().range().start(), whitespace.syntax().range().end()) |
26 | } else { | 26 | } else { |
27 | let_stmt.syntax().range() | 27 | let_stmt.syntax().range() |
28 | }; | 28 | }; |
29 | let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, bind_pat.syntax(), None); | 29 | let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, bind_pat.syntax(), None); |
30 | let refs = analyzer.find_all_refs(bind_pat); | 30 | let refs = analyzer.find_all_refs(&bind_pat); |
31 | 31 | ||
32 | let mut wrap_in_parens = vec![true; refs.len()]; | 32 | let mut wrap_in_parens = vec![true; refs.len()]; |
33 | 33 | ||
@@ -45,7 +45,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt | |||
45 | } | 45 | } |
46 | }; | 46 | }; |
47 | 47 | ||
48 | wrap_in_parens[i] = match (initializer_expr?.kind(), usage_parent.kind()) { | 48 | wrap_in_parens[i] = match (initializer_expr.kind(), usage_parent.kind()) { |
49 | (ExprKind::CallExpr(_), _) | 49 | (ExprKind::CallExpr(_), _) |
50 | | (ExprKind::IndexExpr(_), _) | 50 | | (ExprKind::IndexExpr(_), _) |
51 | | (ExprKind::MethodCallExpr(_), _) | 51 | | (ExprKind::MethodCallExpr(_), _) |
@@ -71,7 +71,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt | |||
71 | }; | 71 | }; |
72 | } | 72 | } |
73 | 73 | ||
74 | let init_str = initializer_expr?.syntax().text().to_string(); | 74 | let init_str = initializer_expr.syntax().text().to_string(); |
75 | let init_in_paren = format!("({})", &init_str); | 75 | let init_in_paren = format!("({})", &init_str); |
76 | 76 | ||
77 | ctx.add_action( | 77 | ctx.add_action( |
diff --git a/crates/ra_assists/src/introduce_variable.rs b/crates/ra_assists/src/introduce_variable.rs index f7f5ccafa..ce28132c9 100644 --- a/crates/ra_assists/src/introduce_variable.rs +++ b/crates/ra_assists/src/introduce_variable.rs | |||
@@ -20,8 +20,8 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option | |||
20 | return None; | 20 | return None; |
21 | } | 21 | } |
22 | let expr = node.ancestors().find_map(valid_target_expr)?; | 22 | let expr = node.ancestors().find_map(valid_target_expr)?; |
23 | let (anchor_stmt, wrap_in_block) = anchor_stmt(expr)?; | 23 | let (anchor_stmt, wrap_in_block) = anchor_stmt(expr.clone())?; |
24 | let indent = anchor_stmt.prev_sibling_or_token()?.as_token()?; | 24 | let indent = anchor_stmt.prev_sibling_or_token()?.as_token()?.clone(); |
25 | if indent.kind() != WHITESPACE { | 25 | if indent.kind() != WHITESPACE { |
26 | return None; | 26 | return None; |
27 | } | 27 | } |
@@ -37,9 +37,9 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option | |||
37 | }; | 37 | }; |
38 | 38 | ||
39 | expr.syntax().text().push_to(&mut buf); | 39 | expr.syntax().text().push_to(&mut buf); |
40 | let full_stmt = ast::ExprStmt::cast(anchor_stmt); | 40 | let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone()); |
41 | let is_full_stmt = if let Some(expr_stmt) = full_stmt { | 41 | let is_full_stmt = if let Some(expr_stmt) = &full_stmt { |
42 | Some(expr.syntax()) == expr_stmt.expr().map(|e| e.syntax()) | 42 | Some(expr.syntax().clone()) == expr_stmt.expr().map(|e| e.syntax().clone()) |
43 | } else { | 43 | } else { |
44 | false | 44 | false |
45 | }; | 45 | }; |
@@ -81,7 +81,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option | |||
81 | 81 | ||
82 | /// Check whether the node is a valid expression which can be extracted to a variable. | 82 | /// Check whether the node is a valid expression which can be extracted to a variable. |
83 | /// In general that's true for any expression, but in some cases that would produce invalid code. | 83 | /// In general that's true for any expression, but in some cases that would produce invalid code. |
84 | fn valid_target_expr(node: &SyntaxNode) -> Option<&ast::Expr> { | 84 | fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> { |
85 | match node.kind() { | 85 | match node.kind() { |
86 | PATH_EXPR => None, | 86 | PATH_EXPR => None, |
87 | BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()), | 87 | BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()), |
@@ -96,14 +96,10 @@ fn valid_target_expr(node: &SyntaxNode) -> Option<&ast::Expr> { | |||
96 | /// to produce correct code. | 96 | /// to produce correct code. |
97 | /// It can be a statement, the last in a block expression or a wanna be block | 97 | /// It can be a statement, the last in a block expression or a wanna be block |
98 | /// expression like a lambda or match arm. | 98 | /// expression like a lambda or match arm. |
99 | fn anchor_stmt(expr: &ast::Expr) -> Option<(&SyntaxNode, bool)> { | 99 | fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> { |
100 | expr.syntax().ancestors().find_map(|node| { | 100 | expr.syntax().ancestors().find_map(|node| { |
101 | if ast::Stmt::cast(node).is_some() { | ||
102 | return Some((node, false)); | ||
103 | } | ||
104 | |||
105 | if let Some(expr) = node.parent().and_then(ast::Block::cast).and_then(|it| it.expr()) { | 101 | if let Some(expr) = node.parent().and_then(ast::Block::cast).and_then(|it| it.expr()) { |
106 | if expr.syntax() == node { | 102 | if expr.syntax() == &node { |
107 | tested_by!(test_introduce_var_last_expr); | 103 | tested_by!(test_introduce_var_last_expr); |
108 | return Some((node, false)); | 104 | return Some((node, false)); |
109 | } | 105 | } |
@@ -115,6 +111,10 @@ fn anchor_stmt(expr: &ast::Expr) -> Option<(&SyntaxNode, bool)> { | |||
115 | } | 111 | } |
116 | } | 112 | } |
117 | 113 | ||
114 | if ast::Stmt::cast(node.clone()).is_some() { | ||
115 | return Some((node, false)); | ||
116 | } | ||
117 | |||
118 | None | 118 | None |
119 | }) | 119 | }) |
120 | } | 120 | } |
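Two consequences of owned nodes show up in `anchor_stmt`: `ancestors()` yields `SyntaxNode` values rather than references, and `cast` consumes its argument, so the `ast::Stmt::cast(node.clone())` check clones the handle it gives away and is reordered after the block check. A sketch of the by-value `cast` idiom (the wrapper is hypothetical):

    use ra_syntax::{ast, AstNode, SyntaxNode};

    // `cast` takes the node by value; clone the handle if it is needed afterwards.
    fn is_stmt(node: &SyntaxNode) -> bool {
        ast::Stmt::cast(node.clone()).is_some()
    }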
diff --git a/crates/ra_assists/src/move_guard.rs b/crates/ra_assists/src/move_guard.rs index e1ce86a33..313c9ad18 100644 --- a/crates/ra_assists/src/move_guard.rs +++ b/crates/ra_assists/src/move_guard.rs | |||
@@ -18,9 +18,9 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op | |||
18 | 18 | ||
19 | ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| { | 19 | ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| { |
20 | edit.target(guard.syntax().range()); | 20 | edit.target(guard.syntax().range()); |
21 | let offseting_amount = match space_before_guard { | 21 | let offseting_amount = match &space_before_guard { |
22 | Some(SyntaxElement::Token(tok)) => { | 22 | Some(SyntaxElement::Token(tok)) => { |
23 | if let Some(_) = ast::Whitespace::cast(tok) { | 23 | if let Some(_) = ast::Whitespace::cast(tok.clone()) { |
24 | let ele = space_before_guard.unwrap().range(); | 24 | let ele = space_before_guard.unwrap().range(); |
25 | edit.delete(ele); | 25 | edit.delete(ele); |
26 | ele.len() | 26 | ele.len() |
@@ -39,11 +39,11 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op | |||
39 | } | 39 | } |
40 | 40 | ||
41 | pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 41 | pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
42 | let match_arm: &MatchArm = ctx.node_at_offset::<MatchArm>()?; | 42 | let match_arm: MatchArm = ctx.node_at_offset::<MatchArm>()?; |
43 | let last_match_pat = match_arm.pats().last()?; | 43 | let last_match_pat = match_arm.pats().last()?; |
44 | 44 | ||
45 | let arm_body = match_arm.expr()?; | 45 | let arm_body = match_arm.expr()?; |
46 | let if_expr: &IfExpr = IfExpr::cast(arm_body.syntax())?; | 46 | let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone())?; |
47 | let cond = if_expr.condition()?; | 47 | let cond = if_expr.condition()?; |
48 | let then_block = if_expr.then_branch()?; | 48 | let then_block = if_expr.then_branch()?; |
49 | 49 | ||
@@ -65,7 +65,7 @@ pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>) | |||
65 | edit.target(if_expr.syntax().range()); | 65 | edit.target(if_expr.syntax().range()); |
66 | let then_only_expr = then_block.statements().next().is_none(); | 66 | let then_only_expr = then_block.statements().next().is_none(); |
67 | 67 | ||
68 | match then_block.expr() { | 68 | match &then_block.expr() { |
69 | Some(then_expr) if then_only_expr => { | 69 | Some(then_expr) if then_only_expr => { |
70 | edit.replace(if_expr.syntax().range(), then_expr.syntax().text()) | 70 | edit.replace(if_expr.syntax().range(), then_expr.syntax().text()) |
71 | } | 71 | } |
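Match ergonomics adapt to owned nodes as well: matching on `&space_before_guard` and `&then_block.expr()` borrows the `Option` so the node inside is not moved, and `ast::Whitespace::cast(tok.clone())` clones the token it consumes. A sketch of the borrow-the-Option pattern, built only from accessors visible in these hunks (the function itself is invented):

    use ra_syntax::{ast, AstNode};

    // Borrowing the Option lets the owned expression be reused after the match.
    fn then_branch_text(block: &ast::Block) -> Option<String> {
        match &block.expr() {
            Some(expr) => Some(expr.syntax().text().to_string()),
            None => None,
        }
    }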
diff --git a/crates/ra_assists/src/remove_dbg.rs b/crates/ra_assists/src/remove_dbg.rs index 5680f76ca..c330bc827 100644 --- a/crates/ra_assists/src/remove_dbg.rs +++ b/crates/ra_assists/src/remove_dbg.rs | |||
@@ -8,7 +8,7 @@ use ra_syntax::{ | |||
8 | pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 8 | pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
9 | let macro_call = ctx.node_at_offset::<ast::MacroCall>()?; | 9 | let macro_call = ctx.node_at_offset::<ast::MacroCall>()?; |
10 | 10 | ||
11 | if !is_valid_macrocall(macro_call, "dbg")? { | 11 | if !is_valid_macrocall(&macro_call, "dbg")? { |
12 | return None; | 12 | return None; |
13 | } | 13 | } |
14 | 14 | ||
@@ -35,7 +35,7 @@ pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> | |||
35 | }; | 35 | }; |
36 | 36 | ||
37 | let macro_content = { | 37 | let macro_content = { |
38 | let macro_args = macro_call.token_tree()?.syntax(); | 38 | let macro_args = macro_call.token_tree()?.syntax().clone(); |
39 | let range = macro_args.range(); | 39 | let range = macro_args.range(); |
40 | let start = range.start() + TextUnit::of_char('('); | 40 | let start = range.start() + TextUnit::of_char('('); |
41 | let end = range.end() - TextUnit::of_char(')'); | 41 | let end = range.end() - TextUnit::of_char(')'); |
@@ -65,7 +65,7 @@ fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option<b | |||
65 | return None; | 65 | return None; |
66 | } | 66 | } |
67 | 67 | ||
68 | let node = macro_call.token_tree()?.syntax(); | 68 | let node = macro_call.token_tree()?.syntax().clone(); |
69 | let first_child = node.first_child_or_token()?; | 69 | let first_child = node.first_child_or_token()?; |
70 | let last_child = node.last_child_or_token()?; | 70 | let last_child = node.last_child_or_token()?; |
71 | 71 | ||
diff --git a/crates/ra_assists/src/replace_if_let_with_match.rs b/crates/ra_assists/src/replace_if_let_with_match.rs index c2c7cf70b..5de6aa266 100644 --- a/crates/ra_assists/src/replace_if_let_with_match.rs +++ b/crates/ra_assists/src/replace_if_let_with_match.rs | |||
@@ -5,7 +5,7 @@ use ra_syntax::{ast, AstNode}; | |||
5 | use crate::{Assist, AssistCtx, AssistId}; | 5 | use crate::{Assist, AssistCtx, AssistId}; |
6 | 6 | ||
7 | pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 7 | pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
8 | let if_expr: &ast::IfExpr = ctx.node_at_offset()?; | 8 | let if_expr: ast::IfExpr = ctx.node_at_offset()?; |
9 | let cond = if_expr.condition()?; | 9 | let cond = if_expr.condition()?; |
10 | let pat = cond.pat()?; | 10 | let pat = cond.pat()?; |
11 | let expr = cond.expr()?; | 11 | let expr = cond.expr()?; |
@@ -25,16 +25,11 @@ pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) -> | |||
25 | ctx.build() | 25 | ctx.build() |
26 | } | 26 | } |
27 | 27 | ||
28 | fn build_match_expr( | 28 | fn build_match_expr(expr: ast::Expr, pat1: ast::Pat, arm1: ast::Block, arm2: ast::Block) -> String { |
29 | expr: &ast::Expr, | ||
30 | pat1: &ast::Pat, | ||
31 | arm1: &ast::Block, | ||
32 | arm2: &ast::Block, | ||
33 | ) -> String { | ||
34 | let mut buf = String::new(); | 29 | let mut buf = String::new(); |
35 | buf.push_str(&format!("match {} {{\n", expr.syntax().text())); | 30 | buf.push_str(&format!("match {} {{\n", expr.syntax().text())); |
36 | buf.push_str(&format!(" {} => {}\n", pat1.syntax().text(), format_arm(arm1))); | 31 | buf.push_str(&format!(" {} => {}\n", pat1.syntax().text(), format_arm(&arm1))); |
37 | buf.push_str(&format!(" _ => {}\n", format_arm(arm2))); | 32 | buf.push_str(&format!(" _ => {}\n", format_arm(&arm2))); |
38 | buf.push_str("}"); | 33 | buf.push_str("}"); |
39 | buf | 34 | buf |
40 | } | 35 | } |
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs index b063193cf..375e2f508 100644 --- a/crates/ra_cli/src/main.rs +++ b/crates/ra_cli/src/main.rs | |||
@@ -7,7 +7,7 @@ use clap::{App, Arg, SubCommand}; | |||
7 | use flexi_logger::Logger; | 7 | use flexi_logger::Logger; |
8 | use ra_ide_api::{file_structure, Analysis}; | 8 | use ra_ide_api::{file_structure, Analysis}; |
9 | use ra_prof::profile; | 9 | use ra_prof::profile; |
10 | use ra_syntax::{AstNode, SourceFile, TreeArc}; | 10 | use ra_syntax::{AstNode, SourceFile}; |
11 | 11 | ||
12 | type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>; | 12 | type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>; |
13 | 13 | ||
@@ -100,9 +100,9 @@ fn main() -> Result<()> { | |||
100 | Ok(()) | 100 | Ok(()) |
101 | } | 101 | } |
102 | 102 | ||
103 | fn file() -> Result<TreeArc<SourceFile>> { | 103 | fn file() -> Result<SourceFile> { |
104 | let text = read_stdin()?; | 104 | let text = read_stdin()?; |
105 | Ok(SourceFile::parse(&text).tree().to_owned()) | 105 | Ok(SourceFile::parse(&text).tree()) |
106 | } | 106 | } |
107 | 107 | ||
108 | fn read_stdin() -> Result<String> { | 108 | fn read_stdin() -> Result<String> { |
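This is the most visible API shift: `SourceFile::parse(&text)` yields a `Parse` whose `tree()` now returns an owned `SourceFile`, so both the `TreeArc` import and the trailing `.to_owned()` disappear. A standalone sketch of the same round-trip (function name invented):

    use ra_syntax::SourceFile;

    // Parse and keep the tree: the returned SourceFile owns its syntax tree.
    fn parse_source(text: &str) -> SourceFile {
        SourceFile::parse(text).tree()
    }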
diff --git a/crates/ra_fmt/src/lib.rs b/crates/ra_fmt/src/lib.rs index 1c2c04ad2..d6e895729 100644 --- a/crates/ra_fmt/src/lib.rs +++ b/crates/ra_fmt/src/lib.rs | |||
@@ -3,7 +3,7 @@ | |||
3 | use itertools::Itertools; | 3 | use itertools::Itertools; |
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | ast::{self, AstNode, AstToken}, | 5 | ast::{self, AstNode, AstToken}, |
6 | SyntaxKind, | 6 | SmolStr, SyntaxKind, |
7 | SyntaxKind::*, | 7 | SyntaxKind::*, |
8 | SyntaxNode, SyntaxToken, T, | 8 | SyntaxNode, SyntaxToken, T, |
9 | }; | 9 | }; |
@@ -15,12 +15,12 @@ pub fn reindent(text: &str, indent: &str) -> String { | |||
15 | } | 15 | } |
16 | 16 | ||
17 | /// If the node is on the beginning of the line, calculate indent. | 17 | /// If the node is on the beginning of the line, calculate indent. |
18 | pub fn leading_indent(node: &SyntaxNode) -> Option<&str> { | 18 | pub fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> { |
19 | for token in prev_tokens(node.first_token()?) { | 19 | for token in prev_tokens(node.first_token()?) { |
20 | if let Some(ws) = ast::Whitespace::cast(token) { | 20 | if let Some(ws) = ast::Whitespace::cast(token.clone()) { |
21 | let ws_text = ws.text(); | 21 | let ws_text = ws.text(); |
22 | if let Some(pos) = ws_text.rfind('\n') { | 22 | if let Some(pos) = ws_text.rfind('\n') { |
23 | return Some(&ws_text[pos + 1..]); | 23 | return Some(ws_text[pos + 1..].into()); |
24 | } | 24 | } |
25 | } | 25 | } |
26 | if token.text().contains('\n') { | 26 | if token.text().contains('\n') { |
@@ -31,17 +31,17 @@ pub fn leading_indent(node: &SyntaxNode) -> Option<&str> { | |||
31 | } | 31 | } |
32 | 32 | ||
33 | fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { | 33 | fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { |
34 | successors(token.prev_token(), |&token| token.prev_token()) | 34 | successors(token.prev_token(), |token| token.prev_token()) |
35 | } | 35 | } |
36 | 36 | ||
37 | pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> { | 37 | pub fn extract_trivial_expression(block: &ast::Block) -> Option<ast::Expr> { |
38 | let expr = block.expr()?; | 38 | let expr = block.expr()?; |
39 | if expr.syntax().text().contains('\n') { | 39 | if expr.syntax().text().contains('\n') { |
40 | return None; | 40 | return None; |
41 | } | 41 | } |
42 | let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { | 42 | let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { |
43 | WHITESPACE | T!['{'] | T!['}'] => false, | 43 | WHITESPACE | T!['{'] | T!['}'] => false, |
44 | _ => it != &expr.syntax(), | 44 | _ => it != expr.syntax(), |
45 | }); | 45 | }); |
46 | if non_trivial_children.count() > 0 { | 46 | if non_trivial_children.count() > 0 { |
47 | return None; | 47 | return None; |
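`leading_indent` used to hand back a `&str` borrowed out of the tree; with tokens now being owned handles created inside the loop, the slice has to be copied out, hence the `Option<SmolStr>` return type and the `.into()` on the slice. A tiny sketch of that copy-out step (a hypothetical helper that takes the whitespace text directly):

    use ra_syntax::SmolStr;

    // Copy everything after the last newline into an owned SmolStr.
    fn indent_after_newline(ws_text: &str) -> Option<SmolStr> {
        ws_text.rfind('\n').map(|pos| ws_text[pos + 1..].into())
    }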
diff --git a/crates/ra_hir/src/adt.rs b/crates/ra_hir/src/adt.rs index 8afdac801..c65446df4 100644 --- a/crates/ra_hir/src/adt.rs +++ b/crates/ra_hir/src/adt.rs | |||
@@ -4,10 +4,7 @@ | |||
4 | use std::sync::Arc; | 4 | use std::sync::Arc; |
5 | 5 | ||
6 | use ra_arena::{impl_arena_id, Arena, RawId}; | 6 | use ra_arena::{impl_arena_id, Arena, RawId}; |
7 | use ra_syntax::{ | 7 | use ra_syntax::ast::{self, NameOwner, StructKind, TypeAscriptionOwner}; |
8 | ast::{self, NameOwner, StructKind, TypeAscriptionOwner}, | ||
9 | TreeArc, | ||
10 | }; | ||
11 | 8 | ||
12 | use crate::{ | 9 | use crate::{ |
13 | type_ref::TypeRef, AsName, AstDatabase, Crate, DefDatabase, Enum, EnumVariant, FieldSource, | 10 | type_ref::TypeRef, AsName, AstDatabase, Crate, DefDatabase, Enum, EnumVariant, FieldSource, |
@@ -59,11 +56,11 @@ impl StructData { | |||
59 | struct_: Struct, | 56 | struct_: Struct, |
60 | ) -> Arc<StructData> { | 57 | ) -> Arc<StructData> { |
61 | let src = struct_.source(db); | 58 | let src = struct_.source(db); |
62 | Arc::new(StructData::new(&*src.ast)) | 59 | Arc::new(StructData::new(&src.ast)) |
63 | } | 60 | } |
64 | } | 61 | } |
65 | 62 | ||
66 | fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = &ast::EnumVariant> { | 63 | fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = ast::EnumVariant> { |
67 | enum_def.variant_list().into_iter().flat_map(|it| it.variants()) | 64 | enum_def.variant_list().into_iter().flat_map(|it| it.variants()) |
68 | } | 65 | } |
69 | 66 | ||
@@ -71,9 +68,9 @@ impl EnumVariant { | |||
71 | pub(crate) fn source_impl( | 68 | pub(crate) fn source_impl( |
72 | self, | 69 | self, |
73 | db: &(impl DefDatabase + AstDatabase), | 70 | db: &(impl DefDatabase + AstDatabase), |
74 | ) -> Source<TreeArc<ast::EnumVariant>> { | 71 | ) -> Source<ast::EnumVariant> { |
75 | let src = self.parent.source(db); | 72 | let src = self.parent.source(db); |
76 | let ast = variants(&*src.ast) | 73 | let ast = variants(&src.ast) |
77 | .zip(db.enum_data(self.parent).variants.iter()) | 74 | .zip(db.enum_data(self.parent).variants.iter()) |
78 | .find(|(_syntax, (id, _))| *id == self.id) | 75 | .find(|(_syntax, (id, _))| *id == self.id) |
79 | .unwrap() | 76 | .unwrap() |
@@ -96,7 +93,7 @@ impl EnumData { | |||
96 | pub(crate) fn enum_data_query(db: &(impl DefDatabase + AstDatabase), e: Enum) -> Arc<EnumData> { | 93 | pub(crate) fn enum_data_query(db: &(impl DefDatabase + AstDatabase), e: Enum) -> Arc<EnumData> { |
97 | let src = e.source(db); | 94 | let src = e.source(db); |
98 | let name = src.ast.name().map(|n| n.as_name()); | 95 | let name = src.ast.name().map(|n| n.as_name()); |
99 | let variants = variants(&*src.ast) | 96 | let variants = variants(&src.ast) |
100 | .map(|var| EnumVariantData { | 97 | .map(|var| EnumVariantData { |
101 | name: var.name().map(|it| it.as_name()), | 98 | name: var.name().map(|it| it.as_name()), |
102 | variant_data: Arc::new(VariantData::new(var.kind())), | 99 | variant_data: Arc::new(VariantData::new(var.kind())), |
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs index 4fb5844f4..779764590 100644 --- a/crates/ra_hir/src/code_model.rs +++ b/crates/ra_hir/src/code_model.rs | |||
@@ -4,10 +4,7 @@ pub(crate) mod docs; | |||
4 | use std::sync::Arc; | 4 | use std::sync::Arc; |
5 | 5 | ||
6 | use ra_db::{CrateId, Edition, FileId, SourceRootId}; | 6 | use ra_db::{CrateId, Edition, FileId, SourceRootId}; |
7 | use ra_syntax::{ | 7 | use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner}; |
8 | ast::{self, NameOwner, TypeAscriptionOwner}, | ||
9 | TreeArc, | ||
10 | }; | ||
11 | 8 | ||
12 | use crate::{ | 9 | use crate::{ |
13 | adt::{EnumVariantId, StructFieldId, VariantDef}, | 10 | adt::{EnumVariantId, StructFieldId, VariantDef}, |
@@ -155,8 +152,8 @@ impl_froms!( | |||
155 | ); | 152 | ); |
156 | 153 | ||
157 | pub enum ModuleSource { | 154 | pub enum ModuleSource { |
158 | SourceFile(TreeArc<ast::SourceFile>), | 155 | SourceFile(ast::SourceFile), |
159 | Module(TreeArc<ast::Module>), | 156 | Module(ast::Module), |
160 | } | 157 | } |
161 | 158 | ||
162 | impl ModuleSource { | 159 | impl ModuleSource { |
@@ -199,7 +196,7 @@ impl Module { | |||
199 | self, | 196 | self, |
200 | db: &impl HirDatabase, | 197 | db: &impl HirDatabase, |
201 | import: ImportId, | 198 | import: ImportId, |
202 | ) -> Either<TreeArc<ast::UseTree>, TreeArc<ast::ExternCrateItem>> { | 199 | ) -> Either<ast::UseTree, ast::ExternCrateItem> { |
203 | let src = self.definition_source(db); | 200 | let src = self.definition_source(db); |
204 | let (_, source_map) = db.raw_items_with_source_map(src.file_id); | 201 | let (_, source_map) = db.raw_items_with_source_map(src.file_id); |
205 | source_map.get(&src.ast, import) | 202 | source_map.get(&src.ast, import) |
@@ -321,8 +318,8 @@ pub struct StructField { | |||
321 | 318 | ||
322 | #[derive(Debug)] | 319 | #[derive(Debug)] |
323 | pub enum FieldSource { | 320 | pub enum FieldSource { |
324 | Named(TreeArc<ast::NamedFieldDef>), | 321 | Named(ast::NamedFieldDef), |
325 | Pos(TreeArc<ast::PosFieldDef>), | 322 | Pos(ast::PosFieldDef), |
326 | } | 323 | } |
327 | 324 | ||
328 | impl StructField { | 325 | impl StructField { |
@@ -736,7 +733,7 @@ impl ConstData { | |||
736 | konst: Const, | 733 | konst: Const, |
737 | ) -> Arc<ConstData> { | 734 | ) -> Arc<ConstData> { |
738 | let node = konst.source(db).ast; | 735 | let node = konst.source(db).ast; |
739 | const_data_for(&*node) | 736 | const_data_for(&node) |
740 | } | 737 | } |
741 | 738 | ||
742 | pub(crate) fn static_data_query( | 739 | pub(crate) fn static_data_query( |
@@ -744,7 +741,7 @@ impl ConstData { | |||
744 | konst: Static, | 741 | konst: Static, |
745 | ) -> Arc<ConstData> { | 742 | ) -> Arc<ConstData> { |
746 | let node = konst.source(db).ast; | 743 | let node = konst.source(db).ast; |
747 | const_data_for(&*node) | 744 | const_data_for(&node) |
748 | } | 745 | } |
749 | } | 746 | } |
750 | 747 | ||
diff --git a/crates/ra_hir/src/code_model/docs.rs b/crates/ra_hir/src/code_model/docs.rs index 007ef315d..a2b4d8e97 100644 --- a/crates/ra_hir/src/code_model/docs.rs +++ b/crates/ra_hir/src/code_model/docs.rs | |||
@@ -71,21 +71,21 @@ pub(crate) fn documentation_query( | |||
71 | def: DocDef, | 71 | def: DocDef, |
72 | ) -> Option<Documentation> { | 72 | ) -> Option<Documentation> { |
73 | match def { | 73 | match def { |
74 | DocDef::Module(it) => docs_from_ast(&*it.declaration_source(db)?.ast), | 74 | DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.ast), |
75 | DocDef::StructField(it) => match it.source(db).ast { | 75 | DocDef::StructField(it) => match it.source(db).ast { |
76 | FieldSource::Named(named) => docs_from_ast(&*named), | 76 | FieldSource::Named(named) => docs_from_ast(&named), |
77 | FieldSource::Pos(..) => None, | 77 | FieldSource::Pos(..) => None, |
78 | }, | 78 | }, |
79 | DocDef::Struct(it) => docs_from_ast(&*it.source(db).ast), | 79 | DocDef::Struct(it) => docs_from_ast(&it.source(db).ast), |
80 | DocDef::Enum(it) => docs_from_ast(&*it.source(db).ast), | 80 | DocDef::Enum(it) => docs_from_ast(&it.source(db).ast), |
81 | DocDef::EnumVariant(it) => docs_from_ast(&*it.source(db).ast), | 81 | DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).ast), |
82 | DocDef::Static(it) => docs_from_ast(&*it.source(db).ast), | 82 | DocDef::Static(it) => docs_from_ast(&it.source(db).ast), |
83 | DocDef::Const(it) => docs_from_ast(&*it.source(db).ast), | 83 | DocDef::Const(it) => docs_from_ast(&it.source(db).ast), |
84 | DocDef::Function(it) => docs_from_ast(&*it.source(db).ast), | 84 | DocDef::Function(it) => docs_from_ast(&it.source(db).ast), |
85 | DocDef::Union(it) => docs_from_ast(&*it.source(db).ast), | 85 | DocDef::Union(it) => docs_from_ast(&it.source(db).ast), |
86 | DocDef::Trait(it) => docs_from_ast(&*it.source(db).ast), | 86 | DocDef::Trait(it) => docs_from_ast(&it.source(db).ast), |
87 | DocDef::TypeAlias(it) => docs_from_ast(&*it.source(db).ast), | 87 | DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).ast), |
88 | DocDef::MacroDef(it) => docs_from_ast(&*it.source(db).ast), | 88 | DocDef::MacroDef(it) => docs_from_ast(&it.source(db).ast), |
89 | } | 89 | } |
90 | } | 90 | } |
91 | 91 | ||
diff --git a/crates/ra_hir/src/code_model/src.rs b/crates/ra_hir/src/code_model/src.rs index 72451e0e7..32bd9c661 100644 --- a/crates/ra_hir/src/code_model/src.rs +++ b/crates/ra_hir/src/code_model/src.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | use ra_syntax::{ast, TreeArc}; | 1 | use ra_syntax::ast; |
2 | 2 | ||
3 | use crate::{ | 3 | use crate::{ |
4 | ids::AstItemDef, AstDatabase, Const, DefDatabase, Enum, EnumVariant, FieldSource, Function, | 4 | ids::AstItemDef, AstDatabase, Const, DefDatabase, Enum, EnumVariant, FieldSource, Function, |
@@ -34,7 +34,7 @@ impl Module { | |||
34 | pub fn declaration_source( | 34 | pub fn declaration_source( |
35 | self, | 35 | self, |
36 | db: &(impl DefDatabase + AstDatabase), | 36 | db: &(impl DefDatabase + AstDatabase), |
37 | ) -> Option<Source<TreeArc<ast::Module>>> { | 37 | ) -> Option<Source<ast::Module>> { |
38 | let def_map = db.crate_def_map(self.krate); | 38 | let def_map = db.crate_def_map(self.krate); |
39 | let decl = def_map[self.module_id].declaration?; | 39 | let decl = def_map[self.module_id].declaration?; |
40 | let ast = decl.to_node(db); | 40 | let ast = decl.to_node(db); |
@@ -49,62 +49,62 @@ impl HasSource for StructField { | |||
49 | } | 49 | } |
50 | } | 50 | } |
51 | impl HasSource for Struct { | 51 | impl HasSource for Struct { |
52 | type Ast = TreeArc<ast::StructDef>; | 52 | type Ast = ast::StructDef; |
53 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StructDef>> { | 53 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> { |
54 | self.id.source(db) | 54 | self.id.source(db) |
55 | } | 55 | } |
56 | } | 56 | } |
57 | impl HasSource for Union { | 57 | impl HasSource for Union { |
58 | type Ast = TreeArc<ast::StructDef>; | 58 | type Ast = ast::StructDef; |
59 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StructDef>> { | 59 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> { |
60 | self.id.source(db) | 60 | self.id.source(db) |
61 | } | 61 | } |
62 | } | 62 | } |
63 | impl HasSource for Enum { | 63 | impl HasSource for Enum { |
64 | type Ast = TreeArc<ast::EnumDef>; | 64 | type Ast = ast::EnumDef; |
65 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::EnumDef>> { | 65 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumDef> { |
66 | self.id.source(db) | 66 | self.id.source(db) |
67 | } | 67 | } |
68 | } | 68 | } |
69 | impl HasSource for EnumVariant { | 69 | impl HasSource for EnumVariant { |
70 | type Ast = TreeArc<ast::EnumVariant>; | 70 | type Ast = ast::EnumVariant; |
71 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::EnumVariant>> { | 71 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumVariant> { |
72 | self.source_impl(db) | 72 | self.source_impl(db) |
73 | } | 73 | } |
74 | } | 74 | } |
75 | impl HasSource for Function { | 75 | impl HasSource for Function { |
76 | type Ast = TreeArc<ast::FnDef>; | 76 | type Ast = ast::FnDef; |
77 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::FnDef>> { | 77 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::FnDef> { |
78 | self.id.source(db) | 78 | self.id.source(db) |
79 | } | 79 | } |
80 | } | 80 | } |
81 | impl HasSource for Const { | 81 | impl HasSource for Const { |
82 | type Ast = TreeArc<ast::ConstDef>; | 82 | type Ast = ast::ConstDef; |
83 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::ConstDef>> { | 83 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ConstDef> { |
84 | self.id.source(db) | 84 | self.id.source(db) |
85 | } | 85 | } |
86 | } | 86 | } |
87 | impl HasSource for Static { | 87 | impl HasSource for Static { |
88 | type Ast = TreeArc<ast::StaticDef>; | 88 | type Ast = ast::StaticDef; |
89 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StaticDef>> { | 89 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StaticDef> { |
90 | self.id.source(db) | 90 | self.id.source(db) |
91 | } | 91 | } |
92 | } | 92 | } |
93 | impl HasSource for Trait { | 93 | impl HasSource for Trait { |
94 | type Ast = TreeArc<ast::TraitDef>; | 94 | type Ast = ast::TraitDef; |
95 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::TraitDef>> { | 95 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TraitDef> { |
96 | self.id.source(db) | 96 | self.id.source(db) |
97 | } | 97 | } |
98 | } | 98 | } |
99 | impl HasSource for TypeAlias { | 99 | impl HasSource for TypeAlias { |
100 | type Ast = TreeArc<ast::TypeAliasDef>; | 100 | type Ast = ast::TypeAliasDef; |
101 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::TypeAliasDef>> { | 101 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TypeAliasDef> { |
102 | self.id.source(db) | 102 | self.id.source(db) |
103 | } | 103 | } |
104 | } | 104 | } |
105 | impl HasSource for MacroDef { | 105 | impl HasSource for MacroDef { |
106 | type Ast = TreeArc<ast::MacroCall>; | 106 | type Ast = ast::MacroCall; |
107 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::MacroCall>> { | 107 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::MacroCall> { |
108 | Source { file_id: self.id.0.file_id(), ast: self.id.0.to_node(db) } | 108 | Source { file_id: self.id.0.file_id(), ast: self.id.0.to_node(db) } |
109 | } | 109 | } |
110 | } | 110 | } |
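At the HIR level the change is purely in the types: every `HasSource::Ast` associated type loses its `TreeArc<...>` wrapper, so a `Source<N>` now carries the AST node itself. A sketch of what a consumer looks like, using only signatures from this hunk (the free function and its placement inside `ra_hir` are assumptions, as are the crate-internal imports of `AstDatabase`, `DefDatabase`, `Function`, and `HasSource`):

    use ra_syntax::ast;

    fn fn_def_of(db: &(impl DefDatabase + AstDatabase), f: Function) -> ast::FnDef {
        // `Source { file_id, ast }` now carries the AST node directly.
        f.source(db).ast
    }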
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs index da9f3e32d..358365176 100644 --- a/crates/ra_hir/src/db.rs +++ b/crates/ra_hir/src/db.rs | |||
@@ -2,7 +2,7 @@ use std::sync::Arc; | |||
2 | 2 | ||
3 | use parking_lot::Mutex; | 3 | use parking_lot::Mutex; |
4 | use ra_db::{salsa, SourceDatabase}; | 4 | use ra_db::{salsa, SourceDatabase}; |
5 | use ra_syntax::{ast, Parse, SmolStr, SyntaxNode, TreeArc}; | 5 | use ra_syntax::{ast, Parse, SmolStr, SyntaxNode}; |
6 | 6 | ||
7 | use crate::{ | 7 | use crate::{ |
8 | adt::{EnumData, StructData}, | 8 | adt::{EnumData, StructData}, |
@@ -62,11 +62,11 @@ pub trait AstDatabase: InternDatabase { | |||
62 | 62 | ||
63 | #[salsa::transparent] | 63 | #[salsa::transparent] |
64 | #[salsa::invoke(crate::source_id::AstIdMap::file_item_query)] | 64 | #[salsa::invoke(crate::source_id::AstIdMap::file_item_query)] |
65 | fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> TreeArc<SyntaxNode>; | 65 | fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> SyntaxNode; |
66 | 66 | ||
67 | #[salsa::transparent] | 67 | #[salsa::transparent] |
68 | #[salsa::invoke(crate::ids::HirFileId::parse_or_expand_query)] | 68 | #[salsa::invoke(crate::ids::HirFileId::parse_or_expand_query)] |
69 | fn parse_or_expand(&self, file_id: HirFileId) -> Option<TreeArc<SyntaxNode>>; | 69 | fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>; |
70 | 70 | ||
71 | #[salsa::invoke(crate::ids::HirFileId::parse_macro_query)] | 71 | #[salsa::invoke(crate::ids::HirFileId::parse_macro_query)] |
72 | fn parse_macro(&self, macro_file: ids::MacroFile) -> Option<Parse<SyntaxNode>>; | 72 | fn parse_macro(&self, macro_file: ids::MacroFile) -> Option<Parse<SyntaxNode>>; |
diff --git a/crates/ra_hir/src/diagnostics.rs b/crates/ra_hir/src/diagnostics.rs index c97f0656d..0290483b3 100644 --- a/crates/ra_hir/src/diagnostics.rs +++ b/crates/ra_hir/src/diagnostics.rs | |||
@@ -1,6 +1,6 @@ | |||
1 | use std::{any::Any, fmt}; | 1 | use std::{any::Any, fmt}; |
2 | 2 | ||
3 | use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, TreeArc}; | 3 | use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange}; |
4 | use relative_path::RelativePathBuf; | 4 | use relative_path::RelativePathBuf; |
5 | 5 | ||
6 | use crate::{HirDatabase, HirFileId, Name}; | 6 | use crate::{HirDatabase, HirFileId, Name}; |
@@ -33,9 +33,9 @@ pub trait AstDiagnostic { | |||
33 | } | 33 | } |
34 | 34 | ||
35 | impl dyn Diagnostic { | 35 | impl dyn Diagnostic { |
36 | pub fn syntax_node(&self, db: &impl HirDatabase) -> TreeArc<SyntaxNode> { | 36 | pub fn syntax_node(&self, db: &impl HirDatabase) -> SyntaxNode { |
37 | let node = db.parse_or_expand(self.file()).unwrap(); | 37 | let node = db.parse_or_expand(self.file()).unwrap(); |
38 | self.syntax_node_ptr().to_node(&*node).to_owned() | 38 | self.syntax_node_ptr().to_node(&node) |
39 | } | 39 | } |
40 | 40 | ||
41 | pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> { | 41 | pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> { |
@@ -143,11 +143,11 @@ impl Diagnostic for MissingFields { | |||
143 | } | 143 | } |
144 | 144 | ||
145 | impl AstDiagnostic for MissingFields { | 145 | impl AstDiagnostic for MissingFields { |
146 | type AST = TreeArc<ast::NamedFieldList>; | 146 | type AST = ast::NamedFieldList; |
147 | 147 | ||
148 | fn ast(&self, db: &impl HirDatabase) -> Self::AST { | 148 | fn ast(&self, db: &impl HirDatabase) -> Self::AST { |
149 | let root = db.parse_or_expand(self.file()).unwrap(); | 149 | let root = db.parse_or_expand(self.file()).unwrap(); |
150 | let node = self.syntax_node_ptr().to_node(&*root); | 150 | let node = self.syntax_node_ptr().to_node(&root); |
151 | ast::NamedFieldList::cast(&node).unwrap().to_owned() | 151 | ast::NamedFieldList::cast(node).unwrap() |
152 | } | 152 | } |
153 | } | 153 | } |
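Resolving a `SyntaxNodePtr` now goes straight to an owned node: `to_node(&root)` returns a `SyntaxNode`, and casting it to the typed `ast::NamedFieldList` consumes that node, so both `.to_owned()` calls vanish. A sketch of the pointer-resolution idiom (the wrapper function is invented):

    use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr};

    // Resolve the pointer against an owned root, then cast by value.
    fn field_list_at(root: &SyntaxNode, ptr: SyntaxNodePtr) -> Option<ast::NamedFieldList> {
        ast::NamedFieldList::cast(ptr.to_node(root))
    }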
diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs index 3a97d97ce..70af3f119 100644 --- a/crates/ra_hir/src/expr.rs +++ b/crates/ra_hir/src/expr.rs | |||
@@ -550,7 +550,7 @@ where | |||
550 | self.exprs.alloc(block) | 550 | self.exprs.alloc(block) |
551 | } | 551 | } |
552 | 552 | ||
553 | fn collect_expr(&mut self, expr: &ast::Expr) -> ExprId { | 553 | fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { |
554 | let syntax_ptr = SyntaxNodePtr::new(expr.syntax()); | 554 | let syntax_ptr = SyntaxNodePtr::new(expr.syntax()); |
555 | match expr.kind() { | 555 | match expr.kind() { |
556 | ast::ExprKind::IfExpr(e) => { | 556 | ast::ExprKind::IfExpr(e) => { |
@@ -565,7 +565,8 @@ where | |||
565 | .map(|b| match b { | 565 | .map(|b| match b { |
566 | ast::ElseBranch::Block(it) => self.collect_block(it), | 566 | ast::ElseBranch::Block(it) => self.collect_block(it), |
567 | ast::ElseBranch::IfExpr(elif) => { | 567 | ast::ElseBranch::IfExpr(elif) => { |
568 | let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap(); | 568 | let expr: ast::Expr = |
569 | ast::Expr::cast(elif.syntax().clone()).unwrap(); | ||
569 | self.collect_expr(expr) | 570 | self.collect_expr(expr) |
570 | } | 571 | } |
571 | }) | 572 | }) |
@@ -582,7 +583,7 @@ where | |||
582 | let else_branch = e.else_branch().map(|b| match b { | 583 | let else_branch = e.else_branch().map(|b| match b { |
583 | ast::ElseBranch::Block(it) => self.collect_block(it), | 584 | ast::ElseBranch::Block(it) => self.collect_block(it), |
584 | ast::ElseBranch::IfExpr(elif) => { | 585 | ast::ElseBranch::IfExpr(elif) => { |
585 | let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap(); | 586 | let expr: ast::Expr = ast::Expr::cast(elif.syntax().clone()).unwrap(); |
586 | self.collect_expr(expr) | 587 | self.collect_expr(expr) |
587 | } | 588 | } |
588 | }); | 589 | }); |
@@ -689,7 +690,7 @@ where | |||
689 | let struct_lit = if let Some(nfl) = e.named_field_list() { | 690 | let struct_lit = if let Some(nfl) = e.named_field_list() { |
690 | let fields = nfl | 691 | let fields = nfl |
691 | .fields() | 692 | .fields() |
692 | .inspect(|field| field_ptrs.push(AstPtr::new(*field))) | 693 | .inspect(|field| field_ptrs.push(AstPtr::new(field))) |
693 | .map(|field| StructLitField { | 694 | .map(|field| StructLitField { |
694 | name: field | 695 | name: field |
695 | .name_ref() | 696 | .name_ref() |
@@ -699,7 +700,7 @@ where | |||
699 | self.collect_expr(e) | 700 | self.collect_expr(e) |
700 | } else if let Some(nr) = field.name_ref() { | 701 | } else if let Some(nr) = field.name_ref() { |
701 | // field shorthand | 702 | // field shorthand |
702 | let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(nr))); | 703 | let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(&nr))); |
703 | self.source_map | 704 | self.source_map |
704 | .expr_map | 705 | .expr_map |
705 | .insert(SyntaxNodePtr::new(nr.syntax()), id); | 706 | .insert(SyntaxNodePtr::new(nr.syntax()), id); |
@@ -837,7 +838,7 @@ where | |||
837 | let ast_id = self | 838 | let ast_id = self |
838 | .db | 839 | .db |
839 | .ast_id_map(self.current_file_id) | 840 | .ast_id_map(self.current_file_id) |
840 | .ast_id(e) | 841 | .ast_id(&e) |
841 | .with_file_id(self.current_file_id); | 842 | .with_file_id(self.current_file_id); |
842 | 843 | ||
843 | if let Some(path) = e.path().and_then(Path::from_ast) { | 844 | if let Some(path) = e.path().and_then(Path::from_ast) { |
@@ -845,11 +846,11 @@ where | |||
845 | let call_id = MacroCallLoc { def: def.id, ast_id }.id(self.db); | 846 | let call_id = MacroCallLoc { def: def.id, ast_id }.id(self.db); |
846 | let file_id = call_id.as_file(MacroFileKind::Expr); | 847 | let file_id = call_id.as_file(MacroFileKind::Expr); |
847 | if let Some(node) = self.db.parse_or_expand(file_id) { | 848 | if let Some(node) = self.db.parse_or_expand(file_id) { |
848 | if let Some(expr) = ast::Expr::cast(&*node) { | 849 | if let Some(expr) = ast::Expr::cast(node) { |
849 | log::debug!("macro expansion {}", expr.syntax().debug_dump()); | 850 | log::debug!("macro expansion {}", expr.syntax().debug_dump()); |
850 | let old_file_id = | 851 | let old_file_id = |
851 | std::mem::replace(&mut self.current_file_id, file_id); | 852 | std::mem::replace(&mut self.current_file_id, file_id); |
852 | let id = self.collect_expr(&expr); | 853 | let id = self.collect_expr(expr); |
853 | self.current_file_id = old_file_id; | 854 | self.current_file_id = old_file_id; |
854 | return id; | 855 | return id; |
855 | } | 856 | } |
@@ -863,7 +864,7 @@ where | |||
863 | } | 864 | } |
864 | } | 865 | } |
865 | 866 | ||
866 | fn collect_expr_opt(&mut self, expr: Option<&ast::Expr>) -> ExprId { | 867 | fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId { |
867 | if let Some(expr) = expr { | 868 | if let Some(expr) = expr { |
868 | self.collect_expr(expr) | 869 | self.collect_expr(expr) |
869 | } else { | 870 | } else { |
@@ -871,7 +872,7 @@ where | |||
871 | } | 872 | } |
872 | } | 873 | } |
873 | 874 | ||
874 | fn collect_block(&mut self, block: &ast::Block) -> ExprId { | 875 | fn collect_block(&mut self, block: ast::Block) -> ExprId { |
875 | let statements = block | 876 | let statements = block |
876 | .statements() | 877 | .statements() |
877 | .map(|s| match s.kind() { | 878 | .map(|s| match s.kind() { |
@@ -890,7 +891,7 @@ where | |||
890 | self.alloc_expr(Expr::Block { statements, tail }, SyntaxNodePtr::new(block.syntax())) | 891 | self.alloc_expr(Expr::Block { statements, tail }, SyntaxNodePtr::new(block.syntax())) |
891 | } | 892 | } |
892 | 893 | ||
893 | fn collect_block_opt(&mut self, block: Option<&ast::Block>) -> ExprId { | 894 | fn collect_block_opt(&mut self, block: Option<ast::Block>) -> ExprId { |
894 | if let Some(block) = block { | 895 | if let Some(block) = block { |
895 | self.collect_block(block) | 896 | self.collect_block(block) |
896 | } else { | 897 | } else { |
@@ -898,7 +899,7 @@ where | |||
898 | } | 899 | } |
899 | } | 900 | } |
900 | 901 | ||
901 | fn collect_pat(&mut self, pat: &ast::Pat) -> PatId { | 902 | fn collect_pat(&mut self, pat: ast::Pat) -> PatId { |
902 | let pattern = match pat.kind() { | 903 | let pattern = match pat.kind() { |
903 | ast::PatKind::BindPat(bp) => { | 904 | ast::PatKind::BindPat(bp) => { |
904 | let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); | 905 | let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); |
@@ -932,7 +933,8 @@ where | |||
932 | let mut fields: Vec<_> = field_pat_list | 933 | let mut fields: Vec<_> = field_pat_list |
933 | .bind_pats() | 934 | .bind_pats() |
934 | .filter_map(|bind_pat| { | 935 | .filter_map(|bind_pat| { |
935 | let ast_pat = ast::Pat::cast(bind_pat.syntax()).expect("bind pat is a pat"); | 936 | let ast_pat = |
937 | ast::Pat::cast(bind_pat.syntax().clone()).expect("bind pat is a pat"); | ||
936 | let pat = self.collect_pat(ast_pat); | 938 | let pat = self.collect_pat(ast_pat); |
937 | let name = bind_pat.name()?.as_name(); | 939 | let name = bind_pat.name()?.as_name(); |
938 | Some(FieldPat { name, pat }) | 940 | Some(FieldPat { name, pat }) |
@@ -953,11 +955,11 @@ where | |||
953 | ast::PatKind::LiteralPat(_) => Pat::Missing, | 955 | ast::PatKind::LiteralPat(_) => Pat::Missing, |
954 | ast::PatKind::SlicePat(_) | ast::PatKind::RangePat(_) => Pat::Missing, | 956 | ast::PatKind::SlicePat(_) | ast::PatKind::RangePat(_) => Pat::Missing, |
955 | }; | 957 | }; |
956 | let ptr = AstPtr::new(pat); | 958 | let ptr = AstPtr::new(&pat); |
957 | self.alloc_pat(pattern, Either::A(ptr)) | 959 | self.alloc_pat(pattern, Either::A(ptr)) |
958 | } | 960 | } |
959 | 961 | ||
960 | fn collect_pat_opt(&mut self, pat: Option<&ast::Pat>) -> PatId { | 962 | fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId { |
961 | if let Some(pat) = pat { | 963 | if let Some(pat) = pat { |
962 | self.collect_pat(pat) | 964 | self.collect_pat(pat) |
963 | } else { | 965 | } else { |
@@ -965,20 +967,20 @@ where | |||
965 | } | 967 | } |
966 | } | 968 | } |
967 | 969 | ||
968 | fn collect_const_body(&mut self, node: &ast::ConstDef) { | 970 | fn collect_const_body(&mut self, node: ast::ConstDef) { |
969 | let body = self.collect_expr_opt(node.body()); | 971 | let body = self.collect_expr_opt(node.body()); |
970 | self.body_expr = Some(body); | 972 | self.body_expr = Some(body); |
971 | } | 973 | } |
972 | 974 | ||
973 | fn collect_static_body(&mut self, node: &ast::StaticDef) { | 975 | fn collect_static_body(&mut self, node: ast::StaticDef) { |
974 | let body = self.collect_expr_opt(node.body()); | 976 | let body = self.collect_expr_opt(node.body()); |
975 | self.body_expr = Some(body); | 977 | self.body_expr = Some(body); |
976 | } | 978 | } |
977 | 979 | ||
978 | fn collect_fn_body(&mut self, node: &ast::FnDef) { | 980 | fn collect_fn_body(&mut self, node: ast::FnDef) { |
979 | if let Some(param_list) = node.param_list() { | 981 | if let Some(param_list) = node.param_list() { |
980 | if let Some(self_param) = param_list.self_param() { | 982 | if let Some(self_param) = param_list.self_param() { |
981 | let ptr = AstPtr::new(self_param); | 983 | let ptr = AstPtr::new(&self_param); |
982 | let param_pat = self.alloc_pat( | 984 | let param_pat = self.alloc_pat( |
983 | Pat::Bind { | 985 | Pat::Bind { |
984 | name: SELF_PARAM, | 986 | name: SELF_PARAM, |
@@ -1027,17 +1029,17 @@ pub(crate) fn body_with_source_map_query( | |||
1027 | DefWithBody::Const(ref c) => { | 1029 | DefWithBody::Const(ref c) => { |
1028 | let src = c.source(db); | 1030 | let src = c.source(db); |
1029 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); | 1031 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); |
1030 | collector.collect_const_body(&src.ast) | 1032 | collector.collect_const_body(src.ast) |
1031 | } | 1033 | } |
1032 | DefWithBody::Function(ref f) => { | 1034 | DefWithBody::Function(ref f) => { |
1033 | let src = f.source(db); | 1035 | let src = f.source(db); |
1034 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); | 1036 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); |
1035 | collector.collect_fn_body(&src.ast) | 1037 | collector.collect_fn_body(src.ast) |
1036 | } | 1038 | } |
1037 | DefWithBody::Static(ref s) => { | 1039 | DefWithBody::Static(ref s) => { |
1038 | let src = s.source(db); | 1040 | let src = s.source(db); |
1039 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); | 1041 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); |
1040 | collector.collect_static_body(&src.ast) | 1042 | collector.collect_static_body(src.ast) |
1041 | } | 1043 | } |
1042 | } | 1044 | } |
1043 | 1045 | ||
diff --git a/crates/ra_hir/src/expr/scope.rs b/crates/ra_hir/src/expr/scope.rs index 28fd52684..6589b782c 100644 --- a/crates/ra_hir/src/expr/scope.rs +++ b/crates/ra_hir/src/expr/scope.rs | |||
@@ -190,7 +190,7 @@ mod tests { | |||
190 | 190 | ||
191 | let (db, _source_root, file_id) = MockDatabase::with_single_file(&code); | 191 | let (db, _source_root, file_id) = MockDatabase::with_single_file(&code); |
192 | let file = db.parse(file_id).ok().unwrap(); | 192 | let file = db.parse(file_id).ok().unwrap(); |
193 | let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); | 193 | let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); |
194 | let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None); | 194 | let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None); |
195 | 195 | ||
196 | let scopes = analyzer.scopes(); | 196 | let scopes = analyzer.scopes(); |
@@ -290,10 +290,10 @@ mod tests { | |||
290 | let file = db.parse(file_id).ok().unwrap(); | 290 | let file = db.parse(file_id).ok().unwrap(); |
291 | let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()) | 291 | let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()) |
292 | .expect("failed to find a name at the target offset"); | 292 | .expect("failed to find a name at the target offset"); |
293 | let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); | 293 | let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); |
294 | let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None); | 294 | let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None); |
295 | 295 | ||
296 | let local_name_entry = analyzer.resolve_local_name(name_ref).unwrap(); | 296 | let local_name_entry = analyzer.resolve_local_name(&name_ref).unwrap(); |
297 | let local_name = | 297 | let local_name = |
298 | local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); | 298 | local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); |
299 | assert_eq!(local_name.range(), expected_name.syntax().range()); | 299 | assert_eq!(local_name.range(), expected_name.syntax().range()); |
diff --git a/crates/ra_hir/src/expr/validation.rs b/crates/ra_hir/src/expr/validation.rs index c2a10a0b5..82a06ca25 100644 --- a/crates/ra_hir/src/expr/validation.rs +++ b/crates/ra_hir/src/expr/validation.rs | |||
@@ -79,7 +79,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
79 | .and_then(StructLit::cast) | 79 | .and_then(StructLit::cast) |
80 | .and_then(|lit| lit.named_field_list()) | 80 | .and_then(|lit| lit.named_field_list()) |
81 | { | 81 | { |
82 | let field_list_ptr = AstPtr::new(field_list_node); | 82 | let field_list_ptr = AstPtr::new(&field_list_node); |
83 | self.sink.push(MissingFields { | 83 | self.sink.push(MissingFields { |
84 | file: file_id, | 84 | file: file_id, |
85 | field_list: field_list_ptr, | 85 | field_list: field_list_ptr, |
diff --git a/crates/ra_hir/src/generics.rs b/crates/ra_hir/src/generics.rs index 07a59193f..bcbb4988d 100644 --- a/crates/ra_hir/src/generics.rs +++ b/crates/ra_hir/src/generics.rs | |||
@@ -76,17 +76,17 @@ impl GenericParams { | |||
76 | generics.parent_params = parent.map(|p| db.generic_params(p)); | 76 | generics.parent_params = parent.map(|p| db.generic_params(p)); |
77 | let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32; | 77 | let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32; |
78 | match def { | 78 | match def { |
79 | GenericDef::Function(it) => generics.fill(&*it.source(db).ast, start), | 79 | GenericDef::Function(it) => generics.fill(&it.source(db).ast, start), |
80 | GenericDef::Struct(it) => generics.fill(&*it.source(db).ast, start), | 80 | GenericDef::Struct(it) => generics.fill(&it.source(db).ast, start), |
81 | GenericDef::Union(it) => generics.fill(&*it.source(db).ast, start), | 81 | GenericDef::Union(it) => generics.fill(&it.source(db).ast, start), |
82 | GenericDef::Enum(it) => generics.fill(&*it.source(db).ast, start), | 82 | GenericDef::Enum(it) => generics.fill(&it.source(db).ast, start), |
83 | GenericDef::Trait(it) => { | 83 | GenericDef::Trait(it) => { |
84 | // traits get the Self type as an implicit first type parameter | 84 | // traits get the Self type as an implicit first type parameter |
85 | generics.params.push(GenericParam { idx: start, name: SELF_TYPE, default: None }); | 85 | generics.params.push(GenericParam { idx: start, name: SELF_TYPE, default: None }); |
86 | generics.fill(&*it.source(db).ast, start + 1); | 86 | generics.fill(&it.source(db).ast, start + 1); |
87 | } | 87 | } |
88 | GenericDef::TypeAlias(it) => generics.fill(&*it.source(db).ast, start), | 88 | GenericDef::TypeAlias(it) => generics.fill(&it.source(db).ast, start), |
89 | GenericDef::ImplBlock(it) => generics.fill(&*it.source(db).ast, start), | 89 | GenericDef::ImplBlock(it) => generics.fill(&it.source(db).ast, start), |
90 | GenericDef::EnumVariant(_) => {} | 90 | GenericDef::EnumVariant(_) => {} |
91 | } | 91 | } |
92 | 92 | ||
@@ -102,9 +102,9 @@ impl GenericParams { | |||
102 | } | 102 | } |
103 | } | 103 | } |
104 | 104 | ||
105 | fn fill_params(&mut self, params: &ast::TypeParamList, start: u32) { | 105 | fn fill_params(&mut self, params: ast::TypeParamList, start: u32) { |
106 | for (idx, type_param) in params.type_params().enumerate() { | 106 | for (idx, type_param) in params.type_params().enumerate() { |
107 | let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing); | 107 | let name = type_param.name().map_or_else(Name::missing, |it| it.as_name()); |
108 | let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast); | 108 | let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast); |
109 | 109 | ||
110 | let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default }; | 110 | let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default }; |
@@ -121,7 +121,7 @@ impl GenericParams { | |||
121 | } | 121 | } |
122 | } | 122 | } |
123 | 123 | ||
124 | fn fill_where_predicates(&mut self, where_clause: &ast::WhereClause) { | 124 | fn fill_where_predicates(&mut self, where_clause: ast::WhereClause) { |
125 | for pred in where_clause.predicates() { | 125 | for pred in where_clause.predicates() { |
126 | let type_ref = match pred.type_ref() { | 126 | let type_ref = match pred.type_ref() { |
127 | Some(type_ref) => type_ref, | 127 | Some(type_ref) => type_ref, |
@@ -134,7 +134,7 @@ impl GenericParams { | |||
134 | } | 134 | } |
135 | } | 135 | } |
136 | 136 | ||
137 | fn add_where_predicate_from_bound(&mut self, bound: &ast::TypeBound, type_ref: TypeRef) { | 137 | fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) { |
138 | let path = bound | 138 | let path = bound |
139 | .type_ref() | 139 | .type_ref() |
140 | .and_then(|tr| match tr.kind() { | 140 | .and_then(|tr| match tr.kind() { |
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs index 83f5c3f39..05a18eb56 100644 --- a/crates/ra_hir/src/ids.rs +++ b/crates/ra_hir/src/ids.rs | |||
@@ -6,7 +6,7 @@ use std::{ | |||
6 | use mbe::MacroRules; | 6 | use mbe::MacroRules; |
7 | use ra_db::{salsa, FileId}; | 7 | use ra_db::{salsa, FileId}; |
8 | use ra_prof::profile; | 8 | use ra_prof::profile; |
9 | use ra_syntax::{ast, AstNode, Parse, SyntaxNode, TreeArc}; | 9 | use ra_syntax::{ast, AstNode, Parse, SyntaxNode}; |
10 | 10 | ||
11 | use crate::{AstDatabase, AstId, DefDatabase, FileAstId, InternDatabase, Module, Source}; | 11 | use crate::{AstDatabase, AstId, DefDatabase, FileAstId, InternDatabase, Module, Source}; |
12 | 12 | ||
@@ -58,11 +58,11 @@ impl HirFileId { | |||
58 | pub(crate) fn parse_or_expand_query( | 58 | pub(crate) fn parse_or_expand_query( |
59 | db: &impl AstDatabase, | 59 | db: &impl AstDatabase, |
60 | file_id: HirFileId, | 60 | file_id: HirFileId, |
61 | ) -> Option<TreeArc<SyntaxNode>> { | 61 | ) -> Option<SyntaxNode> { |
62 | match file_id.0 { | 62 | match file_id.0 { |
63 | HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().to_owned()), | 63 | HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().clone()), |
64 | HirFileIdRepr::Macro(macro_file) => { | 64 | HirFileIdRepr::Macro(macro_file) => { |
65 | db.parse_macro(macro_file).map(|it| it.tree().to_owned()) | 65 | db.parse_macro(macro_file).map(|it| it.syntax_node()) |
66 | } | 66 | } |
67 | } | 67 | } |
68 | } | 68 | } |
@@ -123,7 +123,7 @@ pub struct MacroDefId(pub(crate) AstId<ast::MacroCall>); | |||
123 | pub(crate) fn macro_def_query(db: &impl AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> { | 123 | pub(crate) fn macro_def_query(db: &impl AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> { |
124 | let macro_call = id.0.to_node(db); | 124 | let macro_call = id.0.to_node(db); |
125 | let arg = macro_call.token_tree()?; | 125 | let arg = macro_call.token_tree()?; |
126 | let (tt, _) = mbe::ast_to_token_tree(arg).or_else(|| { | 126 | let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| { |
127 | log::warn!("fail on macro_def to token tree: {:#?}", arg); | 127 | log::warn!("fail on macro_def to token tree: {:#?}", arg); |
128 | None | 128 | None |
129 | })?; | 129 | })?; |
@@ -138,7 +138,7 @@ pub(crate) fn macro_arg_query(db: &impl AstDatabase, id: MacroCallId) -> Option< | |||
138 | let loc = id.loc(db); | 138 | let loc = id.loc(db); |
139 | let macro_call = loc.ast_id.to_node(db); | 139 | let macro_call = loc.ast_id.to_node(db); |
140 | let arg = macro_call.token_tree()?; | 140 | let arg = macro_call.token_tree()?; |
141 | let (tt, _) = mbe::ast_to_token_tree(arg)?; | 141 | let (tt, _) = mbe::ast_to_token_tree(&arg)?; |
142 | Some(Arc::new(tt)) | 142 | Some(Arc::new(tt)) |
143 | } | 143 | } |
144 | 144 | ||
@@ -262,7 +262,7 @@ pub(crate) trait AstItemDef<N: AstNode>: salsa::InternKey + Clone { | |||
262 | let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) }; | 262 | let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) }; |
263 | Self::intern(ctx.db, loc) | 263 | Self::intern(ctx.db, loc) |
264 | } | 264 | } |
265 | fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source<TreeArc<N>> { | 265 | fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source<N> { |
266 | let loc = self.lookup_intern(db); | 266 | let loc = self.lookup_intern(db); |
267 | let ast = loc.ast_id.to_node(db); | 267 | let ast = loc.ast_id.to_node(db); |
268 | Source { file_id: loc.ast_id.file_id(), ast } | 268 | Source { file_id: loc.ast_id.file_id(), ast } |
diff --git a/crates/ra_hir/src/impl_block.rs b/crates/ra_hir/src/impl_block.rs index ce134b27a..8e62cf66d 100644 --- a/crates/ra_hir/src/impl_block.rs +++ b/crates/ra_hir/src/impl_block.rs | |||
@@ -4,7 +4,7 @@ use std::sync::Arc; | |||
4 | use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; | 4 | use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; |
5 | use ra_syntax::{ | 5 | use ra_syntax::{ |
6 | ast::{self, AstNode}, | 6 | ast::{self, AstNode}, |
7 | AstPtr, SourceFile, TreeArc, | 7 | AstPtr, SourceFile, |
8 | }; | 8 | }; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{ |
@@ -28,9 +28,9 @@ impl ImplSourceMap { | |||
28 | self.map.insert(impl_id, AstPtr::new(impl_block)) | 28 | self.map.insert(impl_id, AstPtr::new(impl_block)) |
29 | } | 29 | } |
30 | 30 | ||
31 | pub fn get(&self, source: &ModuleSource, impl_id: ImplId) -> TreeArc<ast::ImplBlock> { | 31 | pub fn get(&self, source: &ModuleSource, impl_id: ImplId) -> ast::ImplBlock { |
32 | let file = match source { | 32 | let file = match source { |
33 | ModuleSource::SourceFile(file) => &*file, | 33 | ModuleSource::SourceFile(file) => file.clone(), |
34 | ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), | 34 | ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), |
35 | }; | 35 | }; |
36 | 36 | ||
@@ -45,8 +45,8 @@ pub struct ImplBlock { | |||
45 | } | 45 | } |
46 | 46 | ||
47 | impl HasSource for ImplBlock { | 47 | impl HasSource for ImplBlock { |
48 | type Ast = TreeArc<ast::ImplBlock>; | 48 | type Ast = ast::ImplBlock; |
49 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::ImplBlock>> { | 49 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ImplBlock> { |
50 | let source_map = db.impls_in_module_with_source_map(self.module).1; | 50 | let source_map = db.impls_in_module_with_source_map(self.module).1; |
51 | let src = self.module.definition_source(db); | 51 | let src = self.module.definition_source(db); |
52 | Source { file_id: src.file_id, ast: source_map.get(&src.ast, self.impl_id) } | 52 | Source { file_id: src.file_id, ast: source_map.get(&src.ast, self.impl_id) } |
@@ -132,9 +132,9 @@ impl ImplData { | |||
132 | item_list | 132 | item_list |
133 | .impl_items() | 133 | .impl_items() |
134 | .map(|item_node| match item_node.kind() { | 134 | .map(|item_node| match item_node.kind() { |
135 | ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(), | 135 | ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(&it) }.into(), |
136 | ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(), | 136 | ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(), |
137 | ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(), | 137 | ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(), |
138 | }) | 138 | }) |
139 | .collect() | 139 | .collect() |
140 | } else { | 140 | } else { |
@@ -202,20 +202,20 @@ impl ModuleImplBlocks { | |||
202 | 202 | ||
203 | let src = m.module.definition_source(db); | 203 | let src = m.module.definition_source(db); |
204 | let node = match &src.ast { | 204 | let node = match &src.ast { |
205 | ModuleSource::SourceFile(node) => node.syntax(), | 205 | ModuleSource::SourceFile(node) => node.syntax().clone(), |
206 | ModuleSource::Module(node) => { | 206 | ModuleSource::Module(node) => { |
207 | node.item_list().expect("inline module should have item list").syntax() | 207 | node.item_list().expect("inline module should have item list").syntax().clone() |
208 | } | 208 | } |
209 | }; | 209 | }; |
210 | 210 | ||
211 | for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) { | 211 | for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) { |
212 | let impl_block = ImplData::from_ast(db, src.file_id, m.module, impl_block_ast); | 212 | let impl_block = ImplData::from_ast(db, src.file_id, m.module, &impl_block_ast); |
213 | let id = m.impls.alloc(impl_block); | 213 | let id = m.impls.alloc(impl_block); |
214 | for &impl_item in &m.impls[id].items { | 214 | for &impl_item in &m.impls[id].items { |
215 | m.impls_by_def.insert(impl_item, id); | 215 | m.impls_by_def.insert(impl_item, id); |
216 | } | 216 | } |
217 | 217 | ||
218 | source_map.insert(id, impl_block_ast); | 218 | source_map.insert(id, &impl_block_ast); |
219 | } | 219 | } |
220 | 220 | ||
221 | m | 221 | m |
diff --git a/crates/ra_hir/src/lang_item.rs b/crates/ra_hir/src/lang_item.rs index 0443d4d9a..fd6609fb8 100644 --- a/crates/ra_hir/src/lang_item.rs +++ b/crates/ra_hir/src/lang_item.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | use rustc_hash::FxHashMap; | 1 | use rustc_hash::FxHashMap; |
2 | use std::sync::Arc; | 2 | use std::sync::Arc; |
3 | 3 | ||
4 | use ra_syntax::{ast::AttrsOwner, SmolStr, TreeArc}; | 4 | use ra_syntax::{ast::AttrsOwner, SmolStr}; |
5 | 5 | ||
6 | use crate::{ | 6 | use crate::{ |
7 | AstDatabase, Crate, DefDatabase, Enum, Function, HasSource, HirDatabase, ImplBlock, Module, | 7 | AstDatabase, Crate, DefDatabase, Enum, Function, HasSource, HirDatabase, ImplBlock, Module, |
@@ -95,7 +95,7 @@ impl LangItems { | |||
95 | // Look for impl targets | 95 | // Look for impl targets |
96 | for impl_block in module.impl_blocks(db) { | 96 | for impl_block in module.impl_blocks(db) { |
97 | let src = impl_block.source(db); | 97 | let src = impl_block.source(db); |
98 | if let Some(lang_item_name) = lang_item_name(&*src.ast) { | 98 | if let Some(lang_item_name) = lang_item_name(&src.ast) { |
99 | self.items | 99 | self.items |
100 | .entry(lang_item_name) | 100 | .entry(lang_item_name) |
101 | .or_insert_with(|| LangItemTarget::ImplBlock(impl_block)); | 101 | .or_insert_with(|| LangItemTarget::ImplBlock(impl_block)); |
@@ -137,11 +137,11 @@ impl LangItems { | |||
137 | item: T, | 137 | item: T, |
138 | constructor: fn(T) -> LangItemTarget, | 138 | constructor: fn(T) -> LangItemTarget, |
139 | ) where | 139 | ) where |
140 | T: Copy + HasSource<Ast = TreeArc<N>>, | 140 | T: Copy + HasSource<Ast = N>, |
141 | N: AttrsOwner, | 141 | N: AttrsOwner, |
142 | { | 142 | { |
143 | let node = item.source(db).ast; | 143 | let node = item.source(db).ast; |
144 | if let Some(lang_item_name) = lang_item_name(&*node) { | 144 | if let Some(lang_item_name) = lang_item_name(&node) { |
145 | self.items.entry(lang_item_name).or_insert_with(|| constructor(item)); | 145 | self.items.entry(lang_item_name).or_insert_with(|| constructor(item)); |
146 | } | 146 | } |
147 | } | 147 | } |
diff --git a/crates/ra_hir/src/name.rs b/crates/ra_hir/src/name.rs index 40c9d6002..c589f8aba 100644 --- a/crates/ra_hir/src/name.rs +++ b/crates/ra_hir/src/name.rs | |||
@@ -75,7 +75,7 @@ impl AsName for ast::Name { | |||
75 | } | 75 | } |
76 | } | 76 | } |
77 | 77 | ||
78 | impl<'a> AsName for ast::FieldKind<'a> { | 78 | impl AsName for ast::FieldKind { |
79 | fn as_name(&self) -> Name { | 79 | fn as_name(&self) -> Name { |
80 | match self { | 80 | match self { |
81 | ast::FieldKind::Name(nr) => nr.as_name(), | 81 | ast::FieldKind::Name(nr) => nr.as_name(), |
diff --git a/crates/ra_hir/src/nameres/raw.rs b/crates/ra_hir/src/nameres/raw.rs index 46b2bef5b..8517f3c43 100644 --- a/crates/ra_hir/src/nameres/raw.rs +++ b/crates/ra_hir/src/nameres/raw.rs | |||
@@ -3,7 +3,7 @@ use std::{ops::Index, sync::Arc}; | |||
3 | use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; | 3 | use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; |
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | ast::{self, AttrsOwner, NameOwner}, | 5 | ast::{self, AttrsOwner, NameOwner}, |
6 | AstNode, AstPtr, SmolStr, SourceFile, TreeArc, | 6 | AstNode, AstPtr, SmolStr, SourceFile, |
7 | }; | 7 | }; |
8 | use test_utils::tested_by; | 8 | use test_utils::tested_by; |
9 | 9 | ||
@@ -32,7 +32,7 @@ pub struct ImportSourceMap { | |||
32 | } | 32 | } |
33 | 33 | ||
34 | type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>; | 34 | type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>; |
35 | type ImportSource = Either<TreeArc<ast::UseTree>, TreeArc<ast::ExternCrateItem>>; | 35 | type ImportSource = Either<ast::UseTree, ast::ExternCrateItem>; |
36 | 36 | ||
37 | impl ImportSourcePtr { | 37 | impl ImportSourcePtr { |
38 | fn to_node(self, file: &SourceFile) -> ImportSource { | 38 | fn to_node(self, file: &SourceFile) -> ImportSource { |
@@ -50,11 +50,11 @@ impl ImportSourceMap { | |||
50 | 50 | ||
51 | pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource { | 51 | pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource { |
52 | let file = match source { | 52 | let file = match source { |
53 | ModuleSource::SourceFile(file) => &*file, | 53 | ModuleSource::SourceFile(file) => file.clone(), |
54 | ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), | 54 | ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), |
55 | }; | 55 | }; |
56 | 56 | ||
57 | self.map[import].to_node(file) | 57 | self.map[import].to_node(&file) |
58 | } | 58 | } |
59 | } | 59 | } |
60 | 60 | ||
@@ -76,8 +76,8 @@ impl RawItems { | |||
76 | source_map: ImportSourceMap::default(), | 76 | source_map: ImportSourceMap::default(), |
77 | }; | 77 | }; |
78 | if let Some(node) = db.parse_or_expand(file_id) { | 78 | if let Some(node) = db.parse_or_expand(file_id) { |
79 | if let Some(source_file) = ast::SourceFile::cast(&node) { | 79 | if let Some(source_file) = ast::SourceFile::cast(node) { |
80 | collector.process_module(None, &*source_file); | 80 | collector.process_module(None, source_file); |
81 | } | 81 | } |
82 | } | 82 | } |
83 | (Arc::new(collector.raw_items), Arc::new(collector.source_map)) | 83 | (Arc::new(collector.raw_items), Arc::new(collector.source_map)) |
@@ -188,7 +188,7 @@ struct RawItemsCollector { | |||
188 | } | 188 | } |
189 | 189 | ||
190 | impl RawItemsCollector { | 190 | impl RawItemsCollector { |
191 | fn process_module(&mut self, current_module: Option<Module>, body: &impl ast::ModuleItemOwner) { | 191 | fn process_module(&mut self, current_module: Option<Module>, body: impl ast::ModuleItemOwner) { |
192 | for item_or_macro in body.items_with_macros() { | 192 | for item_or_macro in body.items_with_macros() { |
193 | match item_or_macro { | 193 | match item_or_macro { |
194 | ast::ItemOrMacro::Macro(m) => self.add_macro(current_module, m), | 194 | ast::ItemOrMacro::Macro(m) => self.add_macro(current_module, m), |
@@ -197,7 +197,7 @@ impl RawItemsCollector { | |||
197 | } | 197 | } |
198 | } | 198 | } |
199 | 199 | ||
200 | fn add_item(&mut self, current_module: Option<Module>, item: &ast::ModuleItem) { | 200 | fn add_item(&mut self, current_module: Option<Module>, item: ast::ModuleItem) { |
201 | let (kind, name) = match item.kind() { | 201 | let (kind, name) = match item.kind() { |
202 | ast::ModuleItemKind::Module(module) => { | 202 | ast::ModuleItemKind::Module(module) => { |
203 | self.add_module(current_module, module); | 203 | self.add_module(current_module, module); |
@@ -216,7 +216,7 @@ impl RawItemsCollector { | |||
216 | return; | 216 | return; |
217 | } | 217 | } |
218 | ast::ModuleItemKind::StructDef(it) => { | 218 | ast::ModuleItemKind::StructDef(it) => { |
219 | let id = self.source_ast_id_map.ast_id(it); | 219 | let id = self.source_ast_id_map.ast_id(&it); |
220 | let name = it.name(); | 220 | let name = it.name(); |
221 | if it.is_union() { | 221 | if it.is_union() { |
222 | (DefKind::Union(id), name) | 222 | (DefKind::Union(id), name) |
@@ -225,22 +225,22 @@ impl RawItemsCollector { | |||
225 | } | 225 | } |
226 | } | 226 | } |
227 | ast::ModuleItemKind::EnumDef(it) => { | 227 | ast::ModuleItemKind::EnumDef(it) => { |
228 | (DefKind::Enum(self.source_ast_id_map.ast_id(it)), it.name()) | 228 | (DefKind::Enum(self.source_ast_id_map.ast_id(&it)), it.name()) |
229 | } | 229 | } |
230 | ast::ModuleItemKind::FnDef(it) => { | 230 | ast::ModuleItemKind::FnDef(it) => { |
231 | (DefKind::Function(self.source_ast_id_map.ast_id(it)), it.name()) | 231 | (DefKind::Function(self.source_ast_id_map.ast_id(&it)), it.name()) |
232 | } | 232 | } |
233 | ast::ModuleItemKind::TraitDef(it) => { | 233 | ast::ModuleItemKind::TraitDef(it) => { |
234 | (DefKind::Trait(self.source_ast_id_map.ast_id(it)), it.name()) | 234 | (DefKind::Trait(self.source_ast_id_map.ast_id(&it)), it.name()) |
235 | } | 235 | } |
236 | ast::ModuleItemKind::TypeAliasDef(it) => { | 236 | ast::ModuleItemKind::TypeAliasDef(it) => { |
237 | (DefKind::TypeAlias(self.source_ast_id_map.ast_id(it)), it.name()) | 237 | (DefKind::TypeAlias(self.source_ast_id_map.ast_id(&it)), it.name()) |
238 | } | 238 | } |
239 | ast::ModuleItemKind::ConstDef(it) => { | 239 | ast::ModuleItemKind::ConstDef(it) => { |
240 | (DefKind::Const(self.source_ast_id_map.ast_id(it)), it.name()) | 240 | (DefKind::Const(self.source_ast_id_map.ast_id(&it)), it.name()) |
241 | } | 241 | } |
242 | ast::ModuleItemKind::StaticDef(it) => { | 242 | ast::ModuleItemKind::StaticDef(it) => { |
243 | (DefKind::Static(self.source_ast_id_map.ast_id(it)), it.name()) | 243 | (DefKind::Static(self.source_ast_id_map.ast_id(&it)), it.name()) |
244 | } | 244 | } |
245 | }; | 245 | }; |
246 | if let Some(name) = name { | 246 | if let Some(name) = name { |
@@ -250,14 +250,14 @@ impl RawItemsCollector { | |||
250 | } | 250 | } |
251 | } | 251 | } |
252 | 252 | ||
253 | fn add_module(&mut self, current_module: Option<Module>, module: &ast::Module) { | 253 | fn add_module(&mut self, current_module: Option<Module>, module: ast::Module) { |
254 | let name = match module.name() { | 254 | let name = match module.name() { |
255 | Some(it) => it.as_name(), | 255 | Some(it) => it.as_name(), |
256 | None => return, | 256 | None => return, |
257 | }; | 257 | }; |
258 | 258 | ||
259 | let attr_path = extract_mod_path_attribute(module); | 259 | let attr_path = extract_mod_path_attribute(&module); |
260 | let ast_id = self.source_ast_id_map.ast_id(module); | 260 | let ast_id = self.source_ast_id_map.ast_id(&module); |
261 | if module.has_semi() { | 261 | if module.has_semi() { |
262 | let item = | 262 | let item = |
263 | self.raw_items.modules.alloc(ModuleData::Declaration { name, ast_id, attr_path }); | 263 | self.raw_items.modules.alloc(ModuleData::Declaration { name, ast_id, attr_path }); |
@@ -278,10 +278,10 @@ impl RawItemsCollector { | |||
278 | tested_by!(name_res_works_for_broken_modules); | 278 | tested_by!(name_res_works_for_broken_modules); |
279 | } | 279 | } |
280 | 280 | ||
281 | fn add_use_item(&mut self, current_module: Option<Module>, use_item: &ast::UseItem) { | 281 | fn add_use_item(&mut self, current_module: Option<Module>, use_item: ast::UseItem) { |
282 | let is_prelude = use_item.has_atom_attr("prelude_import"); | 282 | let is_prelude = use_item.has_atom_attr("prelude_import"); |
283 | 283 | ||
284 | Path::expand_use_item(use_item, |path, use_tree, is_glob, alias| { | 284 | Path::expand_use_item(&use_item, |path, use_tree, is_glob, alias| { |
285 | let import_data = | 285 | let import_data = |
286 | ImportData { path, alias, is_glob, is_prelude, is_extern_crate: false }; | 286 | ImportData { path, alias, is_glob, is_prelude, is_extern_crate: false }; |
287 | self.push_import(current_module, import_data, Either::A(AstPtr::new(use_tree))); | 287 | self.push_import(current_module, import_data, Either::A(AstPtr::new(use_tree))); |
@@ -291,11 +291,11 @@ impl RawItemsCollector { | |||
291 | fn add_extern_crate_item( | 291 | fn add_extern_crate_item( |
292 | &mut self, | 292 | &mut self, |
293 | current_module: Option<Module>, | 293 | current_module: Option<Module>, |
294 | extern_crate: &ast::ExternCrateItem, | 294 | extern_crate: ast::ExternCrateItem, |
295 | ) { | 295 | ) { |
296 | if let Some(name_ref) = extern_crate.name_ref() { | 296 | if let Some(name_ref) = extern_crate.name_ref() { |
297 | let path = Path::from_name_ref(name_ref); | 297 | let path = Path::from_name_ref(&name_ref); |
298 | let alias = extern_crate.alias().and_then(|a| a.name()).map(AsName::as_name); | 298 | let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name()); |
299 | let import_data = ImportData { | 299 | let import_data = ImportData { |
300 | path, | 300 | path, |
301 | alias, | 301 | alias, |
@@ -303,18 +303,18 @@ impl RawItemsCollector { | |||
303 | is_prelude: false, | 303 | is_prelude: false, |
304 | is_extern_crate: true, | 304 | is_extern_crate: true, |
305 | }; | 305 | }; |
306 | self.push_import(current_module, import_data, Either::B(AstPtr::new(extern_crate))); | 306 | self.push_import(current_module, import_data, Either::B(AstPtr::new(&extern_crate))); |
307 | } | 307 | } |
308 | } | 308 | } |
309 | 309 | ||
310 | fn add_macro(&mut self, current_module: Option<Module>, m: &ast::MacroCall) { | 310 | fn add_macro(&mut self, current_module: Option<Module>, m: ast::MacroCall) { |
311 | let path = match m.path().and_then(Path::from_ast) { | 311 | let path = match m.path().and_then(Path::from_ast) { |
312 | Some(it) => it, | 312 | Some(it) => it, |
313 | _ => return, | 313 | _ => return, |
314 | }; | 314 | }; |
315 | 315 | ||
316 | let name = m.name().map(|it| it.as_name()); | 316 | let name = m.name().map(|it| it.as_name()); |
317 | let ast_id = self.source_ast_id_map.ast_id(m); | 317 | let ast_id = self.source_ast_id_map.ast_id(&m); |
318 | let export = m.has_atom_attr("macro_export"); | 318 | let export = m.has_atom_attr("macro_export"); |
319 | let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export }); | 319 | let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export }); |
320 | self.push_item(current_module, RawItem::Macro(m)); | 320 | self.push_item(current_module, RawItem::Macro(m)); |
diff --git a/crates/ra_hir/src/path.rs b/crates/ra_hir/src/path.rs index bce9d2d4b..882db7681 100644 --- a/crates/ra_hir/src/path.rs +++ b/crates/ra_hir/src/path.rs | |||
@@ -47,9 +47,9 @@ pub enum PathKind { | |||
47 | 47 | ||
48 | impl Path { | 48 | impl Path { |
49 | /// Calls `cb` with all paths, represented by this use item. | 49 | /// Calls `cb` with all paths, represented by this use item. |
50 | pub fn expand_use_item<'a>( | 50 | pub fn expand_use_item( |
51 | item: &'a ast::UseItem, | 51 | item: &ast::UseItem, |
52 | mut cb: impl FnMut(Path, &'a ast::UseTree, bool, Option<Name>), | 52 | mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>), |
53 | ) { | 53 | ) { |
54 | if let Some(tree) = item.use_tree() { | 54 | if let Some(tree) = item.use_tree() { |
55 | expand_use_tree(None, tree, &mut cb); | 55 | expand_use_tree(None, tree, &mut cb); |
@@ -57,7 +57,7 @@ impl Path { | |||
57 | } | 57 | } |
58 | 58 | ||
59 | /// Converts an `ast::Path` to `Path`. Works with use trees. | 59 | /// Converts an `ast::Path` to `Path`. Works with use trees. |
60 | pub fn from_ast(mut path: &ast::Path) -> Option<Path> { | 60 | pub fn from_ast(mut path: ast::Path) -> Option<Path> { |
61 | let mut kind = PathKind::Plain; | 61 | let mut kind = PathKind::Plain; |
62 | let mut segments = Vec::new(); | 62 | let mut segments = Vec::new(); |
63 | loop { | 63 | loop { |
@@ -87,7 +87,7 @@ impl Path { | |||
87 | break; | 87 | break; |
88 | } | 88 | } |
89 | } | 89 | } |
90 | path = match qualifier(path) { | 90 | path = match qualifier(&path) { |
91 | Some(it) => it, | 91 | Some(it) => it, |
92 | None => break, | 92 | None => break, |
93 | }; | 93 | }; |
@@ -95,7 +95,7 @@ impl Path { | |||
95 | segments.reverse(); | 95 | segments.reverse(); |
96 | return Some(Path { kind, segments }); | 96 | return Some(Path { kind, segments }); |
97 | 97 | ||
98 | fn qualifier(path: &ast::Path) -> Option<&ast::Path> { | 98 | fn qualifier(path: &ast::Path) -> Option<ast::Path> { |
99 | if let Some(q) = path.qualifier() { | 99 | if let Some(q) = path.qualifier() { |
100 | return Some(q); | 100 | return Some(q); |
101 | } | 101 | } |
@@ -136,7 +136,7 @@ impl Path { | |||
136 | } | 136 | } |
137 | 137 | ||
138 | impl GenericArgs { | 138 | impl GenericArgs { |
139 | pub(crate) fn from_ast(node: &ast::TypeArgList) -> Option<GenericArgs> { | 139 | pub(crate) fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> { |
140 | let mut args = Vec::new(); | 140 | let mut args = Vec::new(); |
141 | for type_arg in node.type_args() { | 141 | for type_arg in node.type_args() { |
142 | let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); | 142 | let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); |
@@ -160,10 +160,10 @@ impl From<Name> for Path { | |||
160 | } | 160 | } |
161 | } | 161 | } |
162 | 162 | ||
163 | fn expand_use_tree<'a>( | 163 | fn expand_use_tree( |
164 | prefix: Option<Path>, | 164 | prefix: Option<Path>, |
165 | tree: &'a ast::UseTree, | 165 | tree: ast::UseTree, |
166 | cb: &mut impl FnMut(Path, &'a ast::UseTree, bool, Option<Name>), | 166 | cb: &mut impl FnMut(Path, &ast::UseTree, bool, Option<Name>), |
167 | ) { | 167 | ) { |
168 | if let Some(use_tree_list) = tree.use_tree_list() { | 168 | if let Some(use_tree_list) = tree.use_tree_list() { |
169 | let prefix = match tree.path() { | 169 | let prefix = match tree.path() { |
@@ -188,7 +188,7 @@ fn expand_use_tree<'a>( | |||
188 | if let Some(segment) = ast_path.segment() { | 188 | if let Some(segment) = ast_path.segment() { |
189 | if segment.kind() == Some(ast::PathSegmentKind::SelfKw) { | 189 | if segment.kind() == Some(ast::PathSegmentKind::SelfKw) { |
190 | if let Some(prefix) = prefix { | 190 | if let Some(prefix) = prefix { |
191 | cb(prefix, tree, false, alias); | 191 | cb(prefix, &tree, false, alias); |
192 | return; | 192 | return; |
193 | } | 193 | } |
194 | } | 194 | } |
@@ -196,7 +196,7 @@ fn expand_use_tree<'a>( | |||
196 | } | 196 | } |
197 | if let Some(path) = convert_path(prefix, ast_path) { | 197 | if let Some(path) = convert_path(prefix, ast_path) { |
198 | let is_glob = tree.has_star(); | 198 | let is_glob = tree.has_star(); |
199 | cb(path, tree, is_glob, alias) | 199 | cb(path, &tree, is_glob, alias) |
200 | } | 200 | } |
201 | // FIXME: report errors somewhere | 201 | // FIXME: report errors somewhere |
202 | // We get here if we do | 202 | // We get here if we do |
@@ -204,7 +204,7 @@ fn expand_use_tree<'a>( | |||
204 | } | 204 | } |
205 | } | 205 | } |
206 | 206 | ||
207 | fn convert_path(prefix: Option<Path>, path: &ast::Path) -> Option<Path> { | 207 | fn convert_path(prefix: Option<Path>, path: ast::Path) -> Option<Path> { |
208 | let prefix = | 208 | let prefix = |
209 | if let Some(qual) = path.qualifier() { Some(convert_path(prefix, qual)?) } else { prefix }; | 209 | if let Some(qual) = path.qualifier() { Some(convert_path(prefix, qual)?) } else { prefix }; |
210 | let segment = path.segment()?; | 210 | let segment = path.segment()?; |
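The `path.rs` changes show the other side of owned nodes: `qualifier` can now return `Option<ast::Path>` rather than a borrow tied to the parent, which is what lets the `loop { path = match qualifier(&path) { ... } }` rebinding in `from_ast` work without lifetime juggling. A rough sketch of that shape, using invented toy types rather than the real `ast::Path`:

```rust
use std::sync::Arc;

// Hypothetical owned "path" node: each segment points at its qualifier
// through an Arc, so handing out an owned child is a cheap clone.
#[derive(Clone)]
struct Path {
    segment: String,
    qualifier: Option<Arc<Path>>,
}

impl Path {
    // Returns an owned node, not a borrow tied to `self`.
    fn qualifier(&self) -> Option<Path> {
        self.qualifier.as_ref().map(|q| (**q).clone())
    }
}

// Walk outwards collecting segments, the same shape as `Path::from_ast`
// above: rebinding `path` is fine because each step yields an owned value.
fn segments(mut path: Path) -> Vec<String> {
    let mut out = Vec::new();
    loop {
        out.push(path.segment.clone());
        path = match path.qualifier() {
            Some(it) => it,
            None => break,
        };
    }
    out.reverse();
    out
}

fn main() {
    let a = Path { segment: "a".into(), qualifier: None };
    let b = Path { segment: "b".into(), qualifier: Some(Arc::new(a)) };
    let c = Path { segment: "c".into(), qualifier: Some(Arc::new(b)) };
    assert_eq!(segments(c), vec!["a", "b", "c"]);
}
```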
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs index 071c1bb18..e7bc4df97 100644 --- a/crates/ra_hir/src/source_binder.rs +++ b/crates/ra_hir/src/source_binder.rs | |||
@@ -37,7 +37,7 @@ pub fn module_from_file_id(db: &impl HirDatabase, file_id: FileId) -> Option<Mod | |||
37 | pub fn module_from_declaration( | 37 | pub fn module_from_declaration( |
38 | db: &impl HirDatabase, | 38 | db: &impl HirDatabase, |
39 | file_id: FileId, | 39 | file_id: FileId, |
40 | decl: &ast::Module, | 40 | decl: ast::Module, |
41 | ) -> Option<Module> { | 41 | ) -> Option<Module> { |
42 | let parent_module = module_from_file_id(db, file_id); | 42 | let parent_module = module_from_file_id(db, file_id); |
43 | let child_name = decl.name(); | 43 | let child_name = decl.name(); |
@@ -50,8 +50,8 @@ pub fn module_from_declaration( | |||
50 | /// Locates the module by position in the source code. | 50 | /// Locates the module by position in the source code. |
51 | pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> { | 51 | pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> { |
52 | let parse = db.parse(position.file_id); | 52 | let parse = db.parse(position.file_id); |
53 | match find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) { | 53 | match &find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) { |
54 | Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m), | 54 | Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m.clone()), |
55 | _ => module_from_file_id(db, position.file_id), | 55 | _ => module_from_file_id(db, position.file_id), |
56 | } | 56 | } |
57 | } | 57 | } |
@@ -59,12 +59,12 @@ pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Op | |||
59 | fn module_from_inline( | 59 | fn module_from_inline( |
60 | db: &impl HirDatabase, | 60 | db: &impl HirDatabase, |
61 | file_id: FileId, | 61 | file_id: FileId, |
62 | module: &ast::Module, | 62 | module: ast::Module, |
63 | ) -> Option<Module> { | 63 | ) -> Option<Module> { |
64 | assert!(!module.has_semi()); | 64 | assert!(!module.has_semi()); |
65 | let file_id = file_id.into(); | 65 | let file_id = file_id.into(); |
66 | let ast_id_map = db.ast_id_map(file_id); | 66 | let ast_id_map = db.ast_id_map(file_id); |
67 | let item_id = ast_id_map.ast_id(module).with_file_id(file_id); | 67 | let item_id = ast_id_map.ast_id(&module).with_file_id(file_id); |
68 | module_from_source(db, file_id, Some(item_id)) | 68 | module_from_source(db, file_id, Some(item_id)) |
69 | } | 69 | } |
70 | 70 | ||
@@ -127,16 +127,16 @@ fn try_get_resolver_for_node( | |||
127 | file_id: FileId, | 127 | file_id: FileId, |
128 | node: &SyntaxNode, | 128 | node: &SyntaxNode, |
129 | ) -> Option<Resolver> { | 129 | ) -> Option<Resolver> { |
130 | if let Some(module) = ast::Module::cast(node) { | 130 | if let Some(module) = ast::Module::cast(node.clone()) { |
131 | Some(module_from_declaration(db, file_id, module)?.resolver(db)) | 131 | Some(module_from_declaration(db, file_id, module)?.resolver(db)) |
132 | } else if let Some(_) = ast::SourceFile::cast(node) { | 132 | } else if let Some(_) = ast::SourceFile::cast(node.clone()) { |
133 | Some(module_from_source(db, file_id.into(), None)?.resolver(db)) | 133 | Some(module_from_source(db, file_id.into(), None)?.resolver(db)) |
134 | } else if let Some(s) = ast::StructDef::cast(node) { | 134 | } else if let Some(s) = ast::StructDef::cast(node.clone()) { |
135 | let module = module_from_child_node(db, file_id, s.syntax())?; | 135 | let module = module_from_child_node(db, file_id, s.syntax())?; |
136 | Some(struct_from_module(db, module, s).resolver(db)) | 136 | Some(struct_from_module(db, module, &s).resolver(db)) |
137 | } else if let Some(e) = ast::EnumDef::cast(node) { | 137 | } else if let Some(e) = ast::EnumDef::cast(node.clone()) { |
138 | let module = module_from_child_node(db, file_id, e.syntax())?; | 138 | let module = module_from_child_node(db, file_id, e.syntax())?; |
139 | Some(enum_from_module(db, module, e).resolver(db)) | 139 | Some(enum_from_module(db, module, &e).resolver(db)) |
140 | } else if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { | 140 | } else if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { |
141 | Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db)) | 141 | Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db)) |
142 | } else { | 142 | } else { |
@@ -153,14 +153,14 @@ fn def_with_body_from_child_node( | |||
153 | let module = module_from_child_node(db, file_id, node)?; | 153 | let module = module_from_child_node(db, file_id, node)?; |
154 | let ctx = LocationCtx::new(db, module, file_id.into()); | 154 | let ctx = LocationCtx::new(db, module, file_id.into()); |
155 | node.ancestors().find_map(|node| { | 155 | node.ancestors().find_map(|node| { |
156 | if let Some(def) = ast::FnDef::cast(node) { | 156 | if let Some(def) = ast::FnDef::cast(node.clone()) { |
157 | return Some(Function { id: ctx.to_def(def) }.into()); | 157 | return Some(Function { id: ctx.to_def(&def) }.into()); |
158 | } | 158 | } |
159 | if let Some(def) = ast::ConstDef::cast(node) { | 159 | if let Some(def) = ast::ConstDef::cast(node.clone()) { |
160 | return Some(Const { id: ctx.to_def(def) }.into()); | 160 | return Some(Const { id: ctx.to_def(&def) }.into()); |
161 | } | 161 | } |
162 | if let Some(def) = ast::StaticDef::cast(node) { | 162 | if let Some(def) = ast::StaticDef::cast(node.clone()) { |
163 | return Some(Static { id: ctx.to_def(def) }.into()); | 163 | return Some(Static { id: ctx.to_def(&def) }.into()); |
164 | } | 164 | } |
165 | None | 165 | None |
166 | }) | 166 | }) |
@@ -237,7 +237,7 @@ impl SourceAnalyzer { | |||
237 | SourceAnalyzer { | 237 | SourceAnalyzer { |
238 | resolver: node | 238 | resolver: node |
239 | .ancestors() | 239 | .ancestors() |
240 | .find_map(|node| try_get_resolver_for_node(db, file_id, node)) | 240 | .find_map(|node| try_get_resolver_for_node(db, file_id, &node)) |
241 | .unwrap_or_default(), | 241 | .unwrap_or_default(), |
242 | body_source_map: None, | 242 | body_source_map: None, |
243 | infer: None, | 243 | infer: None, |
@@ -257,17 +257,17 @@ impl SourceAnalyzer { | |||
257 | } | 257 | } |
258 | 258 | ||
259 | pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { | 259 | pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { |
260 | let expr_id = self.body_source_map.as_ref()?.node_expr(call.into())?; | 260 | let expr_id = self.body_source_map.as_ref()?.node_expr(&call.clone().into())?; |
261 | self.infer.as_ref()?.method_resolution(expr_id) | 261 | self.infer.as_ref()?.method_resolution(expr_id) |
262 | } | 262 | } |
263 | 263 | ||
264 | pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> { | 264 | pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> { |
265 | let expr_id = self.body_source_map.as_ref()?.node_expr(field.into())?; | 265 | let expr_id = self.body_source_map.as_ref()?.node_expr(&field.clone().into())?; |
266 | self.infer.as_ref()?.field_resolution(expr_id) | 266 | self.infer.as_ref()?.field_resolution(expr_id) |
267 | } | 267 | } |
268 | 268 | ||
269 | pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> { | 269 | pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> { |
270 | let expr_id = self.body_source_map.as_ref()?.node_expr(struct_lit.into())?; | 270 | let expr_id = self.body_source_map.as_ref()?.node_expr(&struct_lit.clone().into())?; |
271 | self.infer.as_ref()?.variant_resolution(expr_id) | 271 | self.infer.as_ref()?.variant_resolution(expr_id) |
272 | } | 272 | } |
273 | 273 | ||
@@ -290,18 +290,18 @@ impl SourceAnalyzer { | |||
290 | 290 | ||
291 | pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> { | 291 | pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> { |
292 | if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { | 292 | if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { |
293 | let expr_id = self.body_source_map.as_ref()?.node_expr(path_expr.into())?; | 293 | let expr_id = self.body_source_map.as_ref()?.node_expr(&path_expr.into())?; |
294 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { | 294 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { |
295 | return Some(PathResolution::AssocItem(assoc)); | 295 | return Some(PathResolution::AssocItem(assoc)); |
296 | } | 296 | } |
297 | } | 297 | } |
298 | if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) { | 298 | if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) { |
299 | let pat_id = self.body_source_map.as_ref()?.node_pat(path_pat.into())?; | 299 | let pat_id = self.body_source_map.as_ref()?.node_pat(&path_pat.into())?; |
300 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) { | 300 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) { |
301 | return Some(PathResolution::AssocItem(assoc)); | 301 | return Some(PathResolution::AssocItem(assoc)); |
302 | } | 302 | } |
303 | } | 303 | } |
304 | let hir_path = crate::Path::from_ast(path)?; | 304 | let hir_path = crate::Path::from_ast(path.clone())?; |
305 | let res = self.resolver.resolve_path_without_assoc_items(db, &hir_path); | 305 | let res = self.resolver.resolve_path_without_assoc_items(db, &hir_path); |
306 | let res = res.clone().take_types().or_else(|| res.take_values())?; | 306 | let res = res.clone().take_types().or_else(|| res.take_values())?; |
307 | let res = match res { | 307 | let res = match res { |
@@ -343,12 +343,12 @@ impl SourceAnalyzer { | |||
343 | // FIXME: at least, this should work with any DefWithBody, but ideally | 343 | // FIXME: at least, this should work with any DefWithBody, but ideally |
344 | // this should be hir-based altogether | 344 | // this should be hir-based altogether |
345 | let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); | 345 | let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); |
346 | let ptr = Either::A(AstPtr::new(pat.into())); | 346 | let ptr = Either::A(AstPtr::new(&ast::Pat::from(pat.clone()))); |
347 | fn_def | 347 | fn_def |
348 | .syntax() | 348 | .syntax() |
349 | .descendants() | 349 | .descendants() |
350 | .filter_map(ast::NameRef::cast) | 350 | .filter_map(ast::NameRef::cast) |
351 | .filter(|name_ref| match self.resolve_local_name(*name_ref) { | 351 | .filter(|name_ref| match self.resolve_local_name(&name_ref) { |
352 | None => false, | 352 | None => false, |
353 | Some(entry) => entry.ptr() == ptr, | 353 | Some(entry) => entry.ptr() == ptr, |
354 | }) | 354 | }) |
@@ -411,7 +411,7 @@ fn scope_for( | |||
411 | node: &SyntaxNode, | 411 | node: &SyntaxNode, |
412 | ) -> Option<ScopeId> { | 412 | ) -> Option<ScopeId> { |
413 | node.ancestors() | 413 | node.ancestors() |
414 | .map(SyntaxNodePtr::new) | 414 | .map(|it| SyntaxNodePtr::new(&it)) |
415 | .filter_map(|ptr| source_map.syntax_expr(ptr)) | 415 | .filter_map(|ptr| source_map.syntax_expr(ptr)) |
416 | .find_map(|it| scopes.scope_for(it)) | 416 | .find_map(|it| scopes.scope_for(it)) |
417 | } | 417 | } |
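In `source_binder.rs` the repeated `node.clone()` calls are the visible cost of `cast` now taking its argument by value: each probe consumes a handle, and handles are cheap to clone. A small illustrative sketch (toy `SyntaxNode` and `cast`, not the real ra_syntax types):

```rust
use std::sync::Arc;

// Toy stand-ins: a dynamically-typed node plus "typed" wrappers whose
// `cast` consumes the node, as `AstNode::cast` does after this change.
#[derive(Clone)]
struct SyntaxNode {
    kind: &'static str,
    text: Arc<String>,
}

struct Module(SyntaxNode);
struct StructDef(SyntaxNode);

impl Module {
    fn cast(node: SyntaxNode) -> Option<Module> {
        if node.kind == "MODULE" { Some(Module(node)) } else { None }
    }
}
impl StructDef {
    fn cast(node: SyntaxNode) -> Option<StructDef> {
        if node.kind == "STRUCT_DEF" { Some(StructDef(node)) } else { None }
    }
}

// Probing several kinds clones the (cheap) handle per attempt, which is
// why `try_get_resolver_for_node` gained its `node.clone()` calls.
fn describe(node: &SyntaxNode) -> &'static str {
    if Module::cast(node.clone()).is_some() {
        "a module"
    } else if StructDef::cast(node.clone()).is_some() {
        "a struct"
    } else {
        "something else"
    }
}

fn main() {
    let node = SyntaxNode { kind: "STRUCT_DEF", text: Arc::new("struct S;".into()) };
    assert_eq!(describe(&node), "a struct");
    assert_eq!(node.text.len(), 9);
}
```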
diff --git a/crates/ra_hir/src/source_id.rs b/crates/ra_hir/src/source_id.rs index 6cdb90141..51cd65dda 100644 --- a/crates/ra_hir/src/source_id.rs +++ b/crates/ra_hir/src/source_id.rs | |||
@@ -5,7 +5,7 @@ use std::{ | |||
5 | }; | 5 | }; |
6 | 6 | ||
7 | use ra_arena::{impl_arena_id, Arena, RawId}; | 7 | use ra_arena::{impl_arena_id, Arena, RawId}; |
8 | use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr, TreeArc}; | 8 | use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr}; |
9 | 9 | ||
10 | use crate::{AstDatabase, HirFileId}; | 10 | use crate::{AstDatabase, HirFileId}; |
11 | 11 | ||
@@ -42,9 +42,9 @@ impl<N: AstNode> AstId<N> { | |||
42 | self.file_id | 42 | self.file_id |
43 | } | 43 | } |
44 | 44 | ||
45 | pub(crate) fn to_node(&self, db: &impl AstDatabase) -> TreeArc<N> { | 45 | pub(crate) fn to_node(&self, db: &impl AstDatabase) -> N { |
46 | let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw); | 46 | let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw); |
47 | N::cast(&syntax_node).unwrap().to_owned() | 47 | N::cast(syntax_node).unwrap() |
48 | } | 48 | } |
49 | } | 49 | } |
50 | 50 | ||
@@ -93,7 +93,7 @@ pub struct AstIdMap { | |||
93 | impl AstIdMap { | 93 | impl AstIdMap { |
94 | pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { | 94 | pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { |
95 | let map = if let Some(node) = db.parse_or_expand(file_id) { | 95 | let map = if let Some(node) = db.parse_or_expand(file_id) { |
96 | AstIdMap::from_source(&*node) | 96 | AstIdMap::from_source(&node) |
97 | } else { | 97 | } else { |
98 | AstIdMap::default() | 98 | AstIdMap::default() |
99 | }; | 99 | }; |
@@ -104,9 +104,9 @@ impl AstIdMap { | |||
104 | db: &impl AstDatabase, | 104 | db: &impl AstDatabase, |
105 | file_id: HirFileId, | 105 | file_id: HirFileId, |
106 | ast_id: ErasedFileAstId, | 106 | ast_id: ErasedFileAstId, |
107 | ) -> TreeArc<SyntaxNode> { | 107 | ) -> SyntaxNode { |
108 | let node = db.parse_or_expand(file_id).unwrap(); | 108 | let node = db.parse_or_expand(file_id).unwrap(); |
109 | db.ast_id_map(file_id).arena[ast_id].to_node(&*node).to_owned() | 109 | db.ast_id_map(file_id).arena[ast_id].to_node(&node) |
110 | } | 110 | } |
111 | 111 | ||
112 | pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> { | 112 | pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> { |
@@ -131,7 +131,7 @@ impl AstIdMap { | |||
131 | // change parent's id. This means that, say, adding a new function to a | 131 | // change parent's id. This means that, say, adding a new function to a |
132 | // trait does not change ids of top-level items, which helps caching. | 132 | // trait does not change ids of top-level items, which helps caching. |
133 | bfs(node, |it| { | 133 | bfs(node, |it| { |
134 | if let Some(module_item) = ast::ModuleItem::cast(it) { | 134 | if let Some(module_item) = ast::ModuleItem::cast(it.clone()) { |
135 | res.alloc(module_item.syntax()); | 135 | res.alloc(module_item.syntax()); |
136 | } else if let Some(macro_call) = ast::MacroCall::cast(it) { | 136 | } else if let Some(macro_call) = ast::MacroCall::cast(it) { |
137 | res.alloc(macro_call.syntax()); | 137 | res.alloc(macro_call.syntax()); |
@@ -146,8 +146,8 @@ impl AstIdMap { | |||
146 | } | 146 | } |
147 | 147 | ||
148 | /// Walks the subtree in bfs order, calling `f` for each node. | 148 | /// Walks the subtree in bfs order, calling `f` for each node. |
149 | fn bfs(node: &SyntaxNode, mut f: impl FnMut(&SyntaxNode)) { | 149 | fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) { |
150 | let mut curr_layer = vec![node]; | 150 | let mut curr_layer = vec![node.clone()]; |
151 | let mut next_layer = vec![]; | 151 | let mut next_layer = vec![]; |
152 | while !curr_layer.is_empty() { | 152 | while !curr_layer.is_empty() { |
153 | curr_layer.drain(..).for_each(|node| { | 153 | curr_layer.drain(..).for_each(|node| { |
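The `bfs` helper in `source_id.rs` changes its callback from `FnMut(&SyntaxNode)` to `FnMut(SyntaxNode)` and seeds the queue with `node.clone()`; this only makes sense because a node handle is a small, cheaply clonable value. A self-contained sketch of the same walk over toy nodes (an assumed structure, not the real ra_syntax tree):

```rust
use std::sync::Arc;

// Toy node: children are shared via Arc, so handing out owned child handles
// is just a reference-count bump.
#[derive(Clone)]
struct Node {
    label: &'static str,
    children: Vec<Arc<Node>>,
}

impl Node {
    fn children(&self) -> impl Iterator<Item = Node> + '_ {
        self.children.iter().map(|c| (**c).clone())
    }
}

/// Walks the subtree breadth-first, calling `f` with an owned handle per node.
fn bfs(root: &Node, mut f: impl FnMut(Node)) {
    let mut curr_layer = vec![root.clone()];
    let mut next_layer = vec![];
    while !curr_layer.is_empty() {
        curr_layer.drain(..).for_each(|node| {
            next_layer.extend(node.children());
            f(node);
        });
        std::mem::swap(&mut curr_layer, &mut next_layer);
    }
}

fn main() {
    let leaf = |label| Arc::new(Node { label, children: vec![] });
    let root = Node { label: "root", children: vec![leaf("a"), leaf("b")] };
    let mut seen = Vec::new();
    bfs(&root, |node| seen.push(node.label));
    assert_eq!(seen, ["root", "a", "b"]);
}
```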
diff --git a/crates/ra_hir/src/traits.rs b/crates/ra_hir/src/traits.rs index fc0368303..de26f1a68 100644 --- a/crates/ra_hir/src/traits.rs +++ b/crates/ra_hir/src/traits.rs | |||
@@ -31,9 +31,9 @@ impl TraitData { | |||
31 | item_list | 31 | item_list |
32 | .impl_items() | 32 | .impl_items() |
33 | .map(|item_node| match item_node.kind() { | 33 | .map(|item_node| match item_node.kind() { |
34 | ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(), | 34 | ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(&it) }.into(), |
35 | ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(), | 35 | ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(), |
36 | ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(), | 36 | ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(), |
37 | }) | 37 | }) |
38 | .collect() | 38 | .collect() |
39 | } else { | 39 | } else { |
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index 2410602a6..265740e54 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs | |||
@@ -3086,7 +3086,7 @@ fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String { | |||
3086 | let file = db.parse(pos.file_id).ok().unwrap(); | 3086 | let file = db.parse(pos.file_id).ok().unwrap(); |
3087 | let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap(); | 3087 | let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap(); |
3088 | let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset)); | 3088 | let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset)); |
3089 | let ty = analyzer.type_of(db, expr).unwrap(); | 3089 | let ty = analyzer.type_of(db, &expr).unwrap(); |
3090 | ty.display(db).to_string() | 3090 | ty.display(db).to_string() |
3091 | } | 3091 | } |
3092 | 3092 | ||
@@ -3126,7 +3126,7 @@ fn infer(content: &str) -> String { | |||
3126 | types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end())); | 3126 | types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end())); |
3127 | for (syntax_ptr, ty) in &types { | 3127 | for (syntax_ptr, ty) in &types { |
3128 | let node = syntax_ptr.to_node(source_file.syntax()); | 3128 | let node = syntax_ptr.to_node(source_file.syntax()); |
3129 | let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node) { | 3129 | let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) { |
3130 | (self_param.self_kw_token().range(), "self".to_string()) | 3130 | (self_param.self_kw_token().range(), "self".to_string()) |
3131 | } else { | 3131 | } else { |
3132 | (syntax_ptr.range(), node.text().to_string().replace("\n", " ")) | 3132 | (syntax_ptr.range(), node.text().to_string().replace("\n", " ")) |
@@ -3137,7 +3137,7 @@ fn infer(content: &str) -> String { | |||
3137 | 3137 | ||
3138 | for node in source_file.syntax().descendants() { | 3138 | for node in source_file.syntax().descendants() { |
3139 | if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { | 3139 | if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { |
3140 | let analyzer = SourceAnalyzer::new(&db, file_id, node, None); | 3140 | let analyzer = SourceAnalyzer::new(&db, file_id, &node, None); |
3141 | infer_def(analyzer.inference_result(), analyzer.body_source_map()); | 3141 | infer_def(analyzer.inference_result(), analyzer.body_source_map()); |
3142 | } | 3142 | } |
3143 | } | 3143 | } |
@@ -3179,7 +3179,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { | |||
3179 | let node = | 3179 | let node = |
3180 | algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); | 3180 | algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); |
3181 | let events = db.log_executed(|| { | 3181 | let events = db.log_executed(|| { |
3182 | SourceAnalyzer::new(&db, pos.file_id, node, None); | 3182 | SourceAnalyzer::new(&db, pos.file_id, &node, None); |
3183 | }); | 3183 | }); |
3184 | assert!(format!("{:?}", events).contains("infer")) | 3184 | assert!(format!("{:?}", events).contains("infer")) |
3185 | } | 3185 | } |
@@ -3200,7 +3200,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { | |||
3200 | let node = | 3200 | let node = |
3201 | algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); | 3201 | algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); |
3202 | let events = db.log_executed(|| { | 3202 | let events = db.log_executed(|| { |
3203 | SourceAnalyzer::new(&db, pos.file_id, node, None); | 3203 | SourceAnalyzer::new(&db, pos.file_id, &node, None); |
3204 | }); | 3204 | }); |
3205 | assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) | 3205 | assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) |
3206 | } | 3206 | } |
diff --git a/crates/ra_hir/src/type_ref.rs b/crates/ra_hir/src/type_ref.rs index 8aa807648..8536ae44a 100644 --- a/crates/ra_hir/src/type_ref.rs +++ b/crates/ra_hir/src/type_ref.rs | |||
@@ -56,7 +56,7 @@ pub enum TypeRef { | |||
56 | 56 | ||
57 | impl TypeRef { | 57 | impl TypeRef { |
58 | /// Converts an `ast::TypeRef` to a `hir::TypeRef`. | 58 | /// Converts an `ast::TypeRef` to a `hir::TypeRef`. |
59 | pub(crate) fn from_ast(node: &ast::TypeRef) -> Self { | 59 | pub(crate) fn from_ast(node: ast::TypeRef) -> Self { |
60 | use ra_syntax::ast::TypeRefKind::*; | 60 | use ra_syntax::ast::TypeRefKind::*; |
61 | match node.kind() { | 61 | match node.kind() { |
62 | ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()), | 62 | ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()), |
@@ -95,7 +95,7 @@ impl TypeRef { | |||
95 | } | 95 | } |
96 | } | 96 | } |
97 | 97 | ||
98 | pub(crate) fn from_ast_opt(node: Option<&ast::TypeRef>) -> Self { | 98 | pub(crate) fn from_ast_opt(node: Option<ast::TypeRef>) -> Self { |
99 | if let Some(node) = node { | 99 | if let Some(node) = node { |
100 | TypeRef::from_ast(node) | 100 | TypeRef::from_ast(node) |
101 | } else { | 101 | } else { |
diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs index 11dea7c14..270499612 100644 --- a/crates/ra_ide_api/src/call_info.rs +++ b/crates/ra_ide_api/src/call_info.rs | |||
@@ -11,24 +11,24 @@ use crate::{db::RootDatabase, CallInfo, FilePosition, FunctionSignature}; | |||
11 | /// Computes parameter information for the given call expression. | 11 | /// Computes parameter information for the given call expression. |
12 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { | 12 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { |
13 | let parse = db.parse(position.file_id); | 13 | let parse = db.parse(position.file_id); |
14 | let syntax = parse.tree().syntax(); | 14 | let syntax = parse.tree().syntax().clone(); |
15 | 15 | ||
16 | // Find the calling expression and its NameRef | 16 | // Find the calling expression and its NameRef |
17 | let calling_node = FnCallNode::with_node(syntax, position.offset)?; | 17 | let calling_node = FnCallNode::with_node(&syntax, position.offset)?; |
18 | let name_ref = calling_node.name_ref()?; | 18 | let name_ref = calling_node.name_ref()?; |
19 | 19 | ||
20 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); | 20 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); |
21 | let function = match calling_node { | 21 | let function = match &calling_node { |
22 | FnCallNode::CallExpr(expr) => { | 22 | FnCallNode::CallExpr(expr) => { |
23 | //FIXME: apply subst | 23 | //FIXME: apply subst |
24 | let (callable_def, _subst) = analyzer.type_of(db, expr.expr()?)?.as_callable()?; | 24 | let (callable_def, _subst) = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; |
25 | match callable_def { | 25 | match callable_def { |
26 | hir::CallableDef::Function(it) => it, | 26 | hir::CallableDef::Function(it) => it, |
27 | //FIXME: handle other callables | 27 | //FIXME: handle other callables |
28 | _ => return None, | 28 | _ => return None, |
29 | } | 29 | } |
30 | } | 30 | } |
31 | FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(expr)?, | 31 | FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(&expr)?, |
32 | }; | 32 | }; |
33 | 33 | ||
34 | let mut call_info = CallInfo::new(db, function); | 34 | let mut call_info = CallInfo::new(db, function); |
@@ -73,13 +73,13 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal | |||
73 | Some(call_info) | 73 | Some(call_info) |
74 | } | 74 | } |
75 | 75 | ||
76 | enum FnCallNode<'a> { | 76 | enum FnCallNode { |
77 | CallExpr(&'a ast::CallExpr), | 77 | CallExpr(ast::CallExpr), |
78 | MethodCallExpr(&'a ast::MethodCallExpr), | 78 | MethodCallExpr(ast::MethodCallExpr), |
79 | } | 79 | } |
80 | 80 | ||
81 | impl<'a> FnCallNode<'a> { | 81 | impl FnCallNode { |
82 | fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option<FnCallNode<'a>> { | 82 | fn with_node(syntax: &SyntaxNode, offset: TextUnit) -> Option<FnCallNode> { |
83 | if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) { | 83 | if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) { |
84 | return Some(FnCallNode::CallExpr(expr)); | 84 | return Some(FnCallNode::CallExpr(expr)); |
85 | } | 85 | } |
@@ -89,8 +89,8 @@ impl<'a> FnCallNode<'a> { | |||
89 | None | 89 | None |
90 | } | 90 | } |
91 | 91 | ||
92 | fn name_ref(&self) -> Option<&'a ast::NameRef> { | 92 | fn name_ref(&self) -> Option<ast::NameRef> { |
93 | match *self { | 93 | match self { |
94 | FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() { | 94 | FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() { |
95 | ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, | 95 | ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, |
96 | _ => return None, | 96 | _ => return None, |
@@ -102,8 +102,8 @@ impl<'a> FnCallNode<'a> { | |||
102 | } | 102 | } |
103 | } | 103 | } |
104 | 104 | ||
105 | fn arg_list(&self) -> Option<&'a ast::ArgList> { | 105 | fn arg_list(&self) -> Option<ast::ArgList> { |
106 | match *self { | 106 | match self { |
107 | FnCallNode::CallExpr(expr) => expr.arg_list(), | 107 | FnCallNode::CallExpr(expr) => expr.arg_list(), |
108 | FnCallNode::MethodCallExpr(expr) => expr.arg_list(), | 108 | FnCallNode::MethodCallExpr(expr) => expr.arg_list(), |
109 | } | 109 | } |
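`FnCallNode` in `call_info.rs` shows what the migration does to container types: because the variants now own their nodes, the `'a` lifetime parameter disappears and the accessors return owned `ast::NameRef` / `ast::ArgList` values. A toy sketch of the new shape (invented fields, not the real ast types):

```rust
#[derive(Clone)]
struct CallExpr {
    callee: String,
}

#[derive(Clone)]
struct MethodCallExpr {
    method: String,
}

// No lifetime parameter: the enum owns its nodes outright.
enum FnCallNode {
    CallExpr(CallExpr),
    MethodCallExpr(MethodCallExpr),
}

impl FnCallNode {
    // `match self` on `&self` yields `&CallExpr` / `&MethodCallExpr`,
    // and only the cloned name escapes.
    fn name(&self) -> String {
        match self {
            FnCallNode::CallExpr(call) => call.callee.clone(),
            FnCallNode::MethodCallExpr(call) => call.method.clone(),
        }
    }
}

fn main() {
    let node = FnCallNode::MethodCallExpr(MethodCallExpr { method: "type_of".into() });
    assert_eq!(node.name(), "type_of");
}
```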
diff --git a/crates/ra_ide_api/src/completion/complete_dot.rs b/crates/ra_ide_api/src/completion/complete_dot.rs index a5f071442..536ba36df 100644 --- a/crates/ra_ide_api/src/completion/complete_dot.rs +++ b/crates/ra_ide_api/src/completion/complete_dot.rs | |||
@@ -5,10 +5,11 @@ use rustc_hash::FxHashSet; | |||
5 | 5 | ||
6 | /// Complete dot accesses, i.e. fields or methods (currently only fields). | 6 | /// Complete dot accesses, i.e. fields or methods (currently only fields). |
7 | pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { | 7 | pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { |
8 | let receiver_ty = match ctx.dot_receiver.and_then(|it| ctx.analyzer.type_of(ctx.db, it)) { | 8 | let receiver_ty = |
9 | Some(it) => it, | 9 | match ctx.dot_receiver.as_ref().and_then(|it| ctx.analyzer.type_of(ctx.db, it)) { |
10 | None => return, | 10 | Some(it) => it, |
11 | }; | 11 | None => return, |
12 | }; | ||
12 | if !ctx.is_call { | 13 | if !ctx.is_call { |
13 | complete_fields(acc, ctx, receiver_ty.clone()); | 14 | complete_fields(acc, ctx, receiver_ty.clone()); |
14 | } | 15 | } |
diff --git a/crates/ra_ide_api/src/completion/complete_fn_param.rs b/crates/ra_ide_api/src/completion/complete_fn_param.rs index 5a117c485..0887ef1f6 100644 --- a/crates/ra_ide_api/src/completion/complete_fn_param.rs +++ b/crates/ra_ide_api/src/completion/complete_fn_param.rs | |||
@@ -20,7 +20,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) | |||
20 | let _ = visitor_ctx(&mut params) | 20 | let _ = visitor_ctx(&mut params) |
21 | .visit::<ast::SourceFile, _>(process) | 21 | .visit::<ast::SourceFile, _>(process) |
22 | .visit::<ast::ItemList, _>(process) | 22 | .visit::<ast::ItemList, _>(process) |
23 | .accept(node); | 23 | .accept(&node); |
24 | } | 24 | } |
25 | params | 25 | params |
26 | .into_iter() | 26 | .into_iter() |
@@ -38,10 +38,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) | |||
38 | .add_to(acc) | 38 | .add_to(acc) |
39 | }); | 39 | }); |
40 | 40 | ||
41 | fn process<'a, N: ast::FnDefOwner>( | 41 | fn process<N: ast::FnDefOwner>(node: N, params: &mut FxHashMap<String, (u32, ast::Param)>) { |
42 | node: &'a N, | ||
43 | params: &mut FxHashMap<String, (u32, &'a ast::Param)>, | ||
44 | ) { | ||
45 | node.functions().filter_map(|it| it.param_list()).flat_map(|it| it.params()).for_each( | 42 | node.functions().filter_map(|it| it.param_list()).flat_map(|it| it.params()).for_each( |
46 | |param| { | 43 | |param| { |
47 | let text = param.syntax().text().to_string(); | 44 | let text = param.syntax().text().to_string(); |
diff --git a/crates/ra_ide_api/src/completion/complete_keyword.rs b/crates/ra_ide_api/src/completion/complete_keyword.rs index 034ed934d..4cf34eff8 100644 --- a/crates/ra_ide_api/src/completion/complete_keyword.rs +++ b/crates/ra_ide_api/src/completion/complete_keyword.rs | |||
@@ -52,7 +52,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte | |||
52 | return; | 52 | return; |
53 | } | 53 | } |
54 | 54 | ||
55 | let fn_def = match ctx.function_syntax { | 55 | let fn_def = match &ctx.function_syntax { |
56 | Some(it) => it, | 56 | Some(it) => it, |
57 | None => return, | 57 | None => return, |
58 | }; | 58 | }; |
@@ -65,7 +65,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte | |||
65 | acc.add(keyword(ctx, "else", "else {$0}")); | 65 | acc.add(keyword(ctx, "else", "else {$0}")); |
66 | acc.add(keyword(ctx, "else if", "else if $0 {}")); | 66 | acc.add(keyword(ctx, "else if", "else if $0 {}")); |
67 | } | 67 | } |
68 | if is_in_loop_body(ctx.token) { | 68 | if is_in_loop_body(&ctx.token) { |
69 | if ctx.can_be_stmt { | 69 | if ctx.can_be_stmt { |
70 | acc.add(keyword(ctx, "continue", "continue;")); | 70 | acc.add(keyword(ctx, "continue", "continue;")); |
71 | acc.add(keyword(ctx, "break", "break;")); | 71 | acc.add(keyword(ctx, "break", "break;")); |
@@ -74,19 +74,19 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte | |||
74 | acc.add(keyword(ctx, "break", "break")); | 74 | acc.add(keyword(ctx, "break", "break")); |
75 | } | 75 | } |
76 | } | 76 | } |
77 | acc.add_all(complete_return(ctx, fn_def, ctx.can_be_stmt)); | 77 | acc.add_all(complete_return(ctx, &fn_def, ctx.can_be_stmt)); |
78 | } | 78 | } |
79 | 79 | ||
80 | fn is_in_loop_body(leaf: SyntaxToken) -> bool { | 80 | fn is_in_loop_body(leaf: &SyntaxToken) -> bool { |
81 | for node in leaf.parent().ancestors() { | 81 | for node in leaf.parent().ancestors() { |
82 | if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR { | 82 | if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR { |
83 | break; | 83 | break; |
84 | } | 84 | } |
85 | let loop_body = visitor() | 85 | let loop_body = visitor() |
86 | .visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body) | 86 | .visit::<ast::ForExpr, _>(|it| it.loop_body()) |
87 | .visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body) | 87 | .visit::<ast::WhileExpr, _>(|it| it.loop_body()) |
88 | .visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body) | 88 | .visit::<ast::LoopExpr, _>(|it| it.loop_body()) |
89 | .accept(node); | 89 | .accept(&node); |
90 | if let Some(Some(body)) = loop_body { | 90 | if let Some(Some(body)) = loop_body { |
91 | if leaf.range().is_subrange(&body.syntax().range()) { | 91 | if leaf.range().is_subrange(&body.syntax().range()) { |
92 | return true; | 92 | return true; |
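Note the switch from the method path `LoopBodyOwner::loop_body` to closures like `|it| it.loop_body()`: the visitor now hands each node to the callback by value, so a path whose type is `fn(&N) -> ...` no longer fits the bound. A small sketch of the same type mismatch, with hypothetical stand-in types rather than the real ra_syntax visitor:

    // Hypothetical toy trait mirroring the `&self` method in the diff.
    trait LoopBodyOwner {
        fn loop_body(&self) -> Option<String>;
    }

    struct WhileExpr;

    impl LoopBodyOwner for WhileExpr {
        fn loop_body(&self) -> Option<String> {
            Some("{ body }".to_string())
        }
    }

    // A visitor-like helper that passes the node to the callback by value.
    fn visit<N, T>(node: N, f: impl FnOnce(N) -> T) -> T {
        f(node)
    }

    fn main() {
        // `visit(WhileExpr, LoopBodyOwner::loop_body)` would not compile: the
        // method path has type `fn(&WhileExpr) -> Option<String>`, but `visit`
        // hands the node over by value. The closure bridges the gap by
        // borrowing `it` only for the duration of the call.
        let body = visit(WhileExpr, |it| it.loop_body());
        println!("{:?}", body);
    }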
diff --git a/crates/ra_ide_api/src/completion/complete_postfix.rs b/crates/ra_ide_api/src/completion/complete_postfix.rs index 4f5062214..c75b1c159 100644 --- a/crates/ra_ide_api/src/completion/complete_postfix.rs +++ b/crates/ra_ide_api/src/completion/complete_postfix.rs | |||
@@ -11,7 +11,8 @@ use ra_text_edit::TextEditBuilder; | |||
11 | 11 | ||
12 | fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder { | 12 | fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder { |
13 | let edit = { | 13 | let edit = { |
14 | let receiver_range = ctx.dot_receiver.expect("no receiver available").syntax().range(); | 14 | let receiver_range = |
15 | ctx.dot_receiver.as_ref().expect("no receiver available").syntax().range(); | ||
15 | let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end()); | 16 | let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end()); |
16 | let mut builder = TextEditBuilder::default(); | 17 | let mut builder = TextEditBuilder::default(); |
17 | builder.replace(delete_range, snippet.to_string()); | 18 | builder.replace(delete_range, snippet.to_string()); |
@@ -38,9 +39,9 @@ fn is_bool_or_unknown(ty: Option<Ty>) -> bool { | |||
38 | } | 39 | } |
39 | 40 | ||
40 | pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | 41 | pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { |
41 | if let Some(dot_receiver) = ctx.dot_receiver { | 42 | if let Some(dot_receiver) = &ctx.dot_receiver { |
42 | let receiver_text = dot_receiver.syntax().text().to_string(); | 43 | let receiver_text = dot_receiver.syntax().text().to_string(); |
43 | let receiver_ty = ctx.analyzer.type_of(ctx.db, dot_receiver); | 44 | let receiver_ty = ctx.analyzer.type_of(ctx.db, &dot_receiver); |
44 | if is_bool_or_unknown(receiver_ty) { | 45 | if is_bool_or_unknown(receiver_ty) { |
45 | postfix_snippet(ctx, "if", "if expr {}", &format!("if {} {{$0}}", receiver_text)) | 46 | postfix_snippet(ctx, "if", "if expr {}", &format!("if {} {{$0}}", receiver_text)) |
46 | .add_to(acc); | 47 | .add_to(acc); |
diff --git a/crates/ra_ide_api/src/completion/complete_scope.rs b/crates/ra_ide_api/src/completion/complete_scope.rs index 1ba871257..f92034055 100644 --- a/crates/ra_ide_api/src/completion/complete_scope.rs +++ b/crates/ra_ide_api/src/completion/complete_scope.rs | |||
@@ -20,8 +20,8 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) { | |||
20 | let mut builder = TextEditBuilder::default(); | 20 | let mut builder = TextEditBuilder::default(); |
21 | builder.replace(ctx.source_range(), name.to_string()); | 21 | builder.replace(ctx.source_range(), name.to_string()); |
22 | auto_import::auto_import_text_edit( | 22 | auto_import::auto_import_text_edit( |
23 | ctx.token.parent(), | 23 | &ctx.token.parent(), |
24 | ctx.token.parent(), | 24 | &ctx.token.parent(), |
25 | &path, | 25 | &path, |
26 | &mut builder, | 26 | &mut builder, |
27 | ); | 27 | ); |
diff --git a/crates/ra_ide_api/src/completion/complete_struct_literal.rs b/crates/ra_ide_api/src/completion/complete_struct_literal.rs index b6216f857..9410f740f 100644 --- a/crates/ra_ide_api/src/completion/complete_struct_literal.rs +++ b/crates/ra_ide_api/src/completion/complete_struct_literal.rs | |||
@@ -4,8 +4,8 @@ use crate::completion::{CompletionContext, Completions}; | |||
4 | 4 | ||
5 | /// Complete fields in fields literals. | 5 | /// Complete fields in fields literals. |
6 | pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) { | 6 | pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) { |
7 | let (ty, variant) = match ctx.struct_lit_syntax.and_then(|it| { | 7 | let (ty, variant) = match ctx.struct_lit_syntax.as_ref().and_then(|it| { |
8 | Some((ctx.analyzer.type_of(ctx.db, it.into())?, ctx.analyzer.resolve_variant(it)?)) | 8 | Some((ctx.analyzer.type_of(ctx.db, &it.clone().into())?, ctx.analyzer.resolve_variant(it)?)) |
9 | }) { | 9 | }) { |
10 | Some(it) => it, | 10 | Some(it) => it, |
11 | _ => return, | 11 | _ => return, |
diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs index 4aa84751f..b803271ab 100644 --- a/crates/ra_ide_api/src/completion/completion_context.rs +++ b/crates/ra_ide_api/src/completion/completion_context.rs | |||
@@ -16,11 +16,11 @@ pub(crate) struct CompletionContext<'a> { | |||
16 | pub(super) db: &'a db::RootDatabase, | 16 | pub(super) db: &'a db::RootDatabase, |
17 | pub(super) analyzer: hir::SourceAnalyzer, | 17 | pub(super) analyzer: hir::SourceAnalyzer, |
18 | pub(super) offset: TextUnit, | 18 | pub(super) offset: TextUnit, |
19 | pub(super) token: SyntaxToken<'a>, | 19 | pub(super) token: SyntaxToken, |
20 | pub(super) module: Option<hir::Module>, | 20 | pub(super) module: Option<hir::Module>, |
21 | pub(super) function_syntax: Option<&'a ast::FnDef>, | 21 | pub(super) function_syntax: Option<ast::FnDef>, |
22 | pub(super) use_item_syntax: Option<&'a ast::UseItem>, | 22 | pub(super) use_item_syntax: Option<ast::UseItem>, |
23 | pub(super) struct_lit_syntax: Option<&'a ast::StructLit>, | 23 | pub(super) struct_lit_syntax: Option<ast::StructLit>, |
24 | pub(super) is_param: bool, | 24 | pub(super) is_param: bool, |
25 | /// If a name-binding or reference to a const in a pattern. | 25 | /// If a name-binding or reference to a const in a pattern. |
26 | /// Irrefutable patterns (like let) are excluded. | 26 | /// Irrefutable patterns (like let) are excluded. |
@@ -35,7 +35,7 @@ pub(crate) struct CompletionContext<'a> { | |||
35 | /// Something is typed at the "top" level, in module or impl/trait. | 35 | /// Something is typed at the "top" level, in module or impl/trait. |
36 | pub(super) is_new_item: bool, | 36 | pub(super) is_new_item: bool, |
37 | /// The receiver if this is a field or method access, i.e. writing something.<|> | 37 | /// The receiver if this is a field or method access, i.e. writing something.<|> |
38 | pub(super) dot_receiver: Option<&'a ast::Expr>, | 38 | pub(super) dot_receiver: Option<ast::Expr>, |
39 | /// If this is a call (method or function) in particular, i.e. the () are already there. | 39 | /// If this is a call (method or function) in particular, i.e. the () are already there. |
40 | pub(super) is_call: bool, | 40 | pub(super) is_call: bool, |
41 | } | 41 | } |
@@ -50,7 +50,7 @@ impl<'a> CompletionContext<'a> { | |||
50 | let token = | 50 | let token = |
51 | find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?; | 51 | find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?; |
52 | let analyzer = | 52 | let analyzer = |
53 | hir::SourceAnalyzer::new(db, position.file_id, token.parent(), Some(position.offset)); | 53 | hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset)); |
54 | let mut ctx = CompletionContext { | 54 | let mut ctx = CompletionContext { |
55 | db, | 55 | db, |
56 | analyzer, | 56 | analyzer, |
@@ -109,7 +109,7 @@ impl<'a> CompletionContext<'a> { | |||
109 | if is_node::<ast::BindPat>(name.syntax()) { | 109 | if is_node::<ast::BindPat>(name.syntax()) { |
110 | let bind_pat = name.syntax().ancestors().find_map(ast::BindPat::cast).unwrap(); | 110 | let bind_pat = name.syntax().ancestors().find_map(ast::BindPat::cast).unwrap(); |
111 | let parent = bind_pat.syntax().parent(); | 111 | let parent = bind_pat.syntax().parent(); |
112 | if parent.and_then(ast::MatchArm::cast).is_some() | 112 | if parent.clone().and_then(ast::MatchArm::cast).is_some() |
113 | || parent.and_then(ast::Condition::cast).is_some() | 113 | || parent.and_then(ast::Condition::cast).is_some() |
114 | { | 114 | { |
115 | self.is_pat_binding = true; | 115 | self.is_pat_binding = true; |
@@ -122,7 +122,7 @@ impl<'a> CompletionContext<'a> { | |||
122 | } | 122 | } |
123 | } | 123 | } |
124 | 124 | ||
125 | fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) { | 125 | fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) { |
126 | let name_range = name_ref.syntax().range(); | 126 | let name_range = name_ref.syntax().range(); |
127 | if name_ref.syntax().parent().and_then(ast::NamedField::cast).is_some() { | 127 | if name_ref.syntax().parent().and_then(ast::NamedField::cast).is_some() { |
128 | self.struct_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset); | 128 | self.struct_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset); |
@@ -153,7 +153,7 @@ impl<'a> CompletionContext<'a> { | |||
153 | None => return, | 153 | None => return, |
154 | }; | 154 | }; |
155 | 155 | ||
156 | if let Some(segment) = ast::PathSegment::cast(parent) { | 156 | if let Some(segment) = ast::PathSegment::cast(parent.clone()) { |
157 | let path = segment.parent_path(); | 157 | let path = segment.parent_path(); |
158 | self.is_call = path | 158 | self.is_call = path |
159 | .syntax() | 159 | .syntax() |
@@ -162,7 +162,7 @@ impl<'a> CompletionContext<'a> { | |||
162 | .and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast)) | 162 | .and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast)) |
163 | .is_some(); | 163 | .is_some(); |
164 | 164 | ||
165 | if let Some(mut path) = hir::Path::from_ast(path) { | 165 | if let Some(mut path) = hir::Path::from_ast(path.clone()) { |
166 | if !path.is_ident() { | 166 | if !path.is_ident() { |
167 | path.segments.pop().unwrap(); | 167 | path.segments.pop().unwrap(); |
168 | self.path_prefix = Some(path); | 168 | self.path_prefix = Some(path); |
@@ -179,7 +179,7 @@ impl<'a> CompletionContext<'a> { | |||
179 | .syntax() | 179 | .syntax() |
180 | .ancestors() | 180 | .ancestors() |
181 | .find_map(|node| { | 181 | .find_map(|node| { |
182 | if let Some(stmt) = ast::ExprStmt::cast(node) { | 182 | if let Some(stmt) = ast::ExprStmt::cast(node.clone()) { |
183 | return Some(stmt.syntax().range() == name_ref.syntax().range()); | 183 | return Some(stmt.syntax().range() == name_ref.syntax().range()); |
184 | } | 184 | } |
185 | if let Some(block) = ast::Block::cast(node) { | 185 | if let Some(block) = ast::Block::cast(node) { |
@@ -203,7 +203,7 @@ impl<'a> CompletionContext<'a> { | |||
203 | } | 203 | } |
204 | } | 204 | } |
205 | } | 205 | } |
206 | if let Some(field_expr) = ast::FieldExpr::cast(parent) { | 206 | if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { |
207 | // The receiver comes before the point of insertion of the fake | 207 | // The receiver comes before the point of insertion of the fake |
208 | // ident, so it should have the same range in the non-modified file | 208 | // ident, so it should have the same range in the non-modified file |
209 | self.dot_receiver = field_expr | 209 | self.dot_receiver = field_expr |
@@ -222,7 +222,7 @@ impl<'a> CompletionContext<'a> { | |||
222 | } | 222 | } |
223 | } | 223 | } |
224 | 224 | ||
225 | fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<&N> { | 225 | fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> { |
226 | find_covering_element(syntax, range).ancestors().find_map(N::cast) | 226 | find_covering_element(syntax, range).ancestors().find_map(N::cast) |
227 | } | 227 | } |
228 | 228 | ||
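With `CompletionContext` now holding owned nodes (`Option<ast::FnDef>`, `Option<ast::Expr>`, and a `SyntaxToken` without a lifetime) instead of `&'a` references, the use sites in the hunks above borrow the fields with `match &ctx.field` or `ctx.field.as_ref()` rather than copying a shared reference out of the struct. A compact sketch of that borrow pattern, with hypothetical stand-in types:

    #[derive(Clone, Debug)]
    struct FnDef {
        name: String,
    }

    #[derive(Clone, Debug)]
    struct Expr {
        text: String,
    }

    // Hypothetical simplified context holding owned nodes, like the new struct.
    struct CompletionContext {
        function_syntax: Option<FnDef>,
        dot_receiver: Option<Expr>,
    }

    fn complete(ctx: &CompletionContext) -> Option<String> {
        // `match ctx.function_syntax { .. }` would try to move the owned field
        // out of a borrowed context; matching on a reference borrows it instead.
        let fn_def = match &ctx.function_syntax {
            Some(it) => it,
            None => return None,
        };
        // Same idea for `Option` fields: borrow with `.as_ref()`.
        let receiver = ctx.dot_receiver.as_ref()?;
        Some(format!("completing `{}` inside fn {}", receiver.text, fn_def.name))
    }

    fn main() {
        let ctx = CompletionContext {
            function_syntax: Some(FnDef { name: "main".to_string() }),
            dot_receiver: Some(Expr { text: "foo".to_string() }),
        };
        println!("{:?}", complete(&ctx));
    }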
diff --git a/crates/ra_ide_api/src/diagnostics.rs b/crates/ra_ide_api/src/diagnostics.rs index 3f5b9e0a0..affbad6cd 100644 --- a/crates/ra_ide_api/src/diagnostics.rs +++ b/crates/ra_ide_api/src/diagnostics.rs | |||
@@ -35,8 +35,8 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> | |||
35 | })); | 35 | })); |
36 | 36 | ||
37 | for node in parse.tree().syntax().descendants() { | 37 | for node in parse.tree().syntax().descendants() { |
38 | check_unnecessary_braces_in_use_statement(&mut res, file_id, node); | 38 | check_unnecessary_braces_in_use_statement(&mut res, file_id, &node); |
39 | check_struct_shorthand_initialization(&mut res, file_id, node); | 39 | check_struct_shorthand_initialization(&mut res, file_id, &node); |
40 | } | 40 | } |
41 | let res = RefCell::new(res); | 41 | let res = RefCell::new(res); |
42 | let mut sink = DiagnosticSink::new(|d| { | 42 | let mut sink = DiagnosticSink::new(|d| { |
@@ -60,7 +60,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> | |||
60 | }) | 60 | }) |
61 | .on::<hir::diagnostics::MissingFields, _>(|d| { | 61 | .on::<hir::diagnostics::MissingFields, _>(|d| { |
62 | let node = d.ast(db); | 62 | let node = d.ast(db); |
63 | let mut ast_editor = AstEditor::new(&*node); | 63 | let mut ast_editor = AstEditor::new(node); |
64 | for f in d.missed_fields.iter() { | 64 | for f in d.missed_fields.iter() { |
65 | ast_editor.append_field(&AstBuilder::<NamedField>::from_name(f)); | 65 | ast_editor.append_field(&AstBuilder::<NamedField>::from_name(f)); |
66 | } | 66 | } |
@@ -94,11 +94,11 @@ fn check_unnecessary_braces_in_use_statement( | |||
94 | file_id: FileId, | 94 | file_id: FileId, |
95 | node: &SyntaxNode, | 95 | node: &SyntaxNode, |
96 | ) -> Option<()> { | 96 | ) -> Option<()> { |
97 | let use_tree_list = ast::UseTreeList::cast(node)?; | 97 | let use_tree_list = ast::UseTreeList::cast(node.clone())?; |
98 | if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() { | 98 | if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() { |
99 | let range = use_tree_list.syntax().range(); | 99 | let range = use_tree_list.syntax().range(); |
100 | let edit = | 100 | let edit = |
101 | text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(single_use_tree) | 101 | text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree) |
102 | .unwrap_or_else(|| { | 102 | .unwrap_or_else(|| { |
103 | let to_replace = single_use_tree.syntax().text().to_string(); | 103 | let to_replace = single_use_tree.syntax().text().to_string(); |
104 | let mut edit_builder = TextEditBuilder::default(); | 104 | let mut edit_builder = TextEditBuilder::default(); |
@@ -141,7 +141,7 @@ fn check_struct_shorthand_initialization( | |||
141 | file_id: FileId, | 141 | file_id: FileId, |
142 | node: &SyntaxNode, | 142 | node: &SyntaxNode, |
143 | ) -> Option<()> { | 143 | ) -> Option<()> { |
144 | let struct_lit = ast::StructLit::cast(node)?; | 144 | let struct_lit = ast::StructLit::cast(node.clone())?; |
145 | let named_field_list = struct_lit.named_field_list()?; | 145 | let named_field_list = struct_lit.named_field_list()?; |
146 | for named_field in named_field_list.fields() { | 146 | for named_field in named_field_list.fields() { |
147 | if let (Some(name_ref), Some(expr)) = (named_field.name_ref(), named_field.expr()) { | 147 | if let (Some(name_ref), Some(expr)) = (named_field.name_ref(), named_field.expr()) { |
@@ -184,7 +184,7 @@ mod tests { | |||
184 | let parse = SourceFile::parse(code); | 184 | let parse = SourceFile::parse(code); |
185 | let mut diagnostics = Vec::new(); | 185 | let mut diagnostics = Vec::new(); |
186 | for node in parse.tree().syntax().descendants() { | 186 | for node in parse.tree().syntax().descendants() { |
187 | func(&mut diagnostics, FileId(0), node); | 187 | func(&mut diagnostics, FileId(0), &node); |
188 | } | 188 | } |
189 | assert!(diagnostics.is_empty()); | 189 | assert!(diagnostics.is_empty()); |
190 | } | 190 | } |
@@ -193,7 +193,7 @@ mod tests { | |||
193 | let parse = SourceFile::parse(before); | 193 | let parse = SourceFile::parse(before); |
194 | let mut diagnostics = Vec::new(); | 194 | let mut diagnostics = Vec::new(); |
195 | for node in parse.tree().syntax().descendants() { | 195 | for node in parse.tree().syntax().descendants() { |
196 | func(&mut diagnostics, FileId(0), node); | 196 | func(&mut diagnostics, FileId(0), &node); |
197 | } | 197 | } |
198 | let diagnostic = | 198 | let diagnostic = |
199 | diagnostics.pop().unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before)); | 199 | diagnostics.pop().unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before)); |
diff --git a/crates/ra_ide_api/src/display/function_signature.rs b/crates/ra_ide_api/src/display/function_signature.rs index e7ad5a0d1..644a4532b 100644 --- a/crates/ra_ide_api/src/display/function_signature.rs +++ b/crates/ra_ide_api/src/display/function_signature.rs | |||
@@ -38,7 +38,7 @@ impl FunctionSignature { | |||
38 | pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self { | 38 | pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self { |
39 | let doc = function.docs(db); | 39 | let doc = function.docs(db); |
40 | let ast_node = function.source(db).ast; | 40 | let ast_node = function.source(db).ast; |
41 | FunctionSignature::from(&*ast_node).with_doc_opt(doc) | 41 | FunctionSignature::from(&ast_node).with_doc_opt(doc) |
42 | } | 42 | } |
43 | } | 43 | } |
44 | 44 | ||
diff --git a/crates/ra_ide_api/src/display/navigation_target.rs b/crates/ra_ide_api/src/display/navigation_target.rs index 20a8d418e..8cc853dd1 100644 --- a/crates/ra_ide_api/src/display/navigation_target.rs +++ b/crates/ra_ide_api/src/display/navigation_target.rs | |||
@@ -5,7 +5,7 @@ use ra_syntax::{ | |||
5 | ast::{self, DocCommentsOwner}, | 5 | ast::{self, DocCommentsOwner}, |
6 | AstNode, AstPtr, SmolStr, | 6 | AstNode, AstPtr, SmolStr, |
7 | SyntaxKind::{self, NAME}, | 7 | SyntaxKind::{self, NAME}, |
8 | SyntaxNode, TextRange, TreeArc, | 8 | SyntaxNode, TextRange, |
9 | }; | 9 | }; |
10 | 10 | ||
11 | use super::short_label::ShortLabel; | 11 | use super::short_label::ShortLabel; |
@@ -169,7 +169,7 @@ impl NavigationTarget { | |||
169 | let file_id = src.file_id.original_file(db); | 169 | let file_id = src.file_id.original_file(db); |
170 | match src.ast { | 170 | match src.ast { |
171 | FieldSource::Named(it) => { | 171 | FieldSource::Named(it) => { |
172 | NavigationTarget::from_named(file_id, &*it, it.doc_comment_text(), it.short_label()) | 172 | NavigationTarget::from_named(file_id, &it, it.doc_comment_text(), it.short_label()) |
173 | } | 173 | } |
174 | FieldSource::Pos(it) => { | 174 | FieldSource::Pos(it) => { |
175 | NavigationTarget::from_syntax(file_id, "".into(), None, it.syntax(), None, None) | 175 | NavigationTarget::from_syntax(file_id, "".into(), None, it.syntax(), None, None) |
@@ -179,13 +179,13 @@ impl NavigationTarget { | |||
179 | 179 | ||
180 | pub(crate) fn from_def_source<A, D>(db: &RootDatabase, def: D) -> NavigationTarget | 180 | pub(crate) fn from_def_source<A, D>(db: &RootDatabase, def: D) -> NavigationTarget |
181 | where | 181 | where |
182 | D: HasSource<Ast = TreeArc<A>>, | 182 | D: HasSource<Ast = A>, |
183 | A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, | 183 | A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, |
184 | { | 184 | { |
185 | let src = def.source(db); | 185 | let src = def.source(db); |
186 | NavigationTarget::from_named( | 186 | NavigationTarget::from_named( |
187 | src.file_id.original_file(db), | 187 | src.file_id.original_file(db), |
188 | &*src.ast, | 188 | &src.ast, |
189 | src.ast.doc_comment_text(), | 189 | src.ast.doc_comment_text(), |
190 | src.ast.short_label(), | 190 | src.ast.short_label(), |
191 | ) | 191 | ) |
@@ -249,7 +249,7 @@ impl NavigationTarget { | |||
249 | log::debug!("nav target {}", src.ast.syntax().debug_dump()); | 249 | log::debug!("nav target {}", src.ast.syntax().debug_dump()); |
250 | NavigationTarget::from_named( | 250 | NavigationTarget::from_named( |
251 | src.file_id.original_file(db), | 251 | src.file_id.original_file(db), |
252 | &*src.ast, | 252 | &src.ast, |
253 | src.ast.doc_comment_text(), | 253 | src.ast.doc_comment_text(), |
254 | None, | 254 | None, |
255 | ) | 255 | ) |
@@ -318,22 +318,18 @@ pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option | |||
318 | let parse = db.parse(symbol.file_id); | 318 | let parse = db.parse(symbol.file_id); |
319 | let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned(); | 319 | let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned(); |
320 | 320 | ||
321 | fn doc_comments<N: ast::DocCommentsOwner>(node: &N) -> Option<String> { | ||
322 | node.doc_comment_text() | ||
323 | } | ||
324 | |||
325 | visitor() | 321 | visitor() |
326 | .visit(doc_comments::<ast::FnDef>) | 322 | .visit(|it: ast::FnDef| it.doc_comment_text()) |
327 | .visit(doc_comments::<ast::StructDef>) | 323 | .visit(|it: ast::StructDef| it.doc_comment_text()) |
328 | .visit(doc_comments::<ast::EnumDef>) | 324 | .visit(|it: ast::EnumDef| it.doc_comment_text()) |
329 | .visit(doc_comments::<ast::TraitDef>) | 325 | .visit(|it: ast::TraitDef| it.doc_comment_text()) |
330 | .visit(doc_comments::<ast::Module>) | 326 | .visit(|it: ast::Module| it.doc_comment_text()) |
331 | .visit(doc_comments::<ast::TypeAliasDef>) | 327 | .visit(|it: ast::TypeAliasDef| it.doc_comment_text()) |
332 | .visit(doc_comments::<ast::ConstDef>) | 328 | .visit(|it: ast::ConstDef| it.doc_comment_text()) |
333 | .visit(doc_comments::<ast::StaticDef>) | 329 | .visit(|it: ast::StaticDef| it.doc_comment_text()) |
334 | .visit(doc_comments::<ast::NamedFieldDef>) | 330 | .visit(|it: ast::NamedFieldDef| it.doc_comment_text()) |
335 | .visit(doc_comments::<ast::EnumVariant>) | 331 | .visit(|it: ast::EnumVariant| it.doc_comment_text()) |
336 | .visit(doc_comments::<ast::MacroCall>) | 332 | .visit(|it: ast::MacroCall| it.doc_comment_text()) |
337 | .accept(&node)? | 333 | .accept(&node)? |
338 | } | 334 | } |
339 | 335 | ||
@@ -345,15 +341,15 @@ pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> | |||
345 | let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned(); | 341 | let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned(); |
346 | 342 | ||
347 | visitor() | 343 | visitor() |
348 | .visit(|node: &ast::FnDef| node.short_label()) | 344 | .visit(|node: ast::FnDef| node.short_label()) |
349 | .visit(|node: &ast::StructDef| node.short_label()) | 345 | .visit(|node: ast::StructDef| node.short_label()) |
350 | .visit(|node: &ast::EnumDef| node.short_label()) | 346 | .visit(|node: ast::EnumDef| node.short_label()) |
351 | .visit(|node: &ast::TraitDef| node.short_label()) | 347 | .visit(|node: ast::TraitDef| node.short_label()) |
352 | .visit(|node: &ast::Module| node.short_label()) | 348 | .visit(|node: ast::Module| node.short_label()) |
353 | .visit(|node: &ast::TypeAliasDef| node.short_label()) | 349 | .visit(|node: ast::TypeAliasDef| node.short_label()) |
354 | .visit(|node: &ast::ConstDef| node.short_label()) | 350 | .visit(|node: ast::ConstDef| node.short_label()) |
355 | .visit(|node: &ast::StaticDef| node.short_label()) | 351 | .visit(|node: ast::StaticDef| node.short_label()) |
356 | .visit(|node: &ast::NamedFieldDef| node.short_label()) | 352 | .visit(|node: ast::NamedFieldDef| node.short_label()) |
357 | .visit(|node: &ast::EnumVariant| node.short_label()) | 353 | .visit(|node: ast::EnumVariant| node.short_label()) |
358 | .accept(&node)? | 354 | .accept(&node)? |
359 | } | 355 | } |
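The `HasSource<Ast = TreeArc<A>>` to `HasSource<Ast = A>` change means the associated source type is now the AST node itself rather than a `TreeArc` pointer, so the generic helpers pass `&src.ast` instead of dereferencing with `&*src.ast`. A rough sketch of that bound with hypothetical stand-in traits and types (not the real hir/ra_syntax API):

    trait HasSource {
        type Ast;
        fn source(&self) -> Self::Ast;
    }

    trait ShortLabel {
        fn short_label(&self) -> Option<String>;
    }

    #[derive(Clone)]
    struct FnDef {
        name: String,
    }

    impl ShortLabel for FnDef {
        fn short_label(&self) -> Option<String> {
            Some(format!("fn {}", self.name))
        }
    }

    struct Function {
        ast: FnDef,
    }

    impl HasSource for Function {
        type Ast = FnDef; // old sketch: type Ast = TreeArc<FnDef>
        fn source(&self) -> FnDef {
            self.ast.clone()
        }
    }

    // Old sketch: `where D: HasSource<Ast = TreeArc<A>>`, body used `&*src`.
    fn nav_label<D, A>(def: &D) -> Option<String>
    where
        D: HasSource<Ast = A>,
        A: ShortLabel,
    {
        let src = def.source();
        src.short_label()
    }

    fn main() {
        let f = Function { ast: FnDef { name: "main".to_string() } };
        println!("{:?}", nav_label(&f));
    }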
diff --git a/crates/ra_ide_api/src/display/structure.rs b/crates/ra_ide_api/src/display/structure.rs index 638484a9b..2e183d2f6 100644 --- a/crates/ra_ide_api/src/display/structure.rs +++ b/crates/ra_ide_api/src/display/structure.rs | |||
@@ -24,14 +24,14 @@ pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> { | |||
24 | for event in file.syntax().preorder() { | 24 | for event in file.syntax().preorder() { |
25 | match event { | 25 | match event { |
26 | WalkEvent::Enter(node) => { | 26 | WalkEvent::Enter(node) => { |
27 | if let Some(mut symbol) = structure_node(node) { | 27 | if let Some(mut symbol) = structure_node(&node) { |
28 | symbol.parent = stack.last().copied(); | 28 | symbol.parent = stack.last().copied(); |
29 | stack.push(res.len()); | 29 | stack.push(res.len()); |
30 | res.push(symbol); | 30 | res.push(symbol); |
31 | } | 31 | } |
32 | } | 32 | } |
33 | WalkEvent::Leave(node) => { | 33 | WalkEvent::Leave(node) => { |
34 | if structure_node(node).is_some() { | 34 | if structure_node(&node).is_some() { |
35 | stack.pop().unwrap(); | 35 | stack.pop().unwrap(); |
36 | } | 36 | } |
37 | } | 37 | } |
@@ -41,19 +41,20 @@ pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> { | |||
41 | } | 41 | } |
42 | 42 | ||
43 | fn structure_node(node: &SyntaxNode) -> Option<StructureNode> { | 43 | fn structure_node(node: &SyntaxNode) -> Option<StructureNode> { |
44 | fn decl<N: NameOwner + AttrsOwner>(node: &N) -> Option<StructureNode> { | 44 | fn decl<N: NameOwner + AttrsOwner>(node: N) -> Option<StructureNode> { |
45 | decl_with_detail(node, None) | 45 | decl_with_detail(node, None) |
46 | } | 46 | } |
47 | 47 | ||
48 | fn decl_with_ascription<N: NameOwner + AttrsOwner + TypeAscriptionOwner>( | 48 | fn decl_with_ascription<N: NameOwner + AttrsOwner + TypeAscriptionOwner>( |
49 | node: &N, | 49 | node: N, |
50 | ) -> Option<StructureNode> { | 50 | ) -> Option<StructureNode> { |
51 | decl_with_type_ref(node, node.ascribed_type()) | 51 | let ty = node.ascribed_type(); |
52 | decl_with_type_ref(node, ty) | ||
52 | } | 53 | } |
53 | 54 | ||
54 | fn decl_with_type_ref<N: NameOwner + AttrsOwner>( | 55 | fn decl_with_type_ref<N: NameOwner + AttrsOwner>( |
55 | node: &N, | 56 | node: N, |
56 | type_ref: Option<&ast::TypeRef>, | 57 | type_ref: Option<ast::TypeRef>, |
57 | ) -> Option<StructureNode> { | 58 | ) -> Option<StructureNode> { |
58 | let detail = type_ref.map(|type_ref| { | 59 | let detail = type_ref.map(|type_ref| { |
59 | let mut detail = String::new(); | 60 | let mut detail = String::new(); |
@@ -64,7 +65,7 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> { | |||
64 | } | 65 | } |
65 | 66 | ||
66 | fn decl_with_detail<N: NameOwner + AttrsOwner>( | 67 | fn decl_with_detail<N: NameOwner + AttrsOwner>( |
67 | node: &N, | 68 | node: N, |
68 | detail: Option<String>, | 69 | detail: Option<String>, |
69 | ) -> Option<StructureNode> { | 70 | ) -> Option<StructureNode> { |
70 | let name = node.name()?; | 71 | let name = node.name()?; |
@@ -82,22 +83,24 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> { | |||
82 | 83 | ||
83 | fn collapse_ws(node: &SyntaxNode, output: &mut String) { | 84 | fn collapse_ws(node: &SyntaxNode, output: &mut String) { |
84 | let mut can_insert_ws = false; | 85 | let mut can_insert_ws = false; |
85 | for line in node.text().chunks().flat_map(|chunk| chunk.lines()) { | 86 | for chunk in node.text().chunks() { |
86 | let line = line.trim(); | 87 | for line in chunk.lines() { |
87 | if line.is_empty() { | 88 | let line = line.trim(); |
88 | if can_insert_ws { | 89 | if line.is_empty() { |
89 | output.push_str(" "); | 90 | if can_insert_ws { |
90 | can_insert_ws = false; | 91 | output.push_str(" "); |
92 | can_insert_ws = false; | ||
93 | } | ||
94 | } else { | ||
95 | output.push_str(line); | ||
96 | can_insert_ws = true; | ||
91 | } | 97 | } |
92 | } else { | ||
93 | output.push_str(line); | ||
94 | can_insert_ws = true; | ||
95 | } | 98 | } |
96 | } | 99 | } |
97 | } | 100 | } |
98 | 101 | ||
99 | visitor() | 102 | visitor() |
100 | .visit(|fn_def: &ast::FnDef| { | 103 | .visit(|fn_def: ast::FnDef| { |
101 | let mut detail = String::from("fn"); | 104 | let mut detail = String::from("fn"); |
102 | if let Some(type_param_list) = fn_def.type_param_list() { | 105 | if let Some(type_param_list) = fn_def.type_param_list() { |
103 | collapse_ws(type_param_list.syntax(), &mut detail); | 106 | collapse_ws(type_param_list.syntax(), &mut detail); |
@@ -117,11 +120,14 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> { | |||
117 | .visit(decl::<ast::EnumVariant>) | 120 | .visit(decl::<ast::EnumVariant>) |
118 | .visit(decl::<ast::TraitDef>) | 121 | .visit(decl::<ast::TraitDef>) |
119 | .visit(decl::<ast::Module>) | 122 | .visit(decl::<ast::Module>) |
120 | .visit(|td: &ast::TypeAliasDef| decl_with_type_ref(td, td.type_ref())) | 123 | .visit(|td: ast::TypeAliasDef| { |
124 | let ty = td.type_ref(); | ||
125 | decl_with_type_ref(td, ty) | ||
126 | }) | ||
121 | .visit(decl_with_ascription::<ast::NamedFieldDef>) | 127 | .visit(decl_with_ascription::<ast::NamedFieldDef>) |
122 | .visit(decl_with_ascription::<ast::ConstDef>) | 128 | .visit(decl_with_ascription::<ast::ConstDef>) |
123 | .visit(decl_with_ascription::<ast::StaticDef>) | 129 | .visit(decl_with_ascription::<ast::StaticDef>) |
124 | .visit(|im: &ast::ImplBlock| { | 130 | .visit(|im: ast::ImplBlock| { |
125 | let target_type = im.target_type()?; | 131 | let target_type = im.target_type()?; |
126 | let target_trait = im.target_trait(); | 132 | let target_trait = im.target_trait(); |
127 | let label = match target_trait { | 133 | let label = match target_trait { |
@@ -142,14 +148,14 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> { | |||
142 | }; | 148 | }; |
143 | Some(node) | 149 | Some(node) |
144 | }) | 150 | }) |
145 | .visit(|mc: &ast::MacroCall| { | 151 | .visit(|mc: ast::MacroCall| { |
146 | let first_token = mc.syntax().first_token().unwrap(); | 152 | let first_token = mc.syntax().first_token().unwrap(); |
147 | if first_token.text().as_str() != "macro_rules" { | 153 | if first_token.text().as_str() != "macro_rules" { |
148 | return None; | 154 | return None; |
149 | } | 155 | } |
150 | decl(mc) | 156 | decl(mc) |
151 | }) | 157 | }) |
152 | .accept(node)? | 158 | .accept(&node)? |
153 | } | 159 | } |
154 | 160 | ||
155 | #[cfg(test)] | 161 | #[cfg(test)] |
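One detail worth noting in `decl_with_ascription`: once `node` is taken by value, the old one-liner `decl_with_type_ref(node, node.ascribed_type())` would not compile, because `node` is moved into the first argument before the second argument is evaluated, hence the hoisted `let ty = node.ascribed_type();`. A tiny sketch of that borrow-check interaction, with hypothetical stand-in types:

    #[derive(Clone)]
    struct NamedFieldDef {
        ty: Option<String>,
    }

    impl NamedFieldDef {
        fn ascribed_type(&self) -> Option<String> {
            self.ty.clone()
        }
    }

    fn decl_with_type_ref(node: NamedFieldDef, type_ref: Option<String>) -> String {
        format!("field (typed in source: {}), detail: {:?}", node.ty.is_some(), type_ref)
    }

    fn main() {
        let node = NamedFieldDef { ty: Some("u32".to_string()) };
        // `decl_with_type_ref(node, node.ascribed_type())` fails with E0382:
        // `node` is moved into the first argument before the second is evaluated.
        let ty = node.ascribed_type();
        println!("{}", decl_with_type_ref(node, ty));
    }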
diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs index 491b15702..8c49960f5 100644 --- a/crates/ra_ide_api/src/extend_selection.rs +++ b/crates/ra_ide_api/src/extend_selection.rs | |||
@@ -42,7 +42,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange | |||
42 | TokenAtOffset::None => return None, | 42 | TokenAtOffset::None => return None, |
43 | TokenAtOffset::Single(l) => { | 43 | TokenAtOffset::Single(l) => { |
44 | if string_kinds.contains(&l.kind()) { | 44 | if string_kinds.contains(&l.kind()) { |
45 | extend_single_word_in_comment_or_string(l, offset).unwrap_or_else(|| l.range()) | 45 | extend_single_word_in_comment_or_string(&l, offset).unwrap_or_else(|| l.range()) |
46 | } else { | 46 | } else { |
47 | l.range() | 47 | l.range() |
48 | } | 48 | } |
@@ -56,7 +56,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange | |||
56 | if token.range() != range { | 56 | if token.range() != range { |
57 | return Some(token.range()); | 57 | return Some(token.range()); |
58 | } | 58 | } |
59 | if let Some(comment) = ast::Comment::cast(token) { | 59 | if let Some(comment) = ast::Comment::cast(token.clone()) { |
60 | if let Some(range) = extend_comments(comment) { | 60 | if let Some(range) = extend_comments(comment) { |
61 | return Some(range); | 61 | return Some(range); |
62 | } | 62 | } |
@@ -73,7 +73,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange | |||
73 | let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap(); | 73 | let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap(); |
74 | 74 | ||
75 | if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { | 75 | if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { |
76 | if let Some(range) = extend_list_item(node) { | 76 | if let Some(range) = extend_list_item(&node) { |
77 | return Some(range); | 77 | return Some(range); |
78 | } | 78 | } |
79 | } | 79 | } |
@@ -82,7 +82,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange | |||
82 | } | 82 | } |
83 | 83 | ||
84 | fn extend_single_word_in_comment_or_string( | 84 | fn extend_single_word_in_comment_or_string( |
85 | leaf: SyntaxToken, | 85 | leaf: &SyntaxToken, |
86 | offset: TextUnit, | 86 | offset: TextUnit, |
87 | ) -> Option<TextRange> { | 87 | ) -> Option<TextRange> { |
88 | let text: &str = leaf.text(); | 88 | let text: &str = leaf.text(); |
@@ -131,9 +131,9 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextUnit) -> TextRange | |||
131 | ws.range() | 131 | ws.range() |
132 | } | 132 | } |
133 | 133 | ||
134 | fn pick_best<'a>(l: SyntaxToken<'a>, r: SyntaxToken<'a>) -> SyntaxToken<'a> { | 134 | fn pick_best<'a>(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken { |
135 | return if priority(r) > priority(l) { r } else { l }; | 135 | return if priority(&r) > priority(&l) { r } else { l }; |
136 | fn priority(n: SyntaxToken) -> usize { | 136 | fn priority(n: &SyntaxToken) -> usize { |
137 | match n.kind() { | 137 | match n.kind() { |
138 | WHITESPACE => 0, | 138 | WHITESPACE => 0, |
139 | IDENT | T![self] | T![super] | T![crate] | LIFETIME => 2, | 139 | IDENT | T![self] | T![super] | T![crate] | LIFETIME => 2, |
@@ -156,7 +156,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> { | |||
156 | SyntaxElement::Token(it) => is_single_line_ws(it), | 156 | SyntaxElement::Token(it) => is_single_line_ws(it), |
157 | }) | 157 | }) |
158 | .next() | 158 | .next() |
159 | .and_then(|it| it.as_token()) | 159 | .and_then(|it| it.as_token().cloned()) |
160 | .filter(|node| node.kind() == T![,]) | 160 | .filter(|node| node.kind() == T![,]) |
161 | } | 161 | } |
162 | 162 | ||
@@ -167,7 +167,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> { | |||
167 | // Include any following whitespace when comma if after list item. | 167 | // Include any following whitespace when comma if after list item. |
168 | let final_node = comma_node | 168 | let final_node = comma_node |
169 | .next_sibling_or_token() | 169 | .next_sibling_or_token() |
170 | .and_then(|it| it.as_token()) | 170 | .and_then(|it| it.as_token().cloned()) |
171 | .filter(|node| is_single_line_ws(node)) | 171 | .filter(|node| is_single_line_ws(node)) |
172 | .unwrap_or(comma_node); | 172 | .unwrap_or(comma_node); |
173 | 173 | ||
@@ -178,8 +178,8 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> { | |||
178 | } | 178 | } |
179 | 179 | ||
180 | fn extend_comments(comment: ast::Comment) -> Option<TextRange> { | 180 | fn extend_comments(comment: ast::Comment) -> Option<TextRange> { |
181 | let prev = adj_comments(comment, Direction::Prev); | 181 | let prev = adj_comments(&comment, Direction::Prev); |
182 | let next = adj_comments(comment, Direction::Next); | 182 | let next = adj_comments(&comment, Direction::Next); |
183 | if prev != next { | 183 | if prev != next { |
184 | Some(TextRange::from_to(prev.syntax().range().start(), next.syntax().range().end())) | 184 | Some(TextRange::from_to(prev.syntax().range().start(), next.syntax().range().end())) |
185 | } else { | 185 | } else { |
@@ -187,14 +187,14 @@ fn extend_comments(comment: ast::Comment) -> Option<TextRange> { | |||
187 | } | 187 | } |
188 | } | 188 | } |
189 | 189 | ||
190 | fn adj_comments(comment: ast::Comment, dir: Direction) -> ast::Comment { | 190 | fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment { |
191 | let mut res = comment; | 191 | let mut res = comment.clone(); |
192 | for element in comment.syntax().siblings_with_tokens(dir) { | 192 | for element in comment.syntax().siblings_with_tokens(dir) { |
193 | let token = match element.as_token() { | 193 | let token = match element.as_token() { |
194 | None => break, | 194 | None => break, |
195 | Some(token) => token, | 195 | Some(token) => token, |
196 | }; | 196 | }; |
197 | if let Some(c) = ast::Comment::cast(token) { | 197 | if let Some(c) = ast::Comment::cast(token.clone()) { |
198 | res = c | 198 | res = c |
199 | } else if token.kind() != WHITESPACE || token.text().contains("\n\n") { | 199 | } else if token.kind() != WHITESPACE || token.text().contains("\n\n") { |
200 | break; | 200 | break; |
diff --git a/crates/ra_ide_api/src/folding_ranges.rs b/crates/ra_ide_api/src/folding_ranges.rs index 9d4855a64..c2b981aed 100644 --- a/crates/ra_ide_api/src/folding_ranges.rs +++ b/crates/ra_ide_api/src/folding_ranges.rs | |||
@@ -30,7 +30,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> { | |||
30 | for element in file.syntax().descendants_with_tokens() { | 30 | for element in file.syntax().descendants_with_tokens() { |
31 | // Fold items that span multiple lines | 31 | // Fold items that span multiple lines |
32 | if let Some(kind) = fold_kind(element.kind()) { | 32 | if let Some(kind) = fold_kind(element.kind()) { |
33 | let is_multiline = match element { | 33 | let is_multiline = match &element { |
34 | SyntaxElement::Node(node) => node.text().contains('\n'), | 34 | SyntaxElement::Node(node) => node.text().contains('\n'), |
35 | SyntaxElement::Token(token) => token.text().contains('\n'), | 35 | SyntaxElement::Token(token) => token.text().contains('\n'), |
36 | }; | 36 | }; |
@@ -56,7 +56,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> { | |||
56 | SyntaxElement::Node(node) => { | 56 | SyntaxElement::Node(node) => { |
57 | // Fold groups of imports | 57 | // Fold groups of imports |
58 | if node.kind() == USE_ITEM && !visited_imports.contains(&node) { | 58 | if node.kind() == USE_ITEM && !visited_imports.contains(&node) { |
59 | if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) { | 59 | if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) { |
60 | res.push(Fold { range, kind: FoldKind::Imports }) | 60 | res.push(Fold { range, kind: FoldKind::Imports }) |
61 | } | 61 | } |
62 | } | 62 | } |
@@ -65,7 +65,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> { | |||
65 | if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node) | 65 | if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node) |
66 | { | 66 | { |
67 | if let Some(range) = | 67 | if let Some(range) = |
68 | contiguous_range_for_group_unless(node, has_visibility, &mut visited_mods) | 68 | contiguous_range_for_group_unless(&node, has_visibility, &mut visited_mods) |
69 | { | 69 | { |
70 | res.push(Fold { range, kind: FoldKind::Mods }) | 70 | res.push(Fold { range, kind: FoldKind::Mods }) |
71 | } | 71 | } |
@@ -88,24 +88,24 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> { | |||
88 | } | 88 | } |
89 | 89 | ||
90 | fn has_visibility(node: &SyntaxNode) -> bool { | 90 | fn has_visibility(node: &SyntaxNode) -> bool { |
91 | ast::Module::cast(node).and_then(|m| m.visibility()).is_some() | 91 | ast::Module::cast(node.clone()).and_then(|m| m.visibility()).is_some() |
92 | } | 92 | } |
93 | 93 | ||
94 | fn contiguous_range_for_group<'a>( | 94 | fn contiguous_range_for_group( |
95 | first: &'a SyntaxNode, | 95 | first: &SyntaxNode, |
96 | visited: &mut FxHashSet<&'a SyntaxNode>, | 96 | visited: &mut FxHashSet<SyntaxNode>, |
97 | ) -> Option<TextRange> { | 97 | ) -> Option<TextRange> { |
98 | contiguous_range_for_group_unless(first, |_| false, visited) | 98 | contiguous_range_for_group_unless(first, |_| false, visited) |
99 | } | 99 | } |
100 | 100 | ||
101 | fn contiguous_range_for_group_unless<'a>( | 101 | fn contiguous_range_for_group_unless( |
102 | first: &'a SyntaxNode, | 102 | first: &SyntaxNode, |
103 | unless: impl Fn(&'a SyntaxNode) -> bool, | 103 | unless: impl Fn(&SyntaxNode) -> bool, |
104 | visited: &mut FxHashSet<&'a SyntaxNode>, | 104 | visited: &mut FxHashSet<SyntaxNode>, |
105 | ) -> Option<TextRange> { | 105 | ) -> Option<TextRange> { |
106 | visited.insert(first); | 106 | visited.insert(first.clone()); |
107 | 107 | ||
108 | let mut last = first; | 108 | let mut last = first.clone(); |
109 | for element in first.siblings_with_tokens(Direction::Next) { | 109 | for element in first.siblings_with_tokens(Direction::Next) { |
110 | let node = match element { | 110 | let node = match element { |
111 | SyntaxElement::Token(token) => { | 111 | SyntaxElement::Token(token) => { |
@@ -123,15 +123,15 @@ fn contiguous_range_for_group_unless<'a>( | |||
123 | }; | 123 | }; |
124 | 124 | ||
125 | // Stop if we find a node that doesn't belong to the group | 125 | // Stop if we find a node that doesn't belong to the group |
126 | if node.kind() != first.kind() || unless(node) { | 126 | if node.kind() != first.kind() || unless(&node) { |
127 | break; | 127 | break; |
128 | } | 128 | } |
129 | 129 | ||
130 | visited.insert(node); | 130 | visited.insert(node.clone()); |
131 | last = node; | 131 | last = node; |
132 | } | 132 | } |
133 | 133 | ||
134 | if first != last { | 134 | if first != &last { |
135 | Some(TextRange::from_to(first.range().start(), last.range().end())) | 135 | Some(TextRange::from_to(first.range().start(), last.range().end())) |
136 | } else { | 136 | } else { |
137 | // The group consists of only one element, therefore it cannot be folded | 137 | // The group consists of only one element, therefore it cannot be folded |
@@ -139,11 +139,11 @@ fn contiguous_range_for_group_unless<'a>( | |||
139 | } | 139 | } |
140 | } | 140 | } |
141 | 141 | ||
142 | fn contiguous_range_for_comment<'a>( | 142 | fn contiguous_range_for_comment( |
143 | first: ast::Comment<'a>, | 143 | first: ast::Comment, |
144 | visited: &mut FxHashSet<ast::Comment<'a>>, | 144 | visited: &mut FxHashSet<ast::Comment>, |
145 | ) -> Option<TextRange> { | 145 | ) -> Option<TextRange> { |
146 | visited.insert(first); | 146 | visited.insert(first.clone()); |
147 | 147 | ||
148 | // Only fold comments of the same flavor | 148 | // Only fold comments of the same flavor |
149 | let group_kind = first.kind(); | 149 | let group_kind = first.kind(); |
@@ -151,11 +151,11 @@ fn contiguous_range_for_comment<'a>( | |||
151 | return None; | 151 | return None; |
152 | } | 152 | } |
153 | 153 | ||
154 | let mut last = first; | 154 | let mut last = first.clone(); |
155 | for element in first.syntax().siblings_with_tokens(Direction::Next) { | 155 | for element in first.syntax().siblings_with_tokens(Direction::Next) { |
156 | match element { | 156 | match element { |
157 | SyntaxElement::Token(token) => { | 157 | SyntaxElement::Token(token) => { |
158 | if let Some(ws) = ast::Whitespace::cast(token) { | 158 | if let Some(ws) = ast::Whitespace::cast(token.clone()) { |
159 | if !ws.spans_multiple_lines() { | 159 | if !ws.spans_multiple_lines() { |
160 | // Ignore whitespace without blank lines | 160 | // Ignore whitespace without blank lines |
161 | continue; | 161 | continue; |
@@ -163,7 +163,7 @@ fn contiguous_range_for_comment<'a>( | |||
163 | } | 163 | } |
164 | if let Some(c) = ast::Comment::cast(token) { | 164 | if let Some(c) = ast::Comment::cast(token) { |
165 | if c.kind() == group_kind { | 165 | if c.kind() == group_kind { |
166 | visited.insert(c); | 166 | visited.insert(c.clone()); |
167 | last = c; | 167 | last = c; |
168 | continue; | 168 | continue; |
169 | } | 169 | } |
@@ -193,7 +193,7 @@ mod tests { | |||
193 | fn do_check(text: &str, fold_kinds: &[FoldKind]) { | 193 | fn do_check(text: &str, fold_kinds: &[FoldKind]) { |
194 | let (ranges, text) = extract_ranges(text, "fold"); | 194 | let (ranges, text) = extract_ranges(text, "fold"); |
195 | let parse = SourceFile::parse(&text); | 195 | let parse = SourceFile::parse(&text); |
196 | let folds = folding_ranges(parse.tree()); | 196 | let folds = folding_ranges(&parse.tree()); |
197 | 197 | ||
198 | assert_eq!( | 198 | assert_eq!( |
199 | folds.len(), | 199 | folds.len(), |
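The folding code shows the same migration applied to collections: `FxHashSet<&'a SyntaxNode>` becomes `FxHashSet<SyntaxNode>`, the `'a` parameter disappears from the signatures, and nodes are inserted with `node.clone()`. A small sketch of that shape using std's `HashSet` and a hypothetical reference-counted stand-in node:

    use std::collections::HashSet;
    use std::rc::Rc;

    // Hypothetical owned node handle; hashing and equality come from the data.
    #[derive(Clone, PartialEq, Eq, Hash)]
    struct SyntaxNode(Rc<String>);

    // Old sketch: fn visit_group<'a>(first: &'a SyntaxNode, visited: &mut HashSet<&'a SyntaxNode>)
    // New shape: the set owns its keys, so the lifetime parameter disappears.
    fn visit_group(first: &SyntaxNode, visited: &mut HashSet<SyntaxNode>) {
        visited.insert(first.clone()); // cheap: bumps the Rc refcount, no deep copy
    }

    fn main() {
        let node = SyntaxNode(Rc::new("use foo;".to_string()));
        let mut visited = HashSet::new();
        visit_group(&node, &mut visited);
        assert!(visited.contains(&node));
        println!("visited {} node(s)", visited.len());
    }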
diff --git a/crates/ra_ide_api/src/goto_definition.rs b/crates/ra_ide_api/src/goto_definition.rs index 1066bf155..82b5e3b5e 100644 --- a/crates/ra_ide_api/src/goto_definition.rs +++ b/crates/ra_ide_api/src/goto_definition.rs | |||
@@ -20,13 +20,13 @@ pub(crate) fn goto_definition( | |||
20 | position: FilePosition, | 20 | position: FilePosition, |
21 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { | 21 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { |
22 | let parse = db.parse(position.file_id); | 22 | let parse = db.parse(position.file_id); |
23 | let syntax = parse.tree().syntax(); | 23 | let syntax = parse.tree().syntax().clone(); |
24 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { | 24 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&syntax, position.offset) { |
25 | let navs = reference_definition(db, position.file_id, name_ref).to_vec(); | 25 | let navs = reference_definition(db, position.file_id, &name_ref).to_vec(); |
26 | return Some(RangeInfo::new(name_ref.syntax().range(), navs.to_vec())); | 26 | return Some(RangeInfo::new(name_ref.syntax().range(), navs.to_vec())); |
27 | } | 27 | } |
28 | if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) { | 28 | if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) { |
29 | let navs = name_definition(db, position.file_id, name)?; | 29 | let navs = name_definition(db, position.file_id, &name)?; |
30 | return Some(RangeInfo::new(name.syntax().range(), navs)); | 30 | return Some(RangeInfo::new(name.syntax().range(), navs)); |
31 | } | 31 | } |
32 | None | 32 | None |
@@ -94,7 +94,7 @@ pub(crate) fn name_definition( | |||
94 | ) -> Option<Vec<NavigationTarget>> { | 94 | ) -> Option<Vec<NavigationTarget>> { |
95 | let parent = name.syntax().parent()?; | 95 | let parent = name.syntax().parent()?; |
96 | 96 | ||
97 | if let Some(module) = ast::Module::cast(&parent) { | 97 | if let Some(module) = ast::Module::cast(parent.clone()) { |
98 | if module.has_semi() { | 98 | if module.has_semi() { |
99 | if let Some(child_module) = | 99 | if let Some(child_module) = |
100 | hir::source_binder::module_from_declaration(db, file_id, module) | 100 | hir::source_binder::module_from_declaration(db, file_id, module) |
@@ -114,38 +114,88 @@ pub(crate) fn name_definition( | |||
114 | 114 | ||
115 | fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> { | 115 | fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> { |
116 | visitor() | 116 | visitor() |
117 | .visit(|node: &ast::StructDef| { | 117 | .visit(|node: ast::StructDef| { |
118 | NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) | 118 | NavigationTarget::from_named( |
119 | file_id, | ||
120 | &node, | ||
121 | node.doc_comment_text(), | ||
122 | node.short_label(), | ||
123 | ) | ||
119 | }) | 124 | }) |
120 | .visit(|node: &ast::EnumDef| { | 125 | .visit(|node: ast::EnumDef| { |
121 | NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) | 126 | NavigationTarget::from_named( |
127 | file_id, | ||
128 | &node, | ||
129 | node.doc_comment_text(), | ||
130 | node.short_label(), | ||
131 | ) | ||
122 | }) | 132 | }) |
123 | .visit(|node: &ast::EnumVariant| { | 133 | .visit(|node: ast::EnumVariant| { |
124 | NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) | 134 | NavigationTarget::from_named( |
135 | file_id, | ||
136 | &node, | ||
137 | node.doc_comment_text(), | ||
138 | node.short_label(), | ||
139 | ) | ||
125 | }) | 140 | }) |
126 | .visit(|node: &ast::FnDef| { | 141 | .visit(|node: ast::FnDef| { |
127 | NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) | 142 | NavigationTarget::from_named( |
143 | file_id, | ||
144 | &node, | ||
145 | node.doc_comment_text(), | ||
146 | node.short_label(), | ||
147 | ) | ||
128 | }) | 148 | }) |
129 | .visit(|node: &ast::TypeAliasDef| { | 149 | .visit(|node: ast::TypeAliasDef| { |
130 | NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) | 150 | NavigationTarget::from_named( |
151 | file_id, | ||
152 | &node, | ||
153 | node.doc_comment_text(), | ||
154 | node.short_label(), | ||
155 | ) | ||
131 | }) | 156 | }) |
132 | .visit(|node: &ast::ConstDef| { | 157 | .visit(|node: ast::ConstDef| { |
133 | NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) | 158 | NavigationTarget::from_named( |
159 | file_id, | ||
160 | &node, | ||
161 | node.doc_comment_text(), | ||
162 | node.short_label(), | ||
163 | ) | ||
134 | }) | 164 | }) |
135 | .visit(|node: &ast::StaticDef| { | 165 | .visit(|node: ast::StaticDef| { |
136 | NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) | 166 | NavigationTarget::from_named( |
167 | file_id, | ||
168 | &node, | ||
169 | node.doc_comment_text(), | ||
170 | node.short_label(), | ||
171 | ) | ||
137 | }) | 172 | }) |
138 | .visit(|node: &ast::TraitDef| { | 173 | .visit(|node: ast::TraitDef| { |
139 | NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) | 174 | NavigationTarget::from_named( |
175 | file_id, | ||
176 | &node, | ||
177 | node.doc_comment_text(), | ||
178 | node.short_label(), | ||
179 | ) | ||
140 | }) | 180 | }) |
141 | .visit(|node: &ast::NamedFieldDef| { | 181 | .visit(|node: ast::NamedFieldDef| { |
142 | NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) | 182 | NavigationTarget::from_named( |
183 | file_id, | ||
184 | &node, | ||
185 | node.doc_comment_text(), | ||
186 | node.short_label(), | ||
187 | ) | ||
143 | }) | 188 | }) |
144 | .visit(|node: &ast::Module| { | 189 | .visit(|node: ast::Module| { |
145 | NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) | 190 | NavigationTarget::from_named( |
191 | file_id, | ||
192 | &node, | ||
193 | node.doc_comment_text(), | ||
194 | node.short_label(), | ||
195 | ) | ||
146 | }) | 196 | }) |
147 | .visit(|node: &ast::MacroCall| { | 197 | .visit(|node: ast::MacroCall| { |
148 | NavigationTarget::from_named(file_id, node, node.doc_comment_text(), None) | 198 | NavigationTarget::from_named(file_id, &node, node.doc_comment_text(), None) |
149 | }) | 199 | }) |
150 | .accept(node) | 200 | .accept(node) |
151 | } | 201 | } |
diff --git a/crates/ra_ide_api/src/goto_type_definition.rs b/crates/ra_ide_api/src/goto_type_definition.rs index 6ce5e214f..fc4b6e1af 100644 --- a/crates/ra_ide_api/src/goto_type_definition.rs +++ b/crates/ra_ide_api/src/goto_type_definition.rs | |||
@@ -13,15 +13,17 @@ pub(crate) fn goto_type_definition( | |||
13 | token | 13 | token |
14 | .parent() | 14 | .parent() |
15 | .ancestors() | 15 | .ancestors() |
16 | .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some()) | 16 | .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()) |
17 | })?; | 17 | })?; |
18 | 18 | ||
19 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, node, None); | 19 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, &node, None); |
20 | 20 | ||
21 | let ty: hir::Ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) | 21 | let ty: hir::Ty = if let Some(ty) = |
22 | ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e)) | ||
22 | { | 23 | { |
23 | ty | 24 | ty |
24 | } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) { | 25 | } else if let Some(ty) = ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p)) |
26 | { | ||
25 | ty | 27 | ty |
26 | } else { | 28 | } else { |
27 | return None; | 29 | return None; |
diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs index 253d21f48..e503bf6a9 100644 --- a/crates/ra_ide_api/src/hover.rs +++ b/crates/ra_ide_api/src/hover.rs | |||
@@ -6,7 +6,7 @@ use ra_syntax::{ | |||
6 | visit::{visitor, Visitor}, | 6 | visit::{visitor, Visitor}, |
7 | }, | 7 | }, |
8 | ast::{self, DocCommentsOwner}, | 8 | ast::{self, DocCommentsOwner}, |
9 | AstNode, TreeArc, | 9 | AstNode, |
10 | }; | 10 | }; |
11 | 11 | ||
12 | use crate::{ | 12 | use crate::{ |
@@ -104,7 +104,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn | |||
104 | 104 | ||
105 | let mut no_fallback = false; | 105 | let mut no_fallback = false; |
106 | 106 | ||
107 | match classify_name_ref(db, &analyzer, name_ref) { | 107 | match classify_name_ref(db, &analyzer, &name_ref) { |
108 | Some(Method(it)) => res.extend(from_def_source(db, it)), | 108 | Some(Method(it)) => res.extend(from_def_source(db, it)), |
109 | Some(Macro(it)) => { | 109 | Some(Macro(it)) => { |
110 | let src = it.source(db); | 110 | let src = it.source(db); |
@@ -163,7 +163,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn | |||
163 | 163 | ||
164 | if res.is_empty() && !no_fallback { | 164 | if res.is_empty() && !no_fallback { |
165 | // Fallback index based approach: | 165 | // Fallback index based approach: |
166 | let symbols = crate::symbol_index::index_resolve(db, name_ref); | 166 | let symbols = crate::symbol_index::index_resolve(db, &name_ref); |
167 | for sym in symbols { | 167 | for sym in symbols { |
168 | let docs = docs_from_symbol(db, &sym); | 168 | let docs = docs_from_symbol(db, &sym); |
169 | let desc = description_from_symbol(db, &sym); | 169 | let desc = description_from_symbol(db, &sym); |
@@ -177,34 +177,32 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn | |||
177 | } else if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) { | 177 | } else if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) { |
178 | if let Some(parent) = name.syntax().parent() { | 178 | if let Some(parent) = name.syntax().parent() { |
179 | let text = visitor() | 179 | let text = visitor() |
180 | .visit(|node: &ast::StructDef| { | 180 | .visit(|node: ast::StructDef| { |
181 | hover_text(node.doc_comment_text(), node.short_label()) | 181 | hover_text(node.doc_comment_text(), node.short_label()) |
182 | }) | 182 | }) |
183 | .visit(|node: &ast::EnumDef| { | 183 | .visit(|node: ast::EnumDef| hover_text(node.doc_comment_text(), node.short_label())) |
184 | .visit(|node: ast::EnumVariant| { | ||
184 | hover_text(node.doc_comment_text(), node.short_label()) | 185 | hover_text(node.doc_comment_text(), node.short_label()) |
185 | }) | 186 | }) |
186 | .visit(|node: &ast::EnumVariant| { | 187 | .visit(|node: ast::FnDef| hover_text(node.doc_comment_text(), node.short_label())) |
188 | .visit(|node: ast::TypeAliasDef| { | ||
187 | hover_text(node.doc_comment_text(), node.short_label()) | 189 | hover_text(node.doc_comment_text(), node.short_label()) |
188 | }) | 190 | }) |
189 | .visit(|node: &ast::FnDef| hover_text(node.doc_comment_text(), node.short_label())) | 191 | .visit(|node: ast::ConstDef| { |
190 | .visit(|node: &ast::TypeAliasDef| { | ||
191 | hover_text(node.doc_comment_text(), node.short_label()) | 192 | hover_text(node.doc_comment_text(), node.short_label()) |
192 | }) | 193 | }) |
193 | .visit(|node: &ast::ConstDef| { | 194 | .visit(|node: ast::StaticDef| { |
194 | hover_text(node.doc_comment_text(), node.short_label()) | 195 | hover_text(node.doc_comment_text(), node.short_label()) |
195 | }) | 196 | }) |
196 | .visit(|node: &ast::StaticDef| { | 197 | .visit(|node: ast::TraitDef| { |
197 | hover_text(node.doc_comment_text(), node.short_label()) | 198 | hover_text(node.doc_comment_text(), node.short_label()) |
198 | }) | 199 | }) |
199 | .visit(|node: &ast::TraitDef| { | 200 | .visit(|node: ast::NamedFieldDef| { |
200 | hover_text(node.doc_comment_text(), node.short_label()) | 201 | hover_text(node.doc_comment_text(), node.short_label()) |
201 | }) | 202 | }) |
202 | .visit(|node: &ast::NamedFieldDef| { | 203 | .visit(|node: ast::Module| hover_text(node.doc_comment_text(), node.short_label())) |
203 | hover_text(node.doc_comment_text(), node.short_label()) | 204 | .visit(|node: ast::MacroCall| hover_text(node.doc_comment_text(), None)) |
204 | }) | 205 | .accept(&parent); |
205 | .visit(|node: &ast::Module| hover_text(node.doc_comment_text(), node.short_label())) | ||
206 | .visit(|node: &ast::MacroCall| hover_text(node.doc_comment_text(), None)) | ||
207 | .accept(parent); | ||
208 | 206 | ||
209 | if let Some(text) = text { | 207 | if let Some(text) = text { |
210 | res.extend(text); | 208 | res.extend(text); |
@@ -217,8 +215,9 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn | |||
217 | } | 215 | } |
218 | 216 | ||
219 | if range.is_none() { | 217 | if range.is_none() { |
220 | let node = ancestors_at_offset(file.syntax(), position.offset) | 218 | let node = ancestors_at_offset(file.syntax(), position.offset).find(|n| { |
221 | .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())?; | 219 | ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some() |
220 | })?; | ||
222 | let frange = FileRange { file_id: position.file_id, range: node.range() }; | 221 | let frange = FileRange { file_id: position.file_id, range: node.range() }; |
223 | res.extend(type_of(db, frange).map(rust_code_markup)); | 222 | res.extend(type_of(db, frange).map(rust_code_markup)); |
224 | range = Some(node.range()); | 223 | range = Some(node.range()); |
@@ -233,7 +232,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn | |||
233 | 232 | ||
234 | fn from_def_source<A, D>(db: &RootDatabase, def: D) -> Option<String> | 233 | fn from_def_source<A, D>(db: &RootDatabase, def: D) -> Option<String> |
235 | where | 234 | where |
236 | D: HasSource<Ast = TreeArc<A>>, | 235 | D: HasSource<Ast = A>, |
237 | A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, | 236 | A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, |
238 | { | 237 | { |
239 | let src = def.source(db); | 238 | let src = def.source(db); |
@@ -243,17 +242,17 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn | |||
243 | 242 | ||
244 | pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { | 243 | pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { |
245 | let parse = db.parse(frange.file_id); | 244 | let parse = db.parse(frange.file_id); |
246 | let syntax = parse.tree().syntax(); | 245 | let leaf_node = find_covering_element(parse.tree().syntax(), frange.range); |
247 | let leaf_node = find_covering_element(syntax, frange.range); | ||
248 | // if we picked identifier, expand to pattern/expression | 246 | // if we picked identifier, expand to pattern/expression |
249 | let node = leaf_node | 247 | let node = leaf_node |
250 | .ancestors() | 248 | .ancestors() |
251 | .take_while(|it| it.range() == leaf_node.range()) | 249 | .take_while(|it| it.range() == leaf_node.range()) |
252 | .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())?; | 250 | .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?; |
253 | let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, node, None); | 251 | let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, &node, None); |
254 | let ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) { | 252 | let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e)) |
253 | { | ||
255 | ty | 254 | ty |
256 | } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) { | 255 | } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) { |
257 | ty | 256 | ty |
258 | } else { | 257 | } else { |
259 | return None; | 258 | return None; |
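The hover/type_of hunk above shows the calling convention that recurs throughout this change: AST nodes are now owned handles, `AstNode::cast` consumes a `SyntaxNode`, and analyzer methods take the node by reference. A minimal sketch of that pattern, assuming only the shapes visible in this hunk (the helper name below is illustrative, not part of the change):

    fn describe(node: SyntaxNode) -> Option<String> {
        // `cast` takes the node by value, so clone it for the first attempt
        // to keep `node` available for the second one.
        if let Some(expr) = ast::Expr::cast(node.clone()) {
            return Some(expr.syntax().text().to_string());
        }
        ast::Pat::cast(node).map(|pat| pat.syntax().text().to_string())
    }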
diff --git a/crates/ra_ide_api/src/impls.rs b/crates/ra_ide_api/src/impls.rs index 6d69f36aa..15999d433 100644 --- a/crates/ra_ide_api/src/impls.rs +++ b/crates/ra_ide_api/src/impls.rs | |||
@@ -9,19 +9,19 @@ pub(crate) fn goto_implementation( | |||
9 | position: FilePosition, | 9 | position: FilePosition, |
10 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { | 10 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { |
11 | let parse = db.parse(position.file_id); | 11 | let parse = db.parse(position.file_id); |
12 | let syntax = parse.tree().syntax(); | 12 | let syntax = parse.tree().syntax().clone(); |
13 | 13 | ||
14 | let module = source_binder::module_from_position(db, position)?; | 14 | let module = source_binder::module_from_position(db, position)?; |
15 | 15 | ||
16 | if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(syntax, position.offset) { | 16 | if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) { |
17 | return Some(RangeInfo::new( | 17 | return Some(RangeInfo::new( |
18 | nominal_def.syntax().range(), | 18 | nominal_def.syntax().range(), |
19 | impls_for_def(db, nominal_def, module)?, | 19 | impls_for_def(db, &nominal_def, module)?, |
20 | )); | 20 | )); |
21 | } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(syntax, position.offset) { | 21 | } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) { |
22 | return Some(RangeInfo::new( | 22 | return Some(RangeInfo::new( |
23 | trait_def.syntax().range(), | 23 | trait_def.syntax().range(), |
24 | impls_for_trait(db, trait_def, module)?, | 24 | impls_for_trait(db, &trait_def, module)?, |
25 | )); | 25 | )); |
26 | } | 26 | } |
27 | 27 | ||
diff --git a/crates/ra_ide_api/src/join_lines.rs b/crates/ra_ide_api/src/join_lines.rs index e20cb1370..50bcfb5b7 100644 --- a/crates/ra_ide_api/src/join_lines.rs +++ b/crates/ra_ide_api/src/join_lines.rs | |||
@@ -27,7 +27,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { | |||
27 | SyntaxElement::Token(token) => token.parent(), | 27 | SyntaxElement::Token(token) => token.parent(), |
28 | }; | 28 | }; |
29 | let mut edit = TextEditBuilder::default(); | 29 | let mut edit = TextEditBuilder::default(); |
30 | for token in node.descendants_with_tokens().filter_map(|it| it.as_token()) { | 30 | for token in node.descendants_with_tokens().filter_map(|it| it.as_token().cloned()) { |
31 | let range = match range.intersection(&token.range()) { | 31 | let range = match range.intersection(&token.range()) { |
32 | Some(range) => range, | 32 | Some(range) => range, |
33 | None => continue, | 33 | None => continue, |
@@ -37,7 +37,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { | |||
37 | let pos: TextUnit = (pos as u32).into(); | 37 | let pos: TextUnit = (pos as u32).into(); |
38 | let off = token.range().start() + range.start() + pos; | 38 | let off = token.range().start() + range.start() + pos; |
39 | if !edit.invalidates_offset(off) { | 39 | if !edit.invalidates_offset(off) { |
40 | remove_newline(&mut edit, token, off); | 40 | remove_newline(&mut edit, &token, off); |
41 | } | 41 | } |
42 | } | 42 | } |
43 | } | 43 | } |
@@ -45,7 +45,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { | |||
45 | edit.finish() | 45 | edit.finish() |
46 | } | 46 | } |
47 | 47 | ||
48 | fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUnit) { | 48 | fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextUnit) { |
49 | if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { | 49 | if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { |
50 | // The node is either the first or the last in the file | 50 | // The node is either the first or the last in the file |
51 | let suff = &token.text()[TextRange::from_to( | 51 | let suff = &token.text()[TextRange::from_to( |
@@ -98,9 +98,10 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn | |||
98 | TextRange::from_to(prev.range().start(), token.range().end()), | 98 | TextRange::from_to(prev.range().start(), token.range().end()), |
99 | space.to_string(), | 99 | space.to_string(), |
100 | ); | 100 | ); |
101 | } else if let (Some(_), Some(next)) = | 101 | } else if let (Some(_), Some(next)) = ( |
102 | (prev.as_token().and_then(ast::Comment::cast), next.as_token().and_then(ast::Comment::cast)) | 102 | prev.as_token().cloned().and_then(ast::Comment::cast), |
103 | { | 103 | next.as_token().cloned().and_then(ast::Comment::cast), |
104 | ) { | ||
104 | // Removes: newline (incl. surrounding whitespace), start of the next comment | 105 | // Removes: newline (incl. surrounding whitespace), start of the next comment |
105 | edit.delete(TextRange::from_to( | 106 | edit.delete(TextRange::from_to( |
106 | token.range().start(), | 107 | token.range().start(), |
@@ -113,16 +114,16 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn | |||
113 | } | 114 | } |
114 | 115 | ||
115 | fn has_comma_after(node: &SyntaxNode) -> bool { | 116 | fn has_comma_after(node: &SyntaxNode) -> bool { |
116 | match non_trivia_sibling(node.into(), Direction::Next) { | 117 | match non_trivia_sibling(node.clone().into(), Direction::Next) { |
117 | Some(n) => n.kind() == T![,], | 118 | Some(n) => n.kind() == T![,], |
118 | _ => false, | 119 | _ => false, |
119 | } | 120 | } |
120 | } | 121 | } |
121 | 122 | ||
122 | fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> { | 123 | fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { |
123 | let block = ast::Block::cast(token.parent())?; | 124 | let block = ast::Block::cast(token.parent())?; |
124 | let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; | 125 | let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; |
125 | let expr = extract_trivial_expression(block)?; | 126 | let expr = extract_trivial_expression(&block)?; |
126 | 127 | ||
127 | let block_range = block_expr.syntax().range(); | 128 | let block_range = block_expr.syntax().range(); |
128 | let mut buf = expr.syntax().text().to_string(); | 129 | let mut buf = expr.syntax().text().to_string(); |
@@ -139,7 +140,7 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Opt | |||
139 | Some(()) | 140 | Some(()) |
140 | } | 141 | } |
141 | 142 | ||
142 | fn join_single_use_tree(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> { | 143 | fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { |
143 | let use_tree_list = ast::UseTreeList::cast(token.parent())?; | 144 | let use_tree_list = ast::UseTreeList::cast(token.parent())?; |
144 | let (tree,) = use_tree_list.use_trees().collect_tuple()?; | 145 | let (tree,) = use_tree_list.use_trees().collect_tuple()?; |
145 | edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string()); | 146 | edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string()); |
@@ -504,7 +505,7 @@ fn foo() { | |||
504 | fn check_join_lines_sel(before: &str, after: &str) { | 505 | fn check_join_lines_sel(before: &str, after: &str) { |
505 | let (sel, before) = extract_range(before); | 506 | let (sel, before) = extract_range(before); |
506 | let parse = SourceFile::parse(&before); | 507 | let parse = SourceFile::parse(&before); |
507 | let result = join_lines(parse.tree(), sel); | 508 | let result = join_lines(&parse.tree(), sel); |
508 | let actual = result.apply(&before); | 509 | let actual = result.apply(&before); |
509 | assert_eq_text!(after, &actual); | 510 | assert_eq_text!(after, &actual); |
510 | } | 511 | } |
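Tokens follow the same ownership rule as nodes in the join_lines hunk: `descendants_with_tokens()` yields owned `SyntaxElement`s, `as_token()` hands back a borrow, and helpers such as `remove_newline` now take `&SyntaxToken`. A hedged sketch of that iteration pattern, reusing only the calls that appear in the hunk (the function name is illustrative):

    fn whitespace_tokens(node: &SyntaxNode) -> Vec<SyntaxToken> {
        node.descendants_with_tokens()
            // each element is owned; `as_token()` borrows it, so clone to keep it
            .filter_map(|element| element.as_token().cloned())
            .filter(|token| token.kind() == WHITESPACE)
            .collect()
    }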
diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs index 9f3b18d9d..c54d574bc 100644 --- a/crates/ra_ide_api/src/lib.rs +++ b/crates/ra_ide_api/src/lib.rs | |||
@@ -50,7 +50,7 @@ use ra_db::{ | |||
50 | salsa::{self, ParallelDatabase}, | 50 | salsa::{self, ParallelDatabase}, |
51 | CheckCanceled, SourceDatabase, | 51 | CheckCanceled, SourceDatabase, |
52 | }; | 52 | }; |
53 | use ra_syntax::{SourceFile, TextRange, TextUnit, TreeArc}; | 53 | use ra_syntax::{SourceFile, TextRange, TextUnit}; |
54 | use ra_text_edit::TextEdit; | 54 | use ra_text_edit::TextEdit; |
55 | use relative_path::RelativePathBuf; | 55 | use relative_path::RelativePathBuf; |
56 | 56 | ||
@@ -325,8 +325,8 @@ impl Analysis { | |||
325 | } | 325 | } |
326 | 326 | ||
327 | /// Gets the syntax tree of the file. | 327 | /// Gets the syntax tree of the file. |
328 | pub fn parse(&self, file_id: FileId) -> TreeArc<SourceFile> { | 328 | pub fn parse(&self, file_id: FileId) -> SourceFile { |
329 | self.db.parse(file_id).tree().to_owned() | 329 | self.db.parse(file_id).tree() |
330 | } | 330 | } |
331 | 331 | ||
332 | /// Gets the file's `LineIndex`: data structure to convert between absolute | 332 | /// Gets the file's `LineIndex`: data structure to convert between absolute |
@@ -360,7 +360,7 @@ impl Analysis { | |||
360 | let parse = self.db.parse(frange.file_id); | 360 | let parse = self.db.parse(frange.file_id); |
361 | let file_edit = SourceFileEdit { | 361 | let file_edit = SourceFileEdit { |
362 | file_id: frange.file_id, | 362 | file_id: frange.file_id, |
363 | edit: join_lines::join_lines(parse.tree(), frange.range), | 363 | edit: join_lines::join_lines(&parse.tree(), frange.range), |
364 | }; | 364 | }; |
365 | SourceChange::source_file_edit("join lines", file_edit) | 365 | SourceChange::source_file_edit("join lines", file_edit) |
366 | } | 366 | } |
@@ -393,13 +393,13 @@ impl Analysis { | |||
393 | /// file outline. | 393 | /// file outline. |
394 | pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { | 394 | pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { |
395 | let parse = self.db.parse(file_id); | 395 | let parse = self.db.parse(file_id); |
396 | file_structure(parse.tree()) | 396 | file_structure(&parse.tree()) |
397 | } | 397 | } |
398 | 398 | ||
399 | /// Returns the set of folding ranges. | 399 | /// Returns the set of folding ranges. |
400 | pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> { | 400 | pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> { |
401 | let parse = self.db.parse(file_id); | 401 | let parse = self.db.parse(file_id); |
402 | folding_ranges::folding_ranges(parse.tree()) | 402 | folding_ranges::folding_ranges(&parse.tree()) |
403 | } | 403 | } |
404 | 404 | ||
405 | /// Fuzzy searches for a symbol. | 405 | /// Fuzzy searches for a symbol. |
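With `TreeArc` gone from the public surface, `Analysis::parse` returns the `SourceFile` by value and the old `tree().to_owned()` step disappears; read-only helpers are handed a `&SourceFile` instead. A small in-crate usage sketch under those assumptions (the wrapper name is made up; the body mirrors `Analysis::file_structure` above):

    // Parse once, then borrow the owned tree for read-only helpers.
    fn structure_of(db: &RootDatabase, file_id: FileId) -> Vec<StructureNode> {
        let parse = db.parse(file_id);
        file_structure(&parse.tree())
    }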
diff --git a/crates/ra_ide_api/src/matching_brace.rs b/crates/ra_ide_api/src/matching_brace.rs index 455a5c891..102327fd7 100644 --- a/crates/ra_ide_api/src/matching_brace.rs +++ b/crates/ra_ide_api/src/matching_brace.rs | |||
@@ -26,7 +26,7 @@ mod tests { | |||
26 | fn do_check(before: &str, after: &str) { | 26 | fn do_check(before: &str, after: &str) { |
27 | let (pos, before) = extract_offset(before); | 27 | let (pos, before) = extract_offset(before); |
28 | let parse = SourceFile::parse(&before); | 28 | let parse = SourceFile::parse(&before); |
29 | let new_pos = match matching_brace(parse.tree(), pos) { | 29 | let new_pos = match matching_brace(&parse.tree(), pos) { |
30 | None => pos, | 30 | None => pos, |
31 | Some(pos) => pos, | 31 | Some(pos) => pos, |
32 | }; | 32 | }; |
diff --git a/crates/ra_ide_api/src/name_ref_kind.rs b/crates/ra_ide_api/src/name_ref_kind.rs index 67381c9c8..6832acf5d 100644 --- a/crates/ra_ide_api/src/name_ref_kind.rs +++ b/crates/ra_ide_api/src/name_ref_kind.rs | |||
@@ -26,7 +26,7 @@ pub(crate) fn classify_name_ref( | |||
26 | // Check if it is a method | 26 | // Check if it is a method |
27 | if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) { | 27 | if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) { |
28 | tested_by!(goto_definition_works_for_methods); | 28 | tested_by!(goto_definition_works_for_methods); |
29 | if let Some(func) = analyzer.resolve_method_call(method_call) { | 29 | if let Some(func) = analyzer.resolve_method_call(&method_call) { |
30 | return Some(Method(func)); | 30 | return Some(Method(func)); |
31 | } | 31 | } |
32 | } | 32 | } |
@@ -40,7 +40,7 @@ pub(crate) fn classify_name_ref( | |||
40 | .and_then(ast::MacroCall::cast) | 40 | .and_then(ast::MacroCall::cast) |
41 | { | 41 | { |
42 | tested_by!(goto_definition_works_for_macros); | 42 | tested_by!(goto_definition_works_for_macros); |
43 | if let Some(mac) = analyzer.resolve_macro_call(db, macro_call) { | 43 | if let Some(mac) = analyzer.resolve_macro_call(db, &macro_call) {
44 | return Some(Macro(mac)); | 44 | return Some(Macro(mac)); |
45 | } | 45 | } |
46 | } | 46 | } |
@@ -48,7 +48,7 @@ pub(crate) fn classify_name_ref( | |||
48 | // It could also be a field access | 48 | // It could also be a field access |
49 | if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) { | 49 | if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) { |
50 | tested_by!(goto_definition_works_for_fields); | 50 | tested_by!(goto_definition_works_for_fields); |
51 | if let Some(field) = analyzer.resolve_field(field_expr) { | 51 | if let Some(field) = analyzer.resolve_field(&field_expr) { |
52 | return Some(FieldAccess(field)); | 52 | return Some(FieldAccess(field)); |
53 | }; | 53 | }; |
54 | } | 54 | } |
@@ -59,7 +59,7 @@ pub(crate) fn classify_name_ref( | |||
59 | 59 | ||
60 | let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast); | 60 | let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast); |
61 | 61 | ||
62 | if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, lit.into())) { | 62 | if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, &lit.into())) { |
63 | if let Some((hir::AdtDef::Struct(s), _)) = ty.as_adt() { | 63 | if let Some((hir::AdtDef::Struct(s), _)) = ty.as_adt() { |
64 | let hir_path = hir::Path::from_name_ref(name_ref); | 64 | let hir_path = hir::Path::from_name_ref(name_ref); |
65 | let hir_name = hir_path.as_ident().unwrap(); | 65 | let hir_name = hir_path.as_ident().unwrap(); |
@@ -73,7 +73,7 @@ pub(crate) fn classify_name_ref( | |||
73 | 73 | ||
74 | // General case, a path or a local: | 74 | // General case, a path or a local: |
75 | if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) { | 75 | if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) { |
76 | if let Some(resolved) = analyzer.resolve_path(db, path) { | 76 | if let Some(resolved) = analyzer.resolve_path(db, &path) { |
77 | return match resolved { | 77 | return match resolved { |
78 | hir::PathResolution::Def(def) => Some(Def(def)), | 78 | hir::PathResolution::Def(def) => Some(Def(def)), |
79 | hir::PathResolution::LocalBinding(Either::A(pat)) => Some(Pat(pat)), | 79 | hir::PathResolution::LocalBinding(Either::A(pat)) => Some(Pat(pat)), |
diff --git a/crates/ra_ide_api/src/references.rs b/crates/ra_ide_api/src/references.rs index 0af1ae811..1c4cd49dc 100644 --- a/crates/ra_ide_api/src/references.rs +++ b/crates/ra_ide_api/src/references.rs | |||
@@ -50,11 +50,11 @@ pub(crate) fn find_all_refs( | |||
50 | position: FilePosition, | 50 | position: FilePosition, |
51 | ) -> Option<ReferenceSearchResult> { | 51 | ) -> Option<ReferenceSearchResult> { |
52 | let parse = db.parse(position.file_id); | 52 | let parse = db.parse(position.file_id); |
53 | let (binding, analyzer) = find_binding(db, parse.tree(), position)?; | 53 | let (binding, analyzer) = find_binding(db, &parse.tree(), position)?; |
54 | let declaration = NavigationTarget::from_bind_pat(position.file_id, binding); | 54 | let declaration = NavigationTarget::from_bind_pat(position.file_id, &binding); |
55 | 55 | ||
56 | let references = analyzer | 56 | let references = analyzer |
57 | .find_all_refs(binding) | 57 | .find_all_refs(&binding) |
58 | .into_iter() | 58 | .into_iter() |
59 | .map(move |ref_desc| FileRange { file_id: position.file_id, range: ref_desc.range }) | 59 | .map(move |ref_desc| FileRange { file_id: position.file_id, range: ref_desc.range }) |
60 | .collect::<Vec<_>>(); | 60 | .collect::<Vec<_>>(); |
@@ -63,9 +63,9 @@ pub(crate) fn find_all_refs( | |||
63 | 63 | ||
64 | fn find_binding<'a>( | 64 | fn find_binding<'a>( |
65 | db: &RootDatabase, | 65 | db: &RootDatabase, |
66 | source_file: &'a SourceFile, | 66 | source_file: &SourceFile, |
67 | position: FilePosition, | 67 | position: FilePosition, |
68 | ) -> Option<(&'a ast::BindPat, hir::SourceAnalyzer)> { | 68 | ) -> Option<(ast::BindPat, hir::SourceAnalyzer)> { |
69 | let syntax = source_file.syntax(); | 69 | let syntax = source_file.syntax(); |
70 | if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) { | 70 | if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) { |
71 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, binding.syntax(), None); | 71 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, binding.syntax(), None); |
@@ -73,7 +73,7 @@ pub(crate) fn find_all_refs( | |||
73 | }; | 73 | }; |
74 | let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?; | 74 | let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?; |
75 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); | 75 | let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); |
76 | let resolved = analyzer.resolve_local_name(name_ref)?; | 76 | let resolved = analyzer.resolve_local_name(&name_ref)?; |
77 | if let Either::A(ptr) = resolved.ptr() { | 77 | if let Either::A(ptr) = resolved.ptr() { |
78 | if let ast::PatKind::BindPat(binding) = ptr.to_node(source_file.syntax()).kind() { | 78 | if let ast::PatKind::BindPat(binding) = ptr.to_node(source_file.syntax()).kind() { |
79 | return Some((binding, analyzer)); | 79 | return Some((binding, analyzer)); |
@@ -89,10 +89,10 @@ pub(crate) fn rename( | |||
89 | new_name: &str, | 89 | new_name: &str, |
90 | ) -> Option<SourceChange> { | 90 | ) -> Option<SourceChange> { |
91 | let parse = db.parse(position.file_id); | 91 | let parse = db.parse(position.file_id); |
92 | let syntax = parse.tree().syntax(); | 92 | if let Some((ast_name, ast_module)) = |
93 | 93 | find_name_and_module_at_offset(parse.tree().syntax(), position) | |
94 | if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) { | 94 | { |
95 | rename_mod(db, ast_name, ast_module, position, new_name) | 95 | rename_mod(db, &ast_name, &ast_module, position, new_name) |
96 | } else { | 96 | } else { |
97 | rename_reference(db, position, new_name) | 97 | rename_reference(db, position, new_name) |
98 | } | 98 | } |
@@ -101,14 +101,10 @@ pub(crate) fn rename( | |||
101 | fn find_name_and_module_at_offset( | 101 | fn find_name_and_module_at_offset( |
102 | syntax: &SyntaxNode, | 102 | syntax: &SyntaxNode, |
103 | position: FilePosition, | 103 | position: FilePosition, |
104 | ) -> Option<(&ast::Name, &ast::Module)> { | 104 | ) -> Option<(ast::Name, ast::Module)> { |
105 | let ast_name = find_node_at_offset::<ast::Name>(syntax, position.offset); | 105 | let ast_name = find_node_at_offset::<ast::Name>(syntax, position.offset)?; |
106 | let ast_name_parent = ast::Module::cast(ast_name?.syntax().parent()?); | 106 | let ast_module = ast::Module::cast(ast_name.syntax().parent()?)?; |
107 | 107 | Some((ast_name, ast_module)) | |
108 | if let (Some(ast_module), Some(name)) = (ast_name_parent, ast_name) { | ||
109 | return Some((name, ast_module)); | ||
110 | } | ||
111 | None | ||
112 | } | 108 | } |
113 | 109 | ||
114 | fn source_edit_from_fileid_range( | 110 | fn source_edit_from_fileid_range( |
@@ -135,7 +131,8 @@ fn rename_mod( | |||
135 | ) -> Option<SourceChange> { | 131 | ) -> Option<SourceChange> { |
136 | let mut source_file_edits = Vec::new(); | 132 | let mut source_file_edits = Vec::new(); |
137 | let mut file_system_edits = Vec::new(); | 133 | let mut file_system_edits = Vec::new(); |
138 | if let Some(module) = source_binder::module_from_declaration(db, position.file_id, &ast_module) | 134 | if let Some(module) = |
135 | source_binder::module_from_declaration(db, position.file_id, ast_module.clone()) | ||
139 | { | 136 | { |
140 | let src = module.definition_source(db); | 137 | let src = module.definition_source(db); |
141 | let file_id = src.file_id.as_original_file(); | 138 | let file_id = src.file_id.as_original_file(); |
diff --git a/crates/ra_ide_api/src/runnables.rs b/crates/ra_ide_api/src/runnables.rs index 8cb859b37..200958434 100644 --- a/crates/ra_ide_api/src/runnables.rs +++ b/crates/ra_ide_api/src/runnables.rs | |||
@@ -26,8 +26,8 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { | |||
26 | parse.tree().syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect() | 26 | parse.tree().syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect() |
27 | } | 27 | } |
28 | 28 | ||
29 | fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Runnable> { | 29 | fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNode) -> Option<Runnable> { |
30 | if let Some(fn_def) = ast::FnDef::cast(item) { | 30 | if let Some(fn_def) = ast::FnDef::cast(item.clone()) { |
31 | runnable_fn(fn_def) | 31 | runnable_fn(fn_def) |
32 | } else if let Some(m) = ast::Module::cast(item) { | 32 | } else if let Some(m) = ast::Module::cast(item) { |
33 | runnable_mod(db, file_id, m) | 33 | runnable_mod(db, file_id, m) |
@@ -36,8 +36,8 @@ fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Run | |||
36 | } | 36 | } |
37 | } | 37 | } |
38 | 38 | ||
39 | fn runnable_fn(fn_def: &ast::FnDef) -> Option<Runnable> { | 39 | fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> { |
40 | let name = fn_def.name()?.text(); | 40 | let name = fn_def.name()?.text().clone(); |
41 | let kind = if name == "main" { | 41 | let kind = if name == "main" { |
42 | RunnableKind::Bin | 42 | RunnableKind::Bin |
43 | } else if fn_def.has_atom_attr("test") { | 43 | } else if fn_def.has_atom_attr("test") { |
@@ -50,7 +50,7 @@ fn runnable_fn(fn_def: &ast::FnDef) -> Option<Runnable> { | |||
50 | Some(Runnable { range: fn_def.syntax().range(), kind }) | 50 | Some(Runnable { range: fn_def.syntax().range(), kind }) |
51 | } | 51 | } |
52 | 52 | ||
53 | fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option<Runnable> { | 53 | fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> { |
54 | let has_test_function = module | 54 | let has_test_function = module |
55 | .item_list()? | 55 | .item_list()? |
56 | .items() | 56 | .items() |
diff --git a/crates/ra_ide_api/src/status.rs b/crates/ra_ide_api/src/status.rs index a31e15245..d533d1742 100644 --- a/crates/ra_ide_api/src/status.rs +++ b/crates/ra_ide_api/src/status.rs | |||
@@ -104,7 +104,7 @@ impl FromIterator<TableEntry<MacroFile, Option<Parse<SyntaxNode>>>> for SyntaxTr | |||
104 | let mut res = SyntaxTreeStats::default(); | 104 | let mut res = SyntaxTreeStats::default(); |
105 | for entry in iter { | 105 | for entry in iter { |
106 | res.total += 1; | 106 | res.total += 1; |
107 | if let Some(tree) = entry.value.and_then(|it| it).map(|it| it.tree().to_owned()) { | 107 | if let Some(tree) = entry.value.and_then(|it| it).map(|it| it.syntax_node()) { |
108 | res.retained += 1; | 108 | res.retained += 1; |
109 | res.retained_size += tree.memory_size_of_subtree(); | 109 | res.retained_size += tree.memory_size_of_subtree(); |
110 | } | 110 | } |
diff --git a/crates/ra_ide_api/src/symbol_index.rs b/crates/ra_ide_api/src/symbol_index.rs index 9b3a45319..e784b5f69 100644 --- a/crates/ra_ide_api/src/symbol_index.rs +++ b/crates/ra_ide_api/src/symbol_index.rs | |||
@@ -61,7 +61,7 @@ fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> | |||
61 | db.check_canceled(); | 61 | db.check_canceled(); |
62 | let parse = db.parse(file_id); | 62 | let parse = db.parse(file_id); |
63 | 63 | ||
64 | let symbols = source_file_to_file_symbols(parse.tree(), file_id); | 64 | let symbols = source_file_to_file_symbols(&parse.tree(), file_id); |
65 | 65 | ||
66 | // FIXME: add macros here | 66 | // FIXME: add macros here |
67 | 67 | ||
@@ -173,7 +173,7 @@ impl SymbolIndex { | |||
173 | files: impl ParallelIterator<Item = (FileId, Parse<ast::SourceFile>)>, | 173 | files: impl ParallelIterator<Item = (FileId, Parse<ast::SourceFile>)>, |
174 | ) -> SymbolIndex { | 174 | ) -> SymbolIndex { |
175 | let symbols = files | 175 | let symbols = files |
176 | .flat_map(|(file_id, file)| source_file_to_file_symbols(file.tree(), file_id)) | 176 | .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id)) |
177 | .collect::<Vec<_>>(); | 177 | .collect::<Vec<_>>(); |
178 | SymbolIndex::new(symbols) | 178 | SymbolIndex::new(symbols) |
179 | } | 179 | } |
@@ -249,7 +249,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec | |||
249 | for event in source_file.syntax().preorder() { | 249 | for event in source_file.syntax().preorder() { |
250 | match event { | 250 | match event { |
251 | WalkEvent::Enter(node) => { | 251 | WalkEvent::Enter(node) => { |
252 | if let Some(mut symbol) = to_file_symbol(node, file_id) { | 252 | if let Some(mut symbol) = to_file_symbol(&node, file_id) { |
253 | symbol.container_name = stack.last().cloned(); | 253 | symbol.container_name = stack.last().cloned(); |
254 | 254 | ||
255 | stack.push(symbol.name.clone()); | 255 | stack.push(symbol.name.clone()); |
@@ -258,7 +258,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec | |||
258 | } | 258 | } |
259 | 259 | ||
260 | WalkEvent::Leave(node) => { | 260 | WalkEvent::Leave(node) => { |
261 | if to_symbol(node).is_some() { | 261 | if to_symbol(&node).is_some() { |
262 | stack.pop(); | 262 | stack.pop(); |
263 | } | 263 | } |
264 | } | 264 | } |
@@ -269,7 +269,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec | |||
269 | } | 269 | } |
270 | 270 | ||
271 | fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { | 271 | fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { |
272 | fn decl<N: NameOwner>(node: &N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { | 272 | fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { |
273 | let name = node.name()?; | 273 | let name = node.name()?; |
274 | let name_range = name.syntax().range(); | 274 | let name_range = name.syntax().range(); |
275 | let name = name.text().clone(); | 275 | let name = name.text().clone(); |
diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs index eb392d5da..16a728789 100644 --- a/crates/ra_ide_api/src/syntax_highlighting.rs +++ b/crates/ra_ide_api/src/syntax_highlighting.rs | |||
@@ -31,8 +31,8 @@ fn is_control_keyword(kind: SyntaxKind) -> bool { | |||
31 | } | 31 | } |
32 | } | 32 | } |
33 | 33 | ||
34 | fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: &ast::Pat) -> bool { | 34 | fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: ast::Pat) -> bool { |
35 | let ty = analyzer.type_of_pat(db, pat).unwrap_or(Ty::Unknown); | 35 | let ty = analyzer.type_of_pat(db, &pat).unwrap_or(Ty::Unknown); |
36 | let is_ty_mut = { | 36 | let is_ty_mut = { |
37 | if let Some((_, mutability)) = ty.as_reference() { | 37 | if let Some((_, mutability)) = ty.as_reference() { |
38 | match mutability { | 38 | match mutability { |
@@ -55,7 +55,7 @@ fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: & | |||
55 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> { | 55 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> { |
56 | let _p = profile("highlight"); | 56 | let _p = profile("highlight"); |
57 | let parse = db.parse(file_id); | 57 | let parse = db.parse(file_id); |
58 | let root = parse.tree().syntax(); | 58 | let root = parse.tree().syntax().clone(); |
59 | 59 | ||
60 | fn calc_binding_hash(file_id: FileId, text: &SmolStr, shadow_count: u32) -> u64 { | 60 | fn calc_binding_hash(file_id: FileId, text: &SmolStr, shadow_count: u32) -> u64 { |
61 | fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { | 61 | fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { |
@@ -70,6 +70,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa | |||
70 | } | 70 | } |
71 | 71 | ||
72 | // Visited nodes to handle highlighting priorities | 72 | // Visited nodes to handle highlighting priorities |
73 | // FIXME: retain only ranges here | ||
73 | let mut highlighted: FxHashSet<SyntaxElement> = FxHashSet::default(); | 74 | let mut highlighted: FxHashSet<SyntaxElement> = FxHashSet::default(); |
74 | let mut bindings_shadow_count: FxHashMap<SmolStr, u32> = FxHashMap::default(); | 75 | let mut bindings_shadow_count: FxHashMap<SmolStr, u32> = FxHashMap::default(); |
75 | 76 | ||
@@ -84,14 +85,14 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa | |||
84 | STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => "string", | 85 | STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => "string", |
85 | ATTR => "attribute", | 86 | ATTR => "attribute", |
86 | NAME_REF => { | 87 | NAME_REF => { |
87 | if let Some(name_ref) = node.as_node().and_then(ast::NameRef::cast) { | 88 | if let Some(name_ref) = node.as_node().cloned().and_then(ast::NameRef::cast) { |
88 | // FIXME: revisit this after #1340 | 89 | // FIXME: revisit this after #1340 |
89 | use crate::name_ref_kind::{classify_name_ref, NameRefKind::*}; | 90 | use crate::name_ref_kind::{classify_name_ref, NameRefKind::*}; |
90 | use hir::{ImplItem, ModuleDef}; | 91 | use hir::{ImplItem, ModuleDef}; |
91 | 92 | ||
92 | // FIXME: try to reuse the SourceAnalyzers | 93 | // FIXME: try to reuse the SourceAnalyzers |
93 | let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None); | 94 | let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None); |
94 | match classify_name_ref(db, &analyzer, name_ref) { | 95 | match classify_name_ref(db, &analyzer, &name_ref) { |
95 | Some(Method(_)) => "function", | 96 | Some(Method(_)) => "function", |
96 | Some(Macro(_)) => "macro", | 97 | Some(Macro(_)) => "macro", |
97 | Some(FieldAccess(_)) => "field", | 98 | Some(FieldAccess(_)) => "field", |
@@ -113,13 +114,13 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa | |||
113 | Some(Pat(ptr)) => { | 114 | Some(Pat(ptr)) => { |
114 | binding_hash = Some({ | 115 | binding_hash = Some({ |
115 | let text = | 116 | let text = |
116 | ptr.syntax_node_ptr().to_node(root).text().to_smol_string(); | 117 | ptr.syntax_node_ptr().to_node(&root).text().to_smol_string(); |
117 | let shadow_count = | 118 | let shadow_count = |
118 | bindings_shadow_count.entry(text.clone()).or_default(); | 119 | bindings_shadow_count.entry(text.clone()).or_default(); |
119 | calc_binding_hash(file_id, &text, *shadow_count) | 120 | calc_binding_hash(file_id, &text, *shadow_count) |
120 | }); | 121 | }); |
121 | 122 | ||
122 | if is_variable_mutable(db, &analyzer, ptr.to_node(root)) { | 123 | if is_variable_mutable(db, &analyzer, ptr.to_node(&root)) { |
123 | "variable.mut" | 124 | "variable.mut" |
124 | } else { | 125 | } else { |
125 | "variable" | 126 | "variable" |
@@ -134,7 +135,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa | |||
134 | } | 135 | } |
135 | } | 136 | } |
136 | NAME => { | 137 | NAME => { |
137 | if let Some(name) = node.as_node().and_then(ast::Name::cast) { | 138 | if let Some(name) = node.as_node().cloned().and_then(ast::Name::cast) { |
138 | let analyzer = hir::SourceAnalyzer::new(db, file_id, name.syntax(), None); | 139 | let analyzer = hir::SourceAnalyzer::new(db, file_id, name.syntax(), None); |
139 | if let Some(pat) = name.syntax().ancestors().find_map(ast::Pat::cast) { | 140 | if let Some(pat) = name.syntax().ancestors().find_map(ast::Pat::cast) { |
140 | binding_hash = Some({ | 141 | binding_hash = Some({ |
@@ -176,12 +177,11 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa | |||
176 | k if is_control_keyword(k) => "keyword.control", | 177 | k if is_control_keyword(k) => "keyword.control", |
177 | k if k.is_keyword() => "keyword", | 178 | k if k.is_keyword() => "keyword", |
178 | _ => { | 179 | _ => { |
179 | // let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None); | 180 | if let Some(macro_call) = node.as_node().cloned().and_then(ast::MacroCall::cast) { |
180 | if let Some(macro_call) = node.as_node().and_then(ast::MacroCall::cast) { | ||
181 | if let Some(path) = macro_call.path() { | 181 | if let Some(path) = macro_call.path() { |
182 | if let Some(segment) = path.segment() { | 182 | if let Some(segment) = path.segment() { |
183 | if let Some(name_ref) = segment.name_ref() { | 183 | if let Some(name_ref) = segment.name_ref() { |
184 | highlighted.insert(name_ref.syntax().into()); | 184 | highlighted.insert(name_ref.syntax().clone().into()); |
185 | let range_start = name_ref.syntax().range().start(); | 185 | let range_start = name_ref.syntax().range().start(); |
186 | let mut range_end = name_ref.syntax().range().end(); | 186 | let mut range_end = name_ref.syntax().range().end(); |
187 | for sibling in path.syntax().siblings_with_tokens(Direction::Next) { | 187 | for sibling in path.syntax().siblings_with_tokens(Direction::Next) { |
@@ -230,7 +230,8 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo | |||
230 | let mut buf = String::new(); | 230 | let mut buf = String::new(); |
231 | buf.push_str(&STYLE); | 231 | buf.push_str(&STYLE); |
232 | buf.push_str("<pre><code>"); | 232 | buf.push_str("<pre><code>"); |
233 | let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.as_token()); | 233 | let tokens = |
234 | parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.as_token().cloned()); | ||
234 | for token in tokens { | 235 | for token in tokens { |
235 | could_intersect.retain(|it| token.range().start() <= it.range.end()); | 236 | could_intersect.retain(|it| token.range().start() <= it.range.end()); |
236 | while let Some(r) = ranges.get(frontier) { | 237 | while let Some(r) = ranges.get(frontier) { |
diff --git a/crates/ra_ide_api/src/syntax_tree.rs b/crates/ra_ide_api/src/syntax_tree.rs index b3e08c041..3d7373d02 100644 --- a/crates/ra_ide_api/src/syntax_tree.rs +++ b/crates/ra_ide_api/src/syntax_tree.rs | |||
@@ -18,7 +18,7 @@ pub(crate) fn syntax_tree( | |||
18 | let node = match algo::find_covering_element(parse.tree().syntax(), text_range) { | 18 | let node = match algo::find_covering_element(parse.tree().syntax(), text_range) { |
19 | SyntaxElement::Node(node) => node, | 19 | SyntaxElement::Node(node) => node, |
20 | SyntaxElement::Token(token) => { | 20 | SyntaxElement::Token(token) => { |
21 | if let Some(tree) = syntax_tree_for_string(token, text_range) { | 21 | if let Some(tree) = syntax_tree_for_string(&token, text_range) { |
22 | return tree; | 22 | return tree; |
23 | } | 23 | } |
24 | token.parent() | 24 | token.parent() |
@@ -33,7 +33,7 @@ pub(crate) fn syntax_tree( | |||
33 | 33 | ||
34 | /// Attempts parsing the selected contents of a string literal | 34 | /// Attempts parsing the selected contents of a string literal |
35 | /// as rust syntax and returns its syntax tree | 35 | /// as rust syntax and returns its syntax tree |
36 | fn syntax_tree_for_string(token: SyntaxToken, text_range: TextRange) -> Option<String> { | 36 | fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<String> { |
37 | // When the range is inside a string | 37 | // When the range is inside a string |
38 | // we'll attempt parsing it as rust syntax | 38 | // we'll attempt parsing it as rust syntax |
39 | // to provide the syntax tree of the contents of the string | 39 | // to provide the syntax tree of the contents of the string |
@@ -43,7 +43,7 @@ fn syntax_tree_for_string(token: SyntaxToken, text_range: TextRange) -> Option<S | |||
43 | } | 43 | } |
44 | } | 44 | } |
45 | 45 | ||
46 | fn syntax_tree_for_token(node: SyntaxToken, text_range: TextRange) -> Option<String> { | 46 | fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> { |
47 | // Range of the full node | 47 | // Range of the full node |
48 | let node_range = node.range(); | 48 | let node_range = node.range(); |
49 | let text = node.text().to_string(); | 49 | let text = node.text().to_string(); |
diff --git a/crates/ra_ide_api/src/typing.rs b/crates/ra_ide_api/src/typing.rs index 01eb32b2f..ad0ababcc 100644 --- a/crates/ra_ide_api/src/typing.rs +++ b/crates/ra_ide_api/src/typing.rs | |||
@@ -1,15 +1,16 @@ | |||
1 | use crate::{db::RootDatabase, SourceChange, SourceFileEdit}; | ||
2 | use ra_db::{FilePosition, SourceDatabase}; | 1 | use ra_db::{FilePosition, SourceDatabase}; |
3 | use ra_fmt::leading_indent; | 2 | use ra_fmt::leading_indent; |
4 | use ra_syntax::{ | 3 | use ra_syntax::{ |
5 | algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset}, | 4 | algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset}, |
6 | ast::{self, AstToken}, | 5 | ast::{self, AstToken}, |
7 | AstNode, SourceFile, | 6 | AstNode, SmolStr, SourceFile, |
8 | SyntaxKind::*, | 7 | SyntaxKind::*, |
9 | SyntaxToken, TextRange, TextUnit, | 8 | SyntaxToken, TextRange, TextUnit, |
10 | }; | 9 | }; |
11 | use ra_text_edit::{TextEdit, TextEditBuilder}; | 10 | use ra_text_edit::{TextEdit, TextEditBuilder}; |
12 | 11 | ||
12 | use crate::{db::RootDatabase, SourceChange, SourceFileEdit}; | ||
13 | |||
13 | pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> { | 14 | pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> { |
14 | let parse = db.parse(position.file_id); | 15 | let parse = db.parse(position.file_id); |
15 | let file = parse.tree(); | 16 | let file = parse.tree(); |
@@ -43,15 +44,15 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour | |||
43 | ) | 44 | ) |
44 | } | 45 | } |
45 | 46 | ||
46 | fn node_indent<'a>(file: &'a SourceFile, token: SyntaxToken) -> Option<&'a str> { | 47 | fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> { |
47 | let ws = match find_token_at_offset(file.syntax(), token.range().start()) { | 48 | let ws = match find_token_at_offset(file.syntax(), token.range().start()) { |
48 | TokenAtOffset::Between(l, r) => { | 49 | TokenAtOffset::Between(l, r) => { |
49 | assert!(r == token); | 50 | assert!(r == *token); |
50 | l | 51 | l |
51 | } | 52 | } |
52 | TokenAtOffset::Single(n) => { | 53 | TokenAtOffset::Single(n) => { |
53 | assert!(n == token); | 54 | assert!(n == *token); |
54 | return Some(""); | 55 | return Some("".into()); |
55 | } | 56 | } |
56 | TokenAtOffset::None => unreachable!(), | 57 | TokenAtOffset::None => unreachable!(), |
57 | }; | 58 | }; |
@@ -60,12 +61,12 @@ fn node_indent<'a>(file: &'a SourceFile, token: SyntaxToken) -> Option<&'a str> | |||
60 | } | 61 | } |
61 | let text = ws.text(); | 62 | let text = ws.text(); |
62 | let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0); | 63 | let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0); |
63 | Some(&text[pos..]) | 64 | Some(text[pos..].into()) |
64 | } | 65 | } |
65 | 66 | ||
66 | pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> { | 67 | pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> { |
67 | assert_eq!(file.syntax().text().char_at(eq_offset), Some('=')); | 68 | assert_eq!(file.syntax().text().char_at(eq_offset), Some('=')); |
68 | let let_stmt: &ast::LetStmt = find_node_at_offset(file.syntax(), eq_offset)?; | 69 | let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), eq_offset)?; |
69 | if let_stmt.has_semi() { | 70 | if let_stmt.has_semi() { |
70 | return None; | 71 | return None; |
71 | } | 72 | } |
@@ -141,7 +142,7 @@ mod tests { | |||
141 | edit.insert(offset, "=".to_string()); | 142 | edit.insert(offset, "=".to_string()); |
142 | let before = edit.finish().apply(&before); | 143 | let before = edit.finish().apply(&before); |
143 | let parse = SourceFile::parse(&before); | 144 | let parse = SourceFile::parse(&before); |
144 | if let Some(result) = on_eq_typed(parse.tree(), offset) { | 145 | if let Some(result) = on_eq_typed(&parse.tree(), offset) { |
145 | let actual = result.apply(&before); | 146 | let actual = result.apply(&before); |
146 | assert_eq_text!(after, &actual); | 147 | assert_eq_text!(after, &actual); |
147 | } else { | 148 | } else { |
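Because a `SyntaxToken` no longer borrows from the file, `node_indent` cannot return an `&'a str` sliced out of the tree; it copies the indentation into a `SmolStr` instead. The same idea in isolation, assuming the whitespace text has already been extracted (helper name is illustrative):

    fn indent_of(ws_text: &str) -> SmolStr {
        // Copy out everything after the last newline, since the result
        // must not borrow from the token it came from.
        let pos = ws_text.rfind('\n').map(|it| it + 1).unwrap_or(0);
        ws_text[pos..].into()
    }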
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs index c7c06c7fd..f185aecb7 100644 --- a/crates/ra_mbe/src/mbe_expander.rs +++ b/crates/ra_mbe/src/mbe_expander.rs | |||
@@ -599,7 +599,8 @@ mod tests { | |||
599 | let macro_definition = | 599 | let macro_definition = |
600 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); | 600 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); |
601 | 601 | ||
602 | let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap(); | 602 | let (definition_tt, _) = |
603 | ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap(); | ||
603 | crate::MacroRules::parse(&definition_tt).unwrap() | 604 | crate::MacroRules::parse(&definition_tt).unwrap() |
604 | } | 605 | } |
605 | 606 | ||
@@ -611,7 +612,8 @@ mod tests { | |||
611 | let macro_invocation = | 612 | let macro_invocation = |
612 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); | 613 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); |
613 | 614 | ||
614 | let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap(); | 615 | let (invocation_tt, _) = |
616 | ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap(); | ||
615 | 617 | ||
616 | expand_rule(&rules.rules[0], &invocation_tt) | 618 | expand_rule(&rules.rules[0], &invocation_tt) |
617 | } | 619 | } |
diff --git a/crates/ra_mbe/src/mbe_parser.rs b/crates/ra_mbe/src/mbe_parser.rs index cddb4a7b4..954b84d9d 100644 --- a/crates/ra_mbe/src/mbe_parser.rs +++ b/crates/ra_mbe/src/mbe_parser.rs | |||
@@ -179,7 +179,8 @@ mod tests { | |||
179 | let macro_definition = | 179 | let macro_definition = |
180 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); | 180 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); |
181 | 181 | ||
182 | let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap(); | 182 | let (definition_tt, _) = |
183 | ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap(); | ||
183 | parse(&definition_tt) | 184 | parse(&definition_tt) |
184 | } | 185 | } |
185 | 186 | ||
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index bfc351f81..64ed6a517 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs | |||
@@ -2,7 +2,7 @@ use crate::subtree_source::SubtreeTokenSource; | |||
2 | use crate::ExpandError; | 2 | use crate::ExpandError; |
3 | use ra_parser::{ParseError, TreeSink}; | 3 | use ra_parser::{ParseError, TreeSink}; |
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | ast, AstNode, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode, | 5 | ast, AstNode, AstToken, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode, |
6 | SyntaxTreeBuilder, TextRange, TextUnit, T, | 6 | SyntaxTreeBuilder, TextRange, TextUnit, T, |
7 | }; | 7 | }; |
8 | use tt::buffer::{Cursor, TokenBuffer}; | 8 | use tt::buffer::{Cursor, TokenBuffer}; |
@@ -116,8 +116,6 @@ impl TokenMap { | |||
116 | /// and strips the ending `*/` | 116 | /// and strips the ending `*/` |
117 | /// And then quote the string, which is needed to convert to `tt::Literal` | 117 | /// And then quote the string, which is needed to convert to `tt::Literal` |
118 | fn doc_comment_text(comment: &ast::Comment) -> SmolStr { | 118 | fn doc_comment_text(comment: &ast::Comment) -> SmolStr { |
119 | use ast::AstToken; | ||
120 | |||
121 | let prefix_len = comment.prefix().len(); | 119 | let prefix_len = comment.prefix().len(); |
122 | let mut text = &comment.text()[prefix_len..]; | 120 | let mut text = &comment.text()[prefix_len..]; |
123 | 121 | ||
@@ -132,9 +130,8 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr { | |||
132 | text.into() | 130 | text.into() |
133 | } | 131 | } |
134 | 132 | ||
135 | fn convert_doc_comment<'a>(token: &ra_syntax::SyntaxToken<'a>) -> Option<Vec<tt::TokenTree>> { | 133 | fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> { |
136 | use ast::AstToken; | 134 | let comment = ast::Comment::cast(token.clone())?; |
137 | let comment = ast::Comment::cast(*token)?; | ||
138 | let doc = comment.kind().doc?; | 135 | let doc = comment.kind().doc?; |
139 | 136 | ||
140 | // Make `doc="\" Comments\"" | 137 | // Make `doc="\" Comments\"" |
@@ -245,7 +242,7 @@ fn convert_tt( | |||
245 | } | 242 | } |
246 | } | 243 | } |
247 | SyntaxElement::Node(node) => { | 244 | SyntaxElement::Node(node) => { |
248 | let child = convert_tt(token_map, global_offset, node)?.into(); | 245 | let child = convert_tt(token_map, global_offset, &node)?.into(); |
249 | token_trees.push(child); | 246 | token_trees.push(child); |
250 | } | 247 | } |
251 | }; | 248 | }; |
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs index 419b2c099..38a31109d 100644 --- a/crates/ra_mbe/src/tests.rs +++ b/crates/ra_mbe/src/tests.rs | |||
@@ -37,8 +37,8 @@ impl_froms!(TokenTree: Leaf, Subtree); | |||
37 | let macro_invocation = | 37 | let macro_invocation = |
38 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); | 38 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); |
39 | 39 | ||
40 | let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap(); | 40 | let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
41 | let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap(); | 41 | let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
42 | let rules = crate::MacroRules::parse(&definition_tt).unwrap(); | 42 | let rules = crate::MacroRules::parse(&definition_tt).unwrap(); |
43 | let expansion = rules.expand(&invocation_tt).unwrap(); | 43 | let expansion = rules.expand(&invocation_tt).unwrap(); |
44 | assert_eq!( | 44 | assert_eq!( |
@@ -53,7 +53,7 @@ pub(crate) fn create_rules(macro_definition: &str) -> MacroRules { | |||
53 | let macro_definition = | 53 | let macro_definition = |
54 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); | 54 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); |
55 | 55 | ||
56 | let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap(); | 56 | let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
57 | crate::MacroRules::parse(&definition_tt).unwrap() | 57 | crate::MacroRules::parse(&definition_tt).unwrap() |
58 | } | 58 | } |
59 | 59 | ||
@@ -62,34 +62,25 @@ pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree { | |||
62 | let macro_invocation = | 62 | let macro_invocation = |
63 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); | 63 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); |
64 | 64 | ||
65 | let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap(); | 65 | let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
66 | 66 | ||
67 | rules.expand(&invocation_tt).unwrap() | 67 | rules.expand(&invocation_tt).unwrap() |
68 | } | 68 | } |
69 | 69 | ||
70 | pub(crate) fn expand_to_items( | 70 | pub(crate) fn expand_to_items(rules: &MacroRules, invocation: &str) -> ast::MacroItems { |
71 | rules: &MacroRules, | ||
72 | invocation: &str, | ||
73 | ) -> ra_syntax::TreeArc<ast::MacroItems> { | ||
74 | let expanded = expand(rules, invocation); | 71 | let expanded = expand(rules, invocation); |
75 | token_tree_to_macro_items(&expanded).unwrap().tree().to_owned() | 72 | token_tree_to_macro_items(&expanded).unwrap().tree() |
76 | } | 73 | } |
77 | 74 | ||
78 | #[allow(unused)] | 75 | #[allow(unused)] |
79 | pub(crate) fn expand_to_stmts( | 76 | pub(crate) fn expand_to_stmts(rules: &MacroRules, invocation: &str) -> ast::MacroStmts { |
80 | rules: &MacroRules, | ||
81 | invocation: &str, | ||
82 | ) -> ra_syntax::TreeArc<ast::MacroStmts> { | ||
83 | let expanded = expand(rules, invocation); | 77 | let expanded = expand(rules, invocation); |
84 | token_tree_to_macro_stmts(&expanded).unwrap().tree().to_owned() | 78 | token_tree_to_macro_stmts(&expanded).unwrap().tree() |
85 | } | 79 | } |
86 | 80 | ||
87 | pub(crate) fn expand_to_expr( | 81 | pub(crate) fn expand_to_expr(rules: &MacroRules, invocation: &str) -> ast::Expr { |
88 | rules: &MacroRules, | ||
89 | invocation: &str, | ||
90 | ) -> ra_syntax::TreeArc<ast::Expr> { | ||
91 | let expanded = expand(rules, invocation); | 82 | let expanded = expand(rules, invocation); |
92 | token_tree_to_expr(&expanded).unwrap().tree().to_owned() | 83 | token_tree_to_expr(&expanded).unwrap().tree() |
93 | } | 84 | } |
94 | 85 | ||
95 | pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree { | 86 | pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree { |
@@ -97,7 +88,7 @@ pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree { | |||
97 | let wrapped = format!("wrap_macro!( {} )", text); | 88 | let wrapped = format!("wrap_macro!( {} )", text); |
98 | let wrapped = ast::SourceFile::parse(&wrapped); | 89 | let wrapped = ast::SourceFile::parse(&wrapped); |
99 | let wrapped = wrapped.tree().syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); | 90 | let wrapped = wrapped.tree().syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); |
100 | let mut wrapped = ast_to_token_tree(wrapped).unwrap().0; | 91 | let mut wrapped = ast_to_token_tree(&wrapped).unwrap().0; |
101 | wrapped.delimiter = tt::Delimiter::None; | 92 | wrapped.delimiter = tt::Delimiter::None; |
102 | 93 | ||
103 | wrapped | 94 | wrapped |
@@ -164,8 +155,8 @@ pub(crate) fn assert_expansion( | |||
164 | 155 | ||
165 | let (expanded_tree, expected_tree) = match kind { | 156 | let (expanded_tree, expected_tree) = match kind { |
166 | MacroKind::Items => { | 157 | MacroKind::Items => { |
167 | let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree().to_owned(); | 158 | let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree(); |
168 | let expected_tree = token_tree_to_macro_items(&expected).unwrap().tree().to_owned(); | 159 | let expected_tree = token_tree_to_macro_items(&expected).unwrap().tree(); |
169 | 160 | ||
170 | ( | 161 | ( |
171 | debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(), | 162 | debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(), |
@@ -174,8 +165,8 @@ pub(crate) fn assert_expansion( | |||
174 | } | 165 | } |
175 | 166 | ||
176 | MacroKind::Stmts => { | 167 | MacroKind::Stmts => { |
177 | let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree().to_owned(); | 168 | let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree(); |
178 | let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree().to_owned(); | 169 | let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree(); |
179 | 170 | ||
180 | ( | 171 | ( |
181 | debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(), | 172 | debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(), |
@@ -419,7 +410,7 @@ fn test_expand_to_item_list() { | |||
419 | ", | 410 | ", |
420 | ); | 411 | ); |
421 | let expansion = expand(&rules, "structs!(Foo, Bar);"); | 412 | let expansion = expand(&rules, "structs!(Foo, Bar);"); |
422 | let tree = token_tree_to_macro_items(&expansion).unwrap().tree().to_owned(); | 413 | let tree = token_tree_to_macro_items(&expansion).unwrap().tree(); |
423 | assert_eq!( | 414 | assert_eq!( |
424 | tree.syntax().debug_dump().trim(), | 415 | tree.syntax().debug_dump().trim(), |
425 | r#" | 416 | r#" |
@@ -537,7 +528,7 @@ fn test_tt_to_stmts() { | |||
537 | ); | 528 | ); |
538 | 529 | ||
539 | let expanded = expand(&rules, "foo!{}"); | 530 | let expanded = expand(&rules, "foo!{}"); |
540 | let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree().to_owned(); | 531 | let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree(); |
541 | 532 | ||
542 | assert_eq!( | 533 | assert_eq!( |
543 | stmts.syntax().debug_dump().trim(), | 534 | stmts.syntax().debug_dump().trim(), |
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml index 37a91ea35..a5565de33 100644 --- a/crates/ra_syntax/Cargo.toml +++ b/crates/ra_syntax/Cargo.toml | |||
@@ -10,7 +10,7 @@ repository = "https://github.com/rust-analyzer/rust-analyzer" | |||
10 | [dependencies] | 10 | [dependencies] |
11 | unicode-xid = "0.1.0" | 11 | unicode-xid = "0.1.0" |
12 | itertools = "0.8.0" | 12 | itertools = "0.8.0" |
13 | rowan = "0.5.0" | 13 | rowan = "0.5.6" |
14 | 14 | ||
15 | # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here | 15 | # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here |
16 | # to reduce number of compilations | 16 | # to reduce number of compilations |
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs index fad8da132..e2de5e0e3 100644 --- a/crates/ra_syntax/src/algo.rs +++ b/crates/ra_syntax/src/algo.rs | |||
@@ -9,8 +9,8 @@ pub use rowan::TokenAtOffset; | |||
9 | pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset<SyntaxToken> { | 9 | pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset<SyntaxToken> { |
10 | match node.0.token_at_offset(offset) { | 10 | match node.0.token_at_offset(offset) { |
11 | TokenAtOffset::None => TokenAtOffset::None, | 11 | TokenAtOffset::None => TokenAtOffset::None, |
12 | TokenAtOffset::Single(n) => TokenAtOffset::Single(n.into()), | 12 | TokenAtOffset::Single(n) => TokenAtOffset::Single(SyntaxToken(n)), |
13 | TokenAtOffset::Between(l, r) => TokenAtOffset::Between(l.into(), r.into()), | 13 | TokenAtOffset::Between(l, r) => TokenAtOffset::Between(SyntaxToken(l), SyntaxToken(r)), |
14 | } | 14 | } |
15 | } | 15 | } |
16 | 16 | ||
@@ -22,7 +22,7 @@ pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffse | |||
22 | pub fn ancestors_at_offset( | 22 | pub fn ancestors_at_offset( |
23 | node: &SyntaxNode, | 23 | node: &SyntaxNode, |
24 | offset: TextUnit, | 24 | offset: TextUnit, |
25 | ) -> impl Iterator<Item = &SyntaxNode> { | 25 | ) -> impl Iterator<Item = SyntaxNode> { |
26 | find_token_at_offset(node, offset) | 26 | find_token_at_offset(node, offset) |
27 | .map(|token| token.parent().ancestors()) | 27 | .map(|token| token.parent().ancestors()) |
28 | .kmerge_by(|node1, node2| node1.range().len() < node2.range().len()) | 28 | .kmerge_by(|node1, node2| node1.range().len() < node2.range().len()) |
@@ -37,7 +37,7 @@ pub fn ancestors_at_offset( | |||
37 | /// ``` | 37 | /// ``` |
38 | /// | 38 | /// |
39 | /// then the shorter node will be silently preferred. | 39 | /// then the shorter node will be silently preferred. |
40 | pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<&N> { | 40 | pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<N> { |
41 | ancestors_at_offset(syntax, offset).find_map(N::cast) | 41 | ancestors_at_offset(syntax, offset).find_map(N::cast) |
42 | } | 42 | } |
43 | 43 | ||
@@ -59,5 +59,5 @@ pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Optio | |||
59 | } | 59 | } |
60 | 60 | ||
61 | pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement { | 61 | pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement { |
62 | root.0.covering_node(range).into() | 62 | SyntaxElement::new(root.0.covering_node(range)) |
63 | } | 63 | } |
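With the transparent-newtype machinery gone, the conversions above become explicit constructor calls (`SyntaxToken(n)`, `SyntaxElement::new(..)`) and offset lookups return owned values. A rough standalone sketch of the wrapping pattern, with a mock `RowanToken` standing in for the underlying rowan token:

#[derive(Debug, Clone)]
struct RowanToken(u32); // stand-in for the token type rowan exposes

#[derive(Debug, Clone)]
struct SyntaxToken(RowanToken);

enum TokenAtOffset<T> {
    None,
    Single(T),
    Between(T, T),
}

// Wrapping is now an explicit constructor call per variant instead of a
// zero-cost `.into()` on a #[repr(transparent)] newtype.
fn wrap(t: TokenAtOffset<RowanToken>) -> TokenAtOffset<SyntaxToken> {
    match t {
        TokenAtOffset::None => TokenAtOffset::None,
        TokenAtOffset::Single(n) => TokenAtOffset::Single(SyntaxToken(n)),
        TokenAtOffset::Between(l, r) => {
            TokenAtOffset::Between(SyntaxToken(l), SyntaxToken(r))
        }
    }
}

fn main() {
    if let TokenAtOffset::Single(tok) = wrap(TokenAtOffset::Single(RowanToken(7))) {
        println!("wrapped: {:?}", tok);
    }
}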
diff --git a/crates/ra_syntax/src/algo/visit.rs b/crates/ra_syntax/src/algo/visit.rs index 81a99228f..87bd15cc0 100644 --- a/crates/ra_syntax/src/algo/visit.rs +++ b/crates/ra_syntax/src/algo/visit.rs | |||
@@ -16,7 +16,7 @@ pub trait Visitor<'a>: Sized { | |||
16 | fn visit<N, F>(self, f: F) -> Vis<Self, N, F> | 16 | fn visit<N, F>(self, f: F) -> Vis<Self, N, F> |
17 | where | 17 | where |
18 | N: AstNode + 'a, | 18 | N: AstNode + 'a, |
19 | F: FnOnce(&'a N) -> Self::Output, | 19 | F: FnOnce(N) -> Self::Output, |
20 | { | 20 | { |
21 | Vis { inner: self, f, ph: PhantomData } | 21 | Vis { inner: self, f, ph: PhantomData } |
22 | } | 22 | } |
@@ -29,7 +29,7 @@ pub trait VisitorCtx<'a>: Sized { | |||
29 | fn visit<N, F>(self, f: F) -> VisCtx<Self, N, F> | 29 | fn visit<N, F>(self, f: F) -> VisCtx<Self, N, F> |
30 | where | 30 | where |
31 | N: AstNode + 'a, | 31 | N: AstNode + 'a, |
32 | F: FnOnce(&'a N, Self::Ctx) -> Self::Output, | 32 | F: FnOnce(N, Self::Ctx) -> Self::Output, |
33 | { | 33 | { |
34 | VisCtx { inner: self, f, ph: PhantomData } | 34 | VisCtx { inner: self, f, ph: PhantomData } |
35 | } | 35 | } |
@@ -74,13 +74,13 @@ impl<'a, V, N, F> Visitor<'a> for Vis<V, N, F> | |||
74 | where | 74 | where |
75 | V: Visitor<'a>, | 75 | V: Visitor<'a>, |
76 | N: AstNode + 'a, | 76 | N: AstNode + 'a, |
77 | F: FnOnce(&'a N) -> <V as Visitor<'a>>::Output, | 77 | F: FnOnce(N) -> <V as Visitor<'a>>::Output, |
78 | { | 78 | { |
79 | type Output = <V as Visitor<'a>>::Output; | 79 | type Output = <V as Visitor<'a>>::Output; |
80 | 80 | ||
81 | fn accept(self, node: &'a SyntaxNode) -> Option<Self::Output> { | 81 | fn accept(self, node: &'a SyntaxNode) -> Option<Self::Output> { |
82 | let Vis { inner, f, .. } = self; | 82 | let Vis { inner, f, .. } = self; |
83 | inner.accept(node).or_else(|| N::cast(node).map(f)) | 83 | inner.accept(node).or_else(|| N::cast(node.clone()).map(f)) |
84 | } | 84 | } |
85 | } | 85 | } |
86 | 86 | ||
@@ -95,14 +95,14 @@ impl<'a, V, N, F> VisitorCtx<'a> for VisCtx<V, N, F> | |||
95 | where | 95 | where |
96 | V: VisitorCtx<'a>, | 96 | V: VisitorCtx<'a>, |
97 | N: AstNode + 'a, | 97 | N: AstNode + 'a, |
98 | F: FnOnce(&'a N, <V as VisitorCtx<'a>>::Ctx) -> <V as VisitorCtx<'a>>::Output, | 98 | F: FnOnce(N, <V as VisitorCtx<'a>>::Ctx) -> <V as VisitorCtx<'a>>::Output, |
99 | { | 99 | { |
100 | type Output = <V as VisitorCtx<'a>>::Output; | 100 | type Output = <V as VisitorCtx<'a>>::Output; |
101 | type Ctx = <V as VisitorCtx<'a>>::Ctx; | 101 | type Ctx = <V as VisitorCtx<'a>>::Ctx; |
102 | 102 | ||
103 | fn accept(self, node: &'a SyntaxNode) -> Result<Self::Output, Self::Ctx> { | 103 | fn accept(self, node: &'a SyntaxNode) -> Result<Self::Output, Self::Ctx> { |
104 | let VisCtx { inner, f, .. } = self; | 104 | let VisCtx { inner, f, .. } = self; |
105 | inner.accept(node).or_else(|ctx| match N::cast(node) { | 105 | inner.accept(node).or_else(|ctx| match N::cast(node.clone()) { |
106 | None => Err(ctx), | 106 | None => Err(ctx), |
107 | Some(node) => Ok(f(node, ctx)), | 107 | Some(node) => Ok(f(node, ctx)), |
108 | }) | 108 | }) |
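The visitor bounds above change from `FnOnce(&'a N)` to `FnOnce(N)` because `cast` now consumes a node, which is why the call sites clone before casting. A small self-contained sketch of that flow, using mock `SyntaxNode` and `FnDef` types rather than the real visitor:

#[derive(Debug, Clone)]
struct SyntaxNode {
    kind: &'static str,
}

#[derive(Debug, Clone)]
struct FnDef {
    syntax: SyntaxNode,
}

impl FnDef {
    // `cast` takes the node by value, so a borrowed node must be cloned first.
    fn cast(syntax: SyntaxNode) -> Option<FnDef> {
        if syntax.kind == "FN_DEF" {
            Some(FnDef { syntax })
        } else {
            None
        }
    }
}

// The visitor closure now receives the AST node by value, not by reference.
fn visit_fn_def<T>(node: &SyntaxNode, f: impl FnOnce(FnDef) -> T) -> Option<T> {
    FnDef::cast(node.clone()).map(f)
}

fn main() {
    let node = SyntaxNode { kind: "FN_DEF" };
    let name = visit_fn_def(&node, |def| def.syntax.kind.to_string());
    assert_eq!(name.as_deref(), Some("FN_DEF"));
}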
diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs index 3dcf39f7e..ceb603c50 100644 --- a/crates/ra_syntax/src/ast.rs +++ b/crates/ra_syntax/src/ast.rs | |||
@@ -9,7 +9,7 @@ mod expr_extensions; | |||
9 | use std::marker::PhantomData; | 9 | use std::marker::PhantomData; |
10 | 10 | ||
11 | use crate::{ | 11 | use crate::{ |
12 | syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken, TreeArc}, | 12 | syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken}, |
13 | SmolStr, | 13 | SmolStr, |
14 | }; | 14 | }; |
15 | 15 | ||
@@ -25,51 +25,49 @@ pub use self::{ | |||
25 | /// conversion itself has zero runtime cost: ast and syntax nodes have exactly | 25 | /// conversion itself has zero runtime cost: ast and syntax nodes have exactly |
26 | /// the same representation: a pointer to the tree root and a pointer to the | 26 | /// the same representation: a pointer to the tree root and a pointer to the |
27 | /// node itself. | 27 | /// node itself. |
28 | pub trait AstNode: | 28 | pub trait AstNode: Clone { |
29 | rowan::TransparentNewType<Repr = rowan::SyntaxNode> + ToOwned<Owned = TreeArc<Self>> | 29 | fn cast(syntax: SyntaxNode) -> Option<Self> |
30 | { | ||
31 | fn cast(syntax: &SyntaxNode) -> Option<&Self> | ||
32 | where | 30 | where |
33 | Self: Sized; | 31 | Self: Sized; |
34 | fn syntax(&self) -> &SyntaxNode; | 32 | fn syntax(&self) -> &SyntaxNode; |
35 | } | 33 | } |
36 | 34 | ||
37 | /// Like `AstNode`, but wraps tokens rather than interior nodes. | 35 | /// Like `AstNode`, but wraps tokens rather than interior nodes. |
38 | pub trait AstToken<'a> { | 36 | pub trait AstToken { |
39 | fn cast(token: SyntaxToken<'a>) -> Option<Self> | 37 | fn cast(token: SyntaxToken) -> Option<Self> |
40 | where | 38 | where |
41 | Self: Sized; | 39 | Self: Sized; |
42 | fn syntax(&self) -> SyntaxToken<'a>; | 40 | fn syntax(&self) -> &SyntaxToken; |
43 | fn text(&self) -> &'a SmolStr { | 41 | fn text(&self) -> &SmolStr { |
44 | self.syntax().text() | 42 | self.syntax().text() |
45 | } | 43 | } |
46 | } | 44 | } |
47 | 45 | ||
48 | /// An iterator over `SyntaxNode` children of a particular AST type. | 46 | /// An iterator over `SyntaxNode` children of a particular AST type. |
49 | #[derive(Debug)] | 47 | #[derive(Debug)] |
50 | pub struct AstChildren<'a, N> { | 48 | pub struct AstChildren<N> { |
51 | inner: SyntaxNodeChildren<'a>, | 49 | inner: SyntaxNodeChildren, |
52 | ph: PhantomData<N>, | 50 | ph: PhantomData<N>, |
53 | } | 51 | } |
54 | 52 | ||
55 | impl<'a, N> AstChildren<'a, N> { | 53 | impl<N> AstChildren<N> { |
56 | fn new(parent: &'a SyntaxNode) -> Self { | 54 | fn new(parent: &SyntaxNode) -> Self { |
57 | AstChildren { inner: parent.children(), ph: PhantomData } | 55 | AstChildren { inner: parent.children(), ph: PhantomData } |
58 | } | 56 | } |
59 | } | 57 | } |
60 | 58 | ||
61 | impl<'a, N: AstNode + 'a> Iterator for AstChildren<'a, N> { | 59 | impl<N: AstNode> Iterator for AstChildren<N> { |
62 | type Item = &'a N; | 60 | type Item = N; |
63 | fn next(&mut self) -> Option<&'a N> { | 61 | fn next(&mut self) -> Option<N> { |
64 | self.inner.by_ref().find_map(N::cast) | 62 | self.inner.by_ref().find_map(N::cast) |
65 | } | 63 | } |
66 | } | 64 | } |
67 | 65 | ||
68 | fn child_opt<P: AstNode, C: AstNode>(parent: &P) -> Option<&C> { | 66 | fn child_opt<P: AstNode + ?Sized, C: AstNode>(parent: &P) -> Option<C> { |
69 | children(parent).next() | 67 | children(parent).next() |
70 | } | 68 | } |
71 | 69 | ||
72 | fn children<P: AstNode, C: AstNode>(parent: &P) -> AstChildren<C> { | 70 | fn children<P: AstNode + ?Sized, C: AstNode>(parent: &P) -> AstChildren<C> { |
73 | AstChildren::new(parent.syntax()) | 71 | AstChildren::new(parent.syntax()) |
74 | } | 72 | } |
75 | 73 | ||
@@ -123,7 +121,7 @@ fn test_doc_comment_preserves_indents() { | |||
123 | 121 | ||
124 | #[test] | 122 | #[test] |
125 | fn test_where_predicates() { | 123 | fn test_where_predicates() { |
126 | fn assert_bound(text: &str, bound: Option<&TypeBound>) { | 124 | fn assert_bound(text: &str, bound: Option<TypeBound>) { |
127 | assert_eq!(text, bound.unwrap().syntax().text().to_string()); | 125 | assert_eq!(text, bound.unwrap().syntax().text().to_string()); |
128 | } | 126 | } |
129 | 127 | ||
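The `AstNode` trait change above is the core of the migration: `cast` consumes an owned `SyntaxNode`, AST wrappers are plain `Clone` structs, and `AstChildren`/`child_opt`/`children` lose their lifetime parameters. A minimal standalone sketch of the pattern, with a mock `SyntaxNode` (the real one, as the doc comment above notes, is a cheap-to-clone pair of tree root and node pointer):

#[derive(Debug, Clone)]
struct SyntaxNode {
    kind: &'static str,
    children: Vec<SyntaxNode>,
}

trait AstNode: Clone {
    fn cast(syntax: SyntaxNode) -> Option<Self>
    where
        Self: Sized;
    fn syntax(&self) -> &SyntaxNode;
}

#[derive(Debug, Clone)]
struct StructDef {
    syntax: SyntaxNode,
}

impl AstNode for StructDef {
    fn cast(syntax: SyntaxNode) -> Option<Self> {
        if syntax.kind == "STRUCT_DEF" {
            Some(StructDef { syntax })
        } else {
            None
        }
    }
    fn syntax(&self) -> &SyntaxNode {
        &self.syntax
    }
}

// `children` yields owned wrappers; callers no longer borrow the parent node.
fn children<C: AstNode>(parent: &SyntaxNode) -> impl Iterator<Item = C> + '_ {
    parent.children.iter().cloned().filter_map(C::cast)
}

fn main() {
    let file = SyntaxNode {
        kind: "SOURCE_FILE",
        children: vec![
            SyntaxNode { kind: "USE_ITEM", children: vec![] },
            SyntaxNode { kind: "STRUCT_DEF", children: vec![] },
        ],
    };
    let structs: Vec<StructDef> = children(&file).collect();
    assert_eq!(structs[0].syntax().kind, "STRUCT_DEF");
}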
diff --git a/crates/ra_syntax/src/ast/expr_extensions.rs b/crates/ra_syntax/src/ast/expr_extensions.rs index 4355e3587..ca1773908 100644 --- a/crates/ra_syntax/src/ast/expr_extensions.rs +++ b/crates/ra_syntax/src/ast/expr_extensions.rs | |||
@@ -8,20 +8,20 @@ use crate::{ | |||
8 | }; | 8 | }; |
9 | 9 | ||
10 | #[derive(Debug, Clone, PartialEq, Eq)] | 10 | #[derive(Debug, Clone, PartialEq, Eq)] |
11 | pub enum ElseBranch<'a> { | 11 | pub enum ElseBranch { |
12 | Block(&'a ast::Block), | 12 | Block(ast::Block), |
13 | IfExpr(&'a ast::IfExpr), | 13 | IfExpr(ast::IfExpr), |
14 | } | 14 | } |
15 | 15 | ||
16 | impl ast::IfExpr { | 16 | impl ast::IfExpr { |
17 | pub fn then_branch(&self) -> Option<&ast::Block> { | 17 | pub fn then_branch(&self) -> Option<ast::Block> { |
18 | self.blocks().nth(0) | 18 | self.blocks().nth(0) |
19 | } | 19 | } |
20 | pub fn else_branch(&self) -> Option<ElseBranch> { | 20 | pub fn else_branch(&self) -> Option<ElseBranch> { |
21 | let res = match self.blocks().nth(1) { | 21 | let res = match self.blocks().nth(1) { |
22 | Some(block) => ElseBranch::Block(block), | 22 | Some(block) => ElseBranch::Block(block), |
23 | None => { | 23 | None => { |
24 | let elif: &ast::IfExpr = child_opt(self)?; | 24 | let elif: ast::IfExpr = child_opt(self)?; |
25 | ElseBranch::IfExpr(elif) | 25 | ElseBranch::IfExpr(elif) |
26 | } | 26 | } |
27 | }; | 27 | }; |
@@ -60,7 +60,7 @@ impl ast::PrefixExpr { | |||
60 | } | 60 | } |
61 | 61 | ||
62 | pub fn op_token(&self) -> Option<SyntaxToken> { | 62 | pub fn op_token(&self) -> Option<SyntaxToken> { |
63 | self.syntax().first_child_or_token()?.as_token() | 63 | self.syntax().first_child_or_token()?.as_token().cloned() |
64 | } | 64 | } |
65 | } | 65 | } |
66 | 66 | ||
@@ -132,7 +132,7 @@ pub enum BinOp { | |||
132 | 132 | ||
133 | impl ast::BinExpr { | 133 | impl ast::BinExpr { |
134 | fn op_details(&self) -> Option<(SyntaxToken, BinOp)> { | 134 | fn op_details(&self) -> Option<(SyntaxToken, BinOp)> { |
135 | self.syntax().children_with_tokens().filter_map(|it| it.as_token()).find_map(|c| { | 135 | self.syntax().children_with_tokens().filter_map(|it| it.as_token().cloned()).find_map(|c| { |
136 | match c.kind() { | 136 | match c.kind() { |
137 | T![||] => Some((c, BinOp::BooleanOr)), | 137 | T![||] => Some((c, BinOp::BooleanOr)), |
138 | T![&&] => Some((c, BinOp::BooleanAnd)), | 138 | T![&&] => Some((c, BinOp::BooleanAnd)), |
@@ -178,15 +178,15 @@ impl ast::BinExpr { | |||
178 | self.op_details().map(|t| t.0) | 178 | self.op_details().map(|t| t.0) |
179 | } | 179 | } |
180 | 180 | ||
181 | pub fn lhs(&self) -> Option<&ast::Expr> { | 181 | pub fn lhs(&self) -> Option<ast::Expr> { |
182 | children(self).nth(0) | 182 | children(self).nth(0) |
183 | } | 183 | } |
184 | 184 | ||
185 | pub fn rhs(&self) -> Option<&ast::Expr> { | 185 | pub fn rhs(&self) -> Option<ast::Expr> { |
186 | children(self).nth(1) | 186 | children(self).nth(1) |
187 | } | 187 | } |
188 | 188 | ||
189 | pub fn sub_exprs(&self) -> (Option<&ast::Expr>, Option<&ast::Expr>) { | 189 | pub fn sub_exprs(&self) -> (Option<ast::Expr>, Option<ast::Expr>) { |
190 | let mut children = children(self); | 190 | let mut children = children(self); |
191 | let first = children.next(); | 191 | let first = children.next(); |
192 | let second = children.next(); | 192 | let second = children.next(); |
@@ -194,9 +194,9 @@ impl ast::BinExpr { | |||
194 | } | 194 | } |
195 | } | 195 | } |
196 | 196 | ||
197 | pub enum ArrayExprKind<'a> { | 197 | pub enum ArrayExprKind { |
198 | Repeat { initializer: Option<&'a ast::Expr>, repeat: Option<&'a ast::Expr> }, | 198 | Repeat { initializer: Option<ast::Expr>, repeat: Option<ast::Expr> }, |
199 | ElementList(AstChildren<'a, ast::Expr>), | 199 | ElementList(AstChildren<ast::Expr>), |
200 | } | 200 | } |
201 | 201 | ||
202 | impl ast::ArrayExpr { | 202 | impl ast::ArrayExpr { |
@@ -275,12 +275,12 @@ impl ast::Literal { | |||
275 | #[test] | 275 | #[test] |
276 | fn test_literal_with_attr() { | 276 | fn test_literal_with_attr() { |
277 | let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#); | 277 | let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#); |
278 | let lit = parse.tree.syntax().descendants().find_map(ast::Literal::cast).unwrap(); | 278 | let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap(); |
279 | assert_eq!(lit.token().text(), r#""Hello""#); | 279 | assert_eq!(lit.token().text(), r#""Hello""#); |
280 | } | 280 | } |
281 | 281 | ||
282 | impl ast::NamedField { | 282 | impl ast::NamedField { |
283 | pub fn parent_struct_lit(&self) -> &ast::StructLit { | 283 | pub fn parent_struct_lit(&self) -> ast::StructLit { |
284 | self.syntax().ancestors().find_map(ast::StructLit::cast).unwrap() | 284 | self.syntax().ancestors().find_map(ast::StructLit::cast).unwrap() |
285 | } | 285 | } |
286 | } | 286 | } |
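Helper enums follow the same shift: once nodes are owned, `ElseBranch` and friends store `ast::Block`/`ast::IfExpr` directly and the `'a` parameter disappears from every signature that returns them. A standalone sketch under that assumption, with mock `Block`/`IfExpr` types:

#[derive(Debug, Clone)]
struct Block {
    text: String,
}

#[derive(Debug, Clone)]
struct IfExpr {
    text: String,
}

#[derive(Debug, Clone)]
enum ElseBranch {
    Block(Block),
    IfExpr(IfExpr),
}

// Returning `Option<ElseBranch>` no longer ties the result to the parent's lifetime.
fn else_branch(blocks: &[Block], elif: Option<&IfExpr>) -> Option<ElseBranch> {
    match blocks.get(1) {
        Some(block) => Some(ElseBranch::Block(block.clone())),
        None => elif.cloned().map(ElseBranch::IfExpr),
    }
}

fn main() {
    let blocks = vec![Block { text: "then".into() }];
    let elif = IfExpr { text: "else if".into() };
    match else_branch(&blocks, Some(&elif)) {
        Some(ElseBranch::IfExpr(e)) => println!("else branch: {}", e.text),
        Some(ElseBranch::Block(b)) => println!("else block: {}", b.text),
        None => println!("no else branch"),
    }
}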
diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs index 72a30232d..5420f67ff 100644 --- a/crates/ra_syntax/src/ast/extensions.rs +++ b/crates/ra_syntax/src/ast/extensions.rs | |||
@@ -4,7 +4,7 @@ | |||
4 | use itertools::Itertools; | 4 | use itertools::Itertools; |
5 | 5 | ||
6 | use crate::{ | 6 | use crate::{ |
7 | ast::{self, child_opt, children, AstNode}, | 7 | ast::{self, child_opt, children, AstNode, SyntaxNode}, |
8 | SmolStr, SyntaxElement, | 8 | SmolStr, SyntaxElement, |
9 | SyntaxKind::*, | 9 | SyntaxKind::*, |
10 | SyntaxToken, T, | 10 | SyntaxToken, T, |
@@ -13,15 +13,20 @@ use ra_parser::SyntaxKind; | |||
13 | 13 | ||
14 | impl ast::Name { | 14 | impl ast::Name { |
15 | pub fn text(&self) -> &SmolStr { | 15 | pub fn text(&self) -> &SmolStr { |
16 | let ident = self.syntax().first_child_or_token().unwrap().as_token().unwrap(); | 16 | text_of_first_token(self.syntax()) |
17 | ident.text() | ||
18 | } | 17 | } |
19 | } | 18 | } |
20 | 19 | ||
21 | impl ast::NameRef { | 20 | impl ast::NameRef { |
22 | pub fn text(&self) -> &SmolStr { | 21 | pub fn text(&self) -> &SmolStr { |
23 | let ident = self.syntax().first_child_or_token().unwrap().as_token().unwrap(); | 22 | text_of_first_token(self.syntax()) |
24 | ident.text() | 23 | } |
24 | } | ||
25 | |||
26 | fn text_of_first_token(node: &SyntaxNode) -> &SmolStr { | ||
27 | match node.0.green().children().first() { | ||
28 | Some(rowan::GreenElement::Token(it)) => it.text(), | ||
29 | _ => panic!(), | ||
25 | } | 30 | } |
26 | } | 31 | } |
27 | 32 | ||
@@ -50,10 +55,10 @@ impl ast::Attr { | |||
50 | } | 55 | } |
51 | } | 56 | } |
52 | 57 | ||
53 | pub fn as_call(&self) -> Option<(SmolStr, &ast::TokenTree)> { | 58 | pub fn as_call(&self) -> Option<(SmolStr, ast::TokenTree)> { |
54 | let tt = self.value()?; | 59 | let tt = self.value()?; |
55 | let (_bra, attr, args, _ket) = tt.syntax().children_with_tokens().collect_tuple()?; | 60 | let (_bra, attr, args, _ket) = tt.syntax().children_with_tokens().collect_tuple()?; |
56 | let args = ast::TokenTree::cast(args.as_node()?)?; | 61 | let args = ast::TokenTree::cast(args.as_node()?.clone())?; |
57 | if attr.kind() == IDENT { | 62 | if attr.kind() == IDENT { |
58 | Some((attr.as_token()?.text().clone(), args)) | 63 | Some((attr.as_token()?.text().clone(), args)) |
59 | } else { | 64 | } else { |
@@ -86,16 +91,16 @@ impl ast::Attr { | |||
86 | } | 91 | } |
87 | } | 92 | } |
88 | 93 | ||
89 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] | 94 | #[derive(Debug, Clone, PartialEq, Eq)] |
90 | pub enum PathSegmentKind<'a> { | 95 | pub enum PathSegmentKind { |
91 | Name(&'a ast::NameRef), | 96 | Name(ast::NameRef), |
92 | SelfKw, | 97 | SelfKw, |
93 | SuperKw, | 98 | SuperKw, |
94 | CrateKw, | 99 | CrateKw, |
95 | } | 100 | } |
96 | 101 | ||
97 | impl ast::PathSegment { | 102 | impl ast::PathSegment { |
98 | pub fn parent_path(&self) -> &ast::Path { | 103 | pub fn parent_path(&self) -> ast::Path { |
99 | self.syntax() | 104 | self.syntax() |
100 | .parent() | 105 | .parent() |
101 | .and_then(ast::Path::cast) | 106 | .and_then(ast::Path::cast) |
@@ -125,7 +130,7 @@ impl ast::PathSegment { | |||
125 | } | 130 | } |
126 | 131 | ||
127 | impl ast::Path { | 132 | impl ast::Path { |
128 | pub fn parent_path(&self) -> Option<&ast::Path> { | 133 | pub fn parent_path(&self) -> Option<ast::Path> { |
129 | self.syntax().parent().and_then(ast::Path::cast) | 134 | self.syntax().parent().and_then(ast::Path::cast) |
130 | } | 135 | } |
131 | } | 136 | } |
@@ -146,7 +151,7 @@ impl ast::UseTree { | |||
146 | } | 151 | } |
147 | 152 | ||
148 | impl ast::UseTreeList { | 153 | impl ast::UseTreeList { |
149 | pub fn parent_use_tree(&self) -> &ast::UseTree { | 154 | pub fn parent_use_tree(&self) -> ast::UseTree { |
150 | self.syntax() | 155 | self.syntax() |
151 | .parent() | 156 | .parent() |
152 | .and_then(ast::UseTree::cast) | 157 | .and_then(ast::UseTree::cast) |
@@ -155,21 +160,21 @@ impl ast::UseTreeList { | |||
155 | } | 160 | } |
156 | 161 | ||
157 | impl ast::ImplBlock { | 162 | impl ast::ImplBlock { |
158 | pub fn target_type(&self) -> Option<&ast::TypeRef> { | 163 | pub fn target_type(&self) -> Option<ast::TypeRef> { |
159 | match self.target() { | 164 | match self.target() { |
160 | (Some(t), None) | (_, Some(t)) => Some(t), | 165 | (Some(t), None) | (_, Some(t)) => Some(t), |
161 | _ => None, | 166 | _ => None, |
162 | } | 167 | } |
163 | } | 168 | } |
164 | 169 | ||
165 | pub fn target_trait(&self) -> Option<&ast::TypeRef> { | 170 | pub fn target_trait(&self) -> Option<ast::TypeRef> { |
166 | match self.target() { | 171 | match self.target() { |
167 | (Some(t), Some(_)) => Some(t), | 172 | (Some(t), Some(_)) => Some(t), |
168 | _ => None, | 173 | _ => None, |
169 | } | 174 | } |
170 | } | 175 | } |
171 | 176 | ||
172 | fn target(&self) -> (Option<&ast::TypeRef>, Option<&ast::TypeRef>) { | 177 | fn target(&self) -> (Option<ast::TypeRef>, Option<ast::TypeRef>) { |
173 | let mut types = children(self); | 178 | let mut types = children(self); |
174 | let first = types.next(); | 179 | let first = types.next(); |
175 | let second = types.next(); | 180 | let second = types.next(); |
@@ -182,13 +187,13 @@ impl ast::ImplBlock { | |||
182 | } | 187 | } |
183 | 188 | ||
184 | #[derive(Debug, Clone, PartialEq, Eq)] | 189 | #[derive(Debug, Clone, PartialEq, Eq)] |
185 | pub enum StructKind<'a> { | 190 | pub enum StructKind { |
186 | Tuple(&'a ast::PosFieldDefList), | 191 | Tuple(ast::PosFieldDefList), |
187 | Named(&'a ast::NamedFieldDefList), | 192 | Named(ast::NamedFieldDefList), |
188 | Unit, | 193 | Unit, |
189 | } | 194 | } |
190 | 195 | ||
191 | impl StructKind<'_> { | 196 | impl StructKind { |
192 | fn from_node<N: AstNode>(node: &N) -> StructKind { | 197 | fn from_node<N: AstNode>(node: &N) -> StructKind { |
193 | if let Some(nfdl) = child_opt::<_, ast::NamedFieldDefList>(node) { | 198 | if let Some(nfdl) = child_opt::<_, ast::NamedFieldDefList>(node) { |
194 | StructKind::Named(nfdl) | 199 | StructKind::Named(nfdl) |
@@ -218,7 +223,7 @@ impl ast::StructDef { | |||
218 | } | 223 | } |
219 | 224 | ||
220 | impl ast::EnumVariant { | 225 | impl ast::EnumVariant { |
221 | pub fn parent_enum(&self) -> &ast::EnumDef { | 226 | pub fn parent_enum(&self) -> ast::EnumDef { |
222 | self.syntax() | 227 | self.syntax() |
223 | .parent() | 228 | .parent() |
224 | .and_then(|it| it.parent()) | 229 | .and_then(|it| it.parent()) |
@@ -231,10 +236,10 @@ impl ast::EnumVariant { | |||
231 | } | 236 | } |
232 | 237 | ||
233 | impl ast::FnDef { | 238 | impl ast::FnDef { |
234 | pub fn semicolon_token(&self) -> Option<SyntaxToken<'_>> { | 239 | pub fn semicolon_token(&self) -> Option<SyntaxToken> { |
235 | self.syntax() | 240 | self.syntax() |
236 | .last_child_or_token() | 241 | .last_child_or_token() |
237 | .and_then(|it| it.as_token()) | 242 | .and_then(|it| it.as_token().cloned()) |
238 | .filter(|it| it.kind() == T![;]) | 243 | .filter(|it| it.kind() == T![;]) |
239 | } | 244 | } |
240 | } | 245 | } |
@@ -258,9 +263,9 @@ impl ast::ExprStmt { | |||
258 | } | 263 | } |
259 | 264 | ||
260 | #[derive(Debug, Clone, PartialEq, Eq)] | 265 | #[derive(Debug, Clone, PartialEq, Eq)] |
261 | pub enum FieldKind<'a> { | 266 | pub enum FieldKind { |
262 | Name(&'a ast::NameRef), | 267 | Name(ast::NameRef), |
263 | Index(SyntaxToken<'a>), | 268 | Index(SyntaxToken), |
264 | } | 269 | } |
265 | 270 | ||
266 | impl ast::FieldExpr { | 271 | impl ast::FieldExpr { |
@@ -271,6 +276,7 @@ impl ast::FieldExpr { | |||
271 | .find(|c| c.kind() == SyntaxKind::INT_NUMBER || c.kind() == SyntaxKind::FLOAT_NUMBER) | 276 | .find(|c| c.kind() == SyntaxKind::INT_NUMBER || c.kind() == SyntaxKind::FLOAT_NUMBER) |
272 | .as_ref() | 277 | .as_ref() |
273 | .and_then(SyntaxElement::as_token) | 278 | .and_then(SyntaxElement::as_token) |
279 | .cloned() | ||
274 | } | 280 | } |
275 | 281 | ||
276 | pub fn field_access(&self) -> Option<FieldKind> { | 282 | pub fn field_access(&self) -> Option<FieldKind> { |
@@ -326,7 +332,7 @@ impl ast::SelfParam { | |||
326 | pub fn self_kw_token(&self) -> SyntaxToken { | 332 | pub fn self_kw_token(&self) -> SyntaxToken { |
327 | self.syntax() | 333 | self.syntax() |
328 | .children_with_tokens() | 334 | .children_with_tokens() |
329 | .filter_map(|it| it.as_token()) | 335 | .filter_map(|it| it.as_token().cloned()) |
330 | .find(|it| it.kind() == T![self]) | 336 | .find(|it| it.kind() == T![self]) |
331 | .expect("invalid tree: self param must have self") | 337 | .expect("invalid tree: self param must have self") |
332 | } | 338 | } |
@@ -355,7 +361,7 @@ impl ast::LifetimeParam { | |||
355 | pub fn lifetime_token(&self) -> Option<SyntaxToken> { | 361 | pub fn lifetime_token(&self) -> Option<SyntaxToken> { |
356 | self.syntax() | 362 | self.syntax() |
357 | .children_with_tokens() | 363 | .children_with_tokens() |
358 | .filter_map(|it| it.as_token()) | 364 | .filter_map(|it| it.as_token().cloned()) |
359 | .find(|it| it.kind() == LIFETIME) | 365 | .find(|it| it.kind() == LIFETIME) |
360 | } | 366 | } |
361 | } | 367 | } |
@@ -364,7 +370,7 @@ impl ast::WherePred { | |||
364 | pub fn lifetime_token(&self) -> Option<SyntaxToken> { | 370 | pub fn lifetime_token(&self) -> Option<SyntaxToken> { |
365 | self.syntax() | 371 | self.syntax() |
366 | .children_with_tokens() | 372 | .children_with_tokens() |
367 | .filter_map(|it| it.as_token()) | 373 | .filter_map(|it| it.as_token().cloned()) |
368 | .find(|it| it.kind() == LIFETIME) | 374 | .find(|it| it.kind() == LIFETIME) |
369 | } | 375 | } |
370 | } | 376 | } |
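Token accessors change shape too: `as_token()` now yields a reference into the element, so call sites that need an owned `SyntaxToken` append `.cloned()`, as in the hunks above. A small mock sketch of that pattern (the `SyntaxElement` here is a stand-in, not rowan's):

#[derive(Debug, Clone)]
struct SyntaxToken {
    kind: &'static str,
}

#[derive(Debug, Clone)]
enum SyntaxElement {
    Node(&'static str),
    Token(SyntaxToken),
}

impl SyntaxElement {
    // Borrows the token out of the element; owning callers clone explicitly.
    fn as_token(&self) -> Option<&SyntaxToken> {
        match self {
            SyntaxElement::Token(t) => Some(t),
            SyntaxElement::Node(_) => None,
        }
    }
}

fn lifetime_token(children: &[SyntaxElement]) -> Option<SyntaxToken> {
    children
        .iter()
        .filter_map(|it| it.as_token().cloned())
        .find(|it| it.kind == "LIFETIME")
}

fn main() {
    let children = vec![
        SyntaxElement::Node("TYPE_PARAM"),
        SyntaxElement::Token(SyntaxToken { kind: "LIFETIME" }),
    ];
    match lifetime_token(&children) {
        Some(tok) => println!("found token: {}", tok.kind),
        None => println!("no lifetime token"),
    }
}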
diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs index 1d888e709..a1f320257 100644 --- a/crates/ra_syntax/src/ast/generated.rs +++ b/crates/ra_syntax/src/ast/generated.rs | |||
@@ -9,503 +9,365 @@ | |||
9 | 9 | ||
10 | #![cfg_attr(rustfmt, rustfmt_skip)] | 10 | #![cfg_attr(rustfmt, rustfmt_skip)] |
11 | 11 | ||
12 | use rowan::TransparentNewType; | ||
13 | |||
14 | use crate::{ | 12 | use crate::{ |
15 | SyntaxNode, SyntaxKind::*, | 13 | SyntaxNode, SyntaxKind::*, |
16 | syntax_node::{TreeArc}, | ||
17 | ast::{self, AstNode}, | 14 | ast::{self, AstNode}, |
18 | }; | 15 | }; |
19 | 16 | ||
20 | // Alias | 17 | // Alias |
21 | #[derive(Debug, PartialEq, Eq, Hash)] | 18 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
22 | #[repr(transparent)] | ||
23 | pub struct Alias { | 19 | pub struct Alias { |
24 | pub(crate) syntax: SyntaxNode, | 20 | pub(crate) syntax: SyntaxNode, |
25 | } | 21 | } |
26 | unsafe impl TransparentNewType for Alias { | ||
27 | type Repr = rowan::SyntaxNode; | ||
28 | } | ||
29 | 22 | ||
30 | impl AstNode for Alias { | 23 | impl AstNode for Alias { |
31 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 24 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
32 | match syntax.kind() { | 25 | match syntax.kind() { |
33 | ALIAS => Some(Alias::from_repr(syntax.into_repr())), | 26 | ALIAS => Some(Alias { syntax }), |
34 | _ => None, | 27 | _ => None, |
35 | } | 28 | } |
36 | } | 29 | } |
37 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 30 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
38 | } | 31 | } |
39 | 32 | ||
40 | impl ToOwned for Alias { | ||
41 | type Owned = TreeArc<Alias>; | ||
42 | fn to_owned(&self) -> TreeArc<Alias> { TreeArc::cast(self.syntax.to_owned()) } | ||
43 | } | ||
44 | |||
45 | 33 | ||
46 | impl ast::NameOwner for Alias {} | 34 | impl ast::NameOwner for Alias {} |
47 | impl Alias {} | 35 | impl Alias {} |
48 | 36 | ||
49 | // ArgList | 37 | // ArgList |
50 | #[derive(Debug, PartialEq, Eq, Hash)] | 38 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
51 | #[repr(transparent)] | ||
52 | pub struct ArgList { | 39 | pub struct ArgList { |
53 | pub(crate) syntax: SyntaxNode, | 40 | pub(crate) syntax: SyntaxNode, |
54 | } | 41 | } |
55 | unsafe impl TransparentNewType for ArgList { | ||
56 | type Repr = rowan::SyntaxNode; | ||
57 | } | ||
58 | 42 | ||
59 | impl AstNode for ArgList { | 43 | impl AstNode for ArgList { |
60 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 44 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
61 | match syntax.kind() { | 45 | match syntax.kind() { |
62 | ARG_LIST => Some(ArgList::from_repr(syntax.into_repr())), | 46 | ARG_LIST => Some(ArgList { syntax }), |
63 | _ => None, | 47 | _ => None, |
64 | } | 48 | } |
65 | } | 49 | } |
66 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 50 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
67 | } | 51 | } |
68 | 52 | ||
69 | impl ToOwned for ArgList { | ||
70 | type Owned = TreeArc<ArgList>; | ||
71 | fn to_owned(&self) -> TreeArc<ArgList> { TreeArc::cast(self.syntax.to_owned()) } | ||
72 | } | ||
73 | |||
74 | 53 | ||
75 | impl ArgList { | 54 | impl ArgList { |
76 | pub fn args(&self) -> impl Iterator<Item = &Expr> { | 55 | pub fn args(&self) -> impl Iterator<Item = Expr> { |
77 | super::children(self) | 56 | super::children(self) |
78 | } | 57 | } |
79 | } | 58 | } |
80 | 59 | ||
81 | // ArrayExpr | 60 | // ArrayExpr |
82 | #[derive(Debug, PartialEq, Eq, Hash)] | 61 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
83 | #[repr(transparent)] | ||
84 | pub struct ArrayExpr { | 62 | pub struct ArrayExpr { |
85 | pub(crate) syntax: SyntaxNode, | 63 | pub(crate) syntax: SyntaxNode, |
86 | } | 64 | } |
87 | unsafe impl TransparentNewType for ArrayExpr { | ||
88 | type Repr = rowan::SyntaxNode; | ||
89 | } | ||
90 | 65 | ||
91 | impl AstNode for ArrayExpr { | 66 | impl AstNode for ArrayExpr { |
92 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 67 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
93 | match syntax.kind() { | 68 | match syntax.kind() { |
94 | ARRAY_EXPR => Some(ArrayExpr::from_repr(syntax.into_repr())), | 69 | ARRAY_EXPR => Some(ArrayExpr { syntax }), |
95 | _ => None, | 70 | _ => None, |
96 | } | 71 | } |
97 | } | 72 | } |
98 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 73 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
99 | } | 74 | } |
100 | 75 | ||
101 | impl ToOwned for ArrayExpr { | ||
102 | type Owned = TreeArc<ArrayExpr>; | ||
103 | fn to_owned(&self) -> TreeArc<ArrayExpr> { TreeArc::cast(self.syntax.to_owned()) } | ||
104 | } | ||
105 | |||
106 | 76 | ||
107 | impl ArrayExpr { | 77 | impl ArrayExpr { |
108 | pub fn exprs(&self) -> impl Iterator<Item = &Expr> { | 78 | pub fn exprs(&self) -> impl Iterator<Item = Expr> { |
109 | super::children(self) | 79 | super::children(self) |
110 | } | 80 | } |
111 | } | 81 | } |
112 | 82 | ||
113 | // ArrayType | 83 | // ArrayType |
114 | #[derive(Debug, PartialEq, Eq, Hash)] | 84 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
115 | #[repr(transparent)] | ||
116 | pub struct ArrayType { | 85 | pub struct ArrayType { |
117 | pub(crate) syntax: SyntaxNode, | 86 | pub(crate) syntax: SyntaxNode, |
118 | } | 87 | } |
119 | unsafe impl TransparentNewType for ArrayType { | ||
120 | type Repr = rowan::SyntaxNode; | ||
121 | } | ||
122 | 88 | ||
123 | impl AstNode for ArrayType { | 89 | impl AstNode for ArrayType { |
124 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 90 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
125 | match syntax.kind() { | 91 | match syntax.kind() { |
126 | ARRAY_TYPE => Some(ArrayType::from_repr(syntax.into_repr())), | 92 | ARRAY_TYPE => Some(ArrayType { syntax }), |
127 | _ => None, | 93 | _ => None, |
128 | } | 94 | } |
129 | } | 95 | } |
130 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 96 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
131 | } | 97 | } |
132 | 98 | ||
133 | impl ToOwned for ArrayType { | ||
134 | type Owned = TreeArc<ArrayType>; | ||
135 | fn to_owned(&self) -> TreeArc<ArrayType> { TreeArc::cast(self.syntax.to_owned()) } | ||
136 | } | ||
137 | |||
138 | 99 | ||
139 | impl ArrayType { | 100 | impl ArrayType { |
140 | pub fn type_ref(&self) -> Option<&TypeRef> { | 101 | pub fn type_ref(&self) -> Option<TypeRef> { |
141 | super::child_opt(self) | 102 | super::child_opt(self) |
142 | } | 103 | } |
143 | 104 | ||
144 | pub fn expr(&self) -> Option<&Expr> { | 105 | pub fn expr(&self) -> Option<Expr> { |
145 | super::child_opt(self) | 106 | super::child_opt(self) |
146 | } | 107 | } |
147 | } | 108 | } |
148 | 109 | ||
149 | // AssocTypeArg | 110 | // AssocTypeArg |
150 | #[derive(Debug, PartialEq, Eq, Hash)] | 111 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
151 | #[repr(transparent)] | ||
152 | pub struct AssocTypeArg { | 112 | pub struct AssocTypeArg { |
153 | pub(crate) syntax: SyntaxNode, | 113 | pub(crate) syntax: SyntaxNode, |
154 | } | 114 | } |
155 | unsafe impl TransparentNewType for AssocTypeArg { | ||
156 | type Repr = rowan::SyntaxNode; | ||
157 | } | ||
158 | 115 | ||
159 | impl AstNode for AssocTypeArg { | 116 | impl AstNode for AssocTypeArg { |
160 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 117 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
161 | match syntax.kind() { | 118 | match syntax.kind() { |
162 | ASSOC_TYPE_ARG => Some(AssocTypeArg::from_repr(syntax.into_repr())), | 119 | ASSOC_TYPE_ARG => Some(AssocTypeArg { syntax }), |
163 | _ => None, | 120 | _ => None, |
164 | } | 121 | } |
165 | } | 122 | } |
166 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 123 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
167 | } | 124 | } |
168 | 125 | ||
169 | impl ToOwned for AssocTypeArg { | ||
170 | type Owned = TreeArc<AssocTypeArg>; | ||
171 | fn to_owned(&self) -> TreeArc<AssocTypeArg> { TreeArc::cast(self.syntax.to_owned()) } | ||
172 | } | ||
173 | |||
174 | 126 | ||
175 | impl AssocTypeArg { | 127 | impl AssocTypeArg { |
176 | pub fn name_ref(&self) -> Option<&NameRef> { | 128 | pub fn name_ref(&self) -> Option<NameRef> { |
177 | super::child_opt(self) | 129 | super::child_opt(self) |
178 | } | 130 | } |
179 | 131 | ||
180 | pub fn type_ref(&self) -> Option<&TypeRef> { | 132 | pub fn type_ref(&self) -> Option<TypeRef> { |
181 | super::child_opt(self) | 133 | super::child_opt(self) |
182 | } | 134 | } |
183 | } | 135 | } |
184 | 136 | ||
185 | // Attr | 137 | // Attr |
186 | #[derive(Debug, PartialEq, Eq, Hash)] | 138 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
187 | #[repr(transparent)] | ||
188 | pub struct Attr { | 139 | pub struct Attr { |
189 | pub(crate) syntax: SyntaxNode, | 140 | pub(crate) syntax: SyntaxNode, |
190 | } | 141 | } |
191 | unsafe impl TransparentNewType for Attr { | ||
192 | type Repr = rowan::SyntaxNode; | ||
193 | } | ||
194 | 142 | ||
195 | impl AstNode for Attr { | 143 | impl AstNode for Attr { |
196 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 144 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
197 | match syntax.kind() { | 145 | match syntax.kind() { |
198 | ATTR => Some(Attr::from_repr(syntax.into_repr())), | 146 | ATTR => Some(Attr { syntax }), |
199 | _ => None, | 147 | _ => None, |
200 | } | 148 | } |
201 | } | 149 | } |
202 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 150 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
203 | } | 151 | } |
204 | 152 | ||
205 | impl ToOwned for Attr { | ||
206 | type Owned = TreeArc<Attr>; | ||
207 | fn to_owned(&self) -> TreeArc<Attr> { TreeArc::cast(self.syntax.to_owned()) } | ||
208 | } | ||
209 | |||
210 | 153 | ||
211 | impl Attr { | 154 | impl Attr { |
212 | pub fn value(&self) -> Option<&TokenTree> { | 155 | pub fn value(&self) -> Option<TokenTree> { |
213 | super::child_opt(self) | 156 | super::child_opt(self) |
214 | } | 157 | } |
215 | } | 158 | } |
216 | 159 | ||
217 | // BinExpr | 160 | // BinExpr |
218 | #[derive(Debug, PartialEq, Eq, Hash)] | 161 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
219 | #[repr(transparent)] | ||
220 | pub struct BinExpr { | 162 | pub struct BinExpr { |
221 | pub(crate) syntax: SyntaxNode, | 163 | pub(crate) syntax: SyntaxNode, |
222 | } | 164 | } |
223 | unsafe impl TransparentNewType for BinExpr { | ||
224 | type Repr = rowan::SyntaxNode; | ||
225 | } | ||
226 | 165 | ||
227 | impl AstNode for BinExpr { | 166 | impl AstNode for BinExpr { |
228 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 167 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
229 | match syntax.kind() { | 168 | match syntax.kind() { |
230 | BIN_EXPR => Some(BinExpr::from_repr(syntax.into_repr())), | 169 | BIN_EXPR => Some(BinExpr { syntax }), |
231 | _ => None, | 170 | _ => None, |
232 | } | 171 | } |
233 | } | 172 | } |
234 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 173 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
235 | } | 174 | } |
236 | 175 | ||
237 | impl ToOwned for BinExpr { | ||
238 | type Owned = TreeArc<BinExpr>; | ||
239 | fn to_owned(&self) -> TreeArc<BinExpr> { TreeArc::cast(self.syntax.to_owned()) } | ||
240 | } | ||
241 | |||
242 | 176 | ||
243 | impl BinExpr {} | 177 | impl BinExpr {} |
244 | 178 | ||
245 | // BindPat | 179 | // BindPat |
246 | #[derive(Debug, PartialEq, Eq, Hash)] | 180 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
247 | #[repr(transparent)] | ||
248 | pub struct BindPat { | 181 | pub struct BindPat { |
249 | pub(crate) syntax: SyntaxNode, | 182 | pub(crate) syntax: SyntaxNode, |
250 | } | 183 | } |
251 | unsafe impl TransparentNewType for BindPat { | ||
252 | type Repr = rowan::SyntaxNode; | ||
253 | } | ||
254 | 184 | ||
255 | impl AstNode for BindPat { | 185 | impl AstNode for BindPat { |
256 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 186 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
257 | match syntax.kind() { | 187 | match syntax.kind() { |
258 | BIND_PAT => Some(BindPat::from_repr(syntax.into_repr())), | 188 | BIND_PAT => Some(BindPat { syntax }), |
259 | _ => None, | 189 | _ => None, |
260 | } | 190 | } |
261 | } | 191 | } |
262 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 192 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
263 | } | 193 | } |
264 | 194 | ||
265 | impl ToOwned for BindPat { | ||
266 | type Owned = TreeArc<BindPat>; | ||
267 | fn to_owned(&self) -> TreeArc<BindPat> { TreeArc::cast(self.syntax.to_owned()) } | ||
268 | } | ||
269 | |||
270 | 195 | ||
271 | impl ast::NameOwner for BindPat {} | 196 | impl ast::NameOwner for BindPat {} |
272 | impl BindPat { | 197 | impl BindPat { |
273 | pub fn pat(&self) -> Option<&Pat> { | 198 | pub fn pat(&self) -> Option<Pat> { |
274 | super::child_opt(self) | 199 | super::child_opt(self) |
275 | } | 200 | } |
276 | } | 201 | } |
277 | 202 | ||
278 | // Block | 203 | // Block |
279 | #[derive(Debug, PartialEq, Eq, Hash)] | 204 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
280 | #[repr(transparent)] | ||
281 | pub struct Block { | 205 | pub struct Block { |
282 | pub(crate) syntax: SyntaxNode, | 206 | pub(crate) syntax: SyntaxNode, |
283 | } | 207 | } |
284 | unsafe impl TransparentNewType for Block { | ||
285 | type Repr = rowan::SyntaxNode; | ||
286 | } | ||
287 | 208 | ||
288 | impl AstNode for Block { | 209 | impl AstNode for Block { |
289 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 210 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
290 | match syntax.kind() { | 211 | match syntax.kind() { |
291 | BLOCK => Some(Block::from_repr(syntax.into_repr())), | 212 | BLOCK => Some(Block { syntax }), |
292 | _ => None, | 213 | _ => None, |
293 | } | 214 | } |
294 | } | 215 | } |
295 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 216 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
296 | } | 217 | } |
297 | 218 | ||
298 | impl ToOwned for Block { | ||
299 | type Owned = TreeArc<Block>; | ||
300 | fn to_owned(&self) -> TreeArc<Block> { TreeArc::cast(self.syntax.to_owned()) } | ||
301 | } | ||
302 | |||
303 | 219 | ||
304 | impl ast::AttrsOwner for Block {} | 220 | impl ast::AttrsOwner for Block {} |
305 | impl Block { | 221 | impl Block { |
306 | pub fn statements(&self) -> impl Iterator<Item = &Stmt> { | 222 | pub fn statements(&self) -> impl Iterator<Item = Stmt> { |
307 | super::children(self) | 223 | super::children(self) |
308 | } | 224 | } |
309 | 225 | ||
310 | pub fn expr(&self) -> Option<&Expr> { | 226 | pub fn expr(&self) -> Option<Expr> { |
311 | super::child_opt(self) | 227 | super::child_opt(self) |
312 | } | 228 | } |
313 | } | 229 | } |
314 | 230 | ||
315 | // BlockExpr | 231 | // BlockExpr |
316 | #[derive(Debug, PartialEq, Eq, Hash)] | 232 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
317 | #[repr(transparent)] | ||
318 | pub struct BlockExpr { | 233 | pub struct BlockExpr { |
319 | pub(crate) syntax: SyntaxNode, | 234 | pub(crate) syntax: SyntaxNode, |
320 | } | 235 | } |
321 | unsafe impl TransparentNewType for BlockExpr { | ||
322 | type Repr = rowan::SyntaxNode; | ||
323 | } | ||
324 | 236 | ||
325 | impl AstNode for BlockExpr { | 237 | impl AstNode for BlockExpr { |
326 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 238 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
327 | match syntax.kind() { | 239 | match syntax.kind() { |
328 | BLOCK_EXPR => Some(BlockExpr::from_repr(syntax.into_repr())), | 240 | BLOCK_EXPR => Some(BlockExpr { syntax }), |
329 | _ => None, | 241 | _ => None, |
330 | } | 242 | } |
331 | } | 243 | } |
332 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 244 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
333 | } | 245 | } |
334 | 246 | ||
335 | impl ToOwned for BlockExpr { | ||
336 | type Owned = TreeArc<BlockExpr>; | ||
337 | fn to_owned(&self) -> TreeArc<BlockExpr> { TreeArc::cast(self.syntax.to_owned()) } | ||
338 | } | ||
339 | |||
340 | 247 | ||
341 | impl BlockExpr { | 248 | impl BlockExpr { |
342 | pub fn block(&self) -> Option<&Block> { | 249 | pub fn block(&self) -> Option<Block> { |
343 | super::child_opt(self) | 250 | super::child_opt(self) |
344 | } | 251 | } |
345 | } | 252 | } |
346 | 253 | ||
347 | // BreakExpr | 254 | // BreakExpr |
348 | #[derive(Debug, PartialEq, Eq, Hash)] | 255 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
349 | #[repr(transparent)] | ||
350 | pub struct BreakExpr { | 256 | pub struct BreakExpr { |
351 | pub(crate) syntax: SyntaxNode, | 257 | pub(crate) syntax: SyntaxNode, |
352 | } | 258 | } |
353 | unsafe impl TransparentNewType for BreakExpr { | ||
354 | type Repr = rowan::SyntaxNode; | ||
355 | } | ||
356 | 259 | ||
357 | impl AstNode for BreakExpr { | 260 | impl AstNode for BreakExpr { |
358 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 261 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
359 | match syntax.kind() { | 262 | match syntax.kind() { |
360 | BREAK_EXPR => Some(BreakExpr::from_repr(syntax.into_repr())), | 263 | BREAK_EXPR => Some(BreakExpr { syntax }), |
361 | _ => None, | 264 | _ => None, |
362 | } | 265 | } |
363 | } | 266 | } |
364 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 267 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
365 | } | 268 | } |
366 | 269 | ||
367 | impl ToOwned for BreakExpr { | ||
368 | type Owned = TreeArc<BreakExpr>; | ||
369 | fn to_owned(&self) -> TreeArc<BreakExpr> { TreeArc::cast(self.syntax.to_owned()) } | ||
370 | } | ||
371 | |||
372 | 270 | ||
373 | impl BreakExpr { | 271 | impl BreakExpr { |
374 | pub fn expr(&self) -> Option<&Expr> { | 272 | pub fn expr(&self) -> Option<Expr> { |
375 | super::child_opt(self) | 273 | super::child_opt(self) |
376 | } | 274 | } |
377 | } | 275 | } |
378 | 276 | ||
379 | // CallExpr | 277 | // CallExpr |
380 | #[derive(Debug, PartialEq, Eq, Hash)] | 278 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
381 | #[repr(transparent)] | ||
382 | pub struct CallExpr { | 279 | pub struct CallExpr { |
383 | pub(crate) syntax: SyntaxNode, | 280 | pub(crate) syntax: SyntaxNode, |
384 | } | 281 | } |
385 | unsafe impl TransparentNewType for CallExpr { | ||
386 | type Repr = rowan::SyntaxNode; | ||
387 | } | ||
388 | 282 | ||
389 | impl AstNode for CallExpr { | 283 | impl AstNode for CallExpr { |
390 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 284 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
391 | match syntax.kind() { | 285 | match syntax.kind() { |
392 | CALL_EXPR => Some(CallExpr::from_repr(syntax.into_repr())), | 286 | CALL_EXPR => Some(CallExpr { syntax }), |
393 | _ => None, | 287 | _ => None, |
394 | } | 288 | } |
395 | } | 289 | } |
396 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 290 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
397 | } | 291 | } |
398 | 292 | ||
399 | impl ToOwned for CallExpr { | ||
400 | type Owned = TreeArc<CallExpr>; | ||
401 | fn to_owned(&self) -> TreeArc<CallExpr> { TreeArc::cast(self.syntax.to_owned()) } | ||
402 | } | ||
403 | |||
404 | 293 | ||
405 | impl ast::ArgListOwner for CallExpr {} | 294 | impl ast::ArgListOwner for CallExpr {} |
406 | impl CallExpr { | 295 | impl CallExpr { |
407 | pub fn expr(&self) -> Option<&Expr> { | 296 | pub fn expr(&self) -> Option<Expr> { |
408 | super::child_opt(self) | 297 | super::child_opt(self) |
409 | } | 298 | } |
410 | } | 299 | } |
411 | 300 | ||
412 | // CastExpr | 301 | // CastExpr |
413 | #[derive(Debug, PartialEq, Eq, Hash)] | 302 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
414 | #[repr(transparent)] | ||
415 | pub struct CastExpr { | 303 | pub struct CastExpr { |
416 | pub(crate) syntax: SyntaxNode, | 304 | pub(crate) syntax: SyntaxNode, |
417 | } | 305 | } |
418 | unsafe impl TransparentNewType for CastExpr { | ||
419 | type Repr = rowan::SyntaxNode; | ||
420 | } | ||
421 | 306 | ||
422 | impl AstNode for CastExpr { | 307 | impl AstNode for CastExpr { |
423 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 308 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
424 | match syntax.kind() { | 309 | match syntax.kind() { |
425 | CAST_EXPR => Some(CastExpr::from_repr(syntax.into_repr())), | 310 | CAST_EXPR => Some(CastExpr { syntax }), |
426 | _ => None, | 311 | _ => None, |
427 | } | 312 | } |
428 | } | 313 | } |
429 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 314 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
430 | } | 315 | } |
431 | 316 | ||
432 | impl ToOwned for CastExpr { | ||
433 | type Owned = TreeArc<CastExpr>; | ||
434 | fn to_owned(&self) -> TreeArc<CastExpr> { TreeArc::cast(self.syntax.to_owned()) } | ||
435 | } | ||
436 | |||
437 | 317 | ||
438 | impl CastExpr { | 318 | impl CastExpr { |
439 | pub fn expr(&self) -> Option<&Expr> { | 319 | pub fn expr(&self) -> Option<Expr> { |
440 | super::child_opt(self) | 320 | super::child_opt(self) |
441 | } | 321 | } |
442 | 322 | ||
443 | pub fn type_ref(&self) -> Option<&TypeRef> { | 323 | pub fn type_ref(&self) -> Option<TypeRef> { |
444 | super::child_opt(self) | 324 | super::child_opt(self) |
445 | } | 325 | } |
446 | } | 326 | } |
447 | 327 | ||
448 | // Condition | 328 | // Condition |
449 | #[derive(Debug, PartialEq, Eq, Hash)] | 329 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
450 | #[repr(transparent)] | ||
451 | pub struct Condition { | 330 | pub struct Condition { |
452 | pub(crate) syntax: SyntaxNode, | 331 | pub(crate) syntax: SyntaxNode, |
453 | } | 332 | } |
454 | unsafe impl TransparentNewType for Condition { | ||
455 | type Repr = rowan::SyntaxNode; | ||
456 | } | ||
457 | 333 | ||
458 | impl AstNode for Condition { | 334 | impl AstNode for Condition { |
459 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 335 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
460 | match syntax.kind() { | 336 | match syntax.kind() { |
461 | CONDITION => Some(Condition::from_repr(syntax.into_repr())), | 337 | CONDITION => Some(Condition { syntax }), |
462 | _ => None, | 338 | _ => None, |
463 | } | 339 | } |
464 | } | 340 | } |
465 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 341 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
466 | } | 342 | } |
467 | 343 | ||
468 | impl ToOwned for Condition { | ||
469 | type Owned = TreeArc<Condition>; | ||
470 | fn to_owned(&self) -> TreeArc<Condition> { TreeArc::cast(self.syntax.to_owned()) } | ||
471 | } | ||
472 | |||
473 | 344 | ||
474 | impl Condition { | 345 | impl Condition { |
475 | pub fn pat(&self) -> Option<&Pat> { | 346 | pub fn pat(&self) -> Option<Pat> { |
476 | super::child_opt(self) | 347 | super::child_opt(self) |
477 | } | 348 | } |
478 | 349 | ||
479 | pub fn expr(&self) -> Option<&Expr> { | 350 | pub fn expr(&self) -> Option<Expr> { |
480 | super::child_opt(self) | 351 | super::child_opt(self) |
481 | } | 352 | } |
482 | } | 353 | } |
483 | 354 | ||
484 | // ConstDef | 355 | // ConstDef |
485 | #[derive(Debug, PartialEq, Eq, Hash)] | 356 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
486 | #[repr(transparent)] | ||
487 | pub struct ConstDef { | 357 | pub struct ConstDef { |
488 | pub(crate) syntax: SyntaxNode, | 358 | pub(crate) syntax: SyntaxNode, |
489 | } | 359 | } |
490 | unsafe impl TransparentNewType for ConstDef { | ||
491 | type Repr = rowan::SyntaxNode; | ||
492 | } | ||
493 | 360 | ||
494 | impl AstNode for ConstDef { | 361 | impl AstNode for ConstDef { |
495 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 362 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
496 | match syntax.kind() { | 363 | match syntax.kind() { |
497 | CONST_DEF => Some(ConstDef::from_repr(syntax.into_repr())), | 364 | CONST_DEF => Some(ConstDef { syntax }), |
498 | _ => None, | 365 | _ => None, |
499 | } | 366 | } |
500 | } | 367 | } |
501 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 368 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
502 | } | 369 | } |
503 | 370 | ||
504 | impl ToOwned for ConstDef { | ||
505 | type Owned = TreeArc<ConstDef>; | ||
506 | fn to_owned(&self) -> TreeArc<ConstDef> { TreeArc::cast(self.syntax.to_owned()) } | ||
507 | } | ||
508 | |||
509 | 371 | ||
510 | impl ast::VisibilityOwner for ConstDef {} | 372 | impl ast::VisibilityOwner for ConstDef {} |
511 | impl ast::NameOwner for ConstDef {} | 373 | impl ast::NameOwner for ConstDef {} |
@@ -514,93 +376,66 @@ impl ast::AttrsOwner for ConstDef {} | |||
514 | impl ast::DocCommentsOwner for ConstDef {} | 376 | impl ast::DocCommentsOwner for ConstDef {} |
515 | impl ast::TypeAscriptionOwner for ConstDef {} | 377 | impl ast::TypeAscriptionOwner for ConstDef {} |
516 | impl ConstDef { | 378 | impl ConstDef { |
517 | pub fn body(&self) -> Option<&Expr> { | 379 | pub fn body(&self) -> Option<Expr> { |
518 | super::child_opt(self) | 380 | super::child_opt(self) |
519 | } | 381 | } |
520 | } | 382 | } |
521 | 383 | ||
522 | // ContinueExpr | 384 | // ContinueExpr |
523 | #[derive(Debug, PartialEq, Eq, Hash)] | 385 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
524 | #[repr(transparent)] | ||
525 | pub struct ContinueExpr { | 386 | pub struct ContinueExpr { |
526 | pub(crate) syntax: SyntaxNode, | 387 | pub(crate) syntax: SyntaxNode, |
527 | } | 388 | } |
528 | unsafe impl TransparentNewType for ContinueExpr { | ||
529 | type Repr = rowan::SyntaxNode; | ||
530 | } | ||
531 | 389 | ||
532 | impl AstNode for ContinueExpr { | 390 | impl AstNode for ContinueExpr { |
533 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 391 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
534 | match syntax.kind() { | 392 | match syntax.kind() { |
535 | CONTINUE_EXPR => Some(ContinueExpr::from_repr(syntax.into_repr())), | 393 | CONTINUE_EXPR => Some(ContinueExpr { syntax }), |
536 | _ => None, | 394 | _ => None, |
537 | } | 395 | } |
538 | } | 396 | } |
539 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 397 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
540 | } | 398 | } |
541 | 399 | ||
542 | impl ToOwned for ContinueExpr { | ||
543 | type Owned = TreeArc<ContinueExpr>; | ||
544 | fn to_owned(&self) -> TreeArc<ContinueExpr> { TreeArc::cast(self.syntax.to_owned()) } | ||
545 | } | ||
546 | |||
547 | 400 | ||
548 | impl ContinueExpr {} | 401 | impl ContinueExpr {} |
549 | 402 | ||
550 | // DynTraitType | 403 | // DynTraitType |
551 | #[derive(Debug, PartialEq, Eq, Hash)] | 404 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
552 | #[repr(transparent)] | ||
553 | pub struct DynTraitType { | 405 | pub struct DynTraitType { |
554 | pub(crate) syntax: SyntaxNode, | 406 | pub(crate) syntax: SyntaxNode, |
555 | } | 407 | } |
556 | unsafe impl TransparentNewType for DynTraitType { | ||
557 | type Repr = rowan::SyntaxNode; | ||
558 | } | ||
559 | 408 | ||
560 | impl AstNode for DynTraitType { | 409 | impl AstNode for DynTraitType { |
561 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 410 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
562 | match syntax.kind() { | 411 | match syntax.kind() { |
563 | DYN_TRAIT_TYPE => Some(DynTraitType::from_repr(syntax.into_repr())), | 412 | DYN_TRAIT_TYPE => Some(DynTraitType { syntax }), |
564 | _ => None, | 413 | _ => None, |
565 | } | 414 | } |
566 | } | 415 | } |
567 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 416 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
568 | } | 417 | } |
569 | 418 | ||
570 | impl ToOwned for DynTraitType { | ||
571 | type Owned = TreeArc<DynTraitType>; | ||
572 | fn to_owned(&self) -> TreeArc<DynTraitType> { TreeArc::cast(self.syntax.to_owned()) } | ||
573 | } | ||
574 | |||
575 | 419 | ||
576 | impl ast::TypeBoundsOwner for DynTraitType {} | 420 | impl ast::TypeBoundsOwner for DynTraitType {} |
577 | impl DynTraitType {} | 421 | impl DynTraitType {} |
578 | 422 | ||
579 | // EnumDef | 423 | // EnumDef |
580 | #[derive(Debug, PartialEq, Eq, Hash)] | 424 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
581 | #[repr(transparent)] | ||
582 | pub struct EnumDef { | 425 | pub struct EnumDef { |
583 | pub(crate) syntax: SyntaxNode, | 426 | pub(crate) syntax: SyntaxNode, |
584 | } | 427 | } |
585 | unsafe impl TransparentNewType for EnumDef { | ||
586 | type Repr = rowan::SyntaxNode; | ||
587 | } | ||
588 | 428 | ||
589 | impl AstNode for EnumDef { | 429 | impl AstNode for EnumDef { |
590 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 430 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
591 | match syntax.kind() { | 431 | match syntax.kind() { |
592 | ENUM_DEF => Some(EnumDef::from_repr(syntax.into_repr())), | 432 | ENUM_DEF => Some(EnumDef { syntax }), |
593 | _ => None, | 433 | _ => None, |
594 | } | 434 | } |
595 | } | 435 | } |
596 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 436 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
597 | } | 437 | } |
598 | 438 | ||
599 | impl ToOwned for EnumDef { | ||
600 | type Owned = TreeArc<EnumDef>; | ||
601 | fn to_owned(&self) -> TreeArc<EnumDef> { TreeArc::cast(self.syntax.to_owned()) } | ||
602 | } | ||
603 | |||
604 | 439 | ||
605 | impl ast::VisibilityOwner for EnumDef {} | 440 | impl ast::VisibilityOwner for EnumDef {} |
606 | impl ast::NameOwner for EnumDef {} | 441 | impl ast::NameOwner for EnumDef {} |
@@ -608,269 +443,247 @@ impl ast::TypeParamsOwner for EnumDef {} | |||
608 | impl ast::AttrsOwner for EnumDef {} | 443 | impl ast::AttrsOwner for EnumDef {} |
609 | impl ast::DocCommentsOwner for EnumDef {} | 444 | impl ast::DocCommentsOwner for EnumDef {} |
610 | impl EnumDef { | 445 | impl EnumDef { |
611 | pub fn variant_list(&self) -> Option<&EnumVariantList> { | 446 | pub fn variant_list(&self) -> Option<EnumVariantList> { |
612 | super::child_opt(self) | 447 | super::child_opt(self) |
613 | } | 448 | } |
614 | } | 449 | } |
615 | 450 | ||
616 | // EnumVariant | 451 | // EnumVariant |
617 | #[derive(Debug, PartialEq, Eq, Hash)] | 452 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
618 | #[repr(transparent)] | ||
619 | pub struct EnumVariant { | 453 | pub struct EnumVariant { |
620 | pub(crate) syntax: SyntaxNode, | 454 | pub(crate) syntax: SyntaxNode, |
621 | } | 455 | } |
622 | unsafe impl TransparentNewType for EnumVariant { | ||
623 | type Repr = rowan::SyntaxNode; | ||
624 | } | ||
625 | 456 | ||
626 | impl AstNode for EnumVariant { | 457 | impl AstNode for EnumVariant { |
627 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 458 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
628 | match syntax.kind() { | 459 | match syntax.kind() { |
629 | ENUM_VARIANT => Some(EnumVariant::from_repr(syntax.into_repr())), | 460 | ENUM_VARIANT => Some(EnumVariant { syntax }), |
630 | _ => None, | 461 | _ => None, |
631 | } | 462 | } |
632 | } | 463 | } |
633 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 464 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
634 | } | 465 | } |
635 | 466 | ||
636 | impl ToOwned for EnumVariant { | ||
637 | type Owned = TreeArc<EnumVariant>; | ||
638 | fn to_owned(&self) -> TreeArc<EnumVariant> { TreeArc::cast(self.syntax.to_owned()) } | ||
639 | } | ||
640 | |||
641 | 467 | ||
642 | impl ast::NameOwner for EnumVariant {} | 468 | impl ast::NameOwner for EnumVariant {} |
643 | impl ast::DocCommentsOwner for EnumVariant {} | 469 | impl ast::DocCommentsOwner for EnumVariant {} |
644 | impl ast::AttrsOwner for EnumVariant {} | 470 | impl ast::AttrsOwner for EnumVariant {} |
645 | impl EnumVariant { | 471 | impl EnumVariant { |
646 | pub fn expr(&self) -> Option<&Expr> { | 472 | pub fn expr(&self) -> Option<Expr> { |
647 | super::child_opt(self) | 473 | super::child_opt(self) |
648 | } | 474 | } |
649 | } | 475 | } |
650 | 476 | ||
651 | // EnumVariantList | 477 | // EnumVariantList |
652 | #[derive(Debug, PartialEq, Eq, Hash)] | 478 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
653 | #[repr(transparent)] | ||
654 | pub struct EnumVariantList { | 479 | pub struct EnumVariantList { |
655 | pub(crate) syntax: SyntaxNode, | 480 | pub(crate) syntax: SyntaxNode, |
656 | } | 481 | } |
657 | unsafe impl TransparentNewType for EnumVariantList { | ||
658 | type Repr = rowan::SyntaxNode; | ||
659 | } | ||
660 | 482 | ||
661 | impl AstNode for EnumVariantList { | 483 | impl AstNode for EnumVariantList { |
662 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 484 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
663 | match syntax.kind() { | 485 | match syntax.kind() { |
664 | ENUM_VARIANT_LIST => Some(EnumVariantList::from_repr(syntax.into_repr())), | 486 | ENUM_VARIANT_LIST => Some(EnumVariantList { syntax }), |
665 | _ => None, | 487 | _ => None, |
666 | } | 488 | } |
667 | } | 489 | } |
668 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 490 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
669 | } | 491 | } |
670 | 492 | ||
671 | impl ToOwned for EnumVariantList { | ||
672 | type Owned = TreeArc<EnumVariantList>; | ||
673 | fn to_owned(&self) -> TreeArc<EnumVariantList> { TreeArc::cast(self.syntax.to_owned()) } | ||
674 | } | ||
675 | |||
676 | 493 | ||
677 | impl EnumVariantList { | 494 | impl EnumVariantList { |
678 | pub fn variants(&self) -> impl Iterator<Item = &EnumVariant> { | 495 | pub fn variants(&self) -> impl Iterator<Item = EnumVariant> { |
679 | super::children(self) | 496 | super::children(self) |
680 | } | 497 | } |
681 | } | 498 | } |
682 | 499 | ||
683 | // Expr | 500 | // Expr |
684 | #[derive(Debug, PartialEq, Eq, Hash)] | 501 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
685 | #[repr(transparent)] | ||
686 | pub struct Expr { | 502 | pub struct Expr { |
687 | pub(crate) syntax: SyntaxNode, | 503 | pub(crate) syntax: SyntaxNode, |
688 | } | 504 | } |
689 | unsafe impl TransparentNewType for Expr { | ||
690 | type Repr = rowan::SyntaxNode; | ||
691 | } | ||
692 | 505 | ||
693 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] | 506 | #[derive(Debug, Clone, PartialEq, Eq)] |
694 | pub enum ExprKind<'a> { | 507 | pub enum ExprKind { |
695 | TupleExpr(&'a TupleExpr), | 508 | TupleExpr(TupleExpr), |
696 | ArrayExpr(&'a ArrayExpr), | 509 | ArrayExpr(ArrayExpr), |
697 | ParenExpr(&'a ParenExpr), | 510 | ParenExpr(ParenExpr), |
698 | PathExpr(&'a PathExpr), | 511 | PathExpr(PathExpr), |
699 | LambdaExpr(&'a LambdaExpr), | 512 | LambdaExpr(LambdaExpr), |
700 | IfExpr(&'a IfExpr), | 513 | IfExpr(IfExpr), |
701 | LoopExpr(&'a LoopExpr), | 514 | LoopExpr(LoopExpr), |
702 | ForExpr(&'a ForExpr), | 515 | ForExpr(ForExpr), |
703 | WhileExpr(&'a WhileExpr), | 516 | WhileExpr(WhileExpr), |
704 | ContinueExpr(&'a ContinueExpr), | 517 | ContinueExpr(ContinueExpr), |
705 | BreakExpr(&'a BreakExpr), | 518 | BreakExpr(BreakExpr), |
706 | Label(&'a Label), | 519 | Label(Label), |
707 | BlockExpr(&'a BlockExpr), | 520 | BlockExpr(BlockExpr), |
708 | ReturnExpr(&'a ReturnExpr), | 521 | ReturnExpr(ReturnExpr), |
709 | MatchExpr(&'a MatchExpr), | 522 | MatchExpr(MatchExpr), |
710 | StructLit(&'a StructLit), | 523 | StructLit(StructLit), |
711 | CallExpr(&'a CallExpr), | 524 | CallExpr(CallExpr), |
712 | IndexExpr(&'a IndexExpr), | 525 | IndexExpr(IndexExpr), |
713 | MethodCallExpr(&'a MethodCallExpr), | 526 | MethodCallExpr(MethodCallExpr), |
714 | FieldExpr(&'a FieldExpr), | 527 | FieldExpr(FieldExpr), |
715 | TryExpr(&'a TryExpr), | 528 | TryExpr(TryExpr), |
716 | TryBlockExpr(&'a TryBlockExpr), | 529 | TryBlockExpr(TryBlockExpr), |
717 | CastExpr(&'a CastExpr), | 530 | CastExpr(CastExpr), |
718 | RefExpr(&'a RefExpr), | 531 | RefExpr(RefExpr), |
719 | PrefixExpr(&'a PrefixExpr), | 532 | PrefixExpr(PrefixExpr), |
720 | RangeExpr(&'a RangeExpr), | 533 | RangeExpr(RangeExpr), |
721 | BinExpr(&'a BinExpr), | 534 | BinExpr(BinExpr), |
722 | Literal(&'a Literal), | 535 | Literal(Literal), |
723 | MacroCall(&'a MacroCall), | 536 | MacroCall(MacroCall), |
724 | } | 537 | } |
725 | impl<'a> From<&'a TupleExpr> for &'a Expr { | 538 | impl From<TupleExpr> for Expr { |
726 | fn from(n: &'a TupleExpr) -> &'a Expr { | 539 | fn from(n: TupleExpr) -> Expr { |
727 | Expr::cast(&n.syntax).unwrap() | 540 | Expr::cast(n.syntax).unwrap() |
728 | } | 541 | } |
729 | } | 542 | } |
730 | impl<'a> From<&'a ArrayExpr> for &'a Expr { | 543 | impl From<ArrayExpr> for Expr { |
731 | fn from(n: &'a ArrayExpr) -> &'a Expr { | 544 | fn from(n: ArrayExpr) -> Expr { |
732 | Expr::cast(&n.syntax).unwrap() | 545 | Expr::cast(n.syntax).unwrap() |
733 | } | 546 | } |
734 | } | 547 | } |
735 | impl<'a> From<&'a ParenExpr> for &'a Expr { | 548 | impl From<ParenExpr> for Expr { |
736 | fn from(n: &'a ParenExpr) -> &'a Expr { | 549 | fn from(n: ParenExpr) -> Expr { |
737 | Expr::cast(&n.syntax).unwrap() | 550 | Expr::cast(n.syntax).unwrap() |
738 | } | 551 | } |
739 | } | 552 | } |
740 | impl<'a> From<&'a PathExpr> for &'a Expr { | 553 | impl From<PathExpr> for Expr { |
741 | fn from(n: &'a PathExpr) -> &'a Expr { | 554 | fn from(n: PathExpr) -> Expr { |
742 | Expr::cast(&n.syntax).unwrap() | 555 | Expr::cast(n.syntax).unwrap() |
743 | } | 556 | } |
744 | } | 557 | } |
745 | impl<'a> From<&'a LambdaExpr> for &'a Expr { | 558 | impl From<LambdaExpr> for Expr { |
746 | fn from(n: &'a LambdaExpr) -> &'a Expr { | 559 | fn from(n: LambdaExpr) -> Expr { |
747 | Expr::cast(&n.syntax).unwrap() | 560 | Expr::cast(n.syntax).unwrap() |
748 | } | 561 | } |
749 | } | 562 | } |
750 | impl<'a> From<&'a IfExpr> for &'a Expr { | 563 | impl From<IfExpr> for Expr { |
751 | fn from(n: &'a IfExpr) -> &'a Expr { | 564 | fn from(n: IfExpr) -> Expr { |
752 | Expr::cast(&n.syntax).unwrap() | 565 | Expr::cast(n.syntax).unwrap() |
753 | } | 566 | } |
754 | } | 567 | } |
755 | impl<'a> From<&'a LoopExpr> for &'a Expr { | 568 | impl From<LoopExpr> for Expr { |
756 | fn from(n: &'a LoopExpr) -> &'a Expr { | 569 | fn from(n: LoopExpr) -> Expr { |
757 | Expr::cast(&n.syntax).unwrap() | 570 | Expr::cast(n.syntax).unwrap() |
758 | } | 571 | } |
759 | } | 572 | } |
760 | impl<'a> From<&'a ForExpr> for &'a Expr { | 573 | impl From<ForExpr> for Expr { |
761 | fn from(n: &'a ForExpr) -> &'a Expr { | 574 | fn from(n: ForExpr) -> Expr { |
762 | Expr::cast(&n.syntax).unwrap() | 575 | Expr::cast(n.syntax).unwrap() |
763 | } | 576 | } |
764 | } | 577 | } |
765 | impl<'a> From<&'a WhileExpr> for &'a Expr { | 578 | impl From<WhileExpr> for Expr { |
766 | fn from(n: &'a WhileExpr) -> &'a Expr { | 579 | fn from(n: WhileExpr) -> Expr { |
767 | Expr::cast(&n.syntax).unwrap() | 580 | Expr::cast(n.syntax).unwrap() |
768 | } | 581 | } |
769 | } | 582 | } |
770 | impl<'a> From<&'a ContinueExpr> for &'a Expr { | 583 | impl From<ContinueExpr> for Expr { |
771 | fn from(n: &'a ContinueExpr) -> &'a Expr { | 584 | fn from(n: ContinueExpr) -> Expr { |
772 | Expr::cast(&n.syntax).unwrap() | 585 | Expr::cast(n.syntax).unwrap() |
773 | } | 586 | } |
774 | } | 587 | } |
775 | impl<'a> From<&'a BreakExpr> for &'a Expr { | 588 | impl From<BreakExpr> for Expr { |
776 | fn from(n: &'a BreakExpr) -> &'a Expr { | 589 | fn from(n: BreakExpr) -> Expr { |
777 | Expr::cast(&n.syntax).unwrap() | 590 | Expr::cast(n.syntax).unwrap() |
778 | } | 591 | } |
779 | } | 592 | } |
780 | impl<'a> From<&'a Label> for &'a Expr { | 593 | impl From<Label> for Expr { |
781 | fn from(n: &'a Label) -> &'a Expr { | 594 | fn from(n: Label) -> Expr { |
782 | Expr::cast(&n.syntax).unwrap() | 595 | Expr::cast(n.syntax).unwrap() |
783 | } | 596 | } |
784 | } | 597 | } |
785 | impl<'a> From<&'a BlockExpr> for &'a Expr { | 598 | impl From<BlockExpr> for Expr { |
786 | fn from(n: &'a BlockExpr) -> &'a Expr { | 599 | fn from(n: BlockExpr) -> Expr { |
787 | Expr::cast(&n.syntax).unwrap() | 600 | Expr::cast(n.syntax).unwrap() |
788 | } | 601 | } |
789 | } | 602 | } |
790 | impl<'a> From<&'a ReturnExpr> for &'a Expr { | 603 | impl From<ReturnExpr> for Expr { |
791 | fn from(n: &'a ReturnExpr) -> &'a Expr { | 604 | fn from(n: ReturnExpr) -> Expr { |
792 | Expr::cast(&n.syntax).unwrap() | 605 | Expr::cast(n.syntax).unwrap() |
793 | } | 606 | } |
794 | } | 607 | } |
795 | impl<'a> From<&'a MatchExpr> for &'a Expr { | 608 | impl From<MatchExpr> for Expr { |
796 | fn from(n: &'a MatchExpr) -> &'a Expr { | 609 | fn from(n: MatchExpr) -> Expr { |
797 | Expr::cast(&n.syntax).unwrap() | 610 | Expr::cast(n.syntax).unwrap() |
798 | } | 611 | } |
799 | } | 612 | } |
800 | impl<'a> From<&'a StructLit> for &'a Expr { | 613 | impl From<StructLit> for Expr { |
801 | fn from(n: &'a StructLit) -> &'a Expr { | 614 | fn from(n: StructLit) -> Expr { |
802 | Expr::cast(&n.syntax).unwrap() | 615 | Expr::cast(n.syntax).unwrap() |
803 | } | 616 | } |
804 | } | 617 | } |
805 | impl<'a> From<&'a CallExpr> for &'a Expr { | 618 | impl From<CallExpr> for Expr { |
806 | fn from(n: &'a CallExpr) -> &'a Expr { | 619 | fn from(n: CallExpr) -> Expr { |
807 | Expr::cast(&n.syntax).unwrap() | 620 | Expr::cast(n.syntax).unwrap() |
808 | } | 621 | } |
809 | } | 622 | } |
810 | impl<'a> From<&'a IndexExpr> for &'a Expr { | 623 | impl From<IndexExpr> for Expr { |
811 | fn from(n: &'a IndexExpr) -> &'a Expr { | 624 | fn from(n: IndexExpr) -> Expr { |
812 | Expr::cast(&n.syntax).unwrap() | 625 | Expr::cast(n.syntax).unwrap() |
813 | } | 626 | } |
814 | } | 627 | } |
815 | impl<'a> From<&'a MethodCallExpr> for &'a Expr { | 628 | impl From<MethodCallExpr> for Expr { |
816 | fn from(n: &'a MethodCallExpr) -> &'a Expr { | 629 | fn from(n: MethodCallExpr) -> Expr { |
817 | Expr::cast(&n.syntax).unwrap() | 630 | Expr::cast(n.syntax).unwrap() |
818 | } | 631 | } |
819 | } | 632 | } |
820 | impl<'a> From<&'a FieldExpr> for &'a Expr { | 633 | impl From<FieldExpr> for Expr { |
821 | fn from(n: &'a FieldExpr) -> &'a Expr { | 634 | fn from(n: FieldExpr) -> Expr { |
822 | Expr::cast(&n.syntax).unwrap() | 635 | Expr::cast(n.syntax).unwrap() |
823 | } | 636 | } |
824 | } | 637 | } |
825 | impl<'a> From<&'a TryExpr> for &'a Expr { | 638 | impl From<TryExpr> for Expr { |
826 | fn from(n: &'a TryExpr) -> &'a Expr { | 639 | fn from(n: TryExpr) -> Expr { |
827 | Expr::cast(&n.syntax).unwrap() | 640 | Expr::cast(n.syntax).unwrap() |
828 | } | 641 | } |
829 | } | 642 | } |
830 | impl<'a> From<&'a TryBlockExpr> for &'a Expr { | 643 | impl From<TryBlockExpr> for Expr { |
831 | fn from(n: &'a TryBlockExpr) -> &'a Expr { | 644 | fn from(n: TryBlockExpr) -> Expr { |
832 | Expr::cast(&n.syntax).unwrap() | 645 | Expr::cast(n.syntax).unwrap() |
833 | } | 646 | } |
834 | } | 647 | } |
835 | impl<'a> From<&'a CastExpr> for &'a Expr { | 648 | impl From<CastExpr> for Expr { |
836 | fn from(n: &'a CastExpr) -> &'a Expr { | 649 | fn from(n: CastExpr) -> Expr { |
837 | Expr::cast(&n.syntax).unwrap() | 650 | Expr::cast(n.syntax).unwrap() |
838 | } | 651 | } |
839 | } | 652 | } |
840 | impl<'a> From<&'a RefExpr> for &'a Expr { | 653 | impl From<RefExpr> for Expr { |
841 | fn from(n: &'a RefExpr) -> &'a Expr { | 654 | fn from(n: RefExpr) -> Expr { |
842 | Expr::cast(&n.syntax).unwrap() | 655 | Expr::cast(n.syntax).unwrap() |
843 | } | 656 | } |
844 | } | 657 | } |
845 | impl<'a> From<&'a PrefixExpr> for &'a Expr { | 658 | impl From<PrefixExpr> for Expr { |
846 | fn from(n: &'a PrefixExpr) -> &'a Expr { | 659 | fn from(n: PrefixExpr) -> Expr { |
847 | Expr::cast(&n.syntax).unwrap() | 660 | Expr::cast(n.syntax).unwrap() |
848 | } | 661 | } |
849 | } | 662 | } |
850 | impl<'a> From<&'a RangeExpr> for &'a Expr { | 663 | impl From<RangeExpr> for Expr { |
851 | fn from(n: &'a RangeExpr) -> &'a Expr { | 664 | fn from(n: RangeExpr) -> Expr { |
852 | Expr::cast(&n.syntax).unwrap() | 665 | Expr::cast(n.syntax).unwrap() |
853 | } | 666 | } |
854 | } | 667 | } |
855 | impl<'a> From<&'a BinExpr> for &'a Expr { | 668 | impl From<BinExpr> for Expr { |
856 | fn from(n: &'a BinExpr) -> &'a Expr { | 669 | fn from(n: BinExpr) -> Expr { |
857 | Expr::cast(&n.syntax).unwrap() | 670 | Expr::cast(n.syntax).unwrap() |
858 | } | 671 | } |
859 | } | 672 | } |
860 | impl<'a> From<&'a Literal> for &'a Expr { | 673 | impl From<Literal> for Expr { |
861 | fn from(n: &'a Literal) -> &'a Expr { | 674 | fn from(n: Literal) -> Expr { |
862 | Expr::cast(&n.syntax).unwrap() | 675 | Expr::cast(n.syntax).unwrap() |
863 | } | 676 | } |
864 | } | 677 | } |
865 | impl<'a> From<&'a MacroCall> for &'a Expr { | 678 | impl From<MacroCall> for Expr { |
866 | fn from(n: &'a MacroCall) -> &'a Expr { | 679 | fn from(n: MacroCall) -> Expr { |
867 | Expr::cast(&n.syntax).unwrap() | 680 | Expr::cast(n.syntax).unwrap() |
868 | } | 681 | } |
869 | } | 682 | } |
870 | 683 | ||
871 | 684 | ||
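With owned nodes, the From impls above turn a concrete expression into the general Expr by moving the wrapper rather than by reinterpreting a reference. A small sketch of what calling code looks like; the function and its argument are hypothetical:

    // Wrap a concrete node into the general expression type.
    // The conversion consumes `call`; clone the handle first if the
    // concrete node is still needed afterwards.
    fn as_expr(call: MethodCallExpr) -> Expr {
        Expr::from(call)
    }
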
872 | impl AstNode for Expr { | 685 | impl AstNode for Expr { |
873 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 686 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
874 | match syntax.kind() { | 687 | match syntax.kind() { |
875 | | TUPLE_EXPR | 688 | | TUPLE_EXPR |
876 | | ARRAY_EXPR | 689 | | ARRAY_EXPR |
@@ -900,50 +713,45 @@ impl AstNode for Expr { | |||
900 | | RANGE_EXPR | 713 | | RANGE_EXPR |
901 | | BIN_EXPR | 714 | | BIN_EXPR |
902 | | LITERAL | 715 | | LITERAL |
903 | | MACRO_CALL => Some(Expr::from_repr(syntax.into_repr())), | 716 | | MACRO_CALL => Some(Expr { syntax }), |
904 | _ => None, | 717 | _ => None, |
905 | } | 718 | } |
906 | } | 719 | } |
907 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 720 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
908 | } | 721 | } |
909 | 722 | ||
910 | impl ToOwned for Expr { | ||
911 | type Owned = TreeArc<Expr>; | ||
912 | fn to_owned(&self) -> TreeArc<Expr> { TreeArc::cast(self.syntax.to_owned()) } | ||
913 | } | ||
914 | |||
915 | impl Expr { | 723 | impl Expr { |
916 | pub fn kind(&self) -> ExprKind { | 724 | pub fn kind(&self) -> ExprKind { |
917 | match self.syntax.kind() { | 725 | match self.syntax.kind() { |
918 | TUPLE_EXPR => ExprKind::TupleExpr(TupleExpr::cast(&self.syntax).unwrap()), | 726 | TUPLE_EXPR => ExprKind::TupleExpr(TupleExpr::cast(self.syntax.clone()).unwrap()), |
919 | ARRAY_EXPR => ExprKind::ArrayExpr(ArrayExpr::cast(&self.syntax).unwrap()), | 727 | ARRAY_EXPR => ExprKind::ArrayExpr(ArrayExpr::cast(self.syntax.clone()).unwrap()), |
920 | PAREN_EXPR => ExprKind::ParenExpr(ParenExpr::cast(&self.syntax).unwrap()), | 728 | PAREN_EXPR => ExprKind::ParenExpr(ParenExpr::cast(self.syntax.clone()).unwrap()), |
921 | PATH_EXPR => ExprKind::PathExpr(PathExpr::cast(&self.syntax).unwrap()), | 729 | PATH_EXPR => ExprKind::PathExpr(PathExpr::cast(self.syntax.clone()).unwrap()), |
922 | LAMBDA_EXPR => ExprKind::LambdaExpr(LambdaExpr::cast(&self.syntax).unwrap()), | 730 | LAMBDA_EXPR => ExprKind::LambdaExpr(LambdaExpr::cast(self.syntax.clone()).unwrap()), |
923 | IF_EXPR => ExprKind::IfExpr(IfExpr::cast(&self.syntax).unwrap()), | 731 | IF_EXPR => ExprKind::IfExpr(IfExpr::cast(self.syntax.clone()).unwrap()), |
924 | LOOP_EXPR => ExprKind::LoopExpr(LoopExpr::cast(&self.syntax).unwrap()), | 732 | LOOP_EXPR => ExprKind::LoopExpr(LoopExpr::cast(self.syntax.clone()).unwrap()), |
925 | FOR_EXPR => ExprKind::ForExpr(ForExpr::cast(&self.syntax).unwrap()), | 733 | FOR_EXPR => ExprKind::ForExpr(ForExpr::cast(self.syntax.clone()).unwrap()), |
926 | WHILE_EXPR => ExprKind::WhileExpr(WhileExpr::cast(&self.syntax).unwrap()), | 734 | WHILE_EXPR => ExprKind::WhileExpr(WhileExpr::cast(self.syntax.clone()).unwrap()), |
927 | CONTINUE_EXPR => ExprKind::ContinueExpr(ContinueExpr::cast(&self.syntax).unwrap()), | 735 | CONTINUE_EXPR => ExprKind::ContinueExpr(ContinueExpr::cast(self.syntax.clone()).unwrap()), |
928 | BREAK_EXPR => ExprKind::BreakExpr(BreakExpr::cast(&self.syntax).unwrap()), | 736 | BREAK_EXPR => ExprKind::BreakExpr(BreakExpr::cast(self.syntax.clone()).unwrap()), |
929 | LABEL => ExprKind::Label(Label::cast(&self.syntax).unwrap()), | 737 | LABEL => ExprKind::Label(Label::cast(self.syntax.clone()).unwrap()), |
930 | BLOCK_EXPR => ExprKind::BlockExpr(BlockExpr::cast(&self.syntax).unwrap()), | 738 | BLOCK_EXPR => ExprKind::BlockExpr(BlockExpr::cast(self.syntax.clone()).unwrap()), |
931 | RETURN_EXPR => ExprKind::ReturnExpr(ReturnExpr::cast(&self.syntax).unwrap()), | 739 | RETURN_EXPR => ExprKind::ReturnExpr(ReturnExpr::cast(self.syntax.clone()).unwrap()), |
932 | MATCH_EXPR => ExprKind::MatchExpr(MatchExpr::cast(&self.syntax).unwrap()), | 740 | MATCH_EXPR => ExprKind::MatchExpr(MatchExpr::cast(self.syntax.clone()).unwrap()), |
933 | STRUCT_LIT => ExprKind::StructLit(StructLit::cast(&self.syntax).unwrap()), | 741 | STRUCT_LIT => ExprKind::StructLit(StructLit::cast(self.syntax.clone()).unwrap()), |
934 | CALL_EXPR => ExprKind::CallExpr(CallExpr::cast(&self.syntax).unwrap()), | 742 | CALL_EXPR => ExprKind::CallExpr(CallExpr::cast(self.syntax.clone()).unwrap()), |
935 | INDEX_EXPR => ExprKind::IndexExpr(IndexExpr::cast(&self.syntax).unwrap()), | 743 | INDEX_EXPR => ExprKind::IndexExpr(IndexExpr::cast(self.syntax.clone()).unwrap()), |
936 | METHOD_CALL_EXPR => ExprKind::MethodCallExpr(MethodCallExpr::cast(&self.syntax).unwrap()), | 744 | METHOD_CALL_EXPR => ExprKind::MethodCallExpr(MethodCallExpr::cast(self.syntax.clone()).unwrap()), |
937 | FIELD_EXPR => ExprKind::FieldExpr(FieldExpr::cast(&self.syntax).unwrap()), | 745 | FIELD_EXPR => ExprKind::FieldExpr(FieldExpr::cast(self.syntax.clone()).unwrap()), |
938 | TRY_EXPR => ExprKind::TryExpr(TryExpr::cast(&self.syntax).unwrap()), | 746 | TRY_EXPR => ExprKind::TryExpr(TryExpr::cast(self.syntax.clone()).unwrap()), |
939 | TRY_BLOCK_EXPR => ExprKind::TryBlockExpr(TryBlockExpr::cast(&self.syntax).unwrap()), | 747 | TRY_BLOCK_EXPR => ExprKind::TryBlockExpr(TryBlockExpr::cast(self.syntax.clone()).unwrap()), |
940 | CAST_EXPR => ExprKind::CastExpr(CastExpr::cast(&self.syntax).unwrap()), | 748 | CAST_EXPR => ExprKind::CastExpr(CastExpr::cast(self.syntax.clone()).unwrap()), |
941 | REF_EXPR => ExprKind::RefExpr(RefExpr::cast(&self.syntax).unwrap()), | 749 | REF_EXPR => ExprKind::RefExpr(RefExpr::cast(self.syntax.clone()).unwrap()), |
942 | PREFIX_EXPR => ExprKind::PrefixExpr(PrefixExpr::cast(&self.syntax).unwrap()), | 750 | PREFIX_EXPR => ExprKind::PrefixExpr(PrefixExpr::cast(self.syntax.clone()).unwrap()), |
943 | RANGE_EXPR => ExprKind::RangeExpr(RangeExpr::cast(&self.syntax).unwrap()), | 751 | RANGE_EXPR => ExprKind::RangeExpr(RangeExpr::cast(self.syntax.clone()).unwrap()), |
944 | BIN_EXPR => ExprKind::BinExpr(BinExpr::cast(&self.syntax).unwrap()), | 752 | BIN_EXPR => ExprKind::BinExpr(BinExpr::cast(self.syntax.clone()).unwrap()), |
945 | LITERAL => ExprKind::Literal(Literal::cast(&self.syntax).unwrap()), | 753 | LITERAL => ExprKind::Literal(Literal::cast(self.syntax.clone()).unwrap()), |
946 | MACRO_CALL => ExprKind::MacroCall(MacroCall::cast(&self.syntax).unwrap()), | 754 | MACRO_CALL => ExprKind::MacroCall(MacroCall::cast(self.syntax.clone()).unwrap()), |
947 | _ => unreachable!(), | 755 | _ => unreachable!(), |
948 | } | 756 | } |
949 | } | 757 | } |
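Expr::kind now clones the underlying SyntaxNode handle before re-casting it, so the Expr value stays usable after the call. A sketch of dispatching on the result, mirroring a few of the ExprKind variants generated above; the helper name is hypothetical:

    // Classify an expression by its kind.
    fn describe(expr: &Expr) -> &'static str {
        match expr.kind() {
            ExprKind::IfExpr(_) => "if expression",
            ExprKind::BinExpr(_) => "binary expression",
            ExprKind::Literal(_) => "literal",
            _ => "other expression",
        }
    }
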
@@ -952,203 +760,149 @@ impl Expr { | |||
952 | impl Expr {} | 760 | impl Expr {} |
953 | 761 | ||
954 | // ExprStmt | 762 | // ExprStmt |
955 | #[derive(Debug, PartialEq, Eq, Hash)] | 763 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
956 | #[repr(transparent)] | ||
957 | pub struct ExprStmt { | 764 | pub struct ExprStmt { |
958 | pub(crate) syntax: SyntaxNode, | 765 | pub(crate) syntax: SyntaxNode, |
959 | } | 766 | } |
960 | unsafe impl TransparentNewType for ExprStmt { | ||
961 | type Repr = rowan::SyntaxNode; | ||
962 | } | ||
963 | 767 | ||
964 | impl AstNode for ExprStmt { | 768 | impl AstNode for ExprStmt { |
965 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 769 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
966 | match syntax.kind() { | 770 | match syntax.kind() { |
967 | EXPR_STMT => Some(ExprStmt::from_repr(syntax.into_repr())), | 771 | EXPR_STMT => Some(ExprStmt { syntax }), |
968 | _ => None, | 772 | _ => None, |
969 | } | 773 | } |
970 | } | 774 | } |
971 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 775 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
972 | } | 776 | } |
973 | 777 | ||
974 | impl ToOwned for ExprStmt { | ||
975 | type Owned = TreeArc<ExprStmt>; | ||
976 | fn to_owned(&self) -> TreeArc<ExprStmt> { TreeArc::cast(self.syntax.to_owned()) } | ||
977 | } | ||
978 | |||
979 | 778 | ||
980 | impl ExprStmt { | 779 | impl ExprStmt { |
981 | pub fn expr(&self) -> Option<&Expr> { | 780 | pub fn expr(&self) -> Option<Expr> { |
982 | super::child_opt(self) | 781 | super::child_opt(self) |
983 | } | 782 | } |
984 | } | 783 | } |
985 | 784 | ||
986 | // ExternCrateItem | 785 | // ExternCrateItem |
987 | #[derive(Debug, PartialEq, Eq, Hash)] | 786 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
988 | #[repr(transparent)] | ||
989 | pub struct ExternCrateItem { | 787 | pub struct ExternCrateItem { |
990 | pub(crate) syntax: SyntaxNode, | 788 | pub(crate) syntax: SyntaxNode, |
991 | } | 789 | } |
992 | unsafe impl TransparentNewType for ExternCrateItem { | ||
993 | type Repr = rowan::SyntaxNode; | ||
994 | } | ||
995 | 790 | ||
996 | impl AstNode for ExternCrateItem { | 791 | impl AstNode for ExternCrateItem { |
997 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 792 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
998 | match syntax.kind() { | 793 | match syntax.kind() { |
999 | EXTERN_CRATE_ITEM => Some(ExternCrateItem::from_repr(syntax.into_repr())), | 794 | EXTERN_CRATE_ITEM => Some(ExternCrateItem { syntax }), |
1000 | _ => None, | 795 | _ => None, |
1001 | } | 796 | } |
1002 | } | 797 | } |
1003 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 798 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
1004 | } | 799 | } |
1005 | 800 | ||
1006 | impl ToOwned for ExternCrateItem { | ||
1007 | type Owned = TreeArc<ExternCrateItem>; | ||
1008 | fn to_owned(&self) -> TreeArc<ExternCrateItem> { TreeArc::cast(self.syntax.to_owned()) } | ||
1009 | } | ||
1010 | |||
1011 | 801 | ||
1012 | impl ExternCrateItem { | 802 | impl ExternCrateItem { |
1013 | pub fn name_ref(&self) -> Option<&NameRef> { | 803 | pub fn name_ref(&self) -> Option<NameRef> { |
1014 | super::child_opt(self) | 804 | super::child_opt(self) |
1015 | } | 805 | } |
1016 | 806 | ||
1017 | pub fn alias(&self) -> Option<&Alias> { | 807 | pub fn alias(&self) -> Option<Alias> { |
1018 | super::child_opt(self) | 808 | super::child_opt(self) |
1019 | } | 809 | } |
1020 | } | 810 | } |
1021 | 811 | ||
1022 | // FieldExpr | 812 | // FieldExpr |
1023 | #[derive(Debug, PartialEq, Eq, Hash)] | 813 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
1024 | #[repr(transparent)] | ||
1025 | pub struct FieldExpr { | 814 | pub struct FieldExpr { |
1026 | pub(crate) syntax: SyntaxNode, | 815 | pub(crate) syntax: SyntaxNode, |
1027 | } | 816 | } |
1028 | unsafe impl TransparentNewType for FieldExpr { | ||
1029 | type Repr = rowan::SyntaxNode; | ||
1030 | } | ||
1031 | 817 | ||
1032 | impl AstNode for FieldExpr { | 818 | impl AstNode for FieldExpr { |
1033 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 819 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
1034 | match syntax.kind() { | 820 | match syntax.kind() { |
1035 | FIELD_EXPR => Some(FieldExpr::from_repr(syntax.into_repr())), | 821 | FIELD_EXPR => Some(FieldExpr { syntax }), |
1036 | _ => None, | 822 | _ => None, |
1037 | } | 823 | } |
1038 | } | 824 | } |
1039 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 825 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
1040 | } | 826 | } |
1041 | 827 | ||
1042 | impl ToOwned for FieldExpr { | ||
1043 | type Owned = TreeArc<FieldExpr>; | ||
1044 | fn to_owned(&self) -> TreeArc<FieldExpr> { TreeArc::cast(self.syntax.to_owned()) } | ||
1045 | } | ||
1046 | |||
1047 | 828 | ||
1048 | impl FieldExpr { | 829 | impl FieldExpr { |
1049 | pub fn expr(&self) -> Option<&Expr> { | 830 | pub fn expr(&self) -> Option<Expr> { |
1050 | super::child_opt(self) | 831 | super::child_opt(self) |
1051 | } | 832 | } |
1052 | 833 | ||
1053 | pub fn name_ref(&self) -> Option<&NameRef> { | 834 | pub fn name_ref(&self) -> Option<NameRef> { |
1054 | super::child_opt(self) | 835 | super::child_opt(self) |
1055 | } | 836 | } |
1056 | } | 837 | } |
1057 | 838 | ||
1058 | // FieldPat | 839 | // FieldPat |
1059 | #[derive(Debug, PartialEq, Eq, Hash)] | 840 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
1060 | #[repr(transparent)] | ||
1061 | pub struct FieldPat { | 841 | pub struct FieldPat { |
1062 | pub(crate) syntax: SyntaxNode, | 842 | pub(crate) syntax: SyntaxNode, |
1063 | } | 843 | } |
1064 | unsafe impl TransparentNewType for FieldPat { | ||
1065 | type Repr = rowan::SyntaxNode; | ||
1066 | } | ||
1067 | 844 | ||
1068 | impl AstNode for FieldPat { | 845 | impl AstNode for FieldPat { |
1069 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 846 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
1070 | match syntax.kind() { | 847 | match syntax.kind() { |
1071 | FIELD_PAT => Some(FieldPat::from_repr(syntax.into_repr())), | 848 | FIELD_PAT => Some(FieldPat { syntax }), |
1072 | _ => None, | 849 | _ => None, |
1073 | } | 850 | } |
1074 | } | 851 | } |
1075 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 852 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
1076 | } | 853 | } |
1077 | 854 | ||
1078 | impl ToOwned for FieldPat { | ||
1079 | type Owned = TreeArc<FieldPat>; | ||
1080 | fn to_owned(&self) -> TreeArc<FieldPat> { TreeArc::cast(self.syntax.to_owned()) } | ||
1081 | } | ||
1082 | |||
1083 | 855 | ||
1084 | impl ast::NameOwner for FieldPat {} | 856 | impl ast::NameOwner for FieldPat {} |
1085 | impl FieldPat { | 857 | impl FieldPat { |
1086 | pub fn pat(&self) -> Option<&Pat> { | 858 | pub fn pat(&self) -> Option<Pat> { |
1087 | super::child_opt(self) | 859 | super::child_opt(self) |
1088 | } | 860 | } |
1089 | } | 861 | } |
1090 | 862 | ||
1091 | // FieldPatList | 863 | // FieldPatList |
1092 | #[derive(Debug, PartialEq, Eq, Hash)] | 864 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
1093 | #[repr(transparent)] | ||
1094 | pub struct FieldPatList { | 865 | pub struct FieldPatList { |
1095 | pub(crate) syntax: SyntaxNode, | 866 | pub(crate) syntax: SyntaxNode, |
1096 | } | 867 | } |
1097 | unsafe impl TransparentNewType for FieldPatList { | ||
1098 | type Repr = rowan::SyntaxNode; | ||
1099 | } | ||
1100 | 868 | ||
1101 | impl AstNode for FieldPatList { | 869 | impl AstNode for FieldPatList { |
1102 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 870 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
1103 | match syntax.kind() { | 871 | match syntax.kind() { |
1104 | FIELD_PAT_LIST => Some(FieldPatList::from_repr(syntax.into_repr())), | 872 | FIELD_PAT_LIST => Some(FieldPatList { syntax }), |
1105 | _ => None, | 873 | _ => None, |
1106 | } | 874 | } |
1107 | } | 875 | } |
1108 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 876 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
1109 | } | 877 | } |
1110 | 878 | ||
1111 | impl ToOwned for FieldPatList { | ||
1112 | type Owned = TreeArc<FieldPatList>; | ||
1113 | fn to_owned(&self) -> TreeArc<FieldPatList> { TreeArc::cast(self.syntax.to_owned()) } | ||
1114 | } | ||
1115 | |||
1116 | 879 | ||
1117 | impl FieldPatList { | 880 | impl FieldPatList { |
1118 | pub fn field_pats(&self) -> impl Iterator<Item = &FieldPat> { | 881 | pub fn field_pats(&self) -> impl Iterator<Item = FieldPat> { |
1119 | super::children(self) | 882 | super::children(self) |
1120 | } | 883 | } |
1121 | 884 | ||
1122 | pub fn bind_pats(&self) -> impl Iterator<Item = &BindPat> { | 885 | pub fn bind_pats(&self) -> impl Iterator<Item = BindPat> { |
1123 | super::children(self) | 886 | super::children(self) |
1124 | } | 887 | } |
1125 | } | 888 | } |
1126 | 889 | ||
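Iterator accessors such as field_pats() and bind_pats() now yield owned nodes instead of references tied to the parent's lifetime, so their items can be collected and stored freely. A sketch assuming a FieldPatList value obtained elsewhere; the helper name is hypothetical:

    // Collect the field patterns of a struct pattern; the collected nodes
    // are owned handles and do not borrow from `list`.
    fn collect_field_pats(list: &FieldPatList) -> Vec<FieldPat> {
        list.field_pats().collect()
    }
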
1127 | // FnDef | 890 | // FnDef |
1128 | #[derive(Debug, PartialEq, Eq, Hash)] | 891 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
1129 | #[repr(transparent)] | ||
1130 | pub struct FnDef { | 892 | pub struct FnDef { |
1131 | pub(crate) syntax: SyntaxNode, | 893 | pub(crate) syntax: SyntaxNode, |
1132 | } | 894 | } |
1133 | unsafe impl TransparentNewType for FnDef { | ||
1134 | type Repr = rowan::SyntaxNode; | ||
1135 | } | ||
1136 | 895 | ||
1137 | impl AstNode for FnDef { | 896 | impl AstNode for FnDef { |
1138 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 897 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
1139 | match syntax.kind() { | 898 | match syntax.kind() { |
1140 | FN_DEF => Some(FnDef::from_repr(syntax.into_repr())), | 899 | FN_DEF => Some(FnDef { syntax }), |
1141 | _ => None, | 900 | _ => None, |
1142 | } | 901 | } |
1143 | } | 902 | } |
1144 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 903 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
1145 | } | 904 | } |
1146 | 905 | ||
1147 | impl ToOwned for FnDef { | ||
1148 | type Owned = TreeArc<FnDef>; | ||
1149 | fn to_owned(&self) -> TreeArc<FnDef> { TreeArc::cast(self.syntax.to_owned()) } | ||
1150 | } | ||
1151 | |||
1152 | 906 | ||
1153 | impl ast::VisibilityOwner for FnDef {} | 907 | impl ast::VisibilityOwner for FnDef {} |
1154 | impl ast::NameOwner for FnDef {} | 908 | impl ast::NameOwner for FnDef {} |
@@ -1156,246 +910,192 @@ impl ast::TypeParamsOwner for FnDef {} | |||
1156 | impl ast::AttrsOwner for FnDef {} | 910 | impl ast::AttrsOwner for FnDef {} |
1157 | impl ast::DocCommentsOwner for FnDef {} | 911 | impl ast::DocCommentsOwner for FnDef {} |
1158 | impl FnDef { | 912 | impl FnDef { |
1159 | pub fn param_list(&self) -> Option<&ParamList> { | 913 | pub fn param_list(&self) -> Option<ParamList> { |
1160 | super::child_opt(self) | 914 | super::child_opt(self) |
1161 | } | 915 | } |
1162 | 916 | ||
1163 | pub fn body(&self) -> Option<&Block> { | 917 | pub fn body(&self) -> Option<Block> { |
1164 | super::child_opt(self) | 918 | super::child_opt(self) |
1165 | } | 919 | } |
1166 | 920 | ||
1167 | pub fn ret_type(&self) -> Option<&RetType> { | 921 | pub fn ret_type(&self) -> Option<RetType> { |
1168 | super::child_opt(self) | 922 | super::child_opt(self) |
1169 | } | 923 | } |
1170 | } | 924 | } |
1171 | 925 | ||
1172 | // FnPointerType | 926 | // FnPointerType |
1173 | #[derive(Debug, PartialEq, Eq, Hash)] | 927 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
1174 | #[repr(transparent)] | ||
1175 | pub struct FnPointerType { | 928 | pub struct FnPointerType { |
1176 | pub(crate) syntax: SyntaxNode, | 929 | pub(crate) syntax: SyntaxNode, |
1177 | } | 930 | } |
1178 | unsafe impl TransparentNewType for FnPointerType { | ||
1179 | type Repr = rowan::SyntaxNode; | ||
1180 | } | ||
1181 | 931 | ||
1182 | impl AstNode for FnPointerType { | 932 | impl AstNode for FnPointerType { |
1183 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 933 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
1184 | match syntax.kind() { | 934 | match syntax.kind() { |
1185 | FN_POINTER_TYPE => Some(FnPointerType::from_repr(syntax.into_repr())), | 935 | FN_POINTER_TYPE => Some(FnPointerType { syntax }), |
1186 | _ => None, | 936 | _ => None, |
1187 | } | 937 | } |
1188 | } | 938 | } |
1189 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 939 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
1190 | } | 940 | } |
1191 | 941 | ||
1192 | impl ToOwned for FnPointerType { | ||
1193 | type Owned = TreeArc<FnPointerType>; | ||
1194 | fn to_owned(&self) -> TreeArc<FnPointerType> { TreeArc::cast(self.syntax.to_owned()) } | ||
1195 | } | ||
1196 | |||
1197 | 942 | ||
1198 | impl FnPointerType { | 943 | impl FnPointerType { |
1199 | pub fn param_list(&self) -> Option<&ParamList> { | 944 | pub fn param_list(&self) -> Option<ParamList> { |
1200 | super::child_opt(self) | 945 | super::child_opt(self) |
1201 | } | 946 | } |
1202 | 947 | ||
1203 | pub fn ret_type(&self) -> Option<&RetType> { | 948 | pub fn ret_type(&self) -> Option<RetType> { |
1204 | super::child_opt(self) | 949 | super::child_opt(self) |
1205 | } | 950 | } |
1206 | } | 951 | } |
1207 | 952 | ||
1208 | // ForExpr | 953 | // ForExpr |
1209 | #[derive(Debug, PartialEq, Eq, Hash)] | 954 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
1210 | #[repr(transparent)] | ||
1211 | pub struct ForExpr { | 955 | pub struct ForExpr { |
1212 | pub(crate) syntax: SyntaxNode, | 956 | pub(crate) syntax: SyntaxNode, |
1213 | } | 957 | } |
1214 | unsafe impl TransparentNewType for ForExpr { | ||
1215 | type Repr = rowan::SyntaxNode; | ||
1216 | } | ||
1217 | 958 | ||
1218 | impl AstNode for ForExpr { | 959 | impl AstNode for ForExpr { |
1219 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 960 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
1220 | match syntax.kind() { | 961 | match syntax.kind() { |
1221 | FOR_EXPR => Some(ForExpr::from_repr(syntax.into_repr())), | 962 | FOR_EXPR => Some(ForExpr { syntax }), |
1222 | _ => None, | 963 | _ => None, |
1223 | } | 964 | } |
1224 | } | 965 | } |
1225 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 966 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
1226 | } | 967 | } |
1227 | 968 | ||
1228 | impl ToOwned for ForExpr { | ||
1229 | type Owned = TreeArc<ForExpr>; | ||
1230 | fn to_owned(&self) -> TreeArc<ForExpr> { TreeArc::cast(self.syntax.to_owned()) } | ||
1231 | } | ||
1232 | |||
1233 | 969 | ||
1234 | impl ast::LoopBodyOwner for ForExpr {} | 970 | impl ast::LoopBodyOwner for ForExpr {} |
1235 | impl ForExpr { | 971 | impl ForExpr { |
1236 | pub fn pat(&self) -> Option<&Pat> { | 972 | pub fn pat(&self) -> Option<Pat> { |
1237 | super::child_opt(self) | 973 | super::child_opt(self) |
1238 | } | 974 | } |
1239 | 975 | ||
1240 | pub fn iterable(&self) -> Option<&Expr> { | 976 | pub fn iterable(&self) -> Option<Expr> { |
1241 | super::child_opt(self) | 977 | super::child_opt(self) |
1242 | } | 978 | } |
1243 | } | 979 | } |
1244 | 980 | ||
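The Option-returning child accessors compose with ? in the usual way, and the returned children are owned. A sketch pulling both children of a for expression; the helper name is hypothetical:

    // Extract the loop pattern and the iterated expression, if both exist.
    fn for_parts(for_expr: &ForExpr) -> Option<(Pat, Expr)> {
        Some((for_expr.pat()?, for_expr.iterable()?))
    }
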
1245 | // ForType | 981 | // ForType |
1246 | #[derive(Debug, PartialEq, Eq, Hash)] | 982 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
1247 | #[repr(transparent)] | ||
1248 | pub struct ForType { | 983 | pub struct ForType { |
1249 | pub(crate) syntax: SyntaxNode, | 984 | pub(crate) syntax: SyntaxNode, |
1250 | } | 985 | } |
1251 | unsafe impl TransparentNewType for ForType { | ||
1252 | type Repr = rowan::SyntaxNode; | ||
1253 | } | ||
1254 | 986 | ||
1255 | impl AstNode for ForType { | 987 | impl AstNode for ForType { |
1256 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 988 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
1257 | match syntax.kind() { | 989 | match syntax.kind() { |
1258 | FOR_TYPE => Some(ForType::from_repr(syntax.into_repr())), | 990 | FOR_TYPE => Some(ForType { syntax }), |
1259 | _ => None, | 991 | _ => None, |
1260 | } | 992 | } |
1261 | } | 993 | } |
1262 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 994 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
1263 | } | 995 | } |
1264 | 996 | ||
1265 | impl ToOwned for ForType { | ||
1266 | type Owned = TreeArc<ForType>; | ||
1267 | fn to_owned(&self) -> TreeArc<ForType> { TreeArc::cast(self.syntax.to_owned()) } | ||
1268 | } | ||
1269 | |||
1270 | 997 | ||
1271 | impl ForType { | 998 | impl ForType { |
1272 | pub fn type_ref(&self) -> Option<&TypeRef> { | 999 | pub fn type_ref(&self) -> Option<TypeRef> { |
1273 | super::child_opt(self) | 1000 | super::child_opt(self) |
1274 | } | 1001 | } |
1275 | } | 1002 | } |
1276 | 1003 | ||
1277 | // IfExpr | 1004 | // IfExpr |
1278 | #[derive(Debug, PartialEq, Eq, Hash)] | 1005 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
1279 | #[repr(transparent)] | ||
1280 | pub struct IfExpr { | 1006 | pub struct IfExpr { |
1281 | pub(crate) syntax: SyntaxNode, | 1007 | pub(crate) syntax: SyntaxNode, |
1282 | } | 1008 | } |
1283 | unsafe impl TransparentNewType for IfExpr { | ||
1284 | type Repr = rowan::SyntaxNode; | ||
1285 | } | ||
1286 | 1009 | ||
1287 | impl AstNode for IfExpr { | 1010 | impl AstNode for IfExpr { |
1288 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 1011 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
1289 | match syntax.kind() { | 1012 | match syntax.kind() { |
1290 | IF_EXPR => Some(IfExpr::from_repr(syntax.into_repr())), | 1013 | IF_EXPR => Some(IfExpr { syntax }), |
1291 | _ => None, | 1014 | _ => None, |
1292 | } | 1015 | } |
1293 | } | 1016 | } |
1294 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 1017 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
1295 | } | 1018 | } |
1296 | 1019 | ||
1297 | impl ToOwned for IfExpr { | ||
1298 | type Owned = TreeArc<IfExpr>; | ||
1299 | fn to_owned(&self) -> TreeArc<IfExpr> { TreeArc::cast(self.syntax.to_owned()) } | ||
1300 | } | ||
1301 | |||
1302 | 1020 | ||
1303 | impl IfExpr { | 1021 | impl IfExpr { |
1304 | pub fn condition(&self) -> Option<&Condition> { | 1022 | pub fn condition(&self) -> Option<Condition> { |
1305 | super::child_opt(self) | 1023 | super::child_opt(self) |
1306 | } | 1024 | } |
1307 | } | 1025 | } |
1308 | 1026 | ||
1309 | // ImplBlock | 1027 | // ImplBlock |
1310 | #[derive(Debug, PartialEq, Eq, Hash)] | 1028 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
1311 | #[repr(transparent)] | ||
1312 | pub struct ImplBlock { | 1029 | pub struct ImplBlock { |
1313 | pub(crate) syntax: SyntaxNode, | 1030 | pub(crate) syntax: SyntaxNode, |
1314 | } | 1031 | } |
1315 | unsafe impl TransparentNewType for ImplBlock { | ||
1316 | type Repr = rowan::SyntaxNode; | ||
1317 | } | ||
1318 | 1032 | ||
1319 | impl AstNode for ImplBlock { | 1033 | impl AstNode for ImplBlock { |
1320 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 1034 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
1321 | match syntax.kind() { | 1035 | match syntax.kind() { |
1322 | IMPL_BLOCK => Some(ImplBlock::from_repr(syntax.into_repr())), | 1036 | IMPL_BLOCK => Some(ImplBlock { syntax }), |
1323 | _ => None, | 1037 | _ => None, |
1324 | } | 1038 | } |
1325 | } | 1039 | } |
1326 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 1040 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
1327 | } | 1041 | } |
1328 | 1042 | ||
1329 | impl ToOwned for ImplBlock { | ||
1330 | type Owned = TreeArc<ImplBlock>; | ||
1331 | fn to_owned(&self) -> TreeArc<ImplBlock> { TreeArc::cast(self.syntax.to_owned()) } | ||
1332 | } | ||
1333 | |||
1334 | 1043 | ||
1335 | impl ast::TypeParamsOwner for ImplBlock {} |