author     Aleksey Kladov <[email protected]>    2019-07-19 08:43:01 +0100
committer  Aleksey Kladov <[email protected]>    2019-07-19 11:16:25 +0100
commit     e2b28f5bb8043e92b10f6a40696131007fc9dfe2 (patch)
tree       c14306038e386d71ddc894d63415bf8e9a94f7e8 /crates/ra_hir/src
parent     7e02aa0efff228126ffc43e81e5e127e1b9e32dd (diff)
migrate ra_hir to the new rowan
Diffstat (limited to 'crates/ra_hir/src')
-rw-r--r--  crates/ra_hir/src/adt.rs              | 15
-rw-r--r--  crates/ra_hir/src/code_model.rs       | 19
-rw-r--r--  crates/ra_hir/src/code_model/docs.rs  | 24
-rw-r--r--  crates/ra_hir/src/code_model/src.rs   | 44
-rw-r--r--  crates/ra_hir/src/db.rs               |  6
-rw-r--r--  crates/ra_hir/src/diagnostics.rs      | 12
-rw-r--r--  crates/ra_hir/src/expr.rs             | 46
-rw-r--r--  crates/ra_hir/src/expr/scope.rs       |  6
-rw-r--r--  crates/ra_hir/src/expr/validation.rs  |  2
-rw-r--r--  crates/ra_hir/src/generics.rs         | 22
-rw-r--r--  crates/ra_hir/src/ids.rs              | 14
-rw-r--r--  crates/ra_hir/src/impl_block.rs       | 24
-rw-r--r--  crates/ra_hir/src/lang_item.rs        |  8
-rw-r--r--  crates/ra_hir/src/name.rs             |  2
-rw-r--r--  crates/ra_hir/src/nameres/raw.rs      | 52
-rw-r--r--  crates/ra_hir/src/path.rs             | 26
-rw-r--r--  crates/ra_hir/src/source_binder.rs    | 54
-rw-r--r--  crates/ra_hir/src/source_id.rs        | 18
-rw-r--r--  crates/ra_hir/src/traits.rs           |  6
-rw-r--r--  crates/ra_hir/src/ty/tests.rs         | 10
-rw-r--r--  crates/ra_hir/src/type_ref.rs         |  4
21 files changed, 205 insertions, 209 deletions
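The change is mechanical: the new rowan API drops `TreeArc<N>`, so syntax and AST nodes become owned, cheaply clonable values rather than borrows into a reference-counted tree. Signatures lose the `TreeArc` wrapper, `AstNode::cast` takes a `SyntaxNode` by value, iterators yield owned nodes, and the `&*`/`.to_owned()` dances become plain borrows or `.clone()`. The before/after sketch below is condensed from the hunks in this patch; it is illustrative, not standalone compilable code.

    // Old rowan: AST nodes are borrowed, and keeping one alive means
    // holding a TreeArc to the whole tree.
    type Ast = TreeArc<ast::StructDef>;
    fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StructDef>>;
    fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = &ast::EnumVariant>;
    let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap();

    // New rowan: AST nodes are owned and cheap to clone, so TreeArc disappears,
    // `cast` consumes the node by value, and `.clone()` replaces `.to_owned()`.
    type Ast = ast::StructDef;
    fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef>;
    fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = ast::EnumVariant>;
    let expr: ast::Expr = ast::Expr::cast(elif.syntax().clone()).unwrap();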
diff --git a/crates/ra_hir/src/adt.rs b/crates/ra_hir/src/adt.rs
index 8afdac801..c65446df4 100644
--- a/crates/ra_hir/src/adt.rs
+++ b/crates/ra_hir/src/adt.rs
@@ -4,10 +4,7 @@ | |||
4 | use std::sync::Arc; | 4 | use std::sync::Arc; |
5 | 5 | ||
6 | use ra_arena::{impl_arena_id, Arena, RawId}; | 6 | use ra_arena::{impl_arena_id, Arena, RawId}; |
7 | use ra_syntax::{ | 7 | use ra_syntax::ast::{self, NameOwner, StructKind, TypeAscriptionOwner}; |
8 | ast::{self, NameOwner, StructKind, TypeAscriptionOwner}, | ||
9 | TreeArc, | ||
10 | }; | ||
11 | 8 | ||
12 | use crate::{ | 9 | use crate::{ |
13 | type_ref::TypeRef, AsName, AstDatabase, Crate, DefDatabase, Enum, EnumVariant, FieldSource, | 10 | type_ref::TypeRef, AsName, AstDatabase, Crate, DefDatabase, Enum, EnumVariant, FieldSource, |
@@ -59,11 +56,11 @@ impl StructData { | |||
59 | struct_: Struct, | 56 | struct_: Struct, |
60 | ) -> Arc<StructData> { | 57 | ) -> Arc<StructData> { |
61 | let src = struct_.source(db); | 58 | let src = struct_.source(db); |
62 | Arc::new(StructData::new(&*src.ast)) | 59 | Arc::new(StructData::new(&src.ast)) |
63 | } | 60 | } |
64 | } | 61 | } |
65 | 62 | ||
66 | fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = &ast::EnumVariant> { | 63 | fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = ast::EnumVariant> { |
67 | enum_def.variant_list().into_iter().flat_map(|it| it.variants()) | 64 | enum_def.variant_list().into_iter().flat_map(|it| it.variants()) |
68 | } | 65 | } |
69 | 66 | ||
@@ -71,9 +68,9 @@ impl EnumVariant { | |||
71 | pub(crate) fn source_impl( | 68 | pub(crate) fn source_impl( |
72 | self, | 69 | self, |
73 | db: &(impl DefDatabase + AstDatabase), | 70 | db: &(impl DefDatabase + AstDatabase), |
74 | ) -> Source<TreeArc<ast::EnumVariant>> { | 71 | ) -> Source<ast::EnumVariant> { |
75 | let src = self.parent.source(db); | 72 | let src = self.parent.source(db); |
76 | let ast = variants(&*src.ast) | 73 | let ast = variants(&src.ast) |
77 | .zip(db.enum_data(self.parent).variants.iter()) | 74 | .zip(db.enum_data(self.parent).variants.iter()) |
78 | .find(|(_syntax, (id, _))| *id == self.id) | 75 | .find(|(_syntax, (id, _))| *id == self.id) |
79 | .unwrap() | 76 | .unwrap() |
@@ -96,7 +93,7 @@ impl EnumData { | |||
96 | pub(crate) fn enum_data_query(db: &(impl DefDatabase + AstDatabase), e: Enum) -> Arc<EnumData> { | 93 | pub(crate) fn enum_data_query(db: &(impl DefDatabase + AstDatabase), e: Enum) -> Arc<EnumData> { |
97 | let src = e.source(db); | 94 | let src = e.source(db); |
98 | let name = src.ast.name().map(|n| n.as_name()); | 95 | let name = src.ast.name().map(|n| n.as_name()); |
99 | let variants = variants(&*src.ast) | 96 | let variants = variants(&src.ast) |
100 | .map(|var| EnumVariantData { | 97 | .map(|var| EnumVariantData { |
101 | name: var.name().map(|it| it.as_name()), | 98 | name: var.name().map(|it| it.as_name()), |
102 | variant_data: Arc::new(VariantData::new(var.kind())), | 99 | variant_data: Arc::new(VariantData::new(var.kind())), |
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index 4fb5844f4..779764590 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -4,10 +4,7 @@ pub(crate) mod docs; | |||
4 | use std::sync::Arc; | 4 | use std::sync::Arc; |
5 | 5 | ||
6 | use ra_db::{CrateId, Edition, FileId, SourceRootId}; | 6 | use ra_db::{CrateId, Edition, FileId, SourceRootId}; |
7 | use ra_syntax::{ | 7 | use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner}; |
8 | ast::{self, NameOwner, TypeAscriptionOwner}, | ||
9 | TreeArc, | ||
10 | }; | ||
11 | 8 | ||
12 | use crate::{ | 9 | use crate::{ |
13 | adt::{EnumVariantId, StructFieldId, VariantDef}, | 10 | adt::{EnumVariantId, StructFieldId, VariantDef}, |
@@ -155,8 +152,8 @@ impl_froms!( | |||
155 | ); | 152 | ); |
156 | 153 | ||
157 | pub enum ModuleSource { | 154 | pub enum ModuleSource { |
158 | SourceFile(TreeArc<ast::SourceFile>), | 155 | SourceFile(ast::SourceFile), |
159 | Module(TreeArc<ast::Module>), | 156 | Module(ast::Module), |
160 | } | 157 | } |
161 | 158 | ||
162 | impl ModuleSource { | 159 | impl ModuleSource { |
@@ -199,7 +196,7 @@ impl Module { | |||
199 | self, | 196 | self, |
200 | db: &impl HirDatabase, | 197 | db: &impl HirDatabase, |
201 | import: ImportId, | 198 | import: ImportId, |
202 | ) -> Either<TreeArc<ast::UseTree>, TreeArc<ast::ExternCrateItem>> { | 199 | ) -> Either<ast::UseTree, ast::ExternCrateItem> { |
203 | let src = self.definition_source(db); | 200 | let src = self.definition_source(db); |
204 | let (_, source_map) = db.raw_items_with_source_map(src.file_id); | 201 | let (_, source_map) = db.raw_items_with_source_map(src.file_id); |
205 | source_map.get(&src.ast, import) | 202 | source_map.get(&src.ast, import) |
@@ -321,8 +318,8 @@ pub struct StructField { | |||
321 | 318 | ||
322 | #[derive(Debug)] | 319 | #[derive(Debug)] |
323 | pub enum FieldSource { | 320 | pub enum FieldSource { |
324 | Named(TreeArc<ast::NamedFieldDef>), | 321 | Named(ast::NamedFieldDef), |
325 | Pos(TreeArc<ast::PosFieldDef>), | 322 | Pos(ast::PosFieldDef), |
326 | } | 323 | } |
327 | 324 | ||
328 | impl StructField { | 325 | impl StructField { |
@@ -736,7 +733,7 @@ impl ConstData { | |||
736 | konst: Const, | 733 | konst: Const, |
737 | ) -> Arc<ConstData> { | 734 | ) -> Arc<ConstData> { |
738 | let node = konst.source(db).ast; | 735 | let node = konst.source(db).ast; |
739 | const_data_for(&*node) | 736 | const_data_for(&node) |
740 | } | 737 | } |
741 | 738 | ||
742 | pub(crate) fn static_data_query( | 739 | pub(crate) fn static_data_query( |
@@ -744,7 +741,7 @@ impl ConstData { | |||
744 | konst: Static, | 741 | konst: Static, |
745 | ) -> Arc<ConstData> { | 742 | ) -> Arc<ConstData> { |
746 | let node = konst.source(db).ast; | 743 | let node = konst.source(db).ast; |
747 | const_data_for(&*node) | 744 | const_data_for(&node) |
748 | } | 745 | } |
749 | } | 746 | } |
750 | 747 | ||
diff --git a/crates/ra_hir/src/code_model/docs.rs b/crates/ra_hir/src/code_model/docs.rs
index 007ef315d..a2b4d8e97 100644
--- a/crates/ra_hir/src/code_model/docs.rs
+++ b/crates/ra_hir/src/code_model/docs.rs
@@ -71,21 +71,21 @@ pub(crate) fn documentation_query( | |||
71 | def: DocDef, | 71 | def: DocDef, |
72 | ) -> Option<Documentation> { | 72 | ) -> Option<Documentation> { |
73 | match def { | 73 | match def { |
74 | DocDef::Module(it) => docs_from_ast(&*it.declaration_source(db)?.ast), | 74 | DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.ast), |
75 | DocDef::StructField(it) => match it.source(db).ast { | 75 | DocDef::StructField(it) => match it.source(db).ast { |
76 | FieldSource::Named(named) => docs_from_ast(&*named), | 76 | FieldSource::Named(named) => docs_from_ast(&named), |
77 | FieldSource::Pos(..) => None, | 77 | FieldSource::Pos(..) => None, |
78 | }, | 78 | }, |
79 | DocDef::Struct(it) => docs_from_ast(&*it.source(db).ast), | 79 | DocDef::Struct(it) => docs_from_ast(&it.source(db).ast), |
80 | DocDef::Enum(it) => docs_from_ast(&*it.source(db).ast), | 80 | DocDef::Enum(it) => docs_from_ast(&it.source(db).ast), |
81 | DocDef::EnumVariant(it) => docs_from_ast(&*it.source(db).ast), | 81 | DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).ast), |
82 | DocDef::Static(it) => docs_from_ast(&*it.source(db).ast), | 82 | DocDef::Static(it) => docs_from_ast(&it.source(db).ast), |
83 | DocDef::Const(it) => docs_from_ast(&*it.source(db).ast), | 83 | DocDef::Const(it) => docs_from_ast(&it.source(db).ast), |
84 | DocDef::Function(it) => docs_from_ast(&*it.source(db).ast), | 84 | DocDef::Function(it) => docs_from_ast(&it.source(db).ast), |
85 | DocDef::Union(it) => docs_from_ast(&*it.source(db).ast), | 85 | DocDef::Union(it) => docs_from_ast(&it.source(db).ast), |
86 | DocDef::Trait(it) => docs_from_ast(&*it.source(db).ast), | 86 | DocDef::Trait(it) => docs_from_ast(&it.source(db).ast), |
87 | DocDef::TypeAlias(it) => docs_from_ast(&*it.source(db).ast), | 87 | DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).ast), |
88 | DocDef::MacroDef(it) => docs_from_ast(&*it.source(db).ast), | 88 | DocDef::MacroDef(it) => docs_from_ast(&it.source(db).ast), |
89 | } | 89 | } |
90 | } | 90 | } |
91 | 91 | ||
diff --git a/crates/ra_hir/src/code_model/src.rs b/crates/ra_hir/src/code_model/src.rs
index 72451e0e7..32bd9c661 100644
--- a/crates/ra_hir/src/code_model/src.rs
+++ b/crates/ra_hir/src/code_model/src.rs
@@ -1,4 +1,4 @@ | |||
1 | use ra_syntax::{ast, TreeArc}; | 1 | use ra_syntax::ast; |
2 | 2 | ||
3 | use crate::{ | 3 | use crate::{ |
4 | ids::AstItemDef, AstDatabase, Const, DefDatabase, Enum, EnumVariant, FieldSource, Function, | 4 | ids::AstItemDef, AstDatabase, Const, DefDatabase, Enum, EnumVariant, FieldSource, Function, |
@@ -34,7 +34,7 @@ impl Module { | |||
34 | pub fn declaration_source( | 34 | pub fn declaration_source( |
35 | self, | 35 | self, |
36 | db: &(impl DefDatabase + AstDatabase), | 36 | db: &(impl DefDatabase + AstDatabase), |
37 | ) -> Option<Source<TreeArc<ast::Module>>> { | 37 | ) -> Option<Source<ast::Module>> { |
38 | let def_map = db.crate_def_map(self.krate); | 38 | let def_map = db.crate_def_map(self.krate); |
39 | let decl = def_map[self.module_id].declaration?; | 39 | let decl = def_map[self.module_id].declaration?; |
40 | let ast = decl.to_node(db); | 40 | let ast = decl.to_node(db); |
@@ -49,62 +49,62 @@ impl HasSource for StructField { | |||
49 | } | 49 | } |
50 | } | 50 | } |
51 | impl HasSource for Struct { | 51 | impl HasSource for Struct { |
52 | type Ast = TreeArc<ast::StructDef>; | 52 | type Ast = ast::StructDef; |
53 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StructDef>> { | 53 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> { |
54 | self.id.source(db) | 54 | self.id.source(db) |
55 | } | 55 | } |
56 | } | 56 | } |
57 | impl HasSource for Union { | 57 | impl HasSource for Union { |
58 | type Ast = TreeArc<ast::StructDef>; | 58 | type Ast = ast::StructDef; |
59 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StructDef>> { | 59 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> { |
60 | self.id.source(db) | 60 | self.id.source(db) |
61 | } | 61 | } |
62 | } | 62 | } |
63 | impl HasSource for Enum { | 63 | impl HasSource for Enum { |
64 | type Ast = TreeArc<ast::EnumDef>; | 64 | type Ast = ast::EnumDef; |
65 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::EnumDef>> { | 65 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumDef> { |
66 | self.id.source(db) | 66 | self.id.source(db) |
67 | } | 67 | } |
68 | } | 68 | } |
69 | impl HasSource for EnumVariant { | 69 | impl HasSource for EnumVariant { |
70 | type Ast = TreeArc<ast::EnumVariant>; | 70 | type Ast = ast::EnumVariant; |
71 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::EnumVariant>> { | 71 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumVariant> { |
72 | self.source_impl(db) | 72 | self.source_impl(db) |
73 | } | 73 | } |
74 | } | 74 | } |
75 | impl HasSource for Function { | 75 | impl HasSource for Function { |
76 | type Ast = TreeArc<ast::FnDef>; | 76 | type Ast = ast::FnDef; |
77 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::FnDef>> { | 77 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::FnDef> { |
78 | self.id.source(db) | 78 | self.id.source(db) |
79 | } | 79 | } |
80 | } | 80 | } |
81 | impl HasSource for Const { | 81 | impl HasSource for Const { |
82 | type Ast = TreeArc<ast::ConstDef>; | 82 | type Ast = ast::ConstDef; |
83 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::ConstDef>> { | 83 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ConstDef> { |
84 | self.id.source(db) | 84 | self.id.source(db) |
85 | } | 85 | } |
86 | } | 86 | } |
87 | impl HasSource for Static { | 87 | impl HasSource for Static { |
88 | type Ast = TreeArc<ast::StaticDef>; | 88 | type Ast = ast::StaticDef; |
89 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StaticDef>> { | 89 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StaticDef> { |
90 | self.id.source(db) | 90 | self.id.source(db) |
91 | } | 91 | } |
92 | } | 92 | } |
93 | impl HasSource for Trait { | 93 | impl HasSource for Trait { |
94 | type Ast = TreeArc<ast::TraitDef>; | 94 | type Ast = ast::TraitDef; |
95 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::TraitDef>> { | 95 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TraitDef> { |
96 | self.id.source(db) | 96 | self.id.source(db) |
97 | } | 97 | } |
98 | } | 98 | } |
99 | impl HasSource for TypeAlias { | 99 | impl HasSource for TypeAlias { |
100 | type Ast = TreeArc<ast::TypeAliasDef>; | 100 | type Ast = ast::TypeAliasDef; |
101 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::TypeAliasDef>> { | 101 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TypeAliasDef> { |
102 | self.id.source(db) | 102 | self.id.source(db) |
103 | } | 103 | } |
104 | } | 104 | } |
105 | impl HasSource for MacroDef { | 105 | impl HasSource for MacroDef { |
106 | type Ast = TreeArc<ast::MacroCall>; | 106 | type Ast = ast::MacroCall; |
107 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::MacroCall>> { | 107 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::MacroCall> { |
108 | Source { file_id: self.id.0.file_id(), ast: self.id.0.to_node(db) } | 108 | Source { file_id: self.id.0.file_id(), ast: self.id.0.to_node(db) } |
109 | } | 109 | } |
110 | } | 110 | } |
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index da9f3e32d..358365176 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -2,7 +2,7 @@ use std::sync::Arc; | |||
2 | 2 | ||
3 | use parking_lot::Mutex; | 3 | use parking_lot::Mutex; |
4 | use ra_db::{salsa, SourceDatabase}; | 4 | use ra_db::{salsa, SourceDatabase}; |
5 | use ra_syntax::{ast, Parse, SmolStr, SyntaxNode, TreeArc}; | 5 | use ra_syntax::{ast, Parse, SmolStr, SyntaxNode}; |
6 | 6 | ||
7 | use crate::{ | 7 | use crate::{ |
8 | adt::{EnumData, StructData}, | 8 | adt::{EnumData, StructData}, |
@@ -62,11 +62,11 @@ pub trait AstDatabase: InternDatabase { | |||
62 | 62 | ||
63 | #[salsa::transparent] | 63 | #[salsa::transparent] |
64 | #[salsa::invoke(crate::source_id::AstIdMap::file_item_query)] | 64 | #[salsa::invoke(crate::source_id::AstIdMap::file_item_query)] |
65 | fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> TreeArc<SyntaxNode>; | 65 | fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> SyntaxNode; |
66 | 66 | ||
67 | #[salsa::transparent] | 67 | #[salsa::transparent] |
68 | #[salsa::invoke(crate::ids::HirFileId::parse_or_expand_query)] | 68 | #[salsa::invoke(crate::ids::HirFileId::parse_or_expand_query)] |
69 | fn parse_or_expand(&self, file_id: HirFileId) -> Option<TreeArc<SyntaxNode>>; | 69 | fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>; |
70 | 70 | ||
71 | #[salsa::invoke(crate::ids::HirFileId::parse_macro_query)] | 71 | #[salsa::invoke(crate::ids::HirFileId::parse_macro_query)] |
72 | fn parse_macro(&self, macro_file: ids::MacroFile) -> Option<Parse<SyntaxNode>>; | 72 | fn parse_macro(&self, macro_file: ids::MacroFile) -> Option<Parse<SyntaxNode>>; |
diff --git a/crates/ra_hir/src/diagnostics.rs b/crates/ra_hir/src/diagnostics.rs
index c97f0656d..0290483b3 100644
--- a/crates/ra_hir/src/diagnostics.rs
+++ b/crates/ra_hir/src/diagnostics.rs
@@ -1,6 +1,6 @@ | |||
1 | use std::{any::Any, fmt}; | 1 | use std::{any::Any, fmt}; |
2 | 2 | ||
3 | use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, TreeArc}; | 3 | use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange}; |
4 | use relative_path::RelativePathBuf; | 4 | use relative_path::RelativePathBuf; |
5 | 5 | ||
6 | use crate::{HirDatabase, HirFileId, Name}; | 6 | use crate::{HirDatabase, HirFileId, Name}; |
@@ -33,9 +33,9 @@ pub trait AstDiagnostic { | |||
33 | } | 33 | } |
34 | 34 | ||
35 | impl dyn Diagnostic { | 35 | impl dyn Diagnostic { |
36 | pub fn syntax_node(&self, db: &impl HirDatabase) -> TreeArc<SyntaxNode> { | 36 | pub fn syntax_node(&self, db: &impl HirDatabase) -> SyntaxNode { |
37 | let node = db.parse_or_expand(self.file()).unwrap(); | 37 | let node = db.parse_or_expand(self.file()).unwrap(); |
38 | self.syntax_node_ptr().to_node(&*node).to_owned() | 38 | self.syntax_node_ptr().to_node(&node) |
39 | } | 39 | } |
40 | 40 | ||
41 | pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> { | 41 | pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> { |
@@ -143,11 +143,11 @@ impl Diagnostic for MissingFields { | |||
143 | } | 143 | } |
144 | 144 | ||
145 | impl AstDiagnostic for MissingFields { | 145 | impl AstDiagnostic for MissingFields { |
146 | type AST = TreeArc<ast::NamedFieldList>; | 146 | type AST = ast::NamedFieldList; |
147 | 147 | ||
148 | fn ast(&self, db: &impl HirDatabase) -> Self::AST { | 148 | fn ast(&self, db: &impl HirDatabase) -> Self::AST { |
149 | let root = db.parse_or_expand(self.file()).unwrap(); | 149 | let root = db.parse_or_expand(self.file()).unwrap(); |
150 | let node = self.syntax_node_ptr().to_node(&*root); | 150 | let node = self.syntax_node_ptr().to_node(&root); |
151 | ast::NamedFieldList::cast(&node).unwrap().to_owned() | 151 | ast::NamedFieldList::cast(node).unwrap() |
152 | } | 152 | } |
153 | } | 153 | } |
diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs
index 3a97d97ce..70af3f119 100644
--- a/crates/ra_hir/src/expr.rs
+++ b/crates/ra_hir/src/expr.rs
@@ -550,7 +550,7 @@ where | |||
550 | self.exprs.alloc(block) | 550 | self.exprs.alloc(block) |
551 | } | 551 | } |
552 | 552 | ||
553 | fn collect_expr(&mut self, expr: &ast::Expr) -> ExprId { | 553 | fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { |
554 | let syntax_ptr = SyntaxNodePtr::new(expr.syntax()); | 554 | let syntax_ptr = SyntaxNodePtr::new(expr.syntax()); |
555 | match expr.kind() { | 555 | match expr.kind() { |
556 | ast::ExprKind::IfExpr(e) => { | 556 | ast::ExprKind::IfExpr(e) => { |
@@ -565,7 +565,8 @@ where | |||
565 | .map(|b| match b { | 565 | .map(|b| match b { |
566 | ast::ElseBranch::Block(it) => self.collect_block(it), | 566 | ast::ElseBranch::Block(it) => self.collect_block(it), |
567 | ast::ElseBranch::IfExpr(elif) => { | 567 | ast::ElseBranch::IfExpr(elif) => { |
568 | let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap(); | 568 | let expr: ast::Expr = |
569 | ast::Expr::cast(elif.syntax().clone()).unwrap(); | ||
569 | self.collect_expr(expr) | 570 | self.collect_expr(expr) |
570 | } | 571 | } |
571 | }) | 572 | }) |
@@ -582,7 +583,7 @@ where | |||
582 | let else_branch = e.else_branch().map(|b| match b { | 583 | let else_branch = e.else_branch().map(|b| match b { |
583 | ast::ElseBranch::Block(it) => self.collect_block(it), | 584 | ast::ElseBranch::Block(it) => self.collect_block(it), |
584 | ast::ElseBranch::IfExpr(elif) => { | 585 | ast::ElseBranch::IfExpr(elif) => { |
585 | let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap(); | 586 | let expr: ast::Expr = ast::Expr::cast(elif.syntax().clone()).unwrap(); |
586 | self.collect_expr(expr) | 587 | self.collect_expr(expr) |
587 | } | 588 | } |
588 | }); | 589 | }); |
@@ -689,7 +690,7 @@ where | |||
689 | let struct_lit = if let Some(nfl) = e.named_field_list() { | 690 | let struct_lit = if let Some(nfl) = e.named_field_list() { |
690 | let fields = nfl | 691 | let fields = nfl |
691 | .fields() | 692 | .fields() |
692 | .inspect(|field| field_ptrs.push(AstPtr::new(*field))) | 693 | .inspect(|field| field_ptrs.push(AstPtr::new(field))) |
693 | .map(|field| StructLitField { | 694 | .map(|field| StructLitField { |
694 | name: field | 695 | name: field |
695 | .name_ref() | 696 | .name_ref() |
@@ -699,7 +700,7 @@ where | |||
699 | self.collect_expr(e) | 700 | self.collect_expr(e) |
700 | } else if let Some(nr) = field.name_ref() { | 701 | } else if let Some(nr) = field.name_ref() { |
701 | // field shorthand | 702 | // field shorthand |
702 | let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(nr))); | 703 | let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(&nr))); |
703 | self.source_map | 704 | self.source_map |
704 | .expr_map | 705 | .expr_map |
705 | .insert(SyntaxNodePtr::new(nr.syntax()), id); | 706 | .insert(SyntaxNodePtr::new(nr.syntax()), id); |
@@ -837,7 +838,7 @@ where | |||
837 | let ast_id = self | 838 | let ast_id = self |
838 | .db | 839 | .db |
839 | .ast_id_map(self.current_file_id) | 840 | .ast_id_map(self.current_file_id) |
840 | .ast_id(e) | 841 | .ast_id(&e) |
841 | .with_file_id(self.current_file_id); | 842 | .with_file_id(self.current_file_id); |
842 | 843 | ||
843 | if let Some(path) = e.path().and_then(Path::from_ast) { | 844 | if let Some(path) = e.path().and_then(Path::from_ast) { |
@@ -845,11 +846,11 @@ where | |||
845 | let call_id = MacroCallLoc { def: def.id, ast_id }.id(self.db); | 846 | let call_id = MacroCallLoc { def: def.id, ast_id }.id(self.db); |
846 | let file_id = call_id.as_file(MacroFileKind::Expr); | 847 | let file_id = call_id.as_file(MacroFileKind::Expr); |
847 | if let Some(node) = self.db.parse_or_expand(file_id) { | 848 | if let Some(node) = self.db.parse_or_expand(file_id) { |
848 | if let Some(expr) = ast::Expr::cast(&*node) { | 849 | if let Some(expr) = ast::Expr::cast(node) { |
849 | log::debug!("macro expansion {}", expr.syntax().debug_dump()); | 850 | log::debug!("macro expansion {}", expr.syntax().debug_dump()); |
850 | let old_file_id = | 851 | let old_file_id = |
851 | std::mem::replace(&mut self.current_file_id, file_id); | 852 | std::mem::replace(&mut self.current_file_id, file_id); |
852 | let id = self.collect_expr(&expr); | 853 | let id = self.collect_expr(expr); |
853 | self.current_file_id = old_file_id; | 854 | self.current_file_id = old_file_id; |
854 | return id; | 855 | return id; |
855 | } | 856 | } |
@@ -863,7 +864,7 @@ where | |||
863 | } | 864 | } |
864 | } | 865 | } |
865 | 866 | ||
866 | fn collect_expr_opt(&mut self, expr: Option<&ast::Expr>) -> ExprId { | 867 | fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId { |
867 | if let Some(expr) = expr { | 868 | if let Some(expr) = expr { |
868 | self.collect_expr(expr) | 869 | self.collect_expr(expr) |
869 | } else { | 870 | } else { |
@@ -871,7 +872,7 @@ where | |||
871 | } | 872 | } |
872 | } | 873 | } |
873 | 874 | ||
874 | fn collect_block(&mut self, block: &ast::Block) -> ExprId { | 875 | fn collect_block(&mut self, block: ast::Block) -> ExprId { |
875 | let statements = block | 876 | let statements = block |
876 | .statements() | 877 | .statements() |
877 | .map(|s| match s.kind() { | 878 | .map(|s| match s.kind() { |
@@ -890,7 +891,7 @@ where | |||
890 | self.alloc_expr(Expr::Block { statements, tail }, SyntaxNodePtr::new(block.syntax())) | 891 | self.alloc_expr(Expr::Block { statements, tail }, SyntaxNodePtr::new(block.syntax())) |
891 | } | 892 | } |
892 | 893 | ||
893 | fn collect_block_opt(&mut self, block: Option<&ast::Block>) -> ExprId { | 894 | fn collect_block_opt(&mut self, block: Option<ast::Block>) -> ExprId { |
894 | if let Some(block) = block { | 895 | if let Some(block) = block { |
895 | self.collect_block(block) | 896 | self.collect_block(block) |
896 | } else { | 897 | } else { |
@@ -898,7 +899,7 @@ where | |||
898 | } | 899 | } |
899 | } | 900 | } |
900 | 901 | ||
901 | fn collect_pat(&mut self, pat: &ast::Pat) -> PatId { | 902 | fn collect_pat(&mut self, pat: ast::Pat) -> PatId { |
902 | let pattern = match pat.kind() { | 903 | let pattern = match pat.kind() { |
903 | ast::PatKind::BindPat(bp) => { | 904 | ast::PatKind::BindPat(bp) => { |
904 | let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); | 905 | let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); |
@@ -932,7 +933,8 @@ where | |||
932 | let mut fields: Vec<_> = field_pat_list | 933 | let mut fields: Vec<_> = field_pat_list |
933 | .bind_pats() | 934 | .bind_pats() |
934 | .filter_map(|bind_pat| { | 935 | .filter_map(|bind_pat| { |
935 | let ast_pat = ast::Pat::cast(bind_pat.syntax()).expect("bind pat is a pat"); | 936 | let ast_pat = |
937 | ast::Pat::cast(bind_pat.syntax().clone()).expect("bind pat is a pat"); | ||
936 | let pat = self.collect_pat(ast_pat); | 938 | let pat = self.collect_pat(ast_pat); |
937 | let name = bind_pat.name()?.as_name(); | 939 | let name = bind_pat.name()?.as_name(); |
938 | Some(FieldPat { name, pat }) | 940 | Some(FieldPat { name, pat }) |
@@ -953,11 +955,11 @@ where | |||
953 | ast::PatKind::LiteralPat(_) => Pat::Missing, | 955 | ast::PatKind::LiteralPat(_) => Pat::Missing, |
954 | ast::PatKind::SlicePat(_) | ast::PatKind::RangePat(_) => Pat::Missing, | 956 | ast::PatKind::SlicePat(_) | ast::PatKind::RangePat(_) => Pat::Missing, |
955 | }; | 957 | }; |
956 | let ptr = AstPtr::new(pat); | 958 | let ptr = AstPtr::new(&pat); |
957 | self.alloc_pat(pattern, Either::A(ptr)) | 959 | self.alloc_pat(pattern, Either::A(ptr)) |
958 | } | 960 | } |
959 | 961 | ||
960 | fn collect_pat_opt(&mut self, pat: Option<&ast::Pat>) -> PatId { | 962 | fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId { |
961 | if let Some(pat) = pat { | 963 | if let Some(pat) = pat { |
962 | self.collect_pat(pat) | 964 | self.collect_pat(pat) |
963 | } else { | 965 | } else { |
@@ -965,20 +967,20 @@ where | |||
965 | } | 967 | } |
966 | } | 968 | } |
967 | 969 | ||
968 | fn collect_const_body(&mut self, node: &ast::ConstDef) { | 970 | fn collect_const_body(&mut self, node: ast::ConstDef) { |
969 | let body = self.collect_expr_opt(node.body()); | 971 | let body = self.collect_expr_opt(node.body()); |
970 | self.body_expr = Some(body); | 972 | self.body_expr = Some(body); |
971 | } | 973 | } |
972 | 974 | ||
973 | fn collect_static_body(&mut self, node: &ast::StaticDef) { | 975 | fn collect_static_body(&mut self, node: ast::StaticDef) { |
974 | let body = self.collect_expr_opt(node.body()); | 976 | let body = self.collect_expr_opt(node.body()); |
975 | self.body_expr = Some(body); | 977 | self.body_expr = Some(body); |
976 | } | 978 | } |
977 | 979 | ||
978 | fn collect_fn_body(&mut self, node: &ast::FnDef) { | 980 | fn collect_fn_body(&mut self, node: ast::FnDef) { |
979 | if let Some(param_list) = node.param_list() { | 981 | if let Some(param_list) = node.param_list() { |
980 | if let Some(self_param) = param_list.self_param() { | 982 | if let Some(self_param) = param_list.self_param() { |
981 | let ptr = AstPtr::new(self_param); | 983 | let ptr = AstPtr::new(&self_param); |
982 | let param_pat = self.alloc_pat( | 984 | let param_pat = self.alloc_pat( |
983 | Pat::Bind { | 985 | Pat::Bind { |
984 | name: SELF_PARAM, | 986 | name: SELF_PARAM, |
@@ -1027,17 +1029,17 @@ pub(crate) fn body_with_source_map_query( | |||
1027 | DefWithBody::Const(ref c) => { | 1029 | DefWithBody::Const(ref c) => { |
1028 | let src = c.source(db); | 1030 | let src = c.source(db); |
1029 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); | 1031 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); |
1030 | collector.collect_const_body(&src.ast) | 1032 | collector.collect_const_body(src.ast) |
1031 | } | 1033 | } |
1032 | DefWithBody::Function(ref f) => { | 1034 | DefWithBody::Function(ref f) => { |
1033 | let src = f.source(db); | 1035 | let src = f.source(db); |
1034 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); | 1036 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); |
1035 | collector.collect_fn_body(&src.ast) | 1037 | collector.collect_fn_body(src.ast) |
1036 | } | 1038 | } |
1037 | DefWithBody::Static(ref s) => { | 1039 | DefWithBody::Static(ref s) => { |
1038 | let src = s.source(db); | 1040 | let src = s.source(db); |
1039 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); | 1041 | collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); |
1040 | collector.collect_static_body(&src.ast) | 1042 | collector.collect_static_body(src.ast) |
1041 | } | 1043 | } |
1042 | } | 1044 | } |
1043 | 1045 | ||
diff --git a/crates/ra_hir/src/expr/scope.rs b/crates/ra_hir/src/expr/scope.rs
index 28fd52684..6589b782c 100644
--- a/crates/ra_hir/src/expr/scope.rs
+++ b/crates/ra_hir/src/expr/scope.rs
@@ -190,7 +190,7 @@ mod tests { | |||
190 | 190 | ||
191 | let (db, _source_root, file_id) = MockDatabase::with_single_file(&code); | 191 | let (db, _source_root, file_id) = MockDatabase::with_single_file(&code); |
192 | let file = db.parse(file_id).ok().unwrap(); | 192 | let file = db.parse(file_id).ok().unwrap(); |
193 | let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); | 193 | let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); |
194 | let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None); | 194 | let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None); |
195 | 195 | ||
196 | let scopes = analyzer.scopes(); | 196 | let scopes = analyzer.scopes(); |
@@ -290,10 +290,10 @@ mod tests { | |||
290 | let file = db.parse(file_id).ok().unwrap(); | 290 | let file = db.parse(file_id).ok().unwrap(); |
291 | let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()) | 291 | let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()) |
292 | .expect("failed to find a name at the target offset"); | 292 | .expect("failed to find a name at the target offset"); |
293 | let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); | 293 | let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); |
294 | let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None); | 294 | let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None); |
295 | 295 | ||
296 | let local_name_entry = analyzer.resolve_local_name(name_ref).unwrap(); | 296 | let local_name_entry = analyzer.resolve_local_name(&name_ref).unwrap(); |
297 | let local_name = | 297 | let local_name = |
298 | local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); | 298 | local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); |
299 | assert_eq!(local_name.range(), expected_name.syntax().range()); | 299 | assert_eq!(local_name.range(), expected_name.syntax().range()); |
diff --git a/crates/ra_hir/src/expr/validation.rs b/crates/ra_hir/src/expr/validation.rs
index c2a10a0b5..82a06ca25 100644
--- a/crates/ra_hir/src/expr/validation.rs
+++ b/crates/ra_hir/src/expr/validation.rs
@@ -79,7 +79,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
79 | .and_then(StructLit::cast) | 79 | .and_then(StructLit::cast) |
80 | .and_then(|lit| lit.named_field_list()) | 80 | .and_then(|lit| lit.named_field_list()) |
81 | { | 81 | { |
82 | let field_list_ptr = AstPtr::new(field_list_node); | 82 | let field_list_ptr = AstPtr::new(&field_list_node); |
83 | self.sink.push(MissingFields { | 83 | self.sink.push(MissingFields { |
84 | file: file_id, | 84 | file: file_id, |
85 | field_list: field_list_ptr, | 85 | field_list: field_list_ptr, |
diff --git a/crates/ra_hir/src/generics.rs b/crates/ra_hir/src/generics.rs
index 07a59193f..bcbb4988d 100644
--- a/crates/ra_hir/src/generics.rs
+++ b/crates/ra_hir/src/generics.rs
@@ -76,17 +76,17 @@ impl GenericParams { | |||
76 | generics.parent_params = parent.map(|p| db.generic_params(p)); | 76 | generics.parent_params = parent.map(|p| db.generic_params(p)); |
77 | let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32; | 77 | let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32; |
78 | match def { | 78 | match def { |
79 | GenericDef::Function(it) => generics.fill(&*it.source(db).ast, start), | 79 | GenericDef::Function(it) => generics.fill(&it.source(db).ast, start), |
80 | GenericDef::Struct(it) => generics.fill(&*it.source(db).ast, start), | 80 | GenericDef::Struct(it) => generics.fill(&it.source(db).ast, start), |
81 | GenericDef::Union(it) => generics.fill(&*it.source(db).ast, start), | 81 | GenericDef::Union(it) => generics.fill(&it.source(db).ast, start), |
82 | GenericDef::Enum(it) => generics.fill(&*it.source(db).ast, start), | 82 | GenericDef::Enum(it) => generics.fill(&it.source(db).ast, start), |
83 | GenericDef::Trait(it) => { | 83 | GenericDef::Trait(it) => { |
84 | // traits get the Self type as an implicit first type parameter | 84 | // traits get the Self type as an implicit first type parameter |
85 | generics.params.push(GenericParam { idx: start, name: SELF_TYPE, default: None }); | 85 | generics.params.push(GenericParam { idx: start, name: SELF_TYPE, default: None }); |
86 | generics.fill(&*it.source(db).ast, start + 1); | 86 | generics.fill(&it.source(db).ast, start + 1); |
87 | } | 87 | } |
88 | GenericDef::TypeAlias(it) => generics.fill(&*it.source(db).ast, start), | 88 | GenericDef::TypeAlias(it) => generics.fill(&it.source(db).ast, start), |
89 | GenericDef::ImplBlock(it) => generics.fill(&*it.source(db).ast, start), | 89 | GenericDef::ImplBlock(it) => generics.fill(&it.source(db).ast, start), |
90 | GenericDef::EnumVariant(_) => {} | 90 | GenericDef::EnumVariant(_) => {} |
91 | } | 91 | } |
92 | 92 | ||
@@ -102,9 +102,9 @@ impl GenericParams { | |||
102 | } | 102 | } |
103 | } | 103 | } |
104 | 104 | ||
105 | fn fill_params(&mut self, params: &ast::TypeParamList, start: u32) { | 105 | fn fill_params(&mut self, params: ast::TypeParamList, start: u32) { |
106 | for (idx, type_param) in params.type_params().enumerate() { | 106 | for (idx, type_param) in params.type_params().enumerate() { |
107 | let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing); | 107 | let name = type_param.name().map_or_else(Name::missing, |it| it.as_name()); |
108 | let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast); | 108 | let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast); |
109 | 109 | ||
110 | let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default }; | 110 | let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default }; |
@@ -121,7 +121,7 @@ impl GenericParams { | |||
121 | } | 121 | } |
122 | } | 122 | } |
123 | 123 | ||
124 | fn fill_where_predicates(&mut self, where_clause: &ast::WhereClause) { | 124 | fn fill_where_predicates(&mut self, where_clause: ast::WhereClause) { |
125 | for pred in where_clause.predicates() { | 125 | for pred in where_clause.predicates() { |
126 | let type_ref = match pred.type_ref() { | 126 | let type_ref = match pred.type_ref() { |
127 | Some(type_ref) => type_ref, | 127 | Some(type_ref) => type_ref, |
@@ -134,7 +134,7 @@ impl GenericParams { | |||
134 | } | 134 | } |
135 | } | 135 | } |
136 | 136 | ||
137 | fn add_where_predicate_from_bound(&mut self, bound: &ast::TypeBound, type_ref: TypeRef) { | 137 | fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) { |
138 | let path = bound | 138 | let path = bound |
139 | .type_ref() | 139 | .type_ref() |
140 | .and_then(|tr| match tr.kind() { | 140 | .and_then(|tr| match tr.kind() { |
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index 83f5c3f39..05a18eb56 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -6,7 +6,7 @@ use std::{ | |||
6 | use mbe::MacroRules; | 6 | use mbe::MacroRules; |
7 | use ra_db::{salsa, FileId}; | 7 | use ra_db::{salsa, FileId}; |
8 | use ra_prof::profile; | 8 | use ra_prof::profile; |
9 | use ra_syntax::{ast, AstNode, Parse, SyntaxNode, TreeArc}; | 9 | use ra_syntax::{ast, AstNode, Parse, SyntaxNode}; |
10 | 10 | ||
11 | use crate::{AstDatabase, AstId, DefDatabase, FileAstId, InternDatabase, Module, Source}; | 11 | use crate::{AstDatabase, AstId, DefDatabase, FileAstId, InternDatabase, Module, Source}; |
12 | 12 | ||
@@ -58,11 +58,11 @@ impl HirFileId { | |||
58 | pub(crate) fn parse_or_expand_query( | 58 | pub(crate) fn parse_or_expand_query( |
59 | db: &impl AstDatabase, | 59 | db: &impl AstDatabase, |
60 | file_id: HirFileId, | 60 | file_id: HirFileId, |
61 | ) -> Option<TreeArc<SyntaxNode>> { | 61 | ) -> Option<SyntaxNode> { |
62 | match file_id.0 { | 62 | match file_id.0 { |
63 | HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().to_owned()), | 63 | HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().clone()), |
64 | HirFileIdRepr::Macro(macro_file) => { | 64 | HirFileIdRepr::Macro(macro_file) => { |
65 | db.parse_macro(macro_file).map(|it| it.tree().to_owned()) | 65 | db.parse_macro(macro_file).map(|it| it.syntax_node()) |
66 | } | 66 | } |
67 | } | 67 | } |
68 | } | 68 | } |
@@ -123,7 +123,7 @@ pub struct MacroDefId(pub(crate) AstId<ast::MacroCall>); | |||
123 | pub(crate) fn macro_def_query(db: &impl AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> { | 123 | pub(crate) fn macro_def_query(db: &impl AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> { |
124 | let macro_call = id.0.to_node(db); | 124 | let macro_call = id.0.to_node(db); |
125 | let arg = macro_call.token_tree()?; | 125 | let arg = macro_call.token_tree()?; |
126 | let (tt, _) = mbe::ast_to_token_tree(arg).or_else(|| { | 126 | let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| { |
127 | log::warn!("fail on macro_def to token tree: {:#?}", arg); | 127 | log::warn!("fail on macro_def to token tree: {:#?}", arg); |
128 | None | 128 | None |
129 | })?; | 129 | })?; |
@@ -138,7 +138,7 @@ pub(crate) fn macro_arg_query(db: &impl AstDatabase, id: MacroCallId) -> Option< | |||
138 | let loc = id.loc(db); | 138 | let loc = id.loc(db); |
139 | let macro_call = loc.ast_id.to_node(db); | 139 | let macro_call = loc.ast_id.to_node(db); |
140 | let arg = macro_call.token_tree()?; | 140 | let arg = macro_call.token_tree()?; |
141 | let (tt, _) = mbe::ast_to_token_tree(arg)?; | 141 | let (tt, _) = mbe::ast_to_token_tree(&arg)?; |
142 | Some(Arc::new(tt)) | 142 | Some(Arc::new(tt)) |
143 | } | 143 | } |
144 | 144 | ||
@@ -262,7 +262,7 @@ pub(crate) trait AstItemDef<N: AstNode>: salsa::InternKey + Clone { | |||
262 | let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) }; | 262 | let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) }; |
263 | Self::intern(ctx.db, loc) | 263 | Self::intern(ctx.db, loc) |
264 | } | 264 | } |
265 | fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source<TreeArc<N>> { | 265 | fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source<N> { |
266 | let loc = self.lookup_intern(db); | 266 | let loc = self.lookup_intern(db); |
267 | let ast = loc.ast_id.to_node(db); | 267 | let ast = loc.ast_id.to_node(db); |
268 | Source { file_id: loc.ast_id.file_id(), ast } | 268 | Source { file_id: loc.ast_id.file_id(), ast } |
diff --git a/crates/ra_hir/src/impl_block.rs b/crates/ra_hir/src/impl_block.rs
index ce134b27a..8e62cf66d 100644
--- a/crates/ra_hir/src/impl_block.rs
+++ b/crates/ra_hir/src/impl_block.rs
@@ -4,7 +4,7 @@ use std::sync::Arc; | |||
4 | use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; | 4 | use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; |
5 | use ra_syntax::{ | 5 | use ra_syntax::{ |
6 | ast::{self, AstNode}, | 6 | ast::{self, AstNode}, |
7 | AstPtr, SourceFile, TreeArc, | 7 | AstPtr, SourceFile, |
8 | }; | 8 | }; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{ |
@@ -28,9 +28,9 @@ impl ImplSourceMap { | |||
28 | self.map.insert(impl_id, AstPtr::new(impl_block)) | 28 | self.map.insert(impl_id, AstPtr::new(impl_block)) |
29 | } | 29 | } |
30 | 30 | ||
31 | pub fn get(&self, source: &ModuleSource, impl_id: ImplId) -> TreeArc<ast::ImplBlock> { | 31 | pub fn get(&self, source: &ModuleSource, impl_id: ImplId) -> ast::ImplBlock { |
32 | let file = match source { | 32 | let file = match source { |
33 | ModuleSource::SourceFile(file) => &*file, | 33 | ModuleSource::SourceFile(file) => file.clone(), |
34 | ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), | 34 | ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), |
35 | }; | 35 | }; |
36 | 36 | ||
@@ -45,8 +45,8 @@ pub struct ImplBlock { | |||
45 | } | 45 | } |
46 | 46 | ||
47 | impl HasSource for ImplBlock { | 47 | impl HasSource for ImplBlock { |
48 | type Ast = TreeArc<ast::ImplBlock>; | 48 | type Ast = ast::ImplBlock; |
49 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::ImplBlock>> { | 49 | fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ImplBlock> { |
50 | let source_map = db.impls_in_module_with_source_map(self.module).1; | 50 | let source_map = db.impls_in_module_with_source_map(self.module).1; |
51 | let src = self.module.definition_source(db); | 51 | let src = self.module.definition_source(db); |
52 | Source { file_id: src.file_id, ast: source_map.get(&src.ast, self.impl_id) } | 52 | Source { file_id: src.file_id, ast: source_map.get(&src.ast, self.impl_id) } |
@@ -132,9 +132,9 @@ impl ImplData { | |||
132 | item_list | 132 | item_list |
133 | .impl_items() | 133 | .impl_items() |
134 | .map(|item_node| match item_node.kind() { | 134 | .map(|item_node| match item_node.kind() { |
135 | ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(), | 135 | ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(&it) }.into(), |
136 | ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(), | 136 | ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(), |
137 | ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(), | 137 | ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(), |
138 | }) | 138 | }) |
139 | .collect() | 139 | .collect() |
140 | } else { | 140 | } else { |
@@ -202,20 +202,20 @@ impl ModuleImplBlocks { | |||
202 | 202 | ||
203 | let src = m.module.definition_source(db); | 203 | let src = m.module.definition_source(db); |
204 | let node = match &src.ast { | 204 | let node = match &src.ast { |
205 | ModuleSource::SourceFile(node) => node.syntax(), | 205 | ModuleSource::SourceFile(node) => node.syntax().clone(), |
206 | ModuleSource::Module(node) => { | 206 | ModuleSource::Module(node) => { |
207 | node.item_list().expect("inline module should have item list").syntax() | 207 | node.item_list().expect("inline module should have item list").syntax().clone() |
208 | } | 208 | } |
209 | }; | 209 | }; |
210 | 210 | ||
211 | for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) { | 211 | for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) { |
212 | let impl_block = ImplData::from_ast(db, src.file_id, m.module, impl_block_ast); | 212 | let impl_block = ImplData::from_ast(db, src.file_id, m.module, &impl_block_ast); |
213 | let id = m.impls.alloc(impl_block); | 213 | let id = m.impls.alloc(impl_block); |
214 | for &impl_item in &m.impls[id].items { | 214 | for &impl_item in &m.impls[id].items { |
215 | m.impls_by_def.insert(impl_item, id); | 215 | m.impls_by_def.insert(impl_item, id); |
216 | } | 216 | } |
217 | 217 | ||
218 | source_map.insert(id, impl_block_ast); | 218 | source_map.insert(id, &impl_block_ast); |
219 | } | 219 | } |
220 | 220 | ||
221 | m | 221 | m |
diff --git a/crates/ra_hir/src/lang_item.rs b/crates/ra_hir/src/lang_item.rs
index 0443d4d9a..fd6609fb8 100644
--- a/crates/ra_hir/src/lang_item.rs
+++ b/crates/ra_hir/src/lang_item.rs
@@ -1,7 +1,7 @@ | |||
1 | use rustc_hash::FxHashMap; | 1 | use rustc_hash::FxHashMap; |
2 | use std::sync::Arc; | 2 | use std::sync::Arc; |
3 | 3 | ||
4 | use ra_syntax::{ast::AttrsOwner, SmolStr, TreeArc}; | 4 | use ra_syntax::{ast::AttrsOwner, SmolStr}; |
5 | 5 | ||
6 | use crate::{ | 6 | use crate::{ |
7 | AstDatabase, Crate, DefDatabase, Enum, Function, HasSource, HirDatabase, ImplBlock, Module, | 7 | AstDatabase, Crate, DefDatabase, Enum, Function, HasSource, HirDatabase, ImplBlock, Module, |
@@ -95,7 +95,7 @@ impl LangItems { | |||
95 | // Look for impl targets | 95 | // Look for impl targets |
96 | for impl_block in module.impl_blocks(db) { | 96 | for impl_block in module.impl_blocks(db) { |
97 | let src = impl_block.source(db); | 97 | let src = impl_block.source(db); |
98 | if let Some(lang_item_name) = lang_item_name(&*src.ast) { | 98 | if let Some(lang_item_name) = lang_item_name(&src.ast) { |
99 | self.items | 99 | self.items |
100 | .entry(lang_item_name) | 100 | .entry(lang_item_name) |
101 | .or_insert_with(|| LangItemTarget::ImplBlock(impl_block)); | 101 | .or_insert_with(|| LangItemTarget::ImplBlock(impl_block)); |
@@ -137,11 +137,11 @@ impl LangItems { | |||
137 | item: T, | 137 | item: T, |
138 | constructor: fn(T) -> LangItemTarget, | 138 | constructor: fn(T) -> LangItemTarget, |
139 | ) where | 139 | ) where |
140 | T: Copy + HasSource<Ast = TreeArc<N>>, | 140 | T: Copy + HasSource<Ast = N>, |
141 | N: AttrsOwner, | 141 | N: AttrsOwner, |
142 | { | 142 | { |
143 | let node = item.source(db).ast; | 143 | let node = item.source(db).ast; |
144 | if let Some(lang_item_name) = lang_item_name(&*node) { | 144 | if let Some(lang_item_name) = lang_item_name(&node) { |
145 | self.items.entry(lang_item_name).or_insert_with(|| constructor(item)); | 145 | self.items.entry(lang_item_name).or_insert_with(|| constructor(item)); |
146 | } | 146 | } |
147 | } | 147 | } |
diff --git a/crates/ra_hir/src/name.rs b/crates/ra_hir/src/name.rs
index 40c9d6002..c589f8aba 100644
--- a/crates/ra_hir/src/name.rs
+++ b/crates/ra_hir/src/name.rs
@@ -75,7 +75,7 @@ impl AsName for ast::Name { | |||
75 | } | 75 | } |
76 | } | 76 | } |
77 | 77 | ||
78 | impl<'a> AsName for ast::FieldKind<'a> { | 78 | impl AsName for ast::FieldKind { |
79 | fn as_name(&self) -> Name { | 79 | fn as_name(&self) -> Name { |
80 | match self { | 80 | match self { |
81 | ast::FieldKind::Name(nr) => nr.as_name(), | 81 | ast::FieldKind::Name(nr) => nr.as_name(), |
diff --git a/crates/ra_hir/src/nameres/raw.rs b/crates/ra_hir/src/nameres/raw.rs
index 46b2bef5b..8517f3c43 100644
--- a/crates/ra_hir/src/nameres/raw.rs
+++ b/crates/ra_hir/src/nameres/raw.rs
@@ -3,7 +3,7 @@ use std::{ops::Index, sync::Arc}; | |||
3 | use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; | 3 | use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; |
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | ast::{self, AttrsOwner, NameOwner}, | 5 | ast::{self, AttrsOwner, NameOwner}, |
6 | AstNode, AstPtr, SmolStr, SourceFile, TreeArc, | 6 | AstNode, AstPtr, SmolStr, SourceFile, |
7 | }; | 7 | }; |
8 | use test_utils::tested_by; | 8 | use test_utils::tested_by; |
9 | 9 | ||
@@ -32,7 +32,7 @@ pub struct ImportSourceMap { | |||
32 | } | 32 | } |
33 | 33 | ||
34 | type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>; | 34 | type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>; |
35 | type ImportSource = Either<TreeArc<ast::UseTree>, TreeArc<ast::ExternCrateItem>>; | 35 | type ImportSource = Either<ast::UseTree, ast::ExternCrateItem>; |
36 | 36 | ||
37 | impl ImportSourcePtr { | 37 | impl ImportSourcePtr { |
38 | fn to_node(self, file: &SourceFile) -> ImportSource { | 38 | fn to_node(self, file: &SourceFile) -> ImportSource { |
@@ -50,11 +50,11 @@ impl ImportSourceMap { | |||
50 | 50 | ||
51 | pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource { | 51 | pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource { |
52 | let file = match source { | 52 | let file = match source { |
53 | ModuleSource::SourceFile(file) => &*file, | 53 | ModuleSource::SourceFile(file) => file.clone(), |
54 | ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), | 54 | ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), |
55 | }; | 55 | }; |
56 | 56 | ||
57 | self.map[import].to_node(file) | 57 | self.map[import].to_node(&file) |
58 | } | 58 | } |
59 | } | 59 | } |
60 | 60 | ||
@@ -76,8 +76,8 @@ impl RawItems { | |||
76 | source_map: ImportSourceMap::default(), | 76 | source_map: ImportSourceMap::default(), |
77 | }; | 77 | }; |
78 | if let Some(node) = db.parse_or_expand(file_id) { | 78 | if let Some(node) = db.parse_or_expand(file_id) { |
79 | if let Some(source_file) = ast::SourceFile::cast(&node) { | 79 | if let Some(source_file) = ast::SourceFile::cast(node) { |
80 | collector.process_module(None, &*source_file); | 80 | collector.process_module(None, source_file); |
81 | } | 81 | } |
82 | } | 82 | } |
83 | (Arc::new(collector.raw_items), Arc::new(collector.source_map)) | 83 | (Arc::new(collector.raw_items), Arc::new(collector.source_map)) |
@@ -188,7 +188,7 @@ struct RawItemsCollector { | |||
188 | } | 188 | } |
189 | 189 | ||
190 | impl RawItemsCollector { | 190 | impl RawItemsCollector { |
191 | fn process_module(&mut self, current_module: Option<Module>, body: &impl ast::ModuleItemOwner) { | 191 | fn process_module(&mut self, current_module: Option<Module>, body: impl ast::ModuleItemOwner) { |
192 | for item_or_macro in body.items_with_macros() { | 192 | for item_or_macro in body.items_with_macros() { |
193 | match item_or_macro { | 193 | match item_or_macro { |
194 | ast::ItemOrMacro::Macro(m) => self.add_macro(current_module, m), | 194 | ast::ItemOrMacro::Macro(m) => self.add_macro(current_module, m), |
@@ -197,7 +197,7 @@ impl RawItemsCollector { | |||
197 | } | 197 | } |
198 | } | 198 | } |
199 | 199 | ||
200 | fn add_item(&mut self, current_module: Option<Module>, item: &ast::ModuleItem) { | 200 | fn add_item(&mut self, current_module: Option<Module>, item: ast::ModuleItem) { |
201 | let (kind, name) = match item.kind() { | 201 | let (kind, name) = match item.kind() { |
202 | ast::ModuleItemKind::Module(module) => { | 202 | ast::ModuleItemKind::Module(module) => { |
203 | self.add_module(current_module, module); | 203 | self.add_module(current_module, module); |
@@ -216,7 +216,7 @@ impl RawItemsCollector { | |||
216 | return; | 216 | return; |
217 | } | 217 | } |
218 | ast::ModuleItemKind::StructDef(it) => { | 218 | ast::ModuleItemKind::StructDef(it) => { |
219 | let id = self.source_ast_id_map.ast_id(it); | 219 | let id = self.source_ast_id_map.ast_id(&it); |
220 | let name = it.name(); | 220 | let name = it.name(); |
221 | if it.is_union() { | 221 | if it.is_union() { |
222 | (DefKind::Union(id), name) | 222 | (DefKind::Union(id), name) |
@@ -225,22 +225,22 @@ impl RawItemsCollector { | |||
225 | } | 225 | } |
226 | } | 226 | } |
227 | ast::ModuleItemKind::EnumDef(it) => { | 227 | ast::ModuleItemKind::EnumDef(it) => { |
228 | (DefKind::Enum(self.source_ast_id_map.ast_id(it)), it.name()) | 228 | (DefKind::Enum(self.source_ast_id_map.ast_id(&it)), it.name()) |
229 | } | 229 | } |
230 | ast::ModuleItemKind::FnDef(it) => { | 230 | ast::ModuleItemKind::FnDef(it) => { |
231 | (DefKind::Function(self.source_ast_id_map.ast_id(it)), it.name()) | 231 | (DefKind::Function(self.source_ast_id_map.ast_id(&it)), it.name()) |
232 | } | 232 | } |
233 | ast::ModuleItemKind::TraitDef(it) => { | 233 | ast::ModuleItemKind::TraitDef(it) => { |
234 | (DefKind::Trait(self.source_ast_id_map.ast_id(it)), it.name()) | 234 | (DefKind::Trait(self.source_ast_id_map.ast_id(&it)), it.name()) |
235 | } | 235 | } |
236 | ast::ModuleItemKind::TypeAliasDef(it) => { | 236 | ast::ModuleItemKind::TypeAliasDef(it) => { |
237 | (DefKind::TypeAlias(self.source_ast_id_map.ast_id(it)), it.name()) | 237 | (DefKind::TypeAlias(self.source_ast_id_map.ast_id(&it)), it.name()) |
238 | } | 238 | } |
239 | ast::ModuleItemKind::ConstDef(it) => { | 239 | ast::ModuleItemKind::ConstDef(it) => { |
240 | (DefKind::Const(self.source_ast_id_map.ast_id(it)), it.name()) | 240 | (DefKind::Const(self.source_ast_id_map.ast_id(&it)), it.name()) |
241 | } | 241 | } |
242 | ast::ModuleItemKind::StaticDef(it) => { | 242 | ast::ModuleItemKind::StaticDef(it) => { |
243 | (DefKind::Static(self.source_ast_id_map.ast_id(it)), it.name()) | 243 | (DefKind::Static(self.source_ast_id_map.ast_id(&it)), it.name()) |
244 | } | 244 | } |
245 | }; | 245 | }; |
246 | if let Some(name) = name { | 246 | if let Some(name) = name { |
@@ -250,14 +250,14 @@ impl RawItemsCollector { | |||
250 | } | 250 | } |
251 | } | 251 | } |
252 | 252 | ||
253 | fn add_module(&mut self, current_module: Option<Module>, module: &ast::Module) { | 253 | fn add_module(&mut self, current_module: Option<Module>, module: ast::Module) { |
254 | let name = match module.name() { | 254 | let name = match module.name() { |
255 | Some(it) => it.as_name(), | 255 | Some(it) => it.as_name(), |
256 | None => return, | 256 | None => return, |
257 | }; | 257 | }; |
258 | 258 | ||
259 | let attr_path = extract_mod_path_attribute(module); | 259 | let attr_path = extract_mod_path_attribute(&module); |
260 | let ast_id = self.source_ast_id_map.ast_id(module); | 260 | let ast_id = self.source_ast_id_map.ast_id(&module); |
261 | if module.has_semi() { | 261 | if module.has_semi() { |
262 | let item = | 262 | let item = |
263 | self.raw_items.modules.alloc(ModuleData::Declaration { name, ast_id, attr_path }); | 263 | self.raw_items.modules.alloc(ModuleData::Declaration { name, ast_id, attr_path }); |
@@ -278,10 +278,10 @@ impl RawItemsCollector { | |||
278 | tested_by!(name_res_works_for_broken_modules); | 278 | tested_by!(name_res_works_for_broken_modules); |
279 | } | 279 | } |
280 | 280 | ||
281 | fn add_use_item(&mut self, current_module: Option<Module>, use_item: &ast::UseItem) { | 281 | fn add_use_item(&mut self, current_module: Option<Module>, use_item: ast::UseItem) { |
282 | let is_prelude = use_item.has_atom_attr("prelude_import"); | 282 | let is_prelude = use_item.has_atom_attr("prelude_import"); |
283 | 283 | ||
284 | Path::expand_use_item(use_item, |path, use_tree, is_glob, alias| { | 284 | Path::expand_use_item(&use_item, |path, use_tree, is_glob, alias| { |
285 | let import_data = | 285 | let import_data = |
286 | ImportData { path, alias, is_glob, is_prelude, is_extern_crate: false }; | 286 | ImportData { path, alias, is_glob, is_prelude, is_extern_crate: false }; |
287 | self.push_import(current_module, import_data, Either::A(AstPtr::new(use_tree))); | 287 | self.push_import(current_module, import_data, Either::A(AstPtr::new(use_tree))); |
@@ -291,11 +291,11 @@ impl RawItemsCollector { | |||
291 | fn add_extern_crate_item( | 291 | fn add_extern_crate_item( |
292 | &mut self, | 292 | &mut self, |
293 | current_module: Option<Module>, | 293 | current_module: Option<Module>, |
294 | extern_crate: &ast::ExternCrateItem, | 294 | extern_crate: ast::ExternCrateItem, |
295 | ) { | 295 | ) { |
296 | if let Some(name_ref) = extern_crate.name_ref() { | 296 | if let Some(name_ref) = extern_crate.name_ref() { |
297 | let path = Path::from_name_ref(name_ref); | 297 | let path = Path::from_name_ref(&name_ref); |
298 | let alias = extern_crate.alias().and_then(|a| a.name()).map(AsName::as_name); | 298 | let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name()); |
299 | let import_data = ImportData { | 299 | let import_data = ImportData { |
300 | path, | 300 | path, |
301 | alias, | 301 | alias, |
@@ -303,18 +303,18 @@ impl RawItemsCollector { | |||
303 | is_prelude: false, | 303 | is_prelude: false, |
304 | is_extern_crate: true, | 304 | is_extern_crate: true, |
305 | }; | 305 | }; |
306 | self.push_import(current_module, import_data, Either::B(AstPtr::new(extern_crate))); | 306 | self.push_import(current_module, import_data, Either::B(AstPtr::new(&extern_crate))); |
307 | } | 307 | } |
308 | } | 308 | } |
309 | 309 | ||
310 | fn add_macro(&mut self, current_module: Option<Module>, m: &ast::MacroCall) { | 310 | fn add_macro(&mut self, current_module: Option<Module>, m: ast::MacroCall) { |
311 | let path = match m.path().and_then(Path::from_ast) { | 311 | let path = match m.path().and_then(Path::from_ast) { |
312 | Some(it) => it, | 312 | Some(it) => it, |
313 | _ => return, | 313 | _ => return, |
314 | }; | 314 | }; |
315 | 315 | ||
316 | let name = m.name().map(|it| it.as_name()); | 316 | let name = m.name().map(|it| it.as_name()); |
317 | let ast_id = self.source_ast_id_map.ast_id(m); | 317 | let ast_id = self.source_ast_id_map.ast_id(&m); |
318 | let export = m.has_atom_attr("macro_export"); | 318 | let export = m.has_atom_attr("macro_export"); |
319 | let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export }); | 319 | let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export }); |
320 | self.push_item(current_module, RawItem::Macro(m)); | 320 | self.push_item(current_module, RawItem::Macro(m)); |
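The `nameres/raw.rs` hunks above all follow one mechanical pattern: with the new rowan, AST nodes are owned, cheaply cloneable handles rather than `&`-borrowed slices of a `TreeArc`, so the collector methods now take `ast::Module`, `ast::UseItem`, `ast::ExternCrateItem`, and `ast::MacroCall` by value and lend out `&it` only where a helper such as `source_ast_id_map.ast_id(&it)` still wants a reference. The sketch below models that ownership shift with hypothetical stand-in types (`Node`, `AstIdMap`, `Collector`), not the real ra_syntax API.

```rust
use std::rc::Rc;

/// Hypothetical stand-in for a new-rowan AST node: an owned handle
/// around shared tree data, so cloning is just an `Rc` bump.
#[derive(Clone)]
struct Node {
    data: Rc<String>,
}

/// Hypothetical id map: helpers like `ast_id` keep borrowing the node.
struct AstIdMap;

impl AstIdMap {
    fn ast_id(&self, node: &Node) -> usize {
        // A real map would intern a pointer into the tree; the text
        // length is just a placeholder id for the sketch.
        node.data.len()
    }
}

struct Collector {
    ids: Vec<usize>,
    map: AstIdMap,
}

impl Collector {
    // Old style: `fn add_module(&mut self, module: &Node)`.
    // New style: take the handle by value and lend it out as `&module`.
    fn add_module(&mut self, module: Node) {
        let id = self.map.ast_id(&module);
        self.ids.push(id);
    }
}

fn main() {
    let mut collector = Collector { ids: Vec::new(), map: AstIdMap };
    let module = Node { data: Rc::new("mod foo;".to_string()) };
    // Passing by value moves only the handle; the tree data is shared.
    collector.add_module(module.clone());
    collector.add_module(module);
    assert_eq!(collector.ids, vec![8, 8]);
}
```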
diff --git a/crates/ra_hir/src/path.rs b/crates/ra_hir/src/path.rs index bce9d2d4b..882db7681 100644 --- a/crates/ra_hir/src/path.rs +++ b/crates/ra_hir/src/path.rs | |||
@@ -47,9 +47,9 @@ pub enum PathKind { | |||
47 | 47 | ||
48 | impl Path { | 48 | impl Path { |
49 | /// Calls `cb` with all paths, represented by this use item. | 49 | /// Calls `cb` with all paths, represented by this use item. |
50 | pub fn expand_use_item<'a>( | 50 | pub fn expand_use_item( |
51 | item: &'a ast::UseItem, | 51 | item: &ast::UseItem, |
52 | mut cb: impl FnMut(Path, &'a ast::UseTree, bool, Option<Name>), | 52 | mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>), |
53 | ) { | 53 | ) { |
54 | if let Some(tree) = item.use_tree() { | 54 | if let Some(tree) = item.use_tree() { |
55 | expand_use_tree(None, tree, &mut cb); | 55 | expand_use_tree(None, tree, &mut cb); |
@@ -57,7 +57,7 @@ impl Path { | |||
57 | } | 57 | } |
58 | 58 | ||
59 | /// Converts an `ast::Path` to `Path`. Works with use trees. | 59 | /// Converts an `ast::Path` to `Path`. Works with use trees. |
60 | pub fn from_ast(mut path: &ast::Path) -> Option<Path> { | 60 | pub fn from_ast(mut path: ast::Path) -> Option<Path> { |
61 | let mut kind = PathKind::Plain; | 61 | let mut kind = PathKind::Plain; |
62 | let mut segments = Vec::new(); | 62 | let mut segments = Vec::new(); |
63 | loop { | 63 | loop { |
@@ -87,7 +87,7 @@ impl Path { | |||
87 | break; | 87 | break; |
88 | } | 88 | } |
89 | } | 89 | } |
90 | path = match qualifier(path) { | 90 | path = match qualifier(&path) { |
91 | Some(it) => it, | 91 | Some(it) => it, |
92 | None => break, | 92 | None => break, |
93 | }; | 93 | }; |
@@ -95,7 +95,7 @@ impl Path { | |||
95 | segments.reverse(); | 95 | segments.reverse(); |
96 | return Some(Path { kind, segments }); | 96 | return Some(Path { kind, segments }); |
97 | 97 | ||
98 | fn qualifier(path: &ast::Path) -> Option<&ast::Path> { | 98 | fn qualifier(path: &ast::Path) -> Option<ast::Path> { |
99 | if let Some(q) = path.qualifier() { | 99 | if let Some(q) = path.qualifier() { |
100 | return Some(q); | 100 | return Some(q); |
101 | } | 101 | } |
@@ -136,7 +136,7 @@ impl Path { | |||
136 | } | 136 | } |
137 | 137 | ||
138 | impl GenericArgs { | 138 | impl GenericArgs { |
139 | pub(crate) fn from_ast(node: &ast::TypeArgList) -> Option<GenericArgs> { | 139 | pub(crate) fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> { |
140 | let mut args = Vec::new(); | 140 | let mut args = Vec::new(); |
141 | for type_arg in node.type_args() { | 141 | for type_arg in node.type_args() { |
142 | let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); | 142 | let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); |
@@ -160,10 +160,10 @@ impl From<Name> for Path { | |||
160 | } | 160 | } |
161 | } | 161 | } |
162 | 162 | ||
163 | fn expand_use_tree<'a>( | 163 | fn expand_use_tree( |
164 | prefix: Option<Path>, | 164 | prefix: Option<Path>, |
165 | tree: &'a ast::UseTree, | 165 | tree: ast::UseTree, |
166 | cb: &mut impl FnMut(Path, &'a ast::UseTree, bool, Option<Name>), | 166 | cb: &mut impl FnMut(Path, &ast::UseTree, bool, Option<Name>), |
167 | ) { | 167 | ) { |
168 | if let Some(use_tree_list) = tree.use_tree_list() { | 168 | if let Some(use_tree_list) = tree.use_tree_list() { |
169 | let prefix = match tree.path() { | 169 | let prefix = match tree.path() { |
@@ -188,7 +188,7 @@ fn expand_use_tree<'a>( | |||
188 | if let Some(segment) = ast_path.segment() { | 188 | if let Some(segment) = ast_path.segment() { |
189 | if segment.kind() == Some(ast::PathSegmentKind::SelfKw) { | 189 | if segment.kind() == Some(ast::PathSegmentKind::SelfKw) { |
190 | if let Some(prefix) = prefix { | 190 | if let Some(prefix) = prefix { |
191 | cb(prefix, tree, false, alias); | 191 | cb(prefix, &tree, false, alias); |
192 | return; | 192 | return; |
193 | } | 193 | } |
194 | } | 194 | } |
@@ -196,7 +196,7 @@ fn expand_use_tree<'a>( | |||
196 | } | 196 | } |
197 | if let Some(path) = convert_path(prefix, ast_path) { | 197 | if let Some(path) = convert_path(prefix, ast_path) { |
198 | let is_glob = tree.has_star(); | 198 | let is_glob = tree.has_star(); |
199 | cb(path, tree, is_glob, alias) | 199 | cb(path, &tree, is_glob, alias) |
200 | } | 200 | } |
201 | // FIXME: report errors somewhere | 201 | // FIXME: report errors somewhere |
202 | // We get here if we do | 202 | // We get here if we do |
@@ -204,7 +204,7 @@ fn expand_use_tree<'a>( | |||
204 | } | 204 | } |
205 | } | 205 | } |
206 | 206 | ||
207 | fn convert_path(prefix: Option<Path>, path: &ast::Path) -> Option<Path> { | 207 | fn convert_path(prefix: Option<Path>, path: ast::Path) -> Option<Path> { |
208 | let prefix = | 208 | let prefix = |
209 | if let Some(qual) = path.qualifier() { Some(convert_path(prefix, qual)?) } else { prefix }; | 209 | if let Some(qual) = path.qualifier() { Some(convert_path(prefix, qual)?) } else { prefix }; |
210 | let segment = path.segment()?; | 210 | let segment = path.segment()?; |
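In `path.rs` the interesting consequence is in the loop: `qualifier` now hands back an owned `ast::Path`, so the loop variable can simply be reassigned (`path = match qualifier(&path) { ... }`) and the `'a` lifetime previously threaded through `expand_use_item` and `expand_use_tree` disappears. Below is a minimal sketch of that reborrow-to-reassign rewrite, using a hypothetical segment chain rather than the real `ast::Path`.

```rust
use std::rc::Rc;

/// Hypothetical path chain standing in for `ast::Path`: each node
/// optionally points at its qualifier (`a::b::c` -> `a::b` -> `a`).
#[derive(Clone)]
struct Path {
    segment: String,
    qualifier: Option<Rc<Path>>,
}

/// Returning an owned handle instead of `Option<&Path>` is what lets
/// callers drop their lifetime parameters.
fn qualifier(path: &Path) -> Option<Path> {
    path.qualifier.as_deref().cloned()
}

/// Collect segments from the innermost path outwards, mirroring the
/// `loop { ...; path = match qualifier(&path) { ... } }` shape in the diff.
fn segments(mut path: Path) -> Vec<String> {
    let mut out = Vec::new();
    loop {
        out.push(path.segment.clone());
        path = match qualifier(&path) {
            Some(it) => it,
            None => break,
        };
    }
    out.reverse();
    out
}

fn main() {
    let a = Rc::new(Path { segment: "a".to_string(), qualifier: None });
    let ab = Rc::new(Path { segment: "b".to_string(), qualifier: Some(a) });
    let abc = Path { segment: "c".to_string(), qualifier: Some(ab) };
    assert_eq!(segments(abc), vec!["a", "b", "c"]);
}
```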
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs index 071c1bb18..e7bc4df97 100644 --- a/crates/ra_hir/src/source_binder.rs +++ b/crates/ra_hir/src/source_binder.rs | |||
@@ -37,7 +37,7 @@ pub fn module_from_file_id(db: &impl HirDatabase, file_id: FileId) -> Option<Mod | |||
37 | pub fn module_from_declaration( | 37 | pub fn module_from_declaration( |
38 | db: &impl HirDatabase, | 38 | db: &impl HirDatabase, |
39 | file_id: FileId, | 39 | file_id: FileId, |
40 | decl: &ast::Module, | 40 | decl: ast::Module, |
41 | ) -> Option<Module> { | 41 | ) -> Option<Module> { |
42 | let parent_module = module_from_file_id(db, file_id); | 42 | let parent_module = module_from_file_id(db, file_id); |
43 | let child_name = decl.name(); | 43 | let child_name = decl.name(); |
@@ -50,8 +50,8 @@ pub fn module_from_declaration( | |||
50 | /// Locates the module by position in the source code. | 50 | /// Locates the module by position in the source code. |
51 | pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> { | 51 | pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> { |
52 | let parse = db.parse(position.file_id); | 52 | let parse = db.parse(position.file_id); |
53 | match find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) { | 53 | match &find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) { |
54 | Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m), | 54 | Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m.clone()), |
55 | _ => module_from_file_id(db, position.file_id), | 55 | _ => module_from_file_id(db, position.file_id), |
56 | } | 56 | } |
57 | } | 57 | } |
@@ -59,12 +59,12 @@ pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Op | |||
59 | fn module_from_inline( | 59 | fn module_from_inline( |
60 | db: &impl HirDatabase, | 60 | db: &impl HirDatabase, |
61 | file_id: FileId, | 61 | file_id: FileId, |
62 | module: &ast::Module, | 62 | module: ast::Module, |
63 | ) -> Option<Module> { | 63 | ) -> Option<Module> { |
64 | assert!(!module.has_semi()); | 64 | assert!(!module.has_semi()); |
65 | let file_id = file_id.into(); | 65 | let file_id = file_id.into(); |
66 | let ast_id_map = db.ast_id_map(file_id); | 66 | let ast_id_map = db.ast_id_map(file_id); |
67 | let item_id = ast_id_map.ast_id(module).with_file_id(file_id); | 67 | let item_id = ast_id_map.ast_id(&module).with_file_id(file_id); |
68 | module_from_source(db, file_id, Some(item_id)) | 68 | module_from_source(db, file_id, Some(item_id)) |
69 | } | 69 | } |
70 | 70 | ||
@@ -127,16 +127,16 @@ fn try_get_resolver_for_node( | |||
127 | file_id: FileId, | 127 | file_id: FileId, |
128 | node: &SyntaxNode, | 128 | node: &SyntaxNode, |
129 | ) -> Option<Resolver> { | 129 | ) -> Option<Resolver> { |
130 | if let Some(module) = ast::Module::cast(node) { | 130 | if let Some(module) = ast::Module::cast(node.clone()) { |
131 | Some(module_from_declaration(db, file_id, module)?.resolver(db)) | 131 | Some(module_from_declaration(db, file_id, module)?.resolver(db)) |
132 | } else if let Some(_) = ast::SourceFile::cast(node) { | 132 | } else if let Some(_) = ast::SourceFile::cast(node.clone()) { |
133 | Some(module_from_source(db, file_id.into(), None)?.resolver(db)) | 133 | Some(module_from_source(db, file_id.into(), None)?.resolver(db)) |
134 | } else if let Some(s) = ast::StructDef::cast(node) { | 134 | } else if let Some(s) = ast::StructDef::cast(node.clone()) { |
135 | let module = module_from_child_node(db, file_id, s.syntax())?; | 135 | let module = module_from_child_node(db, file_id, s.syntax())?; |
136 | Some(struct_from_module(db, module, s).resolver(db)) | 136 | Some(struct_from_module(db, module, &s).resolver(db)) |
137 | } else if let Some(e) = ast::EnumDef::cast(node) { | 137 | } else if let Some(e) = ast::EnumDef::cast(node.clone()) { |
138 | let module = module_from_child_node(db, file_id, e.syntax())?; | 138 | let module = module_from_child_node(db, file_id, e.syntax())?; |
139 | Some(enum_from_module(db, module, e).resolver(db)) | 139 | Some(enum_from_module(db, module, &e).resolver(db)) |
140 | } else if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { | 140 | } else if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { |
141 | Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db)) | 141 | Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db)) |
142 | } else { | 142 | } else { |
@@ -153,14 +153,14 @@ fn def_with_body_from_child_node( | |||
153 | let module = module_from_child_node(db, file_id, node)?; | 153 | let module = module_from_child_node(db, file_id, node)?; |
154 | let ctx = LocationCtx::new(db, module, file_id.into()); | 154 | let ctx = LocationCtx::new(db, module, file_id.into()); |
155 | node.ancestors().find_map(|node| { | 155 | node.ancestors().find_map(|node| { |
156 | if let Some(def) = ast::FnDef::cast(node) { | 156 | if let Some(def) = ast::FnDef::cast(node.clone()) { |
157 | return Some(Function { id: ctx.to_def(def) }.into()); | 157 | return Some(Function { id: ctx.to_def(&def) }.into()); |
158 | } | 158 | } |
159 | if let Some(def) = ast::ConstDef::cast(node) { | 159 | if let Some(def) = ast::ConstDef::cast(node.clone()) { |
160 | return Some(Const { id: ctx.to_def(def) }.into()); | 160 | return Some(Const { id: ctx.to_def(&def) }.into()); |
161 | } | 161 | } |
162 | if let Some(def) = ast::StaticDef::cast(node) { | 162 | if let Some(def) = ast::StaticDef::cast(node.clone()) { |
163 | return Some(Static { id: ctx.to_def(def) }.into()); | 163 | return Some(Static { id: ctx.to_def(&def) }.into()); |
164 | } | 164 | } |
165 | None | 165 | None |
166 | }) | 166 | }) |
@@ -237,7 +237,7 @@ impl SourceAnalyzer { | |||
237 | SourceAnalyzer { | 237 | SourceAnalyzer { |
238 | resolver: node | 238 | resolver: node |
239 | .ancestors() | 239 | .ancestors() |
240 | .find_map(|node| try_get_resolver_for_node(db, file_id, node)) | 240 | .find_map(|node| try_get_resolver_for_node(db, file_id, &node)) |
241 | .unwrap_or_default(), | 241 | .unwrap_or_default(), |
242 | body_source_map: None, | 242 | body_source_map: None, |
243 | infer: None, | 243 | infer: None, |
@@ -257,17 +257,17 @@ impl SourceAnalyzer { | |||
257 | } | 257 | } |
258 | 258 | ||
259 | pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { | 259 | pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { |
260 | let expr_id = self.body_source_map.as_ref()?.node_expr(call.into())?; | 260 | let expr_id = self.body_source_map.as_ref()?.node_expr(&call.clone().into())?; |
261 | self.infer.as_ref()?.method_resolution(expr_id) | 261 | self.infer.as_ref()?.method_resolution(expr_id) |
262 | } | 262 | } |
263 | 263 | ||
264 | pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> { | 264 | pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> { |
265 | let expr_id = self.body_source_map.as_ref()?.node_expr(field.into())?; | 265 | let expr_id = self.body_source_map.as_ref()?.node_expr(&field.clone().into())?; |
266 | self.infer.as_ref()?.field_resolution(expr_id) | 266 | self.infer.as_ref()?.field_resolution(expr_id) |
267 | } | 267 | } |
268 | 268 | ||
269 | pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> { | 269 | pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> { |
270 | let expr_id = self.body_source_map.as_ref()?.node_expr(struct_lit.into())?; | 270 | let expr_id = self.body_source_map.as_ref()?.node_expr(&struct_lit.clone().into())?; |
271 | self.infer.as_ref()?.variant_resolution(expr_id) | 271 | self.infer.as_ref()?.variant_resolution(expr_id) |
272 | } | 272 | } |
273 | 273 | ||
@@ -290,18 +290,18 @@ impl SourceAnalyzer { | |||
290 | 290 | ||
291 | pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> { | 291 | pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> { |
292 | if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { | 292 | if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { |
293 | let expr_id = self.body_source_map.as_ref()?.node_expr(path_expr.into())?; | 293 | let expr_id = self.body_source_map.as_ref()?.node_expr(&path_expr.into())?; |
294 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { | 294 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { |
295 | return Some(PathResolution::AssocItem(assoc)); | 295 | return Some(PathResolution::AssocItem(assoc)); |
296 | } | 296 | } |
297 | } | 297 | } |
298 | if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) { | 298 | if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) { |
299 | let pat_id = self.body_source_map.as_ref()?.node_pat(path_pat.into())?; | 299 | let pat_id = self.body_source_map.as_ref()?.node_pat(&path_pat.into())?; |
300 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) { | 300 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) { |
301 | return Some(PathResolution::AssocItem(assoc)); | 301 | return Some(PathResolution::AssocItem(assoc)); |
302 | } | 302 | } |
303 | } | 303 | } |
304 | let hir_path = crate::Path::from_ast(path)?; | 304 | let hir_path = crate::Path::from_ast(path.clone())?; |
305 | let res = self.resolver.resolve_path_without_assoc_items(db, &hir_path); | 305 | let res = self.resolver.resolve_path_without_assoc_items(db, &hir_path); |
306 | let res = res.clone().take_types().or_else(|| res.take_values())?; | 306 | let res = res.clone().take_types().or_else(|| res.take_values())?; |
307 | let res = match res { | 307 | let res = match res { |
@@ -343,12 +343,12 @@ impl SourceAnalyzer { | |||
343 | // FIXME: at least, this should work with any DefWithBody, but ideally | 343 | // FIXME: at least, this should work with any DefWithBody, but ideally |
344 | // this should be hir-based altogether | 344 | // this should be hir-based altogether |
345 | let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); | 345 | let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); |
346 | let ptr = Either::A(AstPtr::new(pat.into())); | 346 | let ptr = Either::A(AstPtr::new(&ast::Pat::from(pat.clone()))); |
347 | fn_def | 347 | fn_def |
348 | .syntax() | 348 | .syntax() |
349 | .descendants() | 349 | .descendants() |
350 | .filter_map(ast::NameRef::cast) | 350 | .filter_map(ast::NameRef::cast) |
351 | .filter(|name_ref| match self.resolve_local_name(*name_ref) { | 351 | .filter(|name_ref| match self.resolve_local_name(&name_ref) { |
352 | None => false, | 352 | None => false, |
353 | Some(entry) => entry.ptr() == ptr, | 353 | Some(entry) => entry.ptr() == ptr, |
354 | }) | 354 | }) |
@@ -411,7 +411,7 @@ fn scope_for( | |||
411 | node: &SyntaxNode, | 411 | node: &SyntaxNode, |
412 | ) -> Option<ScopeId> { | 412 | ) -> Option<ScopeId> { |
413 | node.ancestors() | 413 | node.ancestors() |
414 | .map(SyntaxNodePtr::new) | 414 | .map(|it| SyntaxNodePtr::new(&it)) |
415 | .filter_map(|ptr| source_map.syntax_expr(ptr)) | 415 | .filter_map(|ptr| source_map.syntax_expr(ptr)) |
416 | .find_map(|it| scopes.scope_for(it)) | 416 | .find_map(|it| scopes.scope_for(it)) |
417 | } | 417 | } |
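The `source_binder.rs` hunks show the other half of the migration: `cast` now consumes a `SyntaxNode` by value, so dispatch chains clone the handle before each attempted cast, while pointer construction (`SyntaxNodePtr::new(&it)`) still only borrows. The following is a sketch of that try-cast-in-sequence shape, with hypothetical `SyntaxNode`/`Module`/`StructDef` stand-ins rather than the real rowan types.

```rust
use std::rc::Rc;

/// Hypothetical untyped node: an owned handle, cheap to clone.
#[derive(Clone)]
struct SyntaxNode {
    kind: &'static str,
    text: Rc<String>,
}

/// Typed wrappers whose `cast` consumes the untyped handle, mirroring
/// the by-value cast the diff adapts to.
struct Module(SyntaxNode);
struct StructDef(SyntaxNode);

impl Module {
    fn cast(node: SyntaxNode) -> Option<Module> {
        if node.kind == "MODULE" { Some(Module(node)) } else { None }
    }
}

impl StructDef {
    fn cast(node: SyntaxNode) -> Option<StructDef> {
        if node.kind == "STRUCT_DEF" { Some(StructDef(node)) } else { None }
    }
}

/// Each failed cast consumed its argument, so the caller clones the
/// handle per attempt, like the `ast::Module::cast(node.clone())`
/// chain in the diff.
fn describe(node: &SyntaxNode) -> String {
    if let Some(m) = Module::cast(node.clone()) {
        format!("module: {}", m.0.text)
    } else if let Some(s) = StructDef::cast(node.clone()) {
        format!("struct: {}", s.0.text)
    } else {
        "other".to_string()
    }
}

fn main() {
    let node = SyntaxNode { kind: "STRUCT_DEF", text: Rc::new("struct S;".to_string()) };
    assert_eq!(describe(&node), "struct: struct S;");
}
```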
diff --git a/crates/ra_hir/src/source_id.rs b/crates/ra_hir/src/source_id.rs index 6cdb90141..51cd65dda 100644 --- a/crates/ra_hir/src/source_id.rs +++ b/crates/ra_hir/src/source_id.rs | |||
@@ -5,7 +5,7 @@ use std::{ | |||
5 | }; | 5 | }; |
6 | 6 | ||
7 | use ra_arena::{impl_arena_id, Arena, RawId}; | 7 | use ra_arena::{impl_arena_id, Arena, RawId}; |
8 | use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr, TreeArc}; | 8 | use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr}; |
9 | 9 | ||
10 | use crate::{AstDatabase, HirFileId}; | 10 | use crate::{AstDatabase, HirFileId}; |
11 | 11 | ||
@@ -42,9 +42,9 @@ impl<N: AstNode> AstId<N> { | |||
42 | self.file_id | 42 | self.file_id |
43 | } | 43 | } |
44 | 44 | ||
45 | pub(crate) fn to_node(&self, db: &impl AstDatabase) -> TreeArc<N> { | 45 | pub(crate) fn to_node(&self, db: &impl AstDatabase) -> N { |
46 | let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw); | 46 | let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw); |
47 | N::cast(&syntax_node).unwrap().to_owned() | 47 | N::cast(syntax_node).unwrap() |
48 | } | 48 | } |
49 | } | 49 | } |
50 | 50 | ||
@@ -93,7 +93,7 @@ pub struct AstIdMap { | |||
93 | impl AstIdMap { | 93 | impl AstIdMap { |
94 | pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { | 94 | pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { |
95 | let map = if let Some(node) = db.parse_or_expand(file_id) { | 95 | let map = if let Some(node) = db.parse_or_expand(file_id) { |
96 | AstIdMap::from_source(&*node) | 96 | AstIdMap::from_source(&node) |
97 | } else { | 97 | } else { |
98 | AstIdMap::default() | 98 | AstIdMap::default() |
99 | }; | 99 | }; |
@@ -104,9 +104,9 @@ impl AstIdMap { | |||
104 | db: &impl AstDatabase, | 104 | db: &impl AstDatabase, |
105 | file_id: HirFileId, | 105 | file_id: HirFileId, |
106 | ast_id: ErasedFileAstId, | 106 | ast_id: ErasedFileAstId, |
107 | ) -> TreeArc<SyntaxNode> { | 107 | ) -> SyntaxNode { |
108 | let node = db.parse_or_expand(file_id).unwrap(); | 108 | let node = db.parse_or_expand(file_id).unwrap(); |
109 | db.ast_id_map(file_id).arena[ast_id].to_node(&*node).to_owned() | 109 | db.ast_id_map(file_id).arena[ast_id].to_node(&node) |
110 | } | 110 | } |
111 | 111 | ||
112 | pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> { | 112 | pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> { |
@@ -131,7 +131,7 @@ impl AstIdMap { | |||
131 | // change parent's id. This means that, say, adding a new function to a | 131 | // change parent's id. This means that, say, adding a new function to a |
132 | // trait does not change ids of top-level items, which helps caching. | 132 | // trait does not change ids of top-level items, which helps caching. |
133 | bfs(node, |it| { | 133 | bfs(node, |it| { |
134 | if let Some(module_item) = ast::ModuleItem::cast(it) { | 134 | if let Some(module_item) = ast::ModuleItem::cast(it.clone()) { |
135 | res.alloc(module_item.syntax()); | 135 | res.alloc(module_item.syntax()); |
136 | } else if let Some(macro_call) = ast::MacroCall::cast(it) { | 136 | } else if let Some(macro_call) = ast::MacroCall::cast(it) { |
137 | res.alloc(macro_call.syntax()); | 137 | res.alloc(macro_call.syntax()); |
@@ -146,8 +146,8 @@ impl AstIdMap { | |||
146 | } | 146 | } |
147 | 147 | ||
148 | /// Walks the subtree in bfs order, calling `f` for each node. | 148 | /// Walks the subtree in bfs order, calling `f` for each node. |
149 | fn bfs(node: &SyntaxNode, mut f: impl FnMut(&SyntaxNode)) { | 149 | fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) { |
150 | let mut curr_layer = vec![node]; | 150 | let mut curr_layer = vec![node.clone()]; |
151 | let mut next_layer = vec![]; | 151 | let mut next_layer = vec![]; |
152 | while !curr_layer.is_empty() { | 152 | while !curr_layer.is_empty() { |
153 | curr_layer.drain(..).for_each(|node| { | 153 | curr_layer.drain(..).for_each(|node| { |
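In `source_id.rs`, `AstId::to_node` can now return the typed node `N` directly (no `TreeArc`, no `.to_owned()`), and the `bfs` helper hands each visited node to the callback by value since the handles are cheap to clone. Here is a minimal BFS sketch under those assumptions, with a hypothetical `SyntaxNode` that shares its children behind `Rc`s.

```rust
use std::rc::Rc;

/// Hypothetical node: an owned handle whose subtree is shared, so
/// cloning a node to hand it to a callback is cheap.
#[derive(Clone)]
struct SyntaxNode {
    kind: &'static str,
    children: Vec<Rc<SyntaxNode>>,
}

impl SyntaxNode {
    /// Yields owned handles, mirroring how the new rowan iterators
    /// hand out `SyntaxNode` by value.
    fn children(&self) -> impl Iterator<Item = SyntaxNode> + '_ {
        self.children.iter().map(|it| (**it).clone())
    }
}

/// Walks the subtree breadth-first, calling `f` for each node by value,
/// matching the `fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode))`
/// signature in the diff.
fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) {
    let mut curr_layer = vec![node.clone()];
    let mut next_layer = vec![];
    while !curr_layer.is_empty() {
        curr_layer.drain(..).for_each(|node| {
            next_layer.extend(node.children());
            f(node);
        });
        std::mem::swap(&mut curr_layer, &mut next_layer);
    }
}

fn main() {
    let leaf = Rc::new(SyntaxNode { kind: "FN_DEF", children: vec![] });
    let root = SyntaxNode { kind: "SOURCE_FILE", children: vec![leaf] };
    let mut kinds = Vec::new();
    bfs(&root, |node| kinds.push(node.kind));
    assert_eq!(kinds, vec!["SOURCE_FILE", "FN_DEF"]);
}
```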
diff --git a/crates/ra_hir/src/traits.rs b/crates/ra_hir/src/traits.rs index fc0368303..de26f1a68 100644 --- a/crates/ra_hir/src/traits.rs +++ b/crates/ra_hir/src/traits.rs | |||
@@ -31,9 +31,9 @@ impl TraitData { | |||
31 | item_list | 31 | item_list |
32 | .impl_items() | 32 | .impl_items() |
33 | .map(|item_node| match item_node.kind() { | 33 | .map(|item_node| match item_node.kind() { |
34 | ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(), | 34 | ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(&it) }.into(), |
35 | ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(), | 35 | ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(), |
36 | ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(), | 36 | ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(), |
37 | }) | 37 | }) |
38 | .collect() | 38 | .collect() |
39 | } else { | 39 | } else { |
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index 2410602a6..265740e54 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs | |||
@@ -3086,7 +3086,7 @@ fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String { | |||
3086 | let file = db.parse(pos.file_id).ok().unwrap(); | 3086 | let file = db.parse(pos.file_id).ok().unwrap(); |
3087 | let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap(); | 3087 | let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap(); |
3088 | let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset)); | 3088 | let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset)); |
3089 | let ty = analyzer.type_of(db, expr).unwrap(); | 3089 | let ty = analyzer.type_of(db, &expr).unwrap(); |
3090 | ty.display(db).to_string() | 3090 | ty.display(db).to_string() |
3091 | } | 3091 | } |
3092 | 3092 | ||
@@ -3126,7 +3126,7 @@ fn infer(content: &str) -> String { | |||
3126 | types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end())); | 3126 | types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end())); |
3127 | for (syntax_ptr, ty) in &types { | 3127 | for (syntax_ptr, ty) in &types { |
3128 | let node = syntax_ptr.to_node(source_file.syntax()); | 3128 | let node = syntax_ptr.to_node(source_file.syntax()); |
3129 | let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node) { | 3129 | let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) { |
3130 | (self_param.self_kw_token().range(), "self".to_string()) | 3130 | (self_param.self_kw_token().range(), "self".to_string()) |
3131 | } else { | 3131 | } else { |
3132 | (syntax_ptr.range(), node.text().to_string().replace("\n", " ")) | 3132 | (syntax_ptr.range(), node.text().to_string().replace("\n", " ")) |
@@ -3137,7 +3137,7 @@ fn infer(content: &str) -> String { | |||
3137 | 3137 | ||
3138 | for node in source_file.syntax().descendants() { | 3138 | for node in source_file.syntax().descendants() { |
3139 | if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { | 3139 | if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { |
3140 | let analyzer = SourceAnalyzer::new(&db, file_id, node, None); | 3140 | let analyzer = SourceAnalyzer::new(&db, file_id, &node, None); |
3141 | infer_def(analyzer.inference_result(), analyzer.body_source_map()); | 3141 | infer_def(analyzer.inference_result(), analyzer.body_source_map()); |
3142 | } | 3142 | } |
3143 | } | 3143 | } |
@@ -3179,7 +3179,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { | |||
3179 | let node = | 3179 | let node = |
3180 | algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); | 3180 | algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); |
3181 | let events = db.log_executed(|| { | 3181 | let events = db.log_executed(|| { |
3182 | SourceAnalyzer::new(&db, pos.file_id, node, None); | 3182 | SourceAnalyzer::new(&db, pos.file_id, &node, None); |
3183 | }); | 3183 | }); |
3184 | assert!(format!("{:?}", events).contains("infer")) | 3184 | assert!(format!("{:?}", events).contains("infer")) |
3185 | } | 3185 | } |
@@ -3200,7 +3200,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { | |||
3200 | let node = | 3200 | let node = |
3201 | algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); | 3201 | algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); |
3202 | let events = db.log_executed(|| { | 3202 | let events = db.log_executed(|| { |
3203 | SourceAnalyzer::new(&db, pos.file_id, node, None); | 3203 | SourceAnalyzer::new(&db, pos.file_id, &node, None); |
3204 | }); | 3204 | }); |
3205 | assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) | 3205 | assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) |
3206 | } | 3206 | } |
diff --git a/crates/ra_hir/src/type_ref.rs b/crates/ra_hir/src/type_ref.rs index 8aa807648..8536ae44a 100644 --- a/crates/ra_hir/src/type_ref.rs +++ b/crates/ra_hir/src/type_ref.rs | |||
@@ -56,7 +56,7 @@ pub enum TypeRef { | |||
56 | 56 | ||
57 | impl TypeRef { | 57 | impl TypeRef { |
58 | /// Converts an `ast::TypeRef` to a `hir::TypeRef`. | 58 | /// Converts an `ast::TypeRef` to a `hir::TypeRef`. |
59 | pub(crate) fn from_ast(node: &ast::TypeRef) -> Self { | 59 | pub(crate) fn from_ast(node: ast::TypeRef) -> Self { |
60 | use ra_syntax::ast::TypeRefKind::*; | 60 | use ra_syntax::ast::TypeRefKind::*; |
61 | match node.kind() { | 61 | match node.kind() { |
62 | ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()), | 62 | ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()), |
@@ -95,7 +95,7 @@ impl TypeRef { | |||
95 | } | 95 | } |
96 | } | 96 | } |
97 | 97 | ||
98 | pub(crate) fn from_ast_opt(node: Option<&ast::TypeRef>) -> Self { | 98 | pub(crate) fn from_ast_opt(node: Option<ast::TypeRef>) -> Self { |
99 | if let Some(node) = node { | 99 | if let Some(node) = node { |
100 | TypeRef::from_ast(node) | 100 | TypeRef::from_ast(node) |
101 | } else { | 101 | } else { |
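The `type_ref.rs` change is the same by-value conversion applied to an optional node: `from_ast_opt` accepts `Option<ast::TypeRef>` directly, which is what accessors such as `type_arg.type_ref()` now return, so call sites no longer need to reborrow. A small sketch of that shape with hypothetical `AstTypeRef`/`TypeRef` stand-ins:

```rust
/// Hypothetical syntax-level type node, held as an owned value.
#[derive(Clone)]
struct AstTypeRef {
    text: String,
}

/// Hypothetical HIR-level representation produced from the syntax node.
#[derive(Debug, PartialEq)]
enum TypeRef {
    Named(String),
    /// Used when the source was missing or unparsable.
    Error,
}

impl TypeRef {
    /// Converts an owned syntax node; no lifetime ties to the tree remain.
    fn from_ast(node: AstTypeRef) -> Self {
        TypeRef::Named(node.text)
    }

    /// Accepts `Option<AstTypeRef>` by value, mirroring
    /// `from_ast_opt(node: Option<ast::TypeRef>)` in the diff.
    fn from_ast_opt(node: Option<AstTypeRef>) -> Self {
        match node {
            Some(node) => TypeRef::from_ast(node),
            None => TypeRef::Error,
        }
    }
}

fn main() {
    let present = Some(AstTypeRef { text: "u32".to_string() });
    assert_eq!(TypeRef::from_ast_opt(present), TypeRef::Named("u32".to_string()));
    assert_eq!(TypeRef::from_ast_opt(None), TypeRef::Error);
}
```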