Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_hir/src/code_model.rs               |  52
-rw-r--r--  crates/ra_hir_def/src/data.rs                 |  41
-rw-r--r--  crates/ra_hir_expand/src/builtin_macro.rs     |   8
-rw-r--r--  crates/ra_ide/src/completion/complete_dot.rs  |  38
-rw-r--r--  crates/ra_ide/src/completion/complete_path.rs | 105
-rw-r--r--  crates/ra_ide/src/inlay_hints.rs              |  32
-rw-r--r--  crates/ra_mbe/src/lib.rs                      |   3
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs            | 221
-rw-r--r--  crates/ra_mbe/src/tests.rs                    |  12
-rw-r--r--  crates/ra_syntax/src/ast/make.rs              |   6
-rw-r--r--  crates/rust-analyzer/src/main_loop.rs         |   3
11 files changed, 465 insertions(+), 56 deletions(-)
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index 2944926e6..911c809fd 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -204,10 +204,26 @@ impl Module {
     }
 
     /// Returns a `ModuleScope`: a set of items, visible in this module.
-    pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef)> {
+    pub fn scope(
+        self,
+        db: &impl HirDatabase,
+        visible_from: Option<Module>,
+    ) -> Vec<(Name, ScopeDef)> {
         db.crate_def_map(self.id.krate)[self.id.local_id]
             .scope
             .entries()
+            .filter_map(|(name, def)| {
+                if let Some(m) = visible_from {
+                    let filtered = def.filter_visibility(|vis| vis.is_visible_from(db, m.id));
+                    if filtered.is_none() && !def.is_none() {
+                        None
+                    } else {
+                        Some((name, filtered))
+                    }
+                } else {
+                    Some((name, def))
+                }
+            })
             .map(|(name, def)| (name.clone(), def.into()))
             .collect()
     }
@@ -571,6 +587,14 @@ impl Function {
     }
 }
 
+impl HasVisibility for Function {
+    fn visibility(&self, db: &impl HirDatabase) -> Visibility {
+        let function_data = db.function_data(self.id);
+        let visibility = &function_data.visibility;
+        visibility.resolve(db, &self.id.resolver(db))
+    }
+}
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct Const {
     pub(crate) id: ConstId,
@@ -590,6 +614,14 @@ impl Const {
     }
 }
 
+impl HasVisibility for Const {
+    fn visibility(&self, db: &impl HirDatabase) -> Visibility {
+        let function_data = db.const_data(self.id);
+        let visibility = &function_data.visibility;
+        visibility.resolve(db, &self.id.resolver(db))
+    }
+}
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct Static {
     pub(crate) id: StaticId,
@@ -664,6 +696,14 @@ impl TypeAlias {
     }
 }
 
+impl HasVisibility for TypeAlias {
+    fn visibility(&self, db: &impl HirDatabase) -> Visibility {
+        let function_data = db.type_alias_data(self.id);
+        let visibility = &function_data.visibility;
+        visibility.resolve(db, &self.id.resolver(db))
+    }
+}
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct MacroDef {
     pub(crate) id: MacroDefId,
@@ -751,6 +791,16 @@ impl AssocItem {
     }
 }
 
+impl HasVisibility for AssocItem {
+    fn visibility(&self, db: &impl HirDatabase) -> Visibility {
+        match self {
+            AssocItem::Function(f) => f.visibility(db),
+            AssocItem::Const(c) => c.visibility(db),
+            AssocItem::TypeAlias(t) => t.visibility(db),
+        }
+    }
+}
+
 #[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
 pub enum GenericDef {
     Function(Function),
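
Editor's note: the `visible_from` parameter and the `HasVisibility` impls added above are what the completion changes further down build on. The following standalone sketch models only the filtering idea with toy types (`Visibility`, `ScopeEntry` and plain `u32` module ids are stand-ins, not the real `ra_hir` API):

// Toy model of the filtering added to `Module::scope` above (stand-in types only).
#[derive(Clone, Copy)]
enum Visibility {
    Public,
    // Visible only from this module (the real code also walks parent/child relations).
    Restricted { module: u32 },
}

impl Visibility {
    fn is_visible_from(self, from_module: u32) -> bool {
        match self {
            Visibility::Public => true,
            Visibility::Restricted { module } => module == from_module,
        }
    }
}

struct ScopeEntry {
    name: &'static str,
    vis: Visibility,
}

// Keep only entries that `visible_from` is allowed to see; `None` keeps everything,
// mirroring the `filter_map` branch that returns `Some((name, def))` unchanged.
fn visible_entries(entries: &[ScopeEntry], visible_from: Option<u32>) -> Vec<&'static str> {
    entries
        .iter()
        .filter(|e| visible_from.map_or(true, |m| e.vis.is_visible_from(m)))
        .map(|e| e.name)
        .collect()
}

fn main() {
    let entries = [
        ScopeEntry { name: "Foo", vis: Visibility::Public },
        ScopeEntry { name: "Bar", vis: Visibility::Restricted { module: 1 } },
    ];
    assert_eq!(visible_entries(&entries, Some(0)), vec!["Foo"]);
    assert_eq!(visible_entries(&entries, None), vec!["Foo", "Bar"]);
}

Passing `None` for the vantage module preserves the previous unfiltered behaviour; the real method additionally keeps entries whose definition is entirely unknown (`def.is_none()`), so unresolved items still show up.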
diff --git a/crates/ra_hir_def/src/data.rs b/crates/ra_hir_def/src/data.rs
index 9fc43f3fb..a72eb5369 100644
--- a/crates/ra_hir_def/src/data.rs
+++ b/crates/ra_hir_def/src/data.rs
@@ -7,13 +7,16 @@ use hir_expand::{
     AstId, InFile,
 };
 use ra_prof::profile;
-use ra_syntax::ast::{self, AstNode, ImplItem, ModuleItemOwner, NameOwner, TypeAscriptionOwner};
+use ra_syntax::ast::{
+    self, AstNode, ImplItem, ModuleItemOwner, NameOwner, TypeAscriptionOwner, VisibilityOwner,
+};
 
 use crate::{
     db::DefDatabase,
     path::{path, GenericArgs, Path},
     src::HasSource,
     type_ref::{Mutability, TypeBound, TypeRef},
+    visibility::RawVisibility,
     AssocContainerId, AssocItemId, ConstId, ConstLoc, Expander, FunctionId, FunctionLoc, HasModule,
     ImplId, Intern, Lookup, ModuleId, StaticId, TraitId, TypeAliasId, TypeAliasLoc,
 };
@@ -26,6 +29,7 @@ pub struct FunctionData {
     /// True if the first param is `self`. This is relevant to decide whether this
     /// can be called as a method.
     pub has_self_param: bool,
+    pub visibility: RawVisibility,
 }
 
 impl FunctionData {
@@ -72,7 +76,9 @@ impl FunctionData {
             ret_type
         };
 
-        let sig = FunctionData { name, params, ret_type, has_self_param };
+        let visibility = RawVisibility::from_ast(db, src.map(|s| s.visibility()));
+
+        let sig = FunctionData { name, params, ret_type, has_self_param, visibility };
         Arc::new(sig)
     }
 }
@@ -91,6 +97,7 @@ fn desugar_future_path(orig: TypeRef) -> Path {
 pub struct TypeAliasData {
     pub name: Name,
     pub type_ref: Option<TypeRef>,
+    pub visibility: RawVisibility,
 }
 
 impl TypeAliasData {
@@ -98,10 +105,11 @@ impl TypeAliasData {
         db: &impl DefDatabase,
         typ: TypeAliasId,
     ) -> Arc<TypeAliasData> {
-        let node = typ.lookup(db).source(db).value;
-        let name = node.name().map_or_else(Name::missing, |n| n.as_name());
-        let type_ref = node.type_ref().map(TypeRef::from_ast);
-        Arc::new(TypeAliasData { name, type_ref })
+        let node = typ.lookup(db).source(db);
+        let name = node.value.name().map_or_else(Name::missing, |n| n.as_name());
+        let type_ref = node.value.type_ref().map(TypeRef::from_ast);
+        let visibility = RawVisibility::from_ast(db, node.map(|n| n.visibility()));
+        Arc::new(TypeAliasData { name, type_ref, visibility })
    }
 }
 
@@ -217,23 +225,28 @@ pub struct ConstData {
     /// const _: () = ();
     pub name: Option<Name>,
     pub type_ref: TypeRef,
+    pub visibility: RawVisibility,
 }
 
 impl ConstData {
     pub(crate) fn const_data_query(db: &impl DefDatabase, konst: ConstId) -> Arc<ConstData> {
-        let node = konst.lookup(db).source(db).value;
-        Arc::new(ConstData::new(&node))
+        let node = konst.lookup(db).source(db);
+        Arc::new(ConstData::new(db, node))
     }
 
     pub(crate) fn static_data_query(db: &impl DefDatabase, konst: StaticId) -> Arc<ConstData> {
-        let node = konst.lookup(db).source(db).value;
-        Arc::new(ConstData::new(&node))
+        let node = konst.lookup(db).source(db);
+        Arc::new(ConstData::new(db, node))
     }
 
-    fn new<N: NameOwner + TypeAscriptionOwner>(node: &N) -> ConstData {
-        let name = node.name().map(|n| n.as_name());
-        let type_ref = TypeRef::from_ast_opt(node.ascribed_type());
-        ConstData { name, type_ref }
+    fn new<N: NameOwner + TypeAscriptionOwner + VisibilityOwner>(
+        db: &impl DefDatabase,
+        node: InFile<N>,
+    ) -> ConstData {
+        let name = node.value.name().map(|n| n.as_name());
+        let type_ref = TypeRef::from_ast_opt(node.value.ascribed_type());
+        let visibility = RawVisibility::from_ast(db, node.map(|n| n.visibility()));
+        ConstData { name, type_ref, visibility }
     }
 }
 
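
Editor's note: the recurring change in this file is to stop taking `.value` out of the `source(db)` result and instead keep the `InFile` wrapper, so that `node.map(|n| n.visibility())` can hand both the file context and the visibility clause to `RawVisibility::from_ast`. A toy model of that wrapper-and-map shape (all types below are stand-ins, not the real `hir_expand::InFile` or `RawVisibility`):

// Stand-in for hir_expand's InFile<T>: a syntax value paired with the file it came from.
struct InFile<T> {
    file_id: u32,
    value: T,
}

impl<T> InFile<T> {
    fn map<U>(self, f: impl FnOnce(T) -> U) -> InFile<U> {
        InFile { file_id: self.file_id, value: f(self.value) }
    }
}

// Stand-in for an AST node that owns an optional visibility clause.
struct ConstAst {
    vis: Option<&'static str>,
}

// Stand-in for RawVisibility::from_ast: resolving `pub(in path)`-style visibilities
// needs to know which file the clause was written in, hence the InFile wrapper.
fn raw_visibility_from_ast(vis: InFile<Option<&'static str>>) -> String {
    match vis.value {
        Some(v) => format!("{} (resolved in file {})", v, vis.file_id),
        None => "private".to_string(),
    }
}

fn main() {
    let node = InFile { file_id: 7, value: ConstAst { vis: Some("pub(super)") } };
    // Keep the wrapper and map over it, as ConstData::new and the other queries now do.
    let vis = raw_visibility_from_ast(node.map(|n| n.vis));
    assert_eq!(vis, "pub(super) (resolved in file 7)");
}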
diff --git a/crates/ra_hir_expand/src/builtin_macro.rs b/crates/ra_hir_expand/src/builtin_macro.rs
index 9bc33cfa8..3f60b1cca 100644
--- a/crates/ra_hir_expand/src/builtin_macro.rs
+++ b/crates/ra_hir_expand/src/builtin_macro.rs
@@ -7,6 +7,7 @@ use crate::{
 
 use crate::{quote, EagerMacroId, LazyMacroId, MacroCallId};
 use either::Either;
+use mbe::parse_to_token_tree;
 use ra_db::{FileId, RelativePath};
 use ra_parser::FragmentKind;
 
@@ -306,10 +307,9 @@ fn include_expand(
 
     // FIXME:
     // Handle include as expression
-    let node =
-        db.parse_or_expand(file_id.into()).ok_or_else(|| mbe::ExpandError::ConversionError)?;
-    let res =
-        mbe::syntax_node_to_token_tree(&node).ok_or_else(|| mbe::ExpandError::ConversionError)?.0;
+    let res = parse_to_token_tree(&db.file_text(file_id.into()))
+        .ok_or_else(|| mbe::ExpandError::ConversionError)?
+        .0;
 
     Ok((res, FragmentKind::Items))
 }
diff --git a/crates/ra_ide/src/completion/complete_dot.rs b/crates/ra_ide/src/completion/complete_dot.rs
index 00fd158de..f275305e2 100644
--- a/crates/ra_ide/src/completion/complete_dot.rs
+++ b/crates/ra_ide/src/completion/complete_dot.rs
@@ -57,7 +57,10 @@ fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &T
     let mut seen_methods = FxHashSet::default();
     let traits_in_scope = ctx.scope().traits_in_scope();
     receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| {
-        if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) {
+        if func.has_self_param(ctx.db)
+            && ctx.scope().module().map_or(true, |m| func.is_visible_from(ctx.db, m))
+            && seen_methods.insert(func.name(ctx.db))
+        {
             acc.add_function(ctx, func);
         }
         None::<()>
@@ -308,6 +311,39 @@ mod tests {
     }
 
     #[test]
+    fn test_method_completion_private() {
+        assert_debug_snapshot!(
+            do_ref_completion(
+                r"
+                struct A {}
+                mod m {
+                    impl super::A {
+                        fn private_method(&self) {}
+                        pub(super) fn the_method(&self) {}
+                    }
+                }
+                fn foo(a: A) {
+                   a.<|>
+                }
+                ",
+            ),
+            @r###"
+        [
+            CompletionItem {
+                label: "the_method()",
+                source_range: [256; 256),
+                delete: [256; 256),
+                insert: "the_method()$0",
+                kind: Method,
+                lookup: "the_method",
+                detail: "pub(super) fn the_method(&self)",
+            },
+        ]
+        "###
+        );
+    }
+
+    #[test]
     fn test_trait_method_completion() {
         assert_debug_snapshot!(
             do_ref_completion(
diff --git a/crates/ra_ide/src/completion/complete_path.rs b/crates/ra_ide/src/completion/complete_path.rs
index d43486d1a..3c4a70561 100644
--- a/crates/ra_ide/src/completion/complete_path.rs
+++ b/crates/ra_ide/src/completion/complete_path.rs
@@ -1,6 +1,6 @@
 //! Completion of paths, i.e. `some::prefix::<|>`.
 
-use hir::{Adt, PathResolution, ScopeDef};
+use hir::{Adt, HasVisibility, PathResolution, ScopeDef};
 use ra_syntax::AstNode;
 use test_utils::tested_by;
 
@@ -15,9 +15,10 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
         Some(PathResolution::Def(def)) => def,
         _ => return,
     };
+    let context_module = ctx.scope().module();
     match def {
         hir::ModuleDef::Module(module) => {
-            let module_scope = module.scope(ctx.db);
+            let module_scope = module.scope(ctx.db, context_module);
             for (name, def) in module_scope {
                 if ctx.use_item_syntax.is_some() {
                     if let ScopeDef::Unknown = def {
@@ -51,6 +52,9 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
             if let Some(krate) = krate {
                 let traits_in_scope = ctx.scope().traits_in_scope();
                 ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| {
+                    if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
+                        return None;
+                    }
                     match item {
                         hir::AssocItem::Function(func) => {
                             if !func.has_self_param(ctx.db) {
@@ -64,6 +68,9 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
                 });
 
                 ty.iterate_impl_items(ctx.db, krate, |item| {
+                    if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
+                        return None;
+                    }
                     match item {
                         hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => {}
                         hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
@@ -74,6 +81,9 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
             }
             hir::ModuleDef::Trait(t) => {
                 for item in t.items(ctx.db) {
+                    if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
+                        continue;
+                    }
                     match item {
                         hir::AssocItem::Function(func) => {
                             if !func.has_self_param(ctx.db) {
@@ -170,6 +180,41 @@ mod tests {
     }
 
     #[test]
+    fn path_visibility() {
+        assert_debug_snapshot!(
+            do_reference_completion(
+                r"
+                use self::my::<|>;
+
+                mod my {
+                    struct Bar;
+                    pub struct Foo;
+                    pub use Bar as PublicBar;
+                }
+                "
+            ),
+            @r###"
+        [
+            CompletionItem {
+                label: "Foo",
+                source_range: [31; 31),
+                delete: [31; 31),
+                insert: "Foo",
+                kind: Struct,
+            },
+            CompletionItem {
+                label: "PublicBar",
+                source_range: [31; 31),
+                delete: [31; 31),
+                insert: "PublicBar",
+                kind: Struct,
+            },
+        ]
+        "###
+        );
+    }
+
+    #[test]
     fn completes_use_item_starting_with_self() {
         assert_debug_snapshot!(
             do_reference_completion(
@@ -177,7 +222,7 @@ mod tests {
                 use self::m::<|>;
 
                 mod m {
-                    struct Bar;
+                    pub struct Bar;
                 }
                 "
             ),
@@ -502,6 +547,60 @@ mod tests {
     }
 
     #[test]
+    fn associated_item_visibility() {
+        assert_debug_snapshot!(
+            do_reference_completion(
+                "
+                //- /lib.rs
+                struct S;
+
+                mod m {
+                    impl super::S {
+                        pub(super) fn public_method() { }
+                        fn private_method() { }
+                        pub(super) type PublicType = u32;
+                        type PrivateType = u32;
+                        pub(super) const PUBLIC_CONST: u32 = 1;
+                        const PRIVATE_CONST: u32 = 1;
+                    }
+                }
+
+                fn foo() { let _ = S::<|> }
+                "
+            ),
+            @r###"
+        [
+            CompletionItem {
+                label: "PUBLIC_CONST",
+                source_range: [302; 302),
+                delete: [302; 302),
+                insert: "PUBLIC_CONST",
+                kind: Const,
+                detail: "pub(super) const PUBLIC_CONST: u32 = 1;",
+            },
+            CompletionItem {
+                label: "PublicType",
+                source_range: [302; 302),
+                delete: [302; 302),
+                insert: "PublicType",
+                kind: TypeAlias,
+                detail: "pub(super) type PublicType = u32;",
+            },
+            CompletionItem {
+                label: "public_method()",
+                source_range: [302; 302),
+                delete: [302; 302),
+                insert: "public_method()$0",
+                kind: Function,
+                lookup: "public_method",
+                detail: "pub(super) fn public_method()",
+            },
+        ]
+        "###
+        );
+    }
+
+    #[test]
     fn completes_enum_associated_method() {
         assert_debug_snapshot!(
             do_reference_completion(
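
Editor's note: the visibility guards above are phrased two ways, `map_or(true, |m| func.is_visible_from(ctx.db, m))` in complete_dot.rs and `map_or(false, |m| !item.is_visible_from(ctx.db, m))` in complete_path.rs, but both mean "only filter when there is a context module and the item is not visible from it". A tiny self-contained check of that equivalence (plain `Option<u32>` standing in for the module):

// `keep` is the complete_dot.rs phrasing, `skip` the complete_path.rs one.
fn keep(context_module: Option<u32>, is_visible_from: impl Fn(u32) -> bool) -> bool {
    context_module.map_or(true, |m| is_visible_from(m))
}

fn skip(context_module: Option<u32>, is_visible_from: impl Fn(u32) -> bool) -> bool {
    context_module.map_or(false, |m| !is_visible_from(m))
}

fn main() {
    // For every combination, `skip` is exactly the negation of `keep`:
    // with no context module, nothing is filtered out.
    for ctx in [None, Some(0u32), Some(1)] {
        for visible in [false, true] {
            assert_eq!(skip(ctx, |_| visible), !keep(ctx, |_| visible));
        }
    }
}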
diff --git a/crates/ra_ide/src/inlay_hints.rs b/crates/ra_ide/src/inlay_hints.rs
index 69098a630..cf0cbdbd0 100644
--- a/crates/ra_ide/src/inlay_hints.rs
+++ b/crates/ra_ide/src/inlay_hints.rs
@@ -119,6 +119,12 @@ fn should_not_display_type_hint(db: &RootDatabase, bind_pat: &ast::BindPat, pat_
         return true;
     }
 
+    if let Some(Adt::Struct(s)) = pat_ty.as_adt() {
+        if s.fields(db).is_empty() && s.name(db).to_string() == bind_pat.syntax().to_string() {
+            return true;
+        }
+    }
+
     for node in bind_pat.syntax().ancestors() {
         match_ast! {
             match node {
@@ -943,4 +949,30 @@ fn main() {
         "###
         );
     }
+
+    #[test]
+    fn unit_structs_have_no_type_hints() {
+        let (analysis, file_id) = single_file(
+            r#"
+enum CustomResult<T, E> {
+    Ok(T),
+    Err(E),
+}
+use CustomResult::*;
+
+struct SyntheticSyntax;
+
+fn main() {
+    match Ok(()) {
+        Ok(_) => (),
+        Err(SyntheticSyntax) => (),
+    }
+}"#,
+        );
+
+        assert_debug_snapshot!(analysis.inlay_hints(file_id, Some(8)).unwrap(), @r###"
+        []
+        "###
+        );
+    }
 }
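
Editor's note: the new early return in should_not_display_type_hint fires when the bound pattern's text is exactly the name of a fieldless struct, as in `Err(SyntheticSyntax)` in the test above, where the "binding" is really the unit struct itself and a type hint would only repeat its name. A standalone illustration of that check with stand-in types (not the real hir `Struct` or `BindPat`):

// Stand-ins for the hir types used above: a resolved struct with its name and
// field count, plus the source text of the binding pattern.
struct StructInfo {
    name: &'static str,
    field_count: usize,
}

fn should_hide_hint(pat_text: &str, resolved: Option<&StructInfo>) -> bool {
    match resolved {
        // A fieldless struct whose name is exactly what the pattern says:
        // the pattern already names the type, so a hint would be redundant.
        Some(s) => s.field_count == 0 && s.name == pat_text,
        None => false,
    }
}

fn main() {
    let synthetic = StructInfo { name: "SyntheticSyntax", field_count: 0 };
    assert!(should_hide_hint("SyntheticSyntax", Some(&synthetic)));
    assert!(!should_hide_hint("e", Some(&synthetic)));
}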
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 2c6ae5658..43afe24cc 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -31,7 +31,8 @@ pub enum ExpandError {
 }
 
 pub use crate::syntax_bridge::{
-    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, TokenMap,
+    ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node,
+    TokenMap,
 };
 
 /// This struct contains AST for a single `macro_rules` definition. What might
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index fb9fa5314..fcb73fbc7 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -2,8 +2,10 @@
 
 use ra_parser::{FragmentKind, ParseError, TreeSink};
 use ra_syntax::{
-    ast, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
-    SyntaxTreeBuilder, TextRange, TextUnit, T,
+    ast::{self, make::tokens::doc_comment},
+    tokenize, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind,
+    SyntaxKind::*,
+    SyntaxNode, SyntaxTreeBuilder, TextRange, TextUnit, Token, T,
 };
 use rustc_hash::FxHashMap;
 use std::iter::successors;
@@ -48,9 +50,11 @@ pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenM
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
     let global_offset = node.text_range().start();
-    let mut c = Convertor { map: TokenMap::default(), global_offset, next_id: 0 };
+    let mut c = Convertor {
+        id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
+    };
     let subtree = c.go(node)?;
-    Some((subtree, c.map))
+    Some((subtree, c.id_alloc.map))
 }
 
 // The following items are what `rustc` macro can be parsed into :
@@ -89,6 +93,28 @@ pub fn token_tree_to_syntax_node(
     Ok((parse, range_map))
 }
 
+/// Convert a string to a `TokenTree`
+pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
+    let (tokens, errors) = tokenize(text);
+    if !errors.is_empty() {
+        return None;
+    }
+
+    let mut conv = RawConvertor {
+        text,
+        offset: TextUnit::default(),
+        inner: tokens.iter(),
+        id_alloc: TokenIdAlloc {
+            map: Default::default(),
+            global_offset: TextUnit::default(),
+            next_id: 0,
+        },
+    };
+
+    let subtree = conv.go()?;
+    Some((subtree, conv.id_alloc.map))
+}
+
 impl TokenMap {
     pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
         let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
@@ -118,6 +144,14 @@ impl TokenMap {
         self.entries
             .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
     }
+
+    fn update_close_delim(&mut self, token_id: tt::TokenId, close_relative_range: TextRange) {
+        if let Some(entry) = self.entries.iter_mut().find(|(tid, _)| *tid == token_id) {
+            if let TokenTextRange::Delimiter(dim, _) = entry.1 {
+                entry.1 = TokenTextRange::Delimiter(dim, close_relative_range);
+            }
+        }
+    }
 }
 
 /// Returns the textual content of a doc comment block as a quoted string
@@ -188,12 +222,161 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
     }
 }
 
-struct Convertor {
+struct TokenIdAlloc {
     map: TokenMap,
     global_offset: TextUnit,
     next_id: u32,
 }
 
+impl TokenIdAlloc {
+    fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
+        let relative_range = absolute_range - self.global_offset;
+        let token_id = tt::TokenId(self.next_id);
+        self.next_id += 1;
+        self.map.insert(token_id, relative_range);
+        token_id
+    }
+
+    fn delim(&mut self, open_abs_range: TextRange, close_abs_range: TextRange) -> tt::TokenId {
+        let open_relative_range = open_abs_range - self.global_offset;
+        let close_relative_range = close_abs_range - self.global_offset;
+        let token_id = tt::TokenId(self.next_id);
+        self.next_id += 1;
+
+        self.map.insert_delim(token_id, open_relative_range, close_relative_range);
+        token_id
+    }
+
+    fn open_delim(&mut self, open_abs_range: TextRange) -> tt::TokenId {
+        let token_id = tt::TokenId(self.next_id);
+        self.next_id += 1;
+        self.map.insert_delim(token_id, open_abs_range, open_abs_range);
+        token_id
+    }
+
+    fn close_delim(&mut self, id: tt::TokenId, close_abs_range: TextRange) {
+        self.map.update_close_delim(id, close_abs_range);
+    }
+}
+
+/// A Raw Token (straightly from lexer) convertor
+struct RawConvertor<'a> {
+    text: &'a str,
+    offset: TextUnit,
+    id_alloc: TokenIdAlloc,
+    inner: std::slice::Iter<'a, Token>,
+}
+
+impl RawConvertor<'_> {
+    fn go(&mut self) -> Option<tt::Subtree> {
+        let mut subtree = tt::Subtree::default();
+        subtree.delimiter = None;
+        while self.peek().is_some() {
+            self.collect_leaf(&mut subtree.token_trees);
+        }
+        if subtree.token_trees.is_empty() {
+            return None;
+        }
+        if subtree.token_trees.len() == 1 {
+            if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
+                return Some(first.clone());
+            }
+        }
+        Some(subtree)
+    }
+
+    fn bump(&mut self) -> Option<(Token, TextRange)> {
+        let token = self.inner.next()?;
+        let range = TextRange::offset_len(self.offset, token.len);
+        self.offset += token.len;
+        Some((*token, range))
+    }
+
+    fn peek(&self) -> Option<Token> {
+        self.inner.as_slice().get(0).cloned()
+    }
+
+    fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) {
+        let (token, range) = match self.bump() {
+            None => return,
+            Some(it) => it,
+        };
+
+        let k: SyntaxKind = token.kind;
+        if k == COMMENT {
+            let node = doc_comment(&self.text[range]);
+            if let Some(tokens) = convert_doc_comment(&node) {
+                result.extend(tokens);
+            }
+            return;
+        }
+
+        result.push(if k.is_punct() {
+            let delim = match k {
+                T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])),
+                T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])),
+                T!['['] => Some((tt::DelimiterKind::Bracket, T![']'])),
+                _ => None,
+            };
+
+            if let Some((kind, closed)) = delim {
+                let mut subtree = tt::Subtree::default();
+                let id = self.id_alloc.open_delim(range);
+                subtree.delimiter = Some(tt::Delimiter { kind, id });
+
+                while self.peek().map(|it| it.kind != closed).unwrap_or(false) {
+                    self.collect_leaf(&mut subtree.token_trees);
+                }
+                let last_range = match self.bump() {
+                    None => return,
+                    Some(it) => it.1,
+                };
+                self.id_alloc.close_delim(id, last_range);
+                subtree.into()
+            } else {
+                let spacing = match self.peek() {
+                    Some(next)
+                        if next.kind.is_trivia()
+                            || next.kind == T!['[']
+                            || next.kind == T!['{']
+                            || next.kind == T!['('] =>
+                    {
+                        tt::Spacing::Alone
+                    }
+                    Some(next) if next.kind.is_punct() => tt::Spacing::Joint,
+                    _ => tt::Spacing::Alone,
+                };
+                let char =
+                    self.text[range].chars().next().expect("Token from lexer must be single char");
+
+                tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc.alloc(range) }).into()
+            }
+        } else {
+            macro_rules! make_leaf {
+                ($i:ident) => {
+                    tt::$i { id: self.id_alloc.alloc(range), text: self.text[range].into() }.into()
+                };
+            }
+            let leaf: tt::Leaf = match k {
+                T![true] | T![false] => make_leaf!(Literal),
+                IDENT | LIFETIME => make_leaf!(Ident),
+                k if k.is_keyword() => make_leaf!(Ident),
+                k if k.is_literal() => make_leaf!(Literal),
+                _ => return,
+            };
+
+            leaf.into()
+        });
+    }
+}
+
+// FIXME: There are some duplicate logic between RawConvertor and Convertor
+// It would be nice to refactor to converting SyntaxNode to ra_parser::Token and thus
+// use RawConvertor directly. But performance-wise it may not be a good idea ?
+struct Convertor {
+    id_alloc: TokenIdAlloc,
+}
+
 impl Convertor {
     fn go(&mut self, tt: &SyntaxNode) -> Option<tt::Subtree> {
         // This tree is empty
@@ -236,7 +419,7 @@ impl Convertor {
             };
             let delimiter = delimiter_kind.map(|kind| tt::Delimiter {
                 kind,
-                id: self.alloc_delim(first_child.text_range(), last_child.text_range()),
+                id: self.id_alloc.delim(first_child.text_range(), last_child.text_range()),
             });
 
             let mut token_trees = Vec::new();
@@ -273,7 +456,7 @@ impl Convertor {
                         tt::Leaf::from(tt::Punct {
                             char,
                             spacing,
-                            id: self.alloc(token.text_range()),
+                            id: self.id_alloc.alloc(token.text_range()),
                         })
                         .into(),
                     );
@@ -282,7 +465,7 @@ impl Convertor {
                     macro_rules! make_leaf {
                         ($i:ident) => {
                             tt::$i {
-                                id: self.alloc(token.text_range()),
+                                id: self.id_alloc.alloc(token.text_range()),
                                 text: token.text().clone(),
                             }
                             .into()
@@ -313,28 +496,6 @@ impl Convertor {
         let res = tt::Subtree { delimiter, token_trees };
         Some(res)
     }
-
-    fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
-        let relative_range = absolute_range - self.global_offset;
-        let token_id = tt::TokenId(self.next_id);
-        self.next_id += 1;
-        self.map.insert(token_id, relative_range);
-        token_id
-    }
-
-    fn alloc_delim(
-        &mut self,
-        open_abs_range: TextRange,
-        close_abs_range: TextRange,
-    ) -> tt::TokenId {
-        let open_relative_range = open_abs_range - self.global_offset;
-        let close_relative_range = close_abs_range - self.global_offset;
-        let token_id = tt::TokenId(self.next_id);
-        self.next_id += 1;
-
-        self.map.insert_delim(token_id, open_relative_range, close_relative_range);
-        token_id
-    }
 }
 
 struct TtTreeSink<'a> {
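
Editor's note: the new `parse_to_token_tree` lexes a string and converts the raw tokens straight into a `tt::Subtree` via `RawConvertor`, skipping full syntax-tree construction; this is what `include_expand` above now calls. A hedged usage sketch, assuming the workspace crates (`mbe`, i.e. ra_mbe, and `tt`) as dependencies rather than a standalone program:

// Sketch, not part of the commit. Assumes the workspace crates `mbe` (ra_mbe)
// and `tt` as dependencies; builtin_macro.rs above imports the same function
// as `use mbe::parse_to_token_tree`.
use mbe::parse_to_token_tree;

/// Lex `text` and build a token tree directly; returns None on lexer errors.
fn text_to_subtree(text: &str) -> Option<tt::Subtree> {
    let (subtree, _token_map) = parse_to_token_tree(text)?;
    Some(subtree)
}

fn main() {
    let subtree = text_to_subtree("fn answer() -> i32 { 42 }").unwrap();
    // Top-level input is not wrapped in a delimiter, matching RawConvertor::go above.
    assert!(subtree.delimiter.is_none());
}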
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 066ce150b..6d5d1e9e6 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -1499,12 +1499,20 @@ impl MacroFixture {
     }
 }
 
-pub(crate) fn parse_macro(macro_definition: &str) -> MacroFixture {
-    let source_file = ast::SourceFile::parse(macro_definition).ok().unwrap();
+pub(crate) fn parse_macro(ra_fixture: &str) -> MacroFixture {
+    let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap();
     let macro_definition =
         source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
     let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
+
+    let parsed = parse_to_token_tree(
+        &ra_fixture[macro_definition.token_tree().unwrap().syntax().text_range()],
+    )
+    .unwrap()
+    .0;
+    assert_eq!(definition_tt, parsed);
+
     let rules = MacroRules::parse(&definition_tt).unwrap();
     MacroFixture { rules }
 }
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs
index 53d6fa562..ae8829807 100644
--- a/crates/ra_syntax/src/ast/make.rs
+++ b/crates/ra_syntax/src/ast/make.rs
@@ -267,6 +267,12 @@ pub mod tokens {
         sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
     }
 
+    pub fn doc_comment(text: &str) -> SyntaxToken {
+        assert!(!text.trim().is_empty());
+        let sf = SourceFile::parse(text).ok().unwrap();
+        sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
+    }
+
     pub fn literal(text: &str) -> SyntaxToken {
         assert_eq!(text.trim(), text);
         let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text));
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 580ad1f2c..5480b9e4d 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -631,6 +631,9 @@ fn on_notification(
        }
        Err(not) => not,
    };
+    if not.method.starts_with("$/") {
+        return Ok(());
+    }
    log::error!("unhandled notification: {:?}", not);
    Ok(())
 }
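
Editor's note: the added guard follows the LSP convention that methods beginning with `$/` are protocol-implementation dependent and a server may ignore such notifications, so they are dropped quietly instead of being logged as unhandled. Restated as a standalone check:

// Standalone restatement of the guard: `$/`-prefixed methods are optional
// protocol extensions per the LSP spec, so an unrecognized one is silently
// dropped rather than logged as an unhandled notification.
fn is_ignorable_notification(method: &str) -> bool {
    method.starts_with("$/")
}

fn main() {
    assert!(is_ignorable_notification("$/setTraceNotification"));
    assert!(!is_ignorable_notification("textDocument/didSave"));
}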