-rw-r--r--  crates/ra_assists/src/assists/add_new.rs          35
-rw-r--r--  crates/ra_hir_def/src/path.rs                       8
-rw-r--r--  crates/ra_hir_expand/src/name.rs                    2
-rw-r--r--  crates/ra_hir_expand/src/quote.rs                   7
-rw-r--r--  crates/ra_hir_ty/src/infer.rs                      34
-rw-r--r--  crates/ra_hir_ty/src/infer/expr.rs                101
-rw-r--r--  crates/ra_hir_ty/src/tests/traits.rs               64
-rw-r--r--  crates/ra_ide/src/expand.rs                        53
-rw-r--r--  crates/ra_lsp_server/src/conv.rs                    5
-rw-r--r--  crates/ra_mbe/src/lib.rs                            4
-rw-r--r--  crates/ra_mbe/src/mbe_expander/matcher.rs           6
-rw-r--r--  crates/ra_mbe/src/mbe_expander/transcriber.rs      10
-rw-r--r--  crates/ra_mbe/src/subtree_source.rs                10
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs                 34
-rw-r--r--  crates/ra_mbe/src/tests.rs                          2
-rw-r--r--  crates/ra_tt/src/lib.rs                            13
16 files changed, 238 insertions, 150 deletions
diff --git a/crates/ra_assists/src/assists/add_new.rs b/crates/ra_assists/src/assists/add_new.rs
index d340cac8f..b2f946fac 100644
--- a/crates/ra_assists/src/assists/add_new.rs
+++ b/crates/ra_assists/src/assists/add_new.rs
@@ -139,43 +139,40 @@ fn find_struct_impl(
 
     let struct_ty = {
         let src = InFile { file_id: ctx.frange.file_id.into(), value: strukt.clone() };
-        hir::Struct::from_source(db, src).unwrap().ty(db)
+        hir::Struct::from_source(db, src)?.ty(db)
     };
 
-    let mut found_new_fn = false;
-
-    let block = module.descendants().filter_map(ast::ImplBlock::cast).find(|impl_blk| {
-        if found_new_fn {
-            return false;
-        }
-
+    let block = module.descendants().filter_map(ast::ImplBlock::cast).find_map(|impl_blk| {
         let src = InFile { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() };
-        let blk = hir::ImplBlock::from_source(db, src).unwrap();
+        let blk = hir::ImplBlock::from_source(db, src)?;
 
         let same_ty = blk.target_ty(db) == struct_ty;
         let not_trait_impl = blk.target_trait(db).is_none();
 
         if !(same_ty && not_trait_impl) {
-            return false;
+            None
+        } else {
+            Some(impl_blk)
         }
-
-        found_new_fn = has_new_fn(impl_blk);
-        true
     });
 
-    if found_new_fn {
-        None
-    } else {
-        Some(block)
+    if let Some(ref impl_blk) = block {
+        if has_new_fn(impl_blk) {
+            return None;
+        }
     }
+
+    Some(block)
 }
 
 fn has_new_fn(imp: &ast::ImplBlock) -> bool {
     if let Some(il) = imp.item_list() {
         for item in il.impl_items() {
             if let ast::ImplItem::FnDef(f) = item {
-                if f.name().unwrap().text().eq_ignore_ascii_case("new") {
-                    return true;
+                if let Some(name) = f.name() {
+                    if name.text().eq_ignore_ascii_case("new") {
+                        return true;
+                    }
                 }
             }
         }
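
The rewrite above replaces the mutable `found_new_fn` flag with `Iterator::find_map`, which already short-circuits on the first closure call that returns `Some`, and the `?` on `from_source` now skips a block instead of panicking. A minimal standalone sketch of that pattern (hypothetical data, not rust-analyzer types):

    fn main() {
        let blocks = ["trait impl", "inherent impl", "another inherent impl"];
        // find_map stops at the first Some, so no external flag is needed
        // to remember whether a match was already found.
        let first_inherent = blocks.iter().find_map(|blk| {
            if blk.starts_with("inherent") {
                Some(blk.to_string())
            } else {
                None
            }
        });
        assert_eq!(first_inherent.as_deref(), Some("inherent impl"));
    }
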
diff --git a/crates/ra_hir_def/src/path.rs b/crates/ra_hir_def/src/path.rs
index ec9d13e82..50f0cad94 100644
--- a/crates/ra_hir_def/src/path.rs
+++ b/crates/ra_hir_def/src/path.rs
@@ -342,6 +342,14 @@ pub mod known {
         )
     }
 
+    pub fn std_ops_neg() -> Path {
+        Path::from_simple_segments(PathKind::Abs, vec![name::STD, name::OPS, name::NEG_TYPE])
+    }
+
+    pub fn std_ops_not() -> Path {
+        Path::from_simple_segments(PathKind::Abs, vec![name::STD, name::OPS, name::NOT_TYPE])
+    }
+
     pub fn std_result_result() -> Path {
         Path::from_simple_segments(PathKind::Abs, vec![name::STD, name::RESULT, name::RESULT_TYPE])
     }
diff --git a/crates/ra_hir_expand/src/name.rs b/crates/ra_hir_expand/src/name.rs
index 4f2f702c0..9e68dd98d 100644
--- a/crates/ra_hir_expand/src/name.rs
+++ b/crates/ra_hir_expand/src/name.rs
@@ -152,6 +152,8 @@ pub const RANGE_INCLUSIVE_TYPE: Name = Name::new_inline_ascii(b"RangeInclusive")
 pub const RANGE_TO_INCLUSIVE_TYPE: Name = Name::new_inline_ascii(b"RangeToInclusive");
 pub const RANGE_TO_TYPE: Name = Name::new_inline_ascii(b"RangeTo");
 pub const RANGE_TYPE: Name = Name::new_inline_ascii(b"Range");
+pub const NEG_TYPE: Name = Name::new_inline_ascii(b"Neg");
+pub const NOT_TYPE: Name = Name::new_inline_ascii(b"Not");
 
 // Builtin Macros
 pub const FILE_MACRO: Name = Name::new_inline_ascii(b"file");
diff --git a/crates/ra_hir_expand/src/quote.rs b/crates/ra_hir_expand/src/quote.rs
index 4f698ff13..aa8a5f23f 100644
--- a/crates/ra_hir_expand/src/quote.rs
+++ b/crates/ra_hir_expand/src/quote.rs
@@ -16,7 +16,7 @@ macro_rules! __quote {
         {
             let children = $crate::__quote!($($tt)*);
             let subtree = tt::Subtree {
-                delimiter: tt::Delimiter::$delim,
+                delimiter: Some(tt::Delimiter::$delim),
                 token_trees: $crate::quote::IntoTt::to_tokens(children),
             };
             subtree
@@ -124,7 +124,7 @@ pub(crate) trait IntoTt {
 
 impl IntoTt for Vec<tt::TokenTree> {
     fn to_subtree(self) -> tt::Subtree {
-        tt::Subtree { delimiter: tt::Delimiter::None, token_trees: self }
+        tt::Subtree { delimiter: None, token_trees: self }
     }
 
     fn to_tokens(self) -> Vec<tt::TokenTree> {
@@ -254,7 +254,8 @@ mod tests {
         let fields =
             fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees.clone()).flatten();
 
-        let list = tt::Subtree { delimiter: tt::Delimiter::Brace, token_trees: fields.collect() };
+        let list =
+            tt::Subtree { delimiter: Some(tt::Delimiter::Brace), token_trees: fields.collect() };
 
         let quoted = quote! {
             impl Clone for #struct_name {
diff --git a/crates/ra_hir_ty/src/infer.rs b/crates/ra_hir_ty/src/infer.rs
index d16f1eb46..a1201b3e4 100644
--- a/crates/ra_hir_ty/src/infer.rs
+++ b/crates/ra_hir_ty/src/infer.rs
@@ -36,8 +36,8 @@ use ra_prof::profile;
 use super::{
     primitive::{FloatTy, IntTy},
     traits::{Guidance, Obligation, ProjectionPredicate, Solution},
-    ApplicationTy, InEnvironment, ProjectionTy, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk,
-    Uncertain,
+    ApplicationTy, InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor,
+    TypeWalk, Uncertain,
 };
 use crate::{db::HirDatabase, infer::diagnostics::InferenceDiagnostic};
 
@@ -338,6 +338,24 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         self.table.resolve_ty_shallow(ty)
     }
 
+    fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option<TypeAliasId>) -> Ty {
+        match assoc_ty {
+            Some(res_assoc_ty) => {
+                let ty = self.table.new_type_var();
+                let projection = ProjectionPredicate {
+                    ty: ty.clone(),
+                    projection_ty: ProjectionTy {
+                        associated_ty: res_assoc_ty,
+                        parameters: Substs::single(inner_ty),
+                    },
+                };
+                self.obligations.push(Obligation::Projection(projection));
+                self.resolve_ty_as_possible(ty)
+            }
+            None => Ty::Unknown,
+        }
+    }
+
     /// Recurses through the given type, normalizing associated types mentioned
     /// in it by replacing them by type variables and registering obligations to
     /// resolve later. This should be done once for every type we get from some
@@ -415,6 +433,18 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         self.db.trait_data(trait_).associated_type_by_name(&name::OK_TYPE)
     }
 
+    fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
+        let path = known::std_ops_neg();
+        let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
+        self.db.trait_data(trait_).associated_type_by_name(&name::OUTPUT_TYPE)
+    }
+
+    fn resolve_ops_not_output(&self) -> Option<TypeAliasId> {
+        let path = known::std_ops_not();
+        let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
+        self.db.trait_data(trait_).associated_type_by_name(&name::OUTPUT_TYPE)
+    }
+
     fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
         let path = known::std_future_future();
         let trait_ = self.resolver.resolve_known_trait(self.db, &path)?;
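
For context, the projection that the new `resolve_associated_type` helper registers corresponds to an associated-type projection such as `<T as Neg>::Output` at the language level. A small self-contained illustration (plain Rust, independent of the rust-analyzer API) of the kind of obligation being solved:

    use std::ops::Neg;

    // Given an inner type T that implements Neg, the result type of `-value`
    // is the projection <T as Neg>::Output; the helper models this by creating
    // a fresh type variable and an obligation tying it to Output.
    fn negate<T: Neg>(value: T) -> <T as Neg>::Output {
        -value
    }

    fn main() {
        let a: i32 = negate(5i32); // <i32 as Neg>::Output = i32
        let b: f64 = negate(2.5f64); // <f64 as Neg>::Output = f64
        println!("{} {}", a, b);
    }
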
diff --git a/crates/ra_hir_ty/src/infer/expr.rs b/crates/ra_hir_ty/src/infer/expr.rs
index 2c296987c..f8c00a7b4 100644
--- a/crates/ra_hir_ty/src/infer/expr.rs
+++ b/crates/ra_hir_ty/src/infer/expr.rs
@@ -19,8 +19,8 @@ use crate::{
     method_resolution, op,
     traits::InEnvironment,
     utils::{generics, variant_data, Generics},
-    CallableDef, InferTy, IntTy, Mutability, Obligation, ProjectionPredicate, ProjectionTy, Substs,
-    TraitRef, Ty, TypeCtor, TypeWalk, Uncertain,
+    ApplicationTy, CallableDef, InferTy, IntTy, Mutability, Obligation, Substs, TraitRef, Ty,
+    TypeCtor, TypeWalk, Uncertain,
 };
 
 use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch};
@@ -95,21 +95,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
             Expr::For { iterable, body, pat } => {
                 let iterable_ty = self.infer_expr(*iterable, &Expectation::none());
 
-                let pat_ty = match self.resolve_into_iter_item() {
-                    Some(into_iter_item_alias) => {
-                        let pat_ty = self.table.new_type_var();
-                        let projection = ProjectionPredicate {
-                            ty: pat_ty.clone(),
-                            projection_ty: ProjectionTy {
-                                associated_ty: into_iter_item_alias,
-                                parameters: Substs::single(iterable_ty),
-                            },
-                        };
-                        self.obligations.push(Obligation::Projection(projection));
-                        self.resolve_ty_as_possible(pat_ty)
-                    }
-                    None => Ty::Unknown,
-                };
+                let pat_ty =
+                    self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
 
                 self.infer_pat(*pat, &pat_ty, BindingMode::default());
                 self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
@@ -284,40 +271,13 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
             }
             Expr::Await { expr } => {
                 let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
-                let ty = match self.resolve_future_future_output() {
-                    Some(future_future_output_alias) => {
-                        let ty = self.table.new_type_var();
-                        let projection = ProjectionPredicate {
-                            ty: ty.clone(),
-                            projection_ty: ProjectionTy {
-                                associated_ty: future_future_output_alias,
-                                parameters: Substs::single(inner_ty),
-                            },
-                        };
-                        self.obligations.push(Obligation::Projection(projection));
-                        self.resolve_ty_as_possible(ty)
-                    }
-                    None => Ty::Unknown,
-                };
+                let ty =
+                    self.resolve_associated_type(inner_ty, self.resolve_future_future_output());
                 ty
             }
             Expr::Try { expr } => {
                 let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
-                let ty = match self.resolve_ops_try_ok() {
-                    Some(ops_try_ok_alias) => {
-                        let ty = self.table.new_type_var();
-                        let projection = ProjectionPredicate {
-                            ty: ty.clone(),
-                            projection_ty: ProjectionTy {
-                                associated_ty: ops_try_ok_alias,
-                                parameters: Substs::single(inner_ty),
-                            },
-                        };
-                        self.obligations.push(Obligation::Projection(projection));
-                        self.resolve_ty_as_possible(ty)
-                    }
-                    None => Ty::Unknown,
-                };
+                let ty = self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok());
                 ty
             }
             Expr::Cast { expr, type_ref } => {
@@ -372,31 +332,36 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 },
                 UnaryOp::Neg => {
                     match &inner_ty {
-                        Ty::Apply(a_ty) => match a_ty.ctor {
-                            TypeCtor::Int(Uncertain::Unknown)
-                            | TypeCtor::Int(Uncertain::Known(IntTy {
-                                signedness: Signedness::Signed,
-                                ..
-                            }))
-                            | TypeCtor::Float(..) => inner_ty,
-                            _ => Ty::Unknown,
-                        },
-                        Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => {
-                            inner_ty
-                        }
-                        // FIXME: resolve ops::Neg trait
-                        _ => Ty::Unknown,
+                        // Fast path for builtins
+                        Ty::Apply(ApplicationTy {
+                            ctor:
+                                TypeCtor::Int(Uncertain::Known(IntTy {
+                                    signedness: Signedness::Signed,
+                                    ..
+                                })),
+                            ..
+                        })
+                        | Ty::Apply(ApplicationTy {
+                            ctor: TypeCtor::Int(Uncertain::Unknown),
+                            ..
+                        })
+                        | Ty::Apply(ApplicationTy { ctor: TypeCtor::Float(_), .. })
+                        | Ty::Infer(InferTy::IntVar(..))
+                        | Ty::Infer(InferTy::FloatVar(..)) => inner_ty,
+                        // Otherwise we resolve via the std::ops::Neg trait
+                        _ => self
+                            .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
                     }
                 }
                 UnaryOp::Not => {
                     match &inner_ty {
-                        Ty::Apply(a_ty) => match a_ty.ctor {
-                            TypeCtor::Bool | TypeCtor::Int(_) => inner_ty,
-                            _ => Ty::Unknown,
-                        },
-                        Ty::Infer(InferTy::IntVar(..)) => inner_ty,
-                        // FIXME: resolve ops::Not trait for inner_ty
-                        _ => Ty::Unknown,
+                        // Fast path for builtins
+                        Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. })
+                        | Ty::Apply(ApplicationTy { ctor: TypeCtor::Int(_), .. })
+                        | Ty::Infer(InferTy::IntVar(..)) => inner_ty,
+                        // Otherwise we resolve via the std::ops::Not trait
+                        _ => self
+                            .resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
                     }
                 }
             }
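
The Neg/Not arms above now keep a fast path for builtin integers, floats and bool, and fall back to the trait's `Output` associated type for everything else. An illustrative program (ordinary Rust, not part of the patch) showing both paths for `!`:

    use std::ops::Not;

    struct Flags(u8);

    impl Not for Flags {
        type Output = Flags;
        fn not(self) -> Flags {
            Flags(!self.0)
        }
    }

    fn main() {
        let builtin = !true; // bool: handled by the builtin fast path
        let via_trait = !Flags(0b1010); // user type: typed as <Flags as Not>::Output
        println!("{} {:#010b}", builtin, via_trait.0);
    }
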
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs
index 93c5f9a15..6139adb72 100644
--- a/crates/ra_hir_ty/src/tests/traits.rs
+++ b/crates/ra_hir_ty/src/tests/traits.rs
@@ -116,6 +116,70 @@ mod collections {
 }
 
 #[test]
+fn infer_ops_neg() {
+    let (db, pos) = TestDB::with_position(
+        r#"
+//- /main.rs crate:main deps:std
+
+struct Bar;
+struct Foo;
+
+impl std::ops::Neg for Bar {
+    type Output = Foo;
+}
+
+fn test() {
+    let a = Bar;
+    let b = -a;
+    b<|>;
+}
+
+//- /std.rs crate:std
+
+#[prelude_import] use ops::*;
+mod ops {
+    pub trait Neg {
+        type Output;
+    }
+}
+"#,
+    );
+    assert_eq!("Foo", type_at_pos(&db, pos));
+}
+
+#[test]
+fn infer_ops_not() {
+    let (db, pos) = TestDB::with_position(
+        r#"
+//- /main.rs crate:main deps:std
+
+struct Bar;
+struct Foo;
+
+impl std::ops::Not for Bar {
+    type Output = Foo;
+}
+
+fn test() {
+    let a = Bar;
+    let b = !a;
+    b<|>;
+}
+
+//- /std.rs crate:std
+
+#[prelude_import] use ops::*;
+mod ops {
+    pub trait Not {
+        type Output;
+    }
+}
+"#,
+    );
+    assert_eq!("Foo", type_at_pos(&db, pos));
+}
+
+#[test]
 fn infer_from_bound_1() {
     assert_snapshot!(
         infer(r#"
diff --git a/crates/ra_ide/src/expand.rs b/crates/ra_ide/src/expand.rs
index 216d5cfec..661628ae4 100644
--- a/crates/ra_ide/src/expand.rs
+++ b/crates/ra_ide/src/expand.rs
@@ -3,7 +3,7 @@ use std::iter::successors;
 
 use hir::InFile;
 use ra_db::FileId;
-use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken};
+use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange};
 
 use crate::{db::RootDatabase, FileRange};
 
@@ -17,26 +17,45 @@ pub(crate) fn original_range(db: &RootDatabase, node: InFile<&SyntaxNode>) -> Fi
         }
         Some(it) => it,
     };
-    // FIXME: the following completely wrong.
-    //
-    // *First*, we should try to map first and last tokens of node, and, if that
-    // fails, return the range of the overall macro expansions.
-    //
-    // *Second*, we should handle recurside macro expansions
-
-    let token = node
-        .value
-        .descendants_with_tokens()
-        .filter_map(|it| it.into_token())
-        .find_map(|it| expansion.map_token_up(node.with_value(&it)));
-
-    match token {
-        Some(it) => {
-            FileRange { file_id: it.file_id.original_file(db), range: it.value.text_range() }
+    // FIXME: We should handle recurside macro expansions
+
+    let range = node.value.descendants_with_tokens().find_map(|it| {
+        match it.as_token() {
+            // FIXME: Remove this branch after all `tt::TokenTree`s have a proper `TokenId`,
+            // and return the range of the overall macro expansions if mapping first and last tokens fails.
+            Some(token) => {
+                let token = expansion.map_token_up(node.with_value(&token))?;
+                Some(token.with_value(token.value.text_range()))
+            }
+            None => {
+                // Try to map first and last tokens of node, and, if success, return the union range of mapped tokens
+                let n = it.into_node()?;
+                let first = expansion.map_token_up(node.with_value(&n.first_token()?))?;
+                let last = expansion.map_token_up(node.with_value(&n.last_token()?))?;
+
+                // FIXME: Is is possible ?
+                if first.file_id != last.file_id {
+                    return None;
+                }
+
+                // FIXME: Add union method in TextRange
+                let range = union_range(first.value.text_range(), last.value.text_range());
+                Some(first.with_value(range))
+            }
         }
+    });
+
+    return match range {
+        Some(it) => FileRange { file_id: it.file_id.original_file(db), range: it.value },
         None => {
             FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
         }
+    };
+
+    fn union_range(a: TextRange, b: TextRange) -> TextRange {
+        let start = a.start().min(b.start());
+        let end = a.end().max(b.end());
+        TextRange::from_to(start, end)
     }
 }
 
diff --git a/crates/ra_lsp_server/src/conv.rs b/crates/ra_lsp_server/src/conv.rs
index b13093cfe..5561f6270 100644
--- a/crates/ra_lsp_server/src/conv.rs
+++ b/crates/ra_lsp_server/src/conv.rs
@@ -130,6 +130,11 @@ impl ConvWith<(&LineIndex, LineEndings)> for CompletionItem {
             deprecated: Some(self.deprecated()),
             ..Default::default()
         };
+
+        if self.deprecated() {
+            res.tags = Some(vec![lsp_types::CompletionItemTag::Deprecated])
+        }
+
         res.insert_text_format = Some(match self.insert_text_format() {
             InsertTextFormat::Snippet => lsp_types::InsertTextFormat::Snippet,
             InsertTextFormat::PlainText => lsp_types::InsertTextFormat::PlainText,
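
The hunk above keeps populating the legacy `deprecated` field and additionally emits the LSP 3.15 `tags` array, so clients that only understand one of the two still render the strike-through. A hedged sketch of the resulting conversion, assuming the same `lsp_types` version the crate already depends on:

    fn mark_deprecated(mut item: lsp_types::CompletionItem) -> lsp_types::CompletionItem {
        // Older clients read the boolean field, newer ones the tag list.
        item.deprecated = Some(true);
        item.tags = Some(vec![lsp_types::CompletionItemTag::Deprecated]);
        item
    }
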
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index bbddebe67..0d2d43bef 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -159,14 +159,14 @@ impl Rule {
             .expect_subtree()
             .map_err(|()| ParseError::Expected("expected subtree".to_string()))?
             .clone();
-        lhs.delimiter = tt::Delimiter::None;
+        lhs.delimiter = None;
         src.expect_char('=').map_err(|()| ParseError::Expected("expected `=`".to_string()))?;
         src.expect_char('>').map_err(|()| ParseError::Expected("expected `>`".to_string()))?;
         let mut rhs = src
             .expect_subtree()
             .map_err(|()| ParseError::Expected("expected subtree".to_string()))?
             .clone();
-        rhs.delimiter = tt::Delimiter::None;
+        rhs.delimiter = None;
         Ok(crate::Rule { lhs, rhs })
     }
 }
diff --git a/crates/ra_mbe/src/mbe_expander/matcher.rs b/crates/ra_mbe/src/mbe_expander/matcher.rs
index 33b9d483d..3f5136478 100644
--- a/crates/ra_mbe/src/mbe_expander/matcher.rs
+++ b/crates/ra_mbe/src/mbe_expander/matcher.rs
@@ -16,7 +16,7 @@ impl Bindings {
     fn push_optional(&mut self, name: &SmolStr) {
         // FIXME: Do we have a better way to represent an empty token ?
         // Insert an empty subtree for empty token
-        let tt = tt::Subtree { delimiter: tt::Delimiter::None, token_trees: vec![] }.into();
+        let tt = tt::Subtree::default().into();
         self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
     }
 
@@ -65,7 +65,7 @@ macro_rules! bail {
 }
 
 pub(super) fn match_(pattern: &tt::Subtree, src: &tt::Subtree) -> Result<Bindings, ExpandError> {
-    assert!(pattern.delimiter == tt::Delimiter::None);
+    assert!(pattern.delimiter == None);
 
     let mut res = Bindings::default();
     let mut src = TtIter::new(src);
@@ -210,7 +210,7 @@ impl<'a> TtIter<'a> {
             0 => Err(()),
             1 => Ok(res[0].clone()),
             _ => Ok(tt::TokenTree::Subtree(tt::Subtree {
-                delimiter: tt::Delimiter::None,
+                delimiter: None,
                 token_trees: res.into_iter().cloned().collect(),
             })),
         }
diff --git a/crates/ra_mbe/src/mbe_expander/transcriber.rs b/crates/ra_mbe/src/mbe_expander/transcriber.rs
index ed094d5bb..f7636db11 100644
--- a/crates/ra_mbe/src/mbe_expander/transcriber.rs
+++ b/crates/ra_mbe/src/mbe_expander/transcriber.rs
@@ -50,7 +50,7 @@ pub(super) fn transcribe(
     template: &tt::Subtree,
     bindings: &Bindings,
 ) -> Result<tt::Subtree, ExpandError> {
-    assert!(template.delimiter == tt::Delimiter::None);
+    assert!(template.delimiter == None);
     let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new(), var_expanded: false };
     expand_subtree(&mut ctx, template)
 }
@@ -106,7 +106,7 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> Result<Fragment, ExpandError>
         // ```
         // We just treat it a normal tokens
         let tt = tt::Subtree {
-            delimiter: tt::Delimiter::None,
+            delimiter: None,
             token_trees: vec![
                 tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone }).into(),
                 tt::Leaf::from(tt::Ident { text: v.clone(), id: tt::TokenId::unspecified() })
@@ -147,7 +147,7 @@ fn expand_repeat(
     ctx.var_expanded = false;
 
     while let Ok(mut t) = expand_subtree(ctx, template) {
-        t.delimiter = tt::Delimiter::None;
+        t.delimiter = None;
         // if no var expanded in the child, we count it as a fail
         if !ctx.var_expanded {
             break;
@@ -212,7 +212,7 @@ fn expand_repeat(
 
     // Check if it is a single token subtree without any delimiter
     // e.g {Delimiter:None> ['>'] /Delimiter:None>}
-    let tt = tt::Subtree { delimiter: tt::Delimiter::None, token_trees: buf }.into();
+    let tt = tt::Subtree { delimiter: None, token_trees: buf }.into();
     Ok(Fragment::Tokens(tt))
 }
 
@@ -225,7 +225,7 @@ fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
 
 fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
     match tt.delimiter {
-        tt::Delimiter::None => buf.extend(tt.token_trees),
+        None => buf.extend(tt.token_trees),
         _ => buf.push(tt.into()),
     }
 }
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
index 7ef45f6dc..061e9f20b 100644
--- a/crates/ra_mbe/src/subtree_source.rs
+++ b/crates/ra_mbe/src/subtree_source.rs
@@ -114,12 +114,12 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> {
     }
 }
 
-fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken {
+fn convert_delim(d: Option<tt::Delimiter>, closing: bool) -> TtToken {
     let (kinds, texts) = match d {
-        tt::Delimiter::Parenthesis => ([T!['('], T![')']], "()"),
-        tt::Delimiter::Brace => ([T!['{'], T!['}']], "{}"),
-        tt::Delimiter::Bracket => ([T!['['], T![']']], "[]"),
-        tt::Delimiter::None => ([L_DOLLAR, R_DOLLAR], ""),
+        Some(tt::Delimiter::Parenthesis) => ([T!['('], T![')']], "()"),
+        Some(tt::Delimiter::Brace) => ([T!['{'], T!['}']], "{}"),
+        Some(tt::Delimiter::Bracket) => ([T!['['], T![']']], "[]"),
+        None => ([L_DOLLAR, R_DOLLAR], ""),
     };
 
     let idx = closing as usize;
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 66c1f0337..b8e2cfc1d 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -51,7 +51,7 @@ pub fn token_tree_to_syntax_node(
 ) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
     let tmp;
     let tokens = match tt {
-        tt::Subtree { delimiter: tt::Delimiter::None, token_trees } => token_trees.as_slice(),
+        tt::Subtree { delimiter: None, token_trees } => token_trees.as_slice(),
         _ => {
             tmp = [tt.clone().into()];
             &tmp[..]
@@ -121,7 +121,7 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
         token_trees.push(mk_punct('!'));
     }
     token_trees.push(tt::TokenTree::from(tt::Subtree {
-        delimiter: tt::Delimiter::Bracket,
+        delimiter: Some(tt::Delimiter::Bracket),
         token_trees: meta_tkns,
     }));
 
@@ -156,7 +156,7 @@ impl Convertor {
     fn go(&mut self, tt: &SyntaxNode) -> Option<tt::Subtree> {
         // This tree is empty
         if tt.first_child_or_token().is_none() {
-            return Some(tt::Subtree { token_trees: vec![], delimiter: tt::Delimiter::None });
+            return Some(tt::Subtree { token_trees: vec![], delimiter: None });
         }
 
         let first_child = tt.first_child_or_token()?;
@@ -173,7 +173,7 @@ impl Convertor {
             .last()
             .unwrap();
         if first_child.kind().is_trivia() {
-            return Some(tt::Subtree { token_trees: vec![], delimiter: tt::Delimiter::None });
+            return Some(tt::Subtree { token_trees: vec![], delimiter: None });
         }
 
         let last_child = successors(Some(last_child), |it| {
@@ -187,10 +187,10 @@ impl Convertor {
         .unwrap();
 
         let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
-            (T!['('], T![')']) => (tt::Delimiter::Parenthesis, true),
-            (T!['{'], T!['}']) => (tt::Delimiter::Brace, true),
-            (T!['['], T![']']) => (tt::Delimiter::Bracket, true),
-            _ => (tt::Delimiter::None, false),
+            (T!['('], T![')']) => (Some(tt::Delimiter::Parenthesis), true),
+            (T!['{'], T!['}']) => (Some(tt::Delimiter::Brace), true),
+            (T!['['], T![']']) => (Some(tt::Delimiter::Bracket), true),
+            _ => (None, false),
         };
 
         let mut token_trees = Vec::new();
@@ -246,9 +246,7 @@ impl Convertor {
                 }
                 NodeOrToken::Node(node) => {
                     let child_subtree = self.go(&node)?;
-                    if child_subtree.delimiter == tt::Delimiter::None
-                        && node.kind() != SyntaxKind::TOKEN_TREE
-                    {
+                    if child_subtree.delimiter.is_none() && node.kind() != SyntaxKind::TOKEN_TREE {
                         token_trees.extend(child_subtree.token_trees);
                     } else {
                         token_trees.push(child_subtree.into());
@@ -299,16 +297,16 @@ impl<'a> TtTreeSink<'a> {
     }
 }
 
-fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
+fn delim_to_str(d: Option<tt::Delimiter>, closing: bool) -> SmolStr {
     let texts = match d {
-        tt::Delimiter::Parenthesis => "()",
-        tt::Delimiter::Brace => "{}",
-        tt::Delimiter::Bracket => "[]",
-        tt::Delimiter::None => "",
+        Some(tt::Delimiter::Parenthesis) => "()",
+        Some(tt::Delimiter::Brace) => "{}",
+        Some(tt::Delimiter::Bracket) => "[]",
+        None => return "".into(),
     };
 
     let idx = closing as usize;
-    let text = if !texts.is_empty() { &texts[idx..texts.len() - (1 - idx)] } else { "" };
+    let text = &texts[idx..texts.len() - (1 - idx)];
     text.into()
 }
 
@@ -497,7 +495,7 @@ mod tests {
         let token_tree = ast::TokenTree::cast(token_tree).unwrap();
         let tt = ast_to_token_tree(&token_tree).unwrap().0;
 
-        assert_eq!(tt.delimiter, tt::Delimiter::Brace);
+        assert_eq!(tt.delimiter, Some(tt::Delimiter::Brace));
     }
 
     #[test]
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 0109a4d98..148cc2625 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -1463,7 +1463,7 @@ pub(crate) fn assert_expansion(
         let wrapped = ast::SourceFile::parse(&wrapped);
         let wrapped = wrapped.tree().syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
         let mut wrapped = ast_to_token_tree(&wrapped).unwrap().0;
-        wrapped.delimiter = tt::Delimiter::None;
+        wrapped.delimiter = None;
         wrapped
     };
     let (expanded_tree, expected_tree) = match kind {
diff --git a/crates/ra_tt/src/lib.rs b/crates/ra_tt/src/lib.rs
index 4c00b8f30..e7bfd5fd2 100644
--- a/crates/ra_tt/src/lib.rs
+++ b/crates/ra_tt/src/lib.rs
@@ -48,9 +48,9 @@ pub enum Leaf {
 }
 impl_froms!(Leaf: Literal, Punct, Ident);
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)]
 pub struct Subtree {
-    pub delimiter: Delimiter,
+    pub delimiter: Option<Delimiter>,
     pub token_trees: Vec<TokenTree>,
 }
 
@@ -59,7 +59,6 @@ pub enum Delimiter {
     Parenthesis,
     Brace,
     Bracket,
-    None,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -97,10 +96,10 @@ impl fmt::Display for TokenTree {
 impl fmt::Display for Subtree {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let (l, r) = match self.delimiter {
-            Delimiter::Parenthesis => ("(", ")"),
-            Delimiter::Brace => ("{", "}"),
-            Delimiter::Bracket => ("[", "]"),
-            Delimiter::None => ("", ""),
+            Some(Delimiter::Parenthesis) => ("(", ")"),
+            Some(Delimiter::Brace) => ("{", "}"),
+            Some(Delimiter::Bracket) => ("[", "]"),
+            None => ("", ""),
         };
         f.write_str(l)?;
         let mut needs_space = false;
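
With `delimiter` now an `Option<Delimiter>` and `Default` derived on `Subtree`, the old `Delimiter::None` variant becomes a plain `None`, and an empty undelimited subtree is just `Subtree::default()` (as used in the matcher change above). A simplified standalone mirror of the new shape, not the real `ra_tt` definitions:

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum Delimiter {
        Parenthesis,
        Brace,
        Bracket,
    }

    #[derive(Debug, Default, PartialEq, Eq)]
    struct Subtree {
        delimiter: Option<Delimiter>,
        token_trees: Vec<String>, // stand-in for Vec<TokenTree>
    }

    fn main() {
        // default() gives the undelimited, empty subtree the matcher now uses.
        let empty = Subtree::default();
        assert_eq!(empty, Subtree { delimiter: None, token_trees: vec![] });

        let braced = Subtree { delimiter: Some(Delimiter::Brace), token_trees: vec!["x".into()] };
        assert!(braced.delimiter.is_some());
    }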