Diffstat (limited to 'crates')
-rw-r--r--  crates/assists/src/handlers/inline_function.rs  202
-rw-r--r--  crates/assists/src/lib.rs  2
-rw-r--r--  crates/assists/src/tests/generated.rs  23
-rw-r--r--  crates/hir/src/code_model.rs  12
-rw-r--r--  crates/hir_def/src/attr.rs  56
-rw-r--r--  crates/hir_def/src/db.rs  11
-rw-r--r--  crates/hir_expand/src/proc_macro.rs  10
-rw-r--r--  crates/hir_ty/Cargo.toml  6
-rw-r--r--  crates/mbe/src/mbe_expander/matcher.rs  6
-rw-r--r--  crates/mbe/src/subtree_source.rs  152
-rw-r--r--  crates/mbe/src/syntax_bridge.rs  51
-rw-r--r--  crates/rust-analyzer/src/config.rs  5
-rw-r--r--  crates/ssr/src/parsing.rs  16
-rw-r--r--  crates/ssr/src/tests.rs  47
-rw-r--r--  crates/syntax/Cargo.toml  2
-rw-r--r--  crates/tt/src/buffer.rs  105
16 files changed, 532 insertions(+), 174 deletions(-)
diff --git a/crates/assists/src/handlers/inline_function.rs b/crates/assists/src/handlers/inline_function.rs
new file mode 100644
index 000000000..2edf252b7
--- /dev/null
+++ b/crates/assists/src/handlers/inline_function.rs
@@ -0,0 +1,202 @@
+use ast::make;
+use hir::{HasSource, PathResolution};
+use syntax::{
+    ast::{self, edit::AstNodeEdit, ArgListOwner},
+    AstNode,
+};
+use test_utils::mark;
+
+use crate::{
+    assist_context::{AssistContext, Assists},
+    AssistId, AssistKind,
+};
+
+// Assist: inline_function
+//
+// Inlines a function body.
+//
+// ```
+// fn add(a: u32, b: u32) -> u32 { a + b }
+// fn main() {
+//     let x = add<|>(1, 2);
+// }
+// ```
+// ->
+// ```
+// fn add(a: u32, b: u32) -> u32 { a + b }
+// fn main() {
+//     let x = {
+//         let a = 1;
+//         let b = 2;
+//         a + b
+//     };
+// }
+// ```
+pub(crate) fn inline_function(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
+    let path_expr: ast::PathExpr = ctx.find_node_at_offset()?;
+    let call = path_expr.syntax().parent().and_then(ast::CallExpr::cast)?;
+    let path = path_expr.path()?;
+
+    let function = match ctx.sema.resolve_path(&path)? {
+        PathResolution::Def(hir::ModuleDef::Function(f)) => f,
+        _ => return None,
+    };
+
+    let function_source = function.source(ctx.db())?;
+    let arguments: Vec<_> = call.arg_list()?.args().collect();
+    let parameters = function_parameter_patterns(&function_source.value)?;
+
+    if arguments.len() != parameters.len() {
+        // Can't inline the function because they've passed the wrong number of
+        // arguments to this function
+        mark::hit!(inline_function_incorrect_number_of_arguments);
+        return None;
+    }
+
+    let new_bindings = parameters.into_iter().zip(arguments);
+
+    let body = function_source.value.body()?;
+
+    acc.add(
+        AssistId("inline_function", AssistKind::RefactorInline),
+        format!("Inline `{}`", path),
+        call.syntax().text_range(),
+        |builder| {
+            let mut statements: Vec<ast::Stmt> = Vec::new();
+
+            for (pattern, value) in new_bindings {
+                statements.push(make::let_stmt(pattern, Some(value)).into());
+            }
+
+            statements.extend(body.statements());
+
+            let original_indentation = call.indent_level();
+            let replacement = make::block_expr(statements, body.expr())
+                .reset_indent()
+                .indent(original_indentation);
+
+            builder.replace_ast(ast::Expr::CallExpr(call), ast::Expr::BlockExpr(replacement));
+        },
+    )
+}
+
+fn function_parameter_patterns(value: &ast::Fn) -> Option<Vec<ast::Pat>> {
+    let mut patterns = Vec::new();
+
+    for param in value.param_list()?.params() {
+        let pattern = param.pat()?;
+        patterns.push(pattern);
+    }
+
+    Some(patterns)
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::{check_assist, check_assist_not_applicable};
+
+    use super::*;
+
+    #[test]
+    fn no_args_or_return_value_gets_inlined_without_block() {
+        check_assist(
+            inline_function,
+            r#"
+fn foo() { println!("Hello, World!"); }
+fn main() {
+    fo<|>o();
+}
+"#,
+            r#"
+fn foo() { println!("Hello, World!"); }
+fn main() {
+    {
+        println!("Hello, World!");
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn args_with_side_effects() {
+        check_assist(
+            inline_function,
+            r#"
+fn foo(name: String) { println!("Hello, {}!", name); }
+fn main() {
+    foo<|>(String::from("Michael"));
+}
+"#,
+            r#"
+fn foo(name: String) { println!("Hello, {}!", name); }
+fn main() {
+    {
+        let name = String::from("Michael");
+        println!("Hello, {}!", name);
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn method_inlining_isnt_supported() {
+        check_assist_not_applicable(
+            inline_function,
+            r"
+struct Foo;
+impl Foo { fn bar(&self) {} }
+
+fn main() { Foo.bar<|>(); }
+",
+        );
+    }
+
+    #[test]
+    fn not_applicable_when_incorrect_number_of_parameters_are_provided() {
+        mark::check!(inline_function_incorrect_number_of_arguments);
+        check_assist_not_applicable(
+            inline_function,
+            r#"
+fn add(a: u32, b: u32) -> u32 { a + b }
+fn main() { let x = add<|>(42); }
+"#,
+        );
+    }
+
+    #[test]
+    fn function_with_multiple_statements() {
+        check_assist(
+            inline_function,
+            r#"
+fn foo(a: u32, b: u32) -> u32 {
+    let x = a + b;
+    let y = x - b;
+    x * y
+}
+
+fn main() {
+    let x = foo<|>(1, 2);
+}
+"#,
+            r#"
+fn foo(a: u32, b: u32) -> u32 {
+    let x = a + b;
+    let y = x - b;
+    x * y
+}
+
+fn main() {
+    let x = {
+        let a = 1;
+        let b = 2;
+        let x = a + b;
+        let y = x - b;
+        x * y
+    };
+}
+"#,
+        );
+    }
+}
diff --git a/crates/assists/src/lib.rs b/crates/assists/src/lib.rs
index 01baa65fe..9c2a95735 100644
--- a/crates/assists/src/lib.rs
+++ b/crates/assists/src/lib.rs
@@ -131,6 +131,7 @@ mod handlers {
     mod generate_impl;
     mod generate_new;
     mod infer_function_return_type;
+    mod inline_function;
     mod inline_local_variable;
     mod introduce_named_lifetime;
     mod invert_if;
@@ -183,6 +184,7 @@ mod handlers {
             generate_impl::generate_impl,
             generate_new::generate_new,
             infer_function_return_type::infer_function_return_type,
+            inline_function::inline_function,
             inline_local_variable::inline_local_variable,
             introduce_named_lifetime::introduce_named_lifetime,
             invert_if::invert_if,
diff --git a/crates/assists/src/tests/generated.rs b/crates/assists/src/tests/generated.rs
index 85e3c6742..b15352cf3 100644
--- a/crates/assists/src/tests/generated.rs
+++ b/crates/assists/src/tests/generated.rs
@@ -531,6 +531,29 @@ fn foo() -> i32 { 42i32 }
 }
 
 #[test]
+fn doctest_inline_function() {
+    check_doc_test(
+        "inline_function",
+        r#####"
+fn add(a: u32, b: u32) -> u32 { a + b }
+fn main() {
+    let x = add<|>(1, 2);
+}
+"#####,
+        r#####"
+fn add(a: u32, b: u32) -> u32 { a + b }
+fn main() {
+    let x = {
+        let a = 1;
+        let b = 2;
+        a + b
+    };
+}
+"#####,
+    )
+}
+
+#[test]
 fn doctest_inline_local_variable() {
     check_doc_test(
         "inline_local_variable",
diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs
index 071e553a8..1a4aa78fb 100644
--- a/crates/hir/src/code_model.rs
+++ b/crates/hir/src/code_model.rs
@@ -743,6 +743,18 @@ impl Function {
         db.function_data(self.id).name.clone()
     }
 
+    /// Get this function's return type
+    pub fn ret_type(self, db: &dyn HirDatabase) -> Type {
+        let resolver = self.id.resolver(db.upcast());
+        let ret_type = &db.function_data(self.id).ret_type;
+        let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
+        let environment = TraitEnvironment::lower(db, &resolver);
+        Type {
+            krate: self.id.lookup(db.upcast()).container.module(db.upcast()).krate,
+            ty: InEnvironment { value: Ty::from_hir_ext(&ctx, ret_type).0, environment },
+        }
+    }
+
     pub fn self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
         if !db.function_data(self.id).has_self_param {
             return None;
diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs
index 6b79e7bad..9e6426b31 100644
--- a/crates/hir_def/src/attr.rs
+++ b/crates/hir_def/src/attr.rs
@@ -2,6 +2,7 @@
 
 use std::{ops, sync::Arc};
 
+use arena::map::ArenaMap;
 use base_db::CrateId;
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;
@@ -21,7 +22,8 @@ use crate::{
     nameres::ModuleSource,
     path::{ModPath, PathKind},
     src::HasChildSource,
-    AdtId, AttrDefId, GenericParamId, Lookup,
+    AdtId, AttrDefId, EnumId, GenericParamId, HasModule, LocalEnumVariantId, LocalFieldId, Lookup,
+    VariantId,
 };
 
 /// Holds documentation
@@ -210,16 +212,10 @@ impl Attrs {
                 }
             }
             AttrDefId::FieldId(it) => {
-                let src = it.parent.child_source(db);
-                match &src.value[it.local_id] {
-                    Either::Left(_tuple) => RawAttrs::default(),
-                    Either::Right(record) => RawAttrs::from_attrs_owner(db, src.with_value(record)),
-                }
+                return db.fields_attrs(it.parent)[it.local_id].clone();
             }
-            AttrDefId::EnumVariantId(var_id) => {
-                let src = var_id.parent.child_source(db);
-                let src = src.as_ref().map(|it| &it[var_id.local_id]);
-                RawAttrs::from_attrs_owner(db, src.map(|it| it as &dyn AttrsOwner))
+            AttrDefId::EnumVariantId(it) => {
+                return db.variants_attrs(it.parent)[it.local_id].clone();
             }
             AttrDefId::AdtId(it) => match it {
                 AdtId::StructId(it) => attrs_from_item_tree(it.lookup(db).id, db),
@@ -259,6 +255,46 @@ impl Attrs {
         raw_attrs.filter(db, def.krate(db))
     }
 
+    pub(crate) fn variants_attrs_query(
+        db: &dyn DefDatabase,
+        e: EnumId,
+    ) -> Arc<ArenaMap<LocalEnumVariantId, Attrs>> {
+        let krate = e.lookup(db).container.module(db).krate;
+        let src = e.child_source(db);
+        let mut res = ArenaMap::default();
+
+        for (id, var) in src.value.iter() {
+            let attrs = RawAttrs::from_attrs_owner(db, src.with_value(var as &dyn AttrsOwner))
+                .filter(db, krate);
+
+            res.insert(id, attrs)
+        }
+
+        Arc::new(res)
+    }
+
+    pub(crate) fn fields_attrs_query(
+        db: &dyn DefDatabase,
+        v: VariantId,
+    ) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
+        let krate = v.module(db).krate;
+        let src = v.child_source(db);
+        let mut res = ArenaMap::default();
+
+        for (id, fld) in src.value.iter() {
+            let attrs = match fld {
+                Either::Left(_tuple) => Attrs::default(),
+                Either::Right(record) => {
+                    RawAttrs::from_attrs_owner(db, src.with_value(record)).filter(db, krate)
+                }
+            };
+
+            res.insert(id, attrs);
+        }
+
+        Arc::new(res)
+    }
+
     pub fn by_key(&self, key: &'static str) -> AttrQuery<'_> {
         AttrQuery { attrs: self, key }
     }
diff --git a/crates/hir_def/src/db.rs b/crates/hir_def/src/db.rs
index d1a459066..d3bf5b34c 100644
--- a/crates/hir_def/src/db.rs
+++ b/crates/hir_def/src/db.rs
@@ -1,6 +1,7 @@
 //! Defines database & queries for name resolution.
 use std::sync::Arc;
 
+use arena::map::ArenaMap;
 use base_db::{salsa, CrateId, SourceDatabase, Upcast};
 use hir_expand::{db::AstDatabase, HirFileId};
 use syntax::SmolStr;
@@ -16,8 +17,8 @@ use crate::{
     lang_item::{LangItemTarget, LangItems},
     nameres::CrateDefMap,
     AttrDefId, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, FunctionId, FunctionLoc,
-    GenericDefId, ImplId, ImplLoc, StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc,
-    TypeAliasId, TypeAliasLoc, UnionId, UnionLoc,
+    GenericDefId, ImplId, ImplLoc, LocalEnumVariantId, LocalFieldId, StaticId, StaticLoc, StructId,
+    StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, VariantId,
 };
 
 #[salsa::query_group(InternDatabaseStorage)]
@@ -92,6 +93,12 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
     #[salsa::invoke(GenericParams::generic_params_query)]
     fn generic_params(&self, def: GenericDefId) -> Arc<GenericParams>;
 
+    #[salsa::invoke(Attrs::variants_attrs_query)]
+    fn variants_attrs(&self, def: EnumId) -> Arc<ArenaMap<LocalEnumVariantId, Attrs>>;
+
+    #[salsa::invoke(Attrs::fields_attrs_query)]
+    fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
+
     #[salsa::invoke(Attrs::attrs_query)]
     fn attrs(&self, def: AttrDefId) -> Attrs;
 
diff --git a/crates/hir_expand/src/proc_macro.rs b/crates/hir_expand/src/proc_macro.rs
index 7c77f6ce0..1923daca5 100644
--- a/crates/hir_expand/src/proc_macro.rs
+++ b/crates/hir_expand/src/proc_macro.rs
@@ -58,7 +58,7 @@ impl ProcMacroExpander {
 }
 
 fn eat_punct(cursor: &mut Cursor, c: char) -> bool {
-    if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = cursor.token_tree() {
+    if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = cursor.token_tree() {
         if punct.char == c {
             *cursor = cursor.bump();
             return true;
@@ -68,7 +68,7 @@ fn eat_punct(cursor: &mut Cursor, c: char) -> bool {
 }
 
 fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool {
-    if let Some(tt::TokenTree::Subtree(subtree)) = cursor.token_tree() {
+    if let Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) = cursor.token_tree() {
         if Some(kind) == subtree.delimiter_kind() {
             *cursor = cursor.bump_subtree();
             return true;
@@ -78,7 +78,7 @@ fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool {
 }
 
 fn eat_ident(cursor: &mut Cursor, t: &str) -> bool {
-    if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = cursor.token_tree() {
+    if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) = cursor.token_tree() {
         if t == ident.text.as_str() {
             *cursor = cursor.bump();
             return true;
@@ -88,7 +88,7 @@ fn eat_ident(cursor: &mut Cursor, t: &str) -> bool {
 }
 
 fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> {
-    let buffer = TokenBuffer::new(&tt.token_trees);
+    let buffer = TokenBuffer::from_tokens(&tt.token_trees);
     let mut p = buffer.begin();
     let mut result = tt::Subtree::default();
 
@@ -106,7 +106,7 @@ fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> {
             }
         }
 
-        result.token_trees.push(curr.token_tree()?.clone());
+        result.token_trees.push(curr.token_tree()?.cloned());
         p = curr.bump();
     }
 
diff --git a/crates/hir_ty/Cargo.toml b/crates/hir_ty/Cargo.toml
index 3d1778590..b0a453961 100644
--- a/crates/hir_ty/Cargo.toml
+++ b/crates/hir_ty/Cargo.toml
@@ -17,9 +17,9 @@ ena = "0.14.0"
 log = "0.4.8"
 rustc-hash = "1.1.0"
 scoped-tls = "1"
-chalk-solve = { version = "0.45", default-features = false }
-chalk-ir = "0.45"
-chalk-recursive = "0.45"
+chalk-solve = { version = "0.47", default-features = false }
+chalk-ir = "0.47"
+chalk-recursive = "0.47"
 
 stdx = { path = "../stdx", version = "0.0.0" }
 hir_def = { path = "../hir_def", version = "0.0.0" }
diff --git a/crates/mbe/src/mbe_expander/matcher.rs b/crates/mbe/src/mbe_expander/matcher.rs
index ab5f87c48..fdc8844ce 100644
--- a/crates/mbe/src/mbe_expander/matcher.rs
+++ b/crates/mbe/src/mbe_expander/matcher.rs
@@ -309,7 +309,7 @@ impl<'a> TtIter<'a> {
             }
         }
 
-        let buffer = TokenBuffer::new(&self.inner.as_slice());
+        let buffer = TokenBuffer::from_tokens(&self.inner.as_slice());
         let mut src = SubtreeTokenSource::new(&buffer);
         let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
 
@@ -336,11 +336,11 @@ impl<'a> TtIter<'a> {
             err = Some(err!("no tokens consumed"));
         }
         let res = match res.len() {
-            1 => Some(res[0].clone()),
+            1 => Some(res[0].cloned()),
             0 => None,
             _ => Some(tt::TokenTree::Subtree(tt::Subtree {
                 delimiter: None,
-                token_trees: res.into_iter().cloned().collect(),
+                token_trees: res.into_iter().map(|it| it.cloned()).collect(),
             })),
         };
         ExpandResult { value: res, err }
diff --git a/crates/mbe/src/subtree_source.rs b/crates/mbe/src/subtree_source.rs
index d10d4b70e..d7433bd35 100644
--- a/crates/mbe/src/subtree_source.rs
+++ b/crates/mbe/src/subtree_source.rs
@@ -1,129 +1,104 @@
 //! FIXME: write short doc here
 
 use parser::{Token, TokenSource};
-use std::cell::{Cell, Ref, RefCell};
 use syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T};
-use tt::buffer::{Cursor, TokenBuffer};
+use tt::buffer::TokenBuffer;
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 struct TtToken {
-    kind: SyntaxKind,
-    is_joint_to_next: bool,
+    tt: Token,
     text: SmolStr,
 }
 
-pub(crate) struct SubtreeTokenSource<'a> {
-    cached_cursor: Cell<Cursor<'a>>,
-    cached: RefCell<Vec<Option<TtToken>>>,
+pub(crate) struct SubtreeTokenSource {
+    cached: Vec<TtToken>,
     curr: (Token, usize),
 }
 
-impl<'a> SubtreeTokenSource<'a> {
+impl<'a> SubtreeTokenSource {
     // Helper function used in test
     #[cfg(test)]
     pub(crate) fn text(&self) -> SmolStr {
-        match *self.get(self.curr.1) {
+        match self.cached.get(self.curr.1) {
             Some(ref tt) => tt.text.clone(),
             _ => SmolStr::new(""),
         }
     }
 }
 
-impl<'a> SubtreeTokenSource<'a> {
-    pub(crate) fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
-        let cursor = buffer.begin();
-
-        let mut res = SubtreeTokenSource {
-            curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
-            cached_cursor: Cell::new(cursor),
-            cached: RefCell::new(Vec::with_capacity(10)),
-        };
-        res.curr = (res.mk_token(0), 0);
-        res
-    }
-
-    fn mk_token(&self, pos: usize) -> Token {
-        match *self.get(pos) {
-            Some(ref tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next },
-            None => Token { kind: EOF, is_jointed_to_next: false },
-        }
-    }
-
-    fn get(&self, pos: usize) -> Ref<Option<TtToken>> {
-        fn is_lifetime(c: Cursor) -> Option<(Cursor, SmolStr)> {
-            let tkn = c.token_tree();
-
-            if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn {
+impl<'a> SubtreeTokenSource {
+    pub(crate) fn new(buffer: &TokenBuffer) -> SubtreeTokenSource {
+        let mut current = buffer.begin();
+        let mut cached = Vec::with_capacity(100);
+
+        while !current.eof() {
+            let cursor = current;
+            let tt = cursor.token_tree();
+
+            // Check if it is lifetime
+            if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = tt {
                 if punct.char == '\'' {
-                    let next = c.bump();
-                    if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() {
-                        let res_cursor = next.bump();
-                        let text = SmolStr::new("'".to_string() + &ident.to_string());
-
-                        return Some((res_cursor, text));
+                    let next = cursor.bump();
+                    if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) =
+                        next.token_tree()
+                    {
+                        let text = SmolStr::new("'".to_string() + &ident.text);
+                        cached.push(TtToken {
+                            tt: Token { kind: LIFETIME_IDENT, is_jointed_to_next: false },
+                            text,
+                        });
+                        current = next.bump();
+                        continue;
                     } else {
                         panic!("Next token must be ident : {:#?}", next.token_tree());
                     }
                 }
             }
 
-            None
-        }
-
-        if pos < self.cached.borrow().len() {
-            return Ref::map(self.cached.borrow(), |c| &c[pos]);
-        }
-
-        {
-            let mut cached = self.cached.borrow_mut();
-            while pos >= cached.len() {
-                let cursor = self.cached_cursor.get();
-                if cursor.eof() {
-                    cached.push(None);
-                    continue;
-                }
-
-                if let Some((curr, text)) = is_lifetime(cursor) {
-                    cached.push(Some(TtToken {
-                        kind: LIFETIME_IDENT,
-                        is_joint_to_next: false,
-                        text,
-                    }));
-                    self.cached_cursor.set(curr);
-                    continue;
-                }
-
-                match cursor.token_tree() {
-                    Some(tt::TokenTree::Leaf(leaf)) => {
-                        cached.push(Some(convert_leaf(&leaf)));
-                        self.cached_cursor.set(cursor.bump());
-                    }
-                    Some(tt::TokenTree::Subtree(subtree)) => {
-                        self.cached_cursor.set(cursor.subtree().unwrap());
-                        cached.push(Some(convert_delim(subtree.delimiter_kind(), false)));
-                    }
-                    None => {
-                        if let Some(subtree) = cursor.end() {
-                            cached.push(Some(convert_delim(subtree.delimiter_kind(), true)));
-                            self.cached_cursor.set(cursor.bump());
-                        }
-                    }
-                }
-            }
-        }
-
-        Ref::map(self.cached.borrow(), |c| &c[pos])
+            current = match tt {
+                Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
+                    cached.push(convert_leaf(&leaf));
+                    cursor.bump()
+                }
+                Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
+                    cached.push(convert_delim(subtree.delimiter_kind(), false));
+                    cursor.subtree().unwrap()
+                }
+                None => {
+                    if let Some(subtree) = cursor.end() {
+                        cached.push(convert_delim(subtree.delimiter_kind(), true));
+                        cursor.bump()
+                    } else {
+                        continue;
+                    }
+                }
+            };
+        }
+
+        let mut res = SubtreeTokenSource {
+            curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
+            cached,
+        };
+        res.curr = (res.token(0), 0);
+        res
+    }
+
+    fn token(&self, pos: usize) -> Token {
+        match self.cached.get(pos) {
+            Some(it) => it.tt,
+            None => Token { kind: EOF, is_jointed_to_next: false },
+        }
     }
 }
 
-impl<'a> TokenSource for SubtreeTokenSource<'a> {
+impl<'a> TokenSource for SubtreeTokenSource {
     fn current(&self) -> Token {
         self.curr.0
     }
 
     /// Lookahead n token
     fn lookahead_nth(&self, n: usize) -> Token {
-        self.mk_token(self.curr.1 + n)
+        self.token(self.curr.1 + n)
     }
 
     /// bump cursor to next token
@@ -131,13 +106,12 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> {
         if self.current().kind == EOF {
             return;
         }
-
-        self.curr = (self.mk_token(self.curr.1 + 1), self.curr.1 + 1);
+        self.curr = (self.token(self.curr.1 + 1), self.curr.1 + 1);
     }
 
     /// Is the current token a specified keyword?
     fn is_keyword(&self, kw: &str) -> bool {
-        match *self.get(self.curr.1) {
+        match self.cached.get(self.curr.1) {
             Some(ref t) => t.text == *kw,
             _ => false,
         }
@@ -155,7 +129,7 @@ fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken {
     let idx = closing as usize;
     let kind = kinds[idx];
     let text = if !texts.is_empty() { &texts[idx..texts.len() - (1 - idx)] } else { "" };
-    TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) }
+    TtToken { tt: Token { kind, is_jointed_to_next: false }, text: SmolStr::new(text) }
 }
 
 fn convert_literal(l: &tt::Literal) -> TtToken {
@@ -169,7 +143,7 @@ fn convert_literal(l: &tt::Literal) -> TtToken {
         })
         .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l));
 
-    TtToken { kind, is_joint_to_next: false, text: l.text.clone() }
+    TtToken { tt: Token { kind, is_jointed_to_next: false }, text: l.text.clone() }
 }
 
 fn convert_ident(ident: &tt::Ident) -> TtToken {
@@ -180,7 +154,7 @@ fn convert_ident(ident: &tt::Ident) -> TtToken {
         _ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT),
     };
 
-    TtToken { kind, is_joint_to_next: false, text: ident.text.clone() }
+    TtToken { tt: Token { kind, is_jointed_to_next: false }, text: ident.text.clone() }
}
 
 fn convert_punct(p: tt::Punct) -> TtToken {
@@ -194,7 +168,7 @@ fn convert_punct(p: tt::Punct) -> TtToken {
         let s: &str = p.char.encode_utf8(&mut buf);
         SmolStr::new(s)
     };
-    TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text }
+    TtToken { tt: Token { kind, is_jointed_to_next: p.spacing == tt::Spacing::Joint }, text }
 }
 
 fn convert_leaf(leaf: &tt::Leaf) -> TtToken {
@@ -208,6 +182,7 @@ fn convert_leaf(leaf: &tt::Leaf) -> TtToken {
 #[cfg(test)]
 mod tests {
     use super::{convert_literal, TtToken};
+    use parser::Token;
     use syntax::{SmolStr, SyntaxKind};
 
     #[test]
@@ -218,8 +193,7 @@ mod tests {
                 text: SmolStr::new("-42.0")
             }),
             TtToken {
-                kind: SyntaxKind::FLOAT_NUMBER,
-                is_joint_to_next: false,
+                tt: Token { kind: SyntaxKind::FLOAT_NUMBER, is_jointed_to_next: false },
                 text: SmolStr::new("-42.0")
             }
         );
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 265c0d63d..671036e1c 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -70,15 +70,12 @@ pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
     fragment_kind: FragmentKind,
 ) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
-    let tmp;
-    let tokens = match tt {
-        tt::Subtree { delimiter: None, token_trees } => token_trees.as_slice(),
-        _ => {
-            tmp = [tt.clone().into()];
-            &tmp[..]
+    let buffer = match tt {
+        tt::Subtree { delimiter: None, token_trees } => {
+            TokenBuffer::from_tokens(token_trees.as_slice())
         }
+        _ => TokenBuffer::from_subtree(tt),
     };
-    let buffer = TokenBuffer::new(&tokens);
     let mut token_source = SubtreeTokenSource::new(&buffer);
     let mut tree_sink = TtTreeSink::new(buffer.begin());
     parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
@@ -414,7 +411,7 @@ trait TokenConvertor {
     fn id_alloc(&mut self) -> &mut TokenIdAlloc;
 }
 
-impl<'a> SrcToken for (RawToken, &'a str) {
+impl<'a> SrcToken for (&'a RawToken, &'a str) {
     fn kind(&self) -> SyntaxKind {
         self.0.kind
     }
@@ -431,7 +428,7 @@ impl<'a> SrcToken for (RawToken, &'a str) {
 impl RawConvertor<'_> {}
 
 impl<'a> TokenConvertor for RawConvertor<'a> {
-    type Token = (RawToken, &'a str);
+    type Token = (&'a RawToken, &'a str);
 
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
         convert_doc_comment(&doc_comment(token.1))
@@ -442,11 +439,11 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
         let range = TextRange::at(self.offset, token.len);
         self.offset += token.len;
 
-        Some(((*token, &self.text[range]), range))
+        Some(((token, &self.text[range]), range))
     }
 
     fn peek(&self) -> Option<Self::Token> {
-        let token = self.inner.as_slice().get(0).cloned();
+        let token = self.inner.as_slice().get(0);
 
         token.map(|it| {
             let range = TextRange::at(self.offset, it.len);
@@ -601,17 +598,16 @@ impl<'a> TtTreeSink<'a> {
     }
 }
 
-fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr {
+fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> &'static str {
     let texts = match d {
         Some(tt::DelimiterKind::Parenthesis) => "()",
         Some(tt::DelimiterKind::Brace) => "{}",
         Some(tt::DelimiterKind::Bracket) => "[]",
-        None => return "".into(),
+        None => return "",
     };
 
     let idx = closing as usize;
-    let text = &texts[idx..texts.len() - (1 - idx)];
-    text.into()
+    &texts[idx..texts.len() - (1 - idx)]
 }
 
 impl<'a> TreeSink for TtTreeSink<'a> {
@@ -626,29 +622,32 @@ impl<'a> TreeSink for TtTreeSink<'a> {
 
         let mut last = self.cursor;
         for _ in 0..n_tokens {
+            let tmp_str: SmolStr;
             if self.cursor.eof() {
                 break;
             }
             last = self.cursor;
-            let text: SmolStr = match self.cursor.token_tree() {
-                Some(tt::TokenTree::Leaf(leaf)) => {
+            let text: &str = match self.cursor.token_tree() {
+                Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
                     // Mark the range if needed
                     let (text, id) = match leaf {
-                        tt::Leaf::Ident(ident) => (ident.text.clone(), ident.id),
+                        tt::Leaf::Ident(ident) => (&ident.text, ident.id),
                         tt::Leaf::Punct(punct) => {
                             assert!(punct.char.is_ascii());
                             let char = &(punct.char as u8);
-                            let text = std::str::from_utf8(std::slice::from_ref(char)).unwrap();
-                            (SmolStr::new_inline(text), punct.id)
+                            tmp_str = SmolStr::new_inline(
+                                std::str::from_utf8(std::slice::from_ref(char)).unwrap(),
+                            );
+                            (&tmp_str, punct.id)
                         }
-                        tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id),
+                        tt::Leaf::Literal(lit) => (&lit.text, lit.id),
                     };
                     let range = TextRange::at(self.text_pos, TextSize::of(text.as_str()));
                     self.token_map.insert(id, range);
                     self.cursor = self.cursor.bump();
                     text
                 }
-                Some(tt::TokenTree::Subtree(subtree)) => {
+                Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
                     self.cursor = self.cursor.subtree().unwrap();
                     if let Some(id) = subtree.delimiter.map(|it| it.id) {
                         self.open_delims.insert(id, self.text_pos);
@@ -672,7 +671,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
                 }
             };
             self.buf += &text;
-            self.text_pos += TextSize::of(text.as_str());
+            self.text_pos += TextSize::of(text);
         }
 
         let text = SmolStr::new(self.buf.as_str());
@@ -682,8 +681,8 @@ impl<'a> TreeSink for TtTreeSink<'a> {
         // Add whitespace between adjoint puncts
         let next = last.bump();
         if let (
-            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))),
-            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))),
+            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)),
+            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)),
         ) = (last.token_tree(), next.token_tree())
         {
             // Note: We always assume the semi-colon would be the last token in
@@ -742,7 +741,7 @@ mod tests {
         )
         .expand_tt("literals!(foo);");
         let tts = &[expansion.into()];
-        let buffer = tt::buffer::TokenBuffer::new(tts);
+        let buffer = tt::buffer::TokenBuffer::from_tokens(tts);
         let mut tt_src = SubtreeTokenSource::new(&buffer);
         let mut tokens = vec![];
         while tt_src.current().kind != EOF {
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 685a9fdf0..a5b1d90b1 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -777,9 +777,8 @@ fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
     fields
         .iter()
         .map(|(field, _ty, doc, default)| {
-            let name = field.replace("_", ".");
-            let name = format!("rust-analyzer.{} (default: `{}`)", name, default);
-            format!("{}::\n{}\n", name, doc.join(" "))
+            let name = format!("rust-analyzer.{}", field.replace("_", "."));
+            format!("[[{}]]{} (default: `{}`)::\n{}\n", name, name, default, doc.join(" "))
         })
         .collect::<String>()
 }
diff --git a/crates/ssr/src/parsing.rs b/crates/ssr/src/parsing.rs
index 289affe90..3d5e4feb7 100644
--- a/crates/ssr/src/parsing.rs
+++ b/crates/ssr/src/parsing.rs
@@ -73,12 +73,18 @@ impl ParsedRule {
             placeholders_by_stand_in: pattern.placeholders_by_stand_in(),
             rules: Vec::new(),
         };
-        builder.try_add(ast::Expr::parse(&raw_pattern), raw_template.map(ast::Expr::parse));
+
+        let raw_template_stmt = raw_template.map(ast::Stmt::parse);
+        if let raw_template_expr @ Some(Ok(_)) = raw_template.map(ast::Expr::parse) {
+            builder.try_add(ast::Expr::parse(&raw_pattern), raw_template_expr);
+        } else {
+            builder.try_add(ast::Expr::parse(&raw_pattern), raw_template_stmt.clone());
+        }
         builder.try_add(ast::Type::parse(&raw_pattern), raw_template.map(ast::Type::parse));
         builder.try_add(ast::Item::parse(&raw_pattern), raw_template.map(ast::Item::parse));
         builder.try_add(ast::Path::parse(&raw_pattern), raw_template.map(ast::Path::parse));
         builder.try_add(ast::Pat::parse(&raw_pattern), raw_template.map(ast::Pat::parse));
-        builder.try_add(ast::Stmt::parse(&raw_pattern), raw_template.map(ast::Stmt::parse));
+        builder.try_add(ast::Stmt::parse(&raw_pattern), raw_template_stmt);
         builder.build()
     }
 }
@@ -89,7 +95,11 @@ struct RuleBuilder {
 }
 
 impl RuleBuilder {
-    fn try_add<T: AstNode>(&mut self, pattern: Result<T, ()>, template: Option<Result<T, ()>>) {
+    fn try_add<T: AstNode, T2: AstNode>(
+        &mut self,
+        pattern: Result<T, ()>,
+        template: Option<Result<T2, ()>>,
+    ) {
         match (pattern, template) {
             (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule {
                 placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
diff --git a/crates/ssr/src/tests.rs b/crates/ssr/src/tests.rs
index c4149a849..db9cb8ca1 100644
--- a/crates/ssr/src/tests.rs
+++ b/crates/ssr/src/tests.rs
@@ -204,6 +204,53 @@ fn ssr_let_stmt_replace_expr() {
 }
 
 #[test]
+fn ssr_blockexpr_replace_stmt_with_stmt() {
+    assert_ssr_transform(
+        "if $a() {$b;} ==>> $b;",
+        "{
+    if foo() {
+        bar();
+    }
+    Ok(())
+}",
+        expect![[r#"{
+    bar();
+    Ok(())
+}"#]],
+    );
+}
+
+#[test]
+fn ssr_blockexpr_match_trailing_expr() {
+    assert_matches(
+        "if $a() {$b;}",
+        "{
+    if foo() {
+        bar();
+    }
+}",
+        &["if foo() {
+        bar();
+    }"],
+    );
+}
+
+#[test]
+fn ssr_blockexpr_replace_trailing_expr_with_stmt() {
+    assert_ssr_transform(
+        "if $a() {$b;} ==>> $b;",
+        "{
+    if foo() {
+        bar();
+    }
+}",
+        expect![["{
+    bar();
+}"]],
+    );
+}
+
+#[test]
 fn ssr_function_to_method() {
     assert_ssr_transform(
         "my_function($a, $b) ==>> ($a).my_method($b)",
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 181077944..cfeaed9e6 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
 [dependencies]
 itertools = "0.10.0"
 rowan = "0.10.0"
-rustc_lexer = { version = "695.0.0", package = "rustc-ap-rustc_lexer" }
+rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"
 once_cell = "1.3.1"
diff --git a/crates/tt/src/buffer.rs b/crates/tt/src/buffer.rs
index 02c771f70..3606c887d 100644
--- a/crates/tt/src/buffer.rs
+++ b/crates/tt/src/buffer.rs
@@ -1,6 +1,6 @@
 //! FIXME: write short doc here
 
-use crate::{Subtree, TokenTree};
+use crate::{Leaf, Subtree, TokenTree};
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 struct EntryId(usize);
@@ -13,7 +13,7 @@ struct EntryPtr(EntryId, usize);
 #[derive(Debug)]
 enum Entry<'t> {
     // Mimicking types from proc-macro.
-    Subtree(&'t TokenTree, EntryId),
+    Subtree(Option<&'t TokenTree>, &'t Subtree, EntryId),
     Leaf(&'t TokenTree),
     // End entries contain a pointer to the entry from the containing
     // token tree, or None if this is the outermost level.
@@ -27,37 +27,64 @@ pub struct TokenBuffer<'t> {
     buffers: Vec<Box<[Entry<'t>]>>,
 }
 
-impl<'t> TokenBuffer<'t> {
-    pub fn new(tokens: &'t [TokenTree]) -> TokenBuffer<'t> {
-        let mut buffers = vec![];
-
-        let idx = TokenBuffer::new_inner(tokens, &mut buffers, None);
-        assert_eq!(idx, 0);
-
-        TokenBuffer { buffers }
-    }
+trait TokenList<'a> {
+    fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>);
+}
 
-    fn new_inner(
-        tokens: &'t [TokenTree],
-        buffers: &mut Vec<Box<[Entry<'t>]>>,
-        next: Option<EntryPtr>,
-    ) -> usize {
+impl<'a> TokenList<'a> for &'a [TokenTree] {
+    fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
         // Must contain everything in tokens and then the Entry::End
-        let start_capacity = tokens.len() + 1;
+        let start_capacity = self.len() + 1;
         let mut entries = Vec::with_capacity(start_capacity);
         let mut children = vec![];
-
-        for (idx, tt) in tokens.iter().enumerate() {
+        for (idx, tt) in self.iter().enumerate() {
             match tt {
                 TokenTree::Leaf(_) => {
                     entries.push(Entry::Leaf(tt));
                 }
                 TokenTree::Subtree(subtree) => {
                     entries.push(Entry::End(None));
-                    children.push((idx, (subtree, tt)));
+                    children.push((idx, (subtree, Some(tt))));
                 }
             }
         }
+        (children, entries)
+    }
+}
+
+impl<'a> TokenList<'a> for &'a Subtree {
+    fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
+        // Must contain everything in tokens and then the Entry::End
+        let mut entries = vec![];
+        let mut children = vec![];
+        entries.push(Entry::End(None));
+        children.push((0usize, (*self, None)));
+        (children, entries)
+    }
+}
+
+impl<'t> TokenBuffer<'t> {
+    pub fn from_tokens(tokens: &'t [TokenTree]) -> TokenBuffer<'t> {
+        Self::new(tokens)
+    }
+
+    pub fn from_subtree(subtree: &'t Subtree) -> TokenBuffer<'t> {
+        Self::new(subtree)
+    }
+
+    fn new<T: TokenList<'t>>(tokens: T) -> TokenBuffer<'t> {
+        let mut buffers = vec![];
+        let idx = TokenBuffer::new_inner(tokens, &mut buffers, None);
+        assert_eq!(idx, 0);
+        TokenBuffer { buffers }
+    }
+
+    fn new_inner<T: TokenList<'t>>(
+        tokens: T,
+        buffers: &mut Vec<Box<[Entry<'t>]>>,
+        next: Option<EntryPtr>,
+    ) -> usize {
+        let (children, mut entries) = tokens.entries();
 
         entries.push(Entry::End(next));
         let res = buffers.len();
@@ -65,11 +92,11 @@ impl<'t> TokenBuffer<'t> {
 
         for (child_idx, (subtree, tt)) in children {
             let idx = TokenBuffer::new_inner(
-                &subtree.token_trees,
+                subtree.token_trees.as_slice(),
                 buffers,
                 Some(EntryPtr(EntryId(res), child_idx + 1)),
             );
-            buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, EntryId(idx));
+            buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, subtree, EntryId(idx));
         }
 
         res
@@ -87,6 +114,24 @@ impl<'t> TokenBuffer<'t> {
     }
 }
 
+#[derive(Debug)]
+pub enum TokenTreeRef<'a> {
+    Subtree(&'a Subtree, Option<&'a TokenTree>),
+    Leaf(&'a Leaf, &'a TokenTree),
+}
+
+impl<'a> TokenTreeRef<'a> {
+    pub fn cloned(&self) -> TokenTree {
+        match &self {
+            TokenTreeRef::Subtree(subtree, tt) => match tt {
+                Some(it) => (*it).clone(),
+                None => (*subtree).clone().into(),
+            },
+            TokenTreeRef::Leaf(_, tt) => (*tt).clone(),
+        }
+    }
+}
+
 /// A safe version of `Cursor` from `syn` crate https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L125
 #[derive(Copy, Clone, Debug)]
 pub struct Cursor<'a> {
@@ -114,12 +159,11 @@ impl<'a> Cursor<'a> {
         match self.entry() {
             Some(Entry::End(Some(ptr))) => {
                 let idx = ptr.1;
-                if let Some(Entry::Subtree(TokenTree::Subtree(subtree), _)) =
+                if let Some(Entry::Subtree(_, subtree, _)) =
                     self.buffer.entry(&EntryPtr(ptr.0, idx - 1))
                 {
                     return Some(subtree);
                 }
-
                 None
             }
             _ => None,
@@ -134,7 +178,7 @@ impl<'a> Cursor<'a> {
     /// a cursor into that subtree
     pub fn subtree(self) -> Option<Cursor<'a>> {
         match self.entry() {
-            Some(Entry::Subtree(_, entry_id)) => {
+            Some(Entry::Subtree(_, _, entry_id)) => {
                 Some(Cursor::create(self.buffer, EntryPtr(*entry_id, 0)))
             }
             _ => None,
@@ -142,10 +186,13 @@ impl<'a> Cursor<'a> {
     }
 
     /// If the cursor is pointing at a `TokenTree`, returns it
-    pub fn token_tree(self) -> Option<&'a TokenTree> {
+    pub fn token_tree(self) -> Option<TokenTreeRef<'a>> {
         match self.entry() {
-            Some(Entry::Leaf(tt)) => Some(tt),
-            Some(Entry::Subtree(tt, _)) => Some(tt),
+            Some(Entry::Leaf(tt)) => match tt {
+                TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, *tt)),
+                TokenTree::Subtree(subtree) => Some(TokenTreeRef::Subtree(subtree, Some(tt))),
+            },
+            Some(Entry::Subtree(tt, subtree, _)) => Some(TokenTreeRef::Subtree(subtree, *tt)),
             Some(Entry::End(_)) => None,
             None => None,
         }
@@ -172,7 +219,7 @@ impl<'a> Cursor<'a> {
    /// a cursor into that subtree
     pub fn bump_subtree(self) -> Cursor<'a> {
         match self.entry() {
-            Some(Entry::Subtree(_, _)) => self.subtree().unwrap(),
+            Some(Entry::Subtree(_, _, _)) => self.subtree().unwrap(),
             _ => self.bump(),
         }
     }