Diffstat (limited to 'crates')
29 files changed, 642 insertions, 476 deletions
diff --git a/crates/assists/src/handlers/extract_assignment.rs b/crates/assists/src/handlers/pull_assignment_up.rs index 281cf5d24..560d93e10 100644 --- a/crates/assists/src/handlers/extract_assignment.rs +++ b/crates/assists/src/handlers/pull_assignment_up.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | use hir::AsName; | ||
2 | use syntax::{ | 1 | use syntax::{ |
3 | ast::{self, edit::AstNodeEdit, make}, | 2 | ast::{self, edit::AstNodeEdit, make}, |
4 | AstNode, | 3 | AstNode, |
@@ -10,9 +9,9 @@ use crate::{ | |||
10 | AssistId, AssistKind, | 9 | AssistId, AssistKind, |
11 | }; | 10 | }; |
12 | 11 | ||
13 | // Assist: extract_assignment | 12 | // Assist: pull_assignment_up |
14 | // | 13 | // |
15 | // Extracts variable assigment to outside an if or match statement. | 14 | // Extracts variable assignment to outside an if or match statement. |
16 | // | 15 | // |
17 | // ``` | 16 | // ``` |
18 | // fn main() { | 17 | // fn main() { |
@@ -37,16 +36,24 @@ use crate::{ | |||
37 | // }; | 36 | // }; |
38 | // } | 37 | // } |
39 | // ``` | 38 | // ``` |
40 | pub(crate) fn extract_assigment(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 39 | pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
41 | let name = ctx.find_node_at_offset::<ast::NameRef>()?.as_name(); | 40 | let assign_expr = ctx.find_node_at_offset::<ast::BinExpr>()?; |
41 | let name_expr = if assign_expr.op_kind()? == ast::BinOp::Assignment { | ||
42 | assign_expr.lhs()? | ||
43 | } else { | ||
44 | return None; | ||
45 | }; | ||
42 | 46 | ||
43 | let (old_stmt, new_stmt) = if let Some(if_expr) = ctx.find_node_at_offset::<ast::IfExpr>() { | 47 | let (old_stmt, new_stmt) = if let Some(if_expr) = ctx.find_node_at_offset::<ast::IfExpr>() { |
44 | ( | 48 | ( |
45 | ast::Expr::cast(if_expr.syntax().to_owned())?, | 49 | ast::Expr::cast(if_expr.syntax().to_owned())?, |
46 | exprify_if(&if_expr, &name)?.indent(if_expr.indent_level()), | 50 | exprify_if(&if_expr, &ctx.sema, &name_expr)?.indent(if_expr.indent_level()), |
47 | ) | 51 | ) |
48 | } else if let Some(match_expr) = ctx.find_node_at_offset::<ast::MatchExpr>() { | 52 | } else if let Some(match_expr) = ctx.find_node_at_offset::<ast::MatchExpr>() { |
49 | (ast::Expr::cast(match_expr.syntax().to_owned())?, exprify_match(&match_expr, &name)?) | 53 | ( |
54 | ast::Expr::cast(match_expr.syntax().to_owned())?, | ||
55 | exprify_match(&match_expr, &ctx.sema, &name_expr)?, | ||
56 | ) | ||
50 | } else { | 57 | } else { |
51 | return None; | 58 | return None; |
52 | }; | 59 | }; |
@@ -54,22 +61,26 @@ pub(crate) fn extract_assigment(acc: &mut Assists, ctx: &AssistContext) -> Optio | |||
54 | let expr_stmt = make::expr_stmt(new_stmt); | 61 | let expr_stmt = make::expr_stmt(new_stmt); |
55 | 62 | ||
56 | acc.add( | 63 | acc.add( |
57 | AssistId("extract_assignment", AssistKind::RefactorExtract), | 64 | AssistId("pull_assignment_up", AssistKind::RefactorExtract), |
58 | "Extract assignment", | 65 | "Pull assignment up", |
59 | old_stmt.syntax().text_range(), | 66 | old_stmt.syntax().text_range(), |
60 | move |edit| { | 67 | move |edit| { |
61 | edit.replace(old_stmt.syntax().text_range(), format!("{} = {};", name, expr_stmt)); | 68 | edit.replace(old_stmt.syntax().text_range(), format!("{} = {};", name_expr, expr_stmt)); |
62 | }, | 69 | }, |
63 | ) | 70 | ) |
64 | } | 71 | } |
65 | 72 | ||
66 | fn exprify_match(match_expr: &ast::MatchExpr, name: &hir::Name) -> Option<ast::Expr> { | 73 | fn exprify_match( |
74 | match_expr: &ast::MatchExpr, | ||
75 | sema: &hir::Semantics<ide_db::RootDatabase>, | ||
76 | name: &ast::Expr, | ||
77 | ) -> Option<ast::Expr> { | ||
67 | let new_arm_list = match_expr | 78 | let new_arm_list = match_expr |
68 | .match_arm_list()? | 79 | .match_arm_list()? |
69 | .arms() | 80 | .arms() |
70 | .map(|arm| { | 81 | .map(|arm| { |
71 | if let ast::Expr::BlockExpr(block) = arm.expr()? { | 82 | if let ast::Expr::BlockExpr(block) = arm.expr()? { |
72 | let new_block = exprify_block(&block, name)?.indent(block.indent_level()); | 83 | let new_block = exprify_block(&block, sema, name)?.indent(block.indent_level()); |
73 | Some(arm.replace_descendant(block, new_block)) | 84 | Some(arm.replace_descendant(block, new_block)) |
74 | } else { | 85 | } else { |
75 | None | 86 | None |
@@ -82,21 +93,31 @@ fn exprify_match(match_expr: &ast::MatchExpr, name: &hir::Name) -> Option<ast::E | |||
82 | Some(make::expr_match(match_expr.expr()?, new_arm_list)) | 93 | Some(make::expr_match(match_expr.expr()?, new_arm_list)) |
83 | } | 94 | } |
84 | 95 | ||
85 | fn exprify_if(statement: &ast::IfExpr, name: &hir::Name) -> Option<ast::Expr> { | 96 | fn exprify_if( |
86 | let then_branch = exprify_block(&statement.then_branch()?, name)?; | 97 | statement: &ast::IfExpr, |
98 | sema: &hir::Semantics<ide_db::RootDatabase>, | ||
99 | name: &ast::Expr, | ||
100 | ) -> Option<ast::Expr> { | ||
101 | let then_branch = exprify_block(&statement.then_branch()?, sema, name)?; | ||
87 | let else_branch = match statement.else_branch()? { | 102 | let else_branch = match statement.else_branch()? { |
88 | ast::ElseBranch::Block(ref block) => ast::ElseBranch::Block(exprify_block(block, name)?), | 103 | ast::ElseBranch::Block(ref block) => { |
104 | ast::ElseBranch::Block(exprify_block(block, sema, name)?) | ||
105 | } | ||
89 | ast::ElseBranch::IfExpr(expr) => { | 106 | ast::ElseBranch::IfExpr(expr) => { |
90 | mark::hit!(test_extract_assigment_chained_if); | 107 | mark::hit!(test_pull_assignment_up_chained_if); |
91 | ast::ElseBranch::IfExpr(ast::IfExpr::cast( | 108 | ast::ElseBranch::IfExpr(ast::IfExpr::cast( |
92 | exprify_if(&expr, name)?.syntax().to_owned(), | 109 | exprify_if(&expr, sema, name)?.syntax().to_owned(), |
93 | )?) | 110 | )?) |
94 | } | 111 | } |
95 | }; | 112 | }; |
96 | Some(make::expr_if(statement.condition()?, then_branch, Some(else_branch))) | 113 | Some(make::expr_if(statement.condition()?, then_branch, Some(else_branch))) |
97 | } | 114 | } |
98 | 115 | ||
99 | fn exprify_block(block: &ast::BlockExpr, name: &hir::Name) -> Option<ast::BlockExpr> { | 116 | fn exprify_block( |
117 | block: &ast::BlockExpr, | ||
118 | sema: &hir::Semantics<ide_db::RootDatabase>, | ||
119 | name: &ast::Expr, | ||
120 | ) -> Option<ast::BlockExpr> { | ||
100 | if block.expr().is_some() { | 121 | if block.expr().is_some() { |
101 | return None; | 122 | return None; |
102 | } | 123 | } |
@@ -106,8 +127,7 @@ fn exprify_block(block: &ast::BlockExpr, name: &hir::Name) -> Option<ast::BlockE | |||
106 | 127 | ||
107 | if let ast::Stmt::ExprStmt(stmt) = stmt { | 128 | if let ast::Stmt::ExprStmt(stmt) = stmt { |
108 | if let ast::Expr::BinExpr(expr) = stmt.expr()? { | 129 | if let ast::Expr::BinExpr(expr) = stmt.expr()? { |
109 | if expr.op_kind()? == ast::BinOp::Assignment | 130 | if expr.op_kind()? == ast::BinOp::Assignment && is_equivalent(sema, &expr.lhs()?, name) |
110 | && &expr.lhs()?.name_ref()?.as_name() == name | ||
111 | { | 131 | { |
112 | // The last statement in the block is an assignment to the name we want | 132 | // The last statement in the block is an assignment to the name we want |
113 | return Some(make::block_expr(stmts, Some(expr.rhs()?))); | 133 | return Some(make::block_expr(stmts, Some(expr.rhs()?))); |
@@ -117,6 +137,29 @@ fn exprify_block(block: &ast::BlockExpr, name: &hir::Name) -> Option<ast::BlockE | |||
117 | None | 137 | None |
118 | } | 138 | } |
119 | 139 | ||
140 | fn is_equivalent( | ||
141 | sema: &hir::Semantics<ide_db::RootDatabase>, | ||
142 | expr0: &ast::Expr, | ||
143 | expr1: &ast::Expr, | ||
144 | ) -> bool { | ||
145 | match (expr0, expr1) { | ||
146 | (ast::Expr::FieldExpr(field_expr0), ast::Expr::FieldExpr(field_expr1)) => { | ||
147 | mark::hit!(test_pull_assignment_up_field_assignment); | ||
148 | sema.resolve_field(field_expr0) == sema.resolve_field(field_expr1) | ||
149 | } | ||
150 | (ast::Expr::PathExpr(path0), ast::Expr::PathExpr(path1)) => { | ||
151 | let path0 = path0.path(); | ||
152 | let path1 = path1.path(); | ||
153 | if let (Some(path0), Some(path1)) = (path0, path1) { | ||
154 | sema.resolve_path(&path0) == sema.resolve_path(&path1) | ||
155 | } else { | ||
156 | false | ||
157 | } | ||
158 | } | ||
159 | _ => false, | ||
160 | } | ||
161 | } | ||
162 | |||
120 | #[cfg(test)] | 163 | #[cfg(test)] |
121 | mod tests { | 164 | mod tests { |
122 | use super::*; | 165 | use super::*; |
@@ -124,9 +167,9 @@ mod tests { | |||
124 | use crate::tests::{check_assist, check_assist_not_applicable}; | 167 | use crate::tests::{check_assist, check_assist_not_applicable}; |
125 | 168 | ||
126 | #[test] | 169 | #[test] |
127 | fn test_extract_assignment_if() { | 170 | fn test_pull_assignment_up_if() { |
128 | check_assist( | 171 | check_assist( |
129 | extract_assigment, | 172 | pull_assignment_up, |
130 | r#" | 173 | r#" |
131 | fn foo() { | 174 | fn foo() { |
132 | let mut a = 1; | 175 | let mut a = 1; |
@@ -151,9 +194,9 @@ fn foo() { | |||
151 | } | 194 | } |
152 | 195 | ||
153 | #[test] | 196 | #[test] |
154 | fn test_extract_assignment_match() { | 197 | fn test_pull_assignment_up_match() { |
155 | check_assist( | 198 | check_assist( |
156 | extract_assigment, | 199 | pull_assignment_up, |
157 | r#" | 200 | r#" |
158 | fn foo() { | 201 | fn foo() { |
159 | let mut a = 1; | 202 | let mut a = 1; |
@@ -190,9 +233,9 @@ fn foo() { | |||
190 | } | 233 | } |
191 | 234 | ||
192 | #[test] | 235 | #[test] |
193 | fn test_extract_assignment_not_last_not_applicable() { | 236 | fn test_pull_assignment_up_not_last_not_applicable() { |
194 | check_assist_not_applicable( | 237 | check_assist_not_applicable( |
195 | extract_assigment, | 238 | pull_assignment_up, |
196 | r#" | 239 | r#" |
197 | fn foo() { | 240 | fn foo() { |
198 | let mut a = 1; | 241 | let mut a = 1; |
@@ -208,10 +251,10 @@ fn foo() { | |||
208 | } | 251 | } |
209 | 252 | ||
210 | #[test] | 253 | #[test] |
211 | fn test_extract_assignment_chained_if() { | 254 | fn test_pull_assignment_up_chained_if() { |
212 | mark::check!(test_extract_assigment_chained_if); | 255 | mark::check!(test_pull_assignment_up_chained_if); |
213 | check_assist( | 256 | check_assist( |
214 | extract_assigment, | 257 | pull_assignment_up, |
215 | r#" | 258 | r#" |
216 | fn foo() { | 259 | fn foo() { |
217 | let mut a = 1; | 260 | let mut a = 1; |
@@ -240,9 +283,9 @@ fn foo() { | |||
240 | } | 283 | } |
241 | 284 | ||
242 | #[test] | 285 | #[test] |
243 | fn test_extract_assigment_retains_stmts() { | 286 | fn test_pull_assignment_up_retains_stmts() { |
244 | check_assist( | 287 | check_assist( |
245 | extract_assigment, | 288 | pull_assignment_up, |
246 | r#" | 289 | r#" |
247 | fn foo() { | 290 | fn foo() { |
248 | let mut a = 1; | 291 | let mut a = 1; |
@@ -271,9 +314,9 @@ fn foo() { | |||
271 | } | 314 | } |
272 | 315 | ||
273 | #[test] | 316 | #[test] |
274 | fn extract_assignment_let_stmt_not_applicable() { | 317 | fn pull_assignment_up_let_stmt_not_applicable() { |
275 | check_assist_not_applicable( | 318 | check_assist_not_applicable( |
276 | extract_assigment, | 319 | pull_assignment_up, |
277 | r#" | 320 | r#" |
278 | fn foo() { | 321 | fn foo() { |
279 | let mut a = 1; | 322 | let mut a = 1; |
@@ -288,9 +331,9 @@ fn foo() { | |||
288 | } | 331 | } |
289 | 332 | ||
290 | #[test] | 333 | #[test] |
291 | fn extract_assignment_if_missing_assigment_not_applicable() { | 334 | fn pull_assignment_up_if_missing_assigment_not_applicable() { |
292 | check_assist_not_applicable( | 335 | check_assist_not_applicable( |
293 | extract_assigment, | 336 | pull_assignment_up, |
294 | r#" | 337 | r#" |
295 | fn foo() { | 338 | fn foo() { |
296 | let mut a = 1; | 339 | let mut a = 1; |
@@ -303,9 +346,9 @@ fn foo() { | |||
303 | } | 346 | } |
304 | 347 | ||
305 | #[test] | 348 | #[test] |
306 | fn extract_assignment_match_missing_assigment_not_applicable() { | 349 | fn pull_assignment_up_match_missing_assigment_not_applicable() { |
307 | check_assist_not_applicable( | 350 | check_assist_not_applicable( |
308 | extract_assigment, | 351 | pull_assignment_up, |
309 | r#" | 352 | r#" |
310 | fn foo() { | 353 | fn foo() { |
311 | let mut a = 1; | 354 | let mut a = 1; |
@@ -322,4 +365,36 @@ fn foo() { | |||
322 | }"#, | 365 | }"#, |
323 | ) | 366 | ) |
324 | } | 367 | } |
368 | |||
369 | #[test] | ||
370 | fn test_pull_assignment_up_field_assignment() { | ||
371 | mark::check!(test_pull_assignment_up_field_assignment); | ||
372 | check_assist( | ||
373 | pull_assignment_up, | ||
374 | r#" | ||
375 | struct A(usize); | ||
376 | |||
377 | fn foo() { | ||
378 | let mut a = A(1); | ||
379 | |||
380 | if true { | ||
381 | <|>a.0 = 2; | ||
382 | } else { | ||
383 | a.0 = 3; | ||
384 | } | ||
385 | }"#, | ||
386 | r#" | ||
387 | struct A(usize); | ||
388 | |||
389 | fn foo() { | ||
390 | let mut a = A(1); | ||
391 | |||
392 | a.0 = if true { | ||
393 | 2 | ||
394 | } else { | ||
395 | 3 | ||
396 | }; | ||
397 | }"#, | ||
398 | ) | ||
399 | } | ||
325 | } | 400 | } |
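The key change above is that the old name-based comparison (`expr.lhs()?.name_ref()?.as_name() == name`) is replaced by the semantic `is_equivalent` helper, so the assist now fires only when every branch assigns to the same resolved place, and that place may be a field as well as a plain binding. A hand-worked illustration of the transformation for a `match` with a field target follows; the `Point`/`flag` names are invented for the example and the output is a sketch of the expected shape, not generated test output.

```rust
struct Point(i32, i32);

// Before invoking the assist with the cursor on `p.0 = 2;`:
fn before(flag: bool) -> Point {
    let mut p = Point(0, 0);
    match flag {
        true => {
            p.0 = 2;
        }
        false => {
            p.0 = 3;
        }
    }
    p
}

// Roughly the shape the assist produces: the assignment is pulled above the
// `match`, and each arm block now ends in the value that used to be assigned.
fn after(flag: bool) -> Point {
    let mut p = Point(0, 0);
    p.0 = match flag {
        true => { 2 }
        false => { 3 }
    };
    p
}
```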
diff --git a/crates/assists/src/lib.rs b/crates/assists/src/lib.rs index 212464f85..01baa65fe 100644 --- a/crates/assists/src/lib.rs +++ b/crates/assists/src/lib.rs | |||
@@ -116,7 +116,6 @@ mod handlers { | |||
116 | mod convert_integer_literal; | 116 | mod convert_integer_literal; |
117 | mod early_return; | 117 | mod early_return; |
118 | mod expand_glob_import; | 118 | mod expand_glob_import; |
119 | mod extract_assignment; | ||
120 | mod extract_module_to_file; | 119 | mod extract_module_to_file; |
121 | mod extract_struct_from_enum_variant; | 120 | mod extract_struct_from_enum_variant; |
122 | mod extract_variable; | 121 | mod extract_variable; |
@@ -125,8 +124,8 @@ mod handlers { | |||
125 | mod flip_binexpr; | 124 | mod flip_binexpr; |
126 | mod flip_comma; | 125 | mod flip_comma; |
127 | mod flip_trait_bound; | 126 | mod flip_trait_bound; |
128 | mod generate_derive; | ||
129 | mod generate_default_from_enum_variant; | 127 | mod generate_default_from_enum_variant; |
128 | mod generate_derive; | ||
130 | mod generate_from_impl_for_enum; | 129 | mod generate_from_impl_for_enum; |
131 | mod generate_function; | 130 | mod generate_function; |
132 | mod generate_impl; | 131 | mod generate_impl; |
@@ -139,6 +138,7 @@ mod handlers { | |||
139 | mod merge_match_arms; | 138 | mod merge_match_arms; |
140 | mod move_bounds; | 139 | mod move_bounds; |
141 | mod move_guard; | 140 | mod move_guard; |
141 | mod pull_assignment_up; | ||
142 | mod qualify_path; | 142 | mod qualify_path; |
143 | mod raw_string; | 143 | mod raw_string; |
144 | mod remove_dbg; | 144 | mod remove_dbg; |
@@ -168,7 +168,6 @@ mod handlers { | |||
168 | convert_integer_literal::convert_integer_literal, | 168 | convert_integer_literal::convert_integer_literal, |
169 | early_return::convert_to_guarded_return, | 169 | early_return::convert_to_guarded_return, |
170 | expand_glob_import::expand_glob_import, | 170 | expand_glob_import::expand_glob_import, |
171 | extract_assignment::extract_assigment, | ||
172 | extract_module_to_file::extract_module_to_file, | 171 | extract_module_to_file::extract_module_to_file, |
173 | extract_struct_from_enum_variant::extract_struct_from_enum_variant, | 172 | extract_struct_from_enum_variant::extract_struct_from_enum_variant, |
174 | extract_variable::extract_variable, | 173 | extract_variable::extract_variable, |
@@ -177,8 +176,8 @@ mod handlers { | |||
177 | flip_binexpr::flip_binexpr, | 176 | flip_binexpr::flip_binexpr, |
178 | flip_comma::flip_comma, | 177 | flip_comma::flip_comma, |
179 | flip_trait_bound::flip_trait_bound, | 178 | flip_trait_bound::flip_trait_bound, |
180 | generate_derive::generate_derive, | ||
181 | generate_default_from_enum_variant::generate_default_from_enum_variant, | 179 | generate_default_from_enum_variant::generate_default_from_enum_variant, |
180 | generate_derive::generate_derive, | ||
182 | generate_from_impl_for_enum::generate_from_impl_for_enum, | 181 | generate_from_impl_for_enum::generate_from_impl_for_enum, |
183 | generate_function::generate_function, | 182 | generate_function::generate_function, |
184 | generate_impl::generate_impl, | 183 | generate_impl::generate_impl, |
@@ -192,6 +191,7 @@ mod handlers { | |||
192 | move_bounds::move_bounds_to_where_clause, | 191 | move_bounds::move_bounds_to_where_clause, |
193 | move_guard::move_arm_cond_to_match_guard, | 192 | move_guard::move_arm_cond_to_match_guard, |
194 | move_guard::move_guard_to_arm_body, | 193 | move_guard::move_guard_to_arm_body, |
194 | pull_assignment_up::pull_assignment_up, | ||
195 | qualify_path::qualify_path, | 195 | qualify_path::qualify_path, |
196 | raw_string::add_hash, | 196 | raw_string::add_hash, |
197 | raw_string::make_usual_string, | 197 | raw_string::make_usual_string, |
diff --git a/crates/assists/src/tests/generated.rs b/crates/assists/src/tests/generated.rs index b91a816e8..85e3c6742 100644 --- a/crates/assists/src/tests/generated.rs +++ b/crates/assists/src/tests/generated.rs | |||
@@ -238,35 +238,6 @@ fn qux(bar: Bar, baz: Baz) {} | |||
238 | } | 238 | } |
239 | 239 | ||
240 | #[test] | 240 | #[test] |
241 | fn doctest_extract_assignment() { | ||
242 | check_doc_test( | ||
243 | "extract_assignment", | ||
244 | r#####" | ||
245 | fn main() { | ||
246 | let mut foo = 6; | ||
247 | |||
248 | if true { | ||
249 | <|>foo = 5; | ||
250 | } else { | ||
251 | foo = 4; | ||
252 | } | ||
253 | } | ||
254 | "#####, | ||
255 | r#####" | ||
256 | fn main() { | ||
257 | let mut foo = 6; | ||
258 | |||
259 | foo = if true { | ||
260 | 5 | ||
261 | } else { | ||
262 | 4 | ||
263 | }; | ||
264 | } | ||
265 | "#####, | ||
266 | ) | ||
267 | } | ||
268 | |||
269 | #[test] | ||
270 | fn doctest_extract_module_to_file() { | 241 | fn doctest_extract_module_to_file() { |
271 | check_doc_test( | 242 | check_doc_test( |
272 | "extract_module_to_file", | 243 | "extract_module_to_file", |
@@ -767,6 +738,35 @@ fn handle(action: Action) { | |||
767 | } | 738 | } |
768 | 739 | ||
769 | #[test] | 740 | #[test] |
741 | fn doctest_pull_assignment_up() { | ||
742 | check_doc_test( | ||
743 | "pull_assignment_up", | ||
744 | r#####" | ||
745 | fn main() { | ||
746 | let mut foo = 6; | ||
747 | |||
748 | if true { | ||
749 | <|>foo = 5; | ||
750 | } else { | ||
751 | foo = 4; | ||
752 | } | ||
753 | } | ||
754 | "#####, | ||
755 | r#####" | ||
756 | fn main() { | ||
757 | let mut foo = 6; | ||
758 | |||
759 | foo = if true { | ||
760 | 5 | ||
761 | } else { | ||
762 | 4 | ||
763 | }; | ||
764 | } | ||
765 | "#####, | ||
766 | ) | ||
767 | } | ||
768 | |||
769 | #[test] | ||
770 | fn doctest_qualify_path() { | 770 | fn doctest_qualify_path() { |
771 | check_doc_test( | 771 | check_doc_test( |
772 | "qualify_path", | 772 | "qualify_path", |
diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml index c68e391c1..73247d130 100644 --- a/crates/cfg/Cargo.toml +++ b/crates/cfg/Cargo.toml | |||
@@ -17,4 +17,4 @@ tt = { path = "../tt", version = "0.0.0" } | |||
17 | [dev-dependencies] | 17 | [dev-dependencies] |
18 | mbe = { path = "../mbe" } | 18 | mbe = { path = "../mbe" } |
19 | syntax = { path = "../syntax" } | 19 | syntax = { path = "../syntax" } |
20 | expect-test = "1.0" | 20 | expect-test = "1.1" |
diff --git a/crates/completion/Cargo.toml b/crates/completion/Cargo.toml index 78e93e78e..99a1bdd54 100644 --- a/crates/completion/Cargo.toml +++ b/crates/completion/Cargo.toml | |||
@@ -28,4 +28,4 @@ test_utils = { path = "../test_utils", version = "0.0.0" } | |||
28 | hir = { path = "../hir", version = "0.0.0" } | 28 | hir = { path = "../hir", version = "0.0.0" } |
29 | 29 | ||
30 | [dev-dependencies] | 30 | [dev-dependencies] |
31 | expect-test = "1.0" | 31 | expect-test = "1.1" |
diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs index 62eccf475..071e553a8 100644 --- a/crates/hir/src/code_model.rs +++ b/crates/hir/src/code_model.rs | |||
@@ -5,9 +5,7 @@ use arrayvec::ArrayVec; | |||
5 | use base_db::{CrateDisplayName, CrateId, Edition, FileId}; | 5 | use base_db::{CrateDisplayName, CrateId, Edition, FileId}; |
6 | use either::Either; | 6 | use either::Either; |
7 | use hir_def::{ | 7 | use hir_def::{ |
8 | adt::ReprKind, | 8 | adt::{ReprKind, StructKind, VariantData}, |
9 | adt::StructKind, | ||
10 | adt::VariantData, | ||
11 | builtin_type::BuiltinType, | 9 | builtin_type::BuiltinType, |
12 | expr::{BindingAnnotation, LabelId, Pat, PatId}, | 10 | expr::{BindingAnnotation, LabelId, Pat, PatId}, |
13 | import_map, | 11 | import_map, |
@@ -31,7 +29,7 @@ use hir_expand::{ | |||
31 | }; | 29 | }; |
32 | use hir_ty::{ | 30 | use hir_ty::{ |
33 | autoderef, | 31 | autoderef, |
34 | display::{HirDisplayError, HirFormatter}, | 32 | display::{write_bounds_like_dyn_trait, HirDisplayError, HirFormatter}, |
35 | method_resolution, | 33 | method_resolution, |
36 | traits::{FnTrait, Solution, SolutionVariables}, | 34 | traits::{FnTrait, Solution, SolutionVariables}, |
37 | ApplicationTy, BoundVar, CallableDefId, Canonical, DebruijnIndex, FnSig, GenericPredicate, | 35 | ApplicationTy, BoundVar, CallableDefId, Canonical, DebruijnIndex, FnSig, GenericPredicate, |
@@ -1278,6 +1276,18 @@ impl TypeParam { | |||
1278 | } | 1276 | } |
1279 | } | 1277 | } |
1280 | 1278 | ||
1279 | pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec<Trait> { | ||
1280 | db.generic_predicates_for_param(self.id) | ||
1281 | .into_iter() | ||
1282 | .filter_map(|pred| match &pred.value { | ||
1283 | hir_ty::GenericPredicate::Implemented(trait_ref) => { | ||
1284 | Some(Trait::from(trait_ref.trait_)) | ||
1285 | } | ||
1286 | _ => None, | ||
1287 | }) | ||
1288 | .collect() | ||
1289 | } | ||
1290 | |||
1281 | pub fn default(self, db: &dyn HirDatabase) -> Option<Type> { | 1291 | pub fn default(self, db: &dyn HirDatabase) -> Option<Type> { |
1282 | let params = db.generic_defaults(self.id.parent); | 1292 | let params = db.generic_defaults(self.id.parent); |
1283 | let local_idx = hir_ty::param_idx(db, self.id)?; | 1293 | let local_idx = hir_ty::param_idx(db, self.id)?; |
@@ -1293,6 +1303,20 @@ impl TypeParam { | |||
1293 | } | 1303 | } |
1294 | } | 1304 | } |
1295 | 1305 | ||
1306 | impl HirDisplay for TypeParam { | ||
1307 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | ||
1308 | write!(f, "{}", self.name(f.db))?; | ||
1309 | let bounds = f.db.generic_predicates_for_param(self.id); | ||
1310 | let substs = Substs::type_params(f.db, self.id.parent); | ||
1311 | let predicates = bounds.iter().cloned().map(|b| b.subst(&substs)).collect::<Vec<_>>(); | ||
1312 | if !(predicates.is_empty() || f.omit_verbose_types()) { | ||
1313 | write!(f, ": ")?; | ||
1314 | write_bounds_like_dyn_trait(&predicates, f)?; | ||
1315 | } | ||
1316 | Ok(()) | ||
1317 | } | ||
1318 | } | ||
1319 | |||
1296 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] | 1320 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] |
1297 | pub struct LifetimeParam { | 1321 | pub struct LifetimeParam { |
1298 | pub(crate) id: LifetimeParamId, | 1322 | pub(crate) id: LifetimeParamId, |
@@ -1331,6 +1355,12 @@ impl ConstParam { | |||
1331 | pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef { | 1355 | pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef { |
1332 | self.id.parent.into() | 1356 | self.id.parent.into() |
1333 | } | 1357 | } |
1358 | |||
1359 | pub fn ty(self, db: &dyn HirDatabase) -> Type { | ||
1360 | let def = self.id.parent; | ||
1361 | let krate = def.module(db.upcast()).krate; | ||
1362 | Type::new(db, krate, def, db.const_param_ty(self.id)) | ||
1363 | } | ||
1334 | } | 1364 | } |
1335 | 1365 | ||
1336 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 1366 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
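`TypeParam::trait_bounds` collects only the `Implemented` predicates of a parameter, and the new `HirDisplay` impl prints them after the parameter name unless verbose types are omitted. Restating the fixture of the `hover_type_param` test added later in this commit, hovering `T` in the impl below is expected to render `T: Copy + Clone + Sized` (where-clause bounds included; lifetime bounds are not substituted yet):

```rust
struct Foo<T>(T);
trait Copy {}
trait Clone {}
trait Sized {}

// Hovering `T` here shows the parameter together with all of its collected bounds.
impl<T: Copy + Clone> Foo<T> where T: Sized {}
```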
diff --git a/crates/hir_def/Cargo.toml b/crates/hir_def/Cargo.toml index e8b581e2f..7ef966cd2 100644 --- a/crates/hir_def/Cargo.toml +++ b/crates/hir_def/Cargo.toml | |||
@@ -33,4 +33,4 @@ cfg = { path = "../cfg", version = "0.0.0" } | |||
33 | tt = { path = "../tt", version = "0.0.0" } | 33 | tt = { path = "../tt", version = "0.0.0" } |
34 | 34 | ||
35 | [dev-dependencies] | 35 | [dev-dependencies] |
36 | expect-test = "1.0" | 36 | expect-test = "1.1" |
diff --git a/crates/hir_def/src/diagnostics.rs b/crates/hir_def/src/diagnostics.rs index c71266dc0..ab3f059ce 100644 --- a/crates/hir_def/src/diagnostics.rs +++ b/crates/hir_def/src/diagnostics.rs | |||
@@ -133,6 +133,10 @@ impl Diagnostic for InactiveCode { | |||
133 | // This diagnostic is shown when a procedural macro can not be found. This usually means that | 133 | // This diagnostic is shown when a procedural macro can not be found. This usually means that |
134 | // procedural macro support is simply disabled (and hence is only a weak hint instead of an error), | 134 | // procedural macro support is simply disabled (and hence is only a weak hint instead of an error), |
135 | // but can also indicate project setup problems. | 135 | // but can also indicate project setup problems. |
136 | // | ||
137 | // If you are seeing a lot of "proc macro not expanded" warnings, you can add this option to the | ||
138 | // `rust-analyzer.diagnostics.disabled` list to prevent them from showing. Alternatively you can | ||
139 | // enable support for procedural macros (see `rust-analyzer.procMacro.enable`). | ||
136 | #[derive(Debug, Clone, Eq, PartialEq)] | 140 | #[derive(Debug, Clone, Eq, PartialEq)] |
137 | pub struct UnresolvedProcMacro { | 141 | pub struct UnresolvedProcMacro { |
138 | pub file: HirFileId, | 142 | pub file: HirFileId, |
diff --git a/crates/hir_def/src/path/lower.rs b/crates/hir_def/src/path/lower.rs index 9518ac109..8a01e6eea 100644 --- a/crates/hir_def/src/path/lower.rs +++ b/crates/hir_def/src/path/lower.rs | |||
@@ -123,7 +123,7 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path> | |||
123 | // We follow what it did anyway :) | 123 | // We follow what it did anyway :) |
124 | if segments.len() == 1 && kind == PathKind::Plain { | 124 | if segments.len() == 1 && kind == PathKind::Plain { |
125 | if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { | 125 | if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { |
126 | if let Some(crate_id) = hygiene.local_inner_macros(path) { | 126 | if let Some(crate_id) = hygiene.local_inner_macros() { |
127 | kind = PathKind::DollarCrate(crate_id); | 127 | kind = PathKind::DollarCrate(crate_id); |
128 | } | 128 | } |
129 | } | 129 | } |
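`Hygiene::local_inner_macros` no longer needs the path argument: the simplified hygiene (see the `hygiene.rs` changes below) only remembers whether the defining macro was exported with `local_inner_macros` and, if so, which crate `$crate` points at, so single-segment macro calls inside the expansion are lowered as `PathKind::DollarCrate`. For readers unfamiliar with the language feature this serves, here is a minimal sketch; the crate and macro names are invented:

```rust
// In a library crate, say `foo`: `local_inner_macros` makes the unqualified
// `helper!()` inside the expansion behave as `$crate::helper!()`, so callers
// of `outer!` do not need to have `helper!` in scope.
#[macro_export(local_inner_macros)]
macro_rules! outer {
    () => { helper!() };
}

#[macro_export]
macro_rules! helper {
    () => { 0u32 };
}

// In a downstream crate:
// fn main() { let _x: u32 = foo::outer!(); }
```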
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs index 6042e15b2..7ab0a5e52 100644 --- a/crates/hir_expand/src/hygiene.rs +++ b/crates/hir_expand/src/hygiene.rs | |||
@@ -2,94 +2,30 @@ | |||
2 | //! | 2 | //! |
3 | //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at | 3 | //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at |
4 | //! this moment, this is horribly incomplete and handles only `$crate`. | 4 | //! this moment, this is horribly incomplete and handles only `$crate`. |
5 | use std::sync::Arc; | ||
6 | |||
7 | use arena::{Arena, Idx}; | ||
8 | use base_db::CrateId; | 5 | use base_db::CrateId; |
9 | use either::Either; | 6 | use either::Either; |
10 | use mbe::Origin; | 7 | use syntax::ast; |
11 | use syntax::{ast, AstNode}; | ||
12 | 8 | ||
13 | use crate::{ | 9 | use crate::{ |
14 | db::AstDatabase, | 10 | db::AstDatabase, |
15 | name::{AsName, Name}, | 11 | name::{AsName, Name}, |
16 | ExpansionInfo, HirFileId, HirFileIdRepr, MacroCallId, MacroDefKind, | 12 | HirFileId, HirFileIdRepr, MacroCallId, MacroDefKind, |
17 | }; | 13 | }; |
18 | 14 | ||
19 | #[derive(Clone, Debug)] | 15 | #[derive(Clone, Debug)] |
20 | pub struct Hygiene { | 16 | pub struct Hygiene { |
21 | frames: Option<Arc<HygieneFrames>>, | 17 | // This is what `$crate` expands to |
22 | } | 18 | def_crate: Option<CrateId>, |
23 | |||
24 | impl Hygiene { | ||
25 | pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene { | ||
26 | Hygiene { frames: Some(Arc::new(HygieneFrames::new(db, file_id.clone()))) } | ||
27 | } | ||
28 | |||
29 | pub fn new_unhygienic() -> Hygiene { | ||
30 | Hygiene { frames: None } | ||
31 | } | ||
32 | |||
33 | // FIXME: this should just return name | ||
34 | pub fn name_ref_to_name(&self, name_ref: ast::NameRef) -> Either<Name, CrateId> { | ||
35 | if let Some(frames) = &self.frames { | ||
36 | if name_ref.text() == "$crate" { | ||
37 | if let Some(krate) = frames.root_crate(&name_ref) { | ||
38 | return Either::Right(krate); | ||
39 | } | ||
40 | } | ||
41 | } | ||
42 | |||
43 | Either::Left(name_ref.as_name()) | ||
44 | } | ||
45 | |||
46 | pub fn local_inner_macros(&self, path: ast::Path) -> Option<CrateId> { | ||
47 | let frames = self.frames.as_ref()?; | ||
48 | |||
49 | let mut token = path.syntax().first_token()?; | ||
50 | let mut current = frames.first(); | ||
51 | |||
52 | while let Some((frame, data)) = | ||
53 | current.and_then(|it| Some((it, it.expansion.as_ref()?.map_token_up(&token)?))) | ||
54 | { | ||
55 | let (mapped, origin) = data; | ||
56 | if origin == Origin::Def { | ||
57 | return if frame.local_inner { frame.krate } else { None }; | ||
58 | } | ||
59 | current = Some(&frames.0[frame.call_site?]); | ||
60 | token = mapped.value; | ||
61 | } | ||
62 | None | ||
63 | } | ||
64 | } | ||
65 | |||
66 | #[derive(Default, Debug)] | ||
67 | struct HygieneFrames(Arena<HygieneFrame>); | ||
68 | |||
69 | #[derive(Clone, Debug)] | ||
70 | struct HygieneFrame { | ||
71 | expansion: Option<ExpansionInfo>, | ||
72 | 19 | ||
73 | // Indicate this is a local inner macro | 20 | // Indicate this is a local inner macro |
74 | local_inner: bool, | 21 | local_inner: bool, |
75 | krate: Option<CrateId>, | ||
76 | |||
77 | call_site: Option<Idx<HygieneFrame>>, | ||
78 | def_site: Option<Idx<HygieneFrame>>, | ||
79 | } | 22 | } |
80 | 23 | ||
81 | impl HygieneFrames { | 24 | impl Hygiene { |
82 | fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Self { | 25 | pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene { |
83 | let mut frames = HygieneFrames::default(); | 26 | let (def_crate, local_inner) = match file_id.0 { |
84 | frames.add(db, file_id); | ||
85 | frames | ||
86 | } | ||
87 | |||
88 | fn add(&mut self, db: &dyn AstDatabase, file_id: HirFileId) -> Option<Idx<HygieneFrame>> { | ||
89 | let (krate, local_inner) = match file_id.0 { | ||
90 | HirFileIdRepr::FileId(_) => (None, false), | 27 | HirFileIdRepr::FileId(_) => (None, false), |
91 | HirFileIdRepr::MacroFile(macro_file) => match macro_file.macro_call_id { | 28 | HirFileIdRepr::MacroFile(macro_file) => match macro_file.macro_call_id { |
92 | MacroCallId::EagerMacro(_id) => (None, false), | ||
93 | MacroCallId::LazyMacro(id) => { | 29 | MacroCallId::LazyMacro(id) => { |
94 | let loc = db.lookup_intern_macro(id); | 30 | let loc = db.lookup_intern_macro(id); |
95 | match loc.def.kind { | 31 | match loc.def.kind { |
@@ -100,68 +36,31 @@ impl HygieneFrames { | |||
100 | MacroDefKind::ProcMacro(_) => (None, false), | 36 | MacroDefKind::ProcMacro(_) => (None, false), |
101 | } | 37 | } |
102 | } | 38 | } |
39 | MacroCallId::EagerMacro(_id) => (None, false), | ||
103 | }, | 40 | }, |
104 | }; | 41 | }; |
105 | 42 | Hygiene { def_crate, local_inner } | |
106 | let expansion = file_id.expansion_info(db); | ||
107 | let expansion = match expansion { | ||
108 | None => { | ||
109 | return Some(self.0.alloc(HygieneFrame { | ||
110 | expansion: None, | ||
111 | local_inner, | ||
112 | krate, | ||
113 | call_site: None, | ||
114 | def_site: None, | ||
115 | })); | ||
116 | } | ||
117 | Some(it) => it, | ||
118 | }; | ||
119 | |||
120 | let def_site = expansion.def.clone(); | ||
121 | let call_site = expansion.arg.file_id; | ||
122 | let idx = self.0.alloc(HygieneFrame { | ||
123 | expansion: Some(expansion), | ||
124 | local_inner, | ||
125 | krate, | ||
126 | call_site: None, | ||
127 | def_site: None, | ||
128 | }); | ||
129 | |||
130 | self.0[idx].call_site = self.add(db, call_site); | ||
131 | self.0[idx].def_site = def_site.and_then(|it| self.add(db, it.file_id)); | ||
132 | |||
133 | Some(idx) | ||
134 | } | 43 | } |
135 | 44 | ||
136 | fn first(&self) -> Option<&HygieneFrame> { | 45 | pub fn new_unhygienic() -> Hygiene { |
137 | self.0.iter().next().map(|it| it.1) | 46 | Hygiene { def_crate: None, local_inner: false } |
138 | } | 47 | } |
139 | 48 | ||
140 | fn root_crate(&self, name_ref: &ast::NameRef) -> Option<CrateId> { | 49 | // FIXME: this should just return name |
141 | let mut token = name_ref.syntax().first_token()?; | 50 | pub fn name_ref_to_name(&self, name_ref: ast::NameRef) -> Either<Name, CrateId> { |
142 | let first = self.first()?; | 51 | if let Some(def_crate) = self.def_crate { |
143 | let mut result = first.krate; | 52 | if name_ref.text() == "$crate" { |
144 | let mut current = Some(first); | 53 | return Either::Right(def_crate); |
145 | 54 | } | |
146 | while let Some((frame, (mapped, origin))) = | ||
147 | current.and_then(|it| Some((it, it.expansion.as_ref()?.map_token_up(&token)?))) | ||
148 | { | ||
149 | result = frame.krate; | ||
150 | |||
151 | let site = match origin { | ||
152 | Origin::Def => frame.def_site, | ||
153 | Origin::Call => frame.call_site, | ||
154 | }; | ||
155 | |||
156 | let site = match site { | ||
157 | None => break, | ||
158 | Some(it) => it, | ||
159 | }; | ||
160 | |||
161 | current = Some(&self.0[site]); | ||
162 | token = mapped.value; | ||
163 | } | 55 | } |
56 | Either::Left(name_ref.as_name()) | ||
57 | } | ||
164 | 58 | ||
165 | result | 59 | pub fn local_inner_macros(&self) -> Option<CrateId> { |
60 | if self.local_inner { | ||
61 | self.def_crate | ||
62 | } else { | ||
63 | None | ||
64 | } | ||
166 | } | 65 | } |
167 | } | 66 | } |
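The rewrite above collapses the frame arena into two fields: `def_crate`, the crate that `$crate` expands to, and the `local_inner` flag. As the module doc says, this handles only `$crate`; the frame-walking that previously tracked `$crate` introduced at a macro's definition site is gone, which lines up with the removal of the `infer_macro_with_dollar_crate_in_def_site` test further down in this diff. A minimal sketch of the one case the simplified struct still covers (crate and item names are invented):

```rust
// Library crate `foo`:
pub const ANSWER: u32 = 42;

#[macro_export]
macro_rules! answer {
    // `$crate` must resolve to `foo` even when the macro is invoked from
    // another crate; `Hygiene::def_crate` records exactly that.
    () => { $crate::ANSWER };
}

// Downstream crate:
// fn main() { assert_eq!(foo::answer!(), 42); }
```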
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index 5b6734a5f..3fa1b1d77 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs | |||
@@ -340,8 +340,11 @@ impl ExpansionInfo { | |||
340 | Some(self.expanded.with_value(token)) | 340 | Some(self.expanded.with_value(token)) |
341 | } | 341 | } |
342 | 342 | ||
343 | pub fn map_token_up(&self, token: &SyntaxToken) -> Option<(InFile<SyntaxToken>, Origin)> { | 343 | pub fn map_token_up( |
344 | let token_id = self.exp_map.token_by_range(token.text_range())?; | 344 | &self, |
345 | token: InFile<&SyntaxToken>, | ||
346 | ) -> Option<(InFile<SyntaxToken>, Origin)> { | ||
347 | let token_id = self.exp_map.token_by_range(token.value.text_range())?; | ||
345 | 348 | ||
346 | let (token_id, origin) = self.macro_def.0.map_id_up(token_id); | 349 | let (token_id, origin) = self.macro_def.0.map_id_up(token_id); |
347 | let (token_map, tt) = match origin { | 350 | let (token_map, tt) = match origin { |
@@ -356,7 +359,7 @@ impl ExpansionInfo { | |||
356 | ), | 359 | ), |
357 | }; | 360 | }; |
358 | 361 | ||
359 | let range = token_map.range_by_token(token_id)?.by_kind(token.kind())?; | 362 | let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?; |
360 | let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start()) | 363 | let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start()) |
361 | .into_token()?; | 364 | .into_token()?; |
362 | Some((tt.with_value(token), origin)) | 365 | Some((tt.with_value(token), origin)) |
@@ -492,7 +495,7 @@ fn ascend_call_token( | |||
492 | expansion: &ExpansionInfo, | 495 | expansion: &ExpansionInfo, |
493 | token: InFile<SyntaxToken>, | 496 | token: InFile<SyntaxToken>, |
494 | ) -> Option<InFile<SyntaxToken>> { | 497 | ) -> Option<InFile<SyntaxToken>> { |
495 | let (mapped, origin) = expansion.map_token_up(&token.value)?; | 498 | let (mapped, origin) = expansion.map_token_up(token.as_ref())?; |
496 | if origin != Origin::Call { | 499 | if origin != Origin::Call { |
497 | return None; | 500 | return None; |
498 | } | 501 | } |
diff --git a/crates/hir_expand/src/proc_macro.rs b/crates/hir_expand/src/proc_macro.rs index 7c77f6ce0..1923daca5 100644 --- a/crates/hir_expand/src/proc_macro.rs +++ b/crates/hir_expand/src/proc_macro.rs | |||
@@ -58,7 +58,7 @@ impl ProcMacroExpander { | |||
58 | } | 58 | } |
59 | 59 | ||
60 | fn eat_punct(cursor: &mut Cursor, c: char) -> bool { | 60 | fn eat_punct(cursor: &mut Cursor, c: char) -> bool { |
61 | if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = cursor.token_tree() { | 61 | if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = cursor.token_tree() { |
62 | if punct.char == c { | 62 | if punct.char == c { |
63 | *cursor = cursor.bump(); | 63 | *cursor = cursor.bump(); |
64 | return true; | 64 | return true; |
@@ -68,7 +68,7 @@ fn eat_punct(cursor: &mut Cursor, c: char) -> bool { | |||
68 | } | 68 | } |
69 | 69 | ||
70 | fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool { | 70 | fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool { |
71 | if let Some(tt::TokenTree::Subtree(subtree)) = cursor.token_tree() { | 71 | if let Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) = cursor.token_tree() { |
72 | if Some(kind) == subtree.delimiter_kind() { | 72 | if Some(kind) == subtree.delimiter_kind() { |
73 | *cursor = cursor.bump_subtree(); | 73 | *cursor = cursor.bump_subtree(); |
74 | return true; | 74 | return true; |
@@ -78,7 +78,7 @@ fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool { | |||
78 | } | 78 | } |
79 | 79 | ||
80 | fn eat_ident(cursor: &mut Cursor, t: &str) -> bool { | 80 | fn eat_ident(cursor: &mut Cursor, t: &str) -> bool { |
81 | if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = cursor.token_tree() { | 81 | if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) = cursor.token_tree() { |
82 | if t == ident.text.as_str() { | 82 | if t == ident.text.as_str() { |
83 | *cursor = cursor.bump(); | 83 | *cursor = cursor.bump(); |
84 | return true; | 84 | return true; |
@@ -88,7 +88,7 @@ fn eat_ident(cursor: &mut Cursor, t: &str) -> bool { | |||
88 | } | 88 | } |
89 | 89 | ||
90 | fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> { | 90 | fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> { |
91 | let buffer = TokenBuffer::new(&tt.token_trees); | 91 | let buffer = TokenBuffer::from_tokens(&tt.token_trees); |
92 | let mut p = buffer.begin(); | 92 | let mut p = buffer.begin(); |
93 | let mut result = tt::Subtree::default(); | 93 | let mut result = tt::Subtree::default(); |
94 | 94 | ||
@@ -106,7 +106,7 @@ fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> { | |||
106 | } | 106 | } |
107 | } | 107 | } |
108 | 108 | ||
109 | result.token_trees.push(curr.token_tree()?.clone()); | 109 | result.token_trees.push(curr.token_tree()?.cloned()); |
110 | p = curr.bump(); | 110 | p = curr.bump(); |
111 | } | 111 | } |
112 | 112 | ||
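The `proc_macro.rs` changes are mechanical fallout of the new `TokenBuffer` API: `from_tokens` replaces `new`, the cursor now yields `TokenTreeRef` values, and cloning is explicit via `.cloned()`. What `remove_derive_attrs` does is unchanged: it strips the `#[derive(...)]` attributes from the item before the token stream reaches a derive expander, mirroring what rustc does for derive macro input. A small illustration with an invented item name:

```rust
// What the user writes:
#[derive(Copy, Clone)]
struct Foo {
    field: i32,
}

// What the derive expander is handed after the derive attributes are
// stripped: just the item itself, i.e.
//
//     struct Foo {
//         field: i32,
//     }
fn main() {}
```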
diff --git a/crates/hir_ty/Cargo.toml b/crates/hir_ty/Cargo.toml index 2dfccd191..b0a453961 100644 --- a/crates/hir_ty/Cargo.toml +++ b/crates/hir_ty/Cargo.toml | |||
@@ -17,9 +17,9 @@ ena = "0.14.0" | |||
17 | log = "0.4.8" | 17 | log = "0.4.8" |
18 | rustc-hash = "1.1.0" | 18 | rustc-hash = "1.1.0" |
19 | scoped-tls = "1" | 19 | scoped-tls = "1" |
20 | chalk-solve = { version = "0.45", default-features = false } | 20 | chalk-solve = { version = "0.47", default-features = false } |
21 | chalk-ir = "0.45" | 21 | chalk-ir = "0.47" |
22 | chalk-recursive = "0.45" | 22 | chalk-recursive = "0.47" |
23 | 23 | ||
24 | stdx = { path = "../stdx", version = "0.0.0" } | 24 | stdx = { path = "../stdx", version = "0.0.0" } |
25 | hir_def = { path = "../hir_def", version = "0.0.0" } | 25 | hir_def = { path = "../hir_def", version = "0.0.0" } |
@@ -31,7 +31,7 @@ syntax = { path = "../syntax", version = "0.0.0" } | |||
31 | test_utils = { path = "../test_utils", version = "0.0.0" } | 31 | test_utils = { path = "../test_utils", version = "0.0.0" } |
32 | 32 | ||
33 | [dev-dependencies] | 33 | [dev-dependencies] |
34 | expect-test = "1.0" | 34 | expect-test = "1.1" |
35 | tracing = "0.1" | 35 | tracing = "0.1" |
36 | tracing-subscriber = { version = "0.2", default-features = false, features = ["env-filter", "registry"] } | 36 | tracing-subscriber = { version = "0.2", default-features = false, features = ["env-filter", "registry"] } |
37 | tracing-tree = { version = "0.1.4" } | 37 | tracing-tree = { version = "0.1.4" } |
diff --git a/crates/hir_ty/src/display.rs b/crates/hir_ty/src/display.rs index 0e827a29e..a54225c18 100644 --- a/crates/hir_ty/src/display.rs +++ b/crates/hir_ty/src/display.rs | |||
@@ -595,7 +595,7 @@ impl HirDisplay for FnSig { | |||
595 | } | 595 | } |
596 | } | 596 | } |
597 | 597 | ||
598 | fn write_bounds_like_dyn_trait( | 598 | pub fn write_bounds_like_dyn_trait( |
599 | predicates: &[GenericPredicate], | 599 | predicates: &[GenericPredicate], |
600 | f: &mut HirFormatter, | 600 | f: &mut HirFormatter, |
601 | ) -> Result<(), HirDisplayError> { | 601 | ) -> Result<(), HirDisplayError> { |
diff --git a/crates/hir_ty/src/tests/macros.rs b/crates/hir_ty/src/tests/macros.rs index c64f0b5b5..1953da7be 100644 --- a/crates/hir_ty/src/tests/macros.rs +++ b/crates/hir_ty/src/tests/macros.rs | |||
@@ -371,37 +371,6 @@ expand!(); | |||
371 | } | 371 | } |
372 | 372 | ||
373 | #[test] | 373 | #[test] |
374 | fn infer_macro_with_dollar_crate_in_def_site() { | ||
375 | check_types( | ||
376 | r#" | ||
377 | //- /main.rs crate:main deps:foo | ||
378 | use foo::expand; | ||
379 | |||
380 | macro_rules! list { | ||
381 | ($($tt:tt)*) => { $($tt)* } | ||
382 | } | ||
383 | |||
384 | fn test() { | ||
385 | let r = expand!(); | ||
386 | r; | ||
387 | //^ u128 | ||
388 | } | ||
389 | |||
390 | //- /lib.rs crate:foo | ||
391 | #[macro_export] | ||
392 | macro_rules! expand { | ||
393 | () => { list!($crate::m!()) }; | ||
394 | } | ||
395 | |||
396 | #[macro_export] | ||
397 | macro_rules! m { | ||
398 | () => { 0u128 }; | ||
399 | } | ||
400 | "#, | ||
401 | ); | ||
402 | } | ||
403 | |||
404 | #[test] | ||
405 | fn infer_type_value_non_legacy_macro_use_as() { | 374 | fn infer_type_value_non_legacy_macro_use_as() { |
406 | check_infer( | 375 | check_infer( |
407 | r#" | 376 | r#" |
diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml index f1544dbe0..bb28cca4d 100644 --- a/crates/ide/Cargo.toml +++ b/crates/ide/Cargo.toml | |||
@@ -36,4 +36,4 @@ completion = { path = "../completion", version = "0.0.0" } | |||
36 | hir = { path = "../hir", version = "0.0.0" } | 36 | hir = { path = "../hir", version = "0.0.0" } |
37 | 37 | ||
38 | [dev-dependencies] | 38 | [dev-dependencies] |
39 | expect-test = "1.0" | 39 | expect-test = "1.1" |
diff --git a/crates/ide/src/display/short_label.rs b/crates/ide/src/display/short_label.rs index ea49d9f97..990f740b8 100644 --- a/crates/ide/src/display/short_label.rs +++ b/crates/ide/src/display/short_label.rs | |||
@@ -87,6 +87,17 @@ impl ShortLabel for ast::Variant { | |||
87 | } | 87 | } |
88 | } | 88 | } |
89 | 89 | ||
90 | impl ShortLabel for ast::ConstParam { | ||
91 | fn short_label(&self) -> Option<String> { | ||
92 | let mut buf = "const ".to_owned(); | ||
93 | buf.push_str(self.name()?.text().as_str()); | ||
94 | if let Some(type_ref) = self.ty() { | ||
95 | format_to!(buf, ": {}", type_ref.syntax()); | ||
96 | } | ||
97 | Some(buf) | ||
98 | } | ||
99 | } | ||
100 | |||
90 | fn short_label_from_ty<T>(node: &T, ty: Option<ast::Type>, prefix: &str) -> Option<String> | 101 | fn short_label_from_ty<T>(node: &T, ty: Option<ast::Type>, prefix: &str) -> Option<String> |
91 | where | 102 | where |
92 | T: NameOwner + VisibilityOwner, | 103 | T: NameOwner + VisibilityOwner, |
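The new `ShortLabel for ast::ConstParam` builds its label directly from the syntax: the `const` keyword, the parameter name, and the type if one is written. For the declaration below the label comes out as `const LEN: usize`, matching the `hover_const_param` test added later in this diff:

```rust
// Hovering `LEN` yields the short label "const LEN: usize".
struct Foo<const LEN: usize>;

fn main() {
    let _value: Foo<3> = Foo::<3>;
}
```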
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 2737c900f..f2ad95cb6 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs | |||
@@ -70,7 +70,7 @@ impl HoverConfig { | |||
70 | #[derive(Debug, Clone)] | 70 | #[derive(Debug, Clone)] |
71 | pub enum HoverAction { | 71 | pub enum HoverAction { |
72 | Runnable(Runnable), | 72 | Runnable(Runnable), |
73 | Implementaion(FilePosition), | 73 | Implementation(FilePosition), |
74 | GoToType(Vec<HoverGotoTypeData>), | 74 | GoToType(Vec<HoverGotoTypeData>), |
75 | } | 75 | } |
76 | 76 | ||
@@ -116,12 +116,13 @@ pub(crate) fn hover( | |||
116 | }; | 116 | }; |
117 | if let Some(definition) = definition { | 117 | if let Some(definition) = definition { |
118 | if let Some(markup) = hover_for_definition(db, definition) { | 118 | if let Some(markup) = hover_for_definition(db, definition) { |
119 | let markup = markup.as_str(); | ||
119 | let markup = if !markdown { | 120 | let markup = if !markdown { |
120 | remove_markdown(&markup.as_str()) | 121 | remove_markdown(markup) |
121 | } else if links_in_hover { | 122 | } else if links_in_hover { |
122 | rewrite_links(db, &markup.as_str(), &definition) | 123 | rewrite_links(db, markup, &definition) |
123 | } else { | 124 | } else { |
124 | remove_links(&markup.as_str()) | 125 | remove_links(markup) |
125 | }; | 126 | }; |
126 | res.markup = Markup::from(markup); | 127 | res.markup = Markup::from(markup); |
127 | if let Some(action) = show_implementations_action(db, definition) { | 128 | if let Some(action) = show_implementations_action(db, definition) { |
@@ -175,22 +176,24 @@ pub(crate) fn hover( | |||
175 | 176 | ||
176 | fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> { | 177 | fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> { |
177 | fn to_action(nav_target: NavigationTarget) -> HoverAction { | 178 | fn to_action(nav_target: NavigationTarget) -> HoverAction { |
178 | HoverAction::Implementaion(FilePosition { | 179 | HoverAction::Implementation(FilePosition { |
179 | file_id: nav_target.file_id, | 180 | file_id: nav_target.file_id, |
180 | offset: nav_target.focus_or_full_range().start(), | 181 | offset: nav_target.focus_or_full_range().start(), |
181 | }) | 182 | }) |
182 | } | 183 | } |
183 | 184 | ||
184 | match def { | 185 | let adt = match def { |
185 | Definition::ModuleDef(it) => match it { | 186 | Definition::ModuleDef(ModuleDef::Trait(it)) => return it.try_to_nav(db).map(to_action), |
186 | ModuleDef::Adt(Adt::Struct(it)) => Some(to_action(it.try_to_nav(db)?)), | 187 | Definition::ModuleDef(ModuleDef::Adt(it)) => Some(it), |
187 | ModuleDef::Adt(Adt::Union(it)) => Some(to_action(it.try_to_nav(db)?)), | 188 | Definition::SelfType(it) => it.target_ty(db).as_adt(), |
188 | ModuleDef::Adt(Adt::Enum(it)) => Some(to_action(it.try_to_nav(db)?)), | ||
189 | ModuleDef::Trait(it) => Some(to_action(it.try_to_nav(db)?)), | ||
190 | _ => None, | ||
191 | }, | ||
192 | _ => None, | 189 | _ => None, |
190 | }?; | ||
191 | match adt { | ||
192 | Adt::Struct(it) => it.try_to_nav(db), | ||
193 | Adt::Union(it) => it.try_to_nav(db), | ||
194 | Adt::Enum(it) => it.try_to_nav(db), | ||
193 | } | 195 | } |
196 | .map(to_action) | ||
194 | } | 197 | } |
195 | 198 | ||
196 | fn runnable_action( | 199 | fn runnable_action( |
@@ -225,45 +228,46 @@ fn runnable_action( | |||
225 | } | 228 | } |
226 | 229 | ||
227 | fn goto_type_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> { | 230 | fn goto_type_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> { |
228 | match def { | 231 | let mut targets: Vec<ModuleDef> = Vec::new(); |
229 | Definition::Local(it) => { | 232 | let mut push_new_def = |item: ModuleDef| { |
230 | let mut targets: Vec<ModuleDef> = Vec::new(); | 233 | if !targets.contains(&item) { |
231 | let mut push_new_def = |item: ModuleDef| { | 234 | targets.push(item); |
232 | if !targets.contains(&item) { | ||
233 | targets.push(item); | ||
234 | } | ||
235 | }; | ||
236 | |||
237 | it.ty(db).walk(db, |t| { | ||
238 | if let Some(adt) = t.as_adt() { | ||
239 | push_new_def(adt.into()); | ||
240 | } else if let Some(trait_) = t.as_dyn_trait() { | ||
241 | push_new_def(trait_.into()); | ||
242 | } else if let Some(traits) = t.as_impl_traits(db) { | ||
243 | traits.into_iter().for_each(|it| push_new_def(it.into())); | ||
244 | } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { | ||
245 | push_new_def(trait_.into()); | ||
246 | } | ||
247 | }); | ||
248 | |||
249 | let targets = targets | ||
250 | .into_iter() | ||
251 | .filter_map(|it| { | ||
252 | Some(HoverGotoTypeData { | ||
253 | mod_path: render_path( | ||
254 | db, | ||
255 | it.module(db)?, | ||
256 | it.name(db).map(|name| name.to_string()), | ||
257 | ), | ||
258 | nav: it.try_to_nav(db)?, | ||
259 | }) | ||
260 | }) | ||
261 | .collect(); | ||
262 | |||
263 | Some(HoverAction::GoToType(targets)) | ||
264 | } | 235 | } |
265 | _ => None, | 236 | }; |
237 | |||
238 | if let Definition::TypeParam(it) = def { | ||
239 | it.trait_bounds(db).into_iter().for_each(|it| push_new_def(it.into())); | ||
240 | } else { | ||
241 | let ty = match def { | ||
242 | Definition::Local(it) => it.ty(db), | ||
243 | Definition::ConstParam(it) => it.ty(db), | ||
244 | _ => return None, | ||
245 | }; | ||
246 | |||
247 | ty.walk(db, |t| { | ||
248 | if let Some(adt) = t.as_adt() { | ||
249 | push_new_def(adt.into()); | ||
250 | } else if let Some(trait_) = t.as_dyn_trait() { | ||
251 | push_new_def(trait_.into()); | ||
252 | } else if let Some(traits) = t.as_impl_traits(db) { | ||
253 | traits.into_iter().for_each(|it| push_new_def(it.into())); | ||
254 | } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { | ||
255 | push_new_def(trait_.into()); | ||
256 | } | ||
257 | }); | ||
266 | } | 258 | } |
259 | |||
260 | let targets = targets | ||
261 | .into_iter() | ||
262 | .filter_map(|it| { | ||
263 | Some(HoverGotoTypeData { | ||
264 | mod_path: render_path(db, it.module(db)?, it.name(db).map(|name| name.to_string())), | ||
265 | nav: it.try_to_nav(db)?, | ||
266 | }) | ||
267 | }) | ||
268 | .collect(); | ||
269 | |||
270 | Some(HoverAction::GoToType(targets)) | ||
267 | } | 271 | } |
268 | 272 | ||
269 | fn hover_markup( | 273 | fn hover_markup( |
@@ -370,10 +374,8 @@ fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option<Markup> { | |||
370 | } | 374 | } |
371 | Definition::Label(it) => Some(Markup::fenced_block(&it.name(db))), | 375 | Definition::Label(it) => Some(Markup::fenced_block(&it.name(db))), |
372 | Definition::LifetimeParam(it) => Some(Markup::fenced_block(&it.name(db))), | 376 | Definition::LifetimeParam(it) => Some(Markup::fenced_block(&it.name(db))), |
373 | Definition::TypeParam(_) | Definition::ConstParam(_) => { | 377 | Definition::TypeParam(type_param) => Some(Markup::fenced_block(&type_param.display(db))), |
374 | // FIXME: Hover for generic param | 378 | Definition::ConstParam(it) => from_def_source(db, it, None), |
375 | None | ||
376 | } | ||
377 | }; | 379 | }; |
378 | 380 | ||
379 | fn from_def_source<A, D>(db: &RootDatabase, def: D, mod_path: Option<String>) -> Option<Markup> | 381 | fn from_def_source<A, D>(db: &RootDatabase, def: D, mod_path: Option<String>) -> Option<Markup> |
@@ -1393,7 +1395,7 @@ fn bar() { fo<|>o(); } | |||
1393 | r"unsafe trait foo<|>() {}", | 1395 | r"unsafe trait foo<|>() {}", |
1394 | expect![[r#" | 1396 | expect![[r#" |
1395 | [ | 1397 | [ |
1396 | Implementaion( | 1398 | Implementation( |
1397 | FilePosition { | 1399 | FilePosition { |
1398 | file_id: FileId( | 1400 | file_id: FileId( |
1399 | 0, | 1401 | 0, |
@@ -2105,7 +2107,7 @@ fn foo() { let bar = Bar; bar.fo<|>o(); } | |||
2105 | r#"trait foo<|>() {}"#, | 2107 | r#"trait foo<|>() {}"#, |
2106 | expect![[r#" | 2108 | expect![[r#" |
2107 | [ | 2109 | [ |
2108 | Implementaion( | 2110 | Implementation( |
2109 | FilePosition { | 2111 | FilePosition { |
2110 | file_id: FileId( | 2112 | file_id: FileId( |
2111 | 0, | 2113 | 0, |
@@ -2124,7 +2126,7 @@ fn foo() { let bar = Bar; bar.fo<|>o(); } | |||
2124 | r"struct foo<|>() {}", | 2126 | r"struct foo<|>() {}", |
2125 | expect![[r#" | 2127 | expect![[r#" |
2126 | [ | 2128 | [ |
2127 | Implementaion( | 2129 | Implementation( |
2128 | FilePosition { | 2130 | FilePosition { |
2129 | file_id: FileId( | 2131 | file_id: FileId( |
2130 | 0, | 2132 | 0, |
@@ -2143,7 +2145,7 @@ fn foo() { let bar = Bar; bar.fo<|>o(); } | |||
2143 | r#"union foo<|>() {}"#, | 2145 | r#"union foo<|>() {}"#, |
2144 | expect![[r#" | 2146 | expect![[r#" |
2145 | [ | 2147 | [ |
2146 | Implementaion( | 2148 | Implementation( |
2147 | FilePosition { | 2149 | FilePosition { |
2148 | file_id: FileId( | 2150 | file_id: FileId( |
2149 | 0, | 2151 | 0, |
@@ -2162,7 +2164,7 @@ fn foo() { let bar = Bar; bar.fo<|>o(); } | |||
2162 | r"enum foo<|>() { A, B }", | 2164 | r"enum foo<|>() { A, B }", |
2163 | expect![[r#" | 2165 | expect![[r#" |
2164 | [ | 2166 | [ |
2165 | Implementaion( | 2167 | Implementation( |
2166 | FilePosition { | 2168 | FilePosition { |
2167 | file_id: FileId( | 2169 | file_id: FileId( |
2168 | 0, | 2170 | 0, |
@@ -2176,6 +2178,25 @@ fn foo() { let bar = Bar; bar.fo<|>o(); } | |||
2176 | } | 2178 | } |
2177 | 2179 | ||
2178 | #[test] | 2180 | #[test] |
2181 | fn test_hover_self_has_impl_action() { | ||
2182 | check_actions( | ||
2183 | r#"struct foo where Self<|>:;"#, | ||
2184 | expect![[r#" | ||
2185 | [ | ||
2186 | Implementation( | ||
2187 | FilePosition { | ||
2188 | file_id: FileId( | ||
2189 | 0, | ||
2190 | ), | ||
2191 | offset: 7, | ||
2192 | }, | ||
2193 | ), | ||
2194 | ] | ||
2195 | "#]], | ||
2196 | ); | ||
2197 | } | ||
2198 | |||
2199 | #[test] | ||
2179 | fn test_hover_test_has_action() { | 2200 | fn test_hover_test_has_action() { |
2180 | check_actions( | 2201 | check_actions( |
2181 | r#" | 2202 | r#" |
@@ -3064,6 +3085,71 @@ fn main() { let s<|>t = test().get(); } | |||
3064 | } | 3085 | } |
3065 | 3086 | ||
3066 | #[test] | 3087 | #[test] |
3088 | fn test_hover_const_param_has_goto_type_action() { | ||
3089 | check_actions( | ||
3090 | r#" | ||
3091 | struct Bar; | ||
3092 | struct Foo<const BAR: Bar>; | ||
3093 | |||
3094 | impl<const BAR: Bar> Foo<BAR<|>> {} | ||
3095 | "#, | ||
3096 | expect![[r#" | ||
3097 | [ | ||
3098 | GoToType( | ||
3099 | [ | ||
3100 | HoverGotoTypeData { | ||
3101 | mod_path: "test::Bar", | ||
3102 | nav: NavigationTarget { | ||
3103 | file_id: FileId( | ||
3104 | 0, | ||
3105 | ), | ||
3106 | full_range: 0..11, | ||
3107 | focus_range: 7..10, | ||
3108 | name: "Bar", | ||
3109 | kind: Struct, | ||
3110 | description: "struct Bar", | ||
3111 | }, | ||
3112 | }, | ||
3113 | ], | ||
3114 | ), | ||
3115 | ] | ||
3116 | "#]], | ||
3117 | ); | ||
3118 | } | ||
3119 | |||
3120 | #[test] | ||
3121 | fn test_hover_type_param_has_goto_type_action() { | ||
3122 | check_actions( | ||
3123 | r#" | ||
3124 | trait Foo {} | ||
3125 | |||
3126 | fn foo<T: Foo>(t: T<|>){} | ||
3127 | "#, | ||
3128 | expect![[r#" | ||
3129 | [ | ||
3130 | GoToType( | ||
3131 | [ | ||
3132 | HoverGotoTypeData { | ||
3133 | mod_path: "test::Foo", | ||
3134 | nav: NavigationTarget { | ||
3135 | file_id: FileId( | ||
3136 | 0, | ||
3137 | ), | ||
3138 | full_range: 0..12, | ||
3139 | focus_range: 6..9, | ||
3140 | name: "Foo", | ||
3141 | kind: Trait, | ||
3142 | description: "trait Foo", | ||
3143 | }, | ||
3144 | }, | ||
3145 | ], | ||
3146 | ), | ||
3147 | ] | ||
3148 | "#]], | ||
3149 | ); | ||
3150 | } | ||
3151 | |||
3152 | #[test] | ||
3067 | fn hover_displays_normalized_crate_names() { | 3153 | fn hover_displays_normalized_crate_names() { |
3068 | check( | 3154 | check( |
3069 | r#" | 3155 | r#" |
@@ -3257,4 +3343,68 @@ fn foo() { | |||
3257 | "#]], | 3343 | "#]], |
3258 | ); | 3344 | ); |
3259 | } | 3345 | } |
3346 | |||
3347 | #[test] | ||
3348 | fn hover_type_param() { | ||
3349 | check( | ||
3350 | r#" | ||
3351 | struct Foo<T>(T); | ||
3352 | trait Copy {} | ||
3353 | trait Clone {} | ||
3354 | trait Sized {} | ||
3355 | impl<T: Copy + Clone> Foo<T<|>> where T: Sized {} | ||
3356 | "#, | ||
3357 | expect![[r#" | ||
3358 | *T* | ||
3359 | |||
3360 | ```rust | ||
3361 | T: Copy + Clone + Sized | ||
3362 | ``` | ||
3363 | "#]], | ||
3364 | ); | ||
3365 | check( | ||
3366 | r#" | ||
3367 | struct Foo<T>(T); | ||
3368 | impl<T> Foo<T<|>> {} | ||
3369 | "#, | ||
3370 | expect![[r#" | ||
3371 | *T* | ||
3372 | |||
3373 | ```rust | ||
3374 | T | ||
3375 | ``` | ||
3376 | "#]], | ||
3377 | ); | ||
3378 | // lifetimes aren't being substituted yet | ||
3379 | check( | ||
3380 | r#" | ||
3381 | struct Foo<T>(T); | ||
3382 | impl<T: 'static> Foo<T<|>> {} | ||
3383 | "#, | ||
3384 | expect![[r#" | ||
3385 | *T* | ||
3386 | |||
3387 | ```rust | ||
3388 | T: {error} | ||
3389 | ``` | ||
3390 | "#]], | ||
3391 | ); | ||
3392 | } | ||
3393 | |||
3394 | #[test] | ||
3395 | fn hover_const_param() { | ||
3396 | check( | ||
3397 | r#" | ||
3398 | struct Foo<const LEN: usize>; | ||
3399 | impl<const LEN: usize> Foo<LEN<|>> {} | ||
3400 | "#, | ||
3401 | expect![[r#" | ||
3402 | *LEN* | ||
3403 | |||
3404 | ```rust | ||
3405 | const LEN: usize | ||
3406 | ``` | ||
3407 | "#]], | ||
3408 | ); | ||
3409 | } | ||
3260 | } | 3410 | } |
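Note on the hover tests added above: they exercise hover on type parameters and on const generic parameters. As a hedged, standalone illustration of the scenarios the fixtures encode (plain Rust outside the test harness; the type names are illustrative):

// Hovering `T` in `Foo<T>` should show its bounds (`T: Copy + Clone + Sized`
// in the first test above); hovering `LEN` should show `const LEN: usize`.
struct Foo<T>(T);
struct Counter<const LEN: usize>;

impl<T: Copy + Clone> Foo<T> where T: Sized {}
impl<const LEN: usize> Counter<LEN> {}

fn main() {}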
diff --git a/crates/ide_db/Cargo.toml b/crates/ide_db/Cargo.toml index ebe53c8ee..d3d3dc688 100644 --- a/crates/ide_db/Cargo.toml +++ b/crates/ide_db/Cargo.toml | |||
@@ -32,4 +32,4 @@ test_utils = { path = "../test_utils", version = "0.0.0" } | |||
32 | hir = { path = "../hir", version = "0.0.0" } | 32 | hir = { path = "../hir", version = "0.0.0" } |
33 | 33 | ||
34 | [dev-dependencies] | 34 | [dev-dependencies] |
35 | expect-test = "1.0" | 35 | expect-test = "1.1" |
diff --git a/crates/mbe/src/mbe_expander/matcher.rs b/crates/mbe/src/mbe_expander/matcher.rs index 385b46601..fdc8844ce 100644 --- a/crates/mbe/src/mbe_expander/matcher.rs +++ b/crates/mbe/src/mbe_expander/matcher.rs | |||
@@ -150,7 +150,7 @@ fn match_subtree( | |||
150 | res.add_err(err!("leftover tokens")); | 150 | res.add_err(err!("leftover tokens")); |
151 | } | 151 | } |
152 | } | 152 | } |
153 | Op::Var { name, kind, .. } => { | 153 | Op::Var { name, kind } => { |
154 | let kind = match kind { | 154 | let kind = match kind { |
155 | Some(k) => k, | 155 | Some(k) => k, |
156 | None => { | 156 | None => { |
@@ -309,7 +309,7 @@ impl<'a> TtIter<'a> { | |||
309 | } | 309 | } |
310 | } | 310 | } |
311 | 311 | ||
312 | let buffer = TokenBuffer::new(&self.inner.as_slice()); | 312 | let buffer = TokenBuffer::from_tokens(&self.inner.as_slice()); |
313 | let mut src = SubtreeTokenSource::new(&buffer); | 313 | let mut src = SubtreeTokenSource::new(&buffer); |
314 | let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false }; | 314 | let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false }; |
315 | 315 | ||
@@ -336,11 +336,11 @@ impl<'a> TtIter<'a> { | |||
336 | err = Some(err!("no tokens consumed")); | 336 | err = Some(err!("no tokens consumed")); |
337 | } | 337 | } |
338 | let res = match res.len() { | 338 | let res = match res.len() { |
339 | 1 => Some(res[0].clone()), | 339 | 1 => Some(res[0].cloned()), |
340 | 0 => None, | 340 | 0 => None, |
341 | _ => Some(tt::TokenTree::Subtree(tt::Subtree { | 341 | _ => Some(tt::TokenTree::Subtree(tt::Subtree { |
342 | delimiter: None, | 342 | delimiter: None, |
343 | token_trees: res.into_iter().cloned().collect(), | 343 | token_trees: res.into_iter().map(|it| it.cloned()).collect(), |
344 | })), | 344 | })), |
345 | }; | 345 | }; |
346 | ExpandResult { value: res, err } | 346 | ExpandResult { value: res, err } |
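Two mechanical consequences of this matcher hunk: `Op::Var` no longer binds an `id`, and `res` now collects `tt::buffer::TokenTreeRef` views instead of `&tt::TokenTree`, so owned trees are rebuilt with `cloned()`. A hedged sketch of that rebuild step in isolation, with `views` standing in for `res` and the same `tt` types as above:

// Borrowed views into a TokenBuffer are turned back into owned trees.
fn rebuild(views: Vec<tt::buffer::TokenTreeRef<'_>>) -> Option<tt::TokenTree> {
    match views.len() {
        0 => None,
        1 => Some(views[0].cloned()),
        // Several consumed trees get wrapped in a delimiter-less subtree.
        _ => Some(tt::TokenTree::Subtree(tt::Subtree {
            delimiter: None,
            token_trees: views.into_iter().map(|it| it.cloned()).collect(),
        })),
    }
}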
diff --git a/crates/mbe/src/mbe_expander/transcriber.rs b/crates/mbe/src/mbe_expander/transcriber.rs index 57f3f104d..720531237 100644 --- a/crates/mbe/src/mbe_expander/transcriber.rs +++ b/crates/mbe/src/mbe_expander/transcriber.rs | |||
@@ -100,8 +100,8 @@ fn expand_subtree( | |||
100 | err = err.or(e); | 100 | err = err.or(e); |
101 | arena.push(tt.into()); | 101 | arena.push(tt.into()); |
102 | } | 102 | } |
103 | Op::Var { name, id, .. } => { | 103 | Op::Var { name, .. } => { |
104 | let ExpandResult { value: fragment, err: e } = expand_var(ctx, &name, *id); | 104 | let ExpandResult { value: fragment, err: e } = expand_var(ctx, &name); |
105 | err = err.or(e); | 105 | err = err.or(e); |
106 | push_fragment(arena, fragment); | 106 | push_fragment(arena, fragment); |
107 | } | 107 | } |
@@ -118,10 +118,12 @@ fn expand_subtree( | |||
118 | ExpandResult { value: tt::Subtree { delimiter: template.delimiter, token_trees: tts }, err } | 118 | ExpandResult { value: tt::Subtree { delimiter: template.delimiter, token_trees: tts }, err } |
119 | } | 119 | } |
120 | 120 | ||
121 | fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr, id: tt::TokenId) -> ExpandResult<Fragment> { | 121 | fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult<Fragment> { |
122 | if v == "crate" { | 122 | if v == "crate" { |
123 | // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path. | 123 | // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path. |
124 | let tt = tt::Leaf::from(tt::Ident { text: "$crate".into(), id }).into(); | 124 | let tt = |
125 | tt::Leaf::from(tt::Ident { text: "$crate".into(), id: tt::TokenId::unspecified() }) | ||
126 | .into(); | ||
125 | ExpandResult::ok(Fragment::Tokens(tt)) | 127 | ExpandResult::ok(Fragment::Tokens(tt)) |
126 | } else if !ctx.bindings.contains(v) { | 128 | } else if !ctx.bindings.contains(v) { |
127 | // Note that it is possible to have a `$var` inside a macro which is not bound. | 129 | // Note that it is possible to have a `$var` inside a macro which is not bound. |
@@ -140,8 +142,14 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr, id: tt::TokenId) -> ExpandResult | |||
140 | let tt = tt::Subtree { | 142 | let tt = tt::Subtree { |
141 | delimiter: None, | 143 | delimiter: None, |
142 | token_trees: vec![ | 144 | token_trees: vec![ |
143 | tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, id }).into(), | 145 | tt::Leaf::from(tt::Punct { |
144 | tt::Leaf::from(tt::Ident { text: v.clone(), id }).into(), | 146 | char: '$', |
147 | spacing: tt::Spacing::Alone, | ||
148 | id: tt::TokenId::unspecified(), | ||
149 | }) | ||
150 | .into(), | ||
151 | tt::Leaf::from(tt::Ident { text: v.clone(), id: tt::TokenId::unspecified() }) | ||
152 | .into(), | ||
145 | ], | 153 | ], |
146 | } | 154 | } |
147 | .into(); | 155 | .into(); |
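Since `Op::Var` no longer supplies a `tt::TokenId`, the transcriber falls back to placeholder ids wherever it has to synthesize tokens, as in the `$crate` and unbound-`$var` branches above. A condensed sketch of the unbound-variable case, assuming the file's existing `SmolStr` and `tt` imports (the helper name is hypothetical):

// Re-emit `$name` as two leaves with placeholder ids when `name` has no binding.
fn unresolved_var(name: &SmolStr) -> tt::Subtree {
    tt::Subtree {
        delimiter: None,
        token_trees: vec![
            tt::Leaf::from(tt::Punct {
                char: '$',
                spacing: tt::Spacing::Alone,
                id: tt::TokenId::unspecified(),
            })
            .into(),
            tt::Leaf::from(tt::Ident { text: name.clone(), id: tt::TokenId::unspecified() })
                .into(),
        ],
    }
}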
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index 77cc739b6..2f3ebc831 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs | |||
@@ -8,7 +8,7 @@ use crate::{tt_iter::TtIter, ExpandError, MetaTemplate}; | |||
8 | 8 | ||
9 | #[derive(Clone, Debug, PartialEq, Eq)] | 9 | #[derive(Clone, Debug, PartialEq, Eq)] |
10 | pub(crate) enum Op { | 10 | pub(crate) enum Op { |
11 | Var { name: SmolStr, kind: Option<SmolStr>, id: tt::TokenId }, | 11 | Var { name: SmolStr, kind: Option<SmolStr> }, |
12 | Repeat { subtree: MetaTemplate, kind: RepeatKind, separator: Option<Separator> }, | 12 | Repeat { subtree: MetaTemplate, kind: RepeatKind, separator: Option<Separator> }, |
13 | Leaf(tt::Leaf), | 13 | Leaf(tt::Leaf), |
14 | Subtree(MetaTemplate), | 14 | Subtree(MetaTemplate), |
@@ -106,21 +106,18 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul | |||
106 | } | 106 | } |
107 | let name = UNDERSCORE.clone(); | 107 | let name = UNDERSCORE.clone(); |
108 | let kind = eat_fragment_kind(src, mode)?; | 108 | let kind = eat_fragment_kind(src, mode)?; |
109 | let id = punct.id; | 109 | Op::Var { name, kind } |
110 | Op::Var { name, kind, id } | ||
111 | } | 110 | } |
112 | tt::Leaf::Ident(ident) => { | 111 | tt::Leaf::Ident(ident) => { |
113 | let name = ident.text.clone(); | 112 | let name = ident.text.clone(); |
114 | let kind = eat_fragment_kind(src, mode)?; | 113 | let kind = eat_fragment_kind(src, mode)?; |
115 | let id = ident.id; | 114 | Op::Var { name, kind } |
116 | Op::Var { name, kind, id } | ||
117 | } | 115 | } |
118 | tt::Leaf::Literal(lit) => { | 116 | tt::Leaf::Literal(lit) => { |
119 | if is_boolean_literal(&lit) { | 117 | if is_boolean_literal(&lit) { |
120 | let name = lit.text.clone(); | 118 | let name = lit.text.clone(); |
121 | let kind = eat_fragment_kind(src, mode)?; | 119 | let kind = eat_fragment_kind(src, mode)?; |
122 | let id = lit.id; | 120 | Op::Var { name, kind } |
123 | Op::Var { name, kind, id } | ||
124 | } else { | 121 | } else { |
125 | bail!("bad var 2"); | 122 | bail!("bad var 2"); |
126 | } | 123 | } |
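After this change, code that destructures `Op::Var` binds only the two remaining fields. A minimal, hypothetical helper showing the new shape:

// Hypothetical: check whether a template op is an `$x:expr`-style variable.
fn is_expr_var(op: &Op) -> bool {
    matches!(op, Op::Var { kind: Some(k), .. } if k.as_str() == "expr")
}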
diff --git a/crates/mbe/src/subtree_source.rs b/crates/mbe/src/subtree_source.rs index d10d4b70e..d7433bd35 100644 --- a/crates/mbe/src/subtree_source.rs +++ b/crates/mbe/src/subtree_source.rs | |||
@@ -1,129 +1,104 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use parser::{Token, TokenSource}; | 3 | use parser::{Token, TokenSource}; |
4 | use std::cell::{Cell, Ref, RefCell}; | ||
5 | use syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T}; | 4 | use syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T}; |
6 | use tt::buffer::{Cursor, TokenBuffer}; | 5 | use tt::buffer::TokenBuffer; |
7 | 6 | ||
8 | #[derive(Debug, Clone, Eq, PartialEq)] | 7 | #[derive(Debug, Clone, Eq, PartialEq)] |
9 | struct TtToken { | 8 | struct TtToken { |
10 | kind: SyntaxKind, | 9 | tt: Token, |
11 | is_joint_to_next: bool, | ||
12 | text: SmolStr, | 10 | text: SmolStr, |
13 | } | 11 | } |
14 | 12 | ||
15 | pub(crate) struct SubtreeTokenSource<'a> { | 13 | pub(crate) struct SubtreeTokenSource { |
16 | cached_cursor: Cell<Cursor<'a>>, | 14 | cached: Vec<TtToken>, |
17 | cached: RefCell<Vec<Option<TtToken>>>, | ||
18 | curr: (Token, usize), | 15 | curr: (Token, usize), |
19 | } | 16 | } |
20 | 17 | ||
21 | impl<'a> SubtreeTokenSource<'a> { | 18 | impl<'a> SubtreeTokenSource { |
22 | // Helper function used in test | 19 | // Helper function used in test |
23 | #[cfg(test)] | 20 | #[cfg(test)] |
24 | pub(crate) fn text(&self) -> SmolStr { | 21 | pub(crate) fn text(&self) -> SmolStr { |
25 | match *self.get(self.curr.1) { | 22 | match self.cached.get(self.curr.1) { |
26 | Some(ref tt) => tt.text.clone(), | 23 | Some(ref tt) => tt.text.clone(), |
27 | _ => SmolStr::new(""), | 24 | _ => SmolStr::new(""), |
28 | } | 25 | } |
29 | } | 26 | } |
30 | } | 27 | } |
31 | 28 | ||
32 | impl<'a> SubtreeTokenSource<'a> { | 29 | impl<'a> SubtreeTokenSource { |
33 | pub(crate) fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> { | 30 | pub(crate) fn new(buffer: &TokenBuffer) -> SubtreeTokenSource { |
34 | let cursor = buffer.begin(); | 31 | let mut current = buffer.begin(); |
32 | let mut cached = Vec::with_capacity(100); | ||
35 | 33 | ||
36 | let mut res = SubtreeTokenSource { | 34 | while !current.eof() { |
37 | curr: (Token { kind: EOF, is_jointed_to_next: false }, 0), | 35 | let cursor = current; |
38 | cached_cursor: Cell::new(cursor), | 36 | let tt = cursor.token_tree(); |
39 | cached: RefCell::new(Vec::with_capacity(10)), | ||
40 | }; | ||
41 | res.curr = (res.mk_token(0), 0); | ||
42 | res | ||
43 | } | ||
44 | 37 | ||
45 | fn mk_token(&self, pos: usize) -> Token { | 38 | // Check if it is lifetime |
46 | match *self.get(pos) { | 39 | if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = tt { |
47 | Some(ref tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next }, | ||
48 | None => Token { kind: EOF, is_jointed_to_next: false }, | ||
49 | } | ||
50 | } | ||
51 | |||
52 | fn get(&self, pos: usize) -> Ref<Option<TtToken>> { | ||
53 | fn is_lifetime(c: Cursor) -> Option<(Cursor, SmolStr)> { | ||
54 | let tkn = c.token_tree(); | ||
55 | |||
56 | if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn { | ||
57 | if punct.char == '\'' { | 40 | if punct.char == '\'' { |
58 | let next = c.bump(); | 41 | let next = cursor.bump(); |
59 | if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() { | 42 | if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) = |
60 | let res_cursor = next.bump(); | 43 | next.token_tree() |
61 | let text = SmolStr::new("'".to_string() + &ident.to_string()); | 44 | { |
62 | 45 | let text = SmolStr::new("'".to_string() + &ident.text); | |
63 | return Some((res_cursor, text)); | 46 | cached.push(TtToken { |
47 | tt: Token { kind: LIFETIME_IDENT, is_jointed_to_next: false }, | ||
48 | text, | ||
49 | }); | ||
50 | current = next.bump(); | ||
51 | continue; | ||
64 | } else { | 52 | } else { |
65 | panic!("Next token must be ident : {:#?}", next.token_tree()); | 53 | panic!("Next token must be ident : {:#?}", next.token_tree()); |
66 | } | 54 | } |
67 | } | 55 | } |
68 | } | 56 | } |
69 | 57 | ||
70 | None | 58 | current = match tt { |
71 | } | 59 | Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => { |
72 | 60 | cached.push(convert_leaf(&leaf)); | |
73 | if pos < self.cached.borrow().len() { | 61 | cursor.bump() |
74 | return Ref::map(self.cached.borrow(), |c| &c[pos]); | ||
75 | } | ||
76 | |||
77 | { | ||
78 | let mut cached = self.cached.borrow_mut(); | ||
79 | while pos >= cached.len() { | ||
80 | let cursor = self.cached_cursor.get(); | ||
81 | if cursor.eof() { | ||
82 | cached.push(None); | ||
83 | continue; | ||
84 | } | 62 | } |
85 | 63 | Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => { | |
86 | if let Some((curr, text)) = is_lifetime(cursor) { | 64 | cached.push(convert_delim(subtree.delimiter_kind(), false)); |
87 | cached.push(Some(TtToken { | 65 | cursor.subtree().unwrap() |
88 | kind: LIFETIME_IDENT, | ||
89 | is_joint_to_next: false, | ||
90 | text, | ||
91 | })); | ||
92 | self.cached_cursor.set(curr); | ||
93 | continue; | ||
94 | } | 66 | } |
95 | 67 | None => { | |
96 | match cursor.token_tree() { | 68 | if let Some(subtree) = cursor.end() { |
97 | Some(tt::TokenTree::Leaf(leaf)) => { | 69 | cached.push(convert_delim(subtree.delimiter_kind(), true)); |
98 | cached.push(Some(convert_leaf(&leaf))); | 70 | cursor.bump() |
99 | self.cached_cursor.set(cursor.bump()); | 71 | } else { |
100 | } | 72 | continue; |
101 | Some(tt::TokenTree::Subtree(subtree)) => { | ||
102 | self.cached_cursor.set(cursor.subtree().unwrap()); | ||
103 | cached.push(Some(convert_delim(subtree.delimiter_kind(), false))); | ||
104 | } | ||
105 | None => { | ||
106 | if let Some(subtree) = cursor.end() { | ||
107 | cached.push(Some(convert_delim(subtree.delimiter_kind(), true))); | ||
108 | self.cached_cursor.set(cursor.bump()); | ||
109 | } | ||
110 | } | 73 | } |
111 | } | 74 | } |
112 | } | 75 | }; |
113 | } | 76 | } |
114 | 77 | ||
115 | Ref::map(self.cached.borrow(), |c| &c[pos]) | 78 | let mut res = SubtreeTokenSource { |
79 | curr: (Token { kind: EOF, is_jointed_to_next: false }, 0), | ||
80 | cached, | ||
81 | }; | ||
82 | res.curr = (res.token(0), 0); | ||
83 | res | ||
84 | } | ||
85 | |||
86 | fn token(&self, pos: usize) -> Token { | ||
87 | match self.cached.get(pos) { | ||
88 | Some(it) => it.tt, | ||
89 | None => Token { kind: EOF, is_jointed_to_next: false }, | ||
90 | } | ||
116 | } | 91 | } |
117 | } | 92 | } |
118 | 93 | ||
119 | impl<'a> TokenSource for SubtreeTokenSource<'a> { | 94 | impl<'a> TokenSource for SubtreeTokenSource { |
120 | fn current(&self) -> Token { | 95 | fn current(&self) -> Token { |
121 | self.curr.0 | 96 | self.curr.0 |
122 | } | 97 | } |
123 | 98 | ||
124 | /// Lookahead n token | 99 | /// Lookahead n token |
125 | fn lookahead_nth(&self, n: usize) -> Token { | 100 | fn lookahead_nth(&self, n: usize) -> Token { |
126 | self.mk_token(self.curr.1 + n) | 101 | self.token(self.curr.1 + n) |
127 | } | 102 | } |
128 | 103 | ||
129 | /// bump cursor to next token | 104 | /// bump cursor to next token |
@@ -131,13 +106,12 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> { | |||
131 | if self.current().kind == EOF { | 106 | if self.current().kind == EOF { |
132 | return; | 107 | return; |
133 | } | 108 | } |
134 | 109 | self.curr = (self.token(self.curr.1 + 1), self.curr.1 + 1); | |
135 | self.curr = (self.mk_token(self.curr.1 + 1), self.curr.1 + 1); | ||
136 | } | 110 | } |
137 | 111 | ||
138 | /// Is the current token a specified keyword? | 112 | /// Is the current token a specified keyword? |
139 | fn is_keyword(&self, kw: &str) -> bool { | 113 | fn is_keyword(&self, kw: &str) -> bool { |
140 | match *self.get(self.curr.1) { | 114 | match self.cached.get(self.curr.1) { |
141 | Some(ref t) => t.text == *kw, | 115 | Some(ref t) => t.text == *kw, |
142 | _ => false, | 116 | _ => false, |
143 | } | 117 | } |
@@ -155,7 +129,7 @@ fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken { | |||
155 | let idx = closing as usize; | 129 | let idx = closing as usize; |
156 | let kind = kinds[idx]; | 130 | let kind = kinds[idx]; |
157 | let text = if !texts.is_empty() { &texts[idx..texts.len() - (1 - idx)] } else { "" }; | 131 | let text = if !texts.is_empty() { &texts[idx..texts.len() - (1 - idx)] } else { "" }; |
158 | TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) } | 132 | TtToken { tt: Token { kind, is_jointed_to_next: false }, text: SmolStr::new(text) } |
159 | } | 133 | } |
160 | 134 | ||
161 | fn convert_literal(l: &tt::Literal) -> TtToken { | 135 | fn convert_literal(l: &tt::Literal) -> TtToken { |
@@ -169,7 +143,7 @@ fn convert_literal(l: &tt::Literal) -> TtToken { | |||
169 | }) | 143 | }) |
170 | .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l)); | 144 | .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l)); |
171 | 145 | ||
172 | TtToken { kind, is_joint_to_next: false, text: l.text.clone() } | 146 | TtToken { tt: Token { kind, is_jointed_to_next: false }, text: l.text.clone() } |
173 | } | 147 | } |
174 | 148 | ||
175 | fn convert_ident(ident: &tt::Ident) -> TtToken { | 149 | fn convert_ident(ident: &tt::Ident) -> TtToken { |
@@ -180,7 +154,7 @@ fn convert_ident(ident: &tt::Ident) -> TtToken { | |||
180 | _ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT), | 154 | _ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT), |
181 | }; | 155 | }; |
182 | 156 | ||
183 | TtToken { kind, is_joint_to_next: false, text: ident.text.clone() } | 157 | TtToken { tt: Token { kind, is_jointed_to_next: false }, text: ident.text.clone() } |
184 | } | 158 | } |
185 | 159 | ||
186 | fn convert_punct(p: tt::Punct) -> TtToken { | 160 | fn convert_punct(p: tt::Punct) -> TtToken { |
@@ -194,7 +168,7 @@ fn convert_punct(p: tt::Punct) -> TtToken { | |||
194 | let s: &str = p.char.encode_utf8(&mut buf); | 168 | let s: &str = p.char.encode_utf8(&mut buf); |
195 | SmolStr::new(s) | 169 | SmolStr::new(s) |
196 | }; | 170 | }; |
197 | TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text } | 171 | TtToken { tt: Token { kind, is_jointed_to_next: p.spacing == tt::Spacing::Joint }, text } |
198 | } | 172 | } |
199 | 173 | ||
200 | fn convert_leaf(leaf: &tt::Leaf) -> TtToken { | 174 | fn convert_leaf(leaf: &tt::Leaf) -> TtToken { |
@@ -208,6 +182,7 @@ fn convert_leaf(leaf: &tt::Leaf) -> TtToken { | |||
208 | #[cfg(test)] | 182 | #[cfg(test)] |
209 | mod tests { | 183 | mod tests { |
210 | use super::{convert_literal, TtToken}; | 184 | use super::{convert_literal, TtToken}; |
185 | use parser::Token; | ||
211 | use syntax::{SmolStr, SyntaxKind}; | 186 | use syntax::{SmolStr, SyntaxKind}; |
212 | 187 | ||
213 | #[test] | 188 | #[test] |
@@ -218,8 +193,7 @@ mod tests { | |||
218 | text: SmolStr::new("-42.0") | 193 | text: SmolStr::new("-42.0") |
219 | }), | 194 | }), |
220 | TtToken { | 195 | TtToken { |
221 | kind: SyntaxKind::FLOAT_NUMBER, | 196 | tt: Token { kind: SyntaxKind::FLOAT_NUMBER, is_jointed_to_next: false }, |
222 | is_joint_to_next: false, | ||
223 | text: SmolStr::new("-42.0") | 197 | text: SmolStr::new("-42.0") |
224 | } | 198 | } |
225 | ); | 199 | ); |
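The rewrite above replaces the lazy `Cell`/`RefCell` cache with a single eager pass that flattens the whole `TokenBuffer` into a `Vec<TtToken>` up front, which is also why `SubtreeTokenSource` loses its lifetime parameter. A hedged, crate-internal usage sketch, assuming the file's existing imports (`parser::TokenSource`, `SyntaxKind::*`):

// Build the buffer, flatten it eagerly, then drive the parser-facing API;
// lookahead and bump are now plain indexing into the cached Vec.
fn walk(trees: &[tt::TokenTree]) {
    let buffer = tt::buffer::TokenBuffer::from_tokens(trees);
    let mut src = SubtreeTokenSource::new(&buffer); // cache is filled here
    while src.current().kind != EOF {
        let _peek = src.lookahead_nth(1);
        src.bump();
    }
}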
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 265c0d63d..671036e1c 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs | |||
@@ -70,15 +70,12 @@ pub fn token_tree_to_syntax_node( | |||
70 | tt: &tt::Subtree, | 70 | tt: &tt::Subtree, |
71 | fragment_kind: FragmentKind, | 71 | fragment_kind: FragmentKind, |
72 | ) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> { | 72 | ) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> { |
73 | let tmp; | 73 | let buffer = match tt { |
74 | let tokens = match tt { | 74 | tt::Subtree { delimiter: None, token_trees } => { |
75 | tt::Subtree { delimiter: None, token_trees } => token_trees.as_slice(), | 75 | TokenBuffer::from_tokens(token_trees.as_slice()) |
76 | _ => { | ||
77 | tmp = [tt.clone().into()]; | ||
78 | &tmp[..] | ||
79 | } | 76 | } |
77 | _ => TokenBuffer::from_subtree(tt), | ||
80 | }; | 78 | }; |
81 | let buffer = TokenBuffer::new(&tokens); | ||
82 | let mut token_source = SubtreeTokenSource::new(&buffer); | 79 | let mut token_source = SubtreeTokenSource::new(&buffer); |
83 | let mut tree_sink = TtTreeSink::new(buffer.begin()); | 80 | let mut tree_sink = TtTreeSink::new(buffer.begin()); |
84 | parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind); | 81 | parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind); |
@@ -414,7 +411,7 @@ trait TokenConvertor { | |||
414 | fn id_alloc(&mut self) -> &mut TokenIdAlloc; | 411 | fn id_alloc(&mut self) -> &mut TokenIdAlloc; |
415 | } | 412 | } |
416 | 413 | ||
417 | impl<'a> SrcToken for (RawToken, &'a str) { | 414 | impl<'a> SrcToken for (&'a RawToken, &'a str) { |
418 | fn kind(&self) -> SyntaxKind { | 415 | fn kind(&self) -> SyntaxKind { |
419 | self.0.kind | 416 | self.0.kind |
420 | } | 417 | } |
@@ -431,7 +428,7 @@ impl<'a> SrcToken for (RawToken, &'a str) { | |||
431 | impl RawConvertor<'_> {} | 428 | impl RawConvertor<'_> {} |
432 | 429 | ||
433 | impl<'a> TokenConvertor for RawConvertor<'a> { | 430 | impl<'a> TokenConvertor for RawConvertor<'a> { |
434 | type Token = (RawToken, &'a str); | 431 | type Token = (&'a RawToken, &'a str); |
435 | 432 | ||
436 | fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> { | 433 | fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> { |
437 | convert_doc_comment(&doc_comment(token.1)) | 434 | convert_doc_comment(&doc_comment(token.1)) |
@@ -442,11 +439,11 @@ impl<'a> TokenConvertor for RawConvertor<'a> { | |||
442 | let range = TextRange::at(self.offset, token.len); | 439 | let range = TextRange::at(self.offset, token.len); |
443 | self.offset += token.len; | 440 | self.offset += token.len; |
444 | 441 | ||
445 | Some(((*token, &self.text[range]), range)) | 442 | Some(((token, &self.text[range]), range)) |
446 | } | 443 | } |
447 | 444 | ||
448 | fn peek(&self) -> Option<Self::Token> { | 445 | fn peek(&self) -> Option<Self::Token> { |
449 | let token = self.inner.as_slice().get(0).cloned(); | 446 | let token = self.inner.as_slice().get(0); |
450 | 447 | ||
451 | token.map(|it| { | 448 | token.map(|it| { |
452 | let range = TextRange::at(self.offset, it.len); | 449 | let range = TextRange::at(self.offset, it.len); |
@@ -601,17 +598,16 @@ impl<'a> TtTreeSink<'a> { | |||
601 | } | 598 | } |
602 | } | 599 | } |
603 | 600 | ||
604 | fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr { | 601 | fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> &'static str { |
605 | let texts = match d { | 602 | let texts = match d { |
606 | Some(tt::DelimiterKind::Parenthesis) => "()", | 603 | Some(tt::DelimiterKind::Parenthesis) => "()", |
607 | Some(tt::DelimiterKind::Brace) => "{}", | 604 | Some(tt::DelimiterKind::Brace) => "{}", |
608 | Some(tt::DelimiterKind::Bracket) => "[]", | 605 | Some(tt::DelimiterKind::Bracket) => "[]", |
609 | None => return "".into(), | 606 | None => return "", |
610 | }; | 607 | }; |
611 | 608 | ||
612 | let idx = closing as usize; | 609 | let idx = closing as usize; |
613 | let text = &texts[idx..texts.len() - (1 - idx)]; | 610 | &texts[idx..texts.len() - (1 - idx)] |
614 | text.into() | ||
615 | } | 611 | } |
616 | 612 | ||
617 | impl<'a> TreeSink for TtTreeSink<'a> { | 613 | impl<'a> TreeSink for TtTreeSink<'a> { |
@@ -626,29 +622,32 @@ impl<'a> TreeSink for TtTreeSink<'a> { | |||
626 | 622 | ||
627 | let mut last = self.cursor; | 623 | let mut last = self.cursor; |
628 | for _ in 0..n_tokens { | 624 | for _ in 0..n_tokens { |
625 | let tmp_str: SmolStr; | ||
629 | if self.cursor.eof() { | 626 | if self.cursor.eof() { |
630 | break; | 627 | break; |
631 | } | 628 | } |
632 | last = self.cursor; | 629 | last = self.cursor; |
633 | let text: SmolStr = match self.cursor.token_tree() { | 630 | let text: &str = match self.cursor.token_tree() { |
634 | Some(tt::TokenTree::Leaf(leaf)) => { | 631 | Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => { |
635 | // Mark the range if needed | 632 | // Mark the range if needed |
636 | let (text, id) = match leaf { | 633 | let (text, id) = match leaf { |
637 | tt::Leaf::Ident(ident) => (ident.text.clone(), ident.id), | 634 | tt::Leaf::Ident(ident) => (&ident.text, ident.id), |
638 | tt::Leaf::Punct(punct) => { | 635 | tt::Leaf::Punct(punct) => { |
639 | assert!(punct.char.is_ascii()); | 636 | assert!(punct.char.is_ascii()); |
640 | let char = &(punct.char as u8); | 637 | let char = &(punct.char as u8); |
641 | let text = std::str::from_utf8(std::slice::from_ref(char)).unwrap(); | 638 | tmp_str = SmolStr::new_inline( |
642 | (SmolStr::new_inline(text), punct.id) | 639 | std::str::from_utf8(std::slice::from_ref(char)).unwrap(), |
640 | ); | ||
641 | (&tmp_str, punct.id) | ||
643 | } | 642 | } |
644 | tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id), | 643 | tt::Leaf::Literal(lit) => (&lit.text, lit.id), |
645 | }; | 644 | }; |
646 | let range = TextRange::at(self.text_pos, TextSize::of(text.as_str())); | 645 | let range = TextRange::at(self.text_pos, TextSize::of(text.as_str())); |
647 | self.token_map.insert(id, range); | 646 | self.token_map.insert(id, range); |
648 | self.cursor = self.cursor.bump(); | 647 | self.cursor = self.cursor.bump(); |
649 | text | 648 | text |
650 | } | 649 | } |
651 | Some(tt::TokenTree::Subtree(subtree)) => { | 650 | Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => { |
652 | self.cursor = self.cursor.subtree().unwrap(); | 651 | self.cursor = self.cursor.subtree().unwrap(); |
653 | if let Some(id) = subtree.delimiter.map(|it| it.id) { | 652 | if let Some(id) = subtree.delimiter.map(|it| it.id) { |
654 | self.open_delims.insert(id, self.text_pos); | 653 | self.open_delims.insert(id, self.text_pos); |
@@ -672,7 +671,7 @@ impl<'a> TreeSink for TtTreeSink<'a> { | |||
672 | } | 671 | } |
673 | }; | 672 | }; |
674 | self.buf += &text; | 673 | self.buf += &text; |
675 | self.text_pos += TextSize::of(text.as_str()); | 674 | self.text_pos += TextSize::of(text); |
676 | } | 675 | } |
677 | 676 | ||
678 | let text = SmolStr::new(self.buf.as_str()); | 677 | let text = SmolStr::new(self.buf.as_str()); |
@@ -682,8 +681,8 @@ impl<'a> TreeSink for TtTreeSink<'a> { | |||
682 | // Add whitespace between adjoint puncts | 681 | // Add whitespace between adjoint puncts |
683 | let next = last.bump(); | 682 | let next = last.bump(); |
684 | if let ( | 683 | if let ( |
685 | Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))), | 684 | Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)), |
686 | Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))), | 685 | Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)), |
687 | ) = (last.token_tree(), next.token_tree()) | 686 | ) = (last.token_tree(), next.token_tree()) |
688 | { | 687 | { |
689 | // Note: We always assume the semi-colon would be the last token in | 688 | // Note: We always assume the semi-colon would be the last token in |
@@ -742,7 +741,7 @@ mod tests { | |||
742 | ) | 741 | ) |
743 | .expand_tt("literals!(foo);"); | 742 | .expand_tt("literals!(foo);"); |
744 | let tts = &[expansion.into()]; | 743 | let tts = &[expansion.into()]; |
745 | let buffer = tt::buffer::TokenBuffer::new(tts); | 744 | let buffer = tt::buffer::TokenBuffer::from_tokens(tts); |
746 | let mut tt_src = SubtreeTokenSource::new(&buffer); | 745 | let mut tt_src = SubtreeTokenSource::new(&buffer); |
747 | let mut tokens = vec![]; | 746 | let mut tokens = vec![]; |
748 | while tt_src.current().kind != EOF { | 747 | while tt_src.current().kind != EOF { |
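The point of `TokenBuffer::from_subtree` here is that the old path had to clone the whole subtree into a temporary one-element array just to call `TokenBuffer::new`; the two constructors now cover both shapes while borrowing directly. A hedged sketch of the selection logic as a standalone helper:

// Delimiter-less subtrees are flattened to their children; anything else is
// wrapped as a whole, without the `tt.clone()` the previous code needed.
fn to_buffer(tt: &tt::Subtree) -> tt::buffer::TokenBuffer<'_> {
    match tt {
        tt::Subtree { delimiter: None, token_trees } => {
            tt::buffer::TokenBuffer::from_tokens(token_trees.as_slice())
        }
        _ => tt::buffer::TokenBuffer::from_subtree(tt),
    }
}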
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 0a63593fb..af7b86ead 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml | |||
@@ -62,7 +62,7 @@ proc_macro_srv = { path = "../proc_macro_srv", version = "0.0.0" } | |||
62 | winapi = "0.3.8" | 62 | winapi = "0.3.8" |
63 | 63 | ||
64 | [dev-dependencies] | 64 | [dev-dependencies] |
65 | expect-test = "1.0" | 65 | expect-test = "1.1" |
66 | test_utils = { path = "../test_utils" } | 66 | test_utils = { path = "../test_utils" } |
67 | mbe = { path = "../mbe" } | 67 | mbe = { path = "../mbe" } |
68 | tt = { path = "../tt" } | 68 | tt = { path = "../tt" } |
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index dd486070b..c21ca044a 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs | |||
@@ -1648,7 +1648,7 @@ fn prepare_hover_actions( | |||
1648 | actions | 1648 | actions |
1649 | .iter() | 1649 | .iter() |
1650 | .filter_map(|it| match it { | 1650 | .filter_map(|it| match it { |
1651 | HoverAction::Implementaion(position) => show_impl_command_link(snap, position), | 1651 | HoverAction::Implementation(position) => show_impl_command_link(snap, position), |
1652 | HoverAction::Runnable(r) => runnable_action_links(snap, file_id, r.clone()), | 1652 | HoverAction::Runnable(r) => runnable_action_links(snap, file_id, r.clone()), |
1653 | HoverAction::GoToType(targets) => goto_type_action_links(snap, targets), | 1653 | HoverAction::GoToType(targets) => goto_type_action_links(snap, targets), |
1654 | }) | 1654 | }) |
diff --git a/crates/ssr/Cargo.toml b/crates/ssr/Cargo.toml index 339eda86a..cc8136d22 100644 --- a/crates/ssr/Cargo.toml +++ b/crates/ssr/Cargo.toml | |||
@@ -21,4 +21,4 @@ hir = { path = "../hir", version = "0.0.0" } | |||
21 | test_utils = { path = "../test_utils", version = "0.0.0" } | 21 | test_utils = { path = "../test_utils", version = "0.0.0" } |
22 | 22 | ||
23 | [dev-dependencies] | 23 | [dev-dependencies] |
24 | expect-test = "1.0" | 24 | expect-test = "1.1" |
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 5d8389ade..cfeaed9e6 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml | |||
@@ -13,7 +13,7 @@ doctest = false | |||
13 | [dependencies] | 13 | [dependencies] |
14 | itertools = "0.10.0" | 14 | itertools = "0.10.0" |
15 | rowan = "0.10.0" | 15 | rowan = "0.10.0" |
16 | rustc_lexer = { version = "695.0.0", package = "rustc-ap-rustc_lexer" } | 16 | rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" } |
17 | rustc-hash = "1.1.0" | 17 | rustc-hash = "1.1.0" |
18 | arrayvec = "0.5.1" | 18 | arrayvec = "0.5.1" |
19 | once_cell = "1.3.1" | 19 | once_cell = "1.3.1" |
@@ -33,4 +33,4 @@ profile = { path = "../profile", version = "0.0.0" } | |||
33 | [dev-dependencies] | 33 | [dev-dependencies] |
34 | walkdir = "2.3.1" | 34 | walkdir = "2.3.1" |
35 | rayon = "1" | 35 | rayon = "1" |
36 | expect-test = "1.0" | 36 | expect-test = "1.1" |
diff --git a/crates/tt/src/buffer.rs b/crates/tt/src/buffer.rs index 02c771f70..3606c887d 100644 --- a/crates/tt/src/buffer.rs +++ b/crates/tt/src/buffer.rs | |||
@@ -1,6 +1,6 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use crate::{Subtree, TokenTree}; | 3 | use crate::{Leaf, Subtree, TokenTree}; |
4 | 4 | ||
5 | #[derive(Copy, Clone, Debug, Eq, PartialEq)] | 5 | #[derive(Copy, Clone, Debug, Eq, PartialEq)] |
6 | struct EntryId(usize); | 6 | struct EntryId(usize); |
@@ -13,7 +13,7 @@ struct EntryPtr(EntryId, usize); | |||
13 | #[derive(Debug)] | 13 | #[derive(Debug)] |
14 | enum Entry<'t> { | 14 | enum Entry<'t> { |
15 | // Mimicking types from proc-macro. | 15 | // Mimicking types from proc-macro. |
16 | Subtree(&'t TokenTree, EntryId), | 16 | Subtree(Option<&'t TokenTree>, &'t Subtree, EntryId), |
17 | Leaf(&'t TokenTree), | 17 | Leaf(&'t TokenTree), |
18 | // End entries contain a pointer to the entry from the containing | 18 | // End entries contain a pointer to the entry from the containing |
19 | // token tree, or None if this is the outermost level. | 19 | // token tree, or None if this is the outermost level. |
@@ -27,37 +27,64 @@ pub struct TokenBuffer<'t> { | |||
27 | buffers: Vec<Box<[Entry<'t>]>>, | 27 | buffers: Vec<Box<[Entry<'t>]>>, |
28 | } | 28 | } |
29 | 29 | ||
30 | impl<'t> TokenBuffer<'t> { | 30 | trait TokenList<'a> { |
31 | pub fn new(tokens: &'t [TokenTree]) -> TokenBuffer<'t> { | 31 | fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>); |
32 | let mut buffers = vec![]; | 32 | } |
33 | |||
34 | let idx = TokenBuffer::new_inner(tokens, &mut buffers, None); | ||
35 | assert_eq!(idx, 0); | ||
36 | |||
37 | TokenBuffer { buffers } | ||
38 | } | ||
39 | 33 | ||
40 | fn new_inner( | 34 | impl<'a> TokenList<'a> for &'a [TokenTree] { |
41 | tokens: &'t [TokenTree], | 35 | fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) { |
42 | buffers: &mut Vec<Box<[Entry<'t>]>>, | ||
43 | next: Option<EntryPtr>, | ||
44 | ) -> usize { | ||
45 | // Must contain everything in tokens and then the Entry::End | 36 | // Must contain everything in tokens and then the Entry::End |
46 | let start_capacity = tokens.len() + 1; | 37 | let start_capacity = self.len() + 1; |
47 | let mut entries = Vec::with_capacity(start_capacity); | 38 | let mut entries = Vec::with_capacity(start_capacity); |
48 | let mut children = vec![]; | 39 | let mut children = vec![]; |
49 | 40 | for (idx, tt) in self.iter().enumerate() { | |
50 | for (idx, tt) in tokens.iter().enumerate() { | ||
51 | match tt { | 41 | match tt { |
52 | TokenTree::Leaf(_) => { | 42 | TokenTree::Leaf(_) => { |
53 | entries.push(Entry::Leaf(tt)); | 43 | entries.push(Entry::Leaf(tt)); |
54 | } | 44 | } |
55 | TokenTree::Subtree(subtree) => { | 45 | TokenTree::Subtree(subtree) => { |
56 | entries.push(Entry::End(None)); | 46 | entries.push(Entry::End(None)); |
57 | children.push((idx, (subtree, tt))); | 47 | children.push((idx, (subtree, Some(tt)))); |
58 | } | 48 | } |
59 | } | 49 | } |
60 | } | 50 | } |
51 | (children, entries) | ||
52 | } | ||
53 | } | ||
54 | |||
55 | impl<'a> TokenList<'a> for &'a Subtree { | ||
56 | fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) { | ||
57 | // Must contain everything in tokens and then the Entry::End | ||
58 | let mut entries = vec![]; | ||
59 | let mut children = vec![]; | ||
60 | entries.push(Entry::End(None)); | ||
61 | children.push((0usize, (*self, None))); | ||
62 | (children, entries) | ||
63 | } | ||
64 | } | ||
65 | |||
66 | impl<'t> TokenBuffer<'t> { | ||
67 | pub fn from_tokens(tokens: &'t [TokenTree]) -> TokenBuffer<'t> { | ||
68 | Self::new(tokens) | ||
69 | } | ||
70 | |||
71 | pub fn from_subtree(subtree: &'t Subtree) -> TokenBuffer<'t> { | ||
72 | Self::new(subtree) | ||
73 | } | ||
74 | |||
75 | fn new<T: TokenList<'t>>(tokens: T) -> TokenBuffer<'t> { | ||
76 | let mut buffers = vec![]; | ||
77 | let idx = TokenBuffer::new_inner(tokens, &mut buffers, None); | ||
78 | assert_eq!(idx, 0); | ||
79 | TokenBuffer { buffers } | ||
80 | } | ||
81 | |||
82 | fn new_inner<T: TokenList<'t>>( | ||
83 | tokens: T, | ||
84 | buffers: &mut Vec<Box<[Entry<'t>]>>, | ||
85 | next: Option<EntryPtr>, | ||
86 | ) -> usize { | ||
87 | let (children, mut entries) = tokens.entries(); | ||
61 | 88 | ||
62 | entries.push(Entry::End(next)); | 89 | entries.push(Entry::End(next)); |
63 | let res = buffers.len(); | 90 | let res = buffers.len(); |
@@ -65,11 +92,11 @@ impl<'t> TokenBuffer<'t> { | |||
65 | 92 | ||
66 | for (child_idx, (subtree, tt)) in children { | 93 | for (child_idx, (subtree, tt)) in children { |
67 | let idx = TokenBuffer::new_inner( | 94 | let idx = TokenBuffer::new_inner( |
68 | &subtree.token_trees, | 95 | subtree.token_trees.as_slice(), |
69 | buffers, | 96 | buffers, |
70 | Some(EntryPtr(EntryId(res), child_idx + 1)), | 97 | Some(EntryPtr(EntryId(res), child_idx + 1)), |
71 | ); | 98 | ); |
72 | buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, EntryId(idx)); | 99 | buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, subtree, EntryId(idx)); |
73 | } | 100 | } |
74 | 101 | ||
75 | res | 102 | res |
@@ -87,6 +114,24 @@ impl<'t> TokenBuffer<'t> { | |||
87 | } | 114 | } |
88 | } | 115 | } |
89 | 116 | ||
117 | #[derive(Debug)] | ||
118 | pub enum TokenTreeRef<'a> { | ||
119 | Subtree(&'a Subtree, Option<&'a TokenTree>), | ||
120 | Leaf(&'a Leaf, &'a TokenTree), | ||
121 | } | ||
122 | |||
123 | impl<'a> TokenTreeRef<'a> { | ||
124 | pub fn cloned(&self) -> TokenTree { | ||
125 | match &self { | ||
126 | TokenTreeRef::Subtree(subtree, tt) => match tt { | ||
127 | Some(it) => (*it).clone(), | ||
128 | None => (*subtree).clone().into(), | ||
129 | }, | ||
130 | TokenTreeRef::Leaf(_, tt) => (*tt).clone(), | ||
131 | } | ||
132 | } | ||
133 | } | ||
134 | |||
90 | /// A safe version of `Cursor` from `syn` crate https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L125 | 135 | /// A safe version of `Cursor` from `syn` crate https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L125 |
91 | #[derive(Copy, Clone, Debug)] | 136 | #[derive(Copy, Clone, Debug)] |
92 | pub struct Cursor<'a> { | 137 | pub struct Cursor<'a> { |
@@ -114,12 +159,11 @@ impl<'a> Cursor<'a> { | |||
114 | match self.entry() { | 159 | match self.entry() { |
115 | Some(Entry::End(Some(ptr))) => { | 160 | Some(Entry::End(Some(ptr))) => { |
116 | let idx = ptr.1; | 161 | let idx = ptr.1; |
117 | if let Some(Entry::Subtree(TokenTree::Subtree(subtree), _)) = | 162 | if let Some(Entry::Subtree(_, subtree, _)) = |
118 | self.buffer.entry(&EntryPtr(ptr.0, idx - 1)) | 163 | self.buffer.entry(&EntryPtr(ptr.0, idx - 1)) |
119 | { | 164 | { |
120 | return Some(subtree); | 165 | return Some(subtree); |
121 | } | 166 | } |
122 | |||
123 | None | 167 | None |
124 | } | 168 | } |
125 | _ => None, | 169 | _ => None, |
@@ -134,7 +178,7 @@ impl<'a> Cursor<'a> { | |||
134 | /// a cursor into that subtree | 178 | /// a cursor into that subtree |
135 | pub fn subtree(self) -> Option<Cursor<'a>> { | 179 | pub fn subtree(self) -> Option<Cursor<'a>> { |
136 | match self.entry() { | 180 | match self.entry() { |
137 | Some(Entry::Subtree(_, entry_id)) => { | 181 | Some(Entry::Subtree(_, _, entry_id)) => { |
138 | Some(Cursor::create(self.buffer, EntryPtr(*entry_id, 0))) | 182 | Some(Cursor::create(self.buffer, EntryPtr(*entry_id, 0))) |
139 | } | 183 | } |
140 | _ => None, | 184 | _ => None, |
@@ -142,10 +186,13 @@ impl<'a> Cursor<'a> { | |||
142 | } | 186 | } |
143 | 187 | ||
144 | /// If the cursor is pointing at a `TokenTree`, returns it | 188 | /// If the cursor is pointing at a `TokenTree`, returns it |
145 | pub fn token_tree(self) -> Option<&'a TokenTree> { | 189 | pub fn token_tree(self) -> Option<TokenTreeRef<'a>> { |
146 | match self.entry() { | 190 | match self.entry() { |
147 | Some(Entry::Leaf(tt)) => Some(tt), | 191 | Some(Entry::Leaf(tt)) => match tt { |
148 | Some(Entry::Subtree(tt, _)) => Some(tt), | 192 | TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, *tt)), |
193 | TokenTree::Subtree(subtree) => Some(TokenTreeRef::Subtree(subtree, Some(tt))), | ||
194 | }, | ||
195 | Some(Entry::Subtree(tt, subtree, _)) => Some(TokenTreeRef::Subtree(subtree, *tt)), | ||
149 | Some(Entry::End(_)) => None, | 196 | Some(Entry::End(_)) => None, |
150 | None => None, | 197 | None => None, |
151 | } | 198 | } |
@@ -172,7 +219,7 @@ impl<'a> Cursor<'a> { | |||
172 | /// a cursor into that subtree | 219 | /// a cursor into that subtree |
173 | pub fn bump_subtree(self) -> Cursor<'a> { | 220 | pub fn bump_subtree(self) -> Cursor<'a> { |
174 | match self.entry() { | 221 | match self.entry() { |
175 | Some(Entry::Subtree(_, _)) => self.subtree().unwrap(), | 222 | Some(Entry::Subtree(_, _, _)) => self.subtree().unwrap(), |
176 | _ => self.bump(), | 223 | _ => self.bump(), |
177 | } | 224 | } |
178 | } | 225 | } |
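Taken together, `Cursor::token_tree` now hands out `TokenTreeRef` views and `TokenTreeRef::cloned` rebuilds owned trees, which is what the `mbe` changes above rely on. A hedged round-trip sketch over a buffer's top level:

// Collect owned copies of the top-level token trees reachable from `begin()`.
// `bump()` steps over a subtree entry as a whole, so children are not revisited.
fn top_level_trees(tts: &[tt::TokenTree]) -> Vec<tt::TokenTree> {
    let buffer = tt::buffer::TokenBuffer::from_tokens(tts);
    let mut cursor = buffer.begin();
    let mut out = Vec::new();
    while !cursor.eof() {
        if let Some(tree) = cursor.token_tree() {
            out.push(tree.cloned());
        }
        cursor = cursor.bump();
    }
    out
}

This mirrors how the matcher hunk earlier in the diff turns the views it consumed back into owned trees.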