author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>    2019-09-10 20:23:39 +0100
committer GitHub <[email protected]>    2019-09-10 20:23:39 +0100
commit    73b4fea01502ea7d4381f711cde5210cefcb4e00 (patch)
tree      8b4a9d1ef764d82ccfdafdb2231a7813be09b01d
parent    1140a83c1b393cdcd18e42d5d816fd8be348b059 (diff)
parent    9eb14e11706ebf9a60b0afbe9bc82a99a816bc02 (diff)
Merge #1810
1810: cleanup dollar handling in expressions r=matklad a=matklad

Co-authored-by: Aleksey Kladov <[email protected]>
-rw-r--r--  crates/ra_hir/src/ids.rs                            2
-rw-r--r--  crates/ra_hir/src/nameres/raw.rs                    4
-rw-r--r--  crates/ra_mbe/src/lib.rs                            4
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs                 27
-rw-r--r--  crates/ra_mbe/src/tests.rs                          8
-rw-r--r--  crates/ra_parser/src/grammar/expressions.rs        49
-rw-r--r--  crates/ra_parser/src/grammar/expressions/atom.rs   25
-rw-r--r--  crates/ra_parser/src/parser.rs                     73
8 files changed, 62 insertions, 130 deletions
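
The gist of the change: expression fragments produced by macro expansion are now parsed by a dedicated `meta_var_expr` atom that consumes the `L_DOLLAR`/`R_DOLLAR` delimiter tokens, instead of threading a `dollar_lvl` counter through `expr_bp` and `lhs`; on the mbe side, the infallible `token_tree_to_ast_item_list` is dropped in favour of the fallible `token_tree_to_items`. A minimal caller-side sketch of the renamed API, mirroring the `ids.rs` hunk below (the `parse_expansion` wrapper is invented for illustration, not part of the diff):

```rust
// Illustrative sketch only: dispatch on the expansion kind and pick the
// matching fragment parser. `parse_expansion` is a made-up wrapper; the
// entry points and types are the ones touched by this diff.
fn parse_expansion(kind: MacroFileKind, tt: &tt::Subtree) -> Option<Parse<SyntaxNode>> {
    match kind {
        // Items expansions can now fail gracefully instead of unwrapping.
        MacroFileKind::Items => mbe::token_tree_to_items(tt).ok().map(Parse::to_syntax),
        MacroFileKind::Expr => mbe::token_tree_to_expr(tt).ok().map(Parse::to_syntax),
    }
}
```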
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index e0d0d4209..9ea4e695d 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -90,7 +90,7 @@ impl HirFileId {
             })
             .ok()?;
         match macro_file.macro_file_kind {
-            MacroFileKind::Items => Some(Parse::to_syntax(mbe::token_tree_to_ast_item_list(&tt))),
+            MacroFileKind::Items => mbe::token_tree_to_items(&tt).ok().map(Parse::to_syntax),
             MacroFileKind::Expr => mbe::token_tree_to_expr(&tt).ok().map(Parse::to_syntax),
         }
     }
diff --git a/crates/ra_hir/src/nameres/raw.rs b/crates/ra_hir/src/nameres/raw.rs
index c646d3d00..04b97cb90 100644
--- a/crates/ra_hir/src/nameres/raw.rs
+++ b/crates/ra_hir/src/nameres/raw.rs
@@ -76,8 +76,10 @@ impl RawItems {
             source_map: ImportSourceMap::default(),
         };
         if let Some(node) = db.parse_or_expand(file_id) {
-            if let Some(source_file) = ast::SourceFile::cast(node) {
+            if let Some(source_file) = ast::SourceFile::cast(node.clone()) {
                 collector.process_module(None, source_file);
+            } else if let Some(item_list) = ast::MacroItems::cast(node) {
+                collector.process_module(None, item_list);
             }
         }
         (Arc::new(collector.raw_items), Arc::new(collector.source_map))
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 52c3d03b5..f07f000ff 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -41,8 +41,8 @@ pub enum ExpandError {
 }
 
 pub use crate::syntax_bridge::{
-    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_ast_item_list, token_tree_to_expr,
-    token_tree_to_macro_items, token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty,
+    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_expr, token_tree_to_items,
+    token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty,
 };
 
 /// This struct contains AST for a single `macro_rules` definition. What might
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 26524adf9..2d035307b 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -46,25 +46,6 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
 // * TraitItems(SmallVec<[ast::TraitItem; 1]>)
 // * ImplItems(SmallVec<[ast::ImplItem; 1]>)
 // * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
-//
-//
-
-fn token_tree_to_syntax_node<F>(tt: &tt::Subtree, f: F) -> Result<Parse<SyntaxNode>, ExpandError>
-where
-    F: Fn(&mut dyn ra_parser::TokenSource, &mut dyn ra_parser::TreeSink),
-{
-    let tokens = [tt.clone().into()];
-    let buffer = TokenBuffer::new(&tokens);
-    let mut token_source = SubtreeTokenSource::new(&buffer);
-    let mut tree_sink = TtTreeSink::new(buffer.begin());
-    f(&mut token_source, &mut tree_sink);
-    if tree_sink.roots.len() != 1 {
-        return Err(ExpandError::ConversionError);
-    }
-    //FIXME: would be cool to report errors
-    let parse = tree_sink.inner.finish();
-    Ok(parse)
-}
 
 fn fragment_to_syntax_node(
     tt: &tt::Subtree,
@@ -115,17 +96,11 @@ pub fn token_tree_to_macro_stmts(tt: &tt::Subtree) -> Result<Parse<ast::MacroStm
 }
 
 /// Parses the token tree (result of macro expansion) as a sequence of items
-pub fn token_tree_to_macro_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
+pub fn token_tree_to_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
     let parse = fragment_to_syntax_node(tt, Items)?;
     parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
 }
 
-/// Parses the token tree (result of macro expansion) as a sequence of items
-pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> Parse<ast::SourceFile> {
-    let parse = token_tree_to_syntax_node(tt, ra_parser::parse).unwrap();
-    parse.cast().unwrap()
-}
-
 impl TokenMap {
     pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
         let idx = tt.0 as usize;
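
A hedged usage sketch of the renamed entry point, mirroring the test helpers in the next file (it assumes an expansion result `expanded: tt::Subtree` is in scope; the `.expect` message is invented for illustration):

```rust
// Sketch: parsing an items expansion is now fallible, so callers unwrap
// or propagate the ExpandError instead of going through the old
// infallible SourceFile path.
let tree: ast::MacroItems = token_tree_to_items(&expanded)
    .expect("expansion does not parse as items")
    .tree();
```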
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 0f07e935d..312fa4626 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -70,7 +70,7 @@ pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
 
 pub(crate) fn expand_to_items(rules: &MacroRules, invocation: &str) -> ast::MacroItems {
     let expanded = expand(rules, invocation);
-    token_tree_to_macro_items(&expanded).unwrap().tree()
+    token_tree_to_items(&expanded).unwrap().tree()
 }
 
 #[allow(unused)]
@@ -155,8 +155,8 @@ pub(crate) fn assert_expansion(
     let expected = text_to_tokentree(&expected);
     let (expanded_tree, expected_tree) = match kind {
         MacroKind::Items => {
-            let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree();
-            let expected_tree = token_tree_to_macro_items(&expected).unwrap().tree();
+            let expanded_tree = token_tree_to_items(&expanded).unwrap().tree();
+            let expected_tree = token_tree_to_items(&expected).unwrap().tree();
 
             (
                 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
@@ -410,7 +410,7 @@ fn test_expand_to_item_list() {
         ",
     );
     let expansion = expand(&rules, "structs!(Foo, Bar);");
-    let tree = token_tree_to_macro_items(&expansion).unwrap().tree();
+    let tree = token_tree_to_items(&expansion).unwrap().tree();
     assert_eq!(
         format!("{:#?}", tree.syntax()).trim(),
         r#"
diff --git a/crates/ra_parser/src/grammar/expressions.rs b/crates/ra_parser/src/grammar/expressions.rs
index ea04b9458..1dd9a586c 100644
--- a/crates/ra_parser/src/grammar/expressions.rs
+++ b/crates/ra_parser/src/grammar/expressions.rs
@@ -14,20 +14,17 @@ const EXPR_FIRST: TokenSet = LHS_FIRST;
 
 pub(super) fn expr(p: &mut Parser) -> BlockLike {
     let r = Restrictions { forbid_structs: false, prefer_stmt: false };
-    let mut dollar_lvl = 0;
-    expr_bp(p, r, 1, &mut dollar_lvl).1
+    expr_bp(p, r, 1).1
 }
 
 pub(super) fn expr_stmt(p: &mut Parser) -> (Option<CompletedMarker>, BlockLike) {
     let r = Restrictions { forbid_structs: false, prefer_stmt: true };
-    let mut dollar_lvl = 0;
-    expr_bp(p, r, 1, &mut dollar_lvl)
+    expr_bp(p, r, 1)
 }
 
 fn expr_no_struct(p: &mut Parser) {
     let r = Restrictions { forbid_structs: true, prefer_stmt: false };
-    let mut dollar_lvl = 0;
-    expr_bp(p, r, 1, &mut dollar_lvl);
+    expr_bp(p, r, 1);
 }
 
 // test block
@@ -257,23 +254,8 @@ fn current_op(p: &Parser) -> (u8, SyntaxKind) {
 }
 
 // Parses expression with binding power of at least bp.
-fn expr_bp(
-    p: &mut Parser,
-    r: Restrictions,
-    mut bp: u8,
-    dollar_lvl: &mut usize,
-) -> (Option<CompletedMarker>, BlockLike) {
-    // `newly_dollar_open` is a flag indicated that dollar is just closed after lhs, e.g.
-    // `$1$ + a`
-    // We use this flag to skip handling it.
-    let mut newly_dollar_open = if p.at_l_dollar() {
-        *dollar_lvl += p.eat_l_dollars();
-        true
-    } else {
-        false
-    };
-
-    let mut lhs = match lhs(p, r, dollar_lvl) {
+fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>, BlockLike) {
+    let mut lhs = match lhs(p, r) {
         Some((lhs, blocklike)) => {
             // test stmt_bin_expr_ambiguity
             // fn foo() {
@@ -289,15 +271,6 @@ fn expr_bp(
     };
 
     loop {
-        if *dollar_lvl > 0 && p.at_r_dollar() {
-            *dollar_lvl -= p.eat_r_dollars(*dollar_lvl);
-            if !newly_dollar_open {
-                // We "pump" bp for make it highest priority
-                bp = 255;
-            }
-            newly_dollar_open = false;
-        }
-
         let is_range = p.at(T![..]) || p.at(T![..=]);
         let (op_bp, op) = current_op(p);
         if op_bp < bp {
@@ -306,7 +279,7 @@ fn expr_bp(
         let m = lhs.precede(p);
         p.bump(op);
 
-        expr_bp(p, r, op_bp + 1, dollar_lvl);
+        expr_bp(p, r, op_bp + 1);
         lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
     }
     (Some(lhs), BlockLike::NotBlock)
@@ -314,11 +287,7 @@ fn expr_bp(
 
 const LHS_FIRST: TokenSet = atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOT, MINUS]);
 
-fn lhs(
-    p: &mut Parser,
-    r: Restrictions,
-    dollar_lvl: &mut usize,
-) -> Option<(CompletedMarker, BlockLike)> {
+fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
     let m;
     let kind = match p.current() {
         // test ref_expr
@@ -351,7 +320,7 @@ fn lhs(
             m = p.start();
             p.bump(op);
             if p.at_ts(EXPR_FIRST) {
-                expr_bp(p, r, 2, dollar_lvl);
+                expr_bp(p, r, 2);
             }
             return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock));
         }
@@ -367,7 +336,7 @@ fn lhs(
             return Some(postfix_expr(p, lhs, blocklike, !(r.prefer_stmt && blocklike.is_block())));
         }
     };
-    expr_bp(p, r, 255, dollar_lvl);
+    expr_bp(p, r, 255);
     Some((m.complete(p, kind), BlockLike::NotBlock))
 }
 
diff --git a/crates/ra_parser/src/grammar/expressions/atom.rs b/crates/ra_parser/src/grammar/expressions/atom.rs
index cea79cf6f..6e295fbf9 100644
--- a/crates/ra_parser/src/grammar/expressions/atom.rs
+++ b/crates/ra_parser/src/grammar/expressions/atom.rs
@@ -69,6 +69,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar
     let done = match p.current() {
         T!['('] => tuple_expr(p),
         T!['['] => array_expr(p),
+        L_DOLLAR => meta_var_expr(p),
         T![|] => lambda_expr(p),
         T![move] if la == T![|] => lambda_expr(p),
         T![async] if la == T![|] || (la == T![move] && p.nth(2) == T![|]) => lambda_expr(p),
@@ -554,3 +555,27 @@ fn box_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
     }
     m.complete(p, BOX_EXPR)
 }
+
+/// Expression from `$var` macro expansion, wrapped in dollars
+fn meta_var_expr(p: &mut Parser) -> CompletedMarker {
+    assert!(p.at(L_DOLLAR));
+    let m = p.start();
+    p.bump(L_DOLLAR);
+    let (completed, _is_block) =
+        expr_bp(p, Restrictions { forbid_structs: false, prefer_stmt: false }, 1);
+
+    match (completed, p.current()) {
+        (Some(it), R_DOLLAR) => {
+            p.bump(R_DOLLAR);
+            m.abandon(p);
+            it
+        }
+        _ => {
+            while !p.at(R_DOLLAR) {
+                p.bump_any()
+            }
+            p.bump(R_DOLLAR);
+            m.complete(p, ERROR)
+        }
+    }
+}
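
Why the parser needs a dedicated atom for these tokens at all: the token-tree bridge wraps every expanded metavariable in invisible `L_DOLLAR`/`R_DOLLAR` delimiters so the fragment keeps its own grouping when it is re-parsed. A self-contained illustration of the effect (the `double!` macro is invented for this note and is not part of the diff):

```rust
// Illustrative only: why the invisible fragment delimiters matter.
macro_rules! double {
    ($e:expr) => { $e * 2 };
}

fn main() {
    // `$e` carries the fragment `1 + 1`. Because the bridge wraps it in
    // L_DOLLAR .. R_DOLLAR, `meta_var_expr` parses it as a single unit and
    // the expansion groups as `(1 + 1) * 2 == 4`, not `1 + (1 * 2) == 3`.
    assert_eq!(double!(1 + 1), 4);
}
```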
diff --git a/crates/ra_parser/src/parser.rs b/crates/ra_parser/src/parser.rs
index e7281123b..a2ac363fb 100644
--- a/crates/ra_parser/src/parser.rs
+++ b/crates/ra_parser/src/parser.rs
@@ -5,7 +5,7 @@ use drop_bomb::DropBomb;
 use crate::{
     event::Event,
     ParseError,
-    SyntaxKind::{self, EOF, ERROR, TOMBSTONE},
+    SyntaxKind::{self, EOF, ERROR, L_DOLLAR, R_DOLLAR, TOMBSTONE},
     TokenSet, TokenSource, T,
 };
 
@@ -211,19 +211,26 @@ impl<'t> Parser<'t> {
 
     /// Create an error node and consume the next token.
     pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) {
-        if self.at(T!['{']) || self.at(T!['}']) || self.at_ts(recovery) {
-            self.error(message);
-        } else {
-            let m = self.start();
+        match self.current() {
+            T!['{'] | T!['}'] | L_DOLLAR | R_DOLLAR => {
+                self.error(message);
+                return;
+            }
+            _ => (),
+        }
+
+        if self.at_ts(recovery) {
             self.error(message);
-            self.bump_any();
-            m.complete(self, ERROR);
-        };
+            return;
+        }
+
+        let m = self.start();
+        self.error(message);
+        self.bump_any();
+        m.complete(self, ERROR);
     }
 
     fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
-        // self.eat_dollars();
-
         for _ in 0..n_raw_tokens {
             self.token_source.bump();
         }
@@ -234,52 +241,6 @@ impl<'t> Parser<'t> {
     fn push_event(&mut self, event: Event) {
         self.events.push(event)
     }
-
-    pub(crate) fn eat_l_dollars(&mut self) -> usize {
-        let mut ate_count = 0;
-        loop {
-            match self.token_source.current().kind {
-                k @ SyntaxKind::L_DOLLAR => {
-                    self.token_source.bump();
-                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
-                    ate_count += 1;
-                }
-                _ => {
-                    return ate_count;
-                }
-            }
-        }
-    }
-
-    pub(crate) fn eat_r_dollars(&mut self, max_count: usize) -> usize {
-        let mut ate_count = 0;
-        loop {
-            match self.token_source.current().kind {
-                k @ SyntaxKind::R_DOLLAR => {
-                    self.token_source.bump();
-                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
-                    ate_count += 1;
-
-                    if max_count >= ate_count {
-                        return ate_count;
-                    }
-                }
-                _ => {
-                    return ate_count;
-                }
-            }
-        }
-    }
-
-    pub(crate) fn at_l_dollar(&self) -> bool {
-        let kind = self.token_source.current().kind;
-        (kind == SyntaxKind::L_DOLLAR)
-    }
-
-    pub(crate) fn at_r_dollar(&self) -> bool {
-        let kind = self.token_source.current().kind;
-        (kind == SyntaxKind::R_DOLLAR)
-    }
 }
 
 /// See `Parser::start`.