aboutsummaryrefslogtreecommitdiff
path: root/crates
diff options
context:
space:
mode:
Diffstat (limited to 'crates')
-rw-r--r--crates/ra_mbe/src/lib.rs55
-rw-r--r--crates/ra_mbe/src/mbe_expander.rs5
-rw-r--r--crates/ra_mbe/src/subtree_parser.rs61
-rw-r--r--crates/ra_mbe/src/subtree_source.rs521
-rw-r--r--crates/ra_mbe/src/syntax_bridge.rs254
-rw-r--r--crates/ra_mbe/src/tt_cursor.rs6
-rw-r--r--crates/ra_parser/src/grammar.rs4
-rw-r--r--crates/ra_parser/src/lib.rs8
8 files changed, 683 insertions, 231 deletions
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 4203929d4..a21ea4dbc 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -15,10 +15,13 @@ macro_rules! impl_froms {
15 } 15 }
16} 16}
17 17
18mod tt_cursor; 18// mod tt_cursor;
19mod mbe_parser; 19mod mbe_parser;
20mod mbe_expander; 20mod mbe_expander;
21mod syntax_bridge; 21mod syntax_bridge;
22mod tt_cursor;
23mod subtree_source;
24mod subtree_parser;
22 25
23use ra_syntax::SmolStr; 26use ra_syntax::SmolStr;
24 27
@@ -379,4 +382,54 @@ SOURCE_FILE@[0; 40)
379 // [let] [s] [=] ["rust1"] [;] 382 // [let] [s] [=] ["rust1"] [;]
380 assert_eq!(to_literal(&stm_tokens[15 + 3]).text, "\"rust1\""); 383 assert_eq!(to_literal(&stm_tokens[15 + 3]).text, "\"rust1\"");
381 } 384 }
385
386 #[test]
387 fn test_two_idents() {
388 let rules = create_rules(
389 r#"
390 macro_rules! foo {
391 ($ i:ident, $ j:ident) => {
392 fn foo() { let a = $ i; let b = $j; }
393 }
394 }
395"#,
396 );
397 assert_expansion(&rules, "foo! { foo, bar }", "fn foo () {let a = foo ; let b = bar ;}");
398 }
399
400 // The following tests are ported from intellij-rust directly
401 // https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt
402
403 #[test]
404 fn test_path() {
405 let rules = create_rules(
406 r#"
407 macro_rules! foo {
408 ($ i:path) => {
409 fn foo() { let a = $ i; }
410 }
411 }
412"#,
413 );
414 assert_expansion(&rules, "foo! { foo }", "fn foo () {let a = foo ;}");
415 assert_expansion(
416 &rules,
417 "foo! { bar::<u8>::baz::<u8> }",
418 "fn foo () {let a = bar ::< u8 > ::baz ::< u8 > ;}",
419 );
420 }
421
422 #[test]
423 fn test_two_paths() {
424 let rules = create_rules(
425 r#"
426 macro_rules! foo {
427 ($ i:path, $ j:path) => {
428 fn foo() { let a = $ i; let b = $j; }
429 }
430 }
431"#,
432 );
433 assert_expansion(&rules, "foo! { foo, bar }", "fn foo () {let a = foo ; let b = bar ;}");
434 }
382} 435}
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs
index 2dafd68f6..ce41d7225 100644
--- a/crates/ra_mbe/src/mbe_expander.rs
+++ b/crates/ra_mbe/src/mbe_expander.rs
@@ -139,6 +139,11 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
139 Binding::Simple(tt::Leaf::from(ident).into()), 139 Binding::Simple(tt::Leaf::from(ident).into()),
140 ); 140 );
141 } 141 }
142 "path" => {
143 let path =
144 input.eat_path().ok_or(ExpandError::UnexpectedToken)?.clone();
145 res.inner.insert(text.clone(), Binding::Simple(path.into()));
146 }
142 _ => return Err(ExpandError::UnexpectedToken), 147 _ => return Err(ExpandError::UnexpectedToken),
143 } 148 }
144 } 149 }
diff --git a/crates/ra_mbe/src/subtree_parser.rs b/crates/ra_mbe/src/subtree_parser.rs
new file mode 100644
index 000000000..ce39a40bb
--- /dev/null
+++ b/crates/ra_mbe/src/subtree_parser.rs
@@ -0,0 +1,61 @@
1use crate::subtree_source::SubtreeTokenSource;
2
3use ra_parser::{TokenSource, TreeSink};
4use ra_syntax::{SyntaxKind};
5
6struct OffsetTokenSink {
7 token_pos: usize,
8}
9
10impl TreeSink for OffsetTokenSink {
11 fn token(&mut self, _kind: SyntaxKind, n_tokens: u8) {
12 self.token_pos += n_tokens as usize;
13 }
14 fn start_node(&mut self, _kind: SyntaxKind) {}
15 fn finish_node(&mut self) {}
16 fn error(&mut self, _error: ra_parser::ParseError) {}
17}
18
19pub(crate) struct Parser<'a> {
20 subtree: &'a tt::Subtree,
21 cur_pos: &'a mut usize,
22}
23
24impl<'a> Parser<'a> {
25 pub fn new(cur_pos: &'a mut usize, subtree: &'a tt::Subtree) -> Parser<'a> {
26 Parser { cur_pos, subtree }
27 }
28
29 pub fn parse_path(self) -> Option<tt::TokenTree> {
30 self.parse(ra_parser::parse_path)
31 }
32
33 fn parse<F>(self, f: F) -> Option<tt::TokenTree>
34 where
35 F: FnOnce(&dyn TokenSource, &mut dyn TreeSink),
36 {
37 let mut src = SubtreeTokenSource::new(self.subtree);
38 src.start_from_nth(*self.cur_pos);
39 let mut sink = OffsetTokenSink { token_pos: 0 };
40
41 f(&src, &mut sink);
42
43 self.finish(sink.token_pos, &mut src)
44 }
45
46 fn finish(self, parsed_token: usize, src: &mut SubtreeTokenSource) -> Option<tt::TokenTree> {
47 let res = src.bump_n(parsed_token);
48 *self.cur_pos += res.len();
49
50 let res: Vec<_> = res.into_iter().cloned().collect();
51
52 match res.len() {
53 0 => None,
54 1 => Some(res[0].clone()),
55 _ => Some(tt::TokenTree::Subtree(tt::Subtree {
56 delimiter: tt::Delimiter::None,
57 token_trees: res,
58 })),
59 }
60 }
61}
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
new file mode 100644
index 000000000..4b37c2bda
--- /dev/null
+++ b/crates/ra_mbe/src/subtree_source.rs
@@ -0,0 +1,521 @@
1use ra_parser::{TokenSource};
2use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*};
3use std::cell::{RefCell};
4
5#[derive(Debug, Clone, Eq, PartialEq)]
6struct TtToken {
7 pub kind: SyntaxKind,
8 pub is_joint_to_next: bool,
9 pub text: SmolStr,
10 pub n_tokens: usize,
11}
12
13#[derive(Debug, Clone, Eq, PartialEq)]
14enum WalkCursor {
15 DelimiterBegin(Option<TtToken>),
16 Token(usize, Option<TtToken>),
17 DelimiterEnd(Option<TtToken>),
18 Eof,
19}
20
21#[derive(Debug)]
22struct SubTreeWalker<'a> {
23 pos: usize,
24 stack: Vec<(&'a tt::Subtree, Option<usize>)>,
25 cursor: WalkCursor,
26 last_steps: Vec<usize>,
27 subtree: &'a tt::Subtree,
28}
29
30impl<'a> SubTreeWalker<'a> {
31 fn new(subtree: &tt::Subtree) -> SubTreeWalker {
32 let mut res = SubTreeWalker {
33 pos: 0,
34 stack: vec![],
35 cursor: WalkCursor::Eof,
36 last_steps: vec![],
37 subtree,
38 };
39
40 res.reset();
41 res
42 }
43
44 fn is_eof(&self) -> bool {
45 self.cursor == WalkCursor::Eof
46 }
47
48 fn reset(&mut self) {
49 self.pos = 0;
50 self.stack = vec![(self.subtree, None)];
51 self.cursor = WalkCursor::DelimiterBegin(convert_delim(self.subtree.delimiter, false));
52 self.last_steps = vec![];
53
54 while self.is_empty_delimiter() {
55 self.forward_unchecked();
56 }
57 }
58
59 // This function will fast forward the cursor,
60 // Such that backward will stop at `start_pos` point
61 fn start_from_nth(&mut self, start_pos: usize) {
62 self.reset();
63 self.pos = start_pos;
64 self.cursor = self.walk_token(start_pos, 0, false);
65
66 while self.is_empty_delimiter() {
67 self.forward_unchecked();
68 }
69 }
70
71 fn current(&self) -> Option<&TtToken> {
72 match &self.cursor {
73 WalkCursor::DelimiterBegin(t) => t.as_ref(),
74 WalkCursor::Token(_, t) => t.as_ref(),
75 WalkCursor::DelimiterEnd(t) => t.as_ref(),
76 WalkCursor::Eof => None,
77 }
78 }
79
80 fn is_empty_delimiter(&self) -> bool {
81 match &self.cursor {
82 WalkCursor::DelimiterBegin(None) => true,
83 WalkCursor::DelimiterEnd(None) => true,
84 _ => false,
85 }
86 }
87
88 /// Move cursor backward by 1 step with empty checking
89 fn backward(&mut self) {
90 if self.last_steps.is_empty() {
91 return;
92 }
93 self.pos -= 1;
94 loop {
95 self.backward_unchecked();
96 // Skip Empty delimiter
97 if self.last_steps.is_empty() || !self.is_empty_delimiter() {
98 break;
99 }
100 }
101
102 // Move forward if it is empty delimiter
103 if self.last_steps.is_empty() {
104 while self.is_empty_delimiter() {
105 self.forward_unchecked();
106 }
107 }
108 }
109
110 /// Move cursor backward by 1 step without empty check
111 ///
112 /// Depends on the current state of cursor:
113 ///
114 /// * Delimiter Begin => Pop the stack, goto last walking token (`walk_token`)
115 /// * Token => Goto prev token (`walk_token`)
116 /// * Delimiter End => Goto the last child token (`walk_token`)
117 /// * Eof => push the root subtree, and set it as Delimiter End
118 fn backward_unchecked(&mut self) {
119 if self.last_steps.is_empty() {
120 return;
121 }
122
123 let last_step = self.last_steps.pop().unwrap();
124 let do_walk_token = match self.cursor {
125 WalkCursor::DelimiterBegin(_) => None,
126 WalkCursor::Token(u, _) => Some(u),
127 WalkCursor::DelimiterEnd(_) => {
128 let (top, _) = self.stack.last().unwrap();
129 Some(top.token_trees.len())
130 }
131 WalkCursor::Eof => None,
132 };
133
134 self.cursor = match do_walk_token {
135 Some(u) => self.walk_token(u, last_step, true),
136 None => match self.cursor {
137 WalkCursor::Eof => {
138 self.stack.push((self.subtree, None));
139 WalkCursor::DelimiterEnd(convert_delim(
140 self.stack.last().unwrap().0.delimiter,
141 true,
142 ))
143 }
144 _ => {
145 let (_, last_top_cursor) = self.stack.pop().unwrap();
146 assert!(!self.stack.is_empty());
147
148 self.walk_token(last_top_cursor.unwrap(), last_step, true)
149 }
150 },
151 };
152 }
153
154 /// Move cursor forward by 1 step with empty checking
155 fn forward(&mut self) {
156 if self.is_eof() {
157 return;
158 }
159
160 self.pos += 1;
161 loop {
162 self.forward_unchecked();
163 if !self.is_empty_delimiter() {
164 break;
165 }
166 }
167 }
168
169 /// Move cursor forward by 1 step without empty checking
170 ///
171 /// Depends on the current state of cursor:
172 ///
173 /// * Delimiter Begin => Goto the first child token (`walk_token`)
174 /// * Token => Goto next token (`walk_token`)
175 /// * Delimiter End => Pop the stack, goto last walking token (`walk_token`)
176 ///
177 fn forward_unchecked(&mut self) {
178 if self.is_eof() {
179 return;
180 }
181
182 let step = self.current().map(|x| x.n_tokens).unwrap_or(1);
183 self.last_steps.push(step);
184
185 let do_walk_token = match self.cursor {
186 WalkCursor::DelimiterBegin(_) => Some((0, 0)),
187 WalkCursor::Token(u, _) => Some((u, step)),
188 WalkCursor::DelimiterEnd(_) => None,
189 _ => unreachable!(),
190 };
191
192 self.cursor = match do_walk_token {
193 Some((u, step)) => self.walk_token(u, step, false),
194 None => {
195 let (_, last_top_idx) = self.stack.pop().unwrap();
196 match self.stack.last() {
197 Some(_) => self.walk_token(last_top_idx.unwrap(), 1, false),
198 None => WalkCursor::Eof,
199 }
200 }
201 };
202 }
203
204 /// Traverse child token
205 /// Depends on the new position, it returns:
206 ///
207 /// * new position < 0 => DelimiterBegin
208 /// * new position > token_tree.len() => DelimiterEnd
209 /// * if new position is a subtree, depends on traversal direction:
210 /// ** backward => DelimiterEnd
211 /// ** forward => DelimiterBegin
212 /// * if new position is a leaf, return walk_leaf()
213 fn walk_token(&mut self, pos: usize, offset: usize, backward: bool) -> WalkCursor {
214 let (top, _) = self.stack.last().unwrap();
215
216 if backward && pos < offset {
217 return WalkCursor::DelimiterBegin(convert_delim(
218 self.stack.last().unwrap().0.delimiter,
219 false,
220 ));
221 }
222
223 if !backward && pos + offset >= top.token_trees.len() {
224 return WalkCursor::DelimiterEnd(convert_delim(
225 self.stack.last().unwrap().0.delimiter,
226 true,
227 ));
228 }
229
230 let pos = if backward { pos - offset } else { pos + offset };
231
232 match &top.token_trees[pos] {
233 tt::TokenTree::Subtree(subtree) => {
234 self.stack.push((subtree, Some(pos)));
235 let delim = convert_delim(self.stack.last().unwrap().0.delimiter, backward);
236 if backward {
237 WalkCursor::DelimiterEnd(delim)
238 } else {
239 WalkCursor::DelimiterBegin(delim)
240 }
241 }
242 tt::TokenTree::Leaf(leaf) => WalkCursor::Token(pos, Some(self.walk_leaf(leaf, pos))),
243 }
244 }
245
246 fn walk_leaf(&mut self, leaf: &tt::Leaf, pos: usize) -> TtToken {
247 match leaf {
248 tt::Leaf::Literal(l) => convert_literal(l),
249 tt::Leaf::Ident(ident) => convert_ident(ident),
250 tt::Leaf::Punct(punct) => {
251 let (top, _) = self.stack.last().unwrap();
252 convert_punct(punct, top, pos)
253 }
254 }
255 }
256}
257
258pub(crate) trait Querier {
259 fn token(&self, uidx: usize) -> (SyntaxKind, SmolStr);
260}
261
262// A wrapper class for ref cell
263#[derive(Debug)]
264pub(crate) struct WalkerOwner<'a> {
265 walker: RefCell<SubTreeWalker<'a>>,
266 offset: usize,
267}
268
269impl<'a> WalkerOwner<'a> {
270 fn new(subtree: &'a tt::Subtree) -> Self {
271 WalkerOwner { walker: RefCell::new(SubTreeWalker::new(subtree)), offset: 0 }
272 }
273
274 fn get<'b>(&self, pos: usize) -> Option<TtToken> {
275 self.set_walker_pos(pos);
276 let walker = self.walker.borrow();
277 walker.current().cloned()
278 }
279
280 fn start_from_nth(&mut self, pos: usize) {
281 self.offset = pos;
282 self.walker.borrow_mut().start_from_nth(pos);
283 }
284
285 fn set_walker_pos(&self, mut pos: usize) {
286 pos += self.offset;
287 let mut walker = self.walker.borrow_mut();
288 while pos > walker.pos && !walker.is_eof() {
289 walker.forward();
290 }
291 while pos < walker.pos {
292 walker.backward();
293 }
294 }
295
296 fn collect_token_trees(&mut self, n: usize) -> Vec<&tt::TokenTree> {
297 self.start_from_nth(self.offset);
298
299 let mut res = vec![];
300 let mut walker = self.walker.borrow_mut();
301
302 while walker.pos - self.offset < n {
303 if let WalkCursor::Token(u, tt) = &walker.cursor {
304 if walker.stack.len() == 1 {
305 // We only collect the topmost child
306 res.push(&walker.stack[0].0.token_trees[*u]);
307 if let Some(tt) = tt {
308 for i in 0..tt.n_tokens - 1 {
309 res.push(&walker.stack[0].0.token_trees[u + i]);
310 }
311 }
312 }
313 }
314
315 walker.forward();
316 }
317
318 res
319 }
320}
321
322impl<'a> Querier for WalkerOwner<'a> {
323 fn token(&self, uidx: usize) -> (SyntaxKind, SmolStr) {
324 let tkn = self.get(uidx).unwrap();
325 (tkn.kind, tkn.text)
326 }
327}
328
329pub(crate) struct SubtreeTokenSource<'a> {
330 walker: WalkerOwner<'a>,
331}
332
333impl<'a> SubtreeTokenSource<'a> {
334 pub fn new(subtree: &tt::Subtree) -> SubtreeTokenSource {
335 SubtreeTokenSource { walker: WalkerOwner::new(subtree) }
336 }
337
338 pub fn start_from_nth(&mut self, n: usize) {
339 self.walker.start_from_nth(n);
340 }
341
342 pub fn querier<'b>(&'a self) -> &'b WalkerOwner<'a>
343 where
344 'a: 'b,
345 {
346 &self.walker
347 }
348
349 pub(crate) fn bump_n(&mut self, parsed_tokens: usize) -> Vec<&tt::TokenTree> {
350 let res = self.walker.collect_token_trees(parsed_tokens);
351 res
352 }
353}
354
355impl<'a> TokenSource for SubtreeTokenSource<'a> {
356 fn token_kind(&self, pos: usize) -> SyntaxKind {
357 if let Some(tok) = self.walker.get(pos) {
358 tok.kind
359 } else {
360 SyntaxKind::EOF
361 }
362 }
363 fn is_token_joint_to_next(&self, pos: usize) -> bool {
364 self.walker.get(pos).unwrap().is_joint_to_next
365 }
366 fn is_keyword(&self, pos: usize, kw: &str) -> bool {
367 self.walker.get(pos).unwrap().text == *kw
368 }
369}
370
371struct TokenPeek<'a, I>
372where
373 I: Iterator<Item = &'a tt::TokenTree>,
374{
375 iter: itertools::MultiPeek<I>,
376}
377
378// helper function
379fn to_punct(tt: &tt::TokenTree) -> Option<&tt::Punct> {
380 if let tt::TokenTree::Leaf(tt::Leaf::Punct(pp)) = tt {
381 return Some(pp);
382 }
383 None
384}
385
386impl<'a, I> TokenPeek<'a, I>
387where
388 I: Iterator<Item = &'a tt::TokenTree>,
389{
390 pub fn new(iter: I) -> Self {
391 TokenPeek { iter: itertools::multipeek(iter) }
392 }
393
394 fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> {
395 if p.spacing != tt::Spacing::Joint {
396 return None;
397 }
398
399 self.iter.reset_peek();
400 let p1 = to_punct(self.iter.peek()?)?;
401 Some(((p.char, p1.char), p1.spacing == tt::Spacing::Joint))
402 }
403
404 fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> {
405 self.current_punct2(p).and_then(|((p0, p1), last_joint)| {
406 if !last_joint {
407 None
408 } else {
409 let p2 = to_punct(*self.iter.peek()?)?;
410 Some(((p0, p1, p2.char), p2.spacing == tt::Spacing::Joint))
411 }
412 })
413 }
414}
415
416fn convert_multi_char_punct<'b, I>(
417 p: &tt::Punct,
418 iter: &mut TokenPeek<'b, I>,
419) -> Option<(SyntaxKind, bool, &'static str, usize)>
420where
421 I: Iterator<Item = &'b tt::TokenTree>,
422{
423 if let Some((m, is_joint_to_next)) = iter.current_punct3(p) {
424 if let Some((kind, text)) = match m {
425 ('<', '<', '=') => Some((SHLEQ, "<<=")),
426 ('>', '>', '=') => Some((SHREQ, ">>=")),
427 ('.', '.', '.') => Some((DOTDOTDOT, "...")),
428 ('.', '.', '=') => Some((DOTDOTEQ, "..=")),
429 _ => None,
430 } {
431 return Some((kind, is_joint_to_next, text, 3));
432 }
433 }
434
435 if let Some((m, is_joint_to_next)) = iter.current_punct2(p) {
436 if let Some((kind, text)) = match m {
437 ('<', '<') => Some((SHL, "<<")),
438 ('>', '>') => Some((SHR, ">>")),
439
440 ('|', '|') => Some((PIPEPIPE, "||")),
441 ('&', '&') => Some((AMPAMP, "&&")),
442 ('%', '=') => Some((PERCENTEQ, "%=")),
443 ('*', '=') => Some((STAREQ, "*=")),
444 ('/', '=') => Some((SLASHEQ, "/=")),
445 ('^', '=') => Some((CARETEQ, "^=")),
446
447 ('&', '=') => Some((AMPEQ, "&=")),
448 ('|', '=') => Some((PIPEEQ, "|=")),
449 ('-', '=') => Some((MINUSEQ, "-=")),
450 ('+', '=') => Some((PLUSEQ, "+=")),
451 ('>', '=') => Some((GTEQ, ">=")),
452 ('<', '=') => Some((LTEQ, "<=")),
453
454 ('-', '>') => Some((THIN_ARROW, "->")),
455 ('!', '=') => Some((NEQ, "!=")),
456 ('=', '>') => Some((FAT_ARROW, "=>")),
457 ('=', '=') => Some((EQEQ, "==")),
458 ('.', '.') => Some((DOTDOT, "..")),
459 (':', ':') => Some((COLONCOLON, "::")),
460
461 _ => None,
462 } {
463 return Some((kind, is_joint_to_next, text, 2));
464 }
465 }
466
467 None
468}
469
470fn convert_delim(d: tt::Delimiter, closing: bool) -> Option<TtToken> {
471 let (kinds, texts) = match d {
472 tt::Delimiter::Parenthesis => ([L_PAREN, R_PAREN], "()"),
473 tt::Delimiter::Brace => ([L_CURLY, R_CURLY], "{}"),
474 tt::Delimiter::Bracket => ([L_BRACK, R_BRACK], "[]"),
475 tt::Delimiter::None => return None,
476 };
477
478 let idx = closing as usize;
479 let kind = kinds[idx];
480 let text = &texts[idx..texts.len() - (1 - idx)];
481 Some(TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text), n_tokens: 1 })
482}
483
484fn convert_literal(l: &tt::Literal) -> TtToken {
485 TtToken {
486 kind: classify_literal(&l.text).unwrap().kind,
487 is_joint_to_next: false,
488 text: l.text.clone(),
489 n_tokens: 1,
490 }
491}
492
493fn convert_ident(ident: &tt::Ident) -> TtToken {
494 let kind = SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT);
495 TtToken { kind, is_joint_to_next: false, text: ident.text.clone(), n_tokens: 1 }
496}
497
498fn convert_punct(p: &tt::Punct, parent: &tt::Subtree, next: usize) -> TtToken {
499 let iter = parent.token_trees[next + 1..].iter();
500 let mut peek = TokenPeek::new(iter);
501
502 if let Some((kind, is_joint_to_next, text, size)) = convert_multi_char_punct(p, &mut peek) {
503 TtToken { kind, is_joint_to_next, text: text.into(), n_tokens: size }
504 } else {
505 let kind = match p.char {
506 // lexer may produce compound tokens for these ones
507 '.' => DOT,
508 ':' => COLON,
509 '=' => EQ,
510 '!' => EXCL,
511 '-' => MINUS,
512 c => SyntaxKind::from_char(c).unwrap(),
513 };
514 let text = {
515 let mut buf = [0u8; 4];
516 let s: &str = p.char.encode_utf8(&mut buf);
517 SmolStr::new(s)
518 };
519 TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text, n_tokens: 1 }
520 }
521}
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 139a0fd33..19c17bd55 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -1,9 +1,11 @@
1use ra_parser::{TokenSource, TreeSink, ParseError}; 1use ra_parser::{TreeSink, ParseError};
2use ra_syntax::{ 2use ra_syntax::{
3 AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement, 3 AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement,
4 ast, SyntaxKind::*, TextUnit, classify_literal 4 ast, SyntaxKind::*, TextUnit
5}; 5};
6 6
7use crate::subtree_source::{SubtreeTokenSource, Querier};
8
7/// Maps `tt::TokenId` to the relative range of the original token. 9/// Maps `tt::TokenId` to the relative range of the original token.
8#[derive(Default)] 10#[derive(Default)]
9pub struct TokenMap { 11pub struct TokenMap {
@@ -22,8 +24,8 @@ pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)
22 24
23/// Parses the token tree (result of macro expansion) as a sequence of items 25/// Parses the token tree (result of macro expansion) as a sequence of items
24pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> TreeArc<ast::SourceFile> { 26pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> TreeArc<ast::SourceFile> {
25 let token_source = TtTokenSource::new(tt); 27 let token_source = SubtreeTokenSource::new(tt);
26 let mut tree_sink = TtTreeSink::new(&token_source.tokens); 28 let mut tree_sink = TtTreeSink::new(token_source.querier());
27 ra_parser::parse(&token_source, &mut tree_sink); 29 ra_parser::parse(&token_source, &mut tree_sink);
28 let syntax = tree_sink.inner.finish(); 30 let syntax = tree_sink.inner.finish();
29 ast::SourceFile::cast(&syntax).unwrap().to_owned() 31 ast::SourceFile::cast(&syntax).unwrap().to_owned()
@@ -103,229 +105,19 @@ fn convert_tt(
103 Some(res) 105 Some(res)
104} 106}
105 107
106#[derive(Debug)] 108struct TtTreeSink<'a, Q: Querier> {
107struct TtTokenSource {
108 tokens: Vec<TtToken>,
109}
110
111#[derive(Debug)]
112struct TtToken {
113 kind: SyntaxKind,
114 is_joint_to_next: bool,
115 text: SmolStr,
116}
117
118// Some helper functions
119fn to_punct(tt: &tt::TokenTree) -> Option<&tt::Punct> {
120 if let tt::TokenTree::Leaf(tt::Leaf::Punct(pp)) = tt {
121 return Some(pp);
122 }
123 None
124}
125
126struct TokenPeek<'a, I>
127where
128 I: Iterator<Item = &'a tt::TokenTree>,
129{
130 iter: itertools::MultiPeek<I>,
131}
132
133impl<'a, I> TokenPeek<'a, I>
134where
135 I: Iterator<Item = &'a tt::TokenTree>,
136{
137 fn next(&mut self) -> Option<&tt::TokenTree> {
138 self.iter.next()
139 }
140
141 fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> {
142 if p.spacing != tt::Spacing::Joint {
143 return None;
144 }
145
146 self.iter.reset_peek();
147 let p1 = to_punct(self.iter.peek()?)?;
148 Some(((p.char, p1.char), p1.spacing == tt::Spacing::Joint))
149 }
150
151 fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> {
152 self.current_punct2(p).and_then(|((p0, p1), last_joint)| {
153 if !last_joint {
154 None
155 } else {
156 let p2 = to_punct(*self.iter.peek()?)?;
157 Some(((p0, p1, p2.char), p2.spacing == tt::Spacing::Joint))
158 }
159 })
160 }
161}
162
163impl TtTokenSource {
164 fn new(tt: &tt::Subtree) -> TtTokenSource {
165 let mut res = TtTokenSource { tokens: Vec::new() };
166 res.convert_subtree(tt);
167 res
168 }
169 fn convert_subtree(&mut self, sub: &tt::Subtree) {
170 self.push_delim(sub.delimiter, false);
171 let mut peek = TokenPeek { iter: itertools::multipeek(sub.token_trees.iter()) };
172 while let Some(tt) = peek.iter.next() {
173 self.convert_tt(tt, &mut peek);
174 }
175 self.push_delim(sub.delimiter, true)
176 }
177
178 fn convert_tt<'a, I>(&mut self, tt: &tt::TokenTree, iter: &mut TokenPeek<'a, I>)
179 where
180 I: Iterator<Item = &'a tt::TokenTree>,
181 {
182 match tt {
183 tt::TokenTree::Leaf(token) => self.convert_token(token, iter),
184 tt::TokenTree::Subtree(sub) => self.convert_subtree(sub),
185 }
186 }
187
188 fn convert_token<'a, I>(&mut self, token: &tt::Leaf, iter: &mut TokenPeek<'a, I>)
189 where
190 I: Iterator<Item = &'a tt::TokenTree>,
191 {
192 let tok = match token {
193 tt::Leaf::Literal(l) => TtToken {
194 kind: classify_literal(&l.text).unwrap().kind,
195 is_joint_to_next: false,
196 text: l.text.clone(),
197 },
198 tt::Leaf::Punct(p) => {
199 if let Some(tt) = Self::convert_multi_char_punct(p, iter) {
200 tt
201 } else {
202 let kind = match p.char {
203 // lexer may produce compound tokens for these ones
204 '.' => DOT,
205 ':' => COLON,
206 '=' => EQ,
207 '!' => EXCL,
208 '-' => MINUS,
209 c => SyntaxKind::from_char(c).unwrap(),
210 };
211 let text = {
212 let mut buf = [0u8; 4];
213 let s: &str = p.char.encode_utf8(&mut buf);
214 SmolStr::new(s)
215 };
216 TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text }
217 }
218 }
219 tt::Leaf::Ident(ident) => {
220 let kind = SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT);
221 TtToken { kind, is_joint_to_next: false, text: ident.text.clone() }
222 }
223 };
224 self.tokens.push(tok)
225 }
226
227 fn convert_multi_char_punct<'a, I>(
228 p: &tt::Punct,
229 iter: &mut TokenPeek<'a, I>,
230 ) -> Option<TtToken>
231 where
232 I: Iterator<Item = &'a tt::TokenTree>,
233 {
234 if let Some((m, is_joint_to_next)) = iter.current_punct3(p) {
235 if let Some((kind, text)) = match m {
236 ('<', '<', '=') => Some((SHLEQ, "<<=")),
237 ('>', '>', '=') => Some((SHREQ, ">>=")),
238 ('.', '.', '.') => Some((DOTDOTDOT, "...")),
239 ('.', '.', '=') => Some((DOTDOTEQ, "..=")),
240 _ => None,
241 } {
242 iter.next();
243 iter.next();
244 return Some(TtToken { kind, is_joint_to_next, text: text.into() });
245 }
246 }
247
248 if let Some((m, is_joint_to_next)) = iter.current_punct2(p) {
249 if let Some((kind, text)) = match m {
250 ('<', '<') => Some((SHL, "<<")),
251 ('>', '>') => Some((SHR, ">>")),
252
253 ('|', '|') => Some((PIPEPIPE, "||")),
254 ('&', '&') => Some((AMPAMP, "&&")),
255 ('%', '=') => Some((PERCENTEQ, "%=")),
256 ('*', '=') => Some((STAREQ, "*=")),
257 ('/', '=') => Some((SLASHEQ, "/=")),
258 ('^', '=') => Some((CARETEQ, "^=")),
259
260 ('&', '=') => Some((AMPEQ, "&=")),
261 ('|', '=') => Some((PIPEEQ, "|=")),
262 ('-', '=') => Some((MINUSEQ, "-=")),
263 ('+', '=') => Some((PLUSEQ, "+=")),
264 ('>', '=') => Some((GTEQ, ">=")),
265 ('<', '=') => Some((LTEQ, "<=")),
266
267 ('-', '>') => Some((THIN_ARROW, "->")),
268 ('!', '=') => Some((NEQ, "!=")),
269 ('=', '>') => Some((FAT_ARROW, "=>")),
270 ('=', '=') => Some((EQEQ, "==")),
271 ('.', '.') => Some((DOTDOT, "..")),
272 (':', ':') => Some((COLONCOLON, "::")),
273
274 _ => None,
275 } {
276 iter.next();
277 return Some(TtToken { kind, is_joint_to_next, text: text.into() });
278 }
279 }
280
281 None
282 }
283
284 fn push_delim(&mut self, d: tt::Delimiter, closing: bool) {
285 let (kinds, texts) = match d {
286 tt::Delimiter::Parenthesis => ([L_PAREN, R_PAREN], "()"),
287 tt::Delimiter::Brace => ([L_CURLY, R_CURLY], "{}"),
288 tt::Delimiter::Bracket => ([L_BRACK, R_BRACK], "[]"),
289 tt::Delimiter::None => return,
290 };
291 let idx = closing as usize;
292 let kind = kinds[idx];
293 let text = &texts[idx..texts.len() - (1 - idx)];
294 let tok = TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) };
295 self.tokens.push(tok)
296 }
297}
298
299impl TokenSource for TtTokenSource {
300 fn token_kind(&self, pos: usize) -> SyntaxKind {
301 if let Some(tok) = self.tokens.get(pos) {
302 tok.kind
303 } else {
304 SyntaxKind::EOF
305 }
306 }
307 fn is_token_joint_to_next(&self, pos: usize) -> bool {
308 self.tokens[pos].is_joint_to_next
309 }
310 fn is_keyword(&self, pos: usize, kw: &str) -> bool {
311 self.tokens[pos].text == *kw
312 }
313}
314
315#[derive(Default)]
316struct TtTreeSink<'a> {
317 buf: String, 109 buf: String,
318 tokens: &'a [TtToken], 110 src_querier: &'a Q,
319 text_pos: TextUnit, 111 text_pos: TextUnit,
320 token_pos: usize, 112 token_pos: usize,
321 inner: SyntaxTreeBuilder, 113 inner: SyntaxTreeBuilder,
322} 114}
323 115
324impl<'a> TtTreeSink<'a> { 116impl<'a, Q: Querier> TtTreeSink<'a, Q> {
325 fn new(tokens: &'a [TtToken]) -> TtTreeSink { 117 fn new(src_querier: &'a Q) -> Self {
326 TtTreeSink { 118 TtTreeSink {
327 buf: String::new(), 119 buf: String::new(),
328 tokens, 120 src_querier,
329 text_pos: 0.into(), 121 text_pos: 0.into(),
330 token_pos: 0, 122 token_pos: 0,
331 inner: SyntaxTreeBuilder::default(), 123 inner: SyntaxTreeBuilder::default(),
@@ -333,10 +125,10 @@ impl<'a> TtTreeSink<'a> {
333 } 125 }
334} 126}
335 127
336impl<'a> TreeSink for TtTreeSink<'a> { 128impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> {
337 fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { 129 fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
338 for _ in 0..n_tokens { 130 for _ in 0..n_tokens {
339 self.buf += self.tokens[self.token_pos].text.as_str(); 131 self.buf += &self.src_querier.token(self.token_pos).1;
340 self.token_pos += 1; 132 self.token_pos += 1;
341 } 133 }
342 self.text_pos += TextUnit::of_str(&self.buf); 134 self.text_pos += TextUnit::of_str(&self.buf);
@@ -380,21 +172,23 @@ mod tests {
380 "#, 172 "#,
381 ); 173 );
382 let expansion = expand(&rules, "literals!(foo)"); 174 let expansion = expand(&rules, "literals!(foo)");
383 let tt_src = TtTokenSource::new(&expansion); 175 let tt_src = SubtreeTokenSource::new(&expansion);
176
177 let query = tt_src.querier();
384 178
385 // [{] 179 // [{]
386 // [let] [a] [=] ['c'] [;] 180 // [let] [a] [=] ['c'] [;]
387 assert_eq!(tt_src.tokens[1 + 3].text, "'c'"); 181 assert_eq!(query.token(1 + 3).1, "'c'");
388 assert_eq!(tt_src.tokens[1 + 3].kind, CHAR); 182 assert_eq!(query.token(1 + 3).0, CHAR);
389 // [let] [c] [=] [1000] [;] 183 // [let] [c] [=] [1000] [;]
390 assert_eq!(tt_src.tokens[1 + 5 + 3].text, "1000"); 184 assert_eq!(query.token(1 + 5 + 3).1, "1000");
391 assert_eq!(tt_src.tokens[1 + 5 + 3].kind, INT_NUMBER); 185 assert_eq!(query.token(1 + 5 + 3).0, INT_NUMBER);
392 // [let] [f] [=] [12E+99_f64] [;] 186 // [let] [f] [=] [12E+99_f64] [;]
393 assert_eq!(tt_src.tokens[1 + 10 + 3].text, "12E+99_f64"); 187 assert_eq!(query.token(1 + 10 + 3).1, "12E+99_f64");
394 assert_eq!(tt_src.tokens[1 + 10 + 3].kind, FLOAT_NUMBER); 188 assert_eq!(query.token(1 + 10 + 3).0, FLOAT_NUMBER);
395 189
396 // [let] [s] [=] ["rust1"] [;] 190 // [let] [s] [=] ["rust1"] [;]
397 assert_eq!(tt_src.tokens[1 + 15 + 3].text, "\"rust1\""); 191 assert_eq!(query.token(1 + 15 + 3).1, "\"rust1\"");
398 assert_eq!(tt_src.tokens[1 + 15 + 3].kind, STRING); 192 assert_eq!(query.token(1 + 15 + 3).0, STRING);
399 } 193 }
400} 194}
diff --git a/crates/ra_mbe/src/tt_cursor.rs b/crates/ra_mbe/src/tt_cursor.rs
index 3128cb9ae..d29faa77c 100644
--- a/crates/ra_mbe/src/tt_cursor.rs
+++ b/crates/ra_mbe/src/tt_cursor.rs
@@ -1,4 +1,5 @@
1use crate::ParseError; 1use crate::ParseError;
2use crate::subtree_parser::Parser;
2 3
3#[derive(Clone)] 4#[derive(Clone)]
4pub(crate) struct TtCursor<'a> { 5pub(crate) struct TtCursor<'a> {
@@ -78,6 +79,11 @@ impl<'a> TtCursor<'a> {
78 }) 79 })
79 } 80 }
80 81
82 pub(crate) fn eat_path(&mut self) -> Option<tt::TokenTree> {
83 let parser = Parser::new(&mut self.pos, self.subtree);
84 parser.parse_path()
85 }
86
81 pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ParseError> { 87 pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ParseError> {
82 if self.at_char(char) { 88 if self.at_char(char) {
83 self.bump(); 89 self.bump();
diff --git a/crates/ra_parser/src/grammar.rs b/crates/ra_parser/src/grammar.rs
index b2ffeff8c..c5f510e6b 100644
--- a/crates/ra_parser/src/grammar.rs
+++ b/crates/ra_parser/src/grammar.rs
@@ -49,6 +49,10 @@ pub(crate) fn root(p: &mut Parser) {
49 m.complete(p, SOURCE_FILE); 49 m.complete(p, SOURCE_FILE);
50} 50}
51 51
52pub(crate) fn path(p: &mut Parser) {
53 paths::type_path(p);
54}
55
52pub(crate) fn reparser( 56pub(crate) fn reparser(
53 node: SyntaxKind, 57 node: SyntaxKind,
54 first_child: Option<SyntaxKind>, 58 first_child: Option<SyntaxKind>,
diff --git a/crates/ra_parser/src/lib.rs b/crates/ra_parser/src/lib.rs
index 30ba06aac..3ceeeebd7 100644
--- a/crates/ra_parser/src/lib.rs
+++ b/crates/ra_parser/src/lib.rs
@@ -61,6 +61,14 @@ pub fn parse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
61 event::process(tree_sink, events); 61 event::process(tree_sink, events);
62} 62}
63 63
64/// Parse given tokens into the given sink as a path
65pub fn parse_path(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
66 let mut p = parser::Parser::new(token_source);
67 grammar::path(&mut p);
68 let events = p.finish();
69 event::process(tree_sink, events);
70}
71
64/// A parsing function for a specific braced-block. 72/// A parsing function for a specific braced-block.
65pub struct Reparser(fn(&mut parser::Parser)); 73pub struct Reparser(fn(&mut parser::Parser));
66 74