diff options
author | bors[bot] <bors[bot]@users.noreply.github.com> | 2019-05-02 07:32:42 +0100 |
---|---|---|
committer | bors[bot] <bors[bot]@users.noreply.github.com> | 2019-05-02 07:32:42 +0100 |
commit | b0e7022afe17f35fd0f6960a279e375b19061919 (patch) | |
tree | 487b133512986fb6eb6eac4068e0045e75cfd2ac /crates | |
parent | 12629d5e4f2d949eedb707dedad4d75eff09e683 (diff) | |
parent | 779676f782565cfc936db79db48e2e7b62adf3a3 (diff) |
Merge #1224
1224: Remove unused multi-char puncts code and add space between puncts r=matklad a=edwin0cheng
After #1213, the parser only needs single-char puncts, so this PR does the following:
* Remove the code which handles multi-char puncts
* Remove the code which handles backward traversal in `SubtreeSource`, because the results are cached since #1195
* Add a space between two consecutive puncts during the `tt` to `SyntaxNode` conversion.
Note that the space should only be added if neither punct is a delimiter (see the sketch after the commit message).
Co-authored-by: Edwin Cheng <[email protected]>
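For illustration, the spacing rule from the last bullet can be sketched as a small standalone check (the string-based token model and the `needs_space` helper are assumptions for this sketch, not the crate's actual `TtTreeSink` API): a space is emitted between two adjacent puncts only when the first is not marked joint to the next and neither of them is a delimiter.

```rust
// Standalone sketch of the spacing rule; `needs_space` and the
// string-based token model are illustrative assumptions only.

fn is_delimiter(text: &str) -> bool {
    matches!(text, "(" | ")" | "[" | "]" | "{" | "}")
}

fn is_punct(text: &str) -> bool {
    !text.is_empty() && text.chars().all(|c| c.is_ascii_punctuation())
}

/// Should a space separate `prev` and `cur` when rendering the token
/// stream back into source text?
fn needs_space(prev: &str, prev_joint_to_next: bool, cur: &str) -> bool {
    !prev_joint_to_next
        && is_punct(prev) && !is_delimiter(prev)
        && is_punct(cur) && !is_delimiter(cur)
}

fn main() {
    assert!(needs_space("-", false, ">"));    // separate puncts render as `- >`
    assert!(!needs_space("-", true, ">"));    // joint puncts stay glued: `->`
    assert!(!needs_space("!", false, "("));   // a delimiter is involved: `!(`
    assert!(!needs_space("foo", false, "!")); // idents are not affected
}
```

The real logic lives in `TtTreeSink::token` in `syntax_bridge.rs` (see the hunk below), which additionally consults `src_querier` for the previous token's joint flag.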
Diffstat (limited to 'crates')
-rw-r--r-- | crates/ra_mbe/src/subtree_source.rs | 223
-rw-r--r-- | crates/ra_mbe/src/syntax_bridge.rs | 64
-rw-r--r-- | crates/ra_mbe/src/tt_cursor.rs | 46
3 files changed, 120 insertions, 213 deletions
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs index 278d046fb..8176296e6 100644 --- a/crates/ra_mbe/src/subtree_source.rs +++ b/crates/ra_mbe/src/subtree_source.rs | |||
@@ -45,20 +45,6 @@ impl<'a> TokenSeq<'a> { | |||
45 | } | 45 | } |
46 | } | 46 | } |
47 | } | 47 | } |
48 | |||
49 | fn len(&self) -> usize { | ||
50 | match self { | ||
51 | TokenSeq::Subtree(subtree) => subtree.token_trees.len() + 2, | ||
52 | TokenSeq::Seq(tokens) => tokens.len(), | ||
53 | } | ||
54 | } | ||
55 | |||
56 | fn child_slice(&self, pos: usize) -> &[tt::TokenTree] { | ||
57 | match self { | ||
58 | TokenSeq::Subtree(subtree) => &subtree.token_trees[pos - 1..], | ||
59 | TokenSeq::Seq(tokens) => &tokens[pos..], | ||
60 | } | ||
61 | } | ||
62 | } | 48 | } |
63 | 49 | ||
64 | #[derive(Debug, Clone, Eq, PartialEq)] | 50 | #[derive(Debug, Clone, Eq, PartialEq)] |
@@ -66,7 +52,6 @@ struct TtToken { | |||
66 | pub kind: SyntaxKind, | 52 | pub kind: SyntaxKind, |
67 | pub is_joint_to_next: bool, | 53 | pub is_joint_to_next: bool, |
68 | pub text: SmolStr, | 54 | pub text: SmolStr, |
69 | pub n_tokens: usize, | ||
70 | } | 55 | } |
71 | 56 | ||
72 | #[derive(Debug, Clone, Eq, PartialEq)] | 57 | #[derive(Debug, Clone, Eq, PartialEq)] |
@@ -80,19 +65,12 @@ struct SubTreeWalker<'a> { | |||
80 | pos: usize, | 65 | pos: usize, |
81 | stack: Vec<(TokenSeq<'a>, usize)>, | 66 | stack: Vec<(TokenSeq<'a>, usize)>, |
82 | cursor: WalkCursor, | 67 | cursor: WalkCursor, |
83 | last_steps: Vec<usize>, | ||
84 | ts: TokenSeq<'a>, | 68 | ts: TokenSeq<'a>, |
85 | } | 69 | } |
86 | 70 | ||
87 | impl<'a> SubTreeWalker<'a> { | 71 | impl<'a> SubTreeWalker<'a> { |
88 | fn new(ts: TokenSeq<'a>) -> SubTreeWalker { | 72 | fn new(ts: TokenSeq<'a>) -> SubTreeWalker { |
89 | let mut res = SubTreeWalker { | 73 | let mut res = SubTreeWalker { pos: 0, stack: vec![], cursor: WalkCursor::Eof, ts }; |
90 | pos: 0, | ||
91 | stack: vec![], | ||
92 | cursor: WalkCursor::Eof, | ||
93 | last_steps: vec![], | ||
94 | ts, | ||
95 | }; | ||
96 | 74 | ||
97 | res.reset(); | 75 | res.reset(); |
98 | res | 76 | res |
@@ -105,7 +83,6 @@ impl<'a> SubTreeWalker<'a> { | |||
105 | fn reset(&mut self) { | 83 | fn reset(&mut self) { |
106 | self.pos = 0; | 84 | self.pos = 0; |
107 | self.stack = vec![]; | 85 | self.stack = vec![]; |
108 | self.last_steps = vec![]; | ||
109 | 86 | ||
110 | self.cursor = match self.ts.get(0) { | 87 | self.cursor = match self.ts.get(0) { |
111 | DelimToken::Token(token) => match token { | 88 | DelimToken::Token(token) => match token { |
@@ -114,10 +91,7 @@ impl<'a> SubTreeWalker<'a> { | |||
114 | self.stack.push((ts, 0)); | 91 | self.stack.push((ts, 0)); |
115 | WalkCursor::Token(0, convert_delim(subtree.delimiter, false)) | 92 | WalkCursor::Token(0, convert_delim(subtree.delimiter, false)) |
116 | } | 93 | } |
117 | tt::TokenTree::Leaf(leaf) => { | 94 | tt::TokenTree::Leaf(leaf) => WalkCursor::Token(0, convert_leaf(leaf)), |
118 | let next_tokens = self.ts.child_slice(0); | ||
119 | WalkCursor::Token(0, convert_leaf(&next_tokens, leaf)) | ||
120 | } | ||
121 | }, | 95 | }, |
122 | DelimToken::Delim(delim, is_end) => { | 96 | DelimToken::Delim(delim, is_end) => { |
123 | assert!(!is_end); | 97 | assert!(!is_end); |
@@ -138,24 +112,6 @@ impl<'a> SubTreeWalker<'a> { | |||
138 | self.stack.last().map(|(t, _)| t).unwrap_or(&self.ts) | 112 | self.stack.last().map(|(t, _)| t).unwrap_or(&self.ts) |
139 | } | 113 | } |
140 | 114 | ||
141 | /// Move cursor backward by 1 step | ||
142 | fn backward(&mut self) { | ||
143 | if self.last_steps.is_empty() { | ||
144 | return; | ||
145 | } | ||
146 | |||
147 | self.pos -= 1; | ||
148 | let last_step = self.last_steps.pop().unwrap(); | ||
149 | |||
150 | self.cursor = match self.cursor { | ||
151 | WalkCursor::Token(idx, _) => self.walk_token(idx, last_step, true), | ||
152 | WalkCursor::Eof => { | ||
153 | let len = self.top().len(); | ||
154 | self.walk_token(len, last_step, true) | ||
155 | } | ||
156 | } | ||
157 | } | ||
158 | |||
159 | /// Move cursor forward by 1 step | 115 | /// Move cursor forward by 1 step |
160 | fn forward(&mut self) { | 116 | fn forward(&mut self) { |
161 | if self.is_eof() { | 117 | if self.is_eof() { |
@@ -163,37 +119,24 @@ impl<'a> SubTreeWalker<'a> { | |||
163 | } | 119 | } |
164 | self.pos += 1; | 120 | self.pos += 1; |
165 | 121 | ||
166 | let step = self.current().map(|x| x.n_tokens).unwrap_or(1); | ||
167 | self.last_steps.push(step); | ||
168 | |||
169 | if let WalkCursor::Token(u, _) = self.cursor { | 122 | if let WalkCursor::Token(u, _) = self.cursor { |
170 | self.cursor = self.walk_token(u, step, false) | 123 | self.cursor = self.walk_token(u) |
171 | } | 124 | } |
172 | } | 125 | } |
173 | 126 | ||
174 | /// Traversal child token | 127 | /// Traversal child token |
175 | fn walk_token(&mut self, pos: usize, offset: usize, backward: bool) -> WalkCursor { | 128 | fn walk_token(&mut self, pos: usize) -> WalkCursor { |
176 | let top = self.stack.last().map(|(t, _)| t).unwrap_or(&self.ts); | 129 | let top = self.stack.last().map(|(t, _)| t).unwrap_or(&self.ts); |
177 | 130 | let pos = pos + 1; | |
178 | if backward && pos < offset { | ||
179 | let (_, last_idx) = self.stack.pop().unwrap(); | ||
180 | return self.walk_token(last_idx, offset, backward); | ||
181 | } | ||
182 | |||
183 | let pos = if backward { pos - offset } else { pos + offset }; | ||
184 | 131 | ||
185 | match top.get(pos) { | 132 | match top.get(pos) { |
186 | DelimToken::Token(token) => match token { | 133 | DelimToken::Token(token) => match token { |
187 | tt::TokenTree::Subtree(subtree) => { | 134 | tt::TokenTree::Subtree(subtree) => { |
188 | let ts = TokenSeq::from(subtree); | 135 | let ts = TokenSeq::from(subtree); |
189 | let new_idx = if backward { ts.len() - 1 } else { 0 }; | ||
190 | self.stack.push((ts, pos)); | 136 | self.stack.push((ts, pos)); |
191 | WalkCursor::Token(new_idx, convert_delim(subtree.delimiter, backward)) | 137 | WalkCursor::Token(0, convert_delim(subtree.delimiter, false)) |
192 | } | ||
193 | tt::TokenTree::Leaf(leaf) => { | ||
194 | let next_tokens = top.child_slice(pos); | ||
195 | WalkCursor::Token(pos, convert_leaf(&next_tokens, leaf)) | ||
196 | } | 138 | } |
139 | tt::TokenTree::Leaf(leaf) => WalkCursor::Token(pos, convert_leaf(leaf)), | ||
197 | }, | 140 | }, |
198 | DelimToken::Delim(delim, is_end) => { | 141 | DelimToken::Delim(delim, is_end) => { |
199 | WalkCursor::Token(pos, convert_delim(*delim, is_end)) | 142 | WalkCursor::Token(pos, convert_delim(*delim, is_end)) |
@@ -201,8 +144,7 @@ impl<'a> SubTreeWalker<'a> { | |||
201 | DelimToken::End => { | 144 | DelimToken::End => { |
202 | // it is the top level | 145 | // it is the top level |
203 | if let Some((_, last_idx)) = self.stack.pop() { | 146 | if let Some((_, last_idx)) = self.stack.pop() { |
204 | assert!(!backward); | 147 | self.walk_token(last_idx) |
205 | self.walk_token(last_idx, offset, backward) | ||
206 | } else { | 148 | } else { |
207 | WalkCursor::Eof | 149 | WalkCursor::Eof |
208 | } | 150 | } |
@@ -237,12 +179,9 @@ impl<'a> WalkerOwner<'a> { | |||
237 | } | 179 | } |
238 | 180 | ||
239 | while pos >= cached.len() { | 181 | while pos >= cached.len() { |
240 | let len = cached.len(); | 182 | self.set_pos(cached.len()); |
241 | cached.push({ | 183 | let walker = self.walker.borrow(); |
242 | self.set_pos(len); | 184 | cached.push(walker.current().cloned()); |
243 | let walker = self.walker.borrow(); | ||
244 | walker.current().cloned() | ||
245 | }); | ||
246 | } | 185 | } |
247 | 186 | ||
248 | return cached[pos].clone(); | 187 | return cached[pos].clone(); |
@@ -250,12 +189,11 @@ impl<'a> WalkerOwner<'a> { | |||
250 | 189 | ||
251 | fn set_pos(&self, pos: usize) { | 190 | fn set_pos(&self, pos: usize) { |
252 | let mut walker = self.walker.borrow_mut(); | 191 | let mut walker = self.walker.borrow_mut(); |
192 | assert!(walker.pos <= pos); | ||
193 | |||
253 | while pos > walker.pos && !walker.is_eof() { | 194 | while pos > walker.pos && !walker.is_eof() { |
254 | walker.forward(); | 195 | walker.forward(); |
255 | } | 196 | } |
256 | while pos < walker.pos { | ||
257 | walker.backward(); | ||
258 | } | ||
259 | } | 197 | } |
260 | 198 | ||
261 | fn collect_token_trees(&mut self, n: usize) -> Vec<&tt::TokenTree> { | 199 | fn collect_token_trees(&mut self, n: usize) -> Vec<&tt::TokenTree> { |
@@ -264,15 +202,16 @@ impl<'a> WalkerOwner<'a> { | |||
264 | walker.reset(); | 202 | walker.reset(); |
265 | 203 | ||
266 | while walker.pos < n { | 204 | while walker.pos < n { |
267 | if let WalkCursor::Token(u, tt) = &walker.cursor { | 205 | if let WalkCursor::Token(u, _) = &walker.cursor { |
268 | // We only collect the topmost child | 206 | // We only collect the topmost child |
269 | if walker.stack.len() == 0 { | 207 | if walker.stack.len() == 0 { |
270 | for i in 0..tt.n_tokens { | 208 | if let DelimToken::Token(token) = walker.ts.get(*u) { |
271 | if let DelimToken::Token(token) = walker.ts.get(u + i) { | 209 | res.push(token); |
272 | res.push(token); | ||
273 | } | ||
274 | } | 210 | } |
275 | } else if walker.stack.len() == 1 { | 211 | } |
212 | // Check whether the second level is a subtree | ||
213 | // if so, collect its parent which is topmost child | ||
214 | else if walker.stack.len() == 1 { | ||
276 | if let DelimToken::Delim(_, is_end) = walker.top().get(*u) { | 215 | if let DelimToken::Delim(_, is_end) = walker.top().get(*u) { |
277 | if !is_end { | 216 | if !is_end { |
278 | let (_, last_idx) = &walker.stack[0]; | 217 | let (_, last_idx) = &walker.stack[0]; |
@@ -343,78 +282,6 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> { | |||
343 | } | 282 | } |
344 | } | 283 | } |
345 | 284 | ||
346 | pub(crate) struct TokenPeek<'a, I> | ||
347 | where | ||
348 | I: Iterator<Item = &'a tt::TokenTree>, | ||
349 | { | ||
350 | iter: itertools::MultiPeek<I>, | ||
351 | } | ||
352 | |||
353 | // helper function | ||
354 | fn to_punct(tt: &tt::TokenTree) -> Option<&tt::Punct> { | ||
355 | if let tt::TokenTree::Leaf(tt::Leaf::Punct(pp)) = tt { | ||
356 | return Some(pp); | ||
357 | } | ||
358 | None | ||
359 | } | ||
360 | |||
361 | impl<'a, I> TokenPeek<'a, I> | ||
362 | where | ||
363 | I: Iterator<Item = &'a tt::TokenTree>, | ||
364 | { | ||
365 | pub fn new(iter: I) -> Self { | ||
366 | TokenPeek { iter: itertools::multipeek(iter) } | ||
367 | } | ||
368 | |||
369 | pub fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> { | ||
370 | if p.spacing != tt::Spacing::Joint { | ||
371 | return None; | ||
372 | } | ||
373 | |||
374 | self.iter.reset_peek(); | ||
375 | let p1 = to_punct(self.iter.peek()?)?; | ||
376 | Some(((p.char, p1.char), p1.spacing == tt::Spacing::Joint)) | ||
377 | } | ||
378 | |||
379 | pub fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> { | ||
380 | self.current_punct2(p).and_then(|((p0, p1), last_joint)| { | ||
381 | if !last_joint { | ||
382 | None | ||
383 | } else { | ||
384 | let p2 = to_punct(*self.iter.peek()?)?; | ||
385 | Some(((p0, p1, p2.char), p2.spacing == tt::Spacing::Joint)) | ||
386 | } | ||
387 | }) | ||
388 | } | ||
389 | } | ||
390 | |||
391 | // FIXME: Remove this function | ||
392 | fn convert_multi_char_punct<'b, I>( | ||
393 | p: &tt::Punct, | ||
394 | iter: &mut TokenPeek<'b, I>, | ||
395 | ) -> Option<(SyntaxKind, bool, &'static str, usize)> | ||
396 | where | ||
397 | I: Iterator<Item = &'b tt::TokenTree>, | ||
398 | { | ||
399 | if let Some((m, is_joint_to_next)) = iter.current_punct3(p) { | ||
400 | if let Some((kind, text)) = match m { | ||
401 | _ => None, | ||
402 | } { | ||
403 | return Some((kind, is_joint_to_next, text, 3)); | ||
404 | } | ||
405 | } | ||
406 | |||
407 | if let Some((m, is_joint_to_next)) = iter.current_punct2(p) { | ||
408 | if let Some((kind, text)) = match m { | ||
409 | _ => None, | ||
410 | } { | ||
411 | return Some((kind, is_joint_to_next, text, 2)); | ||
412 | } | ||
413 | } | ||
414 | |||
415 | None | ||
416 | } | ||
417 | |||
418 | fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { | 285 | fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { |
419 | let (kinds, texts) = match d { | 286 | let (kinds, texts) = match d { |
420 | tt::Delimiter::Parenthesis => ([L_PAREN, R_PAREN], "()"), | 287 | tt::Delimiter::Parenthesis => ([L_PAREN, R_PAREN], "()"), |
@@ -426,7 +293,7 @@ fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { | |||
426 | let idx = closing as usize; | 293 | let idx = closing as usize; |
427 | let kind = kinds[idx]; | 294 | let kind = kinds[idx]; |
428 | let text = if texts.len() > 0 { &texts[idx..texts.len() - (1 - idx)] } else { "" }; | 295 | let text = if texts.len() > 0 { &texts[idx..texts.len() - (1 - idx)] } else { "" }; |
429 | TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text), n_tokens: 1 } | 296 | TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) } |
430 | } | 297 | } |
431 | 298 | ||
432 | fn convert_literal(l: &tt::Literal) -> TtToken { | 299 | fn convert_literal(l: &tt::Literal) -> TtToken { |
@@ -437,7 +304,7 @@ fn convert_literal(l: &tt::Literal) -> TtToken { | |||
437 | _ => panic!("Fail to convert given literal {:#?}", &l), | 304 | _ => panic!("Fail to convert given literal {:#?}", &l), |
438 | }); | 305 | }); |
439 | 306 | ||
440 | TtToken { kind, is_joint_to_next: false, text: l.text.clone(), n_tokens: 1 } | 307 | TtToken { kind, is_joint_to_next: false, text: l.text.clone() } |
441 | } | 308 | } |
442 | 309 | ||
443 | fn convert_ident(ident: &tt::Ident) -> TtToken { | 310 | fn convert_ident(ident: &tt::Ident) -> TtToken { |
@@ -447,39 +314,31 @@ fn convert_ident(ident: &tt::Ident) -> TtToken { | |||
447 | SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT) | 314 | SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT) |
448 | }; | 315 | }; |
449 | 316 | ||
450 | TtToken { kind, is_joint_to_next: false, text: ident.text.clone(), n_tokens: 1 } | 317 | TtToken { kind, is_joint_to_next: false, text: ident.text.clone() } |
451 | } | 318 | } |
452 | 319 | ||
453 | fn convert_punct(p: &tt::Punct, next_tokens: &[tt::TokenTree]) -> TtToken { | 320 | fn convert_punct(p: &tt::Punct) -> TtToken { |
454 | let mut iter = next_tokens.iter(); | 321 | let kind = match p.char { |
455 | iter.next(); | 322 | // lexer may produce combpund tokens for these ones |
456 | let mut peek = TokenPeek::new(iter); | 323 | '.' => DOT, |
457 | 324 | ':' => COLON, | |
458 | if let Some((kind, is_joint_to_next, text, size)) = convert_multi_char_punct(p, &mut peek) { | 325 | '=' => EQ, |
459 | TtToken { kind, is_joint_to_next, text: text.into(), n_tokens: size } | 326 | '!' => EXCL, |
460 | } else { | 327 | '-' => MINUS, |
461 | let kind = match p.char { | 328 | c => SyntaxKind::from_char(c).unwrap(), |
462 | // lexer may produce combpund tokens for these ones | 329 | }; |
463 | '.' => DOT, | 330 | let text = { |
464 | ':' => COLON, | 331 | let mut buf = [0u8; 4]; |
465 | '=' => EQ, | 332 | let s: &str = p.char.encode_utf8(&mut buf); |
466 | '!' => EXCL, | 333 | SmolStr::new(s) |
467 | '-' => MINUS, | 334 | }; |
468 | c => SyntaxKind::from_char(c).unwrap(), | 335 | TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text } |
469 | }; | ||
470 | let text = { | ||
471 | let mut buf = [0u8; 4]; | ||
472 | let s: &str = p.char.encode_utf8(&mut buf); | ||
473 | SmolStr::new(s) | ||
474 | }; | ||
475 | TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text, n_tokens: 1 } | ||
476 | } | ||
477 | } | 336 | } |
478 | 337 | ||
479 | fn convert_leaf(tokens: &[tt::TokenTree], leaf: &tt::Leaf) -> TtToken { | 338 | fn convert_leaf(leaf: &tt::Leaf) -> TtToken { |
480 | match leaf { | 339 | match leaf { |
481 | tt::Leaf::Literal(l) => convert_literal(l), | 340 | tt::Leaf::Literal(l) => convert_literal(l), |
482 | tt::Leaf::Ident(ident) => convert_ident(ident), | 341 | tt::Leaf::Ident(ident) => convert_ident(ident), |
483 | tt::Leaf::Punct(punct) => convert_punct(punct, tokens), | 342 | tt::Leaf::Punct(punct) => convert_punct(punct), |
484 | } | 343 | } |
485 | } | 344 | } |
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index e0f228ce9..3521b382a 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs | |||
@@ -148,30 +148,21 @@ fn convert_tt( | |||
148 | match child { | 148 | match child { |
149 | SyntaxElement::Token(token) => { | 149 | SyntaxElement::Token(token) => { |
150 | if token.kind().is_punct() { | 150 | if token.kind().is_punct() { |
151 | let mut prev = None; | 151 | assert!(token.text().len() == 1, "Input ast::token punct must be single char."); |
152 | for char in token.text().chars() { | 152 | let char = token.text().chars().next().unwrap(); |
153 | if let Some(char) = prev { | 153 | |
154 | token_trees.push( | 154 | let spacing = match child_iter.peek() { |
155 | tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Joint }) | 155 | Some(SyntaxElement::Token(token)) => { |
156 | .into(), | 156 | if token.kind().is_punct() { |
157 | ); | 157 | tt::Spacing::Joint |
158 | } | 158 | } else { |
159 | prev = Some(char) | 159 | tt::Spacing::Alone |
160 | } | ||
161 | if let Some(char) = prev { | ||
162 | let spacing = match child_iter.peek() { | ||
163 | Some(SyntaxElement::Token(token)) => { | ||
164 | if token.kind().is_punct() { | ||
165 | tt::Spacing::Joint | ||
166 | } else { | ||
167 | tt::Spacing::Alone | ||
168 | } | ||
169 | } | 160 | } |
170 | _ => tt::Spacing::Alone, | 161 | } |
171 | }; | 162 | _ => tt::Spacing::Alone, |
163 | }; | ||
172 | 164 | ||
173 | token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into()); | 165 | token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into()); |
174 | } | ||
175 | } else { | 166 | } else { |
176 | let child: tt::TokenTree = if token.kind() == SyntaxKind::TRUE_KW | 167 | let child: tt::TokenTree = if token.kind() == SyntaxKind::TRUE_KW |
177 | || token.kind() == SyntaxKind::FALSE_KW | 168 | || token.kind() == SyntaxKind::FALSE_KW |
@@ -224,6 +215,15 @@ impl<'a, Q: Querier> TtTreeSink<'a, Q> { | |||
224 | } | 215 | } |
225 | } | 216 | } |
226 | 217 | ||
218 | fn is_delimiter(kind: SyntaxKind) -> bool { | ||
219 | use SyntaxKind::*; | ||
220 | |||
221 | match kind { | ||
222 | L_PAREN | L_BRACK | L_CURLY | R_PAREN | R_BRACK | R_CURLY => true, | ||
223 | _ => false, | ||
224 | } | ||
225 | } | ||
226 | |||
227 | impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> { | 227 | impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> { |
228 | fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { | 228 | fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { |
229 | if kind == L_DOLLAR || kind == R_DOLLAR { | 229 | if kind == L_DOLLAR || kind == R_DOLLAR { |
@@ -240,14 +240,18 @@ impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> { | |||
240 | self.buf.clear(); | 240 | self.buf.clear(); |
241 | self.inner.token(kind, text); | 241 | self.inner.token(kind, text); |
242 | 242 | ||
243 | // // Add a white space to token | 243 | // Add a white space between tokens, only if both are not delimiters |
244 | // let (last_kind, _, last_joint_to_next ) = self.src_querier.token(self.token_pos-n_tokens as usize); | 244 | if !is_delimiter(kind) { |
245 | // if !last_joint_to_next && last_kind.is_punct() { | 245 | let (last_kind, _, last_joint_to_next) = self.src_querier.token(self.token_pos - 1); |
246 | // let (cur_kind, _, _ ) = self.src_querier.token(self.token_pos); | 246 | if !last_joint_to_next && last_kind.is_punct() { |
247 | // if cur_kind.is_punct() { | 247 | let (cur_kind, _, _) = self.src_querier.token(self.token_pos); |
248 | // self.inner.token(WHITESPACE, " ".into()); | 248 | if !is_delimiter(cur_kind) { |
249 | // } | 249 | if cur_kind.is_punct() { |
250 | // } | 250 | self.inner.token(WHITESPACE, " ".into()); |
251 | } | ||
252 | } | ||
253 | } | ||
254 | } | ||
251 | } | 255 | } |
252 | 256 | ||
253 | fn start_node(&mut self, kind: SyntaxKind) { | 257 | fn start_node(&mut self, kind: SyntaxKind) { |
diff --git a/crates/ra_mbe/src/tt_cursor.rs b/crates/ra_mbe/src/tt_cursor.rs index eef642a9c..8f15d215b 100644 --- a/crates/ra_mbe/src/tt_cursor.rs +++ b/crates/ra_mbe/src/tt_cursor.rs | |||
@@ -1,6 +1,5 @@ | |||
1 | use crate::ParseError; | 1 | use crate::ParseError; |
2 | use crate::subtree_parser::Parser; | 2 | use crate::subtree_parser::Parser; |
3 | use crate::subtree_source::TokenPeek; | ||
4 | use smallvec::{SmallVec, smallvec}; | 3 | use smallvec::{SmallVec, smallvec}; |
5 | 4 | ||
6 | #[derive(Debug, Clone)] | 5 | #[derive(Debug, Clone)] |
@@ -262,3 +261,48 @@ impl<'a> TtCursor<'a> { | |||
262 | self.pos = memento.pos; | 261 | self.pos = memento.pos; |
263 | } | 262 | } |
264 | } | 263 | } |
264 | |||
265 | pub(crate) struct TokenPeek<'a, I> | ||
266 | where | ||
267 | I: Iterator<Item = &'a tt::TokenTree>, | ||
268 | { | ||
269 | iter: itertools::MultiPeek<I>, | ||
270 | } | ||
271 | |||
272 | // helper function | ||
273 | fn to_punct(tt: &tt::TokenTree) -> Option<&tt::Punct> { | ||
274 | if let tt::TokenTree::Leaf(tt::Leaf::Punct(pp)) = tt { | ||
275 | return Some(pp); | ||
276 | } | ||
277 | None | ||
278 | } | ||
279 | |||
280 | impl<'a, I> TokenPeek<'a, I> | ||
281 | where | ||
282 | I: Iterator<Item = &'a tt::TokenTree>, | ||
283 | { | ||
284 | pub fn new(iter: I) -> Self { | ||
285 | TokenPeek { iter: itertools::multipeek(iter) } | ||
286 | } | ||
287 | |||
288 | pub fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> { | ||
289 | if p.spacing != tt::Spacing::Joint { | ||
290 | return None; | ||
291 | } | ||
292 | |||
293 | self.iter.reset_peek(); | ||
294 | let p1 = to_punct(self.iter.peek()?)?; | ||
295 | Some(((p.char, p1.char), p1.spacing == tt::Spacing::Joint)) | ||
296 | } | ||
297 | |||
298 | pub fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> { | ||
299 | self.current_punct2(p).and_then(|((p0, p1), last_joint)| { | ||
300 | if !last_joint { | ||
301 | None | ||
302 | } else { | ||
303 | let p2 = to_punct(*self.iter.peek()?)?; | ||
304 | Some(((p0, p1, p2.char), p2.spacing == tt::Spacing::Joint)) | ||
305 | } | ||
306 | }) | ||
307 | } | ||
308 | } | ||