path: root/crates/ra_mbe/src
author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2020-04-10 20:16:32 +0100
committer GitHub <[email protected]>  2020-04-10 20:16:32 +0100
commit    ca9a5dd1654cc0d54ed5b2bb71ec8ed559470276 (patch)
tree      bba0a2507861fd9c4e354283e2b2102532747e7e /crates/ra_mbe/src
parent    773bb5173d3d54e6d19dfadd75c342c7acdce287 (diff)
parent    1b68c72fe93424213db895a4066eafb99dfdeac9 (diff)
Merge #3933
3933: Fix accidentally quadratic behavior when processing include! r=matklad a=matklad

This fixes the immediate problem behind #3927. It doesn't yet fix the deeper problem with `to_node` being quadratic (hence the test is ignored), but it is a good start anyway.

bors r+

Co-authored-by: Aleksey Kladov <[email protected]>
Diffstat (limited to 'crates/ra_mbe/src')
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs  36
1 file changed, 19 insertions(+), 17 deletions(-)
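As background to the diff below: the old TokenMap removed a delimiter entry by scanning the whole entry list with `retain`, so closing n delimiters cost O(n^2) in total. The patch makes `insert_delim` return the index of the new entry so later updates and removals can address it directly. The following is a minimal sketch of that shift, using simplified stand-in types (TokenId, Range, Map are placeholders here, not the actual rust-analyzer definitions):

// Sketch only: simplified stand-ins for the real TokenMap types.
#[derive(Clone, Copy, PartialEq)]
struct TokenId(u32);

#[derive(Clone, Copy)]
struct Range(u32, u32);

struct Map {
    entries: Vec<(TokenId, Range)>,
}

impl Map {
    // Old shape: every removal scans the whole entry list, so removing n
    // delimiters one by one does O(n^2) work overall.
    fn remove_delim_by_id(&mut self, token_id: TokenId) {
        self.entries.retain(|(tid, _)| *tid != token_id);
    }

    // New shape: insertion hands back the entry's index ...
    fn insert_delim(&mut self, token_id: TokenId, range: Range) -> usize {
        let idx = self.entries.len();
        self.entries.push((token_id, range));
        idx
    }

    // ... and the caller uses that index to address the entry directly.
    // Vec::remove is still O(n) in the worst case, which is what the FIXME
    // in the patch refers to, but the per-removal search is gone.
    fn remove_delim(&mut self, idx: usize) {
        self.entries.remove(idx);
    }
}

fn main() {
    let mut map = Map { entries: Vec::new() };
    let idx = map.insert_delim(TokenId(0), Range(0, 2));
    map.remove_delim(idx);
    assert!(map.entries.is_empty());

    // Old-style removal shown only for contrast.
    map.insert_delim(TokenId(1), Range(3, 4));
    map.remove_delim_by_id(TokenId(1));
    assert!(map.entries.is_empty());
}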
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 8e8ae2b29..9fb5cb058 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -137,21 +137,23 @@ impl TokenMap {
         token_id: tt::TokenId,
         open_relative_range: TextRange,
         close_relative_range: TextRange,
-    ) {
+    ) -> usize {
+        let res = self.entries.len();
         self.entries
             .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
+        res
     }
 
-    fn update_close_delim(&mut self, token_id: tt::TokenId, close_relative_range: TextRange) {
-        if let Some(entry) = self.entries.iter_mut().find(|(tid, _)| *tid == token_id) {
-            if let TokenTextRange::Delimiter(dim, _) = entry.1 {
-                entry.1 = TokenTextRange::Delimiter(dim, close_relative_range);
-            }
+    fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
+        let (_, token_text_range) = &mut self.entries[idx];
+        if let TokenTextRange::Delimiter(dim, _) = token_text_range {
+            *token_text_range = TokenTextRange::Delimiter(*dim, close_relative_range);
         }
     }
 
-    fn remove_delim(&mut self, token_id: tt::TokenId) {
-        self.entries.retain(|(tid, _)| *tid != token_id);
+    fn remove_delim(&mut self, idx: usize) {
+        // FIXME: This could be accidently quadratic
+        self.entries.remove(idx);
     }
 }
 
@@ -238,24 +240,24 @@ impl TokenIdAlloc {
         token_id
     }
 
-    fn open_delim(&mut self, open_abs_range: TextRange) -> tt::TokenId {
+    fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
         let token_id = tt::TokenId(self.next_id);
         self.next_id += 1;
-        self.map.insert_delim(
+        let idx = self.map.insert_delim(
             token_id,
             open_abs_range - self.global_offset,
             open_abs_range - self.global_offset,
         );
-        token_id
+        (token_id, idx)
     }
 
-    fn close_delim(&mut self, id: tt::TokenId, close_abs_range: Option<TextRange>) {
+    fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
         match close_abs_range {
             None => {
-                self.map.remove_delim(id);
+                self.map.remove_delim(idx);
             }
             Some(close) => {
-                self.map.update_close_delim(id, close - self.global_offset);
+                self.map.update_close_delim(idx, close - self.global_offset);
             }
         }
     }
@@ -322,7 +324,7 @@ trait TokenConvertor {
 
         if let Some((kind, closed)) = delim {
             let mut subtree = tt::Subtree::default();
-            let id = self.id_alloc().open_delim(range);
+            let (id, idx) = self.id_alloc().open_delim(range);
             subtree.delimiter = Some(tt::Delimiter { kind, id });
 
             while self.peek().map(|it| it.kind() != closed).unwrap_or(false) {
@@ -331,7 +333,7 @@ trait TokenConvertor {
             let last_range = match self.bump() {
                 None => {
                     // For error resilience, we insert an char punct for the opening delim here
-                    self.id_alloc().close_delim(id, None);
+                    self.id_alloc().close_delim(idx, None);
                     let leaf: tt::Leaf = tt::Punct {
                         id: self.id_alloc().alloc(range),
                         char: token.to_char().unwrap(),
@@ -344,7 +346,7 @@ trait TokenConvertor {
                 }
                 Some(it) => it.1,
             };
-            self.id_alloc().close_delim(id, Some(last_range));
+            self.id_alloc().close_delim(idx, Some(last_range));
             subtree.into()
         } else {
             let spacing = match self.peek() {