author     Aleksey Kladov <[email protected]>   2020-04-10 18:41:01 +0100
committer  Aleksey Kladov <[email protected]>   2020-04-10 18:43:37 +0100
commit     428d4dfe710f7fe7564e6db82c4b44caaf84655d
tree       0c57256151b575836bebb0702823415f4a404613 /crates/ra_mbe
parent     5e2f29af851b7d89ed7f4be91bd1932d45b90eaa
Fix accidentally quadratic behavior when processing includes
closes #3927
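
Before this change, TokenMap::update_close_delim and remove_delim located a delimiter by scanning the entries Vec for its TokenId. With one scan per delimiter, processing an input with n delimiters cost O(n^2). The commit instead has insert_delim return the index of the entry it just pushed, and the callers hand that index back, so closing a delimiter becomes a direct slot access. The sketch below illustrates the before/after pattern only; it uses simplified stand-in types (a bare TokenId wrapper and a Delim struct with plain u32 offsets) rather than rust-analyzer's actual TokenTextRange/TextRange, and the method names merely mirror the diff.

// Standalone sketch; TokenId and Delim are simplified stand-ins,
// not the actual rust-analyzer types.

#[derive(Clone, Copy, PartialEq)]
struct TokenId(u32);

#[derive(Clone, Copy, Debug, PartialEq)]
struct Delim {
    open: u32,
    close: u32,
}

#[derive(Default)]
struct Map {
    entries: Vec<(TokenId, Delim)>,
}

impl Map {
    // Old shape: the caller only kept the TokenId, so every close/update
    // had to scan the Vec -- O(n) per delimiter, O(n^2) over the whole input.
    fn update_close_by_id(&mut self, token_id: TokenId, close: u32) {
        if let Some(entry) = self.entries.iter_mut().find(|(tid, _)| *tid == token_id) {
            entry.1.close = close;
        }
    }

    // New shape: insertion reports the index of the entry it pushed...
    fn insert_delim(&mut self, token_id: TokenId, open: u32) -> usize {
        let idx = self.entries.len();
        self.entries.push((token_id, Delim { open, close: open }));
        idx
    }

    // ...so the caller can update that slot directly in O(1).
    fn update_close_by_idx(&mut self, idx: usize, close: u32) {
        self.entries[idx].1.close = close;
    }
}

fn main() {
    let mut map = Map::default();
    let idx = map.insert_delim(TokenId(0), 10);
    map.update_close_by_idx(idx, 42);       // new: direct indexing, no scan
    map.update_close_by_id(TokenId(0), 42); // old: linear search by id
    assert_eq!(map.entries[0].1, Delim { open: 10, close: 42 });
}

Removal still goes through Vec::remove, which shifts every later entry, which is why the diff leaves a FIXME in remove_delim.
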
Diffstat (limited to 'crates/ra_mbe')
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs | 36
1 file changed, 19 insertions(+), 17 deletions(-)
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 8e8ae2b29..9fb5cb058 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -137,21 +137,23 @@ impl TokenMap {
         token_id: tt::TokenId,
         open_relative_range: TextRange,
         close_relative_range: TextRange,
-    ) {
+    ) -> usize {
+        let res = self.entries.len();
         self.entries
             .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
+        res
     }
 
-    fn update_close_delim(&mut self, token_id: tt::TokenId, close_relative_range: TextRange) {
-        if let Some(entry) = self.entries.iter_mut().find(|(tid, _)| *tid == token_id) {
-            if let TokenTextRange::Delimiter(dim, _) = entry.1 {
-                entry.1 = TokenTextRange::Delimiter(dim, close_relative_range);
-            }
+    fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
+        let (_, token_text_range) = &mut self.entries[idx];
+        if let TokenTextRange::Delimiter(dim, _) = token_text_range {
+            *token_text_range = TokenTextRange::Delimiter(*dim, close_relative_range);
         }
     }
 
-    fn remove_delim(&mut self, token_id: tt::TokenId) {
-        self.entries.retain(|(tid, _)| *tid != token_id);
+    fn remove_delim(&mut self, idx: usize) {
+        // FIXME: This could be accidently quadratic
+        self.entries.remove(idx);
     }
 }
 
@@ -238,24 +240,24 @@ impl TokenIdAlloc {
         token_id
     }
 
-    fn open_delim(&mut self, open_abs_range: TextRange) -> tt::TokenId {
+    fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
         let token_id = tt::TokenId(self.next_id);
         self.next_id += 1;
-        self.map.insert_delim(
+        let idx = self.map.insert_delim(
             token_id,
             open_abs_range - self.global_offset,
             open_abs_range - self.global_offset,
         );
-        token_id
+        (token_id, idx)
     }
 
-    fn close_delim(&mut self, id: tt::TokenId, close_abs_range: Option<TextRange>) {
+    fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
         match close_abs_range {
             None => {
-                self.map.remove_delim(id);
+                self.map.remove_delim(idx);
             }
             Some(close) => {
-                self.map.update_close_delim(id, close - self.global_offset);
+                self.map.update_close_delim(idx, close - self.global_offset);
             }
         }
     }
@@ -322,7 +324,7 @@ trait TokenConvertor {
 
             if let Some((kind, closed)) = delim {
                 let mut subtree = tt::Subtree::default();
-                let id = self.id_alloc().open_delim(range);
+                let (id, idx) = self.id_alloc().open_delim(range);
                 subtree.delimiter = Some(tt::Delimiter { kind, id });
 
                 while self.peek().map(|it| it.kind() != closed).unwrap_or(false) {
@@ -331,7 +333,7 @@ trait TokenConvertor {
                 let last_range = match self.bump() {
                     None => {
                         // For error resilience, we insert an char punct for the opening delim here
-                        self.id_alloc().close_delim(id, None);
+                        self.id_alloc().close_delim(idx, None);
                         let leaf: tt::Leaf = tt::Punct {
                             id: self.id_alloc().alloc(range),
                             char: token.to_char().unwrap(),
@@ -344,7 +346,7 @@ trait TokenConvertor {
                     }
                     Some(it) => it.1,
                 };
-                self.id_alloc().close_delim(id, Some(last_range));
+                self.id_alloc().close_delim(idx, Some(last_range));
                 subtree.into()
             } else {
                 let spacing = match self.peek() {