 crates/hir/src/semantics.rs                 | 10
 crates/hir_expand/src/db.rs                 | 16
 crates/hir_expand/src/hygiene.rs            |  2
 crates/hir_expand/src/lib.rs                |  4
 crates/hir_ty/src/lower.rs                  |  2
 crates/ide_completion/src/context.rs        | 20
 crates/ide_completion/src/lib.rs            |  4
 crates/mbe/src/lib.rs                       | 10
 crates/mbe/src/syntax_bridge.rs             | 83
 crates/mbe/src/tests/expand.rs              |  5
 crates/mbe/src/token_map.rs                 | 85
 crates/project_model/src/cargo_workspace.rs | 36
 editors/code/src/snippets.ts                |  2
 13 files changed, 148 insertions(+), 131 deletions(-)
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 3aa467e3c..8d3c43d08 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -120,10 +120,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn speculative_expand(
         &self,
         actual_macro_call: &ast::MacroCall,
-        hypothetical_args: &ast::TokenTree,
+        speculative_args: &ast::TokenTree,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
-        self.imp.speculative_expand(actual_macro_call, hypothetical_args, token_to_map)
+        self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
     }

     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
@@ -335,7 +335,7 @@ impl<'db> SemanticsImpl<'db> {
     fn speculative_expand(
         &self,
         actual_macro_call: &ast::MacroCall,
-        hypothetical_args: &ast::TokenTree,
+        speculative_args: &ast::TokenTree,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
         let sa = self.analyze(actual_macro_call.syntax());
@@ -344,10 +344,10 @@ impl<'db> SemanticsImpl<'db> {
         let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
             sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
         })?;
-        hir_expand::db::expand_hypothetical(
+        hir_expand::db::expand_speculative(
             self.db.upcast(),
             macro_call_id,
-            hypothetical_args,
+            speculative_args,
             token_to_map,
         )
     }
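
Note: a minimal caller-side sketch of the renamed entry point, mirroring how
ide_completion's context.rs (further down in this diff) drives it; `sema`,
`macro_call`, `args`, and `fake_ident_token` are illustrative placeholders, not
code from this commit:

    // Re-expand the real macro call with `args` substituted for its token tree,
    // and map `fake_ident_token` down into the resulting expansion.
    let (expanded_node, mapped_token) =
        sema.speculative_expand(&macro_call, &args, fake_ident_token)?;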
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index 5c769c1bf..625c26f0a 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -131,15 +131,15 @@ pub trait AstDatabase: SourceDatabase {
 /// used for completion, where we want to see what 'would happen' if we insert a
 /// token. The `token_to_map` mapped down into the expansion, with the mapped
 /// token returned.
-pub fn expand_hypothetical(
+pub fn expand_speculative(
     db: &dyn AstDatabase,
     actual_macro_call: MacroCallId,
-    hypothetical_args: &ast::TokenTree,
+    speculative_args: &ast::TokenTree,
     token_to_map: SyntaxToken,
 ) -> Option<(SyntaxNode, SyntaxToken)> {
-    let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax());
+    let (tt, tmap_1) = mbe::syntax_node_to_token_tree(speculative_args.syntax());
     let range =
-        token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?;
+        token_to_map.text_range().checked_sub(speculative_args.syntax().text_range().start())?;
     let token_id = tmap_1.token_by_range(range)?;

     let macro_def = {
@@ -147,15 +147,15 @@ pub fn expand_hypothetical(
         db.macro_def(loc.def)?
     };

-    let hypothetical_expansion = macro_def.expand(db, actual_macro_call, &tt);
+    let speculative_expansion = macro_def.expand(db, actual_macro_call, &tt);

     let fragment_kind = macro_fragment_kind(db, actual_macro_call);

     let (node, tmap_2) =
-        mbe::token_tree_to_syntax_node(&hypothetical_expansion.value, fragment_kind).ok()?;
+        mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?;

     let token_id = macro_def.map_id_down(token_id);
-    let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?;
+    let range = tmap_2.range_by_token(token_id, token_to_map.kind())?;
     let token = node.syntax_node().covering_element(range).into_token()?;
     Some((node.syntax_node(), token))
 }
@@ -325,7 +325,7 @@ fn macro_expand_with_arg(
     if let Some(eager) = &loc.eager {
         if arg.is_some() {
             return ExpandResult::str_err(
-                "hypothetical macro expansion not implemented for eager macro".to_owned(),
+                "speculative macro expansion not implemented for eager macro".to_owned(),
             );
         } else {
             return ExpandResult {
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs
index 38e09fdd4..d98913907 100644
--- a/crates/hir_expand/src/hygiene.rs
+++ b/crates/hir_expand/src/hygiene.rs
@@ -154,7 +154,7 @@ impl HygieneInfo {
             },
         };

-        let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?;
+        let range = token_map.range_by_token(token_id, SyntaxKind::IDENT)?;
         Some((tt.with_value(range + tt.value), origin))
     }
 }
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs
index 92c679dd2..6be4516a3 100644
--- a/crates/hir_expand/src/lib.rs
+++ b/crates/hir_expand/src/lib.rs
@@ -329,7 +329,7 @@ impl ExpansionInfo {
         let token_id = self.macro_arg.1.token_by_range(range)?;
         let token_id = self.macro_def.map_id_down(token_id);

-        let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
+        let range = self.exp_map.range_by_token(token_id, token.value.kind())?;

         let token = self.expanded.value.covering_element(range).into_token()?;

@@ -354,7 +354,7 @@ impl ExpansionInfo {
             },
         };

-        let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
+        let range = token_map.range_by_token(token_id, token.value.kind())?;
         let token =
             tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
         Some((tt.with_value(token), origin))
diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs
index 1645ac533..c83933c73 100644
--- a/crates/hir_ty/src/lower.rs
+++ b/crates/hir_ty/src/lower.rs
@@ -1026,7 +1026,7 @@ pub(crate) fn trait_environment_query(
     };
     if let Some(AssocContainerId::TraitId(trait_id)) = container {
         // add `Self: Trait<T1, T2, ...>` to the environment in trait
-        // function default implementations (and hypothetical code
+        // function default implementations (and speculative code
         // inside consts or type aliases)
         cov_mark::hit!(trait_self_implements_self);
         let substs = TyBuilder::type_params_subst(db, trait_id);
diff --git a/crates/ide_completion/src/context.rs b/crates/ide_completion/src/context.rs
index 1ec59ff80..2f3fb1710 100644
--- a/crates/ide_completion/src/context.rs
+++ b/crates/ide_completion/src/context.rs
@@ -196,46 +196,46 @@ impl<'a> CompletionContext<'a> {
         };

         let mut original_file = original_file.syntax().clone();
-        let mut hypothetical_file = file_with_fake_ident.syntax().clone();
+        let mut speculative_file = file_with_fake_ident.syntax().clone();
         let mut offset = position.offset;
         let mut fake_ident_token = fake_ident_token;

         // Are we inside a macro call?
         while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
             find_node_at_offset::<ast::MacroCall>(&original_file, offset),
-            find_node_at_offset::<ast::MacroCall>(&hypothetical_file, offset),
+            find_node_at_offset::<ast::MacroCall>(&speculative_file, offset),
         ) {
             if actual_macro_call.path().as_ref().map(|s| s.syntax().text())
                 != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text())
             {
                 break;
             }
-            let hypothetical_args = match macro_call_with_fake_ident.token_tree() {
+            let speculative_args = match macro_call_with_fake_ident.token_tree() {
                 Some(tt) => tt,
                 None => break,
             };
-            if let (Some(actual_expansion), Some(hypothetical_expansion)) = (
+            if let (Some(actual_expansion), Some(speculative_expansion)) = (
                 ctx.sema.expand(&actual_macro_call),
                 ctx.sema.speculative_expand(
                     &actual_macro_call,
-                    &hypothetical_args,
+                    &speculative_args,
                     fake_ident_token,
                 ),
             ) {
-                let new_offset = hypothetical_expansion.1.text_range().start();
+                let new_offset = speculative_expansion.1.text_range().start();
                 if new_offset > actual_expansion.text_range().end() {
                     break;
                 }
                 original_file = actual_expansion;
-                hypothetical_file = hypothetical_expansion.0;
-                fake_ident_token = hypothetical_expansion.1;
+                speculative_file = speculative_expansion.0;
+                fake_ident_token = speculative_expansion.1;
                 offset = new_offset;
             } else {
                 break;
             }
         }
-        ctx.fill_keyword_patterns(&hypothetical_file, offset);
-        ctx.fill(&original_file, hypothetical_file, offset);
+        ctx.fill_keyword_patterns(&speculative_file, offset);
+        ctx.fill(&original_file, speculative_file, offset);
         Some(ctx)
     }

diff --git a/crates/ide_completion/src/lib.rs b/crates/ide_completion/src/lib.rs
index 645349215..1152a9850 100644
--- a/crates/ide_completion/src/lib.rs
+++ b/crates/ide_completion/src/lib.rs
@@ -107,7 +107,7 @@ pub use crate::{
 /// identifier prefix/fuzzy match should be done higher in the stack, together
 /// with ordering of completions (currently this is done by the client).
 ///
-/// # Hypothetical Completion Problem
+/// # Speculative Completion Problem
 ///
 /// There's a curious unsolved problem in the current implementation. Often, you
 /// want to compute completions on a *slightly different* text document.
@@ -121,7 +121,7 @@ pub use crate::{
 /// doesn't allow such "phantom" inputs.
 ///
 /// Another case where this would be instrumental is macro expansion. We want to
-/// insert a fake ident and re-expand code. There's `expand_hypothetical` as a
+/// insert a fake ident and re-expand code. There's `expand_speculative` as a
 /// work-around for this.
 ///
 /// A different use-case is completion of injection (examples and links in doc
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 3af5bc18b..b95374b76 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -14,6 +14,7 @@ mod tests;

 #[cfg(test)]
 mod benchmark;
+mod token_map;

 use std::fmt;

@@ -63,9 +64,12 @@ impl fmt::Display for ExpandError {
     }
 }

-pub use crate::syntax_bridge::{
-    ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
-    token_tree_to_syntax_node, TokenMap,
+pub use crate::{
+    syntax_bridge::{
+        ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+        token_tree_to_syntax_node,
+    },
+    token_map::TokenMap,
 };

 /// This struct contains AST for a single `macro_rules` definition. What might
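
Note: because `TokenMap` is re-exported from the crate root, the public path is
unchanged and downstream users (hir_expand above) need no edits; only the
defining module moved:

    use mbe::TokenMap; // still resolves, now via crates/mbe/src/token_map.rs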
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index b13168bd3..b11172caf 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -10,36 +10,8 @@ use syntax::{
 };
 use tt::buffer::{Cursor, TokenBuffer};

-use crate::ExpandError;
 use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter};
+use crate::{ExpandError, TokenMap};

-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum TokenTextRange {
-    Token(TextRange),
-    Delimiter(TextRange),
-}
-
-impl TokenTextRange {
-    pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
-        match self {
-            TokenTextRange::Token(it) => Some(it),
-            TokenTextRange::Delimiter(it) => match kind {
-                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
-                T!['}'] | T![')'] | T![']'] => {
-                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
-                }
-                _ => None,
-            },
-        }
-    }
-}
-
-/// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Debug, PartialEq, Eq, Clone, Default)]
-pub struct TokenMap {
-    /// Maps `tt::TokenId` to the *relative* source range.
-    entries: Vec<(tt::TokenId, TokenTextRange)>,
-}
-
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
@@ -53,7 +25,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
     let global_offset = node.text_range().start();
     let mut c = Convertor::new(node, global_offset);
     let subtree = c.go();
-    c.id_alloc.map.entries.shrink_to_fit();
+    c.id_alloc.map.shrink_to_fit();
     (subtree, c.id_alloc.map)
 }

@@ -149,55 +121,6 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     res
 }

-impl TokenMap {
-    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
-            TokenTextRange::Token(it) => *it == relative_range,
-            TokenTextRange::Delimiter(it) => {
-                let open = TextRange::at(it.start(), 1.into());
-                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
-                open == relative_range || close == relative_range
-            }
-        })?;
-        Some(token_id)
-    }
-
-    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
-        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
-        Some(range)
-    }
-
-    fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
-        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
-    }
-
-    fn insert_delim(
-        &mut self,
-        token_id: tt::TokenId,
-        open_relative_range: TextRange,
-        close_relative_range: TextRange,
-    ) -> usize {
-        let res = self.entries.len();
-        let cover = open_relative_range.cover(close_relative_range);
-
-        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
-        res
-    }
-
-    fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
-        let (_, token_text_range) = &mut self.entries[idx];
-        if let TokenTextRange::Delimiter(dim) = token_text_range {
-            let cover = dim.cover(close_relative_range);
-            *token_text_range = TokenTextRange::Delimiter(cover);
-        }
-    }
-
-    fn remove_delim(&mut self, idx: usize) {
-        // FIXME: This could be accidentally quadratic
-        self.entries.remove(idx);
-    }
-}
-
 /// Returns the textual content of a doc comment block as a quoted string
 /// That is, strips leading `///` (or `/**`, etc)
 /// and strips the ending `*/`
@@ -634,7 +557,7 @@ impl<'a> TtTreeSink<'a> {
     }

     fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
-        self.token_map.entries.shrink_to_fit();
+        self.token_map.shrink_to_fit();
         (self.inner.finish(), self.token_map)
     }
 }
diff --git a/crates/mbe/src/tests/expand.rs b/crates/mbe/src/tests/expand.rs
index 3a1d840ea..5f173f513 100644
--- a/crates/mbe/src/tests/expand.rs
+++ b/crates/mbe/src/tests/expand.rs
@@ -58,9 +58,8 @@ macro_rules! foobar {
     let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
     let content = node.syntax_node().to_string();

-    let get_text = |id, kind| -> String {
-        content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string()
-    };
+    let get_text =
+        |id, kind| -> String { content[token_map.range_by_token(id, kind).unwrap()].to_string() };

     assert_eq!(expanded.token_trees.len(), 4);
     // {($e:ident) => { fn $e() {} }}
diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs
new file mode 100644
index 000000000..6df3de3b3
--- /dev/null
+++ b/crates/mbe/src/token_map.rs
@@ -0,0 +1,85 @@
+//! Mapping between `TokenId`s and the token's position in macro definitions or inputs.
+
+use parser::{SyntaxKind, T};
+use syntax::{TextRange, TextSize};
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+enum TokenTextRange {
+    Token(TextRange),
+    Delimiter(TextRange),
+}
+
+impl TokenTextRange {
+    fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
+        match self {
+            TokenTextRange::Token(it) => Some(it),
+            TokenTextRange::Delimiter(it) => match kind {
+                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
+                T!['}'] | T![')'] | T![']'] => {
+                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
+                }
+                _ => None,
+            },
+        }
+    }
+}
+
+/// Maps `tt::TokenId` to the relative range of the original token.
+#[derive(Debug, PartialEq, Eq, Clone, Default)]
+pub struct TokenMap {
+    /// Maps `tt::TokenId` to the *relative* source range.
+    entries: Vec<(tt::TokenId, TokenTextRange)>,
+}
+
+impl TokenMap {
+    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
+        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
+            TokenTextRange::Token(it) => *it == relative_range,
+            TokenTextRange::Delimiter(it) => {
+                let open = TextRange::at(it.start(), 1.into());
+                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
+                open == relative_range || close == relative_range
+            }
+        })?;
+        Some(token_id)
+    }
+
+    pub fn range_by_token(&self, token_id: tt::TokenId, kind: SyntaxKind) -> Option<TextRange> {
+        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
+        range.by_kind(kind)
+    }
+
+    pub(crate) fn shrink_to_fit(&mut self) {
+        self.entries.shrink_to_fit();
+    }
+
+    pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
+        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
+    }
+
+    pub(crate) fn insert_delim(
+        &mut self,
+        token_id: tt::TokenId,
+        open_relative_range: TextRange,
+        close_relative_range: TextRange,
+    ) -> usize {
+        let res = self.entries.len();
+        let cover = open_relative_range.cover(close_relative_range);
+
+        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
+        res
+    }
+
+    pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
+        let (_, token_text_range) = &mut self.entries[idx];
+        if let TokenTextRange::Delimiter(dim) = token_text_range {
+            let cover = dim.cover(close_relative_range);
+            *token_text_range = TokenTextRange::Delimiter(cover);
+        }
+    }
+
+    pub(crate) fn remove_delim(&mut self, idx: usize) {
+        // FIXME: This could be accidentally quadratic
+        self.entries.remove(idx);
+    }
+}
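
Note: the `Delimiter` arm of `by_kind` is the subtle part: one entry covers the
whole delimited range, and the requested kind selects the one-byte open or close
sub-range. A hedged sketch of the intended behaviour (hypothetical offsets; this
would only compile inside token_map.rs, since `TokenTextRange` is now private):

    // A `(...)` group covering offsets 5..12 of the source text.
    let delim = TokenTextRange::Delimiter(TextRange::new(5.into(), 12.into()));
    assert_eq!(delim.by_kind(T!['(']), Some(TextRange::at(5.into(), 1.into())));  // 5..6
    assert_eq!(delim.by_kind(T![')']), Some(TextRange::at(11.into(), 1.into()))); // 11..12
    assert_eq!(delim.by_kind(SyntaxKind::IDENT), None); // delimiters never map to idents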
diff --git a/crates/project_model/src/cargo_workspace.rs b/crates/project_model/src/cargo_workspace.rs
index ad705c752..a8fee4f08 100644
--- a/crates/project_model/src/cargo_workspace.rs
+++ b/crates/project_model/src/cargo_workspace.rs
@@ -121,7 +121,7 @@ pub struct PackageDependency {
     pub kind: DepKind,
 }

-#[derive(Debug, Clone, Eq, PartialEq)]
+#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord)]
 pub enum DepKind {
     /// Available to the library, binary, and dev targets in the package (but not the build script).
     Normal,
@@ -132,17 +132,23 @@ pub enum DepKind {
 }

 impl DepKind {
-    fn new(list: &[cargo_metadata::DepKindInfo]) -> Self {
+    fn iter(list: &[cargo_metadata::DepKindInfo]) -> impl Iterator<Item = Self> + '_ {
+        let mut dep_kinds = Vec::new();
+        if list.is_empty() {
+            dep_kinds.push(Self::Normal);
+        }
         for info in list {
-            match info.kind {
-                cargo_metadata::DependencyKind::Normal => return Self::Normal,
-                cargo_metadata::DependencyKind::Development => return Self::Dev,
-                cargo_metadata::DependencyKind::Build => return Self::Build,
+            let kind = match info.kind {
+                cargo_metadata::DependencyKind::Normal => Self::Normal,
+                cargo_metadata::DependencyKind::Development => Self::Dev,
+                cargo_metadata::DependencyKind::Build => Self::Build,
                 cargo_metadata::DependencyKind::Unknown => continue,
-            }
+            };
+            dep_kinds.push(kind);
         }
-
-        Self::Normal
+        dep_kinds.sort_unstable();
+        dep_kinds.dedup();
+        dep_kinds.into_iter()
     }
 }

@@ -317,7 +323,11 @@ impl CargoWorkspace {
                 }
             };
             node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg));
-            for dep_node in node.deps {
+            for (dep_node, kind) in node
+                .deps
+                .iter()
+                .flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind)))
+            {
                 let pkg = match pkg_by_id.get(&dep_node.pkg) {
                     Some(&pkg) => pkg,
                     None => {
@@ -328,11 +338,7 @@ impl CargoWorkspace {
                         continue;
                     }
                 };
-                let dep = PackageDependency {
-                    name: dep_node.name,
-                    pkg,
-                    kind: DepKind::new(&dep_node.dep_kinds),
-                };
+                let dep = PackageDependency { name: dep_node.name.clone(), pkg, kind };
                 packages[source].dependencies.push(dep);
             }
             packages[source].active_features.extend(node.features);
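
Note: the behavioural change is that a crate listed under several dependency
sections now contributes one `PackageDependency` per kind, where the old
`DepKind::new` returned only the first match. A self-contained sketch of the new
logic, with a stand-in `Kind` enum and `kinds` function instead of the
cargo_metadata types:

    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    enum Kind { Normal, Dev, Build }

    // Mirrors `DepKind::iter`: collect every known kind, default to Normal
    // when no kind info is present, then sort and dedup.
    fn kinds(infos: &[Option<Kind>]) -> impl Iterator<Item = Kind> + '_ {
        let mut out = Vec::new();
        if infos.is_empty() {
            out.push(Kind::Normal);
        }
        for info in infos {
            match info {
                Some(kind) => out.push(*kind),
                None => continue, // DependencyKind::Unknown is skipped
            }
        }
        out.sort_unstable();
        out.dedup();
        out.into_iter()
    }

    fn main() {
        // A crate under both [dependencies] and [build-dependencies] now
        // yields two dependency edges instead of collapsing to Normal.
        let both = [Some(Kind::Normal), Some(Kind::Build)];
        assert_eq!(kinds(&both).collect::<Vec<_>>(), vec![Kind::Normal, Kind::Build]);
    }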
diff --git a/editors/code/src/snippets.ts b/editors/code/src/snippets.ts
index 58f7aa128..a409e5296 100644
--- a/editors/code/src/snippets.ts
+++ b/editors/code/src/snippets.ts
@@ -52,7 +52,7 @@ export async function applySnippetTextEdits(editor: vscode.TextEditor, edits: vs
             } else {
                 builder.replace(indel.range, indel.newText);
             }
-            lineDelta = countLines(indel.newText) - (indel.range.end.line - indel.range.start.line);
+            lineDelta += countLines(indel.newText) - (indel.range.end.line - indel.range.start.line);
         }
     });
     if (selections.length > 0) editor.selections = selections;
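
Note: the TypeScript fix above turns an overwrite into an accumulation: each
edit shifts every later line by (lines inserted - lines replaced), and those
shifts must sum across all edits rather than keeping only the last one. A tiny
arithmetic sketch (in Rust for consistency with the rest of this page; the edit
data is hypothetical):

    fn main() {
        // (lines inserted, lines spanned by the replaced range) per edit.
        let edits = [(3, 1), (0, 2)];
        let mut line_delta: i32 = 0;
        for (inserted, replaced) in edits {
            line_delta += inserted - replaced; // `+=` as in the fix; `=` would drop the +2
        }
        assert_eq!(line_delta, 0); // +2 from the first edit, -2 from the second
    }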