 Cargo.lock                                        |   1
 crates/ra_ide/src/completion.rs                   |   8
 crates/ra_ide/src/completion/complete_keyword.rs  |   6
 crates/ra_ide/src/completion/presentation.rs      |  50
 crates/ra_ide/src/ssr.rs                          |  12
 crates/ra_ssr/Cargo.toml                          |   1
 crates/ra_ssr/src/matching.rs                     |  39
 crates/ra_ssr/src/parsing.rs                      | 108
 crates/ra_ssr/src/tests.rs                        |  17
 editors/code/src/main.ts                          |  26
 10 files changed, 242 insertions, 26 deletions
diff --git a/Cargo.lock b/Cargo.lock
index e1d2475e2..61ae8157a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1248,6 +1248,7 @@ dependencies = [
  "ra_syntax",
  "ra_text_edit",
  "rustc-hash",
+ "test_utils",
 ]
 
 [[package]]
diff --git a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs
index 69ea754b3..9ebb8ebb7 100644
--- a/crates/ra_ide/src/completion.rs
+++ b/crates/ra_ide/src/completion.rs
@@ -63,11 +63,11 @@ pub use crate::completion::{
 // There also snippet completions:
 //
 // .Expressions
-// - `pd` -> `println!("{:?}")`
-// - `ppd` -> `println!("{:#?}")`
+// - `pd` -> `eprintln!(" = {:?}", );")`
+// - `ppd` -> `eprintln!(" = {:#?}", );`
 //
 // .Items
-// - `tfn` -> `#[test] fn f(){}`
+// - `tfn` -> `#[test] fn feature(){}`
 // - `tmod` ->
 // ```rust
 // #[cfg(test)]
@@ -75,7 +75,7 @@ pub use crate::completion::{
 // use super::*;
 //
 // #[test]
-// fn test_fn() {}
+// fn test_name() {}
 // }
 // ```
 
diff --git a/crates/ra_ide/src/completion/complete_keyword.rs b/crates/ra_ide/src/completion/complete_keyword.rs
index 3b174f916..e599cc3d1 100644
--- a/crates/ra_ide/src/completion/complete_keyword.rs
+++ b/crates/ra_ide/src/completion/complete_keyword.rs
@@ -1,6 +1,6 @@
 //! FIXME: write short doc here
 
-use ra_syntax::ast;
+use ra_syntax::{ast, SyntaxKind};
 
 use crate::completion::{
     CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions,
@@ -37,6 +37,10 @@ pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionC
 }
 
 pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) {
+    if ctx.token.kind() == SyntaxKind::COMMENT {
+        return;
+    }
+
     let has_trait_or_impl_parent = ctx.has_impl_parent || ctx.has_trait_parent;
     if ctx.trait_as_prev_sibling || ctx.impl_as_prev_sibling {
         add_keyword(ctx, acc, "where", "where ");
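The added guard is the whole fix: keyword completion now bails out whenever the token under the cursor is a comment. A minimal illustration of the predicate, separate from the commit; only `SyntaxKind::COMMENT` from `ra_syntax` is taken from the diff, the helper name is mine:

```rust
use ra_syntax::SyntaxKind;

// Illustrative helper (not part of the commit): keyword suggestions are
// only offered when the cursor is not inside a comment token.
fn offers_keyword_completions(token_kind: SyntaxKind) -> bool {
    token_kind != SyntaxKind::COMMENT
}
```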
diff --git a/crates/ra_ide/src/completion/presentation.rs b/crates/ra_ide/src/completion/presentation.rs
index 4fdc2f0bb..b18279746 100644
--- a/crates/ra_ide/src/completion/presentation.rs
+++ b/crates/ra_ide/src/completion/presentation.rs
@@ -1516,4 +1516,54 @@ mod tests {
         "###
         );
     }
+
+    #[test]
+    fn no_keyword_autocompletion_on_line_comments() {
+        assert_debug_snapshot!(
+            do_completion(
+                r"
+                fn test() {
+                    let x = 2; // A comment<|>
+                }
+                ",
+                CompletionKind::Keyword
+            ),
+            @r###"
+        []
+        "###
+        );
+    }
+
+    #[test]
+    fn no_keyword_autocompletion_on_multi_line_comments() {
+        assert_debug_snapshot!(
+            do_completion(
+                r"
+                /*
+                Some multi-line comment<|>
+                */
+                ",
+                CompletionKind::Keyword
+            ),
+            @r###"
+        []
+        "###
+        );
+    }
+
+    #[test]
+    fn no_keyword_autocompletion_on_doc_comments() {
+        assert_debug_snapshot!(
+            do_completion(
+                r"
+                /// Some doc comment
+                /// let test<|> = 1
+                ",
+                CompletionKind::Keyword
+            ),
+            @r###"
+        []
+        "###
+        );
+    }
 }
diff --git a/crates/ra_ide/src/ssr.rs b/crates/ra_ide/src/ssr.rs
index 9f8e540c0..b3e9e5dfe 100644
--- a/crates/ra_ide/src/ssr.rs
+++ b/crates/ra_ide/src/ssr.rs
@@ -10,6 +10,18 @@ use ra_ssr::{MatchFinder, SsrError, SsrRule};
 // The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`.
 // A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
 // Within a macro call, a placeholder will match up until whatever token follows the placeholder.
+//
+// Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
+//
+// Supported constraints:
+//
+// |===
+// | Constraint | Restricts placeholder
+//
+// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`)
+// | not(a) | Negates the constraint `a`
+// |===
+//
 // Available via the command `rust-analyzer.ssr`.
 //
 // ```rust
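Put together, a constrained rule reads like an ordinary SSR rule with the placeholder spelled in braces. A sketch of driving this through the public `ra_ssr` API; `SsrRule`, `SsrError`, and the `FromStr` parse come from the imports and tests in this diff, while the rule strings themselves are only illustrative:

```rust
use ra_ssr::{SsrError, SsrRule};

fn constrained_rules() -> Result<(), SsrError> {
    // Rewrites `foo(42)` but leaves `foo(x)` alone: the placeholder only
    // matches literal arguments.
    let literals_only: SsrRule = "foo(${x:kind(literal)}) ==>> bar($x)".parse()?;

    // The negated form: rewrite calls whose argument is *not* a literal.
    let non_literals: SsrRule = "foo(${x:not(kind(literal))}) ==>> baz($x)".parse()?;

    let _ = (literals_only, non_literals);
    Ok(())
}
```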
diff --git a/crates/ra_ssr/Cargo.toml b/crates/ra_ssr/Cargo.toml
index 3c2f15a83..fe098aaee 100644
--- a/crates/ra_ssr/Cargo.toml
+++ b/crates/ra_ssr/Cargo.toml
@@ -17,3 +17,4 @@ ra_db = { path = "../ra_db" }
 ra_ide_db = { path = "../ra_ide_db" }
 hir = { path = "../ra_hir", package = "ra_hir" }
 rustc-hash = "1.1.0"
+test_utils = { path = "../test_utils" }
diff --git a/crates/ra_ssr/src/matching.rs b/crates/ra_ssr/src/matching.rs
index 53d802e77..ce53d46d2 100644
--- a/crates/ra_ssr/src/matching.rs
+++ b/crates/ra_ssr/src/matching.rs
@@ -2,7 +2,7 @@
 //! process of matching, placeholder values are recorded.
 
 use crate::{
-    parsing::{Placeholder, SsrTemplate},
+    parsing::{Constraint, NodeKind, Placeholder, SsrTemplate},
     SsrMatches, SsrPattern, SsrRule,
 };
 use hir::Semantics;
@@ -11,6 +11,7 @@ use ra_syntax::ast::{AstNode, AstToken};
 use ra_syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken};
 use rustc_hash::FxHashMap;
 use std::{cell::Cell, iter::Peekable};
+use test_utils::mark;
 
 // Creates a match error. If we're currently attempting to match some code that we thought we were
 // going to match, as indicated by the --debug-snippet flag, then populate the reason field.
@@ -169,6 +170,9 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
         if let Some(placeholder) =
             match_inputs.get_placeholder(&SyntaxElement::Node(pattern.clone()))
         {
+            for constraint in &placeholder.constraints {
+                self.check_constraint(constraint, code)?;
+            }
             if self.match_out.is_none() {
                 return Ok(());
             }
@@ -292,6 +296,24 @@ impl<'db, 'sema> MatchState<'db, 'sema> {
         Ok(())
     }
 
+    fn check_constraint(
+        &self,
+        constraint: &Constraint,
+        code: &SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        match constraint {
+            Constraint::Kind(kind) => {
+                kind.matches(code)?;
+            }
+            Constraint::Not(sub) => {
+                if self.check_constraint(&*sub, code).is_ok() {
+                    fail_match!("Constraint {:?} failed for '{}'", constraint, code.text());
+                }
+            }
+        }
+        Ok(())
+    }
+
     /// We want to allow the records to match in any order, so we have special matching logic for
     /// them.
     fn attempt_match_record_field_list(
@@ -515,6 +537,21 @@ impl SsrPattern {
     }
 }
 
+impl NodeKind {
+    fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> {
+        let ok = match self {
+            Self::Literal => {
+                mark::hit!(literal_constraint);
+                ast::Literal::can_cast(node.kind())
+            }
+        };
+        if !ok {
+            fail_match!("Code '{}' isn't of kind {:?}", node.text(), self);
+        }
+        Ok(())
+    }
+}
+
 // If `node` contains nothing but an ident then return it, otherwise return None.
 fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> {
     match element {
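Constraints compose recursively: `check_constraint` delegates `kind(...)` to `NodeKind::matches` and inverts the verdict for `not(...)`. A self-contained sketch of that composition, with the enums mirrored locally and a boolean stand-in for the real syntax-node check (everything in this snippet is illustrative rather than the crate's API):

```rust
// Local mirrors of the `Constraint`/`NodeKind` shapes added in parsing.rs.
#[derive(Debug)]
enum NodeKind {
    Literal,
}

#[derive(Debug)]
enum Constraint {
    Kind(NodeKind),
    Not(Box<Constraint>),
}

// Stand-in for `NodeKind::matches`, which in the real code asks
// `ast::Literal::can_cast(node.kind())` on the matched syntax node.
fn satisfies(constraint: &Constraint, node_is_literal: bool) -> bool {
    match constraint {
        Constraint::Kind(NodeKind::Literal) => node_is_literal,
        Constraint::Not(inner) => !satisfies(inner, node_is_literal),
    }
}

fn main() {
    // `${a:not(kind(literal))}` parses to Not(Kind(Literal)).
    let not_literal = Constraint::Not(Box::new(Constraint::Kind(NodeKind::Literal)));
    assert!(!satisfies(&not_literal, true)); // a literal like `42` is rejected
    assert!(satisfies(&not_literal, false)); // a non-literal like `x1` is accepted
}
```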
diff --git a/crates/ra_ssr/src/parsing.rs b/crates/ra_ssr/src/parsing.rs
index 04d46bd32..5ea125616 100644
--- a/crates/ra_ssr/src/parsing.rs
+++ b/crates/ra_ssr/src/parsing.rs
@@ -6,7 +6,7 @@
 //! e.g. expressions, type references etc.
 
 use crate::{SsrError, SsrPattern, SsrRule};
-use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind};
+use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind, T};
 use rustc_hash::{FxHashMap, FxHashSet};
 use std::str::FromStr;
 
@@ -39,6 +39,18 @@ pub(crate) struct Placeholder {
     pub(crate) ident: SmolStr,
     /// A unique name used in place of this placeholder when we parse the pattern as Rust code.
     stand_in_name: String,
+    pub(crate) constraints: Vec<Constraint>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum Constraint {
+    Kind(NodeKind),
+    Not(Box<Constraint>),
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum NodeKind {
+    Literal,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -149,7 +161,7 @@ fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
     let mut placeholder_names = FxHashSet::default();
     let mut tokens = tokenize(pattern_str)?.into_iter();
     while let Some(token) = tokens.next() {
-        if token.kind == SyntaxKind::DOLLAR {
+        if token.kind == T![$] {
            let placeholder = parse_placeholder(&mut tokens)?;
            if !placeholder_names.insert(placeholder.ident.clone()) {
                bail!("Name `{}` repeats more than once", placeholder.ident);
@@ -177,6 +189,9 @@ fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
             if !defined_placeholders.contains(&placeholder.ident) {
                 undefined.push(format!("${}", placeholder.ident));
             }
+            if !placeholder.constraints.is_empty() {
+                bail!("Replacement placeholders cannot have constraints");
+            }
         }
     }
     if !undefined.is_empty() {
@@ -205,23 +220,90 @@ fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
 
 fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> {
     let mut name = None;
+    let mut constraints = Vec::new();
     if let Some(token) = tokens.next() {
         match token.kind {
             SyntaxKind::IDENT => {
                 name = Some(token.text);
             }
+            T!['{'] => {
+                let token =
+                    tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?;
+                if token.kind == SyntaxKind::IDENT {
+                    name = Some(token.text);
+                }
+                loop {
+                    let token = tokens
+                        .next()
+                        .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?;
+                    match token.kind {
+                        T![:] => {
+                            constraints.push(parse_constraint(tokens)?);
+                        }
+                        T!['}'] => break,
+                        _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text),
+                    }
+                }
+            }
             _ => {
-                bail!("Placeholders should be $name");
+                bail!("Placeholders should either be $name or ${name:constraints}");
             }
         }
     }
     let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?;
-    Ok(Placeholder::new(name))
+    Ok(Placeholder::new(name, constraints))
+}
+
+fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> {
+    let constraint_type = tokens
+        .next()
+        .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))?
+        .text
+        .to_string();
+    match constraint_type.as_str() {
+        "kind" => {
+            expect_token(tokens, "(")?;
+            let t = tokens.next().ok_or_else(|| {
+                SsrError::new("Unexpected end of constraint while looking for kind")
+            })?;
+            if t.kind != SyntaxKind::IDENT {
+                bail!("Expected ident, found {:?} while parsing kind constraint", t.kind);
+            }
+            expect_token(tokens, ")")?;
+            Ok(Constraint::Kind(NodeKind::from(&t.text)?))
+        }
+        "not" => {
+            expect_token(tokens, "(")?;
+            let sub = parse_constraint(tokens)?;
+            expect_token(tokens, ")")?;
+            Ok(Constraint::Not(Box::new(sub)))
+        }
+        x => bail!("Unsupported constraint type '{}'", x),
+    }
+}
+
+fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> {
+    if let Some(t) = tokens.next() {
+        if t.text == expected {
+            return Ok(());
+        }
+        bail!("Expected {} found {}", expected, t.text);
+    }
+    bail!("Expected {} found end of stream", expected);
+}
+
+impl NodeKind {
+    fn from(name: &SmolStr) -> Result<NodeKind, SsrError> {
+        Ok(match name.as_str() {
+            "literal" => NodeKind::Literal,
+            _ => bail!("Unknown node kind '{}'", name),
+        })
+    }
 }
 
 impl Placeholder {
-    fn new(name: SmolStr) -> Self {
-        Self { stand_in_name: format!("__placeholder_{}", name), ident: name }
+    fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self {
+        Self { stand_in_name: format!("__placeholder_{}", name), constraints, ident: name }
     }
 }
 
@@ -241,31 +323,31 @@ mod tests {
             PatternElement::Token(Token { kind, text: SmolStr::new(text) })
         }
         fn placeholder(name: &str) -> PatternElement {
-            PatternElement::Placeholder(Placeholder::new(SmolStr::new(name)))
+            PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new()))
         }
         let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
         assert_eq!(
             result.pattern.raw.tokens,
             vec![
                 token(SyntaxKind::IDENT, "foo"),
-                token(SyntaxKind::L_PAREN, "("),
+                token(T!['('], "("),
                 placeholder("a"),
-                token(SyntaxKind::COMMA, ","),
+                token(T![,], ","),
                 token(SyntaxKind::WHITESPACE, " "),
                 placeholder("b"),
-                token(SyntaxKind::R_PAREN, ")"),
+                token(T![')'], ")"),
             ]
         );
         assert_eq!(
             result.template.tokens,
             vec![
                 token(SyntaxKind::IDENT, "bar"),
-                token(SyntaxKind::L_PAREN, "("),
+                token(T!['('], "("),
                 placeholder("b"),
-                token(SyntaxKind::COMMA, ","),
+                token(T![,], ","),
                 token(SyntaxKind::WHITESPACE, " "),
                 placeholder("a"),
-                token(SyntaxKind::R_PAREN, ")"),
+                token(T![')'], ")"),
             ]
         );
     }
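For orientation, the grammar accepted by `parse_placeholder` and `parse_constraint` is roughly `$name` or `${name:constraint:...}`, where each constraint is either `kind(<ident>)` or `not(<constraint>)`. As an illustration (not code from the commit, but using only the `Placeholder`, `Constraint`, and `NodeKind` definitions added above, as if it sat inside parsing.rs), the placeholder `${a:not(kind(literal))}` would be built as:

```rust
// Illustrative only: the value parse_placeholder produces for `${a:not(kind(literal))}`.
fn example_placeholder() -> Placeholder {
    Placeholder::new(
        SmolStr::new("a"),
        vec![Constraint::Not(Box::new(Constraint::Kind(NodeKind::Literal)))],
    )
}
```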
diff --git a/crates/ra_ssr/src/tests.rs b/crates/ra_ssr/src/tests.rs
index c692c97e2..9568d4432 100644
--- a/crates/ra_ssr/src/tests.rs
+++ b/crates/ra_ssr/src/tests.rs
@@ -1,5 +1,6 @@
 use crate::{MatchFinder, SsrRule};
 use ra_db::{FileId, SourceDatabaseExt};
+use test_utils::mark;
 
 fn parse_error_text(query: &str) -> String {
     format!("{}", query.parse::<SsrRule>().unwrap_err())
@@ -302,6 +303,22 @@ fn match_pattern() {
 }
 
 #[test]
+fn literal_constraint() {
+    mark::check!(literal_constraint);
+    let code = r#"
+        fn f1() {
+            let x1 = Some(42);
+            let x2 = Some("foo");
+            let x3 = Some(x1);
+            let x4 = Some(40 + 2);
+            let x5 = Some(true);
+        }
+        "#;
+    assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]);
+    assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]);
+}
+
+#[test]
 fn match_reordered_struct_instantiation() {
     assert_matches(
         "Foo {aa: 1, b: 2, ccc: 3}",
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts
index 2982b9cbf..a1521a93b 100644
--- a/editors/code/src/main.ts
+++ b/editors/code/src/main.ts
@@ -19,6 +19,16 @@ let ctx: Ctx | undefined;
 const RUST_PROJECT_CONTEXT_NAME = "inRustProject";
 
 export async function activate(context: vscode.ExtensionContext) {
+    // For some reason vscode not always shows pop-up error notifications
+    // when an extension fails to activate, so we do it explicitly by ourselves.
+    // FIXME: remove this bit of code once vscode fixes this issue: https://github.com/microsoft/vscode/issues/101242
+    await tryActivate(context).catch(err => {
+        void vscode.window.showErrorMessage(`Cannot activate rust-analyzer: ${err.message}`);
+        throw err;
+    });
+}
+
+async function tryActivate(context: vscode.ExtensionContext) {
     // Register a "dumb" onEnter command for the case where server fails to
     // start.
     //
@@ -58,9 +68,7 @@ export async function activate(context: vscode.ExtensionContext) {
 
     const workspaceFolder = vscode.workspace.workspaceFolders?.[0];
     if (workspaceFolder === undefined) {
-        const err = "Cannot activate rust-analyzer when no folder is opened";
-        void vscode.window.showErrorMessage(err);
-        throw new Error(err);
+        throw new Error("no folder is opened");
     }
 
     // Note: we try to start the server before we activate type hints so that it
@@ -152,13 +160,17 @@ async function bootstrapExtension(config: Config, state: PersistentState): Promi
         return;
     };
 
-    const lastCheck = state.lastCheck;
     const now = Date.now();
+    if (config.package.releaseTag === NIGHTLY_TAG) {
+        // Check if we should poll github api for the new nightly version
+        // if we haven't done it during the past hour
+        const lastCheck = state.lastCheck;
 
-    const anHour = 60 * 60 * 1000;
-    const shouldDownloadNightly = state.releaseId === undefined || (now - (lastCheck ?? 0)) > anHour;
+        const anHour = 60 * 60 * 1000;
+        const shouldCheckForNewNightly = state.releaseId === undefined || (now - (lastCheck ?? 0)) > anHour;
 
-    if (!shouldDownloadNightly) return;
+        if (!shouldCheckForNewNightly) return;
+    }
 
     const release = await fetchRelease("nightly").catch((e) => {
 