Diffstat (limited to 'crates')
-rw-r--r--   crates/hir/src/lib.rs                              |  11
-rw-r--r--   crates/ide/src/folding_ranges.rs                   |  55
-rw-r--r--   crates/ide/src/hover.rs                            | 135
-rw-r--r--   crates/ide/src/join_lines.rs                       |  74
-rw-r--r--   crates/ide_assists/src/handlers/add_turbo_fish.rs  | 108
-rw-r--r--   crates/ide_db/src/helpers.rs                       |   4
-rw-r--r--   crates/ide_db/src/helpers/famous_defs_fixture.rs   |   8
-rw-r--r--   crates/mbe/Cargo.toml                              |   1
-rw-r--r--   crates/mbe/src/benchmark.rs                        |  40
-rw-r--r--   crates/mbe/src/expander.rs                         |  16
-rw-r--r--   crates/mbe/src/expander/matcher.rs                 | 559
-rw-r--r--   crates/mbe/src/expander/transcriber.rs             |  12
-rw-r--r--   crates/mbe/src/lib.rs                              |  11
-rw-r--r--   crates/mbe/src/parser.rs                           |  80
-rw-r--r--   crates/mbe/src/tests.rs                            |  23
-rw-r--r--   crates/rust-analyzer/src/to_proto.rs               |   2
16 files changed, 935 insertions(+), 204 deletions(-)
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 769945c47..69fcdab07 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -33,11 +33,11 @@ mod has_source; | |||
33 | pub use crate::{ | 33 | pub use crate::{ |
34 | attrs::{HasAttrs, Namespace}, | 34 | attrs::{HasAttrs, Namespace}, |
35 | code_model::{ | 35 | code_model::{ |
36 | Access, Adt, AsAssocItem, AssocItem, AssocItemContainer, Callable, CallableKind, Const, | 36 | Access, Adt, AsAssocItem, AssocItem, AssocItemContainer, BuiltinType, Callable, |
37 | ConstParam, Crate, CrateDependency, DefWithBody, Enum, Field, FieldSource, Function, | 37 | CallableKind, Const, ConstParam, Crate, CrateDependency, DefWithBody, Enum, Field, |
38 | GenericDef, GenericParam, HasVisibility, Impl, Label, LifetimeParam, Local, MacroDef, | 38 | FieldSource, Function, GenericDef, GenericParam, HasVisibility, Impl, Label, LifetimeParam, |
39 | Module, ModuleDef, ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam, Union, | 39 | Local, MacroDef, Module, ModuleDef, ScopeDef, Static, Struct, Trait, Type, TypeAlias, |
40 | Variant, VariantDef, | 40 | TypeParam, Union, Variant, VariantDef, |
41 | }, | 41 | }, |
42 | has_source::HasSource, | 42 | has_source::HasSource, |
43 | semantics::{PathResolution, Semantics, SemanticsScope}, | 43 | semantics::{PathResolution, Semantics, SemanticsScope}, |
@@ -47,7 +47,6 @@ pub use hir_def::{ | |||
47 | adt::StructKind, | 47 | adt::StructKind, |
48 | attr::{Attrs, Documentation}, | 48 | attr::{Attrs, Documentation}, |
49 | body::scope::ExprScopes, | 49 | body::scope::ExprScopes, |
50 | builtin_type::BuiltinType, | ||
51 | find_path::PrefixKind, | 50 | find_path::PrefixKind, |
52 | import_map, | 51 | import_map, |
53 | item_scope::ItemInNs, | 52 | item_scope::ItemInNs, |
diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs
index 45170dd29..4b1b24562 100644
--- a/crates/ide/src/folding_ranges.rs
+++ b/crates/ide/src/folding_ranges.rs
@@ -6,7 +6,7 @@ use syntax::{ | |||
6 | ast::{self, AstNode, AstToken, VisibilityOwner}, | 6 | ast::{self, AstNode, AstToken, VisibilityOwner}, |
7 | Direction, NodeOrToken, SourceFile, | 7 | Direction, NodeOrToken, SourceFile, |
8 | SyntaxKind::{self, *}, | 8 | SyntaxKind::{self, *}, |
9 | SyntaxNode, TextRange, | 9 | SyntaxNode, TextRange, TextSize, |
10 | }; | 10 | }; |
11 | 11 | ||
12 | #[derive(Debug, PartialEq, Eq)] | 12 | #[derive(Debug, PartialEq, Eq)] |
@@ -16,6 +16,7 @@ pub enum FoldKind { | |||
16 | Mods, | 16 | Mods, |
17 | Block, | 17 | Block, |
18 | ArgList, | 18 | ArgList, |
19 | Region, | ||
19 | } | 20 | } |
20 | 21 | ||
21 | #[derive(Debug)] | 22 | #[derive(Debug)] |
@@ -29,6 +30,8 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> { | |||
29 | let mut visited_comments = FxHashSet::default(); | 30 | let mut visited_comments = FxHashSet::default(); |
30 | let mut visited_imports = FxHashSet::default(); | 31 | let mut visited_imports = FxHashSet::default(); |
31 | let mut visited_mods = FxHashSet::default(); | 32 | let mut visited_mods = FxHashSet::default(); |
33 | // regions can be nested, so keep a LIFO stack of region start offsets | ||
34 | let mut regions_starts: Vec<TextSize> = vec![]; | ||
32 | 35 | ||
33 | for element in file.syntax().descendants_with_tokens() { | 36 | for element in file.syntax().descendants_with_tokens() { |
34 | // Fold items that span multiple lines | 37 | // Fold items that span multiple lines |
@@ -48,10 +51,25 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> { | |||
48 | // Fold groups of comments | 51 | // Fold groups of comments |
49 | if let Some(comment) = ast::Comment::cast(token) { | 52 | if let Some(comment) = ast::Comment::cast(token) { |
50 | if !visited_comments.contains(&comment) { | 53 | if !visited_comments.contains(&comment) { |
51 | if let Some(range) = | 54 | // regions are not real comments |
52 | contiguous_range_for_comment(comment, &mut visited_comments) | 55 | if comment.text().trim().starts_with("// region:") { |
53 | { | 56 | regions_starts.push(comment.syntax().text_range().start()); |
54 | res.push(Fold { range, kind: FoldKind::Comment }) | 57 | } else if comment.text().trim().starts_with("// endregion") { |
58 | if let Some(region) = regions_starts.pop() { | ||
59 | res.push(Fold { | ||
60 | range: TextRange::new( | ||
61 | region, | ||
62 | comment.syntax().text_range().end(), | ||
63 | ), | ||
64 | kind: FoldKind::Region, | ||
65 | }) | ||
66 | } | ||
67 | } else { | ||
68 | if let Some(range) = | ||
69 | contiguous_range_for_comment(comment, &mut visited_comments) | ||
70 | { | ||
71 | res.push(Fold { range, kind: FoldKind::Comment }) | ||
72 | } | ||
55 | } | 73 | } |
56 | } | 74 | } |
57 | } | 75 | } |
@@ -175,9 +193,16 @@ fn contiguous_range_for_comment( | |||
175 | } | 193 | } |
176 | if let Some(c) = ast::Comment::cast(token) { | 194 | if let Some(c) = ast::Comment::cast(token) { |
177 | if c.kind() == group_kind { | 195 | if c.kind() == group_kind { |
178 | visited.insert(c.clone()); | 196 | // regions are not real comments |
179 | last = c; | 197 | if c.text().trim().starts_with("// region:") |
180 | continue; | 198 | || c.text().trim().starts_with("// endregion") |
199 | { | ||
200 | break; | ||
201 | } else { | ||
202 | visited.insert(c.clone()); | ||
203 | last = c; | ||
204 | continue; | ||
205 | } | ||
181 | } | 206 | } |
182 | } | 207 | } |
183 | // The comment group ends because either: | 208 | // The comment group ends because either: |
@@ -224,6 +249,7 @@ mod tests { | |||
224 | FoldKind::Mods => "mods", | 249 | FoldKind::Mods => "mods", |
225 | FoldKind::Block => "block", | 250 | FoldKind::Block => "block", |
226 | FoldKind::ArgList => "arglist", | 251 | FoldKind::ArgList => "arglist", |
252 | FoldKind::Region => "region", | ||
227 | }; | 253 | }; |
228 | assert_eq!(kind, &attr.unwrap()); | 254 | assert_eq!(kind, &attr.unwrap()); |
229 | } | 255 | } |
@@ -418,4 +444,17 @@ fn foo<fold arglist>( | |||
418 | "#, | 444 | "#, |
419 | ) | 445 | ) |
420 | } | 446 | } |
447 | |||
448 | #[test] | ||
449 | fn fold_region() { | ||
450 | check( | ||
451 | r#" | ||
452 | // 1. some normal comment | ||
453 | <fold region>// region: test | ||
454 | // 2. some normal comment | ||
455 | calling_function(x,y); | ||
456 | // endregion: test</fold> | ||
457 | "#, | ||
458 | ) | ||
459 | } | ||
421 | } | 460 | } |
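
For reference, a small example of the comment convention the new `FoldKind::Region` recognizes; the marker prefixes `// region:` and `// endregion` are taken straight from the code above, and the label text after the colon is free-form:

fn main() {
    // region: setup
    let x = 1;
    let y = 2;
    // endregion: setup
    println!("{}", x + y);
}

Everything from the `// region:` line to the matching `// endregion` line becomes a single folding range, and because start offsets are pushed onto a stack, regions may nest.
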
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index 20b799490..a9454cfa3 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -5,6 +5,7 @@ use hir::{ | |||
5 | use ide_db::{ | 5 | use ide_db::{ |
6 | base_db::SourceDatabase, | 6 | base_db::SourceDatabase, |
7 | defs::{Definition, NameClass, NameRefClass}, | 7 | defs::{Definition, NameClass, NameRefClass}, |
8 | helpers::FamousDefs, | ||
8 | RootDatabase, | 9 | RootDatabase, |
9 | }; | 10 | }; |
10 | use itertools::Itertools; | 11 | use itertools::Itertools; |
@@ -107,16 +108,14 @@ pub(crate) fn hover( | |||
107 | } | 108 | } |
108 | }; | 109 | }; |
109 | if let Some(definition) = definition { | 110 | if let Some(definition) = definition { |
110 | if let Some(markup) = hover_for_definition(db, definition) { | 111 | let famous_defs = match &definition { |
111 | let markup = markup.as_str(); | 112 | Definition::ModuleDef(ModuleDef::BuiltinType(_)) => { |
112 | let markup = if !markdown { | 113 | Some(FamousDefs(&sema, sema.scope(&node).krate())) |
113 | remove_markdown(markup) | 114 | } |
114 | } else if links_in_hover { | 115 | _ => None, |
115 | rewrite_links(db, markup, &definition) | 116 | }; |
116 | } else { | 117 | if let Some(markup) = hover_for_definition(db, definition, famous_defs.as_ref()) { |
117 | remove_links(markup) | 118 | res.markup = process_markup(sema.db, definition, &markup, links_in_hover, markdown); |
118 | }; | ||
119 | res.markup = Markup::from(markup); | ||
120 | if let Some(action) = show_implementations_action(db, definition) { | 119 | if let Some(action) = show_implementations_action(db, definition) { |
121 | res.actions.push(action); | 120 | res.actions.push(action); |
122 | } | 121 | } |
@@ -138,6 +137,9 @@ pub(crate) fn hover( | |||
138 | // don't highlight the entire parent node on comment hover | 137 | // don't highlight the entire parent node on comment hover |
139 | return None; | 138 | return None; |
140 | } | 139 | } |
140 | if let res @ Some(_) = hover_for_keyword(&sema, links_in_hover, markdown, &token) { | ||
141 | return res; | ||
142 | } | ||
141 | 143 | ||
142 | let node = token | 144 | let node = token |
143 | .ancestors() | 145 | .ancestors() |
@@ -272,6 +274,24 @@ fn hover_markup( | |||
272 | } | 274 | } |
273 | } | 275 | } |
274 | 276 | ||
277 | fn process_markup( | ||
278 | db: &RootDatabase, | ||
279 | def: Definition, | ||
280 | markup: &Markup, | ||
281 | links_in_hover: bool, | ||
282 | markdown: bool, | ||
283 | ) -> Markup { | ||
284 | let markup = markup.as_str(); | ||
285 | let markup = if !markdown { | ||
286 | remove_markdown(markup) | ||
287 | } else if links_in_hover { | ||
288 | rewrite_links(db, markup, &def) | ||
289 | } else { | ||
290 | remove_links(markup) | ||
291 | }; | ||
292 | Markup::from(markup) | ||
293 | } | ||
294 | |||
275 | fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option<String> { | 295 | fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option<String> { |
276 | match def { | 296 | match def { |
277 | Definition::Field(f) => Some(f.parent_def(db).name(db)), | 297 | Definition::Field(f) => Some(f.parent_def(db).name(db)), |
@@ -304,7 +324,11 @@ fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option<String> { | |||
304 | def.module(db).map(|module| render_path(db, module, definition_owner_name(db, def))) | 324 | def.module(db).map(|module| render_path(db, module, definition_owner_name(db, def))) |
305 | } | 325 | } |
306 | 326 | ||
307 | fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option<Markup> { | 327 | fn hover_for_definition( |
328 | db: &RootDatabase, | ||
329 | def: Definition, | ||
330 | famous_defs: Option<&FamousDefs>, | ||
331 | ) -> Option<Markup> { | ||
308 | let mod_path = definition_mod_path(db, &def); | 332 | let mod_path = definition_mod_path(db, &def); |
309 | return match def { | 333 | return match def { |
310 | Definition::Macro(it) => { | 334 | Definition::Macro(it) => { |
@@ -339,7 +363,9 @@ fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option<Markup> { | |||
339 | ModuleDef::Static(it) => from_def_source(db, it, mod_path), | 363 | ModuleDef::Static(it) => from_def_source(db, it, mod_path), |
340 | ModuleDef::Trait(it) => from_def_source(db, it, mod_path), | 364 | ModuleDef::Trait(it) => from_def_source(db, it, mod_path), |
341 | ModuleDef::TypeAlias(it) => from_def_source(db, it, mod_path), | 365 | ModuleDef::TypeAlias(it) => from_def_source(db, it, mod_path), |
342 | ModuleDef::BuiltinType(it) => Some(Markup::fenced_block(&it.name())), | 366 | ModuleDef::BuiltinType(it) => famous_defs |
367 | .and_then(|fd| hover_for_builtin(fd, it)) | ||
368 | .or_else(|| Some(Markup::fenced_block(&it.name()))), | ||
343 | }, | 369 | }, |
344 | Definition::Local(it) => Some(Markup::fenced_block(&it.ty(db).display(db))), | 370 | Definition::Local(it) => Some(Markup::fenced_block(&it.ty(db).display(db))), |
345 | Definition::SelfType(impl_def) => { | 371 | Definition::SelfType(impl_def) => { |
@@ -380,11 +406,52 @@ fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option<Markup> { | |||
380 | } | 406 | } |
381 | } | 407 | } |
382 | 408 | ||
409 | fn hover_for_keyword( | ||
410 | sema: &Semantics<RootDatabase>, | ||
411 | links_in_hover: bool, | ||
412 | markdown: bool, | ||
413 | token: &SyntaxToken, | ||
414 | ) -> Option<RangeInfo<HoverResult>> { | ||
415 | if !token.kind().is_keyword() { | ||
416 | return None; | ||
417 | } | ||
418 | let famous_defs = FamousDefs(&sema, sema.scope(&token.parent()).krate()); | ||
419 | // std exposes {}_keyword modules with docstrings on the root to document keywords | ||
420 | let keyword_mod = format!("{}_keyword", token.text()); | ||
421 | let doc_owner = find_std_module(&famous_defs, &keyword_mod)?; | ||
422 | let docs = doc_owner.attrs(sema.db).docs()?; | ||
423 | let markup = process_markup( | ||
424 | sema.db, | ||
425 | Definition::ModuleDef(doc_owner.into()), | ||
426 | &hover_markup(Some(docs.into()), Some(token.text().into()), None)?, | ||
427 | links_in_hover, | ||
428 | markdown, | ||
429 | ); | ||
430 | Some(RangeInfo::new(token.text_range(), HoverResult { markup, actions: Default::default() })) | ||
431 | } | ||
432 | |||
433 | fn hover_for_builtin(famous_defs: &FamousDefs, builtin: hir::BuiltinType) -> Option<Markup> { | ||
434 | // std exposes prim_{} modules with docstrings on the root to document the builtins | ||
435 | let primitive_mod = format!("prim_{}", builtin.name()); | ||
436 | let doc_owner = find_std_module(famous_defs, &primitive_mod)?; | ||
437 | let docs = doc_owner.attrs(famous_defs.0.db).docs()?; | ||
438 | hover_markup(Some(docs.into()), Some(builtin.name().to_string()), None) | ||
439 | } | ||
440 | |||
441 | fn find_std_module(famous_defs: &FamousDefs, name: &str) -> Option<hir::Module> { | ||
442 | let db = famous_defs.0.db; | ||
443 | let std_crate = famous_defs.std()?; | ||
444 | let std_root_module = std_crate.root_module(db); | ||
445 | std_root_module | ||
446 | .children(db) | ||
447 | .find(|module| module.name(db).map_or(false, |module| module.to_string() == name)) | ||
448 | } | ||
449 | |||
383 | fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> { | 450 | fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> { |
384 | return tokens.max_by_key(priority); | 451 | return tokens.max_by_key(priority); |
385 | fn priority(n: &SyntaxToken) -> usize { | 452 | fn priority(n: &SyntaxToken) -> usize { |
386 | match n.kind() { | 453 | match n.kind() { |
387 | IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] => 3, | 454 | IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => 3, |
388 | T!['('] | T![')'] => 2, | 455 | T!['('] | T![')'] => 2, |
389 | kind if kind.is_trivia() => 0, | 456 | kind if kind.is_trivia() => 0, |
390 | _ => 1, | 457 | _ => 1, |
@@ -3523,6 +3590,48 @@ use foo::bar::{self$0}; | |||
3523 | 3590 | ||
3524 | But this should appear | 3591 | But this should appear |
3525 | "#]], | 3592 | "#]], |
3593 | ) | ||
3594 | } | ||
3595 | |||
3596 | #[test] | ||
3597 | fn hover_keyword() { | ||
3598 | let ra_fixture = r#"//- /main.rs crate:main deps:std | ||
3599 | fn f() { retur$0n; }"#; | ||
3600 | let fixture = format!("{}\n{}", ra_fixture, FamousDefs::FIXTURE); | ||
3601 | check( | ||
3602 | &fixture, | ||
3603 | expect![[r#" | ||
3604 | *return* | ||
3605 | |||
3606 | ```rust | ||
3607 | return | ||
3608 | ``` | ||
3609 | |||
3610 | --- | ||
3611 | |||
3612 | Docs for return_keyword | ||
3613 | "#]], | ||
3614 | ); | ||
3615 | } | ||
3616 | |||
3617 | #[test] | ||
3618 | fn hover_builtin() { | ||
3619 | let ra_fixture = r#"//- /main.rs crate:main deps:std | ||
3620 | const _: &str$0 = "";"#; | ||
3621 | let fixture = format!("{}\n{}", ra_fixture, FamousDefs::FIXTURE); | ||
3622 | check( | ||
3623 | &fixture, | ||
3624 | expect![[r#" | ||
3625 | *str* | ||
3626 | |||
3627 | ```rust | ||
3628 | str | ||
3629 | ``` | ||
3630 | |||
3631 | --- | ||
3632 | |||
3633 | Docs for prim_str | ||
3634 | "#]], | ||
3526 | ); | 3635 | ); |
3527 | } | 3636 | } |
3528 | } | 3637 | } |
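
A hedged illustration of what the hover changes enable, assuming a `std` dependency that exposes `<keyword>_keyword` and `prim_<type>` doc modules on its root, exactly as the famous_defs fixture added later in this diff models (real std attaches these docs in its own sources, not shown here):

// Hover targets that now resolve to documentation.
fn greet(name: &str) -> String {      // hovering `str` shows the prim_str docs
    return format!("hello {}", name); // hovering `return` shows the return_keyword docs
}

fn main() {
    // `crate` and `super` now get priority 3 in `pick_best`, so hovering them
    // selects the keyword token itself rather than neighbouring punctuation.
    let _ = crate::greet("world");
}
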
diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs
index 2c077ed1f..7fcae13e0 100644
--- a/crates/ide/src/join_lines.rs
+++ b/crates/ide/src/join_lines.rs
@@ -7,6 +7,7 @@ use syntax::{ | |||
7 | SyntaxKind::{self, USE_TREE, WHITESPACE}, | 7 | SyntaxKind::{self, USE_TREE, WHITESPACE}, |
8 | SyntaxNode, SyntaxToken, TextRange, TextSize, T, | 8 | SyntaxNode, SyntaxToken, TextRange, TextSize, T, |
9 | }; | 9 | }; |
10 | use test_utils::mark; | ||
10 | use text_edit::{TextEdit, TextEditBuilder}; | 11 | use text_edit::{TextEdit, TextEditBuilder}; |
11 | 12 | ||
12 | // Feature: Join Lines | 13 | // Feature: Join Lines |
@@ -44,9 +45,9 @@ pub(crate) fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { | |||
44 | let text = token.text(); | 45 | let text = token.text(); |
45 | for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') { | 46 | for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') { |
46 | let pos: TextSize = (pos as u32).into(); | 47 | let pos: TextSize = (pos as u32).into(); |
47 | let off = token.text_range().start() + range.start() + pos; | 48 | let offset = token.text_range().start() + range.start() + pos; |
48 | if !edit.invalidates_offset(off) { | 49 | if !edit.invalidates_offset(offset) { |
49 | remove_newline(&mut edit, &token, off); | 50 | remove_newline(&mut edit, &token, offset); |
50 | } | 51 | } |
51 | } | 52 | } |
52 | } | 53 | } |
@@ -56,14 +57,25 @@ pub(crate) fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { | |||
56 | 57 | ||
57 | fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextSize) { | 58 | fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextSize) { |
58 | if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { | 59 | if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { |
59 | // The node is either the first or the last in the file | 60 | let mut string_open_quote = false; |
60 | let suff = &token.text()[TextRange::new( | 61 | if let Some(string) = ast::String::cast(token.clone()) { |
61 | offset - token.text_range().start() + TextSize::of('\n'), | 62 | if let Some(range) = string.open_quote_text_range() { |
62 | TextSize::of(token.text()), | 63 | mark::hit!(join_string_literal); |
63 | )]; | 64 | string_open_quote = range.end() == offset; |
64 | let spaces = suff.bytes().take_while(|&b| b == b' ').count(); | 65 | } |
65 | 66 | } | |
66 | edit.replace(TextRange::at(offset, ((spaces + 1) as u32).into()), " ".to_string()); | 67 | |
68 | let n_spaces_after_line_break = { | ||
69 | let suff = &token.text()[TextRange::new( | ||
70 | offset - token.text_range().start() + TextSize::of('\n'), | ||
71 | TextSize::of(token.text()), | ||
72 | )]; | ||
73 | suff.bytes().take_while(|&b| b == b' ').count() | ||
74 | }; | ||
75 | |||
76 | let range = TextRange::at(offset, ((n_spaces_after_line_break + 1) as u32).into()); | ||
77 | let replace_with = if string_open_quote { "" } else { " " }; | ||
78 | edit.replace(range, replace_with.to_string()); | ||
67 | return; | 79 | return; |
68 | } | 80 | } |
69 | 81 | ||
@@ -194,7 +206,7 @@ fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { | |||
194 | #[cfg(test)] | 206 | #[cfg(test)] |
195 | mod tests { | 207 | mod tests { |
196 | use syntax::SourceFile; | 208 | use syntax::SourceFile; |
197 | use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; | 209 | use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range, mark}; |
198 | 210 | ||
199 | use super::*; | 211 | use super::*; |
200 | 212 | ||
@@ -771,4 +783,42 @@ fn foo() { | |||
771 | ", | 783 | ", |
772 | ); | 784 | ); |
773 | } | 785 | } |
786 | |||
787 | #[test] | ||
788 | fn join_string_literal() { | ||
789 | mark::check!(join_string_literal); | ||
790 | check_join_lines( | ||
791 | r#" | ||
792 | fn main() { | ||
793 | $0" | ||
794 | hello | ||
795 | "; | ||
796 | } | ||
797 | "#, | ||
798 | r#" | ||
799 | fn main() { | ||
800 | $0"hello | ||
801 | "; | ||
802 | } | ||
803 | "#, | ||
804 | ); | ||
805 | |||
806 | check_join_lines( | ||
807 | r#" | ||
808 | fn main() { | ||
809 | " | ||
810 | $0hello | ||
811 | world | ||
812 | "; | ||
813 | } | ||
814 | "#, | ||
815 | r#" | ||
816 | fn main() { | ||
817 | " | ||
818 | $0hello world | ||
819 | "; | ||
820 | } | ||
821 | "#, | ||
822 | ); | ||
823 | } | ||
774 | } | 824 | } |
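
A minimal stand-alone sketch of the new rule (a hypothetical helper, not the real `remove_newline`/`TextEdit` machinery): when the newline being joined immediately follows a string literal's opening quote, it is deleted together with the indentation instead of being collapsed to a single space:

// Join the first two lines of `text`, mirroring the behaviour in the tests above.
fn join_first_two_lines(text: &str, inside_string_after_open_quote: bool) -> String {
    let (first, rest) = match text.split_once('\n') {
        Some(parts) => parts,
        None => return text.to_string(),
    };
    // indentation after the line break is always removed
    let rest = rest.trim_start_matches(' ');
    // after an opening quote nothing is inserted; otherwise a single space
    let glue = if inside_string_after_open_quote { "" } else { " " };
    format!("{}{}{}", first, glue, rest)
}

fn main() {
    // `"` + newline + indented `hello` becomes `"hello`, matching the new test.
    assert_eq!(join_first_two_lines("\"\n    hello", true), "\"hello");
    // an ordinary code join still collapses the break to one space
    assert_eq!(join_first_two_lines("foo(1,\n    2)", false), "foo(1, 2)");
}
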
diff --git a/crates/ide_assists/src/handlers/add_turbo_fish.rs b/crates/ide_assists/src/handlers/add_turbo_fish.rs
index 8e9ea4fad..a08b55ebb 100644
--- a/crates/ide_assists/src/handlers/add_turbo_fish.rs
+++ b/crates/ide_assists/src/handlers/add_turbo_fish.rs
@@ -31,6 +31,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<( | |||
31 | return None; | 31 | return None; |
32 | } | 32 | } |
33 | mark::hit!(add_turbo_fish_after_call); | 33 | mark::hit!(add_turbo_fish_after_call); |
34 | mark::hit!(add_type_ascription_after_call); | ||
34 | arg_list.l_paren_token()?.prev_token().filter(|it| it.kind() == SyntaxKind::IDENT) | 35 | arg_list.l_paren_token()?.prev_token().filter(|it| it.kind() == SyntaxKind::IDENT) |
35 | })?; | 36 | })?; |
36 | let next_token = ident.next_token()?; | 37 | let next_token = ident.next_token()?; |
@@ -52,6 +53,24 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<( | |||
52 | mark::hit!(add_turbo_fish_non_generic); | 53 | mark::hit!(add_turbo_fish_non_generic); |
53 | return None; | 54 | return None; |
54 | } | 55 | } |
56 | |||
57 | if let Some(let_stmt) = ctx.find_node_at_offset::<ast::LetStmt>() { | ||
58 | if let_stmt.colon_token().is_none() { | ||
59 | let type_pos = let_stmt.pat()?.syntax().last_token()?.text_range().end(); | ||
60 | acc.add( | ||
61 | AssistId("add_type_ascription", AssistKind::RefactorRewrite), | ||
62 | "Add `: _` before assignment operator", | ||
63 | ident.text_range(), | ||
64 | |builder| match ctx.config.snippet_cap { | ||
65 | Some(cap) => builder.insert_snippet(cap, type_pos, ": ${0:_}"), | ||
66 | None => builder.insert(type_pos, ": _"), | ||
67 | }, | ||
68 | )? | ||
69 | } else { | ||
70 | mark::hit!(add_type_ascription_already_typed); | ||
71 | } | ||
72 | } | ||
73 | |||
55 | acc.add( | 74 | acc.add( |
56 | AssistId("add_turbo_fish", AssistKind::RefactorRewrite), | 75 | AssistId("add_turbo_fish", AssistKind::RefactorRewrite), |
57 | "Add `::<>`", | 76 | "Add `::<>`", |
@@ -65,7 +84,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<( | |||
65 | 84 | ||
66 | #[cfg(test)] | 85 | #[cfg(test)] |
67 | mod tests { | 86 | mod tests { |
68 | use crate::tests::{check_assist, check_assist_not_applicable}; | 87 | use crate::tests::{check_assist, check_assist_by_label, check_assist_not_applicable}; |
69 | 88 | ||
70 | use super::*; | 89 | use super::*; |
71 | use test_utils::mark; | 90 | use test_utils::mark; |
@@ -161,4 +180,91 @@ fn main() { | |||
161 | "#, | 180 | "#, |
162 | ); | 181 | ); |
163 | } | 182 | } |
183 | |||
184 | #[test] | ||
185 | fn add_type_ascription_function() { | ||
186 | check_assist_by_label( | ||
187 | add_turbo_fish, | ||
188 | r#" | ||
189 | fn make<T>() -> T {} | ||
190 | fn main() { | ||
191 | let x = make$0(); | ||
192 | } | ||
193 | "#, | ||
194 | r#" | ||
195 | fn make<T>() -> T {} | ||
196 | fn main() { | ||
197 | let x: ${0:_} = make(); | ||
198 | } | ||
199 | "#, | ||
200 | "Add `: _` before assignment operator", | ||
201 | ); | ||
202 | } | ||
203 | |||
204 | #[test] | ||
205 | fn add_type_ascription_after_call() { | ||
206 | mark::check!(add_type_ascription_after_call); | ||
207 | check_assist_by_label( | ||
208 | add_turbo_fish, | ||
209 | r#" | ||
210 | fn make<T>() -> T {} | ||
211 | fn main() { | ||
212 | let x = make()$0; | ||
213 | } | ||
214 | "#, | ||
215 | r#" | ||
216 | fn make<T>() -> T {} | ||
217 | fn main() { | ||
218 | let x: ${0:_} = make(); | ||
219 | } | ||
220 | "#, | ||
221 | "Add `: _` before assignment operator", | ||
222 | ); | ||
223 | } | ||
224 | |||
225 | #[test] | ||
226 | fn add_type_ascription_method() { | ||
227 | check_assist_by_label( | ||
228 | add_turbo_fish, | ||
229 | r#" | ||
230 | struct S; | ||
231 | impl S { | ||
232 | fn make<T>(&self) -> T {} | ||
233 | } | ||
234 | fn main() { | ||
235 | let x = S.make$0(); | ||
236 | } | ||
237 | "#, | ||
238 | r#" | ||
239 | struct S; | ||
240 | impl S { | ||
241 | fn make<T>(&self) -> T {} | ||
242 | } | ||
243 | fn main() { | ||
244 | let x: ${0:_} = S.make(); | ||
245 | } | ||
246 | "#, | ||
247 | "Add `: _` before assignment operator", | ||
248 | ); | ||
249 | } | ||
250 | |||
251 | #[test] | ||
252 | fn add_type_ascription_already_typed() { | ||
253 | mark::check!(add_type_ascription_already_typed); | ||
254 | check_assist( | ||
255 | add_turbo_fish, | ||
256 | r#" | ||
257 | fn make<T>() -> T {} | ||
258 | fn main() { | ||
259 | let x: () = make$0(); | ||
260 | } | ||
261 | "#, | ||
262 | r#" | ||
263 | fn make<T>() -> T {} | ||
264 | fn main() { | ||
265 | let x: () = make::<${0:_}>(); | ||
266 | } | ||
267 | "#, | ||
268 | ); | ||
269 | } | ||
164 | } | 270 | } |
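
A compilable paraphrase of the test cases above, showing the two assists now offered on `let x = make$0();` when the binding has no type annotation; the snippet placeholder `${0:_}` is shown filled in with `i32`, and the `Default` bound exists only to make this sketch runnable:

fn make<T: Default>() -> T { T::default() }

fn main() {
    let a: i32 = make();   // result of "Add `: _` before assignment operator"
    let b = make::<i32>(); // result of "Add `::<>`"
    println!("{} {}", a, b);
}
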
diff --git a/crates/ide_db/src/helpers.rs b/crates/ide_db/src/helpers.rs
index f9de8ce0e..3ff77400b 100644
--- a/crates/ide_db/src/helpers.rs
+++ b/crates/ide_db/src/helpers.rs
@@ -41,6 +41,10 @@ pub struct FamousDefs<'a, 'b>(pub &'a Semantics<'b, RootDatabase>, pub Option<Cr | |||
41 | impl FamousDefs<'_, '_> { | 41 | impl FamousDefs<'_, '_> { |
42 | pub const FIXTURE: &'static str = include_str!("helpers/famous_defs_fixture.rs"); | 42 | pub const FIXTURE: &'static str = include_str!("helpers/famous_defs_fixture.rs"); |
43 | 43 | ||
44 | pub fn std(&self) -> Option<Crate> { | ||
45 | self.find_crate("std") | ||
46 | } | ||
47 | |||
44 | pub fn core(&self) -> Option<Crate> { | 48 | pub fn core(&self) -> Option<Crate> { |
45 | self.find_crate("core") | 49 | self.find_crate("core") |
46 | } | 50 | } |
diff --git a/crates/ide_db/src/helpers/famous_defs_fixture.rs b/crates/ide_db/src/helpers/famous_defs_fixture.rs
index bb4e9666b..d3464ae17 100644
--- a/crates/ide_db/src/helpers/famous_defs_fixture.rs
+++ b/crates/ide_db/src/helpers/famous_defs_fixture.rs
@@ -129,3 +129,11 @@ pub mod prelude { | |||
129 | } | 129 | } |
130 | #[prelude_import] | 130 | #[prelude_import] |
131 | pub use prelude::*; | 131 | pub use prelude::*; |
132 | //- /libstd.rs crate:std deps:core | ||
133 | //! Signatures of traits, types and functions from the std lib for use in tests. | ||
134 | |||
135 | /// Docs for return_keyword | ||
136 | mod return_keyword {} | ||
137 | |||
138 | /// Docs for prim_str | ||
139 | mod prim_str {} | ||
diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml
index 8b5cca22c..bb2656a80 100644
--- a/crates/mbe/Cargo.toml
+++ b/crates/mbe/Cargo.toml
@@ -18,6 +18,7 @@ syntax = { path = "../syntax", version = "0.0.0" } | |||
18 | parser = { path = "../parser", version = "0.0.0" } | 18 | parser = { path = "../parser", version = "0.0.0" } |
19 | tt = { path = "../tt", version = "0.0.0" } | 19 | tt = { path = "../tt", version = "0.0.0" } |
20 | test_utils = { path = "../test_utils", version = "0.0.0" } | 20 | test_utils = { path = "../test_utils", version = "0.0.0" } |
21 | stdx = { path = "../stdx", version = "0.0.0" } | ||
21 | 22 | ||
22 | [dev-dependencies] | 23 | [dev-dependencies] |
23 | profile = { path = "../profile" } | 24 | profile = { path = "../profile" } |
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index 6d81be880..503ad1355 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -40,18 +40,12 @@ fn benchmark_expand_macro_rules() { | |||
40 | .into_iter() | 40 | .into_iter() |
41 | .map(|(id, tt)| { | 41 | .map(|(id, tt)| { |
42 | let res = rules[&id].expand(&tt); | 42 | let res = rules[&id].expand(&tt); |
43 | if res.err.is_some() { | 43 | assert!(res.err.is_none()); |
44 | // FIXME: | ||
45 | // Currently `invocation_fixtures` will generate some correct invocations but | ||
46 | // cannot be expanded by mbe. We ignore errors here. | ||
47 | // See: https://github.com/rust-analyzer/rust-analyzer/issues/4777 | ||
48 | eprintln!("err from {} {:?}", id, res.err); | ||
49 | } | ||
50 | res.value.token_trees.len() | 44 | res.value.token_trees.len() |
51 | }) | 45 | }) |
52 | .sum() | 46 | .sum() |
53 | }; | 47 | }; |
54 | assert_eq!(hash, 66995); | 48 | assert_eq!(hash, 69413); |
55 | } | 49 | } |
56 | 50 | ||
57 | fn macro_rules_fixtures() -> FxHashMap<String, MacroRules> { | 51 | fn macro_rules_fixtures() -> FxHashMap<String, MacroRules> { |
@@ -77,7 +71,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> { | |||
77 | .collect() | 71 | .collect() |
78 | } | 72 | } |
79 | 73 | ||
80 | // Generate random invocation fixtures from rules | 74 | /// Generate random invocation fixtures from rules |
81 | fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt::Subtree)> { | 75 | fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt::Subtree)> { |
82 | let mut seed = 123456789; | 76 | let mut seed = 123456789; |
83 | let mut res = Vec::new(); | 77 | let mut res = Vec::new(); |
@@ -86,11 +80,31 @@ fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt | |||
86 | for rule in &it.rules { | 80 | for rule in &it.rules { |
87 | // Generate twice | 81 | // Generate twice |
88 | for _ in 0..2 { | 82 | for _ in 0..2 { |
89 | let mut subtree = tt::Subtree::default(); | 83 | // The inputs are generated by filling the `Op`s randomly. |
90 | for op in rule.lhs.iter() { | 84 | // However, some of the generated inputs are ambiguous to expand, for example: |
91 | collect_from_op(op, &mut subtree, &mut seed); | 85 | // ```rust |
86 | // macro_rules! m { | ||
87 | // ($($t:ident),* as $ty:ident) => {} | ||
88 | // } | ||
89 | // m!(as u32); // error: local ambiguity: multiple parsing options: built-in NTs ident ('t') or 1 other option. | ||
90 | // ``` | ||
91 | // | ||
92 | // So we just skip any error cases and try again | ||
93 | let mut try_cnt = 0; | ||
94 | loop { | ||
95 | let mut subtree = tt::Subtree::default(); | ||
96 | for op in rule.lhs.iter() { | ||
97 | collect_from_op(op, &mut subtree, &mut seed); | ||
98 | } | ||
99 | if it.expand(&subtree).err.is_none() { | ||
100 | res.push((name.clone(), subtree)); | ||
101 | break; | ||
102 | } | ||
103 | try_cnt += 1; | ||
104 | if try_cnt > 100 { | ||
105 | panic!("invocation fixture {} cannot be generated.\n", name); | ||
106 | } | ||
92 | } | 107 | } |
93 | res.push((name.clone(), subtree)); | ||
94 | } | 108 | } |
95 | } | 109 | } |
96 | } | 110 | } |
diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs
index e7e14b3cc..2efff8f52 100644
--- a/crates/mbe/src/expander.rs
+++ b/crates/mbe/src/expander.rs
@@ -5,7 +5,7 @@ | |||
5 | mod matcher; | 5 | mod matcher; |
6 | mod transcriber; | 6 | mod transcriber; |
7 | 7 | ||
8 | use rustc_hash::FxHashMap; | 8 | use smallvec::SmallVec; |
9 | use syntax::SmolStr; | 9 | use syntax::SmolStr; |
10 | 10 | ||
11 | use crate::{ExpandError, ExpandResult}; | 11 | use crate::{ExpandError, ExpandResult}; |
@@ -28,10 +28,10 @@ pub(crate) fn expand_rules( | |||
28 | return ExpandResult::ok(value); | 28 | return ExpandResult::ok(value); |
29 | } | 29 | } |
30 | } | 30 | } |
31 | // Use the rule if we matched more tokens, or had fewer errors | 31 | // Use the rule if we matched more tokens, or bound more variables |
32 | if let Some((prev_match, _)) = &match_ { | 32 | if let Some((prev_match, _)) = &match_ { |
33 | if (new_match.unmatched_tts, new_match.err_count) | 33 | if (new_match.unmatched_tts, -(new_match.bound_count as i32)) |
34 | < (prev_match.unmatched_tts, prev_match.err_count) | 34 | < (prev_match.unmatched_tts, -(prev_match.bound_count as i32)) |
35 | { | 35 | { |
36 | match_ = Some((new_match, rule)); | 36 | match_ = Some((new_match, rule)); |
37 | } | 37 | } |
@@ -94,19 +94,19 @@ pub(crate) fn expand_rules( | |||
94 | /// In other words, `Bindings` is a *multi* mapping from `SmolStr` to | 94 | /// In other words, `Bindings` is a *multi* mapping from `SmolStr` to |
95 | /// `tt::TokenTree`, where the index to select a particular `TokenTree` among | 95 | /// `tt::TokenTree`, where the index to select a particular `TokenTree` among |
96 | /// many is not a plain `usize`, but an `&[usize]`. | 96 | /// many is not a plain `usize`, but an `&[usize]`. |
97 | #[derive(Debug, Default)] | 97 | #[derive(Debug, Default, Clone, PartialEq, Eq)] |
98 | struct Bindings { | 98 | struct Bindings { |
99 | inner: FxHashMap<SmolStr, Binding>, | 99 | inner: SmallVec<[(SmolStr, Binding); 4]>, |
100 | } | 100 | } |
101 | 101 | ||
102 | #[derive(Debug)] | 102 | #[derive(Debug, Clone, PartialEq, Eq)] |
103 | enum Binding { | 103 | enum Binding { |
104 | Fragment(Fragment), | 104 | Fragment(Fragment), |
105 | Nested(Vec<Binding>), | 105 | Nested(Vec<Binding>), |
106 | Empty, | 106 | Empty, |
107 | } | 107 | } |
108 | 108 | ||
109 | #[derive(Debug, Clone)] | 109 | #[derive(Debug, Clone, PartialEq, Eq)] |
110 | enum Fragment { | 110 | enum Fragment { |
111 | /// token fragments are just copy-pasted into the output | 111 | /// token fragments are just copy-pasted into the output |
112 | Tokens(tt::TokenTree), | 112 | Tokens(tt::TokenTree), |
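
The changed tie-break can be read directly off the comparison tuple: fewer unmatched token trees still wins, and among candidates with the same number of leftover tokens the one that bound more variables is preferred, which is why `bound_count` is negated. A small self-contained illustration of that ordering (the `key` helper is hypothetical, not part of the diff):

// Lexicographic comparison of (unmatched_tts, -bound_count), as used above.
fn key(unmatched_tts: usize, bound_count: usize) -> (usize, i32) {
    (unmatched_tts, -(bound_count as i32))
}

fn main() {
    // same number of leftover tokens: the candidate that bound more variables wins
    assert!(key(0, 3) < key(0, 1));
    // leftover tokens always lose, regardless of how much was bound
    assert!(key(0, 1) < key(2, 10));
}
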
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index e3bd4c09a..9d3d28055 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -1,14 +1,74 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! An NFA-based parser, ported from rustc's mbe (macro-by-example) parsing code |
2 | //! | ||
3 | //! See https://github.com/rust-lang/rust/blob/70b18bc2cbac4712020019f5bf57c00905373205/compiler/rustc_expand/src/mbe/macro_parser.rs | ||
4 | //! Here is a quick intro to how the parser works, copied from rustc: | ||
5 | //! | ||
6 | //! A 'position' is a dot in the middle of a matcher, usually represented as a | ||
7 | //! dot. For example `· a $( a )* a b` is a position, as is `a $( · a )* a b`. | ||
8 | //! | ||
9 | //! The parser walks through the input a character at a time, maintaining a list | ||
10 | //! of threads consistent with the current position in the input string: `cur_items`. | ||
11 | //! | ||
12 | //! As it processes them, it fills up `eof_items` with threads that would be valid if | ||
13 | //! the macro invocation is now over, `bb_items` with threads that are waiting on | ||
14 | //! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting | ||
15 | //! on a particular token. Most of the logic concerns moving the · through the | ||
16 | //! repetitions indicated by Kleene stars. The rules for moving the · without | ||
17 | //! consuming any input are called epsilon transitions. It only advances or calls | ||
18 | //! out to the real Rust parser when no `cur_items` threads remain. | ||
19 | //! | ||
20 | //! Example: | ||
21 | //! | ||
22 | //! ```text, ignore | ||
23 | //! Start parsing a a a a b against [· a $( a )* a b]. | ||
24 | //! | ||
25 | //! Remaining input: a a a a b | ||
26 | //! next: [· a $( a )* a b] | ||
27 | //! | ||
28 | //! - - - Advance over an a. - - - | ||
29 | //! | ||
30 | //! Remaining input: a a a b | ||
31 | //! cur: [a · $( a )* a b] | ||
32 | //! Descend/Skip (first item). | ||
33 | //! next: [a $( · a )* a b] [a $( a )* · a b]. | ||
34 | //! | ||
35 | //! - - - Advance over an a. - - - | ||
36 | //! | ||
37 | //! Remaining input: a a b | ||
38 | //! cur: [a $( a · )* a b] [a $( a )* a · b] | ||
39 | //! Follow epsilon transition: Finish/Repeat (first item) | ||
40 | //! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b] | ||
41 | //! | ||
42 | //! - - - Advance over an a. - - - (this looks exactly like the last step) | ||
43 | //! | ||
44 | //! Remaining input: a b | ||
45 | //! cur: [a $( a · )* a b] [a $( a )* a · b] | ||
46 | //! Follow epsilon transition: Finish/Repeat (first item) | ||
47 | //! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b] | ||
48 | //! | ||
49 | //! - - - Advance over an a. - - - (this looks exactly like the last step) | ||
50 | //! | ||
51 | //! Remaining input: b | ||
52 | //! cur: [a $( a · )* a b] [a $( a )* a · b] | ||
53 | //! Follow epsilon transition: Finish/Repeat (first item) | ||
54 | //! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b] | ||
55 | //! | ||
56 | //! - - - Advance over a b. - - - | ||
57 | //! | ||
58 | //! Remaining input: '' | ||
59 | //! eof: [a $( a )* a b ·] | ||
60 | //! ``` | ||
2 | 61 | ||
3 | use crate::{ | 62 | use crate::{ |
4 | expander::{Binding, Bindings, Fragment}, | 63 | expander::{Binding, Bindings, Fragment}, |
5 | parser::{Op, RepeatKind, Separator}, | 64 | parser::{Op, OpDelimited, OpDelimitedIter, RepeatKind, Separator}, |
6 | tt_iter::TtIter, | 65 | tt_iter::TtIter, |
7 | ExpandError, MetaTemplate, | 66 | ExpandError, MetaTemplate, |
8 | }; | 67 | }; |
9 | 68 | ||
10 | use super::ExpandResult; | 69 | use super::ExpandResult; |
11 | use parser::FragmentKind::*; | 70 | use parser::FragmentKind::*; |
71 | use smallvec::{smallvec, SmallVec}; | ||
12 | use syntax::SmolStr; | 72 | use syntax::SmolStr; |
13 | 73 | ||
14 | impl Bindings { | 74 | impl Bindings { |
@@ -16,19 +76,19 @@ impl Bindings { | |||
16 | // FIXME: Do we have a better way to represent an empty token ? | 76 | // FIXME: Do we have a better way to represent an empty token ? |
17 | // Insert an empty subtree for empty token | 77 | // Insert an empty subtree for empty token |
18 | let tt = tt::Subtree::default().into(); | 78 | let tt = tt::Subtree::default().into(); |
19 | self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt))); | 79 | self.inner.push((name.clone(), Binding::Fragment(Fragment::Tokens(tt)))); |
20 | } | 80 | } |
21 | 81 | ||
22 | fn push_empty(&mut self, name: &SmolStr) { | 82 | fn push_empty(&mut self, name: &SmolStr) { |
23 | self.inner.insert(name.clone(), Binding::Empty); | 83 | self.inner.push((name.clone(), Binding::Empty)); |
24 | } | 84 | } |
25 | 85 | ||
26 | fn push_nested(&mut self, idx: usize, nested: Bindings) -> Result<(), ExpandError> { | 86 | fn push_nested(&mut self, idx: usize, nested: Bindings) -> Result<(), ExpandError> { |
27 | for (key, value) in nested.inner { | 87 | for (key, value) in nested.inner { |
28 | if !self.inner.contains_key(&key) { | 88 | if self.get_mut(&key).is_none() { |
29 | self.inner.insert(key.clone(), Binding::Nested(Vec::new())); | 89 | self.inner.push((key.clone(), Binding::Nested(Vec::new()))); |
30 | } | 90 | } |
31 | match self.inner.get_mut(&key) { | 91 | match self.get_mut(&key) { |
32 | Some(Binding::Nested(it)) => { | 92 | Some(Binding::Nested(it)) => { |
33 | // insert empty nested bindings before this one | 93 | // insert empty nested bindings before this one |
34 | while it.len() < idx { | 94 | while it.len() < idx { |
@@ -46,6 +106,14 @@ impl Bindings { | |||
46 | } | 106 | } |
47 | Ok(()) | 107 | Ok(()) |
48 | } | 108 | } |
109 | |||
110 | fn get_mut(&mut self, name: &str) -> Option<&mut Binding> { | ||
111 | self.inner.iter_mut().find_map(|(n, b)| if n == name { Some(b) } else { None }) | ||
112 | } | ||
113 | |||
114 | fn bindings(&self) -> impl Iterator<Item = &Binding> { | ||
115 | self.inner.iter().map(|(_, b)| b) | ||
116 | } | ||
49 | } | 117 | } |
50 | 118 | ||
51 | macro_rules! err { | 119 | macro_rules! err { |
@@ -57,7 +125,7 @@ macro_rules! err { | |||
57 | }; | 125 | }; |
58 | } | 126 | } |
59 | 127 | ||
60 | #[derive(Debug, Default)] | 128 | #[derive(Clone, Debug, Default, PartialEq, Eq)] |
61 | pub(super) struct Match { | 129 | pub(super) struct Match { |
62 | pub(super) bindings: Bindings, | 130 | pub(super) bindings: Bindings, |
63 | /// We currently just keep the first error and count the rest to compare matches. | 131 | /// We currently just keep the first error and count the rest to compare matches. |
@@ -65,6 +133,8 @@ pub(super) struct Match { | |||
65 | pub(super) err_count: usize, | 133 | pub(super) err_count: usize, |
66 | /// How many top-level token trees were left to match. | 134 | /// How many top-level token trees were left to match. |
67 | pub(super) unmatched_tts: usize, | 135 | pub(super) unmatched_tts: usize, |
136 | /// The number of bound variables | ||
137 | pub(super) bound_count: usize, | ||
68 | } | 138 | } |
69 | 139 | ||
70 | impl Match { | 140 | impl Match { |
@@ -76,72 +146,373 @@ impl Match { | |||
76 | } | 146 | } |
77 | 147 | ||
78 | /// Matching errors are added to the `Match`. | 148 | /// Matching errors are added to the `Match`. |
79 | pub(super) fn match_(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { | 149 | pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match { |
80 | let mut res = Match::default(); | 150 | let mut res = match_loop(pattern, &input); |
81 | let mut src = TtIter::new(src); | 151 | res.bound_count = count(res.bindings.bindings()); |
152 | return res; | ||
153 | |||
154 | fn count<'a>(bindings: impl Iterator<Item = &'a Binding>) -> usize { | ||
155 | bindings | ||
156 | .map(|it| match it { | ||
157 | Binding::Fragment(_) => 1, | ||
158 | Binding::Empty => 1, | ||
159 | Binding::Nested(it) => count(it.iter()), | ||
160 | }) | ||
161 | .sum() | ||
162 | } | ||
163 | } | ||
82 | 164 | ||
83 | match_tokens(&mut res, pattern, &mut src); | 165 | #[derive(Debug, Clone)] |
166 | struct MatchState<'t> { | ||
167 | /// The position of the "dot" in this matcher | ||
168 | dot: OpDelimitedIter<'t>, | ||
84 | 169 | ||
85 | if src.len() > 0 { | 170 | /// Token subtree stack |
86 | res.unmatched_tts += src.len(); | 171 | /// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. ) |
87 | res.add_err(err!("leftover tokens")); | 172 | /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does |
88 | } | 173 | /// that where the bottom of the stack is the outermost matcher. |
174 | stack: SmallVec<[OpDelimitedIter<'t>; 4]>, | ||
175 | |||
176 | /// The "parent" matcher position if we are in a repetition. That is, the matcher position just | ||
177 | /// before we enter the repetition. | ||
178 | up: Option<Box<MatchState<'t>>>, | ||
179 | |||
180 | /// The separator if we are in a repetition. | ||
181 | sep: Option<Separator>, | ||
182 | |||
183 | /// The KleeneOp of this sequence if we are in a repetition. | ||
184 | sep_kind: Option<RepeatKind>, | ||
89 | 185 | ||
90 | res | 186 | /// Number of separator tokens parsed so far |
187 | sep_parsed: Option<usize>, | ||
188 | |||
189 | /// Matched meta variables bindings | ||
190 | bindings: SmallVec<[Bindings; 4]>, | ||
191 | |||
192 | /// Cached result of meta variable parsing | ||
193 | meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment>>)>, | ||
194 | |||
195 | /// Whether an error occurred in this state; if so, it also "poisons" the parent state | ||
196 | is_error: bool, | ||
91 | } | 197 | } |
92 | 198 | ||
93 | fn match_tokens(res: &mut Match, pattern: &MetaTemplate, src: &mut TtIter) { | 199 | /// Process the matcher positions of `cur_items` until it is empty. In the process, this will |
94 | for op in pattern.iter() { | 200 | /// produce more items in `next_items`, `eof_items`, and `bb_items`. |
95 | match op { | 201 | /// |
96 | Op::Leaf(lhs) => { | 202 | /// For more info about the how this happens, see the module-level doc comments and the inline |
97 | if let Err(err) = match_leaf(lhs, src) { | 203 | /// comments of this function. |
98 | res.add_err(err); | 204 | /// |
99 | continue; | 205 | /// # Parameters |
206 | /// | ||
207 | /// - `src`: the current token of the parser. | ||
208 | /// - `stack`: the "parent" frames of the token tree | ||
209 | /// - `res`: the match result to store errors | ||
210 | /// - `cur_items`: the set of current items to be processed. This should be empty by the end of a | ||
211 | /// successful execution of this function. | ||
212 | /// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in | ||
213 | /// the function `parse`. | ||
214 | /// - `eof_items`: the set of items that would be valid if this was the EOF. | ||
215 | /// - `bb_items`: the set of items that are waiting for the black-box parser. | ||
216 | /// - `error_items`: the set of items in errors, used for error-resilient parsing | ||
217 | fn match_loop_inner<'t>( | ||
218 | src: TtIter<'t>, | ||
219 | stack: &[TtIter<'t>], | ||
220 | res: &mut Match, | ||
221 | cur_items: &mut SmallVec<[MatchState<'t>; 1]>, | ||
222 | bb_items: &mut SmallVec<[MatchState<'t>; 1]>, | ||
223 | next_items: &mut Vec<MatchState<'t>>, | ||
224 | eof_items: &mut SmallVec<[MatchState<'t>; 1]>, | ||
225 | error_items: &mut SmallVec<[MatchState<'t>; 1]>, | ||
226 | ) { | ||
227 | macro_rules! try_push { | ||
228 | ($items: expr, $it:expr) => { | ||
229 | if $it.is_error { | ||
230 | error_items.push($it); | ||
231 | } else { | ||
232 | $items.push($it); | ||
233 | } | ||
234 | }; | ||
235 | } | ||
236 | |||
237 | while let Some(mut item) = cur_items.pop() { | ||
238 | while item.dot.is_eof() { | ||
239 | match item.stack.pop() { | ||
240 | Some(frame) => { | ||
241 | item.dot = frame; | ||
242 | item.dot.next(); | ||
100 | } | 243 | } |
244 | None => break, | ||
101 | } | 245 | } |
102 | Op::Subtree { tokens, delimiter: delim } => { | 246 | } |
103 | let rhs = match src.expect_subtree() { | 247 | let op = match item.dot.peek() { |
104 | Ok(s) => s, | 248 | None => { |
105 | Err(()) => { | 249 | // We are at or past the end of the matcher of `item`. |
106 | res.add_err(err!("expected subtree")); | 250 | if item.up.is_some() { |
107 | continue; | 251 | if item.sep_parsed.is_none() { |
252 | // Get the `up` matcher | ||
253 | let mut new_pos = *item.up.clone().unwrap(); | ||
254 | // Add matches from this repetition to the `matches` of `up` | ||
255 | if let Some(bindings) = new_pos.bindings.last_mut() { | ||
256 | for (i, b) in item.bindings.iter_mut().enumerate() { | ||
257 | bindings.push_nested(i, b.clone()).unwrap(); | ||
258 | } | ||
259 | } | ||
260 | // Move the "dot" past the repetition in `up` | ||
261 | new_pos.dot.next(); | ||
262 | new_pos.is_error = new_pos.is_error || item.is_error; | ||
263 | cur_items.push(new_pos); | ||
264 | } | ||
265 | |||
266 | // Check if we need a separator. | ||
267 | // We check the separator one by one | ||
268 | let sep_idx = *item.sep_parsed.as_ref().unwrap_or(&0); | ||
269 | let sep_len = item.sep.as_ref().map_or(0, Separator::tt_count); | ||
270 | if item.sep.is_some() && sep_idx != sep_len { | ||
271 | let sep = item.sep.as_ref().unwrap(); | ||
272 | if src.clone().expect_separator(&sep, sep_idx) { | ||
273 | item.dot.next(); | ||
274 | item.sep_parsed = Some(sep_idx + 1); | ||
275 | try_push!(next_items, item); | ||
276 | } | ||
277 | } | ||
278 | // We don't need a separator. Move the "dot" back to the beginning of the matcher | ||
279 | // and try to match again UNLESS we are only allowed to have _one_ repetition. | ||
280 | else if item.sep_kind != Some(RepeatKind::ZeroOrOne) { | ||
281 | item.dot = item.dot.reset(); | ||
282 | item.sep_parsed = None; | ||
283 | item.bindings.push(Bindings::default()); | ||
284 | cur_items.push(item); | ||
285 | } | ||
286 | } else { | ||
287 | // If we are not in a repetition, then being at the end of a matcher means that we have | ||
288 | // reached the potential end of the input. | ||
289 | try_push!(eof_items, item); | ||
290 | } | ||
291 | continue; | ||
292 | } | ||
293 | Some(it) => it, | ||
294 | }; | ||
295 | |||
296 | // We are in the middle of a matcher. | ||
297 | match op { | ||
298 | OpDelimited::Op(Op::Repeat { tokens, kind, separator }) => { | ||
299 | if matches!(kind, RepeatKind::ZeroOrMore | RepeatKind::ZeroOrOne) { | ||
300 | let mut new_item = item.clone(); | ||
301 | new_item.dot.next(); | ||
302 | let mut vars = Vec::new(); | ||
303 | let bindings = new_item.bindings.last_mut().unwrap(); | ||
304 | collect_vars(&mut vars, tokens); | ||
305 | for var in vars { | ||
306 | bindings.push_empty(&var); | ||
108 | } | 307 | } |
109 | }; | 308 | cur_items.push(new_item); |
110 | if delim.map(|it| it.kind) != rhs.delimiter_kind() { | ||
111 | res.add_err(err!("mismatched delimiter")); | ||
112 | continue; | ||
113 | } | 309 | } |
114 | let mut src = TtIter::new(rhs); | 310 | cur_items.push(MatchState { |
115 | match_tokens(res, tokens, &mut src); | 311 | dot: tokens.iter_delimited(None), |
116 | if src.len() > 0 { | 312 | stack: Default::default(), |
117 | res.add_err(err!("leftover tokens")); | 313 | up: Some(Box::new(item)), |
314 | sep: separator.clone(), | ||
315 | sep_kind: Some(*kind), | ||
316 | sep_parsed: None, | ||
317 | bindings: smallvec![Bindings::default()], | ||
318 | meta_result: None, | ||
319 | is_error: false, | ||
320 | }) | ||
321 | } | ||
322 | OpDelimited::Op(Op::Subtree { tokens, delimiter }) => { | ||
323 | if let Ok(subtree) = src.clone().expect_subtree() { | ||
324 | if subtree.delimiter_kind() == delimiter.map(|it| it.kind) { | ||
325 | item.stack.push(item.dot); | ||
326 | item.dot = tokens.iter_delimited(delimiter.as_ref()); | ||
327 | cur_items.push(item); | ||
328 | } | ||
118 | } | 329 | } |
119 | } | 330 | } |
120 | Op::Var { name, kind, .. } => { | 331 | OpDelimited::Op(Op::Var { kind, name, .. }) => { |
121 | let kind = match kind { | 332 | if let Some(kind) = kind { |
122 | Some(k) => k, | 333 | let mut fork = src.clone(); |
123 | None => { | 334 | let match_res = match_meta_var(kind.as_str(), &mut fork); |
124 | res.add_err(ExpandError::UnexpectedToken); | 335 | match match_res.err { |
125 | continue; | 336 | None => { |
337 | // Some meta variables are optional (e.g. vis) | ||
338 | if match_res.value.is_some() { | ||
339 | item.meta_result = Some((fork, match_res)); | ||
340 | try_push!(bb_items, item); | ||
341 | } else { | ||
342 | item.bindings.last_mut().unwrap().push_optional(name); | ||
343 | item.dot.next(); | ||
344 | cur_items.push(item); | ||
345 | } | ||
346 | } | ||
347 | Some(err) => { | ||
348 | res.add_err(err); | ||
349 | match match_res.value { | ||
350 | Some(fragment) => { | ||
351 | item.bindings | ||
352 | .last_mut() | ||
353 | .unwrap() | ||
354 | .inner | ||
355 | .push((name.clone(), Binding::Fragment(fragment))); | ||
356 | } | ||
357 | _ => {} | ||
358 | } | ||
359 | item.is_error = true; | ||
360 | error_items.push(item); | ||
361 | } | ||
126 | } | 362 | } |
127 | }; | 363 | } |
128 | let ExpandResult { value: matched, err: match_err } = | 364 | } |
129 | match_meta_var(kind.as_str(), src); | 365 | OpDelimited::Op(Op::Leaf(leaf)) => { |
130 | match matched { | 366 | if let Err(err) = match_leaf(&leaf, &mut src.clone()) { |
367 | res.add_err(err); | ||
368 | item.is_error = true; | ||
369 | } else { | ||
370 | item.dot.next(); | ||
371 | } | ||
372 | try_push!(next_items, item); | ||
373 | } | ||
374 | OpDelimited::Open => { | ||
375 | if matches!(src.clone().next(), Some(tt::TokenTree::Subtree(..))) { | ||
376 | item.dot.next(); | ||
377 | try_push!(next_items, item); | ||
378 | } | ||
379 | } | ||
380 | OpDelimited::Close => { | ||
381 | let is_delim_closed = src.peek_n(0).is_none() && !stack.is_empty(); | ||
382 | if is_delim_closed { | ||
383 | item.dot.next(); | ||
384 | try_push!(next_items, item); | ||
385 | } | ||
386 | } | ||
387 | } | ||
388 | } | ||
389 | } | ||
390 | |||
391 | fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { | ||
392 | let mut src = TtIter::new(src); | ||
393 | let mut stack: SmallVec<[TtIter; 1]> = SmallVec::new(); | ||
394 | let mut res = Match::default(); | ||
395 | let mut error_reover_item = None; | ||
396 | |||
397 | let mut cur_items = smallvec![MatchState { | ||
398 | dot: pattern.iter_delimited(None), | ||
399 | stack: Default::default(), | ||
400 | up: None, | ||
401 | sep: None, | ||
402 | sep_kind: None, | ||
403 | sep_parsed: None, | ||
404 | bindings: smallvec![Bindings::default()], | ||
405 | is_error: false, | ||
406 | meta_result: None, | ||
407 | }]; | ||
408 | |||
409 | let mut next_items = vec![]; | ||
410 | |||
411 | loop { | ||
412 | let mut bb_items = SmallVec::new(); | ||
413 | let mut eof_items = SmallVec::new(); | ||
414 | let mut error_items = SmallVec::new(); | ||
415 | |||
416 | stdx::always!(next_items.is_empty()); | ||
417 | |||
418 | match_loop_inner( | ||
419 | src.clone(), | ||
420 | &stack, | ||
421 | &mut res, | ||
422 | &mut cur_items, | ||
423 | &mut bb_items, | ||
424 | &mut next_items, | ||
425 | &mut eof_items, | ||
426 | &mut error_items, | ||
427 | ); | ||
428 | stdx::always!(cur_items.is_empty()); | ||
429 | |||
430 | if error_items.len() > 0 { | ||
431 | error_reover_item = error_items.pop(); | ||
432 | } else if eof_items.len() > 0 { | ||
433 | error_reover_item = Some(eof_items[0].clone()); | ||
434 | } | ||
435 | |||
436 | // We need to do some post processing after the `match_loop_inner`. | ||
437 | // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise, | ||
438 | // either the parse is ambiguous (which should never happen) or there is a syntax error. | ||
439 | if src.peek_n(0).is_none() && stack.is_empty() { | ||
440 | if eof_items.len() == 1 { | ||
441 | // drop all accumulated errors, because this is the correct answer! | ||
442 | res = Match::default(); | ||
443 | res.bindings = eof_items[0].bindings[0].clone(); | ||
444 | } else { | ||
445 | // Error recovery | ||
446 | if error_reover_item.is_some() { | ||
447 | res.bindings = error_reover_item.unwrap().bindings[0].clone(); | ||
448 | } | ||
449 | res.add_err(ExpandError::UnexpectedToken); | ||
450 | } | ||
451 | return res; | ||
452 | } | ||
453 | |||
454 | // If there are no possible next positions AND we aren't waiting for the black-box parser, | ||
455 | // then there is a syntax error. | ||
456 | // | ||
457 | // Another possibility is that we need to call out to parse some rust nonterminal | ||
458 | // (black-box) parser. However, if there is not EXACTLY ONE of these, something is wrong. | ||
459 | if (bb_items.is_empty() && next_items.is_empty()) | ||
460 | || (!bb_items.is_empty() && !next_items.is_empty()) | ||
461 | || bb_items.len() > 1 | ||
462 | { | ||
463 | res.unmatched_tts += src.len(); | ||
464 | while let Some(it) = stack.pop() { | ||
465 | src = it; | ||
466 | res.unmatched_tts += src.len(); | ||
467 | } | ||
468 | res.add_err(err!("leftover tokens")); | ||
469 | |||
470 | if let Some(mut error_reover_item) = error_reover_item { | ||
471 | res.bindings = error_reover_item.bindings.remove(0); | ||
472 | } | ||
473 | return res; | ||
474 | } | ||
475 | // Dump all possible `next_items` into `cur_items` for the next iteration. | ||
476 | else if !next_items.is_empty() { | ||
477 | // Now process the next token | ||
478 | cur_items.extend(next_items.drain(..)); | ||
479 | |||
480 | match src.next() { | ||
481 | Some(tt::TokenTree::Subtree(subtree)) => { | ||
482 | stack.push(src.clone()); | ||
483 | src = TtIter::new(subtree); | ||
484 | } | ||
485 | None if !stack.is_empty() => src = stack.pop().unwrap(), | ||
486 | _ => (), | ||
487 | } | ||
488 | } | ||
489 | // Finally, we have the case where we need to call the black-box parser to get some | ||
490 | // nonterminal. | ||
491 | else { | ||
492 | stdx::always!(bb_items.len() == 1); | ||
493 | let mut item = bb_items.pop().unwrap(); | ||
494 | |||
495 | if let Some(OpDelimited::Op(Op::Var { name, .. })) = item.dot.peek() { | ||
496 | let (iter, match_res) = item.meta_result.take().unwrap(); | ||
497 | let bindings = item.bindings.last_mut().unwrap(); | ||
498 | match match_res.value { | ||
131 | Some(fragment) => { | 499 | Some(fragment) => { |
132 | res.bindings.inner.insert(name.clone(), Binding::Fragment(fragment)); | 500 | bindings.inner.push((name.clone(), Binding::Fragment(fragment))); |
133 | } | 501 | } |
134 | None if match_err.is_none() => res.bindings.push_optional(name), | 502 | None if match_res.err.is_none() => bindings.push_optional(name), |
135 | _ => {} | 503 | _ => {} |
136 | } | 504 | } |
137 | if let Some(err) = match_err { | 505 | if let Some(err) = match_res.err { |
138 | res.add_err(err); | 506 | res.add_err(err); |
139 | } | 507 | } |
508 | src = iter.clone(); | ||
509 | item.dot.next(); | ||
510 | } else { | ||
511 | unreachable!() | ||
140 | } | 512 | } |
141 | Op::Repeat { tokens: subtree, kind, separator } => { | 513 | cur_items.push(item); |
142 | match_repeat(res, subtree, *kind, separator, src); | ||
143 | } | ||
144 | } | 514 | } |
515 | stdx::always!(!cur_items.is_empty()); | ||
145 | } | 516 | } |
146 | } | 517 | } |
147 | 518 | ||
@@ -173,73 +544,6 @@ fn match_leaf(lhs: &tt::Leaf, src: &mut TtIter) -> Result<(), ExpandError> { | |||
173 | Ok(()) | 544 | Ok(()) |
174 | } | 545 | } |
175 | 546 | ||
176 | fn match_repeat( | ||
177 | res: &mut Match, | ||
178 | pattern: &MetaTemplate, | ||
179 | kind: RepeatKind, | ||
180 | separator: &Option<Separator>, | ||
181 | src: &mut TtIter, | ||
182 | ) { | ||
183 | // Dirty hack to make macro-expansion terminate. | ||
184 | // This should be replaced by a proper macro-by-example implementation | ||
185 | let mut limit = 65536; | ||
186 | let mut counter = 0; | ||
187 | |||
188 | for i in 0.. { | ||
189 | let mut fork = src.clone(); | ||
190 | |||
191 | if let Some(separator) = &separator { | ||
192 | if i != 0 && !fork.eat_separator(separator) { | ||
193 | break; | ||
194 | } | ||
195 | } | ||
196 | |||
197 | let mut nested = Match::default(); | ||
198 | match_tokens(&mut nested, pattern, &mut fork); | ||
199 | if nested.err.is_none() { | ||
200 | limit -= 1; | ||
201 | if limit == 0 { | ||
202 | log::warn!( | ||
203 | "match_lhs exceeded repeat pattern limit => {:#?}\n{:#?}\n{:#?}\n{:#?}", | ||
204 | pattern, | ||
205 | src, | ||
206 | kind, | ||
207 | separator | ||
208 | ); | ||
209 | break; | ||
210 | } | ||
211 | *src = fork; | ||
212 | |||
213 | if let Err(err) = res.bindings.push_nested(counter, nested.bindings) { | ||
214 | res.add_err(err); | ||
215 | } | ||
216 | counter += 1; | ||
217 | if counter == 1 { | ||
218 | if let RepeatKind::ZeroOrOne = kind { | ||
219 | break; | ||
220 | } | ||
221 | } | ||
222 | } else { | ||
223 | break; | ||
224 | } | ||
225 | } | ||
226 | |||
227 | match (kind, counter) { | ||
228 | (RepeatKind::OneOrMore, 0) => { | ||
229 | res.add_err(ExpandError::UnexpectedToken); | ||
230 | } | ||
231 | (_, 0) => { | ||
232 | // Collect all empty variables in subtrees | ||
233 | let mut vars = Vec::new(); | ||
234 | collect_vars(&mut vars, pattern); | ||
235 | for var in vars { | ||
236 | res.bindings.push_empty(&var) | ||
237 | } | ||
238 | } | ||
239 | _ => (), | ||
240 | } | ||
241 | } | ||
242 | |||
243 | fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragment>> { | 547 | fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragment>> { |
244 | let fragment = match kind { | 548 | let fragment = match kind { |
245 | "path" => Path, | 549 | "path" => Path, |
@@ -303,14 +607,14 @@ fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &MetaTemplate) { | |||
303 | } | 607 | } |
304 | 608 | ||
305 | impl<'a> TtIter<'a> { | 609 | impl<'a> TtIter<'a> { |
306 | fn eat_separator(&mut self, separator: &Separator) -> bool { | 610 | fn expect_separator(&mut self, separator: &Separator, idx: usize) -> bool { |
307 | let mut fork = self.clone(); | 611 | let mut fork = self.clone(); |
308 | let ok = match separator { | 612 | let ok = match separator { |
309 | Separator::Ident(lhs) => match fork.expect_ident() { | 613 | Separator::Ident(lhs) if idx == 0 => match fork.expect_ident() { |
310 | Ok(rhs) => rhs.text == lhs.text, | 614 | Ok(rhs) => rhs.text == lhs.text, |
311 | _ => false, | 615 | _ => false, |
312 | }, | 616 | }, |
313 | Separator::Literal(lhs) => match fork.expect_literal() { | 617 | Separator::Literal(lhs) if idx == 0 => match fork.expect_literal() { |
314 | Ok(rhs) => match rhs { | 618 | Ok(rhs) => match rhs { |
315 | tt::Leaf::Literal(rhs) => rhs.text == lhs.text, | 619 | tt::Leaf::Literal(rhs) => rhs.text == lhs.text, |
316 | tt::Leaf::Ident(rhs) => rhs.text == lhs.text, | 620 | tt::Leaf::Ident(rhs) => rhs.text == lhs.text, |
@@ -318,10 +622,11 @@ impl<'a> TtIter<'a> { | |||
318 | }, | 622 | }, |
319 | _ => false, | 623 | _ => false, |
320 | }, | 624 | }, |
321 | Separator::Puncts(lhss) => lhss.iter().all(|lhs| match fork.expect_punct() { | 625 | Separator::Puncts(lhss) if idx < lhss.len() => match fork.expect_punct() { |
322 | Ok(rhs) => rhs.char == lhs.char, | 626 | Ok(rhs) => rhs.char == lhss[idx].char, |
323 | _ => false, | 627 | _ => false, |
324 | }), | 628 | }, |
629 | _ => false, | ||
325 | }; | 630 | }; |
326 | if ok { | 631 | if ok { |
327 | *self = fork; | 632 | *self = fork; |
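A note on the `expect_separator` change above: instead of eating a whole multi-punct separator such as `&&` in one call, the matcher now checks the separator one token tree at a time, passing the index of the punct it expects and committing the forked iterator only on success, which is what lets the NFA-style loop keep other candidate items alive between the puncts. Below is a minimal standalone sketch of that fork-and-commit, index-based matching; the `Token`/`TokenIter` types are illustrative assumptions, not the crate's `tt`/`TtIter` API.

// Toy model of matching one element of a multi-punct separator at a time,
// in the spirit of `TtIter::expect_separator(separator, idx)` above.
#[derive(Debug, PartialEq)]
enum Token {
    Ident(String),
    Punct(char),
}

#[derive(Clone)]
struct TokenIter<'a> {
    tokens: &'a [Token],
    pos: usize,
}

impl<'a> TokenIter<'a> {
    fn next(&mut self) -> Option<&'a Token> {
        let t = self.tokens.get(self.pos);
        self.pos += 1;
        t
    }

    /// Try to eat the `idx`-th punct of a multi-punct separator (e.g. `&&`).
    /// The work happens on a fork; `self` advances only if the token matched.
    fn expect_separator_punct(&mut self, separator: &[char], idx: usize) -> bool {
        let mut fork = self.clone();
        let ok = match separator.get(idx) {
            Some(&expected) => matches!(fork.next(), Some(Token::Punct(c)) if *c == expected),
            None => false,
        };
        if ok {
            *self = fork;
        }
        ok
    }
}

fn main() {
    // Input stream: `a && b`; separator: `&&` (two puncts).
    let tokens = [
        Token::Ident("a".into()),
        Token::Punct('&'),
        Token::Punct('&'),
        Token::Ident("b".into()),
    ];
    let mut iter = TokenIter { tokens: &tokens, pos: 1 }; // positioned just after `a`
    let sep = ['&', '&'];

    // The matcher advances through the separator one index at a time.
    assert!(iter.expect_separator_punct(&sep, 0));
    assert!(iter.expect_separator_punct(&sep, 1));
    assert_eq!(iter.next(), Some(&Token::Ident("b".into())));
    println!("matched `a && b` with a two-punct separator");
}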
diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index 78368a33e..ad9953a7d 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs | |||
@@ -13,13 +13,17 @@ use crate::{ | |||
13 | 13 | ||
14 | impl Bindings { | 14 | impl Bindings { |
15 | fn contains(&self, name: &str) -> bool { | 15 | fn contains(&self, name: &str) -> bool { |
16 | self.inner.contains_key(name) | 16 | self.inner.iter().any(|(n, _)| n == name) |
17 | } | 17 | } |
18 | 18 | ||
19 | fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> { | 19 | fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> { |
20 | let mut b = self.inner.get(name).ok_or_else(|| { | 20 | let mut b: &Binding = self |
21 | ExpandError::BindingError(format!("could not find binding `{}`", name)) | 21 | .inner |
22 | })?; | 22 | .iter() |
23 | .find_map(|(n, b)| if n == name { Some(b) } else { None }) | ||
24 | .ok_or_else(|| { | ||
25 | ExpandError::BindingError(format!("could not find binding `{}`", name)) | ||
26 | })?; | ||
23 | for nesting_state in nesting.iter_mut() { | 27 | for nesting_state in nesting.iter_mut() { |
24 | nesting_state.hit = true; | 28 | nesting_state.hit = true; |
25 | b = match b { | 29 | b = match b { |
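For the `Bindings` change above: the map keyed by name is replaced by a `Vec` of `(name, binding)` pairs that is scanned linearly. A plausible motivation (an assumption; the diff itself does not state it) is that the NFA matcher clones and merges candidate binding sets frequently, and a small vector is cheap to clone while preserving insertion order. A minimal sketch of the same linear-scan lookup, with toy `String` keys and values standing in for `SmolStr` and `Binding`:

// Vec-backed bindings with linear lookup, mirroring the new
// `Bindings::contains` / `Bindings::get` in the diff.
struct Bindings {
    inner: Vec<(String, String)>, // (metavariable name, captured fragment)
}

impl Bindings {
    fn contains(&self, name: &str) -> bool {
        self.inner.iter().any(|(n, _)| n == name)
    }

    fn get(&self, name: &str) -> Result<&String, String> {
        self.inner
            .iter()
            .find_map(|(n, b)| if n == name { Some(b) } else { None })
            .ok_or_else(|| format!("could not find binding `{}`", name))
    }
}

fn main() {
    let bindings = Bindings {
        inner: vec![("i".into(), "42".into()), ("name".into(), "foo".into())],
    };
    assert!(bindings.contains("name"));
    assert_eq!(bindings.get("i").unwrap(), "42");
    assert!(bindings.get("missing").is_err());
}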
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 4c298f85f..f3d2da55a 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs | |||
@@ -21,7 +21,7 @@ use test_utils::mark; | |||
21 | pub use tt::{Delimiter, DelimiterKind, Punct}; | 21 | pub use tt::{Delimiter, DelimiterKind, Punct}; |
22 | 22 | ||
23 | use crate::{ | 23 | use crate::{ |
24 | parser::{parse_pattern, parse_template, Op}, | 24 | parser::{parse_pattern, parse_template, MetaTemplate, Op}, |
25 | tt_iter::TtIter, | 25 | tt_iter::TtIter, |
26 | }; | 26 | }; |
27 | 27 | ||
@@ -94,15 +94,6 @@ struct Rule { | |||
94 | rhs: MetaTemplate, | 94 | rhs: MetaTemplate, |
95 | } | 95 | } |
96 | 96 | ||
97 | #[derive(Clone, Debug, PartialEq, Eq)] | ||
98 | struct MetaTemplate(Vec<Op>); | ||
99 | |||
100 | impl<'a> MetaTemplate { | ||
101 | fn iter(&self) -> impl Iterator<Item = &Op> { | ||
102 | self.0.iter() | ||
103 | } | ||
104 | } | ||
105 | |||
106 | #[derive(Clone, Copy, Debug, PartialEq, Eq)] | 97 | #[derive(Clone, Copy, Debug, PartialEq, Eq)] |
107 | struct Shift(u32); | 98 | struct Shift(u32); |
108 | 99 | ||
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index f891ec29c..8671322e1 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs | |||
@@ -5,7 +5,75 @@ use smallvec::SmallVec; | |||
5 | use syntax::SmolStr; | 5 | use syntax::SmolStr; |
6 | use tt::Delimiter; | 6 | use tt::Delimiter; |
7 | 7 | ||
8 | use crate::{tt_iter::TtIter, MetaTemplate, ParseError}; | 8 | use crate::{tt_iter::TtIter, ParseError}; |
9 | |||
10 | #[derive(Clone, Debug, PartialEq, Eq)] | ||
11 | pub(crate) struct MetaTemplate(pub(crate) Vec<Op>); | ||
12 | |||
13 | #[derive(Debug, Clone, Copy)] | ||
14 | pub(crate) enum OpDelimited<'a> { | ||
15 | Op(&'a Op), | ||
16 | Open, | ||
17 | Close, | ||
18 | } | ||
19 | |||
20 | #[derive(Debug, Clone, Copy)] | ||
21 | pub(crate) struct OpDelimitedIter<'a> { | ||
22 | inner: &'a Vec<Op>, | ||
23 | delimited: Option<&'a Delimiter>, | ||
24 | idx: usize, | ||
25 | } | ||
26 | |||
27 | impl<'a> OpDelimitedIter<'a> { | ||
28 | pub(crate) fn is_eof(&self) -> bool { | ||
29 | let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 }; | ||
30 | self.idx >= len | ||
31 | } | ||
32 | |||
33 | pub(crate) fn peek(&self) -> Option<OpDelimited<'a>> { | ||
34 | match self.delimited { | ||
35 | None => self.inner.get(self.idx).map(OpDelimited::Op), | ||
36 | Some(_) => match self.idx { | ||
37 | 0 => Some(OpDelimited::Open), | ||
38 | i if i == self.inner.len() + 1 => Some(OpDelimited::Close), | ||
39 | i => self.inner.get(i - 1).map(OpDelimited::Op), | ||
40 | }, | ||
41 | } | ||
42 | } | ||
43 | |||
44 | pub(crate) fn reset(&self) -> Self { | ||
45 | Self { inner: &self.inner, idx: 0, delimited: self.delimited } | ||
46 | } | ||
47 | } | ||
48 | |||
49 | impl<'a> Iterator for OpDelimitedIter<'a> { | ||
50 | type Item = OpDelimited<'a>; | ||
51 | |||
52 | fn next(&mut self) -> Option<Self::Item> { | ||
53 | let res = self.peek(); | ||
54 | self.idx += 1; | ||
55 | res | ||
56 | } | ||
57 | |||
58 | fn size_hint(&self) -> (usize, Option<usize>) { | ||
59 | let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 }; | ||
60 | let remain = len.checked_sub(self.idx).unwrap_or(0); | ||
61 | (remain, Some(remain)) | ||
62 | } | ||
63 | } | ||
64 | |||
65 | impl<'a> MetaTemplate { | ||
66 | pub(crate) fn iter(&self) -> impl Iterator<Item = &Op> { | ||
67 | self.0.iter() | ||
68 | } | ||
69 | |||
70 | pub(crate) fn iter_delimited( | ||
71 | &'a self, | ||
72 | delimited: Option<&'a Delimiter>, | ||
73 | ) -> OpDelimitedIter<'a> { | ||
74 | OpDelimitedIter { inner: &self.0, idx: 0, delimited } | ||
75 | } | ||
76 | } | ||
9 | 77 | ||
10 | #[derive(Clone, Debug, PartialEq, Eq)] | 78 | #[derive(Clone, Debug, PartialEq, Eq)] |
11 | pub(crate) enum Op { | 79 | pub(crate) enum Op { |
@@ -47,6 +115,16 @@ impl PartialEq for Separator { | |||
47 | } | 115 | } |
48 | } | 116 | } |
49 | 117 | ||
118 | impl Separator { | ||
119 | pub(crate) fn tt_count(&self) -> usize { | ||
120 | match self { | ||
121 | Separator::Literal(_) => 1, | ||
122 | Separator::Ident(_) => 1, | ||
123 | Separator::Puncts(it) => it.len(), | ||
124 | } | ||
125 | } | ||
126 | } | ||
127 | |||
50 | pub(crate) fn parse_template(template: &tt::Subtree) -> Result<Vec<Op>, ParseError> { | 128 | pub(crate) fn parse_template(template: &tt::Subtree) -> Result<Vec<Op>, ParseError> { |
51 | parse_inner(&template, Mode::Template).into_iter().collect() | 129 | parse_inner(&template, Mode::Template).into_iter().collect() |
52 | } | 130 | } |
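The new `OpDelimitedIter` lets the matcher walk a rule's ops together with the surrounding delimiter as explicit `Open`/`Close` positions, so the matching dot can sit before the open token or after the close token just like any other position. A self-contained toy version of the same iteration pattern follows; the `Item`/`DelimitedIter` names and plain `&str` ops are assumptions for illustration, not the crate's types.

// Iterate a sequence of ops, optionally wrapped in explicit Open/Close
// markers, so a delimiter and its contents form one uniform stream.
#[derive(Debug, PartialEq)]
enum Item<'a> {
    Open,
    Op(&'a str),
    Close,
}

struct DelimitedIter<'a> {
    ops: &'a [&'a str],
    delimited: bool,
    idx: usize,
}

impl<'a> Iterator for DelimitedIter<'a> {
    type Item = Item<'a>;

    fn next(&mut self) -> Option<Item<'a>> {
        let res = if self.delimited {
            match self.idx {
                0 => Some(Item::Open),
                i if i == self.ops.len() + 1 => Some(Item::Close),
                i => self.ops.get(i - 1).map(|op| Item::Op(*op)),
            }
        } else {
            self.ops.get(self.idx).map(|op| Item::Op(*op))
        };
        if res.is_some() {
            self.idx += 1;
        }
        res
    }
}

fn main() {
    let ops = ["$name:ident", "$(,)*"];
    let items: Vec<_> = DelimitedIter { ops: &ops, delimited: true, idx: 0 }.collect();
    assert_eq!(
        items,
        vec![Item::Open, Item::Op("$name:ident"), Item::Op("$(,)*"), Item::Close]
    );
    println!("{:?}", items);
}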
diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs index f1eadcd1e..5c641ebf2 100644 --- a/crates/mbe/src/tests.rs +++ b/crates/mbe/src/tests.rs | |||
@@ -457,6 +457,17 @@ fn test_match_group_with_multichar_sep() { | |||
457 | } | 457 | } |
458 | 458 | ||
459 | #[test] | 459 | #[test] |
460 | fn test_match_group_with_multichar_sep2() { | ||
461 | parse_macro( | ||
462 | r#" | ||
463 | macro_rules! foo { | ||
464 | (fn $name:ident {$($i:literal)&&*} ) => ( fn $name() -> bool { $($i)&&*} ); | ||
465 | }"#, | ||
466 | ) | ||
467 | .assert_expand_items("foo! (fn baz {true && true} );", "fn baz () -> bool {true &&true}"); | ||
468 | } | ||
469 | |||
470 | #[test] | ||
460 | fn test_match_group_zero_match() { | 471 | fn test_match_group_zero_match() { |
461 | parse_macro( | 472 | parse_macro( |
462 | r#" | 473 | r#" |
@@ -1267,6 +1278,18 @@ macro_rules! m { | |||
1267 | .is_some()); | 1278 | .is_some()); |
1268 | } | 1279 | } |
1269 | 1280 | ||
1281 | #[test] | ||
1282 | fn test_match_is_not_greedy() { | ||
1283 | parse_macro( | ||
1284 | r#" | ||
1285 | macro_rules! foo { | ||
1286 | ($($i:ident $(,)*),*) => {}; | ||
1287 | } | ||
1288 | "#, | ||
1289 | ) | ||
1290 | .assert_expand_items(r#"foo!(a,b);"#, r#""#); | ||
1291 | } | ||
1292 | |||
1270 | // The following tests are based on real world situations | 1293 | // The following tests are based on real world situations |
1271 | #[test] | 1294 | #[test] |
1272 | fn test_vec() { | 1295 | fn test_vec() { |
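The two tests added above exercise exactly the behaviours the new matcher enables: a multi-punct separator (`&&`) and non-greedy handling of a nested `$(,)*`. The latter can also be tried against rustc's own macro matcher, assuming rustc behaves the way the test expects; the transcriber body below is an illustrative addition, since the test in the diff expands to nothing.

// The `test_match_is_not_greedy` pattern, written as an ordinary macro.
macro_rules! idents {
    ($($i:ident $(,)*),*) => {
        vec![$(stringify!($i)),*]
    };
}

fn main() {
    // The inner `$(,)*` must not greedily swallow the comma that separates the
    // outer repetition; otherwise `b` could never be matched as a second `$i`.
    let names = idents!(a, b);
    assert_eq!(names, ["a", "b"]);
    println!("{:?}", names);
}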
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 70cb7fbab..c1ca88df6 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs | |||
@@ -474,7 +474,7 @@ pub(crate) fn folding_range( | |||
474 | let kind = match fold.kind { | 474 | let kind = match fold.kind { |
475 | FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment), | 475 | FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment), |
476 | FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports), | 476 | FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports), |
477 | FoldKind::Mods | FoldKind::Block | FoldKind::ArgList => None, | 477 | FoldKind::Mods | FoldKind::Block | FoldKind::ArgList | FoldKind::Region => None, |
478 | }; | 478 | }; |
479 | 479 | ||
480 | let range = range(line_index, fold.range); | 480 | let range = range(line_index, fold.range); |