author     Aleksey Kladov <[email protected]>   2019-09-10 20:12:37 +0100
committer  Aleksey Kladov <[email protected]>   2019-09-10 20:22:57 +0100
commit     9eb14e11706ebf9a60b0afbe9bc82a99a816bc02
tree       8b4a9d1ef764d82ccfdafdb2231a7813be09b01d
parent     b9d9db83d1ac85e83e018333f7aa4c5c5616e82b
cleanup expansion to item list
-rw-r--r--  crates/ra_hir/src/ids.rs             2
-rw-r--r--  crates/ra_hir/src/nameres/raw.rs     4
-rw-r--r--  crates/ra_mbe/src/lib.rs             4
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs  27
-rw-r--r--  crates/ra_mbe/src/tests.rs           8
5 files changed, 11 insertions(+), 34 deletions(-)
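
The cleanup removes the panicking `token_tree_to_ast_item_list` (which re-parsed an expansion as a whole `ast::SourceFile` and unwrapped on failure) in favour of the already-fallible item-list parser, renamed from `token_tree_to_macro_items` to `token_tree_to_items`. A minimal sketch of the resulting API shape and call-site pattern, built only from the signatures visible in the diffs below; the wrapper function name is made up for illustration:

// Removed: re-parsed the expansion as a full source file, unwrapping internally.
//     pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> Parse<ast::SourceFile>
// Kept (renamed from token_tree_to_macro_items): the caller decides how to handle errors.
//     pub fn token_tree_to_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError>

// Hypothetical wrapper showing the new call-site pattern (compare the ids.rs hunk below):
fn expand_to_item_syntax(tt: &tt::Subtree) -> Option<Parse<SyntaxNode>> {
    mbe::token_tree_to_items(tt) // Result<Parse<ast::MacroItems>, ExpandError>
        .ok()                    // drop the ExpandError
        .map(Parse::to_syntax)   // erase the typed root into Parse<SyntaxNode>
}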
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index e0d0d4209..9ea4e695d 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -90,7 +90,7 @@ impl HirFileId {
             })
             .ok()?;
         match macro_file.macro_file_kind {
-            MacroFileKind::Items => Some(Parse::to_syntax(mbe::token_tree_to_ast_item_list(&tt))),
+            MacroFileKind::Items => mbe::token_tree_to_items(&tt).ok().map(Parse::to_syntax),
             MacroFileKind::Expr => mbe::token_tree_to_expr(&tt).ok().map(Parse::to_syntax),
         }
     }
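
The `Items` arm now mirrors the `Expr` arm below it: the `Result` from `token_tree_to_items` is turned into an `Option`, so a failed re-parse of the expansion surfaces as `None` instead of a panic inside the bridge. For illustration only, the one-liner is equivalent to this spelled-out form:

// Same behaviour as `.ok().map(Parse::to_syntax)`, written out explicitly:
match mbe::token_tree_to_items(&tt) {
    Ok(parse) => Some(parse.to_syntax()), // expansion parsed as ast::MacroItems
    Err(_expand_error) => None,           // conversion failure: no tree for this file
}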
diff --git a/crates/ra_hir/src/nameres/raw.rs b/crates/ra_hir/src/nameres/raw.rs
index c646d3d00..04b97cb90 100644
--- a/crates/ra_hir/src/nameres/raw.rs
+++ b/crates/ra_hir/src/nameres/raw.rs
@@ -76,8 +76,10 @@ impl RawItems {
             source_map: ImportSourceMap::default(),
         };
         if let Some(node) = db.parse_or_expand(file_id) {
-            if let Some(source_file) = ast::SourceFile::cast(node) {
+            if let Some(source_file) = ast::SourceFile::cast(node.clone()) {
                 collector.process_module(None, source_file);
+            } else if let Some(item_list) = ast::MacroItems::cast(node) {
+                collector.process_module(None, item_list);
             }
         }
         (Arc::new(collector.raw_items), Arc::new(collector.source_map))
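
Two details worth noting in this hunk. `cast` takes the `SyntaxNode` by value and returns an `Option`, so the first attempt has to work on `node.clone()` to keep `node` available for the `ast::MacroItems` fallback; and that fallback is what lets item lists produced by macro expansion flow through the same `process_module` collector as ordinary files. An annotated restatement of the branch, for illustration only:

// Why the `.clone()`: a failed `ast::SourceFile::cast(node)` would consume `node`,
// leaving nothing for the second cast, so the first attempt uses a clone.
if let Some(source_file) = ast::SourceFile::cast(node.clone()) {
    // ordinary file: the parse root is a SourceFile
    collector.process_module(None, source_file);
} else if let Some(item_list) = ast::MacroItems::cast(node) {
    // macro expansion parsed as items: the root is a MacroItems list
    collector.process_module(None, item_list);
}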
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 52c3d03b5..f07f000ff 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -41,8 +41,8 @@ pub enum ExpandError {
 }
 
 pub use crate::syntax_bridge::{
-    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_ast_item_list, token_tree_to_expr,
-    token_tree_to_macro_items, token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty,
+    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_expr, token_tree_to_items,
+    token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty,
 };
 
 /// This struct contains AST for a single `macro_rules` definition. What might
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 26524adf9..2d035307b 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -46,25 +46,6 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
 // * TraitItems(SmallVec<[ast::TraitItem; 1]>)
 // * ImplItems(SmallVec<[ast::ImplItem; 1]>)
 // * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
-//
-//
-
-fn token_tree_to_syntax_node<F>(tt: &tt::Subtree, f: F) -> Result<Parse<SyntaxNode>, ExpandError>
-where
-    F: Fn(&mut dyn ra_parser::TokenSource, &mut dyn ra_parser::TreeSink),
-{
-    let tokens = [tt.clone().into()];
-    let buffer = TokenBuffer::new(&tokens);
-    let mut token_source = SubtreeTokenSource::new(&buffer);
-    let mut tree_sink = TtTreeSink::new(buffer.begin());
-    f(&mut token_source, &mut tree_sink);
-    if tree_sink.roots.len() != 1 {
-        return Err(ExpandError::ConversionError);
-    }
-    //FIXME: would be cool to report errors
-    let parse = tree_sink.inner.finish();
-    Ok(parse)
-}
 
 fn fragment_to_syntax_node(
     tt: &tt::Subtree,
@@ -115,17 +96,11 @@ pub fn token_tree_to_macro_stmts(tt: &tt::Subtree) -> Result<Parse<ast::MacroStm
 }
 
 /// Parses the token tree (result of macro expansion) as a sequence of items
-pub fn token_tree_to_macro_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
+pub fn token_tree_to_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
     let parse = fragment_to_syntax_node(tt, Items)?;
     parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
 }
 
-/// Parses the token tree (result of macro expansion) as a sequence of items
-pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> Parse<ast::SourceFile> {
-    let parse = token_tree_to_syntax_node(tt, ra_parser::parse).unwrap();
-    parse.cast().unwrap()
-}
-
 impl TokenMap {
     pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
         let idx = tt.0 as usize;
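
With `token_tree_to_ast_item_list` and its private helper `token_tree_to_syntax_node` gone, the fragment-based path through `fragment_to_syntax_node(tt, Items)` and `token_tree_to_items` is the only remaining way to parse an expansion as an item list, and its `ExpandError` reaches the caller. A sketch of a caller that propagates the error with `?` instead of unwrapping; the helper name is hypothetical, and `Parse::tree` is used the same way the tests below use it:

// Hypothetical caller: propagate ExpandError rather than unwrapping.
fn items_of_expansion(tt: &tt::Subtree) -> Result<ast::MacroItems, ExpandError> {
    let parse = token_tree_to_items(tt)?; // Parse<ast::MacroItems>
    Ok(parse.tree())                      // owned typed root, as in expand_to_items
}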
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 0f07e935d..312fa4626 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -70,7 +70,7 @@ pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
 
 pub(crate) fn expand_to_items(rules: &MacroRules, invocation: &str) -> ast::MacroItems {
     let expanded = expand(rules, invocation);
-    token_tree_to_macro_items(&expanded).unwrap().tree()
+    token_tree_to_items(&expanded).unwrap().tree()
 }
 
 #[allow(unused)]
@@ -155,8 +155,8 @@ pub(crate) fn assert_expansion(
     let expected = text_to_tokentree(&expected);
     let (expanded_tree, expected_tree) = match kind {
         MacroKind::Items => {
-            let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree();
-            let expected_tree = token_tree_to_macro_items(&expected).unwrap().tree();
+            let expanded_tree = token_tree_to_items(&expanded).unwrap().tree();
+            let expected_tree = token_tree_to_items(&expected).unwrap().tree();
 
             (
                 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
@@ -410,7 +410,7 @@ fn test_expand_to_item_list() {
         ",
     );
     let expansion = expand(&rules, "structs!(Foo, Bar);");
-    let tree = token_tree_to_macro_items(&expansion).unwrap().tree();
+    let tree = token_tree_to_items(&expansion).unwrap().tree();
     assert_eq!(
         format!("{:#?}", tree.syntax()).trim(),
         r#"