Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_hir_expand/src/db.rs      | 28
-rw-r--r--  crates/ra_hir_expand/src/lib.rs     |  2
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs  | 24
-rw-r--r--  crates/ra_tt/src/lib.rs             | 13
4 files changed, 43 insertions(+), 24 deletions(-)
diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs
index 8abfbb4ff..b3746924d 100644
--- a/crates/ra_hir_expand/src/db.rs
+++ b/crates/ra_hir_expand/src/db.rs
@@ -18,6 +18,12 @@ pub struct ParseMacroWithInfo {
     pub expansion_info: Arc<ExpansionInfo>,
 }
 
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct MacroExpandInfo {
+    pub arg_map: Arc<mbe::TokenMap>,
+    pub def_map: Arc<mbe::TokenMap>,
+}
+
 // FIXME: rename to ExpandDatabase
 #[salsa::query_group(AstDatabaseStorage)]
 pub trait AstDatabase: SourceDatabase {
@@ -35,7 +41,7 @@ pub trait AstDatabase: SourceDatabase {
     fn macro_expand(
         &self,
         macro_call: MacroCallId,
-    ) -> Result<(Arc<tt::Subtree>, (Arc<mbe::TokenMap>, Arc<mbe::TokenMap>)), String>;
+    ) -> Result<(Arc<tt::Subtree>, MacroExpandInfo), String>;
 
     fn macro_expansion_info(&self, macro_file: MacroFile) -> Option<Arc<ExpansionInfo>>;
 }
@@ -77,7 +83,7 @@ pub(crate) fn macro_arg(
 pub(crate) fn macro_expand(
     db: &dyn AstDatabase,
     id: MacroCallId,
-) -> Result<(Arc<tt::Subtree>, (Arc<mbe::TokenMap>, Arc<mbe::TokenMap>)), String> {
+) -> Result<(Arc<tt::Subtree>, MacroExpandInfo), String> {
     let loc = db.lookup_intern_macro(id);
     let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?;
 
@@ -89,7 +95,10 @@ pub(crate) fn macro_expand(
         return Err(format!("Total tokens count exceed limit : count = {}", count));
     }
 
-    Ok((Arc::new(tt), (macro_arg.1.clone(), macro_rules.1.clone())))
+    Ok((
+        Arc::new(tt),
+        MacroExpandInfo { arg_map: macro_arg.1.clone(), def_map: macro_rules.1.clone() },
+    ))
 }
 
 pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
@@ -133,7 +142,7 @@ pub(crate) fn parse_macro_with_info(
     };
 
     res.map(|(parsed, exp_map)| {
-        let (arg_map, def_map) = tt.1;
+        let expand_info = tt.1;
         let loc: MacroCallLoc = db.lookup_intern_macro(macro_call_id);
 
         let def_start =
@@ -141,11 +150,12 @@ pub(crate) fn parse_macro_with_info(
         let arg_start =
             loc.ast_id.to_node(db).token_tree().map(|t| t.syntax().text_range().start());
 
-        let arg_map =
-            arg_start.map(|start| exp_map.ranges(&arg_map, start)).unwrap_or_else(|| Vec::new());
-
-        let def_map =
-            def_start.map(|start| exp_map.ranges(&def_map, start)).unwrap_or_else(|| Vec::new());
+        let arg_map = arg_start
+            .map(|start| exp_map.ranges(&expand_info.arg_map, start))
+            .unwrap_or_else(|| Vec::new());
+        let def_map = def_start
+            .map(|start| exp_map.ranges(&expand_info.def_map, start))
+            .unwrap_or_else(|| Vec::new());
 
         let info = ExpansionInfo { arg_map, def_map };
 
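Note on the db.rs change above: the anonymous `(Arc<mbe::TokenMap>, Arc<mbe::TokenMap>)` pair in the `macro_expand` return type is replaced by the named `MacroExpandInfo` struct, so downstream code reads `arg_map`/`def_map` by name instead of `.0`/`.1`. A minimal standalone sketch of the same pattern, using a hypothetical `TokenMap` stand-in rather than the real `mbe::TokenMap`:

use std::sync::Arc;

// Hypothetical stand-in for mbe::TokenMap, only to keep the sketch self-contained.
#[derive(Debug, Default, PartialEq, Eq)]
struct TokenMap;

// Named fields instead of a positional tuple: call sites say `info.arg_map`
// and `info.def_map` rather than `maps.0` and `maps.1`.
#[derive(Debug, Clone, PartialEq, Eq)]
struct MacroExpandInfo {
    arg_map: Arc<TokenMap>,
    def_map: Arc<TokenMap>,
}

fn main() {
    let info = MacroExpandInfo {
        arg_map: Arc::new(TokenMap::default()),
        def_map: Arc::new(TokenMap::default()),
    };
    // Destructuring by field name still works where both halves are needed at once.
    let MacroExpandInfo { arg_map, def_map } = info.clone();
    assert_eq!(arg_map, info.arg_map);
    assert_eq!(def_map, info.def_map);
}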
diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs
index b6e55aa6a..63ba17158 100644
--- a/crates/ra_hir_expand/src/lib.rs
+++ b/crates/ra_hir_expand/src/lib.rs
@@ -132,7 +132,7 @@ impl MacroCallId {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
-/// ExpansionInfo mainly describle how to map text range between src and expaned macro
+/// ExpansionInfo mainly describes how to map text range between src and expanded macro
 pub struct ExpansionInfo {
     pub arg_map: Vec<(TextRange, TextRange)>,
     pub def_map: Vec<(TextRange, TextRange)>,
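Note on the lib.rs change above: `ExpansionInfo` keeps two tables of `(TextRange, TextRange)` pairs, one for the macro argument side and one for the macro definition side. A rough illustrative sketch of looking up a mapped range in such a table, using a simplified `TextRange` stand-in and assuming (purely for illustration) that the first element of each pair is the source-side range:

// Simplified stand-in for ra_syntax's TextRange: a half-open [start, end) span.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TextRange {
    start: u32,
    end: u32,
}

impl TextRange {
    fn new(start: u32, end: u32) -> TextRange {
        TextRange { start, end }
    }
    fn contains(&self, offset: u32) -> bool {
        self.start <= offset && offset < self.end
    }
}

// Find the target-side range whose source-side range covers `offset`.
fn map_offset(table: &[(TextRange, TextRange)], offset: u32) -> Option<TextRange> {
    table.iter().find(|(src, _)| src.contains(offset)).map(|&(_, dst)| dst)
}

fn main() {
    let arg_map = vec![
        (TextRange::new(0, 3), TextRange::new(10, 13)),
        (TextRange::new(5, 9), TextRange::new(20, 24)),
    ];
    assert_eq!(map_offset(&arg_map, 6), Some(TextRange::new(20, 24)));
    assert_eq!(map_offset(&arg_map, 4), None);
}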
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 8e86742d0..2f21ad6e0 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -26,7 +26,11 @@ static TOKEN_MAP_COUNTER: AtomicU32 = AtomicU32::new(0);
 
 /// Generate an unique token map id for each instance
 fn make_uniq_token_map_id() -> u32 {
-    TOKEN_MAP_COUNTER.fetch_add(1, Ordering::SeqCst)
+    let res = TOKEN_MAP_COUNTER.fetch_add(1, Ordering::SeqCst);
+    if res == std::u32::MAX {
+        panic!("TOKEN_MAP_COUNTER is overflowed");
+    }
+    res
 }
 
 impl std::default::Default for TokenMap {
@@ -35,10 +39,9 @@ impl std::default::Default for TokenMap {
     }
 }
 
-/// Maps Relative range of the expanded syntax node to `tt::TokenId`
+/// Maps relative range of the expanded syntax node to `tt::TokenId`
 #[derive(Debug, PartialEq, Eq, Default)]
 pub struct ExpandedRangeMap {
-    /// Maps `tt::TokenId` to the *relative* source range.
     ranges: Vec<(TextRange, tt::TokenId)>,
 }
 
@@ -85,14 +88,13 @@ fn fragment_to_syntax_node(
     };
     let buffer = TokenBuffer::new(&tokens);
     let mut token_source = SubtreeTokenSource::new(&buffer);
-    let mut range_map = ExpandedRangeMap::default();
-    let mut tree_sink = TtTreeSink::new(buffer.begin(), &mut range_map);
+    let mut tree_sink = TtTreeSink::new(buffer.begin());
     ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
     if tree_sink.roots.len() != 1 {
         return Err(ExpandError::ConversionError);
     }
     //FIXME: would be cool to report errors
-    let parse = tree_sink.inner.finish();
+    let (parse, range_map) = tree_sink.finish();
     Ok((parse, range_map))
 }
 
@@ -320,7 +322,7 @@ struct TtTreeSink<'a> {
     cursor: Cursor<'a>,
     text_pos: TextUnit,
     inner: SyntaxTreeBuilder,
-    range_map: &'a mut ExpandedRangeMap,
+    range_map: ExpandedRangeMap,
 
     // Number of roots
     // Use for detect ill-form tree which is not single root
@@ -328,16 +330,20 @@ struct TtTreeSink<'a> {
 }
 
 impl<'a> TtTreeSink<'a> {
-    fn new(cursor: Cursor<'a>, range_map: &'a mut ExpandedRangeMap) -> Self {
+    fn new(cursor: Cursor<'a>) -> Self {
         TtTreeSink {
             buf: String::new(),
             cursor,
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
-            range_map,
+            range_map: ExpandedRangeMap::default(),
         }
     }
+
+    fn finish(self) -> (Parse<SyntaxNode>, ExpandedRangeMap) {
+        (self.inner.finish(), self.range_map)
+    }
 }
 
 fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
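Note on the syntax_bridge.rs change above: instead of borrowing an `&'a mut ExpandedRangeMap` for the sink's whole lifetime, `TtTreeSink` now owns its range map and returns it from `finish()` alongside the parse result. A simplified, self-contained sketch of that ownership pattern (stand-in types, not the real `SyntaxTreeBuilder`/`Parse` API):

// Stand-in for ExpandedRangeMap: maps a start offset to a token id, simplified.
#[derive(Debug, Default, PartialEq, Eq)]
struct RangeMap {
    ranges: Vec<(u32, u32)>,
}

#[derive(Debug, Default)]
struct Sink {
    text: String,
    range_map: RangeMap, // owned, not `&mut`-borrowed: the struct needs no lifetime for it
}

impl Sink {
    fn token(&mut self, text: &str, token_id: u32) {
        let start = self.text.len() as u32;
        self.text.push_str(text);
        self.range_map.ranges.push((start, token_id));
    }

    // Consuming `self` hands both products back to the caller without cloning.
    fn finish(self) -> (String, RangeMap) {
        (self.text, self.range_map)
    }
}

fn main() {
    let mut sink = Sink::default();
    sink.token("foo", 0);
    sink.token("bar", 1);
    let (text, map) = sink.finish();
    assert_eq!(text, "foobar");
    assert_eq!(map.ranges, vec![(0, 0), (3, 1)]);
}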
diff --git a/crates/ra_tt/src/lib.rs b/crates/ra_tt/src/lib.rs
index 96410ff22..1449618f3 100644
--- a/crates/ra_tt/src/lib.rs
+++ b/crates/ra_tt/src/lib.rs
@@ -25,23 +25,26 @@ use smol_str::SmolStr;
 /// source token and making sure that identities are preserved during macro
 /// expansion.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TokenId(u32, u32);
+pub struct TokenId {
+    token_id: u32,
+    map_id: u32,
+}
 
 impl TokenId {
     pub fn new(token_id: u32, map_id: u32) -> TokenId {
-        TokenId(token_id, map_id)
+        TokenId { token_id, map_id }
     }
 
     pub const fn unspecified() -> TokenId {
-        TokenId(!0, !0)
+        TokenId { token_id: !0, map_id: !0 }
     }
 
     pub fn token_id(&self) -> u32 {
-        self.0
+        self.token_id
     }
 
     pub fn map_id(&self) -> u32 {
-        self.1
+        self.map_id
     }
 }
 
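Note on the ra_tt change above: `TokenId` becomes a struct with named fields while `new`, `unspecified`, `token_id`, and `map_id` keep their signatures, so existing callers are untouched. A small standalone usage sketch of the same shape (`!0` on a u32 is `u32::MAX`, the sentinel used for `unspecified`):

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId {
    token_id: u32,
    map_id: u32,
}

impl TokenId {
    pub fn new(token_id: u32, map_id: u32) -> TokenId {
        TokenId { token_id, map_id }
    }

    // `!0` is all bits set, i.e. u32::MAX, marking a token with no known origin.
    pub const fn unspecified() -> TokenId {
        TokenId { token_id: !0, map_id: !0 }
    }

    pub fn token_id(&self) -> u32 {
        self.token_id
    }

    pub fn map_id(&self) -> u32 {
        self.map_id
    }
}

fn main() {
    // The map_id would come from make_uniq_token_map_id() in ra_mbe; hard-coded here.
    let id = TokenId::new(7, 1);
    assert_eq!((id.token_id(), id.map_id()), (7, 1));
    assert_eq!(TokenId::unspecified().token_id(), u32::MAX);
}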