From e6709f64af1836460aee41aca34eb19ed3a337dd Mon Sep 17 00:00:00 2001
From: Edwin Cheng
Date: Mon, 4 Nov 2019 03:12:19 +0800
Subject: Rename and fix typos

---
 crates/ra_hir_expand/src/db.rs     | 28 +++++++++++++++++++---------
 crates/ra_hir_expand/src/lib.rs    |  2 +-
 crates/ra_mbe/src/syntax_bridge.rs | 24 +++++++++++++++---------
 crates/ra_tt/src/lib.rs            | 13 ++++++++-----
 4 files changed, 43 insertions(+), 24 deletions(-)

diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs
index 8abfbb4ff..b3746924d 100644
--- a/crates/ra_hir_expand/src/db.rs
+++ b/crates/ra_hir_expand/src/db.rs
@@ -18,6 +18,12 @@ pub struct ParseMacroWithInfo {
     pub expansion_info: Arc<ExpansionInfo>,
 }
 
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct MacroExpandInfo {
+    pub arg_map: Arc<mbe::TokenMap>,
+    pub def_map: Arc<mbe::TokenMap>,
+}
+
 // FIXME: rename to ExpandDatabase
 #[salsa::query_group(AstDatabaseStorage)]
 pub trait AstDatabase: SourceDatabase {
@@ -35,7 +41,7 @@ pub trait AstDatabase: SourceDatabase {
     fn macro_expand(
         &self,
         macro_call: MacroCallId,
-    ) -> Result<(Arc<tt::Subtree>, (Arc<mbe::TokenMap>, Arc<mbe::TokenMap>)), String>;
+    ) -> Result<(Arc<tt::Subtree>, MacroExpandInfo), String>;
 
     fn macro_expansion_info(&self, macro_file: MacroFile) -> Option<Arc<ExpansionInfo>>;
 }
@@ -77,7 +83,7 @@ pub(crate) fn macro_arg(
 pub(crate) fn macro_expand(
     db: &dyn AstDatabase,
     id: MacroCallId,
-) -> Result<(Arc<tt::Subtree>, (Arc<mbe::TokenMap>, Arc<mbe::TokenMap>)), String> {
+) -> Result<(Arc<tt::Subtree>, MacroExpandInfo), String> {
     let loc = db.lookup_intern_macro(id);
     let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?;
 
@@ -89,7 +95,10 @@ pub(crate) fn macro_expand(
         return Err(format!("Total tokens count exceed limit : count = {}", count));
     }
 
-    Ok((Arc::new(tt), (macro_arg.1.clone(), macro_rules.1.clone())))
+    Ok((
+        Arc::new(tt),
+        MacroExpandInfo { arg_map: macro_arg.1.clone(), def_map: macro_rules.1.clone() },
+    ))
 }
 
 pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
@@ -133,7 +142,7 @@ pub(crate) fn parse_macro_with_info(
     };
 
     res.map(|(parsed, exp_map)| {
-        let (arg_map, def_map) = tt.1;
+        let expand_info = tt.1;
         let loc: MacroCallLoc = db.lookup_intern_macro(macro_call_id);
 
         let def_start =
@@ -141,11 +150,12 @@ pub(crate) fn parse_macro_with_info(
         let arg_start =
             loc.ast_id.to_node(db).token_tree().map(|t| t.syntax().text_range().start());
 
-        let arg_map =
-            arg_start.map(|start| exp_map.ranges(&arg_map, start)).unwrap_or_else(|| Vec::new());
-
-        let def_map =
-            def_start.map(|start| exp_map.ranges(&def_map, start)).unwrap_or_else(|| Vec::new());
+        let arg_map = arg_start
+            .map(|start| exp_map.ranges(&expand_info.arg_map, start))
+            .unwrap_or_else(|| Vec::new());
+        let def_map = def_start
+            .map(|start| exp_map.ranges(&expand_info.def_map, start))
+            .unwrap_or_else(|| Vec::new());
 
         let info = ExpansionInfo { arg_map, def_map };
 
diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs
index b6e55aa6a..63ba17158 100644
--- a/crates/ra_hir_expand/src/lib.rs
+++ b/crates/ra_hir_expand/src/lib.rs
@@ -132,7 +132,7 @@ impl MacroCallId {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
-/// ExpansionInfo mainly describle how to map text range between src and expaned macro
+/// ExpansionInfo mainly describes how to map text range between src and expanded macro
 pub struct ExpansionInfo {
     pub arg_map: Vec<(TextRange, TextRange)>,
     pub def_map: Vec<(TextRange, TextRange)>,
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 8e86742d0..2f21ad6e0 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -26,7 +26,11 @@ static TOKEN_MAP_COUNTER: AtomicU32 = AtomicU32::new(0);
 
 /// Generate an unique token map id for each instance
 fn make_uniq_token_map_id() -> u32 {
-    TOKEN_MAP_COUNTER.fetch_add(1, Ordering::SeqCst)
+    let res = TOKEN_MAP_COUNTER.fetch_add(1, Ordering::SeqCst);
+    if res == std::u32::MAX {
+        panic!("TOKEN_MAP_COUNTER is overflowed");
+    }
+    res
 }
 
 impl std::default::Default for TokenMap {
@@ -35,10 +39,9 @@ impl std::default::Default for TokenMap {
     }
 }
 
-/// Maps Relative range of the expanded syntax node to `tt::TokenId`
+/// Maps relative range of the expanded syntax node to `tt::TokenId`
 #[derive(Debug, PartialEq, Eq, Default)]
 pub struct ExpandedRangeMap {
-    /// Maps `tt::TokenId` to the *relative* source range.
     ranges: Vec<(TextRange, tt::TokenId)>,
 }
 
@@ -85,14 +88,13 @@ fn fragment_to_syntax_node(
     };
     let buffer = TokenBuffer::new(&tokens);
     let mut token_source = SubtreeTokenSource::new(&buffer);
-    let mut range_map = ExpandedRangeMap::default();
-    let mut tree_sink = TtTreeSink::new(buffer.begin(), &mut range_map);
+    let mut tree_sink = TtTreeSink::new(buffer.begin());
     ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
     if tree_sink.roots.len() != 1 {
         return Err(ExpandError::ConversionError);
     }
     //FIXME: would be cool to report errors
-    let parse = tree_sink.inner.finish();
+    let (parse, range_map) = tree_sink.finish();
     Ok((parse, range_map))
 }
 
@@ -320,7 +322,7 @@ struct TtTreeSink<'a> {
     cursor: Cursor<'a>,
     text_pos: TextUnit,
     inner: SyntaxTreeBuilder,
-    range_map: &'a mut ExpandedRangeMap,
+    range_map: ExpandedRangeMap,
 
     // Number of roots
     // Use for detect ill-form tree which is not single root
@@ -328,16 +330,20 @@ struct TtTreeSink<'a> {
 }
 
 impl<'a> TtTreeSink<'a> {
-    fn new(cursor: Cursor<'a>, range_map: &'a mut ExpandedRangeMap) -> Self {
+    fn new(cursor: Cursor<'a>) -> Self {
         TtTreeSink {
             buf: String::new(),
             cursor,
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
-            range_map,
+            range_map: ExpandedRangeMap::default(),
         }
     }
+
+    fn finish(self) -> (Parse<SyntaxNode>, ExpandedRangeMap) {
+        (self.inner.finish(), self.range_map)
+    }
 }
 
 fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
diff --git a/crates/ra_tt/src/lib.rs b/crates/ra_tt/src/lib.rs
index 96410ff22..1449618f3 100644
--- a/crates/ra_tt/src/lib.rs
+++ b/crates/ra_tt/src/lib.rs
@@ -25,23 +25,26 @@ use smol_str::SmolStr;
 /// source token and making sure that identities are preserved during macro
 /// expansion.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TokenId(u32, u32);
+pub struct TokenId {
+    token_id: u32,
+    map_id: u32,
+}
 
 impl TokenId {
     pub fn new(token_id: u32, map_id: u32) -> TokenId {
-        TokenId(token_id, map_id)
+        TokenId { token_id, map_id }
     }
 
     pub const fn unspecified() -> TokenId {
-        TokenId(!0, !0)
+        TokenId { token_id: !0, map_id: !0 }
     }
 
     pub fn token_id(&self) -> u32 {
-        self.0
+        self.token_id
     }
 
     pub fn map_id(&self) -> u32 {
-        self.1
+        self.map_id
     }
 }
--
cgit v1.2.3
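
Illustrative sketch (not part of the patch above): a minimal, self-contained Rust example of the two behavioral pieces this commit touches — the overflow-checked token-map-id counter and the named-field TokenId. It mirrors names visible in the diff but stands alone; the main function and its assertions are purely hypothetical usage, not rust-analyzer code.

use std::sync::atomic::{AtomicU32, Ordering};

// Global counter handing out one fresh map id per token map instance.
static TOKEN_MAP_COUNTER: AtomicU32 = AtomicU32::new(0);

/// Returns a unique map id, panicking instead of silently wrapping on overflow.
fn make_uniq_token_map_id() -> u32 {
    let res = TOKEN_MAP_COUNTER.fetch_add(1, Ordering::SeqCst);
    if res == u32::MAX {
        panic!("TOKEN_MAP_COUNTER is overflowed");
    }
    res
}

/// Named-field form of TokenId, replacing the old positional (u32, u32) tuple.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId {
    token_id: u32,
    map_id: u32,
}

impl TokenId {
    pub fn new(token_id: u32, map_id: u32) -> TokenId {
        TokenId { token_id, map_id }
    }

    pub fn token_id(&self) -> u32 {
        self.token_id
    }

    pub fn map_id(&self) -> u32 {
        self.map_id
    }
}

fn main() {
    // Each call yields a distinct map id; token ids stay local to their map.
    let first = TokenId::new(0, make_uniq_token_map_id());
    let second = TokenId::new(0, make_uniq_token_map_id());
    assert_eq!(first.token_id(), second.token_id());
    assert_ne!(first.map_id(), second.map_id());
    println!("{:?} {:?}", first, second);
}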