Rename and fix typos

Edwin Cheng 2019-11-04 03:12:19 +08:00
parent d8b7ba201e
commit e6709f64af
4 changed files with 43 additions and 24 deletions

View file

@@ -18,6 +18,12 @@ pub struct ParseMacroWithInfo {
pub expansion_info: Arc<ExpansionInfo>,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct MacroExpandInfo {
pub arg_map: Arc<mbe::TokenMap>,
pub def_map: Arc<mbe::TokenMap>,
}
// FIXME: rename to ExpandDatabase
#[salsa::query_group(AstDatabaseStorage)]
pub trait AstDatabase: SourceDatabase {
@@ -35,7 +41,7 @@ pub trait AstDatabase: SourceDatabase {
fn macro_expand(
&self,
macro_call: MacroCallId,
) -> Result<(Arc<tt::Subtree>, (Arc<mbe::TokenMap>, Arc<mbe::TokenMap>)), String>;
) -> Result<(Arc<tt::Subtree>, MacroExpandInfo), String>;
fn macro_expansion_info(&self, macro_file: MacroFile) -> Option<Arc<ExpansionInfo>>;
}
@@ -77,7 +83,7 @@ pub(crate) fn macro_arg(
pub(crate) fn macro_expand(
db: &dyn AstDatabase,
id: MacroCallId,
) -> Result<(Arc<tt::Subtree>, (Arc<mbe::TokenMap>, Arc<mbe::TokenMap>)), String> {
) -> Result<(Arc<tt::Subtree>, MacroExpandInfo), String> {
let loc = db.lookup_intern_macro(id);
let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?;
@@ -89,7 +95,10 @@ pub(crate) fn macro_expand(
return Err(format!("Total tokens count exceed limit : count = {}", count));
}
Ok((Arc::new(tt), (macro_arg.1.clone(), macro_rules.1.clone())))
Ok((
Arc::new(tt),
MacroExpandInfo { arg_map: macro_arg.1.clone(), def_map: macro_rules.1.clone() },
))
}
pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
@@ -133,7 +142,7 @@ pub(crate) fn parse_macro_with_info(
};
res.map(|(parsed, exp_map)| {
let (arg_map, def_map) = tt.1;
let expand_info = tt.1;
let loc: MacroCallLoc = db.lookup_intern_macro(macro_call_id);
let def_start =
@@ -141,11 +150,12 @@ pub(crate) fn parse_macro_with_info(
let arg_start =
loc.ast_id.to_node(db).token_tree().map(|t| t.syntax().text_range().start());
let arg_map =
arg_start.map(|start| exp_map.ranges(&arg_map, start)).unwrap_or_else(|| Vec::new());
let def_map =
def_start.map(|start| exp_map.ranges(&def_map, start)).unwrap_or_else(|| Vec::new());
let arg_map = arg_start
.map(|start| exp_map.ranges(&expand_info.arg_map, start))
.unwrap_or_else(|| Vec::new());
let def_map = def_start
.map(|start| exp_map.ranges(&expand_info.def_map, start))
.unwrap_or_else(|| Vec::new());
let info = ExpansionInfo { arg_map, def_map };
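
As a minimal, self-contained sketch of the refactoring in this hunk (TokenMap and Subtree below are simplified stand-ins, not rust-analyzer's real types): two identically typed Arc<TokenMap> values are easy to transpose inside an anonymous tuple, while the new MacroExpandInfo names them at every use site.

use std::sync::Arc;

// Stand-ins for mbe::TokenMap and tt::Subtree; the real types live in
// rust-analyzer and are not reproduced here.
#[derive(Debug, Default, PartialEq, Eq)]
struct TokenMap;
#[derive(Debug)]
struct Subtree;

// Two Arc<TokenMap> values of the same type are easy to swap by accident
// inside a tuple, so they now travel under named fields.
#[derive(Debug, PartialEq, Eq, Clone)]
struct MacroExpandInfo {
    arg_map: Arc<TokenMap>,
    def_map: Arc<TokenMap>,
}

fn expand() -> (Arc<Subtree>, MacroExpandInfo) {
    (
        Arc::new(Subtree),
        MacroExpandInfo { arg_map: Arc::new(TokenMap), def_map: Arc::new(TokenMap) },
    )
}

fn main() {
    let (_tt, info) = expand();
    // Callers name the map they want instead of remembering tuple positions.
    println!("arg_map: {:?}, def_map: {:?}", info.arg_map, info.def_map);
}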

View file

@@ -132,7 +132,7 @@ impl MacroCallId {
}
#[derive(Debug, Clone, PartialEq, Eq)]
/// ExpansionInfo mainly describle how to map text range between src and expaned macro
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
pub struct ExpansionInfo {
pub arg_map: Vec<(TextRange, TextRange)>,
pub def_map: Vec<(TextRange, TextRange)>,

View file

@@ -26,7 +26,11 @@ static TOKEN_MAP_COUNTER: AtomicU32 = AtomicU32::new(0);
/// Generate an unique token map id for each instance
fn make_uniq_token_map_id() -> u32 {
TOKEN_MAP_COUNTER.fetch_add(1, Ordering::SeqCst)
let res = TOKEN_MAP_COUNTER.fetch_add(1, Ordering::SeqCst);
if res == std::u32::MAX {
panic!("TOKEN_MAP_COUNTER is overflowed");
}
res
}
impl std::default::Default for TokenMap {
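
Pulled out as a standalone sketch (with std::u32::MAX written as u32::MAX), the guarded id generator added above runs on its own:

use std::sync::atomic::{AtomicU32, Ordering};

static TOKEN_MAP_COUNTER: AtomicU32 = AtomicU32::new(0);

/// Generate a unique token map id for each instance.
/// fetch_add wraps silently at u32::MAX, so the added check panics when the
/// counter reaches that value rather than quietly reusing ids.
fn make_uniq_token_map_id() -> u32 {
    let res = TOKEN_MAP_COUNTER.fetch_add(1, Ordering::SeqCst);
    if res == u32::MAX {
        panic!("TOKEN_MAP_COUNTER is overflowed");
    }
    res
}

fn main() {
    assert_eq!(make_uniq_token_map_id(), 0);
    assert_eq!(make_uniq_token_map_id(), 1);
}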
@@ -35,10 +39,9 @@ impl std::default::Default for TokenMap {
}
}
/// Maps Relative range of the expanded syntax node to `tt::TokenId`
/// Maps relative range of the expanded syntax node to `tt::TokenId`
#[derive(Debug, PartialEq, Eq, Default)]
pub struct ExpandedRangeMap {
/// Maps `tt::TokenId` to the *relative* source range.
ranges: Vec<(TextRange, tt::TokenId)>,
}
@@ -85,14 +88,13 @@ fn fragment_to_syntax_node(
};
let buffer = TokenBuffer::new(&tokens);
let mut token_source = SubtreeTokenSource::new(&buffer);
let mut range_map = ExpandedRangeMap::default();
let mut tree_sink = TtTreeSink::new(buffer.begin(), &mut range_map);
let mut tree_sink = TtTreeSink::new(buffer.begin());
ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
if tree_sink.roots.len() != 1 {
return Err(ExpandError::ConversionError);
}
//FIXME: would be cool to report errors
let parse = tree_sink.inner.finish();
let (parse, range_map) = tree_sink.finish();
Ok((parse, range_map))
}
@@ -320,7 +322,7 @@ struct TtTreeSink<'a> {
cursor: Cursor<'a>,
text_pos: TextUnit,
inner: SyntaxTreeBuilder,
range_map: &'a mut ExpandedRangeMap,
range_map: ExpandedRangeMap,
// Number of roots
// Use for detect ill-form tree which is not single root
@@ -328,16 +330,20 @@ struct TtTreeSink<'a> {
}
impl<'a> TtTreeSink<'a> {
fn new(cursor: Cursor<'a>, range_map: &'a mut ExpandedRangeMap) -> Self {
fn new(cursor: Cursor<'a>) -> Self {
TtTreeSink {
buf: String::new(),
cursor,
text_pos: 0.into(),
inner: SyntaxTreeBuilder::default(),
roots: smallvec::SmallVec::new(),
range_map,
range_map: ExpandedRangeMap::default(),
}
}
fn finish(self) -> (Parse<SyntaxNode>, ExpandedRangeMap) {
(self.inner.finish(), self.range_map)
}
}
fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
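
The ownership change to TtTreeSink, as a minimal compilable sketch with simplified stand-in types (the real TtTreeSink, Parse, and ExpandedRangeMap are rust-analyzer internals): rather than borrowing a &mut ExpandedRangeMap from the caller, the sink now owns the map and hands it back together with the parse result from finish().

#[derive(Debug, Default)]
struct ExpandedRangeMap {
    ranges: Vec<(u32, u32)>,
}

#[derive(Debug, Default)]
struct TreeSink {
    text: String,
    range_map: ExpandedRangeMap,
}

impl TreeSink {
    fn token(&mut self, tok: &str, id: u32) {
        // Record where each token lands in the output text.
        let start = self.text.len() as u32;
        self.text.push_str(tok);
        self.range_map.ranges.push((start, id));
    }

    // Consuming `self` returns both outputs at once, mirroring the new
    // TtTreeSink::finish; the caller cannot keep using the sink afterwards.
    fn finish(self) -> (String, ExpandedRangeMap) {
        (self.text, self.range_map)
    }
}

fn main() {
    let mut sink = TreeSink::default();
    sink.token("fn", 0);
    sink.token("main", 1);
    let (text, map) = sink.finish();
    println!("{:?} -> {:?}", text, map.ranges);
}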

View file

@@ -25,23 +25,26 @@ use smol_str::SmolStr;
/// source token and making sure that identities are preserved during macro
/// expansion.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId(u32, u32);
pub struct TokenId {
token_id: u32,
map_id: u32,
}
impl TokenId {
pub fn new(token_id: u32, map_id: u32) -> TokenId {
TokenId(token_id, map_id)
TokenId { token_id, map_id }
}
pub const fn unspecified() -> TokenId {
TokenId(!0, !0)
TokenId { token_id: !0, map_id: !0 }
}
pub fn token_id(&self) -> u32 {
self.0
self.token_id
}
pub fn map_id(&self) -> u32 {
self.1
self.map_id
}
}
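
Reproduced as a standalone sketch, the reworked TokenId keeps the same API but swaps positional fields for named ones, so token_id and map_id can no longer be transposed silently:

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId {
    token_id: u32,
    map_id: u32,
}

impl TokenId {
    pub fn new(token_id: u32, map_id: u32) -> TokenId {
        TokenId { token_id, map_id }
    }

    // !0u32 is u32::MAX, the sentinel for "no specific token".
    pub const fn unspecified() -> TokenId {
        TokenId { token_id: !0, map_id: !0 }
    }

    pub fn token_id(&self) -> u32 {
        self.token_id
    }

    pub fn map_id(&self) -> u32 {
        self.map_id
    }
}

fn main() {
    let id = TokenId::new(7, 1);
    assert_eq!((id.token_id(), id.map_id()), (7, 1));
    assert_ne!(id, TokenId::unspecified());
}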