Merge #1809
1809: add fragments to expansion r=matklad a=matklad
Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
commit 753a569c79
2 changed files with 35 additions and 52 deletions

@@ -81,21 +81,26 @@ struct Bindings {
 
 #[derive(Debug)]
 enum Binding {
-    Simple(tt::TokenTree),
+    Fragment(Fragment),
     Nested(Vec<Binding>),
     Empty,
 }
 
+#[derive(Debug, Clone)]
+enum Fragment {
+    /// token fragments are just copy-pasted into the output
+    Tokens(tt::TokenTree),
+    /// Ast fragments are inserted with fake delimiters, so as to make things
+    /// like `$i * 2` where `$i = 1 + 1` work as expected.
+    Ast(tt::TokenTree),
+}
+
 impl Bindings {
     fn push_optional(&mut self, name: &SmolStr) {
         // FIXME: Do we have a better way to represent an empty token ?
         // Insert an empty subtree for empty token
-        self.inner.insert(
-            name.clone(),
-            Binding::Simple(
-                tt::Subtree { delimiter: tt::Delimiter::None, token_trees: vec![] }.into(),
-            ),
-        );
+        let tt = tt::Subtree { delimiter: tt::Delimiter::None, token_trees: vec![] }.into();
+        self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
     }
 
     fn push_empty(&mut self, name: &SmolStr) {
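
The `Fragment::Ast` doc comment above points at an operator-precedence hazard: a captured expression must keep its grouping when it is substituted into a larger expression. A minimal standalone sketch of that behaviour with plain `macro_rules!` (independent of rust-analyzer's expander; the macro name `double` is illustrative only):

    // With `$i = 1 + 1`, the expansion `$i * 2` must behave like `(1 + 1) * 2`,
    // not `1 + 1 * 2`; rustc achieves this by giving the captured expression
    // invisible delimiters, which is what `Fragment::Ast` models here.
    macro_rules! double {
        ($i:expr) => {
            $i * 2
        };
    }

    fn main() {
        assert_eq!(double!(1 + 1), 4); // a result of 3 would mean the grouping was lost
    }
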
@@ -106,13 +111,13 @@ impl Bindings {
         self.inner.contains_key(name)
     }
 
-    fn get(&self, name: &SmolStr, nesting: &[usize]) -> Result<&tt::TokenTree, ExpandError> {
+    fn get(&self, name: &SmolStr, nesting: &[usize]) -> Result<&Fragment, ExpandError> {
         let mut b = self.inner.get(name).ok_or_else(|| {
             ExpandError::BindingError(format!("could not find binding `{}`", name))
         })?;
         for &idx in nesting.iter() {
             b = match b {
-                Binding::Simple(_) => break,
+                Binding::Fragment(_) => break,
                 Binding::Nested(bs) => bs.get(idx).ok_or_else(|| {
                     ExpandError::BindingError(format!("could not find nested binding `{}`", name))
                 })?,
@@ -125,7 +130,7 @@ impl Bindings {
             };
         }
         match b {
-            Binding::Simple(it) => Ok(it),
+            Binding::Fragment(it) => Ok(it),
             Binding::Nested(_) => Err(ExpandError::BindingError(format!(
                 "expected simple binding, found nested binding `{}`",
                 name
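
`Bindings::get` resolves a metavariable inside nested repetitions by walking one `Binding::Nested` level per index in `nesting`, then expecting a leaf fragment. A simplified, self-contained model of that lookup (the types here are stand-ins, not rust-analyzer's; `String` plays the role of `Fragment`):

    use std::collections::HashMap;

    #[derive(Debug)]
    enum Binding {
        Fragment(String), // stands in for the real Fragment payload
        Nested(Vec<Binding>),
    }

    // Walk one repetition level per nesting index, then expect a leaf fragment,
    // mirroring the control flow of Bindings::get above.
    fn get<'a>(map: &'a HashMap<String, Binding>, name: &str, nesting: &[usize]) -> Option<&'a String> {
        let mut b = map.get(name)?;
        for &idx in nesting {
            b = match b {
                Binding::Fragment(_) => break,
                Binding::Nested(bs) => bs.get(idx)?,
            };
        }
        match b {
            Binding::Fragment(it) => Some(it),
            Binding::Nested(_) => None,
        }
    }

    fn main() {
        // For `$($x:expr),*` matched against `a, b`, "x" holds one nested level.
        let mut map = HashMap::new();
        map.insert(
            "x".to_string(),
            Binding::Nested(vec![
                Binding::Fragment("a".to_string()),
                Binding::Fragment("b".to_string()),
            ]),
        );
        assert_eq!(get(&map, "x", &[1]).map(String::as_str), Some("b"));
    }
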
@@ -195,8 +200,8 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
             crate::Leaf::Var(crate::Var { text, kind }) => {
                 let kind = kind.as_ref().ok_or(ExpandError::UnexpectedToken)?;
                 match match_meta_var(kind.as_str(), input)? {
-                    Some(tt) => {
-                        res.inner.insert(text.clone(), Binding::Simple(tt));
+                    Some(fragment) => {
+                        res.inner.insert(text.clone(), Binding::Fragment(fragment));
                     }
                     None => res.push_optional(text),
                 }
@@ -292,7 +297,7 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
     Ok(res)
 }
 
-fn match_meta_var(kind: &str, input: &mut TtCursor) -> Result<Option<tt::TokenTree>, ExpandError> {
+fn match_meta_var(kind: &str, input: &mut TtCursor) -> Result<Option<Fragment>, ExpandError> {
     let fragment = match kind {
         "path" => Path,
         "expr" => Expr,
@@ -303,7 +308,7 @@ fn match_meta_var(kind: &str, input: &mut TtCursor) -> Result<Option<tt::TokenTr
         "meta" => MetaItem,
         "item" => Item,
         _ => {
-            let binding = match kind {
+            let tt = match kind {
                 "ident" => {
                     let ident = input.eat_ident().ok_or(ExpandError::UnexpectedToken)?.clone();
                     tt::Leaf::from(ident).into()
@@ -321,11 +326,12 @@ fn match_meta_var(kind: &str, input: &mut TtCursor) -> Result<Option<tt::TokenTr
                 },
                 _ => return Err(ExpandError::UnexpectedToken),
             };
-            return Ok(Some(binding));
+            return Ok(Some(Fragment::Tokens(tt)));
         }
     };
-    let binding = input.eat_fragment(fragment).ok_or(ExpandError::UnexpectedToken)?;
-    Ok(Some(binding))
+    let tt = input.eat_fragment(fragment).ok_or(ExpandError::UnexpectedToken)?;
+    let fragment = if kind == "expr" { Fragment::Ast(tt) } else { Fragment::Tokens(tt) };
+    Ok(Some(fragment))
 }
 
 #[derive(Debug)]
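
Per the `Fragment` doc comment, `Ast` fragments are meant to be spliced back into the output behind fake delimiters. As a hypothetical sketch of that wrapping, reusing the `tt` types seen above (the helper name `fragment_to_token_tree` is invented; this is not code from the commit):

    fn fragment_to_token_tree(fragment: Fragment) -> tt::TokenTree {
        match fragment {
            // Plain token fragments are copied through unchanged.
            Fragment::Tokens(tt) => tt,
            // Ast fragments get an invisible Delimiter::None subtree so the
            // precedence of the surrounding expansion is preserved.
            Fragment::Ast(tt) => {
                tt::Subtree { delimiter: tt::Delimiter::None, token_trees: vec![tt] }.into()
            }
        }
    }
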
@@ -342,30 +348,13 @@ fn expand_subtree(
     let mut buf: Vec<tt::TokenTree> = Vec::new();
     for tt in template.token_trees.iter() {
         let tt = expand_tt(tt, ctx)?;
-        push_tt(&mut buf, tt);
+        push_fragment(&mut buf, tt);
     }
 
     Ok(tt::Subtree { delimiter: template.delimiter, token_trees: buf })
 }
 
-/// Reduce single token subtree to single token
-/// In `tt` matcher case, all tt tokens will be braced by a Delimiter::None
-/// which makes all sort of problems.
-fn reduce_single_token(mut subtree: tt::Subtree) -> tt::TokenTree {
-    if subtree.delimiter != tt::Delimiter::None || subtree.token_trees.len() != 1 {
-        return subtree.into();
-    }
-
-    match subtree.token_trees.pop().unwrap() {
-        tt::TokenTree::Subtree(subtree) => reduce_single_token(subtree),
-        tt::TokenTree::Leaf(token) => token.into(),
-    }
-}
-
-fn expand_tt(
-    template: &crate::TokenTree,
-    ctx: &mut ExpandCtx,
-) -> Result<tt::TokenTree, ExpandError> {
+fn expand_tt(template: &crate::TokenTree, ctx: &mut ExpandCtx) -> Result<Fragment, ExpandError> {
     let res: tt::TokenTree = match template {
         crate::TokenTree::Subtree(subtree) => expand_subtree(subtree, ctx)?.into(),
         crate::TokenTree::Repeat(repeat) => {
@@ -451,7 +440,7 @@ fn expand_tt(
 
             // Check if it is a single token subtree without any delimiter
             // e.g {Delimiter:None> ['>'] /Delimiter:None>}
-            reduce_single_token(tt::Subtree { delimiter: tt::Delimiter::None, token_trees: buf })
+            tt::Subtree { delimiter: tt::Delimiter::None, token_trees: buf }.into()
         }
         crate::TokenTree::Leaf(leaf) => match leaf {
             crate::Leaf::Ident(ident) => {
@@ -492,20 +481,15 @@ fn expand_tt(
                     }
                     .into()
                 } else {
-                    let tkn = ctx.bindings.get(&v.text, &ctx.nesting)?.clone();
+                    let fragment = ctx.bindings.get(&v.text, &ctx.nesting)?.clone();
                     ctx.var_expanded = true;
-
-                    if let tt::TokenTree::Subtree(subtree) = tkn {
-                        reduce_single_token(subtree)
-                    } else {
-                        tkn
-                    }
+                    return Ok(fragment);
                 }
             }
             crate::Leaf::Literal(l) => tt::Leaf::from(tt::Literal { text: l.text.clone() }).into(),
         },
     };
-    Ok(res)
+    Ok(Fragment::Tokens(res))
 }
 
 #[cfg(test)]
@@ -579,10 +563,10 @@ mod tests {
     }
 }
 
-fn push_tt(buf: &mut Vec<tt::TokenTree>, tt: tt::TokenTree) {
-    match tt {
-        tt::TokenTree::Subtree(tt) => push_subtree(buf, tt),
-        _ => buf.push(tt),
+fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
+    match fragment {
+        Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
+        Fragment::Tokens(tt) | Fragment::Ast(tt) => buf.push(tt),
     }
 }
 
@@ -657,7 +657,6 @@ fn test_expr() {
 }
 
 #[test]
-#[ignore]
 fn test_expr_order() {
     let rules = create_rules(
         r#"
@@ -668,9 +667,9 @@ fn test_expr_order() {
         }
 "#,
     );
+    let dump = format!("{:#?}", expand_to_items(&rules, "foo! { 1 + 1 }").syntax());
     assert_eq_text!(
-        &format!("{:#?}", expand_to_items(&rules, "foo! { 1 + 1 }").syntax()),
+        dump.trim(),
         r#"MACRO_ITEMS@[0; 15)
   FN_DEF@[0; 15)
     FN_KW@[0; 2) "fn"