Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-26 13:03:31 +00:00)
Remove typed macro parsing API
We do type erasure on every code path anyway, so it doesn't make much sense to duplicate the parsing function for every node type.
This commit is contained in:
parent
f6c40c09e0
commit
70f2a21b55
6 changed files with 84 additions and 72 deletions
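A minimal sketch of the API change this commit makes, based on the functions visible in the diff below: the per-type helpers exported by ra_mbe (token_tree_to_expr, token_tree_to_items, token_tree_to_pat, token_tree_to_ty, token_tree_to_macro_stmts) are replaced by a single type-erased entry point, token_tree_to_syntax_node, which takes a ra_parser::FragmentKind. The helper function name expand_to_items and its error plumbing are illustrative only, not part of the commit.

use ra_parser::FragmentKind;
use ra_syntax::SyntaxNode;

// Before: one typed helper per fragment kind, e.g.
//     let (parse, map) = mbe::token_tree_to_items(&tt)?;  // Parse<ast::MacroItems>
//     let (parse, map) = mbe::token_tree_to_expr(&tt)?;   // Parse<ast::Expr>
//
// After: a single entry point that takes the desired FragmentKind and returns
// an untyped Parse<SyntaxNode> plus the reverse token map.
fn expand_to_items(tt: &tt::Subtree) -> Result<SyntaxNode, mbe::ExpandError> {
    let (parse, _rev_token_map) = mbe::token_tree_to_syntax_node(tt, FragmentKind::Items)?;
    Ok(parse.syntax_node())
}

Callers that still need a typed AST node can cast the returned SyntaxNode themselves, which is exactly what the removed impl_token_tree_conversions! macro used to do internally.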
Cargo.lock (generated)
@@ -1060,6 +1060,7 @@ dependencies = [
  "ra_arena 0.1.0",
  "ra_db 0.1.0",
  "ra_mbe 0.1.0",
+ "ra_parser 0.1.0",
  "ra_prof 0.1.0",
  "ra_syntax 0.1.0",
  "ra_tt 0.1.0",
@@ -10,6 +10,7 @@ log = "0.4.5"
 ra_arena = { path = "../ra_arena" }
 ra_db = { path = "../ra_db" }
 ra_syntax = { path = "../ra_syntax" }
+ra_parser = { path = "../ra_parser" }
 ra_prof = { path = "../ra_prof" }
 tt = { path = "../ra_tt", package = "ra_tt" }
 mbe = { path = "../ra_mbe", package = "ra_mbe" }
@@ -4,6 +4,7 @@ use std::sync::Arc;

 use mbe::MacroRules;
 use ra_db::{salsa, SourceDatabase};
+use ra_parser::FragmentKind;
 use ra_prof::profile;
 use ra_syntax::{AstNode, Parse, SyntaxNode};

@@ -108,12 +109,10 @@ pub(crate) fn parse_macro(
         })
         .ok()?;

-    match macro_file.macro_file_kind {
-        MacroFileKind::Items => {
-            mbe::token_tree_to_items(&tt).ok().map(|(p, map)| (p.to_syntax(), Arc::new(map)))
-        }
-        MacroFileKind::Expr => {
-            mbe::token_tree_to_expr(&tt).ok().map(|(p, map)| (p.to_syntax(), Arc::new(map)))
-        }
-    }
+    let fragment_kind = match macro_file.macro_file_kind {
+        MacroFileKind::Items => FragmentKind::Items,
+        MacroFileKind::Expr => FragmentKind::Expr,
+    };
+    let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?;
+    Some((parse, Arc::new(rev_token_map)))
 }
@@ -31,8 +31,7 @@ pub enum ExpandError {
 }

 pub use crate::syntax_bridge::{
-    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_expr, token_tree_to_items,
-    token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty, RevTokenMap, TokenMap,
+    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, RevTokenMap, TokenMap,
 };

 /// This struct contains AST for a single `macro_rules` definition. What might
@@ -1,9 +1,6 @@
 //! FIXME: write short doc here

-use ra_parser::{
-    FragmentKind::{self, *},
-    ParseError, TreeSink,
-};
+use ra_parser::{FragmentKind, ParseError, TreeSink};
 use ra_syntax::{
     ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
     SyntaxTreeBuilder, TextRange, TextUnit, T,
@@ -55,7 +52,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
 // * ImplItems(SmallVec<[ast::ImplItem; 1]>)
 // * ForeignItems(SmallVec<[ast::ForeignItem; 1]>

-fn fragment_to_syntax_node(
+pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
     fragment_kind: FragmentKind,
 ) -> Result<(Parse<SyntaxNode>, RevTokenMap), ExpandError> {
@@ -79,31 +76,6 @@ fn fragment_to_syntax_node(
     Ok((parse, range_map))
 }

-macro_rules! impl_token_tree_conversions {
-    ($($(#[$attr:meta])* $name:ident => ($kind:ident, $t:ty) ),*) => {
-        $(
-            $(#[$attr])*
-            pub fn $name(tt: &tt::Subtree) -> Result<(Parse<$t>, RevTokenMap), ExpandError> {
-                let (parse, map) = fragment_to_syntax_node(tt, $kind)?;
-                parse.cast().ok_or_else(|| crate::ExpandError::ConversionError).map(|p| (p, map))
-            }
-        )*
-    }
-}
-
-impl_token_tree_conversions! {
-    /// Parses the token tree (result of macro expansion) to an expression
-    token_tree_to_expr => (Expr, ast::Expr),
-    /// Parses the token tree (result of macro expansion) to a Pattern
-    token_tree_to_pat => (Pattern, ast::Pat),
-    /// Parses the token tree (result of macro expansion) to a Type
-    token_tree_to_ty => (Type, ast::TypeRef),
-    /// Parses the token tree (result of macro expansion) as a sequence of stmts
-    token_tree_to_macro_stmts => (Statements, ast::MacroStmts),
-    /// Parses the token tree (result of macro expansion) as a sequence of items
-    token_tree_to_items => (Items, ast::MacroItems)
-}
-
 impl TokenMap {
     pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
         let idx = tt.0 as usize;
@@ -446,6 +418,6 @@ mod tests {
 "#,
     );
     let expansion = expand(&rules, "stmts!();");
-    assert!(token_tree_to_expr(&expansion).is_err());
+    assert!(token_tree_to_syntax_node(&expansion, FragmentKind::Expr).is_err());
 }
 }
@@ -1,3 +1,4 @@
+use ra_parser::FragmentKind;
 use ra_syntax::{ast, AstNode, NodeOrToken, WalkEvent};
 use test_utils::assert_eq_text;

@@ -126,9 +127,9 @@ fn test_expr_order() {
 "#,
     );
     let expanded = expand(&rules, "foo! { 1 + 1}");
-    let tree = token_tree_to_items(&expanded).unwrap().0.tree();
+    let tree = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node();

-    let dump = format!("{:#?}", tree.syntax());
+    let dump = format!("{:#?}", tree);
     assert_eq_text!(
         dump.trim(),
         r#"MACRO_ITEMS@[0; 15)
@@ -383,9 +384,9 @@ fn test_expand_to_item_list() {
 ",
     );
     let expansion = expand(&rules, "structs!(Foo, Bar);");
-    let tree = token_tree_to_items(&expansion).unwrap().0.tree();
+    let tree = token_tree_to_syntax_node(&expansion, FragmentKind::Items).unwrap().0.syntax_node();
     assert_eq!(
-        format!("{:#?}", tree.syntax()).trim(),
+        format!("{:#?}", tree).trim(),
         r#"
 MACRO_ITEMS@[0; 40)
   STRUCT_DEF@[0; 20)
@@ -501,10 +502,11 @@ fn test_tt_to_stmts() {
     );

     let expanded = expand(&rules, "foo!{}");
-    let stmts = token_tree_to_macro_stmts(&expanded).unwrap().0.tree();
+    let stmts =
+        token_tree_to_syntax_node(&expanded, FragmentKind::Statements).unwrap().0.syntax_node();

     assert_eq!(
-        format!("{:#?}", stmts.syntax()).trim(),
+        format!("{:#?}", stmts).trim(),
         r#"MACRO_STMTS@[0; 15)
   LET_STMT@[0; 7)
     LET_KW@[0; 3) "let"
@@ -754,7 +756,10 @@ fn test_all_items() {
 }
 "#,
     );
-    assert_expansion(MacroKind::Items, &rules, r#"
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"
 foo! {
     extern crate a;
     mod b;
@@ -770,7 +775,9 @@ fn test_all_items() {
     extern {}
     type T = u8;
 }
-"#, r#"extern crate a ; mod b ; mod c {} use d ; const E : i32 = 0 ; static F : i32 = 0 ; impl G {} struct H ; enum I {Foo} trait J {} fn h () {} extern {} type T = u8 ;"#);
+"#,
+        r#"extern crate a ; mod b ; mod c {} use d ; const E : i32 = 0 ; static F : i32 = 0 ; impl G {} struct H ; enum I {Foo} trait J {} fn h () {} extern {} type T = u8 ;"#,
+    );
 }

 #[test]
@@ -946,10 +953,10 @@ fn test_vec() {
     );

     let expansion = expand(&rules, r#"vec![1u32,2];"#);
-    let tree = token_tree_to_expr(&expansion).unwrap().0.tree();
+    let tree = token_tree_to_syntax_node(&expansion, FragmentKind::Expr).unwrap().0.syntax_node();

     assert_eq!(
-        format!("{:#?}", tree.syntax()).trim(),
+        format!("{:#?}", tree).trim(),
         r#"BLOCK_EXPR@[0; 45)
   BLOCK@[0; 45)
     L_CURLY@[0; 1) "{"
@@ -1088,8 +1095,12 @@ macro_rules! generate_pattern_iterators {
 "#,
     );

-    assert_expansion(MacroKind::Items, &rules, r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#,
-    "fn foo () {}");
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#,
+        "fn foo () {}",
+    );
 }

 #[test]
@@ -1171,8 +1182,12 @@ fn test_impl_nonzero_fmt() {
 "#,
     );

-    assert_expansion(MacroKind::Items, &rules, r#"impl_nonzero_fmt! { # [stable(feature= "nonzero",since="1.28.0")] (Debug,Display,Binary,Octal,LowerHex,UpperHex) for NonZeroU8}"#,
-    "fn foo () {}");
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"impl_nonzero_fmt! { # [stable(feature= "nonzero",since="1.28.0")] (Debug,Display,Binary,Octal,LowerHex,UpperHex) for NonZeroU8}"#,
+        "fn foo () {}",
+    );
 }

 #[test]
@@ -1189,8 +1204,12 @@ fn test_cfg_if_items() {
 "#,
     );

-    assert_expansion(MacroKind::Items, &rules, r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#,
-    "__cfg_if_items ! {(rustdoc ,) ;}");
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#,
+        "__cfg_if_items ! {(rustdoc ,) ;}",
+    );
 }

 #[test]
@@ -1233,10 +1252,13 @@ cfg_if ! {
 "#,
         "__cfg_if_items ! {() ; ((target_env = \"msvc\") ()) , ((all (target_arch = \"wasm32\" , not (target_os = \"emscripten\"))) ()) , (() (mod libunwind ; pub use libunwind :: * ;)) ,}");

-    assert_expansion(MacroKind::Items, &rules, r#"
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"
 cfg_if ! { @ __apply cfg ( all ( not ( any ( not ( any ( target_os = "solaris" , target_os = "illumos" ) ) ) ) ) ) , }
 "#,
-        ""
+        "",
     );
 }

@@ -1291,10 +1313,13 @@ macro_rules! RIDL {
 }"#,
     );

-    let expanded = expand(&rules, r#"
+    let expanded = expand(
+        &rules,
+        r#"
 RIDL!{interface ID3D11Asynchronous(ID3D11AsynchronousVtbl): ID3D11DeviceChild(ID3D11DeviceChildVtbl) {
     fn GetDataSize(&mut self) -> UINT
-}}"#);
+}}"#,
+    );
     assert_eq!(expanded.to_string(), "impl ID3D11Asynchronous {pub unsafe fn GetDataSize (& mut self) -> UINT {((* self . lpVtbl) .GetDataSize) (self)}}");
 }

@@ -1340,7 +1365,8 @@ quick_error ! (SORT [enum Wrapped # [derive (Debug)]] items [

 #[test]
 fn test_empty_repeat_vars_in_empty_repeat_vars() {
-    let rules = create_rules(r#"
+    let rules = create_rules(
+        r#"
 macro_rules! delegate_impl {
     ([$self_type:ident, $self_wrap:ty, $self_map:ident]
     pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* {
@@ -1385,9 +1411,15 @@ macro_rules! delegate_impl {
         }
     }
 }
-"#);
+"#,
+    );

-    assert_expansion(MacroKind::Items, &rules, r#"delegate_impl ! {[G , & 'a mut G , deref] pub trait Data : GraphBase {@ section type type NodeWeight ;}}"#, "impl <> Data for & \'a mut G where G : Data {}");
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"delegate_impl ! {[G , & 'a mut G , deref] pub trait Data : GraphBase {@ section type type NodeWeight ;}}"#,
+        "impl <> Data for & \'a mut G where G : Data {}",
+    );
 }

 pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
@@ -1436,22 +1468,30 @@ pub(crate) fn assert_expansion(
     };
     let (expanded_tree, expected_tree) = match kind {
         MacroKind::Items => {
-            let expanded_tree = token_tree_to_items(&expanded).unwrap().0.tree();
-            let expected_tree = token_tree_to_items(&expected).unwrap().0.tree();
+            let expanded_tree =
+                token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node();
+            let expected_tree =
+                token_tree_to_syntax_node(&expected, FragmentKind::Items).unwrap().0.syntax_node();

             (
-                debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
-                debug_dump_ignore_spaces(expected_tree.syntax()).trim().to_string(),
+                debug_dump_ignore_spaces(&expanded_tree).trim().to_string(),
+                debug_dump_ignore_spaces(&expected_tree).trim().to_string(),
             )
         }

         MacroKind::Stmts => {
-            let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().0.tree();
-            let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().0.tree();
+            let expanded_tree = token_tree_to_syntax_node(&expanded, FragmentKind::Statements)
+                .unwrap()
+                .0
+                .syntax_node();
+            let expected_tree = token_tree_to_syntax_node(&expected, FragmentKind::Statements)
+                .unwrap()
+                .0
+                .syntax_node();

             (
-                debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
-                debug_dump_ignore_spaces(expected_tree.syntax()).trim().to_string(),
+                debug_dump_ignore_spaces(&expanded_tree).trim().to_string(),
+                debug_dump_ignore_spaces(&expected_tree).trim().to_string(),
             )
         }
     };