Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-11-16 01:38:13 +00:00)
Merge #2596

2596: Refactor macro tests r=matklad a=matklad

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>

Commit 1e32412e28, 3 changed files with 293 additions and 407 deletions
@@ -476,7 +476,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::tests::{create_rules, expand};
+    use crate::tests::parse_macro;
     use ra_parser::TokenSource;
     use ra_syntax::{
         algo::{insert_children, InsertPosition},

@@ -485,7 +485,7 @@ mod tests {

     #[test]
     fn convert_tt_token_source() {
-        let rules = create_rules(
+        let expansion = parse_macro(
             r#"
             macro_rules! literals {
                 ($i:ident) => {

@@ -498,8 +498,8 @@ mod tests {
                 }
             }
             "#,
-        );
-        let expansion = expand(&rules, "literals!(foo);");
+        )
+        .expand_tt("literals!(foo);");
         let tts = &[expansion.into()];
         let buffer = tt::buffer::TokenBuffer::new(tts);
         let mut tt_src = SubtreeTokenSource::new(&buffer);

@@ -527,7 +527,7 @@ mod tests {

     #[test]
     fn stmts_token_trees_to_expr_is_err() {
-        let rules = create_rules(
+        let expansion = parse_macro(
             r#"
             macro_rules! stmts {
                 () => {

@@ -538,8 +538,8 @@ mod tests {
                 }
             }
             "#,
-        );
-        let expansion = expand(&rules, "stmts!();");
+        )
+        .expand_tt("stmts!();");
         assert!(token_tree_to_syntax_node(&expansion, FragmentKind::Expr).is_err());
     }
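Read together, the hunks above show the shape of the refactor: the two-step create_rules / expand pattern is replaced by a single parse_macro fixture whose chained methods (such as expand_tt) drive the expansion. The before/after usage looks roughly like this; the macro body is shortened here for illustration, and the fixture's internals live in crate::tests, presumably the large diff suppressed below:

    // Old style (removed): build the rules, then expand in a second step.
    let rules = create_rules(r#"macro_rules! m { () => { 0 } }"#);
    let expansion = expand(&rules, "m!();");

    // New style (added): one chained call on a test fixture.
    let expansion = parse_macro(r#"macro_rules! m { () => { 0 } }"#)
        .expand_tt("m!();");

The net effect is that each test states only the macro definition and the invocation, which is what lets this commit delete more lines than it adds.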
File diff suppressed because it is too large.
@@ -83,6 +83,7 @@ pub fn parse(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
     parse_from_tokens(token_source, tree_sink, grammar::root);
 }

+#[derive(Clone, Copy)]
 pub enum FragmentKind {
     Path,
     Expr,
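This one-line change makes FragmentKind a Copy type, which fits how the tests above pass it by value (token_tree_to_syntax_node(&expansion, FragmentKind::Expr)): each call consumes its own implicit copy instead of moving the value. A minimal, self-contained analogue, not the real ra_parser definition (which has more variants) and with a hypothetical helper name:

    // A fieldless enum can derive Copy; values are then duplicated
    // implicitly instead of being moved on each by-value use.
    #[derive(Clone, Copy)]
    enum FragmentKind {
        Path,
        Expr,
    }

    // Hypothetical stand-in for a by-value consumer like
    // token_tree_to_syntax_node's kind parameter.
    fn parse_fragment(_kind: FragmentKind) {}

    fn main() {
        let kind = FragmentKind::Expr;
        parse_fragment(kind);
        parse_fragment(kind); // ok: `kind` was copied, not moved
    }

Without the derive, the second call would fail to compile with a use-after-move error.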