Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-25 20:43:21 +00:00)
Get tests working

parent 0f3a54dd4d
commit c32529ddd0

5 changed files with 19 additions and 7 deletions
@@ -720,7 +720,18 @@ mod tests {
                 }
                 ",
             ),
-            @r###"[]"###
+            @r###"
+        [
+            CompletionItem {
+                label: "the_field",
+                source_range: [156; 156),
+                delete: [156; 156),
+                insert: "the_field",
+                kind: Field,
+                detail: "u32",
+            },
+        ]
+        "###
         );
     }
 
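The `@r###"…"###` literal above is an inline snapshot in the insta style: the expected Debug output lives directly in the test source and is rewritten by the tooling when it changes. Below is a minimal sketch of the mechanism, with an invented test name and a plain `Vec<&str>` standing in for the real `CompletionItem` list (assumes `insta` as a dev-dependency):

    #[test]
    fn completes_the_field() {
        let items = vec!["the_field"]; // stand-in for the real completion items
        // On mismatch the test fails; `cargo insta review` (from cargo-insta)
        // can then rewrite the inline literal in place.
        insta::assert_debug_snapshot!(items, @r###"
        [
            "the_field",
        ]
        "###);
    }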
@@ -135,7 +135,7 @@ impl<'a> CompletionContext<'a> {
                 ),
             ) {
                 let new_offset = hypothetical_expansion.1.text_range().start();
-                if new_offset >= actual_expansion.text_range().end() {
+                if new_offset > actual_expansion.text_range().end() {
                     break;
                 }
                 original_file = actual_expansion;
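Relaxing `>=` to `>` changes only the boundary case where the speculative expansion starts exactly at the end of the actual one; a tiny self-contained illustration with invented offsets:

    fn main() {
        // Offsets are invented for illustration; text ranges are half-open,
        // so an offset equal to end() sits just past the last character.
        let actual_end: u32 = 156; // stands in for actual_expansion.text_range().end()
        let new_offset: u32 = 156; // the speculative expansion starts exactly there
        assert!(new_offset >= actual_end);   // old check (`>=`) would break out of the loop
        assert!(!(new_offset > actual_end)); // new check (`>`) does not
    }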
@@ -11,6 +11,7 @@ use rustc_hash::FxHashMap;
 use crate::{ExpandError, ExpandResult};
 
 pub(crate) fn expand(rules: &crate::MacroRules, input: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+    eprintln!("expanding input: {:?}", input);
     let (mut result, mut unmatched_tokens, mut unmatched_patterns, mut err) = (
         tt::Subtree::default(),
         usize::max_value(),
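The added `eprintln!` here, like the one in `token_tree_to_syntax_node` further down, is plain stderr tracing. A minimal, generic sketch of the pattern (the helper name `expand_traced` is invented):

    // eprintln! writes to stderr; the test harness captures a test's output
    // and only prints it when the test fails, so run
    // `cargo test -- --nocapture` to see this tracing for passing tests too.
    fn expand_traced<T: std::fmt::Debug>(input: &T) {
        eprintln!("expanding input: {:?}", input);
    }

    fn main() {
        expand_traced(&vec![1, 2, 3]);
    }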
@@ -39,9 +40,8 @@ fn expand_rule(
     rule: &crate::Rule,
     input: &tt::Subtree,
 ) -> ExpandResult<(tt::Subtree, usize, usize)> {
-    dbg!(&rule.lhs);
-    let (match_result, bindings_err) = dbg!(matcher::match_(&rule.lhs, input));
-    let (res, transcribe_err) = dbg!(transcriber::transcribe(&rule.rhs, &match_result.bindings));
+    let (match_result, bindings_err) = matcher::match_(&rule.lhs, input);
+    let (res, transcribe_err) = transcriber::transcribe(&rule.rhs, &match_result.bindings);
     (
         (res, match_result.unmatched_tokens, match_result.unmatched_patterns),
         bindings_err.or(transcribe_err),
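Because `dbg!` evaluates to its argument, wrapping and unwrapping the `match_` and `transcribe` calls leaves the data flow unchanged; a tiny self-contained illustration:

    fn main() {
        let pair = (1, 2);
        // dbg! prints the file/line and the Debug form of the expression
        // to stderr, then returns the value itself.
        let traced = dbg!(pair);
        assert_eq!(traced, pair); // the value passes through unchanged
    }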
@@ -73,6 +73,7 @@ pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
     fragment_kind: FragmentKind,
 ) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
+    eprintln!("token_tree_to_syntax_node {:?} as {:?}", tt, fragment_kind);
     let tmp;
     let tokens = match tt {
         tt::Subtree { delimiter: None, token_trees } => token_trees.as_slice(),
@@ -565,10 +565,10 @@ fn meta_var_expr(p: &mut Parser) -> CompletedMarker {
             it
         }
         _ => {
-            while !p.at(R_DOLLAR) {
+            while !p.at(EOF) && !p.at(R_DOLLAR) {
                 p.bump_any()
             }
-            p.bump(R_DOLLAR);
+            p.eat(R_DOLLAR);
             m.complete(p, ERROR)
         }
     }
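This last hunk guards the error-recovery loop against running off the end of the input and swaps `bump` (which expects the token to be present) for `eat` (which tolerates its absence). A toy sketch of the same pattern, with invented `Tok`/`Toy` types standing in for the real `ra_parser` API:

    #[derive(PartialEq, Clone, Copy, Debug)]
    enum Tok { RDollar, Other, Eof }

    struct Toy { toks: Vec<Tok>, pos: usize }

    impl Toy {
        fn at(&self, t: Tok) -> bool {
            self.toks.get(self.pos).copied().unwrap_or(Tok::Eof) == t
        }
        fn bump_any(&mut self) { self.pos += 1; }
        // eat: consume the token only if it is the expected one; never panics on EOF.
        fn eat(&mut self, t: Tok) -> bool {
            if self.at(t) { self.pos += 1; true } else { false }
        }
    }

    fn main() {
        let mut p = Toy { toks: vec![Tok::Other, Tok::Other], pos: 0 };
        // Without the EOF check this loop would never terminate when `$` is missing.
        while !p.at(Tok::Eof) && !p.at(Tok::RDollar) {
            p.bump_any();
        }
        p.eat(Tok::RDollar); // tolerate a missing closing `$`
        assert_eq!(p.pos, 2);
    }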