Mirror of https://github.com/rust-lang/rust-analyzer
Synced 2024-12-25 04:23:25 +00:00

Merge #11444

11444: feat: Fix up syntax errors in attribute macro inputs to make completion work more often r=flodiebold a=flodiebold

This implements the "fix up syntax nodes" workaround mentioned in #11014. It isn't much more than a proof of concept; I have only implemented a few cases, but it already helps quite a bit. Some notes:

- I'm not super happy about how much the fixup procedure needs to interact with the syntax node -> token tree conversion code (e.g. needing to share the token map). This could maybe be simplified with some refactoring of that code.
- It would maybe be nice to have the fixup procedure reuse or share information with the parser, though I'm not really sure how much that would actually help.

Co-authored-by: Florian Diebold <flodiebold@gmail.com>

This commit is contained in: commit 7a17fb9c43

14 changed files with 646 additions and 95 deletions
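In outline: before a syntax tree is handed to an attribute macro, a fixup pass patches over the parse errors that are inevitable while the user is mid-edit, and after expansion the patches are reversed so they never leak into results. A small illustration of the round trip, with the token strings taken from the new fixup.rs tests below (the step labels are informal descriptions, not API names):

    source while typing (invalid):    fn foo() { a. }
    fixed-up tree seen by the macro:  fn foo () {a . __ra_fixup}
    after reverse_fixups:             fn foo () {a .}

`__ra_fixup` is a synthetic identifier that exists only inside the expansion pipeline: incomplete field accesses get it appended, expression statements missing a `;` get one appended, and expressions containing unrecoverable error nodes are replaced by it wholesale (and restored afterwards from the saved undo info).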
Cargo.lock (generated, 1 change)

@@ -539,6 +539,7 @@ dependencies = [
  "cfg",
  "cov-mark",
  "either",
+ "expect-test",
  "hashbrown 0.12.0",
  "itertools",
  "la-arena",
@@ -43,6 +43,17 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static {
         db
     }

+    fn with_files_extra_proc_macros(
+        ra_fixture: &str,
+        proc_macros: Vec<(String, ProcMacro)>,
+    ) -> Self {
+        let fixture = ChangeFixture::parse_with_proc_macros(ra_fixture, proc_macros);
+        let mut db = Self::default();
+        fixture.change.apply(&mut db);
+        assert!(fixture.file_position.is_none());
+        db
+    }
+
     fn with_position(ra_fixture: &str) -> (Self, FilePosition) {
         let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture);
         let offset = range_or_offset.expect_offset();
@@ -84,7 +95,14 @@ pub struct ChangeFixture {

 impl ChangeFixture {
     pub fn parse(ra_fixture: &str) -> ChangeFixture {
-        let (mini_core, proc_macros, fixture) = Fixture::parse(ra_fixture);
+        Self::parse_with_proc_macros(ra_fixture, Vec::new())
+    }
+
+    pub fn parse_with_proc_macros(
+        ra_fixture: &str,
+        mut proc_macros: Vec<(String, ProcMacro)>,
+    ) -> ChangeFixture {
+        let (mini_core, proc_macro_names, fixture) = Fixture::parse(ra_fixture);
         let mut change = Change::new();

         let mut files = Vec::new();
@@ -222,11 +240,12 @@ impl ChangeFixture {
             }
         }

-        if !proc_macros.is_empty() {
+        if !proc_macro_names.is_empty() {
             let proc_lib_file = file_id;
             file_id.0 += 1;

-            let (proc_macro, source) = test_proc_macros(&proc_macros);
+            proc_macros.extend(default_test_proc_macros());
+            let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macros);
             let mut fs = FileSet::default();
             fs.insert(
                 proc_lib_file,
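Net effect for tests: a fixture opts into the built-in test proc macros by naming them (in lower snake case) on a `//- proc_macros:` meta line, and individual tests can now splice in extra definitions via `with_files_extra_proc_macros`. A hypothetical fixture using the default `identity` attribute macro (`TestDB` stands for any `WithFixture` implementor):

    let db = TestDB::with_files(
        r#"
    //- proc_macros: identity
    #[proc_macros::identity]
    fn foo() {}
    "#,
    );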
@@ -272,52 +291,84 @@ impl ChangeFixture {
     }
 }

-fn test_proc_macros(proc_macros: &[String]) -> (Vec<ProcMacro>, String) {
-    // The source here is only required so that paths to the macros exist and are resolvable.
-    let source = r#"
+fn default_test_proc_macros() -> [(String, ProcMacro); 4] {
+    [
+        (
+            r#"
 #[proc_macro_attribute]
 pub fn identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
     item
 }
-#[proc_macro_derive(DeriveIdentity)]
-pub fn derive_identity(item: TokenStream) -> TokenStream {
-    item
-}
-#[proc_macro_attribute]
-pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream {
-    attr
-}
-#[proc_macro]
-pub fn mirror(input: TokenStream) -> TokenStream {
-    input
-}
-"#;
-    let proc_macros = [
-        ProcMacro {
-            name: "identity".into(),
-            kind: crate::ProcMacroKind::Attr,
-            expander: Arc::new(IdentityProcMacroExpander),
-        },
+"#
+            .into(),
+            ProcMacro {
+                name: "identity".into(),
+                kind: crate::ProcMacroKind::Attr,
+                expander: Arc::new(IdentityProcMacroExpander),
+            },
+        ),
+        (
+            r#"
+#[proc_macro_derive(DeriveIdentity)]
+pub fn derive_identity(item: TokenStream) -> TokenStream {
+    item
+}
+"#
+            .into(),
             ProcMacro {
                 name: "DeriveIdentity".into(),
                 kind: crate::ProcMacroKind::CustomDerive,
                 expander: Arc::new(IdentityProcMacroExpander),
             },
+        ),
+        (
+            r#"
+#[proc_macro_attribute]
+pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream {
+    attr
+}
+"#
+            .into(),
             ProcMacro {
                 name: "input_replace".into(),
                 kind: crate::ProcMacroKind::Attr,
                 expander: Arc::new(AttributeInputReplaceProcMacroExpander),
             },
+        ),
+        (
+            r#"
+#[proc_macro]
+pub fn mirror(input: TokenStream) -> TokenStream {
+    input
+}
+"#
+            .into(),
             ProcMacro {
                 name: "mirror".into(),
                 kind: crate::ProcMacroKind::FuncLike,
                 expander: Arc::new(MirrorProcMacroExpander),
             },
+        ),
     ]
-    .into_iter()
-    .filter(|pm| proc_macros.iter().any(|name| name == &stdx::to_lower_snake_case(&pm.name)))
-    .collect();
-    (proc_macros, source.into())
+}
+
+fn filter_test_proc_macros(
+    proc_macro_names: &[String],
+    proc_macro_defs: Vec<(String, ProcMacro)>,
+) -> (Vec<ProcMacro>, String) {
+    // The source here is only required so that paths to the macros exist and are resolvable.
+    let mut source = String::new();
+    let mut proc_macros = Vec::new();
+
+    for (c, p) in proc_macro_defs {
+        if !proc_macro_names.iter().any(|name| name == &stdx::to_lower_snake_case(&p.name)) {
+            continue;
+        }
+        proc_macros.push(p);
+        source += &c;
+    }
+
+    (proc_macros, source)
 }

 #[derive(Debug, Clone, Copy)]
@@ -14,10 +14,10 @@ mod builtin_fn_macro;
 mod builtin_derive_macro;
 mod proc_macros;

-use std::{iter, ops::Range};
+use std::{iter, ops::Range, sync::Arc};

 use ::mbe::TokenMap;
-use base_db::{fixture::WithFixture, SourceDatabase};
+use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
 use expect_test::Expect;
 use hir_expand::{
     db::{AstDatabase, TokenExpander},
@@ -39,7 +39,21 @@ use crate::{

 #[track_caller]
 fn check(ra_fixture: &str, mut expect: Expect) {
-    let db = TestDB::with_files(ra_fixture);
+    let extra_proc_macros = vec![(
+        r#"
+#[proc_macro_attribute]
+pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream {
+    item
+}
+"#
+        .into(),
+        ProcMacro {
+            name: "identity_when_valid".into(),
+            kind: base_db::ProcMacroKind::Attr,
+            expander: Arc::new(IdentityWhenValidProcMacroExpander),
+        },
+    )];
+    let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros);
     let krate = db.crate_graph().iter().next().unwrap();
     let def_map = db.crate_def_map(krate);
     let local_id = def_map.root();
@@ -172,7 +186,7 @@ fn check(ra_fixture: &str, mut expect: Expect) {
         let range: Range<usize> = range.into();

         if show_token_ids {
-            if let Some((tree, map)) = arg.as_deref() {
+            if let Some((tree, map, _)) = arg.as_deref() {
                 let tt_range = call.token_tree().unwrap().syntax().text_range();
                 let mut ranges = Vec::new();
                 extract_id_ranges(&mut ranges, &map, &tree);
@@ -201,10 +215,19 @@ fn check(ra_fixture: &str, mut expect: Expect) {
     }

     for decl_id in def_map[local_id].scope.declarations() {
-        if let ModuleDefId::AdtId(AdtId::StructId(struct_id)) = decl_id {
-            let src = struct_id.lookup(&db).source(&db);
+        // FIXME: I'm sure there's already better way to do this
+        let src = match decl_id {
+            ModuleDefId::AdtId(AdtId::StructId(struct_id)) => {
+                Some(struct_id.lookup(&db).source(&db).syntax().cloned())
+            }
+            ModuleDefId::FunctionId(function_id) => {
+                Some(function_id.lookup(&db).source(&db).syntax().cloned())
+            }
+            _ => None,
+        };
+        if let Some(src) = src {
             if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
-                let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
+                let pp = pretty_print_macro_expansion(src.value, None);
                 format_to!(expanded_text, "\n{}", pp)
             }
         }
@@ -304,3 +327,25 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> Str
     }
     res
 }
+
+// Identity mapping, but only works when the input is syntactically valid. This
+// simulates common proc macros that unnecessarily parse their input and return
+// compile errors.
+#[derive(Debug)]
+struct IdentityWhenValidProcMacroExpander;
+impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
+    fn expand(
+        &self,
+        subtree: &Subtree,
+        _: Option<&Subtree>,
+        _: &base_db::Env,
+    ) -> Result<Subtree, base_db::ProcMacroExpansionError> {
+        let (parse, _) =
+            ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
+        if parse.errors().is_empty() {
+            Ok(subtree.clone())
+        } else {
+            panic!("got invalid macro input: {:?}", parse.errors());
+        }
+    }
+}
@@ -52,3 +52,43 @@ struct S;
 #[attr2] struct S;"##]],
     );
 }
+
+#[test]
+fn attribute_macro_syntax_completion_1() {
+    // this is just the case where the input is actually valid
+    check(
+        r#"
+//- proc_macros: identity_when_valid
+#[proc_macros::identity_when_valid]
+fn foo() { bar.baz(); blub }
+"#,
+        expect![[r##"
+#[proc_macros::identity_when_valid]
+fn foo() { bar.baz(); blub }
+
+fn foo() {
+    bar.baz();
+    blub
+}"##]],
+    );
+}
+
+#[test]
+fn attribute_macro_syntax_completion_2() {
+    // common case of dot completion while typing
+    check(
+        r#"
+//- proc_macros: identity_when_valid
+#[proc_macros::identity_when_valid]
+fn foo() { bar.; blub }
+"#,
+        expect![[r##"
+#[proc_macros::identity_when_valid]
+fn foo() { bar.; blub }
+
+fn foo() {
+    bar. ;
+    blub
+}"##]],
+    );
+}
@@ -27,3 +27,6 @@ profile = { path = "../profile", version = "0.0.0" }
 tt = { path = "../tt", version = "0.0.0" }
 mbe = { path = "../mbe", version = "0.0.0" }
 limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.2.0-pre.1"
@@ -14,7 +14,7 @@ use syntax::{
 };

 use crate::{
-    ast_id_map::AstIdMap, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander,
+    ast_id_map::AstIdMap, fixup, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander,
     BuiltinFnLikeExpander, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
     MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
 };
@@ -108,7 +108,10 @@ pub trait AstDatabase: SourceDatabase {

     /// Lowers syntactic macro call to a token tree representation.
     #[salsa::transparent]
-    fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
+    fn macro_arg(
+        &self,
+        id: MacroCallId,
+    ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
     /// Extracts syntax node, corresponding to a macro call. That's a firewall
     /// query, only typing in the macro call itself changes the returned
     /// subtree.
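Downstream, callers of `macro_arg` now destructure a three-element tuple: the undo info rides along with the subtree and token map so the fixups can be reversed after expansion. A condensed sketch of what the `macro_expand` hunk further down does with it:

    let macro_arg = db.macro_arg(id)?; // Arc<(Subtree, TokenMap, SyntaxFixupUndoInfo)>
    let ExpandResult { value: mut tt, err } = expander.expand(db, id, &macro_arg.0);
    fixup::reverse_fixups(&mut tt, &macro_arg.1, &macro_arg.2);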
@@ -146,8 +149,15 @@ pub fn expand_speculative(

     // Build the subtree and token mapping for the speculative args
     let censor = censor_for_macro_input(&loc, &speculative_args);
-    let (mut tt, spec_args_tmap) =
-        mbe::syntax_node_to_token_tree_censored(&speculative_args, &censor);
+    let mut fixups = fixup::fixup_syntax(&speculative_args);
+    fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+    let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+        &speculative_args,
+        fixups.token_map,
+        fixups.next_id,
+        fixups.replace,
+        fixups.append,
+    );

     let (attr_arg, token_id) = match loc.kind {
         MacroCallKind::Attr { invoc_attr_index, .. } => {
@@ -194,7 +204,7 @@ pub fn expand_speculative(

     // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
     // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
-    let speculative_expansion = if let MacroDefKind::ProcMacro(expander, ..) = loc.def.kind {
+    let mut speculative_expansion = if let MacroDefKind::ProcMacro(expander, ..) = loc.def.kind {
         tt.delimiter = None;
         expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
     } else {
@@ -202,6 +212,7 @@ pub fn expand_speculative(
     };

     let expand_to = macro_expand_to(db, actual_macro_call);
+    fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
     let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);

     let range = rev_tmap.first_range_by_token(token_id, token_to_map.kind())?;
@@ -289,20 +300,31 @@ fn parse_macro_expansion(
     }
 }

-fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
+fn macro_arg(
+    db: &dyn AstDatabase,
+    id: MacroCallId,
+) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
     let arg = db.macro_arg_text(id)?;
     let loc = db.lookup_intern_macro_call(id);

     let node = SyntaxNode::new_root(arg);
     let censor = censor_for_macro_input(&loc, &node);
-    let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, &censor);
+    let mut fixups = fixup::fixup_syntax(&node);
+    fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+    let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+        &node,
+        fixups.token_map,
+        fixups.next_id,
+        fixups.replace,
+        fixups.append,
+    );

     if loc.def.is_proc_macro() {
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
         tt.delimiter = None;
     }

-    Some(Arc::new((tt, tmap)))
+    Some(Arc::new((tt, tmap, fixups.undo_info)))
 }

 fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
|
||||||
// be reported at the definition site (when we construct a def map).
|
// be reported at the definition site (when we construct a def map).
|
||||||
Err(err) => return ExpandResult::str_err(format!("invalid macro definition: {}", err)),
|
Err(err) => return ExpandResult::str_err(format!("invalid macro definition: {}", err)),
|
||||||
};
|
};
|
||||||
let ExpandResult { value: tt, err } = expander.expand(db, id, ¯o_arg.0);
|
let ExpandResult { value: mut tt, err } = expander.expand(db, id, ¯o_arg.0);
|
||||||
// Set a hard limit for the expanded tt
|
// Set a hard limit for the expanded tt
|
||||||
let count = tt.count();
|
let count = tt.count();
|
||||||
// XXX: Make ExpandResult a real error and use .map_err instead?
|
|
||||||
if TOKEN_LIMIT.check(count).is_err() {
|
if TOKEN_LIMIT.check(count).is_err() {
|
||||||
return ExpandResult::str_err(format!(
|
return ExpandResult::str_err(format!(
|
||||||
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
|
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
|
||||||
|
@ -431,6 +452,8 @@ fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Ar
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fixup::reverse_fixups(&mut tt, ¯o_arg.1, ¯o_arg.2);
|
||||||
|
|
||||||
ExpandResult { value: Some(Arc::new(tt)), err }
|
ExpandResult { value: Some(Arc::new(tt)), err }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
crates/hir_expand/src/fixup.rs (new file, 261 lines)

@@ -0,0 +1,261 @@ (all lines added)

//! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them.
use std::mem;

use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
use rustc_hash::FxHashMap;
use syntax::{
    ast::{self, AstNode},
    match_ast, SyntaxKind, SyntaxNode, TextRange,
};
use tt::Subtree;

/// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards, and a token map.
#[derive(Debug)]
pub(crate) struct SyntaxFixups {
    pub(crate) append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    pub(crate) replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    pub(crate) undo_info: SyntaxFixupUndoInfo,
    pub(crate) token_map: TokenMap,
    pub(crate) next_id: u32,
}

/// This is the information needed to reverse the fixups.
#[derive(Debug, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
    original: Vec<Subtree>,
}

const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);

pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
    let mut append = FxHashMap::default();
    let mut replace = FxHashMap::default();
    let mut preorder = node.preorder();
    let mut original = Vec::new();
    let mut token_map = TokenMap::default();
    let mut next_id = 0;
    while let Some(event) = preorder.next() {
        let node = match event {
            syntax::WalkEvent::Enter(node) => node,
            syntax::WalkEvent::Leave(_) => continue,
        };

        if can_handle_error(&node) && has_error_to_handle(&node) {
            // the node contains an error node, we have to completely replace it by something valid
            let (original_tree, new_tmap, new_next_id) =
                mbe::syntax_node_to_token_tree_with_modifications(
                    &node,
                    mem::take(&mut token_map),
                    next_id,
                    Default::default(),
                    Default::default(),
                );
            token_map = new_tmap;
            next_id = new_next_id;
            let idx = original.len() as u32;
            original.push(original_tree);
            let replacement = SyntheticToken {
                kind: SyntaxKind::IDENT,
                text: "__ra_fixup".into(),
                range: node.text_range(),
                id: SyntheticTokenId(idx),
            };
            replace.insert(node.clone(), vec![replacement]);
            preorder.skip_subtree();
            continue;
        }

        // In some other situations, we can fix things by just appending some tokens.
        let end_range = TextRange::empty(node.text_range().end());
        match_ast! {
            match node {
                ast::FieldExpr(it) => {
                    if it.name_ref().is_none() {
                        // incomplete field access: some_expr.|
                        append.insert(node.clone(), vec![
                            SyntheticToken {
                                kind: SyntaxKind::IDENT,
                                text: "__ra_fixup".into(),
                                range: end_range,
                                id: EMPTY_ID,
                            },
                        ]);
                    }
                },
                ast::ExprStmt(it) => {
                    if it.semicolon_token().is_none() {
                        append.insert(node.clone(), vec![
                            SyntheticToken {
                                kind: SyntaxKind::SEMICOLON,
                                text: ";".into(),
                                range: end_range,
                                id: EMPTY_ID,
                            },
                        ]);
                    }
                },
                _ => (),
            }
        }
    }
    SyntaxFixups {
        append,
        replace,
        token_map,
        next_id,
        undo_info: SyntaxFixupUndoInfo { original },
    }
}

fn has_error(node: &SyntaxNode) -> bool {
    node.children().any(|c| c.kind() == SyntaxKind::ERROR)
}

fn can_handle_error(node: &SyntaxNode) -> bool {
    ast::Expr::can_cast(node.kind())
}

fn has_error_to_handle(node: &SyntaxNode) -> bool {
    has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
}

pub(crate) fn reverse_fixups(
    tt: &mut Subtree,
    token_map: &TokenMap,
    undo_info: &SyntaxFixupUndoInfo,
) {
    tt.token_trees.retain(|tt| match tt {
        tt::TokenTree::Leaf(leaf) => {
            token_map.synthetic_token_id(leaf.id()).is_none()
                || token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID)
        }
        _ => true,
    });
    tt.token_trees.iter_mut().for_each(|tt| match tt {
        tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info),
        tt::TokenTree::Leaf(leaf) => {
            if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
                let original = &undo_info.original[id.0 as usize];
                *tt = tt::TokenTree::Subtree(original.clone());
            }
        }
    });
}

#[cfg(test)]
mod tests {
    use expect_test::{expect, Expect};

    use super::reverse_fixups;

    #[track_caller]
    fn check(ra_fixture: &str, mut expect: Expect) {
        let parsed = syntax::SourceFile::parse(ra_fixture);
        let fixups = super::fixup_syntax(&parsed.syntax_node());
        let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
            &parsed.syntax_node(),
            fixups.token_map,
            fixups.next_id,
            fixups.replace,
            fixups.append,
        );

        let mut actual = tt.to_string();
        actual.push_str("\n");

        expect.indent(false);
        expect.assert_eq(&actual);

        // the fixed-up tree should be syntactically valid
        let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
        assert_eq!(
            parse.errors(),
            &[],
            "parse has syntax errors. parse tree:\n{:#?}",
            parse.syntax_node()
        );

        reverse_fixups(&mut tt, &tmap, &fixups.undo_info);

        // the fixed-up + reversed version should be equivalent to the original input
        // (but token IDs don't matter)
        let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
        assert_eq!(tt.to_string(), original_as_tt.to_string());
    }

    #[test]
    fn incomplete_field_expr_1() {
        check(
            r#"
fn foo() {
    a.
}
"#,
            expect![[r#"
fn foo () {a . __ra_fixup}
"#]],
        )
    }

    #[test]
    fn incomplete_field_expr_2() {
        check(
            r#"
fn foo() {
    a. ;
}
"#,
            expect![[r#"
fn foo () {a . __ra_fixup ;}
"#]],
        )
    }

    #[test]
    fn incomplete_field_expr_3() {
        check(
            r#"
fn foo() {
    a. ;
    bar();
}
"#,
            expect![[r#"
fn foo () {a . __ra_fixup ; bar () ;}
"#]],
        )
    }

    #[test]
    fn field_expr_before_call() {
        // another case that easily happens while typing
        check(
            r#"
fn foo() {
    a.b
    bar();
}
"#,
            expect![[r#"
fn foo () {a . b ; bar () ;}
"#]],
        )
    }

    #[test]
    fn extraneous_comma() {
        check(
            r#"
fn foo() {
    bar(,);
}
"#,
            expect![[r#"
fn foo () {__ra_fixup ;}
"#]],
        )
    }
}
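A condensed sketch of the round trip these tests exercise, using the module's own API (the first assertion string comes from `incomplete_field_expr_1`; the reversed string is inferred from the test's equality check against the original token tree):

    let parsed = syntax::SourceFile::parse("fn foo() { a. }");
    let fixups = fixup_syntax(&parsed.syntax_node());
    let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
        &parsed.syntax_node(),
        fixups.token_map,
        fixups.next_id,
        fixups.replace,
        fixups.append,
    );
    // the macro sees syntactically valid input:
    assert_eq!(tt.to_string(), "fn foo () {a . __ra_fixup}");

    reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
    // ... and the synthetic identifier is gone again afterwards:
    assert_eq!(tt.to_string(), "fn foo () {a .}");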
@@ -15,6 +15,7 @@ use syntax::{

 use crate::{
     db::{self, AstDatabase},
+    fixup,
     name::{AsName, Name},
     HirFileId, HirFileIdRepr, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
 };

@@ -127,7 +128,7 @@ struct HygieneInfo {
     attr_input_or_mac_def_start: Option<InFile<TextSize>>,

     macro_def: Arc<TokenExpander>,
-    macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
+    macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
     macro_arg_shift: mbe::Shift,
     exp_map: Arc<mbe::TokenMap>,
 }
@@ -15,6 +15,7 @@ pub mod proc_macro;
 pub mod quote;
 pub mod eager;
 pub mod mod_path;
+mod fixup;

 pub use mbe::{ExpandError, ExpandResult, Origin};

@@ -426,7 +427,7 @@ pub struct ExpansionInfo {
     attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,

     macro_def: Arc<TokenExpander>,
-    macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
+    macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
     /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
     /// and as such we need to shift tokens if they are part of an attributes input instead of their item.
     macro_arg_shift: mbe::Shift,
@@ -62,7 +62,6 @@ fn proc_macros_qualified() {
 struct Foo;
 "#,
         expect![[r#"
-            at input_replace pub macro input_replace
             at identity pub macro identity
         "#]],
     )
@@ -30,7 +30,8 @@ pub use tt::{Delimiter, DelimiterKind, Punct};
 pub use crate::{
     syntax_bridge::{
         parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
-        syntax_node_to_token_tree_censored, token_tree_to_syntax_node,
+        syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken,
+        SyntheticTokenId,
     },
     token_map::TokenMap,
 };
@@ -1,7 +1,7 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].

-use rustc_hash::{FxHashMap, FxHashSet};
-use stdx::non_empty_vec::NonEmptyVec;
+use rustc_hash::FxHashMap;
+use stdx::{always, non_empty_vec::NonEmptyVec};
 use syntax::{
     ast::{self, make::tokens::doc_comment},
     AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
@@ -15,20 +15,43 @@ use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
-    syntax_node_to_token_tree_censored(node, &Default::default())
+    let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications(
+        node,
+        Default::default(),
+        0,
+        Default::default(),
+        Default::default(),
+    );
+    (subtree, token_map)
 }

 /// Convert the syntax node to a `TokenTree` (what macro will consume)
 /// with the censored range excluded.
-pub fn syntax_node_to_token_tree_censored(
+pub fn syntax_node_to_token_tree_with_modifications(
     node: &SyntaxNode,
-    censor: &FxHashSet<SyntaxNode>,
-) -> (tt::Subtree, TokenMap) {
+    existing_token_map: TokenMap,
+    next_id: u32,
+    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+) -> (tt::Subtree, TokenMap, u32) {
     let global_offset = node.text_range().start();
-    let mut c = Convertor::new(node, global_offset, censor);
+    let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
     let subtree = convert_tokens(&mut c);
     c.id_alloc.map.shrink_to_fit();
-    (subtree, c.id_alloc.map)
+    always!(c.replace.is_empty(), "replace: {:?}", c.replace);
+    always!(c.append.is_empty(), "append: {:?}", c.append);
+    (subtree, c.id_alloc.map, c.id_alloc.next_id)
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct SyntheticTokenId(pub u32);
+
+#[derive(Debug, Clone)]
+pub struct SyntheticToken {
+    pub kind: SyntaxKind,
+    pub text: SmolStr,
+    pub range: TextRange,
+    pub id: SyntheticTokenId,
 }

 // The following items are what `rustc` macro can be parsed into :
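The old censoring entry point is subsumed by the new one: dropping a node is now expressed as replacing it with an empty synthetic-token list, which is exactly how the `hir_expand` hunks above feed the old censor set in. A minimal sketch, assuming `root` and `node_to_censor` are `SyntaxNode`s:

    let mut replace = FxHashMap::default();
    replace.insert(node_to_censor, Vec::new()); // replace with nothing == censor
    let (tt, tmap, _next_id) = syntax_node_to_token_tree_with_modifications(
        &root,
        TokenMap::default(),
        0,                    // first free token id
        replace,
        FxHashMap::default(), // nothing to append
    );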
@@ -147,13 +170,14 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
             Some(it) => it,
             None => break,
         };
+        let synth_id = token.synthetic_id(&conv);

         let kind = token.kind(&conv);
         if kind == COMMENT {
             if let Some(tokens) = conv.convert_doc_comment(&token) {
                 // FIXME: There has to be a better way to do this
                 // Add the comments token id to the converted doc string
-                let id = conv.id_alloc().alloc(range);
+                let id = conv.id_alloc().alloc(range, synth_id);
                 result.extend(tokens.into_iter().map(|mut tt| {
                     if let tt::TokenTree::Subtree(sub) = &mut tt {
                         if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
@@ -168,7 +192,9 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
             continue;
         }
         let tt = if kind.is_punct() && kind != UNDERSCORE {
-            assert_eq!(range.len(), TextSize::of('.'));
+            if synth_id.is_none() {
+                assert_eq!(range.len(), TextSize::of('.'));
+            }

             if let Some(delim) = subtree.delimiter {
                 let expected = match delim.kind {
|
||||||
panic!("Token from lexer must be single char: token = {:#?}", token);
|
panic!("Token from lexer must be single char: token = {:#?}", token);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
|
tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) })
|
||||||
|
.into()
|
||||||
} else {
|
} else {
|
||||||
macro_rules! make_leaf {
|
macro_rules! make_leaf {
|
||||||
($i:ident) => {
|
($i:ident) => {
|
||||||
tt::$i { id: conv.id_alloc().alloc(range), text: token.to_text(conv) }.into()
|
tt::$i { id: conv.id_alloc().alloc(range, synth_id), text: token.to_text(conv) }
|
||||||
|
.into()
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
let leaf: tt::Leaf = match kind {
|
let leaf: tt::Leaf = match kind {
|
||||||
|
@@ -239,14 +267,14 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
                 let apostrophe = tt::Leaf::from(tt::Punct {
                     char: '\'',
                     spacing: tt::Spacing::Joint,
-                    id: conv.id_alloc().alloc(r),
+                    id: conv.id_alloc().alloc(r, synth_id),
                 });
                 result.push(apostrophe.into());

                 let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
                 let ident = tt::Leaf::from(tt::Ident {
                     text: SmolStr::new(&token.to_text(conv)[1..]),
-                    id: conv.id_alloc().alloc(r),
+                    id: conv.id_alloc().alloc(r, synth_id),
                 });
                 result.push(ident.into());
                 continue;
@@ -267,7 +295,7 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {

         conv.id_alloc().close_delim(entry.idx, None);
         let leaf: tt::Leaf = tt::Punct {
-            id: conv.id_alloc().alloc(entry.open_range),
+            id: conv.id_alloc().alloc(entry.open_range, None),
             char: match entry.subtree.delimiter.unwrap().kind {
                 tt::DelimiterKind::Parenthesis => '(',
                 tt::DelimiterKind::Brace => '{',
@@ -361,11 +389,18 @@ struct TokenIdAlloc {
 }

 impl TokenIdAlloc {
-    fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
+    fn alloc(
+        &mut self,
+        absolute_range: TextRange,
+        synthetic_id: Option<SyntheticTokenId>,
+    ) -> tt::TokenId {
         let relative_range = absolute_range - self.global_offset;
         let token_id = tt::TokenId(self.next_id);
         self.next_id += 1;
         self.map.insert(token_id, relative_range);
+        if let Some(id) = synthetic_id {
+            self.map.insert_synthetic(token_id, id);
+        }
         token_id
     }
@@ -405,6 +440,8 @@ trait SrcToken<Ctx>: std::fmt::Debug {
     fn to_char(&self, ctx: &Ctx) -> Option<char>;

     fn to_text(&self, ctx: &Ctx) -> SmolStr;
+
+    fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
 }

 trait TokenConvertor: Sized {
@@ -431,6 +468,10 @@ impl<'a> SrcToken<RawConvertor<'a>> for usize {
     fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
         ctx.lexed.text(*self).into()
     }
+
+    fn synthetic_id(&self, _ctx: &RawConvertor<'a>) -> Option<SyntheticTokenId> {
+        None
+    }
 }

 impl<'a> TokenConvertor for RawConvertor<'a> {
@@ -465,86 +506,130 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
     }
 }

-struct Convertor<'c> {
+struct Convertor {
     id_alloc: TokenIdAlloc,
     current: Option<SyntaxToken>,
+    current_synthetic: Vec<SyntheticToken>,
     preorder: PreorderWithTokens,
-    censor: &'c FxHashSet<SyntaxNode>,
+    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
 }

-impl<'c> Convertor<'c> {
+impl Convertor {
     fn new(
         node: &SyntaxNode,
         global_offset: TextSize,
-        censor: &'c FxHashSet<SyntaxNode>,
-    ) -> Convertor<'c> {
+        existing_token_map: TokenMap,
+        next_id: u32,
+        mut replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        mut append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    ) -> Convertor {
         let range = node.text_range();
         let mut preorder = node.preorder_with_tokens();
-        let first = Self::next_token(&mut preorder, censor);
+        let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
         Convertor {
-            id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
+            id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } },
             current: first,
+            current_synthetic: synthetic,
             preorder,
             range,
-            censor,
+            replace,
+            append,
             punct_offset: None,
         }
     }

     fn next_token(
         preorder: &mut PreorderWithTokens,
-        censor: &FxHashSet<SyntaxNode>,
-    ) -> Option<SyntaxToken> {
+        replace: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        append: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
         while let Some(ev) = preorder.next() {
             let ele = match ev {
                 WalkEvent::Enter(ele) => ele,
+                WalkEvent::Leave(SyntaxElement::Node(node)) => {
+                    if let Some(mut v) = append.remove(&node) {
+                        if !v.is_empty() {
+                            v.reverse();
+                            return (None, v);
+                        }
+                    }
+                    continue;
+                }
                 _ => continue,
             };
             match ele {
-                SyntaxElement::Token(t) => return Some(t),
-                SyntaxElement::Node(node) if censor.contains(&node) => preorder.skip_subtree(),
-                SyntaxElement::Node(_) => (),
+                SyntaxElement::Token(t) => return (Some(t), Vec::new()),
+                SyntaxElement::Node(node) => {
+                    if let Some(mut v) = replace.remove(&node) {
+                        preorder.skip_subtree();
+                        if !v.is_empty() {
+                            v.reverse();
+                            return (None, v);
+                        }
+                    }
+                }
             }
         }
-        None
+        (None, Vec::new())
     }
 }

 #[derive(Debug)]
 enum SynToken {
     Ordinary(SyntaxToken),
+    // FIXME is this supposed to be `Punct`?
     Punch(SyntaxToken, TextSize),
+    Synthetic(SyntheticToken),
 }

 impl SynToken {
-    fn token(&self) -> &SyntaxToken {
+    fn token(&self) -> Option<&SyntaxToken> {
         match self {
-            SynToken::Ordinary(it) | SynToken::Punch(it, _) => it,
+            SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
+            SynToken::Synthetic(_) => None,
         }
     }
 }

-impl<'a> SrcToken<Convertor<'a>> for SynToken {
-    fn kind(&self, _ctx: &Convertor<'a>) -> SyntaxKind {
-        self.token().kind()
+impl SrcToken<Convertor> for SynToken {
+    fn kind(&self, _ctx: &Convertor) -> SyntaxKind {
+        match self {
+            SynToken::Ordinary(token) => token.kind(),
+            SynToken::Punch(token, _) => token.kind(),
+            SynToken::Synthetic(token) => token.kind,
+        }
     }
-    fn to_char(&self, _ctx: &Convertor<'a>) -> Option<char> {
+    fn to_char(&self, _ctx: &Convertor) -> Option<char> {
         match self {
             SynToken::Ordinary(_) => None,
             SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
+            SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
+            SynToken::Synthetic(_) => None,
         }
     }
-    fn to_text(&self, _ctx: &Convertor<'a>) -> SmolStr {
-        self.token().text().into()
+    fn to_text(&self, _ctx: &Convertor) -> SmolStr {
+        match self {
+            SynToken::Ordinary(token) => token.text().into(),
+            SynToken::Punch(token, _) => token.text().into(),
+            SynToken::Synthetic(token) => token.text.clone(),
+        }
+    }
+
+    fn synthetic_id(&self, _ctx: &Convertor) -> Option<SyntheticTokenId> {
+        match self {
+            SynToken::Synthetic(token) => Some(token.id),
+            _ => None,
+        }
     }
 }

-impl TokenConvertor for Convertor<'_> {
+impl TokenConvertor for Convertor {
     type Token = SynToken;
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
-        convert_doc_comment(token.token())
+        convert_doc_comment(token.token()?)
     }

     fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
@@ -558,11 +643,25 @@ impl TokenConvertor for Convertor<'_> {
             }
         }

+        if let Some(synth_token) = self.current_synthetic.pop() {
+            if self.current_synthetic.is_empty() {
+                let (new_current, new_synth) =
+                    Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+                self.current = new_current;
+                self.current_synthetic = new_synth;
+            }
+            let range = synth_token.range;
+            return Some((SynToken::Synthetic(synth_token), range));
+        }
+
         let curr = self.current.clone()?;
         if !&self.range.contains_range(curr.text_range()) {
             return None;
         }
-        self.current = Self::next_token(&mut self.preorder, self.censor);
+        let (new_current, new_synth) =
+            Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+        self.current = new_current;
+        self.current_synthetic = new_synth;
         let token = if curr.kind().is_punct() {
             self.punct_offset = Some((curr.clone(), 0.into()));
             let range = curr.text_range();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(synth_token) = self.current_synthetic.last() {
|
||||||
|
return Some(SynToken::Synthetic(synth_token.clone()));
|
||||||
|
}
|
||||||
|
|
||||||
let curr = self.current.clone()?;
|
let curr = self.current.clone()?;
|
||||||
if !self.range.contains_range(curr.text_range()) {
|
if !self.range.contains_range(curr.text_range()) {
|
||||||
return None;
|
return None;
|
||||||
|
|
|
@@ -5,6 +5,8 @@ use std::hash::Hash;
 use parser::{SyntaxKind, T};
 use syntax::{TextRange, TextSize};

+use crate::syntax_bridge::SyntheticTokenId;
+
 #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
 enum TokenTextRange {
     Token(TextRange),
@@ -31,6 +33,7 @@ impl TokenTextRange {
 pub struct TokenMap {
     /// Maps `tt::TokenId` to the *relative* source range.
     entries: Vec<(tt::TokenId, TokenTextRange)>,
+    pub synthetic_entries: Vec<(tt::TokenId, SyntheticTokenId)>,
 }

 impl TokenMap {
@@ -57,6 +60,10 @@ impl TokenMap {
             .filter_map(move |(_, range)| range.by_kind(kind))
     }

+    pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option<SyntheticTokenId> {
+        self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id)
+    }
+
     pub fn first_range_by_token(
         &self,
         token_id: tt::TokenId,
|
||||||
|
|
||||||
pub(crate) fn shrink_to_fit(&mut self) {
|
pub(crate) fn shrink_to_fit(&mut self) {
|
||||||
self.entries.shrink_to_fit();
|
self.entries.shrink_to_fit();
|
||||||
|
self.synthetic_entries.shrink_to_fit();
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
|
pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
|
||||||
self.entries.push((token_id, TokenTextRange::Token(relative_range)));
|
self.entries.push((token_id, TokenTextRange::Token(relative_range)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) {
|
||||||
|
self.synthetic_entries.push((token_id, id));
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn insert_delim(
|
pub(crate) fn insert_delim(
|
||||||
&mut self,
|
&mut self,
|
||||||
token_id: tt::TokenId,
|
token_id: tt::TokenId,
|
||||||
|
|
|
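A minimal sketch of how the two sides pair up, shown as if the `pub(crate)` methods were visible (the concrete ids and ranges are arbitrary illustration values):

    let mut map = TokenMap::default();
    let token_id = tt::TokenId(0);
    map.insert(token_id, TextRange::empty(0.into()));
    map.insert_synthetic(token_id, SyntheticTokenId(7));
    assert_eq!(map.synthetic_token_id(token_id), Some(SyntheticTokenId(7)));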
@@ -87,6 +87,16 @@ pub struct Ident {
     pub id: TokenId,
 }

+impl Leaf {
+    pub fn id(&self) -> TokenId {
+        match self {
+            Leaf::Literal(l) => l.id,
+            Leaf::Punct(p) => p.id,
+            Leaf::Ident(i) => i.id,
+        }
+    }
+}
+
 fn print_debug_subtree(f: &mut fmt::Formatter<'_>, subtree: &Subtree, level: usize) -> fmt::Result {
     let align = "  ".repeat(level);