Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-28 14:03:35 +00:00
Auto merge of #16834 - Veykril:macarons, r=Veykril
feat: Support macro calls in eager macros for IDE features

Hovering `concat` and `env` in `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` now works and highlights as expected. This also fixes a few bugs along the way. Previously the call-site span was set to the entire macro call, which is wrong: typing inside the call would invalidate the span, causing us to leak `MacroCallLoc`s whenever that happened. The same problem existed for attributes; both macro calls and attributes now use their path as the call-site span.
Commit 14558af15e: 26 changed files with 456 additions and 262 deletions.
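For illustration, a minimal sketch of the kind of nested eager macro call this change targets (hypothetical example, not part of the commit; it assumes a build script that writes a foo.rs defining fn generated() into OUT_DIR). Hovering the inner concat! and env! calls now resolves and highlights them as the builtin macros:

// src/main.rs (hypothetical)
// `concat!` and `env!` are expanded eagerly as the argument of `include!`;
// hovering either of them in the IDE now works.
include!(concat!(env!("OUT_DIR"), "/foo.rs"));

fn main() {
    // Uses the item pulled in by the eagerly expanded `include!` above.
    let _ = generated();
}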
@@ -191,9 +191,9 @@ impl StructData {
let krate = loc.container.krate;
let item_tree = loc.id.item_tree(db);
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
let cfg_options = db.crate_graph()[krate].cfg_options.clone();

let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());

let mut flags = StructFlags::NO_FLAGS;
if attrs.by_key("rustc_has_incoherent_inherent_impls").exists() {

@@ -248,9 +248,9 @@ impl StructData {
let krate = loc.container.krate;
let item_tree = loc.id.item_tree(db);
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
let cfg_options = db.crate_graph()[krate].cfg_options.clone();

let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
let mut flags = StructFlags::NO_FLAGS;
if attrs.by_key("rustc_has_incoherent_inherent_impls").exists() {
flags |= StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL;

@@ -309,13 +309,10 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
kind: kind(loc.expander, loc.id.file_id(), makro.ast_id.upcast()),
local_inner: false,
allow_internal_unsafe: loc.allow_internal_unsafe,
span: db
.span_map(loc.id.file_id())
.span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()),
span: makro.def_site,
edition: loc.edition,
}
}

MacroId::MacroRulesId(it) => {
let loc: MacroRulesLoc = it.lookup(db);

@@ -328,9 +325,7 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
allow_internal_unsafe: loc
.flags
.contains(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE),
span: db
.span_map(loc.id.file_id())
.span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()),
span: makro.def_site,
edition: loc.edition,
}
}

@@ -348,6 +343,7 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
),
local_inner: false,
allow_internal_unsafe: false,
// FIXME: This is wrong, this should point to the name
span: db
.span_map(loc.id.file_id())
.span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()),

@@ -790,7 +790,6 @@ pub struct MacroCall {
pub path: Interned<ModPath>,
pub ast_id: FileAstId<ast::MacroCall>,
pub expand_to: ExpandTo,
// FIXME: We need to move this out. It invalidates the item tree when typing inside the macro call.
pub call_site: Span,
}

@@ -799,6 +798,7 @@ pub struct MacroRules {
/// The name of the declared macro.
pub name: Name,
pub ast_id: FileAstId<ast::MacroRules>,
pub def_site: Span,
}

/// "Macros 2.0" macro definition.

@@ -807,6 +807,7 @@ pub struct Macro2 {
pub name: Name,
pub visibility: RawVisibilityId,
pub ast_id: FileAstId<ast::MacroDef>,
pub def_site: Span,
}

impl Use {
@@ -560,35 +560,34 @@ impl<'a> Ctx<'a> {

fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
let span_map = self.span_map();
let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, &mut |range| {
let path = m.path()?;
let range = path.syntax().text_range();
let path = Interned::new(ModPath::from_src(self.db.upcast(), path, &mut |range| {
span_map.span_for_range(range).ctx
})?);
let ast_id = self.source_ast_id_map.ast_id(m);
let expand_to = hir_expand::ExpandTo::from_call_site(m);
let res = MacroCall {
path,
ast_id,
expand_to,
call_site: span_map.span_for_range(m.syntax().text_range()),
};
let res = MacroCall { path, ast_id, expand_to, call_site: span_map.span_for_range(range) };
Some(id(self.data().macro_calls.alloc(res)))
}

fn lower_macro_rules(&mut self, m: &ast::MacroRules) -> Option<FileItemTreeId<MacroRules>> {
let name = m.name().map(|it| it.as_name())?;
let name = m.name()?;
let def_site = self.span_map().span_for_range(name.syntax().text_range());
let ast_id = self.source_ast_id_map.ast_id(m);

let res = MacroRules { name, ast_id };
let res = MacroRules { name: name.as_name(), ast_id, def_site };
Some(id(self.data().macro_rules.alloc(res)))
}

fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option<FileItemTreeId<Macro2>> {
let name = m.name().map(|it| it.as_name())?;
let name = m.name()?;
let def_site = self.span_map().span_for_range(name.syntax().text_range());

let ast_id = self.source_ast_id_map.ast_id(m);
let visibility = self.lower_visibility(m);

let res = Macro2 { name, ast_id, visibility };
let res = Macro2 { name: name.as_name(), ast_id, visibility, def_site };
Some(id(self.data().macro_defs.alloc(res)))
}
@@ -498,13 +498,23 @@ impl Printer<'_> {
wln!(self, "{}!(...);", path.display(self.db.upcast()));
}
ModItem::MacroRules(it) => {
let MacroRules { name, ast_id } = &self.tree[it];
self.print_ast_id(ast_id.erase());
let MacroRules { name, ast_id, def_site } = &self.tree[it];
let _ = writeln!(
self,
"// AstId: {:?}, Span: {}",
ast_id.erase().into_raw(),
def_site,
);
wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db.upcast()));
}
ModItem::Macro2(it) => {
let Macro2 { name, visibility, ast_id } = &self.tree[it];
self.print_ast_id(ast_id.erase());
let Macro2 { name, visibility, ast_id, def_site } = &self.tree[it];
let _ = writeln!(
self,
"// AstId: {:?}, Span: {}",
ast_id.erase().into_raw(),
def_site,
);
self.print_visibility(*visibility);
wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast()));
}
@@ -272,13 +272,13 @@ pub macro m2() {}
m!();
"#,
expect![[r#"
// AstId: 1
// AstId: 1, Span: 0:1@13..14#0
macro_rules! m { ... }

// AstId: 2
// AstId: 2, Span: 0:2@10..12#0
pub macro m2 { ... }

// AstId: 3, Span: 0:3@0..5#0, ExpandTo: Items
// AstId: 3, Span: 0:3@0..1#0, ExpandTo: Items
m!(...);
"#]],
);
@@ -1342,17 +1342,18 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
let span_map = db.span_map(self.file_id);
let path = self.value.path().and_then(|path| {
path::ModPath::from_src(db, path, &mut |range| {
let range = path.syntax().text_range();
let mod_path = path::ModPath::from_src(db, path, &mut |range| {
span_map.as_ref().span_for_range(range).ctx
})
})?;
let call_site = span_map.span_for_range(range);
Some((call_site, mod_path))
});

let Some(path) = path else {
let Some((call_site, path)) = path else {
return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
};

let call_site = span_map.span_for_range(self.value.syntax().text_range());

macro_call_as_call_id_with_eager(
db,
&AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
@@ -171,7 +171,7 @@ fn main(foo: ()) {
}

fn main(foo: ()) {
/* error: unresolved macro unresolved */"helloworld!"#0:3@207..323#2#;
/* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#0#;
}
}
@@ -201,10 +201,12 @@ impl Attr {
span_map: SpanMapRef<'_>,
id: AttrId,
) -> Option<Attr> {
let path = Interned::new(ModPath::from_src(db, ast.path()?, &mut |range| {
let path = ast.path()?;
let range = path.syntax().text_range();
let path = Interned::new(ModPath::from_src(db, path, &mut |range| {
span_map.span_for_range(range).ctx
})?);
let span = span_map.span_for_range(ast.syntax().text_range());
let span = span_map.span_for_range(range);
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
let value = match lit.kind() {
ast::LiteralKind::String(string) => string.value()?.into(),
@@ -19,14 +19,14 @@ use crate::{
};

macro_rules! register_builtin {
( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => {
( $LAZY:ident: $(($name:ident, $kind: ident) => $expand:ident),* , $EAGER:ident: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinFnLikeExpander {
pub enum $LAZY {
$($kind),*
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum EagerExpander {
pub enum $EAGER {
$($e_kind),*
}

@@ -84,6 +84,17 @@ impl EagerExpander {
pub fn is_include(&self) -> bool {
matches!(self, EagerExpander::Include)
}

pub fn is_include_like(&self) -> bool {
matches!(
self,
EagerExpander::Include | EagerExpander::IncludeStr | EagerExpander::IncludeBytes
)
}

pub fn is_env_or_option_env(&self) -> bool {
matches!(self, EagerExpander::Env | EagerExpander::OptionEnv)
}
}

pub fn find_builtin_macro(

@@ -93,7 +104,7 @@ pub fn find_builtin_macro(
}

register_builtin! {
LAZY:
BuiltinFnLikeExpander:
(column, Column) => line_expand,
(file, File) => file_expand,
(line, Line) => line_expand,

@@ -114,7 +125,7 @@ register_builtin! {
(format_args_nl, FormatArgsNl) => format_args_nl_expand,
(quote, Quote) => quote_expand,

EAGER:
EagerExpander:
(compile_error, CompileError) => compile_error_expand,
(concat, Concat) => concat_expand,
(concat_idents, ConcatIdents) => concat_idents_expand,
@@ -426,22 +437,25 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool {
}
}

fn unquote_str(lit: &tt::Literal) -> Option<String> {
fn unquote_str(lit: &tt::Literal) -> Option<(String, Span)> {
let span = lit.span;
let lit = ast::make::tokens::literal(&lit.to_string());
let token = ast::String::cast(lit)?;
token.value().map(|it| it.into_owned())
token.value().map(|it| (it.into_owned(), span))
}

fn unquote_char(lit: &tt::Literal) -> Option<char> {
fn unquote_char(lit: &tt::Literal) -> Option<(char, Span)> {
let span = lit.span;
let lit = ast::make::tokens::literal(&lit.to_string());
let token = ast::Char::cast(lit)?;
token.value()
token.value().zip(Some(span))
}

fn unquote_byte_string(lit: &tt::Literal) -> Option<Vec<u8>> {
fn unquote_byte_string(lit: &tt::Literal) -> Option<(Vec<u8>, Span)> {
let span = lit.span;
let lit = ast::make::tokens::literal(&lit.to_string());
let token = ast::ByteString::cast(lit)?;
token.value().map(|it| it.into_owned())
token.value().map(|it| (it.into_owned(), span))
}

fn compile_error_expand(
@@ -452,7 +466,7 @@ fn compile_error_expand(
) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
Some(unquoted) => ExpandError::other(unquoted.into_boxed_str()),
Some((unquoted, _)) => ExpandError::other(unquoted.into_boxed_str()),
None => ExpandError::other("`compile_error!` argument must be a string"),
},
_ => ExpandError::other("`compile_error!` argument must be a string"),
@@ -465,10 +479,16 @@ fn concat_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
span: Span,
_: Span,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut text = String::new();
let mut span: Option<Span> = None;
let mut record_span = |s: Span| match &mut span {
Some(span) if span.anchor == s.anchor => span.range = span.range.cover(s.range),
Some(_) => (),
None => span = Some(s),
};
for (i, mut t) in tt.token_trees.iter().enumerate() {
// FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses
// to ensure the right parsing order, so skip the parentheses here. Ideally we'd
@@ -486,11 +506,14 @@ fn concat_expand(
// concat works with string and char literals, so remove any quotes.
// It also works with integer, float and boolean literals, so just use the rest
// as-is.
if let Some(c) = unquote_char(it) {
if let Some((c, span)) = unquote_char(it) {
text.push(c);
record_span(span);
} else {
let component = unquote_str(it).unwrap_or_else(|| it.text.to_string());
let (component, span) =
unquote_str(it).unwrap_or_else(|| (it.text.to_string(), it.span));
text.push_str(&component);
record_span(span);
}
}
// handle boolean literals
@@ -498,6 +521,7 @@ fn concat_expand(
if i % 2 == 0 && (id.text == "true" || id.text == "false") =>
{
text.push_str(id.text.as_str());
record_span(id.span);
}
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
_ => {
@@ -505,6 +529,7 @@ fn concat_expand(
}
}
}
let span = span.unwrap_or(tt.delimiter.open);
ExpandResult { value: quote!(span =>#text), err }
}
@ -512,18 +537,25 @@ fn concat_bytes_expand(
|
|||
_db: &dyn ExpandDatabase,
|
||||
_arg_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
span: Span,
|
||||
call_site: Span,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let mut bytes = Vec::new();
|
||||
let mut err = None;
|
||||
let mut span: Option<Span> = None;
|
||||
let mut record_span = |s: Span| match &mut span {
|
||||
Some(span) if span.anchor == s.anchor => span.range = span.range.cover(s.range),
|
||||
Some(_) => (),
|
||||
None => span = Some(s),
|
||||
};
|
||||
for (i, t) in tt.token_trees.iter().enumerate() {
|
||||
match t {
|
||||
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
|
||||
let token = ast::make::tokens::literal(&lit.to_string());
|
||||
record_span(lit.span);
|
||||
match token.kind() {
|
||||
syntax::SyntaxKind::BYTE => bytes.push(token.text().to_owned()),
|
||||
syntax::SyntaxKind::BYTE_STRING => {
|
||||
let components = unquote_byte_string(lit).unwrap_or_default();
|
||||
let components = unquote_byte_string(lit).map_or(vec![], |(it, _)| it);
|
||||
components.into_iter().for_each(|it| bytes.push(it.to_string()));
|
||||
}
|
||||
_ => {
|
||||
|
@ -534,7 +566,7 @@ fn concat_bytes_expand(
|
|||
}
|
||||
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
|
||||
tt::TokenTree::Subtree(tree) if tree.delimiter.kind == tt::DelimiterKind::Bracket => {
|
||||
if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes) {
|
||||
if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes, &mut record_span) {
|
||||
err.get_or_insert(e);
|
||||
break;
|
||||
}
|
||||
|
@ -546,17 +578,24 @@ fn concat_bytes_expand(
|
|||
}
|
||||
}
|
||||
let value = tt::Subtree {
|
||||
delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
|
||||
delimiter: tt::Delimiter {
|
||||
open: call_site,
|
||||
close: call_site,
|
||||
kind: tt::DelimiterKind::Bracket,
|
||||
},
|
||||
token_trees: {
|
||||
Itertools::intersperse_with(
|
||||
bytes.into_iter().map(|it| {
|
||||
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: it.into(), span }))
|
||||
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
||||
text: it.into(),
|
||||
span: span.unwrap_or(call_site),
|
||||
}))
|
||||
}),
|
||||
|| {
|
||||
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
|
||||
char: ',',
|
||||
spacing: tt::Spacing::Alone,
|
||||
span,
|
||||
span: call_site,
|
||||
}))
|
||||
},
|
||||
)
|
||||
|
@ -569,13 +608,15 @@ fn concat_bytes_expand(
|
|||
fn concat_bytes_expand_subtree(
|
||||
tree: &tt::Subtree,
|
||||
bytes: &mut Vec<String>,
|
||||
mut record_span: impl FnMut(Span),
|
||||
) -> Result<(), ExpandError> {
|
||||
for (ti, tt) in tree.token_trees.iter().enumerate() {
|
||||
match tt {
|
||||
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
|
||||
let lit = ast::make::tokens::literal(&lit.to_string());
|
||||
tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => {
|
||||
let lit = ast::make::tokens::literal(&it.to_string());
|
||||
match lit.kind() {
|
||||
syntax::SyntaxKind::BYTE | syntax::SyntaxKind::INT_NUMBER => {
|
||||
record_span(it.span);
|
||||
bytes.push(lit.text().to_owned())
|
||||
}
|
||||
_ => {
|
||||
|
@ -635,7 +676,7 @@ fn relative_file(
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
|
||||
fn parse_string(tt: &tt::Subtree) -> Result<(String, Span), ExpandError> {
|
||||
tt.token_trees
|
||||
.first()
|
||||
.and_then(|tt| match tt {
|
||||
|
@ -675,7 +716,7 @@ pub fn include_input_to_file_id(
|
|||
arg_id: MacroCallId,
|
||||
arg: &tt::Subtree,
|
||||
) -> Result<FileId, ExpandError> {
|
||||
relative_file(db, arg_id, &parse_string(arg)?, false)
|
||||
relative_file(db, arg_id, &parse_string(arg)?.0, false)
|
||||
}
|
||||
|
||||
fn include_bytes_expand(
|
||||
|
@ -701,7 +742,7 @@ fn include_str_expand(
|
|||
tt: &tt::Subtree,
|
||||
span: Span,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let path = match parse_string(tt) {
|
||||
let (path, span) = match parse_string(tt) {
|
||||
Ok(it) => it,
|
||||
Err(e) => {
|
||||
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
|
||||
|
@ -736,7 +777,7 @@ fn env_expand(
|
|||
tt: &tt::Subtree,
|
||||
span: Span,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let key = match parse_string(tt) {
|
||||
let (key, span) = match parse_string(tt) {
|
||||
Ok(it) => it,
|
||||
Err(e) => {
|
||||
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
|
||||
|
@ -766,18 +807,24 @@ fn option_env_expand(
|
|||
db: &dyn ExpandDatabase,
|
||||
arg_id: MacroCallId,
|
||||
tt: &tt::Subtree,
|
||||
span: Span,
|
||||
call_site: Span,
|
||||
) -> ExpandResult<tt::Subtree> {
|
||||
let key = match parse_string(tt) {
|
||||
let (key, span) = match parse_string(tt) {
|
||||
Ok(it) => it,
|
||||
Err(e) => {
|
||||
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
|
||||
return ExpandResult::new(
|
||||
tt::Subtree::empty(DelimSpan { open: call_site, close: call_site }),
|
||||
e,
|
||||
)
|
||||
}
|
||||
};
|
||||
let dollar_crate = dollar_crate(span);
|
||||
let dollar_crate = dollar_crate(call_site);
|
||||
let expanded = match get_env_inner(db, arg_id, &key) {
|
||||
None => quote! {span => #dollar_crate::option::Option::None::<&str> },
|
||||
Some(s) => quote! {span => #dollar_crate::option::Option::Some(#s) },
|
||||
None => quote! {call_site => #dollar_crate::option::Option::None::<&str> },
|
||||
Some(s) => {
|
||||
let s = quote! (span => #s);
|
||||
quote! {call_site => #dollar_crate::option::Option::Some(#s) }
|
||||
}
|
||||
};
|
||||
|
||||
ExpandResult::ok(expanded)
|
||||
|
|
|
@ -323,6 +323,9 @@ impl HirFileIdExt for HirFileId {
|
|||
}
|
||||
|
||||
pub trait MacroFileIdExt {
|
||||
fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool;
|
||||
fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool;
|
||||
fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId>;
|
||||
fn expansion_level(self, db: &dyn ExpandDatabase) -> u32;
|
||||
/// If this is a macro call, returns the syntax node of the call.
|
||||
fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode>;
|
||||
|
@ -389,18 +392,34 @@ impl MacroFileIdExt for MacroFileId {
|
|||
db.lookup_intern_macro_call(self.macro_call_id).def.is_include()
|
||||
}
|
||||
|
||||
fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool {
|
||||
db.lookup_intern_macro_call(self.macro_call_id).def.is_include_like()
|
||||
}
|
||||
|
||||
fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool {
|
||||
db.lookup_intern_macro_call(self.macro_call_id).def.is_env_or_option_env()
|
||||
}
|
||||
|
||||
fn is_eager(&self, db: &dyn ExpandDatabase) -> bool {
|
||||
let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
|
||||
let loc = db.lookup_intern_macro_call(self.macro_call_id);
|
||||
matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
|
||||
}
|
||||
|
||||
fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId> {
|
||||
let loc = db.lookup_intern_macro_call(self.macro_call_id);
|
||||
match &loc.kind {
|
||||
MacroCallKind::FnLike { eager, .. } => eager.as_ref().map(|it| it.arg_id),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool {
|
||||
let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
|
||||
let loc = db.lookup_intern_macro_call(self.macro_call_id);
|
||||
matches!(loc.kind, MacroCallKind::Attr { .. })
|
||||
}
|
||||
|
||||
fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
|
||||
let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
|
||||
let loc = db.lookup_intern_macro_call(self.macro_call_id);
|
||||
loc.def.is_attribute_derive()
|
||||
}
|
||||
}
|
||||
|
@ -478,6 +497,14 @@ impl MacroDefId {
|
|||
pub fn is_include(&self) -> bool {
|
||||
matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include())
|
||||
}
|
||||
|
||||
pub fn is_include_like(&self) -> bool {
|
||||
matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include_like())
|
||||
}
|
||||
|
||||
pub fn is_env_or_option_env(&self) -> bool {
|
||||
matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_env_or_option_env())
|
||||
}
|
||||
}
|
||||
|
||||
impl MacroCallLoc {
|
||||
|
@ -659,7 +686,7 @@ impl MacroCallKind {
|
|||
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
|
||||
// FIXME: can be expensive to create, we should check the use sites and maybe replace them with
|
||||
// simpler function calls if the map is only used once
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct ExpansionInfo {
|
||||
pub expanded: InMacroFile<SyntaxNode>,
|
||||
/// The argument TokenTree or item for attributes
|
||||
|
@ -689,6 +716,22 @@ impl ExpansionInfo {
|
|||
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
|
||||
///
|
||||
/// Note this does a linear search through the entire backing vector of the spanmap.
|
||||
pub fn map_range_down_exact(
|
||||
&self,
|
||||
span: Span,
|
||||
) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> {
|
||||
let tokens = self
|
||||
.exp_map
|
||||
.ranges_with_span_exact(span)
|
||||
.flat_map(move |range| self.expanded.value.covering_element(range).into_token());
|
||||
|
||||
Some(InMacroFile::new(self.expanded.file_id, tokens))
|
||||
}
|
||||
|
||||
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
|
||||
/// Unlike [`map_range_down_exact`], this will consider spans that contain the given span.
|
||||
///
|
||||
/// Note this does a linear search through the entire backing vector of the spanmap.
|
||||
pub fn map_range_down(
|
||||
&self,
|
||||
span: Span,
|
||||
|
@ -745,7 +788,7 @@ impl ExpansionInfo {
|
|||
InFile::new(
|
||||
self.arg.file_id,
|
||||
arg_map
|
||||
.ranges_with_span(span)
|
||||
.ranges_with_span_exact(span)
|
||||
.filter(|range| range.intersect(arg_range).is_some())
|
||||
.collect(),
|
||||
)
|
||||
|
|
|
@ -266,10 +266,11 @@ mod tests {
|
|||
|
||||
let quoted = quote!(DUMMY =>#a);
|
||||
assert_eq!(quoted.to_string(), "hello");
|
||||
let t = format!("{quoted:?}");
|
||||
let t = format!("{quoted:#?}");
|
||||
expect![[r#"
|
||||
SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }
|
||||
IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
|
||||
SUBTREE $$ 937550:0@0..0#0 937550:0@0..0#0
|
||||
IDENT hello 937550:0@0..0#0"#]]
|
||||
.assert_eq(&t);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
//! Span maps for real files and macro expansions.
|
||||
|
||||
use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContextId};
|
||||
use syntax::{AstNode, TextRange};
|
||||
use triomphe::Arc;
|
||||
|
|
|
@ -2617,6 +2617,15 @@ impl Macro {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn is_env_or_option_env(&self, db: &dyn HirDatabase) -> bool {
|
||||
match self.id {
|
||||
MacroId::Macro2Id(it) => {
|
||||
matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env())
|
||||
}
|
||||
MacroId::MacroRulesId(_) | MacroId::ProcMacroId(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
|
||||
matches!(self.kind(db), MacroKind::Attr)
|
||||
}
|
||||
|
|
|
@ -681,19 +681,20 @@ impl<'db> SemanticsImpl<'db> {
|
|||
.filter(|&(_, include_file_id)| include_file_id == file_id)
|
||||
{
|
||||
let macro_file = invoc.as_macro_file();
|
||||
let expansion_info = cache
|
||||
.entry(macro_file)
|
||||
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
|
||||
let expansion_info = cache.entry(macro_file).or_insert_with(|| {
|
||||
let exp_info = macro_file.expansion_info(self.db.upcast());
|
||||
|
||||
let InMacroFile { file_id, value } = exp_info.expanded();
|
||||
self.cache(value, file_id.into());
|
||||
|
||||
exp_info
|
||||
});
|
||||
|
||||
// Create the source analyzer for the macro call scope
|
||||
let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
{
|
||||
let InMacroFile { file_id: macro_file, value } = expansion_info.expanded();
|
||||
self.cache(value, macro_file.into());
|
||||
}
|
||||
|
||||
// get mapped token in the include! macro file
|
||||
let span = span::SpanData {
|
||||
|
@ -702,7 +703,7 @@ impl<'db> SemanticsImpl<'db> {
|
|||
ctx: SyntaxContextId::ROOT,
|
||||
};
|
||||
let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
|
||||
expansion_info.map_range_down(span)
|
||||
expansion_info.map_range_down_exact(span)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
@ -753,22 +754,20 @@ impl<'db> SemanticsImpl<'db> {
|
|||
let def_map = sa.resolver.def_map();
|
||||
|
||||
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
|
||||
|
||||
let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
|
||||
let expansion_info = cache
|
||||
.entry(macro_file)
|
||||
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
|
||||
let exp_info = cache.entry(macro_file).or_insert_with(|| {
|
||||
let exp_info = macro_file.expansion_info(self.db.upcast());
|
||||
|
||||
{
|
||||
let InMacroFile { file_id, value } = expansion_info.expanded();
|
||||
let InMacroFile { file_id, value } = exp_info.expanded();
|
||||
self.cache(value, file_id.into());
|
||||
}
|
||||
|
||||
let InMacroFile { file_id, value: mapped_tokens } =
|
||||
expansion_info.map_range_down(span)?;
|
||||
exp_info
|
||||
});
|
||||
|
||||
let InMacroFile { file_id, value: mapped_tokens } = exp_info.map_range_down(span)?;
|
||||
let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
|
||||
|
||||
// if the length changed we have found a mapping for the token
|
||||
// we have found a mapping for the token if the vec is non-empty
|
||||
let res = mapped_tokens.is_empty().not().then_some(());
|
||||
// requeue the tokens we got from mapping our current token down
|
||||
stack.push((HirFileId::from(file_id), mapped_tokens));
|
||||
|
@ -851,7 +850,13 @@ impl<'db> SemanticsImpl<'db> {
|
|||
// remove any other token in this macro input, all their mappings are the
|
||||
// same as this one
|
||||
tokens.retain(|t| !text_range.contains_range(t.text_range()));
|
||||
process_expansion_for_token(&mut stack, file_id)
|
||||
|
||||
process_expansion_for_token(&mut stack, file_id).or(file_id
|
||||
.eager_arg(self.db.upcast())
|
||||
.and_then(|arg| {
|
||||
// also descend into eager expansions
|
||||
process_expansion_for_token(&mut stack, arg.as_macro_file())
|
||||
}))
|
||||
} else if let Some(meta) = ast::Meta::cast(parent) {
|
||||
// attribute we failed expansion for earlier, this might be a derive invocation
|
||||
// or derive helper attribute
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
//! Completes environment variables defined by Cargo (https://doc.rust-lang.org/cargo/reference/environment-variables.html)
|
||||
use hir::Semantics;
|
||||
use ide_db::{syntax_helpers::node_ext::macro_call_for_string_token, RootDatabase};
|
||||
use syntax::ast::{self, IsString};
|
||||
use hir::MacroFileIdExt;
|
||||
use ide_db::syntax_helpers::node_ext::macro_call_for_string_token;
|
||||
use syntax::{
|
||||
ast::{self, IsString},
|
||||
AstToken,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
completions::Completions, context::CompletionContext, CompletionItem, CompletionItemKind,
|
||||
|
@ -32,10 +35,24 @@ const CARGO_DEFINED_VARS: &[(&str, &str)] = &[
|
|||
pub(crate) fn complete_cargo_env_vars(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
original: &ast::String,
|
||||
expanded: &ast::String,
|
||||
) -> Option<()> {
|
||||
guard_env_macro(expanded, &ctx.sema)?;
|
||||
let range = expanded.text_range_between_quotes()?;
|
||||
let is_in_env_expansion = ctx
|
||||
.sema
|
||||
.hir_file_for(&expanded.syntax().parent()?)
|
||||
.macro_file()
|
||||
.map_or(false, |it| it.is_env_or_option_env(ctx.sema.db));
|
||||
if !is_in_env_expansion {
|
||||
let call = macro_call_for_string_token(expanded)?;
|
||||
let makro = ctx.sema.resolve_macro_call(&call)?;
|
||||
// We won't map into `option_env` as that generates `None` for non-existent env vars
|
||||
// so fall back to this lookup
|
||||
if !makro.is_env_or_option_env(ctx.sema.db) {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
let range = original.text_range_between_quotes()?;
|
||||
|
||||
CARGO_DEFINED_VARS.iter().for_each(|&(var, detail)| {
|
||||
let mut item = CompletionItem::new(CompletionItemKind::Keyword, range, var);
|
||||
|
@ -46,18 +63,6 @@ pub(crate) fn complete_cargo_env_vars(
|
|||
Some(())
|
||||
}
|
||||
|
||||
fn guard_env_macro(string: &ast::String, semantics: &Semantics<'_, RootDatabase>) -> Option<()> {
|
||||
let call = macro_call_for_string_token(string)?;
|
||||
let name = call.path()?.segment()?.name_ref()?;
|
||||
let makro = semantics.resolve_macro_call(&call)?;
|
||||
let db = semantics.db;
|
||||
|
||||
match name.text().as_str() {
|
||||
"env" | "option_env" if makro.kind(db) == hir::MacroKind::BuiltIn => Some(()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::tests::{check_edit, completion_list};
|
||||
|
@ -68,7 +73,7 @@ mod tests {
|
|||
&format!(
|
||||
r#"
|
||||
#[rustc_builtin_macro]
|
||||
macro_rules! {macro_name} {{
|
||||
macro {macro_name} {{
|
||||
($var:literal) => {{ 0 }}
|
||||
}}
|
||||
|
||||
|
@ -80,7 +85,7 @@ mod tests {
|
|||
&format!(
|
||||
r#"
|
||||
#[rustc_builtin_macro]
|
||||
macro_rules! {macro_name} {{
|
||||
macro {macro_name} {{
|
||||
($var:literal) => {{ 0 }}
|
||||
}}
|
||||
|
||||
|
|
|
@ -207,7 +207,7 @@ pub fn completions(
|
|||
CompletionAnalysis::String { original, expanded: Some(expanded) } => {
|
||||
completions::extern_abi::complete_extern_abi(acc, ctx, expanded);
|
||||
completions::format_string::format_string(acc, ctx, original, expanded);
|
||||
completions::env_vars::complete_cargo_env_vars(acc, ctx, expanded);
|
||||
completions::env_vars::complete_cargo_env_vars(acc, ctx, original, expanded);
|
||||
}
|
||||
CompletionAnalysis::UnexpandedAttrTT {
|
||||
colon_prefix,
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
use std::mem::discriminant;
|
||||
use std::{iter, mem::discriminant};
|
||||
|
||||
use crate::{
|
||||
doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget,
|
||||
RangeInfo, TryToNav,
|
||||
};
|
||||
use hir::{AsAssocItem, AssocItem, DescendPreference, ModuleDef, Semantics};
|
||||
use hir::{AsAssocItem, AssocItem, DescendPreference, MacroFileIdExt, ModuleDef, Semantics};
|
||||
use ide_db::{
|
||||
base_db::{AnchoredPath, FileId, FileLoader},
|
||||
defs::{Definition, IdentClass},
|
||||
|
@ -74,11 +74,13 @@ pub(crate) fn goto_definition(
|
|||
.filter_map(|token| {
|
||||
let parent = token.parent()?;
|
||||
|
||||
if let Some(tt) = ast::TokenTree::cast(parent.clone()) {
|
||||
if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) {
|
||||
if let Some(token) = ast::String::cast(token.clone()) {
|
||||
if let Some(x) = try_lookup_include_path(sema, token, file_id) {
|
||||
return Some(vec![x]);
|
||||
}
|
||||
}
|
||||
|
||||
if ast::TokenTree::can_cast(parent.kind()) {
|
||||
if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token) {
|
||||
return Some(vec![x]);
|
||||
}
|
||||
|
@ -111,24 +113,17 @@ pub(crate) fn goto_definition(
|
|||
|
||||
fn try_lookup_include_path(
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
tt: ast::TokenTree,
|
||||
token: SyntaxToken,
|
||||
token: ast::String,
|
||||
file_id: FileId,
|
||||
) -> Option<NavigationTarget> {
|
||||
let token = ast::String::cast(token)?;
|
||||
let path = token.value()?.into_owned();
|
||||
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
|
||||
let name = macro_call.path()?.segment()?.name_ref()?;
|
||||
if !matches!(&*name.text(), "include" | "include_str" | "include_bytes") {
|
||||
let file = sema.hir_file_for(&token.syntax().parent()?).macro_file()?;
|
||||
if !iter::successors(Some(file), |file| file.parent(sema.db).macro_file())
|
||||
// Check that we are in the eager argument expansion of an include macro
|
||||
.any(|file| file.is_include_like_macro(sema.db) && file.eager_arg(sema.db).is_none())
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
// Ignore non-built-in macros to account for shadowing
|
||||
if let Some(it) = sema.resolve_macro_call(¯o_call) {
|
||||
if !matches!(it.kind(sema.db), hir::MacroKind::BuiltIn) {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
let path = token.value()?;
|
||||
|
||||
let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
|
||||
let size = sema.db.file_text(file_id).len().try_into().ok()?;
|
||||
|
@ -1531,6 +1526,26 @@ fn main() {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_include_has_eager_input() {
|
||||
check(
|
||||
r#"
|
||||
//- /main.rs
|
||||
#[rustc_builtin_macro]
|
||||
macro_rules! include_str {}
|
||||
#[rustc_builtin_macro]
|
||||
macro_rules! concat {}
|
||||
|
||||
fn main() {
|
||||
let str = include_str!(concat!("foo", ".tx$0t"));
|
||||
}
|
||||
//- /foo.txt
|
||||
// empty
|
||||
//^file
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn goto_doc_include_str() {
|
||||
check(
|
||||
|
|
|
@ -94,7 +94,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
|
|||
<span class="brace">}</span>
|
||||
|
||||
|
||||
<span class="macro default_library library">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"foo/"</span><span class="comma macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
|
||||
<span class="macro default_library library">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro default_library library macro">concat</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"foo/"</span><span class="string_literal macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
|
||||
|
||||
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
|
||||
<span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="parenthesis macro">(</span><span class="numeric_literal macro">92</span><span class="comma macro">,</span><span class="parenthesis macro">)</span><span class="operator macro">.</span><span class="field library macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
|
||||
|
|
|
@ -8,7 +8,12 @@ use expect_test::expect;
|
|||
|
||||
#[test]
|
||||
fn test_derive_empty() {
|
||||
assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"], expect!["SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"]);
|
||||
assert_expand(
|
||||
"DeriveEmpty",
|
||||
r#"struct S;"#,
|
||||
expect!["SUBTREE $$ 1 1"],
|
||||
expect!["SUBTREE $$ 42:2@0..100#0 42:2@0..100#0"],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -21,15 +26,15 @@ fn test_derive_error() {
|
|||
IDENT compile_error 1
|
||||
PUNCH ! [alone] 1
|
||||
SUBTREE () 1 1
|
||||
LITERAL "#[derive(DeriveError)] struct S ;" 1
|
||||
LITERAL "#[derive(DeriveError)] struct S ;"1
|
||||
PUNCH ; [alone] 1"##]],
|
||||
expect![[r##"
|
||||
SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
IDENT compile_error SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH ! [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
SUBTREE () SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL "#[derive(DeriveError)] struct S ;" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH ; [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"##]],
|
||||
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
|
||||
IDENT compile_error 42:2@0..100#0
|
||||
PUNCH ! [alone] 42:2@0..100#0
|
||||
SUBTREE () 42:2@0..100#0 42:2@0..100#0
|
||||
LITERAL "#[derive(DeriveError)] struct S ;"42:2@0..100#0
|
||||
PUNCH ; [alone] 42:2@0..100#0"##]],
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -42,20 +47,20 @@ fn test_fn_like_macro_noop() {
|
|||
SUBTREE $$ 1 1
|
||||
IDENT ident 1
|
||||
PUNCH , [alone] 1
|
||||
LITERAL 0 1
|
||||
LITERAL 01
|
||||
PUNCH , [alone] 1
|
||||
LITERAL 1 1
|
||||
LITERAL 11
|
||||
PUNCH , [alone] 1
|
||||
SUBTREE [] 1 1"#]],
|
||||
expect![[r#"
|
||||
SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
IDENT ident SpanData { range: 0..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 5..6, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL 0 SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 8..9, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL 1 SpanData { range: 10..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 11..12, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
SUBTREE [] SpanData { range: 13..14, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 14..15, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
|
||||
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
|
||||
IDENT ident 42:2@0..5#0
|
||||
PUNCH , [alone] 42:2@5..6#0
|
||||
LITERAL 042:2@7..8#0
|
||||
PUNCH , [alone] 42:2@8..9#0
|
||||
LITERAL 142:2@10..11#0
|
||||
PUNCH , [alone] 42:2@11..12#0
|
||||
SUBTREE [] 42:2@13..14#0 42:2@14..15#0"#]],
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -70,10 +75,10 @@ fn test_fn_like_macro_clone_ident_subtree() {
|
|||
PUNCH , [alone] 1
|
||||
SUBTREE [] 1 1"#]],
|
||||
expect![[r#"
|
||||
SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
IDENT ident SpanData { range: 0..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 5..6, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
SUBTREE [] SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
|
||||
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
|
||||
IDENT ident 42:2@0..5#0
|
||||
PUNCH , [alone] 42:2@5..6#0
|
||||
SUBTREE [] 42:2@7..8#0 42:2@7..8#0"#]],
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -86,8 +91,8 @@ fn test_fn_like_macro_clone_raw_ident() {
|
|||
SUBTREE $$ 1 1
|
||||
IDENT r#async 1"#]],
|
||||
expect![[r#"
|
||||
SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
IDENT r#async SpanData { range: 0..7, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
|
||||
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
|
||||
IDENT r#async 42:2@0..7#0"#]],
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -100,8 +105,8 @@ fn test_fn_like_fn_like_span_join() {
|
|||
SUBTREE $$ 1 1
|
||||
IDENT r#joined 1"#]],
|
||||
expect![[r#"
|
||||
SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
IDENT r#joined SpanData { range: 0..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
|
||||
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
|
||||
IDENT r#joined 42:2@0..11#0"#]],
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -116,10 +121,10 @@ fn test_fn_like_fn_like_span_ops() {
|
|||
IDENT resolved_at_def_site 1
|
||||
IDENT start_span 1"#]],
|
||||
expect![[r#"
|
||||
SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
IDENT set_def_site SpanData { range: 0..150, anchor: SpanAnchor(FileId(41), 1), ctx: SyntaxContextId(0) }
|
||||
IDENT resolved_at_def_site SpanData { range: 13..33, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
IDENT start_span SpanData { range: 34..34, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
|
||||
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
|
||||
IDENT set_def_site 41:1@0..150#0
|
||||
IDENT resolved_at_def_site 42:2@13..33#0
|
||||
IDENT start_span 42:2@34..34#0"#]],
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -130,22 +135,22 @@ fn test_fn_like_mk_literals() {
|
|||
r#""#,
|
||||
expect![[r#"
|
||||
SUBTREE $$ 1 1
|
||||
LITERAL b"byte_string" 1
|
||||
LITERAL 'c' 1
|
||||
LITERAL "string" 1
|
||||
LITERAL 3.14f64 1
|
||||
LITERAL 3.14 1
|
||||
LITERAL 123i64 1
|
||||
LITERAL 123 1"#]],
|
||||
LITERAL b"byte_string"1
|
||||
LITERAL 'c'1
|
||||
LITERAL "string"1
|
||||
LITERAL 3.14f641
|
||||
LITERAL 3.141
|
||||
LITERAL 123i641
|
||||
LITERAL 1231"#]],
|
||||
expect![[r#"
|
||||
SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL b"byte_string" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL 'c' SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL "string" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL 3.14f64 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL 3.14 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL 123i64 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL 123 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
|
||||
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
|
||||
LITERAL b"byte_string"42:2@0..100#0
|
||||
LITERAL 'c'42:2@0..100#0
|
||||
LITERAL "string"42:2@0..100#0
|
||||
LITERAL 3.14f6442:2@0..100#0
|
||||
LITERAL 3.1442:2@0..100#0
|
||||
LITERAL 123i6442:2@0..100#0
|
||||
LITERAL 12342:2@0..100#0"#]],
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -159,9 +164,9 @@ fn test_fn_like_mk_idents() {
|
|||
IDENT standard 1
|
||||
IDENT r#raw 1"#]],
|
||||
expect![[r#"
|
||||
SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
IDENT standard SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
IDENT r#raw SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
|
||||
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
|
||||
IDENT standard 42:2@0..100#0
|
||||
IDENT r#raw 42:2@0..100#0"#]],
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -172,48 +177,48 @@ fn test_fn_like_macro_clone_literals() {
|
|||
r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##, 'a', b'b', c"null""###,
|
||||
expect![[r###"
|
||||
SUBTREE $$ 1 1
|
||||
LITERAL 1u16 1
|
||||
LITERAL 1u161
|
||||
PUNCH , [alone] 1
|
||||
LITERAL 2_u32 1
|
||||
LITERAL 2_u321
|
||||
PUNCH , [alone] 1
|
||||
PUNCH - [alone] 1
|
||||
LITERAL 4i64 1
|
||||
LITERAL 4i641
|
||||
PUNCH , [alone] 1
|
||||
LITERAL 3.14f32 1
|
||||
LITERAL 3.14f321
|
||||
PUNCH , [alone] 1
|
||||
LITERAL "hello bridge" 1
|
||||
LITERAL "hello bridge"1
|
||||
PUNCH , [alone] 1
|
||||
LITERAL "suffixed"suffix 1
|
||||
LITERAL "suffixed"suffix1
|
||||
PUNCH , [alone] 1
|
||||
LITERAL r##"raw"## 1
|
||||
LITERAL r##"raw"##1
|
||||
PUNCH , [alone] 1
|
||||
LITERAL 'a' 1
|
||||
LITERAL 'a'1
|
||||
PUNCH , [alone] 1
|
||||
LITERAL b'b' 1
|
||||
LITERAL b'b'1
|
||||
PUNCH , [alone] 1
|
||||
LITERAL c"null" 1"###]],
|
||||
LITERAL c"null"1"###]],
|
||||
expect![[r###"
|
||||
SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 4..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL 2_u32 SpanData { range: 6..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 11..12, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH - [alone] SpanData { range: 13..14, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL 4i64 SpanData { range: 14..18, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 43..44, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL "suffixed"suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 61..62, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL r##"raw"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 73..74, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL 'a' SpanData { range: 75..78, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 78..79, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL b'b' SpanData { range: 80..84, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH , [alone] SpanData { range: 84..85, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL c"null" SpanData { range: 86..93, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]],
|
||||
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
|
||||
LITERAL 1u1642:2@0..4#0
|
||||
PUNCH , [alone] 42:2@4..5#0
|
||||
LITERAL 2_u3242:2@6..11#0
|
||||
PUNCH , [alone] 42:2@11..12#0
|
||||
PUNCH - [alone] 42:2@13..14#0
|
||||
LITERAL 4i6442:2@14..18#0
|
||||
PUNCH , [alone] 42:2@18..19#0
|
||||
LITERAL 3.14f3242:2@20..27#0
|
||||
PUNCH , [alone] 42:2@27..28#0
|
||||
LITERAL "hello bridge"42:2@29..43#0
|
||||
PUNCH , [alone] 42:2@43..44#0
|
||||
LITERAL "suffixed"suffix42:2@45..61#0
|
||||
PUNCH , [alone] 42:2@61..62#0
|
||||
LITERAL r##"raw"##42:2@63..73#0
|
||||
PUNCH , [alone] 42:2@73..74#0
|
||||
LITERAL 'a'42:2@75..78#0
|
||||
PUNCH , [alone] 42:2@78..79#0
|
||||
LITERAL b'b'42:2@80..84#0
|
||||
PUNCH , [alone] 42:2@84..85#0
|
||||
LITERAL c"null"42:2@86..93#0"###]],
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -231,15 +236,15 @@ fn test_attr_macro() {
|
|||
IDENT compile_error 1
|
||||
PUNCH ! [alone] 1
|
||||
SUBTREE () 1 1
|
||||
LITERAL "#[attr_error(some arguments)] mod m {}" 1
|
||||
LITERAL "#[attr_error(some arguments)] mod m {}"1
|
||||
PUNCH ; [alone] 1"##]],
|
||||
expect![[r##"
|
||||
SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
IDENT compile_error SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH ! [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
SUBTREE () SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
LITERAL "#[attr_error(some arguments)] mod m {}" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
|
||||
PUNCH ; [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"##]],
|
||||
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
|
||||
IDENT compile_error 42:2@0..100#0
|
||||
PUNCH ! [alone] 42:2@0..100#0
|
||||
SUBTREE () 42:2@0..100#0 42:2@0..100#0
|
||||
LITERAL "#[attr_error(some arguments)] mod m {}"42:2@0..100#0
|
||||
PUNCH ; [alone] 42:2@0..100#0"##]],
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -91,7 +91,7 @@ fn assert_expand_impl(
|
|||
let res = expander
|
||||
.expand(macro_name, fixture.into_subtree(call_site), attr, def_site, call_site, mixed_site)
|
||||
.unwrap();
|
||||
expect_s.assert_eq(&format!("{res:?}"));
|
||||
expect_s.assert_eq(&format!("{res:#?}"));
|
||||
}
|
||||
|
||||
pub(crate) fn list() -> Vec<String> {
|
||||
|
|
|
@ -26,9 +26,19 @@ use salsa::{InternId, InternValue};
|
|||
use crate::MacroCallId;
|
||||
|
||||
/// Interned [`SyntaxContextData`].
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct SyntaxContextId(InternId);
|
||||
|
||||
impl fmt::Debug for SyntaxContextId {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if f.alternate() {
|
||||
write!(f, "{}", self.0.as_u32())
|
||||
} else {
|
||||
f.debug_tuple("SyntaxContextId").field(&self.0).finish()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl salsa::InternKey for SyntaxContextId {
|
||||
fn from_intern_id(v: salsa::InternId) -> Self {
|
||||
SyntaxContextId(v)
|
||||
|
|
|
@ -44,7 +44,7 @@ pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId =
|
|||
|
||||
pub type Span = SpanData<SyntaxContextId>;
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct SpanData<Ctx> {
|
||||
/// The text range of this span, relative to the anchor.
|
||||
/// We need the anchor for incrementality, as storing absolute ranges will require
|
||||
|
@ -56,6 +56,26 @@ pub struct SpanData<Ctx> {
|
|||
pub ctx: Ctx,
|
||||
}
|
||||
|
||||
impl<Ctx: fmt::Debug> fmt::Debug for SpanData<Ctx> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if f.alternate() {
|
||||
fmt::Debug::fmt(&self.anchor.file_id.index(), f)?;
|
||||
f.write_char(':')?;
|
||||
fmt::Debug::fmt(&self.anchor.ast_id.into_raw(), f)?;
|
||||
f.write_char('@')?;
|
||||
fmt::Debug::fmt(&self.range, f)?;
|
||||
f.write_char('#')?;
|
||||
self.ctx.fmt(f)
|
||||
} else {
|
||||
f.debug_struct("SpanData")
|
||||
.field("range", &self.range)
|
||||
.field("anchor", &self.anchor)
|
||||
.field("ctx", &self.ctx)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: Copy> SpanData<Ctx> {
|
||||
pub fn eq_ignoring_ctx(self, other: Self) -> bool {
|
||||
self.anchor == other.anchor && self.range == other.range
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
//! A map that maps a span to every position in a file. Usually maps a span to some range of positions.
|
||||
//! Allows bidirectional lookup.
|
||||
|
||||
use std::hash::Hash;
|
||||
use std::{fmt, hash::Hash};
|
||||
|
||||
use stdx::{always, itertools::Itertools};
|
||||
use syntax::{TextRange, TextSize};
|
||||
|
@ -52,7 +52,7 @@ where
|
|||
/// Returns all [`TextRange`]s that correspond to the given span.
|
||||
///
|
||||
/// Note this does a linear search through the entire backing vector.
|
||||
pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
|
||||
pub fn ranges_with_span_exact(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
|
||||
where
|
||||
S: Copy,
|
||||
{
|
||||
|
@ -65,6 +65,25 @@ where
|
|||
})
|
||||
}
|
||||
|
||||
/// Returns all [`TextRange`]s whose spans contain the given span.
|
||||
///
|
||||
/// Note this does a linear search through the entire backing vector.
|
||||
pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
|
||||
where
|
||||
S: Copy,
|
||||
{
|
||||
self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
|
||||
if s.anchor != span.anchor {
|
||||
return None;
|
||||
}
|
||||
if !s.range.contains_range(span.range) {
|
||||
return None;
|
||||
}
|
||||
let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
|
||||
Some(TextRange::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the span at the given position.
|
||||
pub fn span_at(&self, offset: TextSize) -> SpanData<S> {
|
||||
let entry = self.spans.partition_point(|&(it, _)| it <= offset);
|
||||
|
@ -94,6 +113,16 @@ pub struct RealSpanMap {
|
|||
end: TextSize,
|
||||
}
|
||||
|
||||
impl fmt::Display for RealSpanMap {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
writeln!(f, "RealSpanMap({:?}):", self.file_id)?;
|
||||
for span in self.pairs.iter() {
|
||||
writeln!(f, "{}: {}", u32::from(span.0), span.1.into_raw().into_u32())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl RealSpanMap {
|
||||
/// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id).
|
||||
pub fn absolute(file_id: FileId) -> Self {
|
||||
|
|
|
@ -194,21 +194,6 @@ impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> VacantEntry<'a, A, V> {
|
|||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
struct A(i32);
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
struct B(i32);
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
struct C(i32);
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
struct D(i32);
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
struct E(i32);
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
struct F(i32);
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
struct J(i32);
|
||||
|
||||
#[test]
|
||||
fn test_varieties() {
|
||||
fn assert_send<T: Send>() {}
|
||||
|
|
|
@ -177,17 +177,19 @@ fn print_debug_subtree<S: fmt::Debug>(
|
|||
let align = " ".repeat(level);
|
||||
|
||||
let Delimiter { kind, open, close } = &subtree.delimiter;
|
||||
let aux = match kind {
|
||||
DelimiterKind::Invisible => format!("$$ {:?} {:?}", open, close),
|
||||
DelimiterKind::Parenthesis => format!("() {:?} {:?}", open, close),
|
||||
DelimiterKind::Brace => format!("{{}} {:?} {:?}", open, close),
|
||||
DelimiterKind::Bracket => format!("[] {:?} {:?}", open, close),
|
||||
let delim = match kind {
|
||||
DelimiterKind::Invisible => "$$",
|
||||
DelimiterKind::Parenthesis => "()",
|
||||
DelimiterKind::Brace => "{}",
|
||||
DelimiterKind::Bracket => "[]",
|
||||
};
|
||||
|
||||
if subtree.token_trees.is_empty() {
|
||||
write!(f, "{align}SUBTREE {aux}")?;
|
||||
} else {
|
||||
writeln!(f, "{align}SUBTREE {aux}")?;
|
||||
write!(f, "{align}SUBTREE {delim} ",)?;
|
||||
fmt::Debug::fmt(&open, f)?;
|
||||
write!(f, " ")?;
|
||||
fmt::Debug::fmt(&close, f)?;
|
||||
if !subtree.token_trees.is_empty() {
|
||||
writeln!(f)?;
|
||||
for (idx, child) in subtree.token_trees.iter().enumerate() {
|
||||
print_debug_token(f, child, level + 1)?;
|
||||
if idx != subtree.token_trees.len() - 1 {
|
||||
|
@ -208,16 +210,24 @@ fn print_debug_token<S: fmt::Debug>(
|
|||
|
||||
match tkn {
|
||||
TokenTree::Leaf(leaf) => match leaf {
|
||||
Leaf::Literal(lit) => write!(f, "{}LITERAL {} {:?}", align, lit.text, lit.span)?,
|
||||
Leaf::Punct(punct) => write!(
|
||||
f,
|
||||
"{}PUNCH {} [{}] {:?}",
|
||||
align,
|
||||
punct.char,
|
||||
if punct.spacing == Spacing::Alone { "alone" } else { "joint" },
|
||||
punct.span
|
||||
)?,
|
||||
Leaf::Ident(ident) => write!(f, "{}IDENT {} {:?}", align, ident.text, ident.span)?,
|
||||
Leaf::Literal(lit) => {
|
||||
write!(f, "{}LITERAL {}", align, lit.text)?;
|
||||
fmt::Debug::fmt(&lit.span, f)?;
|
||||
}
|
||||
Leaf::Punct(punct) => {
|
||||
write!(
|
||||
f,
|
||||
"{}PUNCH {} [{}] ",
|
||||
align,
|
||||
punct.char,
|
||||
if punct.spacing == Spacing::Alone { "alone" } else { "joint" },
|
||||
)?;
|
||||
fmt::Debug::fmt(&punct.span, f)?;
|
||||
}
|
||||
Leaf::Ident(ident) => {
|
||||
write!(f, "{}IDENT {} ", align, ident.text)?;
|
||||
fmt::Debug::fmt(&ident.span, f)?;
|
||||
}
|
||||
},
|
||||
TokenTree::Subtree(subtree) => {
|
||||
print_debug_subtree(f, subtree, level)?;
|
||||
|
|