mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-26 13:03:31 +00:00
simplify and document
This commit is contained in:
parent 94e59c9c56
commit 8db88df758
6 changed files with 53 additions and 48 deletions
@@ -675,15 +675,24 @@ impl<'db> SemanticsImpl<'db> {
                     ast::MacroCall(mcall) => mcall,
                     // attribute we failed expansion for earlier, this might be a derive invocation
                     // so try downmapping the token into the pseudo derive expansion
+                    // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
                     ast::Meta(meta) => {
                         let attr = meta.parent_attr()?;
                         let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
                         let call_id = self.with_ctx(|ctx| {
-                            let (_, call_id, _) = ctx.attr_to_derive_macro_call(token.with_value(&adt), token.with_value(attr))?;
+                            let (_, call_id, _) = ctx.attr_to_derive_macro_call(
+                                token.with_value(&adt),
+                                token.with_value(attr),
+                            )?;
                             Some(call_id)
                         })?;
                         let file_id = call_id.as_file();
-                        return process_expansion_for_token(&mut stack, file_id, Some(adt.into()), token.as_ref());
+                        return process_expansion_for_token(
+                            &mut stack,
+                            file_id,
+                            Some(adt.into()),
+                            token.as_ref(),
+                        );
                     },
                     _ => return None,
                 }
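For orientation, a short illustration (not part of this commit; the derive names are arbitrary) of the situation the ast::Meta branch above handles: the token under the cursor sits inside a derive input list, which has no real expansion of its own, so it is downmapped into the pseudo derive expansion instead.

// Illustration only: with the pseudo derive expansion in place, a token inside
// `Clone` or `Debug` below can be downmapped into `#[Clone] #[Debug] ();`,
// where fallback path resolution in hir::Semantics identifies the derives
// (e.g. for hover or goto-definition on the derive name).
#[derive(Clone, Debug)]
struct S;

fn main() {
    println!("{:?}", S.clone());
}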
@@ -1170,6 +1170,9 @@ impl DefCollector<'_> {
                     len = idx;
                 }

+                // We treat the #[derive] macro as an attribute call, but we do not resolve it for nameres collection.
+                // This is just a trick to be able to resolve the input to derives as proper paths.
+                // Check the comment in [`builtin_attr_macro`].
                 let call_id = attr_macro_as_call_id(
                     self.db,
                     file_ast_id,
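The three added comments describe a deliberate split: the #[derive] attribute gets a macro call id, but its expansion is never fed back into collection. A toy model of that split, with every name and type invented purely for illustration:

// Toy sketch, not rust-analyzer code: the collector records a call id for the
// `#[derive]` attribute but does not collect its expansion, so the call cannot
// add or remove items; it only exists so the derive input paths can be
// resolved later by the IDE layer.
#[derive(Clone, Copy, PartialEq, Debug)]
struct MacroCallId(u32);

#[derive(Default)]
struct Collector {
    next_id: u32,
    // derive-attribute calls remembered purely for later path resolution
    derive_attr_calls: Vec<MacroCallId>,
}

impl Collector {
    fn register_derive_attr(&mut self) -> MacroCallId {
        let id = MacroCallId(self.next_id);
        self.next_id += 1;
        self.derive_attr_calls.push(id);
        id
    }
}

fn main() {
    let mut collector = Collector::default();
    let id = collector.register_derive_attr();
    assert_eq!(id, MacroCallId(0));
    assert_eq!(collector.derive_attr_calls.len(), 1);
}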
@@ -82,30 +82,47 @@ fn dummy_attr_expand(
     ExpandResult::ok(tt.clone())
 }

+/// We generate a very specific expansion here, as we do not actually expand the `#[derive]` attribute
+/// itself in name res, but we do want to expand it to something for the IDE layer, so that the input
+/// derive attributes can be downmapped, and resolved as proper paths.
+/// This is basically a hack, that simplifies the hacks we need in a lot of ide layer places to
+/// somewhat inconsistently resolve derive attributes.
+///
+/// As such, we expand `#[derive(Foo, bar::Bar)]` into
+/// ```
+/// #[Foo]
+/// #[bar::Bar]
+/// ();
+/// ```
+/// which allows fallback path resolution in hir::Semantics to properly identify our derives.
+/// Since we do not expand the attribute in nameres though, we keep the original item.
+///
+/// The ideal expansion here would be for the `#[derive]` to re-emit the annotated item and somehow
+/// use the input paths in its output as well.
+/// But that would bring two problems with it, for one every derive would duplicate the item token tree
+/// wasting a lot of memory, and it would also require some way to use a path in a way that makes it
+/// always resolve as a derive without nameres recollecting them.
+/// So this hacky approach is a lot more friendly for us, though it does require a bit of support in
+/// [`hir::Semantics`] to make this work.
 fn derive_attr_expand(
     db: &dyn AstDatabase,
     id: MacroCallId,
     tt: &tt::Subtree,
 ) -> ExpandResult<tt::Subtree> {
-    // we generate a very specific expansion here, as we do not actually expand the `#[derive]` attribute
-    // itself in name res, but we do want to expand it to something for the IDE layer, so that the input
-    // derive attributes can be downmapped, and resolved
-    // This is basically a hack, to get rid of hacks in the IDE layer that slowly accumulate more and more
-    // in various places.
-
-    // we transform the token tree of `#[derive(Foo, bar::Bar)]` into
-    // ```
-    // #[Foo]
-    // #[bar::Bar]
-    // ();
-    // ```
-    // which allows fallback path resolution in hir::Semantics to properly identify our derives
     let loc = db.lookup_intern_macro_call(id);
     let derives = match &loc.kind {
         MacroCallKind::Attr { attr_args, .. } => &attr_args.0,
         _ => return ExpandResult::ok(tt.clone()),
     };

+    let mk_leaf = |char| {
+        tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+            char,
+            spacing: tt::Spacing::Alone,
+            id: tt::TokenId::unspecified(),
+        }))
+    };
+
     let mut token_trees = Vec::new();
     for (comma, group) in &derives
         .token_trees

@@ -119,38 +119,13 @@ fn derive_attr_expand(
         if comma {
             continue;
         }
-        let wrap = |leaf| tt::TokenTree::Leaf(tt::Leaf::Punct(leaf));
-        token_trees.push(wrap(tt::Punct {
-            char: '#',
-            spacing: tt::Spacing::Alone,
-            id: tt::TokenId::unspecified(),
-        }));
-        token_trees.push(wrap(tt::Punct {
-            char: '[',
-            spacing: tt::Spacing::Alone,
-            id: tt::TokenId::unspecified(),
-        }));
+        token_trees.push(mk_leaf('#'));
+        token_trees.push(mk_leaf('['));
         token_trees.extend(group.cloned().map(tt::TokenTree::Leaf));
-        token_trees.push(wrap(tt::Punct {
-            char: ']',
-            spacing: tt::Spacing::Alone,
-            id: tt::TokenId::unspecified(),
-        }));
-        token_trees.push(wrap(tt::Punct {
-            char: '(',
-            spacing: tt::Spacing::Alone,
-            id: tt::TokenId::unspecified(),
-        }));
-        token_trees.push(wrap(tt::Punct {
-            char: ')',
-            spacing: tt::Spacing::Alone,
-            id: tt::TokenId::unspecified(),
-        }));
-        token_trees.push(wrap(tt::Punct {
-            char: ';',
-            spacing: tt::Spacing::Alone,
-            id: tt::TokenId::unspecified(),
-        }));
+        token_trees.push(mk_leaf(']'));
     }
+    token_trees.push(mk_leaf('('));
+    token_trees.push(mk_leaf(')'));
+    token_trees.push(mk_leaf(';'));
     ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees })
 }
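The new doc comment and the mk_leaf helper together describe a fairly simple token transformation. A self-contained sketch of that transformation over plain strings (simplified on purpose; the real code builds a tt::Subtree of punct leaves, not a string):

// Sketch only: mirrors what derive_attr_expand emits, using strings instead of
// token trees. Each derive path is wrapped in `#[...]`, and a single `();` is
// appended after the loop so the whole expansion reads as attributes applied
// to a unit expression.
fn pseudo_derive_expansion(derive_paths: &[&str]) -> String {
    let mut out = String::new();
    for path in derive_paths {
        out.push_str("#[");
        out.push_str(path);
        out.push_str("] ");
    }
    out.push_str("();");
    out
}

fn main() {
    // For `#[derive(Foo, bar::Bar)]` the pseudo expansion reads `#[Foo] #[bar::Bar] ();`.
    assert_eq!(pseudo_derive_expansion(&["Foo", "bar::Bar"]), "#[Foo] #[bar::Bar] ();");
}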
@@ -166,6 +166,7 @@ pub enum MacroCallKind {
         /// Outer attributes are counted first, then inner attributes. This does not support
         /// out-of-line modules, which may have attributes spread across 2 files!
         invoc_attr_index: u32,
+        /// Whether this attribute is the `#[derive]` attribute.
         is_derive: bool,
     },
 }
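A hedged example of how the newly documented flag might be consumed; the import path and the helper function are assumptions for illustration, and only the variant fields shown in the hunk are relied on:

// Sketch: distinguishing the pseudo `#[derive]` attribute call from a real
// attribute macro call via `is_derive` (function name and import path are
// illustrative assumptions).
use hir_expand::MacroCallKind;

fn is_pseudo_derive(kind: &MacroCallKind) -> bool {
    matches!(kind, MacroCallKind::Attr { is_derive: true, .. })
}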
@@ -16,7 +16,7 @@ use hir::{ItemInNs, MacroDef, ModuleDef, Name, Semantics};
 use itertools::Itertools;
 use syntax::{
     ast::{self, make, HasLoopBody},
-    AstNode, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent, T,
+    AstNode, AstToken, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent, T,
 };

 use crate::{defs::Definition, RootDatabase};
@@ -111,7 +111,7 @@ env UPDATE_EXPECT=1 cargo qt

 After adding a new inline test you need to run `cargo test -p xtask` and also update the test data as described above.

 Note [`api_walkthrough`](https://github.com/rust-analyzer/rust-analyzer/blob/2fb6af89eb794f775de60b82afe56b6f986c2a40/crates/ra_syntax/src/lib.rs#L190-L348)
 in particular: it shows off various methods of working with syntax tree.

 See [#93](https://github.com/rust-analyzer/rust-analyzer/pull/93) for an example PR which fixes a bug in the grammar.