mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-27 13:33:31 +00:00
Replace ID based TokenMap with proper relative text-ranges / spans
parent f79439caed
commit 890eb17b4e
80 changed files with 1816 additions and 2046 deletions
Cargo.lock (generated, 2 changes)

@@ -1263,6 +1263,7 @@ dependencies = [
  "serde_json",
  "snap",
  "stdx",
+ "text-size",
  "tracing",
  "triomphe",
  "tt",
@@ -2010,6 +2011,7 @@ version = "0.0.0"
 dependencies = [
  "smol_str",
  "stdx",
+ "text-size",
 ]
 
 [[package]]
@@ -8,11 +8,12 @@ use test_utils::{
     ESCAPED_CURSOR_MARKER,
 };
 use triomphe::Arc;
-use tt::token_id::{Leaf, Subtree, TokenTree};
+use tt::{Leaf, Subtree, TokenTree};
 use vfs::{file_set::FileSet, VfsPath};
 
 use crate::{
     input::{CrateName, CrateOrigin, LangCrateOrigin},
+    span::SpanData,
     Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env,
     FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
     ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId,
@@ -539,10 +540,10 @@ struct IdentityProcMacroExpander;
 impl ProcMacroExpander for IdentityProcMacroExpander {
     fn expand(
         &self,
-        subtree: &Subtree,
-        _: Option<&Subtree>,
+        subtree: &Subtree<SpanData>,
+        _: Option<&Subtree<SpanData>>,
         _: &Env,
-    ) -> Result<Subtree, ProcMacroExpansionError> {
+    ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
         Ok(subtree.clone())
     }
 }
@@ -553,10 +554,10 @@ struct AttributeInputReplaceProcMacroExpander;
 impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
     fn expand(
         &self,
-        _: &Subtree,
-        attrs: Option<&Subtree>,
+        _: &Subtree<SpanData>,
+        attrs: Option<&Subtree<SpanData>>,
         _: &Env,
-    ) -> Result<Subtree, ProcMacroExpansionError> {
+    ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
         attrs
             .cloned()
             .ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
@@ -568,11 +569,11 @@ struct MirrorProcMacroExpander;
 impl ProcMacroExpander for MirrorProcMacroExpander {
     fn expand(
         &self,
-        input: &Subtree,
-        _: Option<&Subtree>,
+        input: &Subtree<SpanData>,
+        _: Option<&Subtree<SpanData>>,
         _: &Env,
-    ) -> Result<Subtree, ProcMacroExpansionError> {
-        fn traverse(input: &Subtree) -> Subtree {
+    ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
+        fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
             let mut token_trees = vec![];
             for tt in input.token_trees.iter().rev() {
                 let tt = match tt {
@@ -595,13 +596,13 @@ struct ShortenProcMacroExpander;
 impl ProcMacroExpander for ShortenProcMacroExpander {
     fn expand(
         &self,
-        input: &Subtree,
-        _: Option<&Subtree>,
+        input: &Subtree<SpanData>,
+        _: Option<&Subtree<SpanData>>,
         _: &Env,
-    ) -> Result<Subtree, ProcMacroExpansionError> {
+    ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
         return Ok(traverse(input));
 
-        fn traverse(input: &Subtree) -> Subtree {
+        fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
             let token_trees = input
                 .token_trees
                 .iter()
@@ -613,7 +614,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
         Subtree { delimiter: input.delimiter, token_trees }
     }
 
-    fn modify_leaf(leaf: &Leaf) -> Leaf {
+    fn modify_leaf(leaf: &Leaf<SpanData>) -> Leaf<SpanData> {
         let mut leaf = leaf.clone();
         match &mut leaf {
             Leaf::Literal(it) => {
@@ -13,9 +13,10 @@ use la_arena::{Arena, Idx};
 use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::SmolStr;
 use triomphe::Arc;
-use tt::token_id::Subtree;
 use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
 
+use crate::span::SpanData;
+
 // Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
 // then the crate for the proc-macro hasn't been built yet as the build data is missing.
 pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
@@ -255,10 +256,10 @@ pub enum ProcMacroKind {
 pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
     fn expand(
         &self,
-        subtree: &Subtree,
-        attrs: Option<&Subtree>,
+        subtree: &tt::Subtree<SpanData>,
+        attrs: Option<&tt::Subtree<SpanData>>,
         env: &Env,
-    ) -> Result<Subtree, ProcMacroExpansionError>;
+    ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>;
 }
 
 #[derive(Debug)]
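The signature changes above rest on the `tt` token-tree types having become generic over their span type, so a single `Subtree<S>` definition serves both dummy spans in tests and the new anchored spans. A minimal, self-contained sketch of that idea, using simplified stand-ins rather than the real `tt` crate definitions:

// Simplified stand-ins for the real `tt` types, for illustration only.
#[derive(Clone, Debug)]
pub enum TokenTree<S> {
    Ident { text: String, span: S }, // every leaf carries a span of type S
    Subtree(Subtree<S>),
}

#[derive(Clone, Debug)]
pub struct Subtree<S> {
    pub token_trees: Vec<TokenTree<S>>,
}

fn main() {
    // The same shape works with a unit span (tests) or a real anchored span.
    let tt: Subtree<()> = Subtree {
        token_trees: vec![TokenTree::Ident { text: "struct".into(), span: () }],
    };
    println!("{tt:?}");
}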
@@ -5,6 +5,7 @@
 mod input;
 mod change;
 pub mod fixture;
+pub mod span;
 
 use std::panic;
 
crates/base-db/src/span.rs (new file, 166 lines)
@@ -0,0 +1,166 @@
+use std::fmt;
+
+use salsa::InternId;
+use vfs::FileId;
+
+pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
+
+// The first index is always the root node's AstId
+pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
+    la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+pub struct SyntaxContext;
+
+pub type SpanData = tt::SpanData<SpanAnchor>;
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub struct SpanAnchor {
+    pub file_id: HirFileId,
+    pub ast_id: ErasedFileAstId,
+}
+
+impl fmt::Debug for SpanAnchor {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id.into_raw()).finish()
+    }
+}
+
+impl tt::Span for SpanAnchor {
+    const DUMMY: Self = SpanAnchor { file_id: HirFileId(0), ast_id: ROOT_ERASED_FILE_AST_ID };
+}
+
+/// Input to the analyzer is a set of files, where each file is identified by
+/// `FileId` and contains source code. However, another source of source code in
+/// Rust is macros: each macro can be thought of as producing a "temporary
+/// file". To assign an id to such a file, we use the id of the macro call that
+/// produced the file. So, a `HirFileId` is either a `FileId` (source code
+/// written by user), or a `MacroCallId` (source code produced by macro).
+///
+/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
+/// containing the call plus the offset of the macro call in the file. Note that
+/// this is a recursive definition! However, the size_of of `HirFileId` is
+/// finite (because everything bottoms out at the real `FileId`) and small
+/// (`MacroCallId` uses the location interning. You can check details here:
+/// <https://en.wikipedia.org/wiki/String_interning>).
+///
+/// The two variants are encoded in a single u32, differentiated by the MSB.
+/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
+/// `MacroCallId`.
+#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct HirFileId(u32);
+
+impl From<HirFileId> for u32 {
+    fn from(value: HirFileId) -> Self {
+        value.0
+    }
+}
+
+impl From<u32> for HirFileId {
+    fn from(value: u32) -> Self {
+        HirFileId(value)
+    }
+}
+
+impl From<MacroCallId> for HirFileId {
+    fn from(value: MacroCallId) -> Self {
+        value.as_file()
+    }
+}
+
+impl fmt::Debug for HirFileId {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.repr().fmt(f)
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroFile {
+    pub macro_call_id: MacroCallId,
+}
+
+/// `MacroCallId` identifies a particular macro invocation, like
+/// `println!("Hello, {}", world)`.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroCallId(salsa::InternId);
+crate::impl_intern_key!(MacroCallId);
+
+impl MacroCallId {
+    pub fn as_file(self) -> HirFileId {
+        MacroFile { macro_call_id: self }.into()
+    }
+
+    pub fn as_macro_file(self) -> MacroFile {
+        MacroFile { macro_call_id: self }
+    }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub enum HirFileIdRepr {
+    FileId(FileId),
+    MacroFile(MacroFile),
+}
+
+impl fmt::Debug for HirFileIdRepr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.0).finish(),
+            Self::MacroFile(arg0) => {
+                f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish()
+            }
+        }
+    }
+}
+
+impl From<FileId> for HirFileId {
+    fn from(FileId(id): FileId) -> Self {
+        assert!(id < Self::MAX_FILE_ID);
+        HirFileId(id)
+    }
+}
+
+impl From<MacroFile> for HirFileId {
+    fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self {
+        let id = id.as_u32();
+        assert!(id < Self::MAX_FILE_ID);
+        HirFileId(id | Self::MACRO_FILE_TAG_MASK)
+    }
+}
+
+impl HirFileId {
+    const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
+    const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
+
+    #[inline]
+    pub fn is_macro(self) -> bool {
+        self.0 & Self::MACRO_FILE_TAG_MASK != 0
+    }
+
+    #[inline]
+    pub fn macro_file(self) -> Option<MacroFile> {
+        match self.0 & Self::MACRO_FILE_TAG_MASK {
+            0 => None,
+            _ => Some(MacroFile {
+                macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
+            }),
+        }
+    }
+
+    #[inline]
+    pub fn file_id(self) -> Option<FileId> {
+        match self.0 & Self::MACRO_FILE_TAG_MASK {
+            0 => Some(FileId(self.0)),
+            _ => None,
+        }
+    }
+
+    #[inline]
+    pub fn repr(self) -> HirFileIdRepr {
+        match self.0 & Self::MACRO_FILE_TAG_MASK {
+            0 => HirFileIdRepr::FileId(FileId(self.0)),
+            _ => HirFileIdRepr::MacroFile(MacroFile {
+                macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
+            }),
+        }
+    }
+}
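The MSB-tagging scheme documented above is compact but easy to get wrong, so here is a minimal, self-contained sketch of the encode/decode round-trip, using plain u32 values instead of the real FileId/MacroCallId wrappers:

const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

// Tag a 31-bit macro call id by setting the MSB.
fn encode_macro_call(raw: u32) -> u32 {
    assert!(raw < MACRO_FILE_TAG_MASK); // must fit in the low 31 bits
    raw | MACRO_FILE_TAG_MASK
}

// Returns (is_macro, payload): an MSB that is set means "macro file".
fn decode(value: u32) -> (bool, u32) {
    (value & MACRO_FILE_TAG_MASK != 0, value & !MACRO_FILE_TAG_MASK)
}

fn main() {
    let file = 7u32; // MSB clear: a plain FileId
    let mac = encode_macro_call(7); // MSB set: a MacroCallId
    assert_eq!(decode(file), (false, 7));
    assert_eq!(decode(mac), (true, 7));
}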
@@ -2,36 +2,37 @@ use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{expect, Expect};
 use mbe::syntax_node_to_token_tree;
 use syntax::{ast, AstNode};
+use tt::Span;
 
 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
 
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+struct DummyFile;
+impl Span for DummyFile {
+    const DUMMY: Self = DummyFile;
+}
+
 fn assert_parse_result(input: &str, expected: CfgExpr) {
-    let (tt, _) = {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    syntax_node_to_token_tree(tt.syntax())
-    };
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default());
     let cfg = CfgExpr::parse(&tt);
     assert_eq!(cfg, expected);
 }
 
 fn check_dnf(input: &str, expect: Expect) {
-    let (tt, _) = {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    syntax_node_to_token_tree(tt.syntax())
-    };
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default());
     let cfg = CfgExpr::parse(&tt);
     let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
     expect.assert_eq(&actual);
 }
 
 fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
-    let (tt, _) = {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    syntax_node_to_token_tree(tt.syntax())
-    };
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default());
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@@ -40,11 +41,9 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
 
 #[track_caller]
 fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
-    let (tt, _) = {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    syntax_node_to_token_tree(tt.syntax())
-    };
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default());
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
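These tests now pass an explicit span anchor into `syntax_node_to_token_tree` instead of getting an ID-based token map back. The `DummyFile` scaffolding relies on the `Span` trait exposing a `DUMMY` constant; a small sketch of that pattern, with an assumed trait shape rather than the full real `tt::Span`:

// Assumed shape of the Span-with-DUMMY pattern; the real trait may carry
// more bounds and items than shown here.
trait Span: Copy {
    const DUMMY: Self;
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct DummyFile;

impl Span for DummyFile {
    const DUMMY: Self = DummyFile;
}

// Callers that just need "some span" can reach for the associated const.
fn anchor_of<S: Span>() -> S {
    S::DUMMY
}

fn main() {
    let _anchor: DummyFile = anchor_of();
}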
@@ -7,7 +7,10 @@ mod tests;
 
 use std::{hash::Hash, ops, slice::Iter as SliceIter};
 
-use base_db::CrateId;
+use base_db::{
+    span::{ErasedFileAstId, SpanAnchor},
+    CrateId,
+};
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;
 use hir_expand::{
@@ -28,8 +31,8 @@ use crate::{
     lang_item::LangItem,
     nameres::{ModuleOrigin, ModuleSource},
     src::{HasChildSource, HasSource},
-    AdtId, AssocItemLoc, AttrDefId, EnumId, GenericParamId, ItemLoc, LocalEnumVariantId,
-    LocalFieldId, Lookup, MacroId, VariantId,
+    AdtId, AssocItemLoc, AttrDefId, EnumId, GenericDefId, GenericParamId, ItemLoc,
+    LocalEnumVariantId, LocalFieldId, Lookup, MacroId, VariantId,
 };
 
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
@@ -415,11 +418,27 @@ impl AttrsWithOwner {
             AttrDefId::StaticId(it) => attrs_from_item_tree_assoc(db, it),
             AttrDefId::FunctionId(it) => attrs_from_item_tree_assoc(db, it),
             AttrDefId::TypeAliasId(it) => attrs_from_item_tree_assoc(db, it),
-            AttrDefId::GenericParamId(it) => match it {
+            AttrDefId::GenericParamId(it) => {
+                let ast_id = |p| match p {
+                    GenericDefId::AdtId(AdtId::StructId(it)) => {
+                        erased_ast_id_from_item_tree(db, it)
+                    }
+                    GenericDefId::AdtId(AdtId::EnumId(it)) => erased_ast_id_from_item_tree(db, it),
+                    GenericDefId::AdtId(AdtId::UnionId(it)) => erased_ast_id_from_item_tree(db, it),
+                    GenericDefId::TraitId(it) => erased_ast_id_from_item_tree(db, it),
+                    GenericDefId::TraitAliasId(it) => erased_ast_id_from_item_tree(db, it),
+                    GenericDefId::ImplId(it) => erased_ast_id_from_item_tree(db, it),
+                    GenericDefId::EnumVariantId(it) => erased_ast_id_from_item_tree(db, it.parent),
+                    GenericDefId::TypeAliasId(it) => erased_ast_id_from_item_tree_assoc(db, it),
+                    GenericDefId::FunctionId(it) => erased_ast_id_from_item_tree_assoc(db, it),
+                    GenericDefId::ConstId(it) => erased_ast_id_from_item_tree_assoc(db, it),
+                };
+                match it {
                     GenericParamId::ConstParamId(it) => {
                         let src = it.parent().child_source(db);
                         RawAttrs::from_attrs_owner(
                             db.upcast(),
+                            SpanAnchor { file_id: src.file_id, ast_id: ast_id(it.parent()) },
                             src.with_value(&src.value[it.local_id()]),
                         )
                     }
@@ -427,14 +446,20 @@ impl AttrsWithOwner {
                     let src = it.parent().child_source(db);
                     RawAttrs::from_attrs_owner(
                         db.upcast(),
+                        SpanAnchor { file_id: src.file_id, ast_id: ast_id(it.parent()) },
                         src.with_value(&src.value[it.local_id()]),
                     )
                 }
                 GenericParamId::LifetimeParamId(it) => {
                     let src = it.parent.child_source(db);
-                    RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id]))
+                    RawAttrs::from_attrs_owner(
+                        db.upcast(),
+                        SpanAnchor { file_id: src.file_id, ast_id: ast_id(it.parent) },
+                        src.with_value(&src.value[it.local_id]),
+                    )
+                }
                 }
-            },
+            }
             AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),
             AttrDefId::ExternCrateId(it) => attrs_from_item_tree_loc(db, it),
             AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it),
@@ -638,6 +663,26 @@ fn any_has_attrs(
     id.lookup(db).source(db).map(ast::AnyHasAttrs::new)
 }
 
+fn erased_ast_id_from_item_tree<N: ItemTreeNode>(
+    db: &dyn DefDatabase,
+    lookup: impl Lookup<Data = ItemLoc<N>>,
+) -> ErasedFileAstId {
+    let id = lookup.lookup(db).id;
+    let tree = id.item_tree(db);
+    let mod_item = N::id_to_mod_item(id.value);
+    mod_item.ast_id(&tree).erase()
+}
+
+fn erased_ast_id_from_item_tree_assoc<N: ItemTreeNode>(
+    db: &dyn DefDatabase,
+    lookup: impl Lookup<Data = AssocItemLoc<N>>,
+) -> ErasedFileAstId {
+    let id = lookup.lookup(db).id;
+    let tree = id.item_tree(db);
+    let mod_item = N::id_to_mod_item(id.value);
+    mod_item.ast_id(&tree).erase()
+}
+
 fn attrs_from_item_tree<N: ItemTreeNode>(db: &dyn DefDatabase, id: ItemTreeId<N>) -> RawAttrs {
     let tree = id.item_tree(db);
     let mod_item = N::id_to_mod_item(id.value);
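Both helpers above end in `.erase()`, which forgets the node type of a typed AST id and keeps only the index. A hypothetical, self-contained sketch of that erasure, with stand-in types rather than the real ast_id_map definitions:

use std::marker::PhantomData;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct ErasedAstId(u32); // just an index, no node type attached

#[derive(Clone, Copy, Debug)]
struct AstId<N> {
    raw: ErasedAstId,
    _ty: PhantomData<fn() -> N>, // node type exists only at compile time
}

impl<N> AstId<N> {
    // Dropping the type parameter keeps only the index.
    fn erase(self) -> ErasedAstId {
        self.raw
    }
}

struct Struct; // a stand-in AST node type

fn main() {
    let id: AstId<Struct> = AstId { raw: ErasedAstId(1), _ty: PhantomData };
    assert_eq!(id.erase(), ErasedAstId(1));
}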
@@ -1,17 +1,18 @@
 //! This module contains tests for doc-expression parsing.
 //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
 
+use base_db::span::SpanAnchor;
 use mbe::syntax_node_to_token_tree;
 use syntax::{ast, AstNode};
+use tt::Span;
 
 use crate::attr::{DocAtom, DocExpr};
 
 fn assert_parse_result(input: &str, expected: DocExpr) {
-    let (tt, _) = {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    syntax_node_to_token_tree(tt.syntax())
-    };
+    let tt =
+        syntax_node_to_token_tree(tt.syntax(), SpanAnchor::DUMMY, 0.into(), &Default::default());
     let cfg = DocExpr::parse(&tt);
     assert_eq!(cfg, expected);
 }
@@ -663,7 +663,7 @@ impl<'a> AssocItemCollector<'a> {
                     self.module_id.local_id,
                     MacroCallKind::Attr {
                         ast_id,
-                        attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
+                        attr_args: Arc::new(tt::Subtree::empty()),
                         invoc_attr_index: attr.id,
                     },
                     attr.path().clone(),
@@ -1,6 +1,9 @@
 //! Macro expansion utilities.
 
-use base_db::CrateId;
+use base_db::{
+    span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
+    CrateId,
+};
 use cfg::CfgOptions;
 use drop_bomb::DropBomb;
 use hir_expand::{
@@ -118,7 +121,17 @@ impl Expander {
     }
 
     pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
-        Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
+        Attrs::filter(
+            db,
+            self.krate,
+            RawAttrs::new(
+                db.upcast(),
+                // Using `ROOT_ERASED_FILE_AST_ID` here is fine as this is only used for cfg checking
+                SpanAnchor { file_id: self.current_file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+                owner,
+                &self.hygiene,
+            ),
+        )
     }
 
     pub(crate) fn cfg_options(&self) -> &CfgOptions {
@@ -21,7 +21,7 @@ use crate::{
     db::DefDatabase,
     dyn_map::{keys, DynMap},
     expander::Expander,
-    item_tree::{AttrOwner, ItemTree},
+    item_tree::ItemTree,
     lower::LowerCtx,
     nameres::{DefMap, MacroSubNs},
     src::{HasChildSource, HasSource},
@@ -250,7 +250,10 @@ impl GenericParams {
         &mut self,
         lower_ctx: &LowerCtx<'_>,
         node: &dyn HasGenericParams,
-        add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
+        add_param_attrs: impl FnMut(
+            Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
+            ast::GenericParam,
+        ),
     ) {
         if let Some(params) = node.generic_param_list() {
             self.fill_params(lower_ctx, params, add_param_attrs)
@@ -275,7 +278,10 @@ impl GenericParams {
         &mut self,
         lower_ctx: &LowerCtx<'_>,
         params: ast::GenericParamList,
-        mut add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
+        mut add_param_attrs: impl FnMut(
+            Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
+            ast::GenericParam,
+        ),
     ) {
         for type_or_const_param in params.type_or_const_params() {
             match type_or_const_param {
@@ -297,7 +303,7 @@ impl GenericParams {
                         type_param.type_bound_list(),
                         Either::Left(type_ref),
                     );
-                    add_param_attrs(idx.into(), ast::GenericParam::TypeParam(type_param));
+                    add_param_attrs(Either::Left(idx), ast::GenericParam::TypeParam(type_param));
                 }
                 ast::TypeOrConstParam::Const(const_param) => {
                     let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
@@ -310,7 +316,7 @@ impl GenericParams {
                         default: ConstRef::from_const_param(lower_ctx, &const_param),
                     };
                     let idx = self.type_or_consts.alloc(param.into());
-                    add_param_attrs(idx.into(), ast::GenericParam::ConstParam(const_param));
+                    add_param_attrs(Either::Left(idx), ast::GenericParam::ConstParam(const_param));
                 }
             }
         }
@@ -325,7 +331,7 @@ impl GenericParams {
                 lifetime_param.type_bound_list(),
                 Either::Right(lifetime_ref),
             );
-            add_param_attrs(idx.into(), ast::GenericParam::LifetimeParam(lifetime_param));
+            add_param_attrs(Either::Right(idx), ast::GenericParam::LifetimeParam(lifetime_param));
         }
     }
 
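The `add_param_attrs` callback above now receives a typed `Either` of arena indices and converts it to an attribute owner only at the use site. A self-contained sketch of that callback shape, with stand-in index types and a local `Either` so the snippet runs without the `either` crate:

// Local stand-in for the `either` crate used in the diff.
#[derive(Debug)]
enum Either<L, R> {
    Left(L),
    Right(R),
}

// Stand-ins for Idx<TypeOrConstParamData> and Idx<LifetimeParamData>.
type TypeOrConstIdx = u32;
type LifetimeIdx = u32;

fn fill_params(mut add_param_attrs: impl FnMut(Either<TypeOrConstIdx, LifetimeIdx>, &str)) {
    add_param_attrs(Either::Left(0), "T"); // a type parameter
    add_param_attrs(Either::Right(0), "'a"); // a lifetime parameter
}

fn main() {
    fill_params(|idx, name| println!("{idx:?}: {name}"));
}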
@@ -43,7 +43,10 @@ use std::{
 };
 
 use ast::{AstNode, HasName, StructKind};
-use base_db::CrateId;
+use base_db::{
+    span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
+    CrateId,
+};
 use either::Either;
 use hir_expand::{
     ast_id_map::{AstIdNode, FileAstId},
@@ -119,7 +122,7 @@ impl ItemTree {
         let mut item_tree = match_ast! {
             match syntax {
                 ast::SourceFile(file) => {
-                    top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene()));
+                    top_attrs = Some(RawAttrs::new(db.upcast(), SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, &file, ctx.hygiene()));
                     ctx.lower_module_items(&file)
                 },
                 ast::MacroItems(items) => {
@@ -2,12 +2,14 @@
 
 use std::collections::hash_map::Entry;
 
+use base_db::span::ErasedFileAstId;
 use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
 use syntax::ast::{self, HasModuleItem, HasTypeBounds};
 
 use crate::{
     generics::{GenericParams, TypeParamData, TypeParamProvenance},
     type_ref::{LifetimeRef, TraitBoundModifier, TraitRef},
+    LocalLifetimeParamId, LocalTypeOrConstParamId,
 };
 
 use super::*;
@@ -21,6 +23,7 @@ pub(super) struct Ctx<'a> {
     tree: ItemTree,
     source_ast_id_map: Arc<AstIdMap>,
     body_ctx: crate::lower::LowerCtx<'a>,
+    file: HirFileId,
 }
 
 impl<'a> Ctx<'a> {
@@ -30,6 +33,7 @@ impl<'a> Ctx<'a> {
             tree: ItemTree::default(),
             source_ast_id_map: db.ast_id_map(file),
            body_ctx: crate::lower::LowerCtx::with_file_id(db, file),
+            file,
         }
     }
 
@@ -77,9 +81,18 @@ impl<'a> Ctx<'a> {
     }
 
     pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
-        self.tree
-            .attrs
-            .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.hygiene()));
+        self.tree.attrs.insert(
+            AttrOwner::TopLevel,
+            RawAttrs::new(
+                self.db.upcast(),
+                SpanAnchor {
+                    file_id: self.file,
+                    ast_id: self.source_ast_id_map.ast_id(block).erase(),
+                },
+                block,
+                self.hygiene(),
+            ),
+        );
         self.tree.top_level = block
             .statements()
             .filter_map(|stmt| match stmt {
@@ -109,8 +122,7 @@ impl<'a> Ctx<'a> {
     }
 
     fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> {
-        let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene());
-        let item: ModItem = match item {
+        let mod_item: ModItem = match item {
             ast::Item::Struct(ast) => self.lower_struct(ast)?.into(),
             ast::Item::Union(ast) => self.lower_union(ast)?.into(),
             ast::Item::Enum(ast) => self.lower_enum(ast)?.into(),
@@ -129,10 +141,15 @@ impl<'a> Ctx<'a> {
             ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
             ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
         };
+        let attrs = RawAttrs::new(
+            self.db.upcast(),
+            SpanAnchor { file_id: self.file, ast_id: mod_item.ast_id(&self.tree).erase() },
+            item,
+            self.hygiene(),
+        );
+        self.add_attrs(mod_item.into(), attrs);
 
-        self.add_attrs(item.into(), attrs);
-
-        Some(item)
+        Some(mod_item)
     }
 
     fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) {
@@ -146,21 +163,37 @@ impl<'a> Ctx<'a> {
         }
     }
 
-    fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option<AssocItem> {
-        match item {
+    fn lower_assoc_item(&mut self, item_node: &ast::AssocItem) -> Option<AssocItem> {
+        let item: AssocItem = match item_node {
             ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into),
             ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
             ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
             ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
-        }
+        }?;
+        let attrs = RawAttrs::new(
+            self.db.upcast(),
+            SpanAnchor { file_id: self.file, ast_id: item.ast_id(&self.tree).erase() },
+            item_node,
+            self.hygiene(),
+        );
+        self.add_attrs(
+            match item {
+                AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)),
+                AssocItem::TypeAlias(it) => AttrOwner::ModItem(ModItem::TypeAlias(it)),
+                AssocItem::Const(it) => AttrOwner::ModItem(ModItem::Const(it)),
+                AssocItem::MacroCall(it) => AttrOwner::ModItem(ModItem::MacroCall(it)),
            },
+            attrs,
+        );
+        Some(item)
     }
 
     fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
         let visibility = self.lower_visibility(strukt);
         let name = strukt.name()?.as_name();
-        let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt);
-        let fields = self.lower_fields(&strukt.kind());
         let ast_id = self.source_ast_id_map.ast_id(strukt);
+        let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt, ast_id.erase());
+        let fields = self.lower_fields(&strukt.kind());
         let res = Struct { name, visibility, generic_params, fields, ast_id };
         Some(id(self.data().structs.alloc(res)))
     }
@@ -183,8 +216,20 @@ impl<'a> Ctx<'a> {
         let start = self.next_field_idx();
         for field in fields.fields() {
             if let Some(data) = self.lower_record_field(&field) {
+                let ast_id = match data.ast_id {
+                    FieldAstId::Record(it) => it.erase(),
+                    FieldAstId::Tuple(it) => it.erase(),
+                };
                 let idx = self.data().fields.alloc(data);
-                self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
+                self.add_attrs(
+                    idx.into(),
+                    RawAttrs::new(
+                        self.db.upcast(),
+                        SpanAnchor { file_id: self.file, ast_id },
+                        &field,
+                        self.hygiene(),
+                    ),
+                );
             }
         }
         let end = self.next_field_idx();
@@ -204,8 +249,20 @@ impl<'a> Ctx<'a> {
         let start = self.next_field_idx();
         for (i, field) in fields.fields().enumerate() {
             let data = self.lower_tuple_field(i, &field);
+            let ast_id = match data.ast_id {
+                FieldAstId::Record(it) => it.erase(),
+                FieldAstId::Tuple(it) => it.erase(),
+            };
             let idx = self.data().fields.alloc(data);
-            self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
+            self.add_attrs(
+                idx.into(),
+                RawAttrs::new(
+                    self.db.upcast(),
+                    SpanAnchor { file_id: self.file, ast_id },
+                    &field,
+                    self.hygiene(),
+                ),
+            );
         }
         let end = self.next_field_idx();
         IdxRange::new(start..end)
|
||||||
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
|
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
|
||||||
let visibility = self.lower_visibility(union);
|
let visibility = self.lower_visibility(union);
|
||||||
let name = union.name()?.as_name();
|
let name = union.name()?.as_name();
|
||||||
let generic_params = self.lower_generic_params(HasImplicitSelf::No, union);
|
let ast_id = self.source_ast_id_map.ast_id(union);
|
||||||
|
let generic_params = self.lower_generic_params(HasImplicitSelf::No, union, ast_id.erase());
|
||||||
let fields = match union.record_field_list() {
|
let fields = match union.record_field_list() {
|
||||||
Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
|
Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
|
||||||
None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
|
None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
|
||||||
};
|
};
|
||||||
let ast_id = self.source_ast_id_map.ast_id(union);
|
|
||||||
let res = Union { name, visibility, generic_params, fields, ast_id };
|
let res = Union { name, visibility, generic_params, fields, ast_id };
|
||||||
Some(id(self.data().unions.alloc(res)))
|
Some(id(self.data().unions.alloc(res)))
|
||||||
}
|
}
|
||||||
|
@ -235,12 +292,12 @@ impl<'a> Ctx<'a> {
|
||||||
fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
|
fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
|
||||||
let visibility = self.lower_visibility(enum_);
|
let visibility = self.lower_visibility(enum_);
|
||||||
let name = enum_.name()?.as_name();
|
let name = enum_.name()?.as_name();
|
||||||
let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_);
|
let ast_id = self.source_ast_id_map.ast_id(enum_);
|
||||||
|
let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_, ast_id.erase());
|
||||||
let variants = match &enum_.variant_list() {
|
let variants = match &enum_.variant_list() {
|
||||||
Some(variant_list) => self.lower_variants(variant_list),
|
Some(variant_list) => self.lower_variants(variant_list),
|
||||||
None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
|
None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
|
||||||
};
|
};
|
||||||
let ast_id = self.source_ast_id_map.ast_id(enum_);
|
|
||||||
let res = Enum { name, visibility, generic_params, variants, ast_id };
|
let res = Enum { name, visibility, generic_params, variants, ast_id };
|
||||||
Some(id(self.data().enums.alloc(res)))
|
Some(id(self.data().enums.alloc(res)))
|
||||||
}
|
}
|
||||||
|
@ -249,10 +306,16 @@ impl<'a> Ctx<'a> {
|
||||||
let start = self.next_variant_idx();
|
let start = self.next_variant_idx();
|
||||||
for variant in variants.variants() {
|
for variant in variants.variants() {
|
||||||
if let Some(data) = self.lower_variant(&variant) {
|
if let Some(data) = self.lower_variant(&variant) {
|
||||||
|
let ast_id = data.ast_id.erase();
|
||||||
let idx = self.data().variants.alloc(data);
|
let idx = self.data().variants.alloc(data);
|
||||||
self.add_attrs(
|
self.add_attrs(
|
||||||
idx.into(),
|
idx.into(),
|
||||||
RawAttrs::new(self.db.upcast(), &variant, self.hygiene()),
|
RawAttrs::new(
|
||||||
|
self.db.upcast(),
|
||||||
|
SpanAnchor { file_id: self.file, ast_id },
|
||||||
|
&variant,
|
||||||
|
self.hygiene(),
|
||||||
|
),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -303,28 +366,39 @@ impl<'a> Ctx<'a> {
             });
             self.add_attrs(
                 idx.into(),
-                RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()),
+                RawAttrs::new(
+                    self.db.upcast(),
+                    SpanAnchor { file_id: self.file, ast_id: ast_id.erase() },
+                    &self_param,
+                    self.hygiene(),
+                ),
             );
             has_self_param = true;
         }
         for param in param_list.params() {
-            let idx = match param.dotdotdot_token() {
-                Some(_) => {
-                    let ast_id = self.source_ast_id_map.ast_id(&param);
-                    self.data()
+            let ast_id = self.source_ast_id_map.ast_id(&param);
+            let idx = match param.dotdotdot_token() {
+                Some(_) => self
+                    .data()
                     .params
-                        .alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) })
-                }
+                    .alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }),
                 None => {
                     let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty());
                     let ty = Interned::new(type_ref);
-                    let ast_id = self.source_ast_id_map.ast_id(&param);
                     self.data()
                         .params
                         .alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) })
                 }
             };
-            self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &param, self.hygiene()));
+            self.add_attrs(
+                idx.into(),
+                RawAttrs::new(
+                    self.db.upcast(),
+                    SpanAnchor { file_id: self.file, ast_id: ast_id.erase() },
+                    &param,
+                    self.hygiene(),
+                ),
+            );
         }
     }
     let end_param = self.next_param_idx();
@@ -381,7 +455,8 @@ impl<'a> Ctx<'a> {
             ast_id,
             flags,
         };
-        res.explicit_generic_params = self.lower_generic_params(HasImplicitSelf::No, func);
+        res.explicit_generic_params =
+            self.lower_generic_params(HasImplicitSelf::No, func, ast_id.erase());
 
         Some(id(self.data().functions.alloc(res)))
     }
@@ -394,8 +469,9 @@ impl<'a> Ctx<'a> {
         let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
         let visibility = self.lower_visibility(type_alias);
         let bounds = self.lower_type_bounds(type_alias);
-        let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
         let ast_id = self.source_ast_id_map.ast_id(type_alias);
+        let generic_params =
+            self.lower_generic_params(HasImplicitSelf::No, type_alias, ast_id.erase());
         let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id };
         Some(id(self.data().type_aliases.alloc(res)))
     }
@@ -443,23 +519,20 @@ impl<'a> Ctx<'a> {
     fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
         let name = trait_def.name()?.as_name();
         let visibility = self.lower_visibility(trait_def);
-        let generic_params =
-            self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def);
+        let ast_id = self.source_ast_id_map.ast_id(trait_def);
+        let generic_params = self.lower_generic_params(
+            HasImplicitSelf::Yes(trait_def.type_bound_list()),
+            trait_def,
+            ast_id.erase(),
+        );
         let is_auto = trait_def.auto_token().is_some();
         let is_unsafe = trait_def.unsafe_token().is_some();
-        let ast_id = self.source_ast_id_map.ast_id(trait_def);
 
         let items = trait_def
             .assoc_item_list()
             .into_iter()
             .flat_map(|list| list.assoc_items())
-            .filter_map(|item| {
-                let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
-                self.lower_assoc_item(&item).map(|item| {
-                    self.add_attrs(ModItem::from(item).into(), attrs);
-                    item
-                })
-            })
+            .filter_map(|item_node| self.lower_assoc_item(&item_node))
             .collect();
 
         let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
@@ -472,20 +545,23 @@ impl<'a> Ctx<'a> {
     ) -> Option<FileItemTreeId<TraitAlias>> {
         let name = trait_alias_def.name()?.as_name();
         let visibility = self.lower_visibility(trait_alias_def);
+        let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
         let generic_params = self.lower_generic_params(
             HasImplicitSelf::Yes(trait_alias_def.type_bound_list()),
             trait_alias_def,
+            ast_id.erase(),
         );
-        let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
 
         let alias = TraitAlias { name, visibility, generic_params, ast_id };
         Some(id(self.data().trait_aliases.alloc(alias)))
     }
 
     fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
+        let ast_id = self.source_ast_id_map.ast_id(impl_def);
         // Note that trait impls don't get implicit `Self` unlike traits, because here they are a
         // type alias rather than a type parameter, so this is handled by the resolver.
-        let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def);
+        let generic_params =
+            self.lower_generic_params(HasImplicitSelf::No, impl_def, ast_id.erase());
         // FIXME: If trait lowering fails, due to a non PathType for example, we treat this impl
         // as if it was a non-trait impl. Ideally we want to create a unique missing ref that only
         // equals itself.
@@ -499,14 +575,8 @@ impl<'a> Ctx<'a> {
             .assoc_item_list()
             .into_iter()
             .flat_map(|it| it.assoc_items())
-            .filter_map(|item| {
-                let assoc = self.lower_assoc_item(&item)?;
-                let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
-                self.add_attrs(ModItem::from(assoc).into(), attrs);
-                Some(assoc)
-            })
+            .filter_map(|item| self.lower_assoc_item(&item))
             .collect();
-        let ast_id = self.source_ast_id_map.ast_id(impl_def);
         let res =
             Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id };
         Some(id(self.data().impls.alloc(res)))
@@ -572,15 +642,23 @@ impl<'a> Ctx<'a> {
                 // (in other words, the knowledge that they're in an extern block must not be used).
                 // This is because an extern block can contain macros whose ItemTree's top-level items
                 // should be considered to be in an extern block too.
-                let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
-                let id: ModItem = match item {
-                    ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(),
-                    ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(),
-                    ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(),
-                    ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(),
+                let mod_item: ModItem = match &item {
+                    ast::ExternItem::Fn(ast) => self.lower_function(ast)?.into(),
+                    ast::ExternItem::Static(ast) => self.lower_static(ast)?.into(),
+                    ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
+                    ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
                 };
-                self.add_attrs(id.into(), attrs);
-                Some(id)
+                let attrs = RawAttrs::new(
+                    self.db.upcast(),
+                    SpanAnchor {
+                        file_id: self.file,
+                        ast_id: mod_item.ast_id(&self.tree).erase(),
+                    },
+                    &item,
+                    self.hygiene(),
+                );
+                self.add_attrs(mod_item.into(), attrs);
+                Some(mod_item)
             })
             .collect()
     });
@@ -593,6 +671,7 @@ impl<'a> Ctx<'a> {
         &mut self,
         has_implicit_self: HasImplicitSelf,
         node: &dyn ast::HasGenericParams,
+        owner_ast_id: ErasedFileAstId,
     ) -> Interned<GenericParams> {
         let mut generics = GenericParams::default();
 
@@ -612,12 +691,21 @@ impl<'a> Ctx<'a> {
             generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param));
         }
 
-        let add_param_attrs = |item, param| {
-            let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.hygiene());
+        let add_param_attrs = |item: Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
+                               param| {
+            let attrs = RawAttrs::new(
+                self.db.upcast(),
+                SpanAnchor { file_id: self.file, ast_id: owner_ast_id },
+                &param,
+                self.body_ctx.hygiene(),
+            );
             // This is identical to the body of `Ctx::add_attrs()` but we can't call that here
             // because it requires `&mut self` and the call to `generics.fill()` below also
             // references `self`.
-            match self.tree.attrs.entry(item) {
+            match self.tree.attrs.entry(match item {
+                Either::Right(id) => id.into(),
+                Either::Left(id) => id.into(),
+            }) {
                 Entry::Occupied(mut entry) => {
                     *entry.get_mut() = entry.get().merge(attrs);
                 }
@ -84,7 +84,7 @@ use nameres::DefMap;
|
||||||
use stdx::impl_from;
|
use stdx::impl_from;
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
|
|
||||||
use ::tt::token_id as tt;
|
pub use hir_expand::tt;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
builtin_type::BuiltinType,
|
builtin_type::BuiltinType,
|
||||||
|
@ -1341,15 +1341,13 @@ fn attr_macro_as_call_id(
|
||||||
def: MacroDefId,
|
def: MacroDefId,
|
||||||
) -> MacroCallId {
|
) -> MacroCallId {
|
||||||
let arg = match macro_attr.input.as_deref() {
|
let arg = match macro_attr.input.as_deref() {
|
||||||
Some(AttrInput::TokenTree(tt)) => (
|
Some(AttrInput::TokenTree(tt)) => {
|
||||||
{
|
let mut tt = tt.as_ref().clone();
|
||||||
let mut tt = tt.0.clone();
|
|
||||||
tt.delimiter = tt::Delimiter::UNSPECIFIED;
|
tt.delimiter = tt::Delimiter::UNSPECIFIED;
|
||||||
tt
|
tt
|
||||||
},
|
}
|
||||||
tt.1.clone(),
|
|
||||||
),
|
_ => tt::Subtree::empty(),
|
||||||
_ => (tt::Subtree::empty(), Default::default()),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
def.as_lazy_macro(
|
def.as_lazy_macro(
|
||||||
|
|
|
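The `tt.0.clone()` to `tt.as_ref().clone()` change above reflects `AttrInput::TokenTree` shrinking from `Box<(tt::Subtree, mbe::TokenMap)>` to a plain `Box<tt::Subtree>` (the enum itself changes in a later hunk): each leaf now carries its span inline, so no side table travels with the tree. A sketch of reading a span straight off a leaf, assuming the `SpanData` sketched earlier:

    // With spans inline, walking a subtree yields locations directly; the old
    // code had to resolve a TokenId through a separate TokenMap first.
    fn first_leaf_span(tree: &tt::Subtree<SpanData>) -> Option<SpanData> {
        tree.token_trees.iter().find_map(|tt| match tt {
            tt::TokenTree::Leaf(tt::Leaf::Ident(it)) => Some(it.span),
            tt::TokenTree::Leaf(tt::Leaf::Punct(it)) => Some(it.span),
            tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => Some(it.span),
            tt::TokenTree::Subtree(sub) => first_leaf_span(sub),
        })
    }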
@@ -15,7 +15,6 @@ use crate::macro_expansion_tests::check;
 fn token_mapping_smoke_test() {
     check(
         r#"
-// +tokenids
 macro_rules! f {
     ( struct $ident:ident ) => {
         struct $ident {

@@ -27,23 +26,19 @@ macro_rules! f {
 // +tokenids
 f!(struct MyTraitMap2);
 "#,
-        expect![[r##"
-// call ids will be shifted by Shift(30)
-// +tokenids
-macro_rules! f {#0
-(#1 struct#2 $#3ident#4:#5ident#6 )#1 =#7>#8 {#9
-struct#10 $#11ident#12 {#13
-map#14:#15 :#16:#17std#18:#19:#20collections#21:#22:#23HashSet#24<#25(#26)#26>#27,#28
-}#13
-}#9;#29
-}#0
-
-// // +tokenids
-// f!(struct#1 MyTraitMap2#2);
-struct#10 MyTraitMap2#32 {#13
-map#14:#15 ::std#18::collections#21::HashSet#24<#25(#26)#26>#27,#28
-}#13
-"##]],
+        expect![[r#"
+macro_rules! f {
+( struct $ident:ident ) => {
+struct $ident {
+map: ::std::collections::HashSet<()>,
+}
+};
+}
+
+struct#SpanAnchor(FileId(0), 1)@58..64 MyTraitMap2#SpanAnchor(FileId(0), 2)@23..34 {#SpanAnchor(FileId(0), 1)@72..73
+map#SpanAnchor(FileId(0), 1)@86..89:#SpanAnchor(FileId(0), 1)@89..90 ::std#SpanAnchor(FileId(0), 1)@93..96::collections#SpanAnchor(FileId(0), 1)@98..109::HashSet#SpanAnchor(FileId(0), 1)@111..118<#SpanAnchor(FileId(0), 1)@118..119(#SpanAnchor(FileId(0), 1)@119..120)#SpanAnchor(FileId(0), 1)@120..121>#SpanAnchor(FileId(0), 1)@121..122,#SpanAnchor(FileId(0), 1)@122..123
+}#SpanAnchor(FileId(0), 1)@132..133
+"#]],
     );
 }
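In the updated expectation each token is followed by `#<anchor>@<range>`: the anchor names the file and the erased AST id of the node the range is relative to (apparently ast_id 1 is the `macro_rules!` item and 2 the `f!` call), and the range points back into that node's text. A sketch of producing the annotation, assuming the sketched `SpanData` and a compact rendering for anchors:

    use std::fmt::Write;

    // Renders `struct#SpanAnchor(FileId(0), 1)@58..64` style annotations,
    // matching the `format_to!(res, "#{:?}@{:?}", span.anchor, span.range)`
    // call that a later hunk adds to `pretty_print_macro_expansion`.
    fn annotate(token_text: &str, span: &SpanData) -> String {
        let mut out = String::new();
        write!(
            out,
            "{}#SpanAnchor(FileId({}), {})@{:?}",
            token_text, span.anchor.file_id.0, span.anchor.ast_id.0, span.range
        )
        .unwrap();
        out
    }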
@@ -71,31 +66,22 @@ f! {


 "#,
-        expect![[r##"
-// call ids will be shifted by Shift(18)
+        expect![[r#"
 // +tokenids
-macro_rules! f {#0
-(#1$#2(#3$#4tt#5:#6tt#7)#3*#8)#1 =#9>#10 {#11
-$#12(#13$#14tt#15)#13*#16
-}#11;#17
-}#0
+macro_rules! f {
+($($tt:tt)*) => {
+$($tt)*
+};
+}

-// // +tokenids
-// f! {
-// fn#1 main#2() {
-// 1#5;#6
-// 1.0#7;#8
-// let#9 x#10 =#11 1#12;#13
-// }
-// }
-fn#19 main#20(#21)#21 {#22
-1#23;#24
-1.0#25;#26
-let#27 x#28 =#29 1#30;#31
-}#22
-
-
-"##]],
+fn#SpanAnchor(FileId(0), 2)@22..24 main#SpanAnchor(FileId(0), 2)@25..29(#SpanAnchor(FileId(0), 2)@29..30)#SpanAnchor(FileId(0), 2)@30..31 {#SpanAnchor(FileId(0), 2)@32..33
+1#SpanAnchor(FileId(0), 2)@42..43;#SpanAnchor(FileId(0), 2)@43..44
+1.0#SpanAnchor(FileId(0), 2)@53..56;#SpanAnchor(FileId(0), 2)@56..57
+let#SpanAnchor(FileId(0), 2)@66..69 x#SpanAnchor(FileId(0), 2)@70..71 =#SpanAnchor(FileId(0), 2)@72..73 1#SpanAnchor(FileId(0), 2)@74..75;#SpanAnchor(FileId(0), 2)@75..76
+}#SpanAnchor(FileId(0), 2)@81..82
+"#]],
     );
 }

@@ -150,8 +136,7 @@ macro_rules! identity {
 }

 fn main(foo: ()) {
-    // format_args/*+tokenids*/!("{} {} {}"#1,#2 format_args#3!#4("{}"#6,#7 0#8),#9 foo#10,#11 identity#12!#13(10#15),#16 "bar"#17)
-    builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_args#3!#4(#5"{}"#6,#7 0#8)#5,#9 foo#10,#11 identity#12!#13(#1410#15)#14,#16 "bar"#17)#0
+    builtin#SpanAnchor(FileId(0), 0)@0..0 ##SpanAnchor(FileId(0), 0)@0..0format_args#SpanAnchor(FileId(0), 0)@0..0 (#SpanAnchor(FileId(0), 6)@25..26"{} {} {}"#SpanAnchor(FileId(0), 6)@26..36,#SpanAnchor(FileId(0), 6)@36..37 format_args#SpanAnchor(FileId(0), 6)@38..49!#SpanAnchor(FileId(0), 6)@49..50(#SpanAnchor(FileId(0), 6)@50..51"{}"#SpanAnchor(FileId(0), 6)@51..55,#SpanAnchor(FileId(0), 6)@55..56 0#SpanAnchor(FileId(0), 6)@57..58)#SpanAnchor(FileId(0), 6)@58..59,#SpanAnchor(FileId(0), 6)@59..60 foo#SpanAnchor(FileId(0), 6)@61..64,#SpanAnchor(FileId(0), 6)@64..65 identity#SpanAnchor(FileId(0), 6)@66..74!#SpanAnchor(FileId(0), 6)@74..75(#SpanAnchor(FileId(0), 6)@75..7610#SpanAnchor(FileId(0), 6)@76..78)#SpanAnchor(FileId(0), 6)@78..79,#SpanAnchor(FileId(0), 6)@79..80 "bar"#SpanAnchor(FileId(0), 6)@81..86)#SpanAnchor(FileId(0), 6)@86..87
 }

 "##]],
@@ -16,21 +16,16 @@ mod proc_macros;

 use std::{iter, ops::Range, sync};

-use ::mbe::TokenMap;
 use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
 use expect_test::Expect;
-use hir_expand::{
-    db::{DeclarativeMacroExpander, ExpandDatabase},
-    AstId, InFile, MacroFile,
-};
+use hir_expand::{db::ExpandDatabase, HirFileIdExt, InFile, MacroFile, SpanMap};
 use stdx::format_to;
 use syntax::{
     ast::{self, edit::IndentLevel},
-    AstNode, SyntaxElement,
-    SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT},
-    SyntaxNode, TextRange, T,
+    AstNode,
+    SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT},
+    SyntaxNode, T,
 };
-use tt::token_id::{Subtree, TokenId};

 use crate::{
     db::DefDatabase,

@@ -39,6 +34,7 @@ use crate::{
     resolver::HasResolver,
     src::HasSource,
     test_db::TestDB,
+    tt::Subtree,
     AdtId, AsMacroCall, Lookup, ModuleDefId,
 };

@@ -88,43 +84,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
     let mut text_edits = Vec::new();
     let mut expansions = Vec::new();

-    for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) {
-        let mut show_token_ids = false;
-        for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
-            show_token_ids |= comment.to_string().contains("+tokenids");
-        }
-        if !show_token_ids {
-            continue;
-        }
-
-        let call_offset = macro_.syntax().text_range().start().into();
-        let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
-        let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
-
-        let DeclarativeMacroExpander { mac, def_site_token_map } =
-            &*db.decl_macro_expander(krate, ast_id);
-        assert_eq!(mac.err(), None);
-        let tt = match &macro_ {
-            ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
-            ast::Macro::MacroDef(_) => unimplemented!(""),
-        };
-
-        let tt_start = tt.syntax().text_range().start();
-        tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
-            |token| {
-                let range = token.text_range().checked_sub(tt_start).unwrap();
-                if let Some(id) = def_site_token_map.token_by_range(range) {
-                    let offset = (range.end() + tt_start).into();
-                    text_edits.push((offset..offset, format!("#{}", id.0)));
-                }
-            },
-        );
-        text_edits.push((
-            call_offset..call_offset,
-            format!("// call ids will be shifted by {:?}\n", mac.shift()),
-        ));
-    }
-
     for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
         let macro_call = InFile::new(source.file_id, &macro_call);
         let res = macro_call

@@ -138,10 +97,10 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         let macro_file = MacroFile { macro_call_id };
         let mut expansion_result = db.parse_macro_expansion(macro_file);
         expansion_result.err = expansion_result.err.or(res.err);
-        expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id)));
+        expansions.push((macro_call.value.clone(), expansion_result));
     }

-    for (call, exp, arg) in expansions.into_iter().rev() {
+    for (call, exp) in expansions.into_iter().rev() {
         let mut tree = false;
         let mut expect_errors = false;
         let mut show_token_ids = false;

@@ -185,28 +144,8 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         }
         let range = call.syntax().text_range();
         let range: Range<usize> = range.into();

-        if show_token_ids {
-            if let Some((tree, map, _)) = arg.value.as_deref() {
-                let tt_range = call.token_tree().unwrap().syntax().text_range();
-                let mut ranges = Vec::new();
-                extract_id_ranges(&mut ranges, map, tree);
-                for (range, id) in ranges {
-                    let idx = (tt_range.start() + range.end()).into();
-                    text_edits.push((idx..idx, format!("#{}", id.0)));
-                }
-            }
-            text_edits.push((range.start..range.start, "// ".into()));
-            call.to_string().match_indices('\n').for_each(|(offset, _)| {
-                let offset = offset + 1 + range.start;
-                text_edits.push((offset..offset, "// ".into()));
-            });
-            text_edits.push((range.end..range.end, "\n".into()));
-            text_edits.push((range.end..range.end, expn_text));
-        } else {
-            text_edits.push((range, expn_text));
-        }
+        text_edits.push((range, expn_text));
     }

     text_edits.sort_by_key(|(range, _)| range.start);
     text_edits.reverse();

@@ -246,20 +185,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         expect.assert_eq(&expanded_text);
     }

-fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) {
-    tree.token_trees.iter().for_each(|tree| match tree {
-        tt::TokenTree::Leaf(leaf) => {
-            let id = match leaf {
-                tt::Leaf::Literal(it) => it.span,
-                tt::Leaf::Punct(it) => it.span,
-                tt::Leaf::Ident(it) => it.span,
-            };
-            ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id)));
-        }
-        tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree),
-    });
-}
-
 fn reindent(indent: IndentLevel, pp: String) -> String {
     if !pp.contains('\n') {
         return pp;

@@ -276,7 +201,7 @@ fn reindent(indent: IndentLevel, pp: String) -> String {
     res
 }

-fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String {
+fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&SpanMap>) -> String {
     let mut res = String::new();
     let mut prev_kind = EOF;
     let mut indent_level = 0;

@@ -323,8 +248,8 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> Str
             prev_kind = curr_kind;
             format_to!(res, "{}", token);
             if let Some(map) = map {
-                if let Some(id) = map.token_by_range(token.text_range()) {
-                    format_to!(res, "#{}", id.0);
+                if let Some(span) = map.span_for_range(token.text_range()) {
+                    format_to!(res, "#{:?}@{:?}", span.anchor, span.range);
                 }
             }
         }
     }
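The printer above no longer asks "which TokenId sits at this range" but "which span was recorded for this range". A minimal sketch of a lookup with that contract, assuming a `SpanMap` that stores `(TextRange, SpanData)` pairs (the real type lives in `hir_expand`):

    use text_size::TextRange;

    // Hypothetical stand-in for `hir_expand::SpanMap`: ranges in the expansion
    // paired with the span of the token that produced the text at that range.
    pub struct SpanMap {
        pairs: Vec<(TextRange, SpanData)>,
    }

    impl SpanMap {
        /// Finds the span recorded for the token covering `range`, if any.
        pub fn span_for_range(&self, range: TextRange) -> Option<SpanData> {
            self.pairs
                .iter()
                .find(|(r, _)| r.contains_range(range))
                .map(|&(_, span)| span)
        }
    }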
@@ -5,6 +5,7 @@

 use std::{cmp::Ordering, iter, mem};

+use ::tt::Span;
 use base_db::{CrateId, Dependency, Edition, FileId};
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;

@@ -85,8 +86,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
             .enumerate()
             .map(|(idx, it)| {
                 // FIXME: a hacky way to create a Name from string.
-                let name =
-                    tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() };
+                let name = tt::Ident { text: it.name.clone(), span: tt::SpanData::DUMMY };
                 (name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32)))
             })
             .collect())

@@ -471,7 +471,7 @@ impl DefCollector<'_> {
                     directive.module_id,
                     MacroCallKind::Attr {
                         ast_id: ast_id.ast_id,
-                        attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
+                        attr_args: Arc::new(tt::Subtree::empty()),
                         invoc_attr_index: attr.id,
                     },
                     attr.path().clone(),

@@ -2083,8 +2083,7 @@ impl ModCollector<'_, '_> {
         let name = match attrs.by_key("rustc_builtin_macro").string_value() {
             Some(it) => {
                 // FIXME: a hacky way to create a Name from string.
-                name =
-                    tt::Ident { text: it.clone(), span: tt::TokenId::unspecified() }.as_name();
+                name = tt::Ident { text: it.clone(), span: tt::SpanData::DUMMY }.as_name();
                 &name
             }
             None => {

@@ -1,7 +1,7 @@
 //! This module resolves `mod foo;` declaration to file.
 use arrayvec::ArrayVec;
 use base_db::{AnchoredPath, FileId};
-use hir_expand::name::Name;
+use hir_expand::{name::Name, HirFileIdExt};
 use limit::Limit;
 use syntax::SmolStr;

@@ -66,7 +66,7 @@ fn typing_inside_a_function_should_not_invalidate_def_map() {

 #[test]
 fn typing_inside_a_macro_should_not_invalidate_def_map() {
-    let (mut db, pos) = TestDB::with_position(
+    check_def_map_is_not_recomputed(
         r"
 //- /lib.rs
 macro_rules! m {

@@ -84,27 +84,15 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
 //- /foo/bar.rs
 $0
 m!(X);

+pub struct S {}
+",
+        r"
+m!(Y);
+
+pub struct S {}
 ",
     );
-    let krate = db.test_crate();
-    {
-        let events = db.log_executed(|| {
-            let crate_def_map = db.crate_def_map(krate);
-            let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
-            assert_eq!(module_data.scope.resolutions().count(), 1);
-        });
-        assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
-    }
-    db.set_file_text(pos.file_id, Arc::from("m!(Y);"));
-
-    {
-        let events = db.log_executed(|| {
-            let crate_def_map = db.crate_def_map(krate);
-            let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
-            assert_eq!(module_data.scope.resolutions().count(), 1);
-        });
-        assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}")
-    }
 }

 #[test]
@@ -12,11 +12,13 @@ use std::{
     marker::PhantomData,
 };

-use la_arena::{Arena, Idx};
+use la_arena::{Arena, Idx, RawIdx};
 use profile::Count;
 use rustc_hash::FxHasher;
 use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};

+pub use base_db::span::ErasedFileAstId;
+
 /// `AstId` points to an AST node in a specific file.
 pub struct FileAstId<N: AstIdNode> {
     raw: ErasedFileAstId,

@@ -62,8 +64,6 @@ impl<N: AstIdNode> FileAstId<N> {
     }
 }

-pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
-
 pub trait AstIdNode: AstNode {}
 macro_rules! register_ast_id_node {
     (impl AstIdNode for $($ident:ident),+ ) => {

@@ -129,6 +129,11 @@ impl AstIdMap {
     pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
         assert!(node.parent().is_none());
         let mut res = AstIdMap::default();

+        // make sure to allocate the root node
+        if !should_alloc_id(node.kind()) {
+            res.alloc(node);
+        }
         // By walking the tree in breadth-first order we make sure that parents
         // get lower ids then children. That is, adding a new child does not
         // change parent's id. This means that, say, adding a new function to a

@@ -155,6 +160,11 @@ impl AstIdMap {
         res
     }

+    /// The [`AstId`] of the root node
+    pub fn root(&self) -> SyntaxNodePtr {
+        self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone()
+    }
+
     pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
         let raw = self.erased_ast_id(item.syntax());
         FileAstId { raw, covariant: PhantomData }
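Allocating the root node first gives the erased id 0 a fixed meaning: "the file itself". That is what lets other parts of this commit anchor file-level token trees (for example `include!` expansion) without naming any item. A sketch of the resulting constant, assuming `ErasedFileAstId` stays an arena index:

    use la_arena::{Idx, RawIdx};
    use syntax::SyntaxNodePtr;

    pub type ErasedFileAstId = Idx<SyntaxNodePtr>;

    // Because `from_source` allocates the root before walking children, index 0
    // always refers to the whole file (sketch; the real constant lives in
    // `base_db::span` as `ROOT_ERASED_FILE_AST_ID`).
    pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = Idx::from_raw(RawIdx::from_u32(0));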
@@ -164,7 +174,7 @@ impl AstIdMap {
         AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
     }

-    pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
+    pub fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
         self.arena[id].clone()
     }

@@ -1,7 +1,8 @@
 //! A higher level attributes based on TokenTree, with also some shortcuts.
 use std::{fmt, ops};

-use base_db::CrateId;
+use ::tt::Span;
+use base_db::{span::SpanAnchor, CrateId};
 use cfg::CfgExpr;
 use either::Either;
 use intern::Interned;

@@ -39,11 +40,16 @@ impl ops::Deref for RawAttrs {
 impl RawAttrs {
     pub const EMPTY: Self = Self { entries: None };

-    pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
+    pub fn new(
+        db: &dyn ExpandDatabase,
+        span_anchor: SpanAnchor,
+        owner: &dyn ast::HasAttrs,
+        hygiene: &Hygiene,
+    ) -> Self {
         let entries = collect_attrs(owner)
             .filter_map(|(id, attr)| match attr {
                 Either::Left(attr) => {
-                    attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
+                    attr.meta().and_then(|meta| Attr::from_src(db, span_anchor, meta, hygiene, id))
                 }
                 Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
                     id,

@@ -58,9 +64,13 @@ impl RawAttrs {
         Self { entries: if entries.is_empty() { None } else { Some(entries) } }
     }

-    pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
+    pub fn from_attrs_owner(
+        db: &dyn ExpandDatabase,
+        span_anchor: SpanAnchor,
+        owner: InFile<&dyn ast::HasAttrs>,
+    ) -> Self {
         let hygiene = Hygiene::new(db, owner.file_id);
-        Self::new(db, owner.value, &hygiene)
+        Self::new(db, span_anchor, owner.value, &hygiene)
     }

     pub fn merge(&self, other: Self) -> Self {

@@ -190,16 +200,17 @@ pub struct Attr {
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum AttrInput {
     /// `#[attr = "string"]`
+    // FIXME: This is losing span
     Literal(SmolStr),
     /// `#[attr(subtree)]`
-    TokenTree(Box<(tt::Subtree, mbe::TokenMap)>),
+    TokenTree(Box<tt::Subtree>),
 }

 impl fmt::Display for AttrInput {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
-            AttrInput::TokenTree(tt) => tt.0.fmt(f),
+            AttrInput::TokenTree(tt) => tt.fmt(f),
         }
     }
 }

@@ -207,6 +218,7 @@ impl fmt::Display for AttrInput {
 impl Attr {
     fn from_src(
         db: &dyn ExpandDatabase,
+        span_anchor: SpanAnchor,
         ast: ast::Meta,
         hygiene: &Hygiene,
         id: AttrId,

@@ -219,8 +231,13 @@ impl Attr {
             };
             Some(Interned::new(AttrInput::Literal(value)))
         } else if let Some(tt) = ast.token_tree() {
-            let (tree, map) = syntax_node_to_token_tree(tt.syntax());
-            Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map)))))
+            // FIXME: We could also allocate ids for attributes and use the attribute itself as an anchor
+            let offset =
+                db.ast_id_map(span_anchor.file_id).get_raw(span_anchor.ast_id).text_range().start();
+            // FIXME: Spanmap
+            let tree =
+                syntax_node_to_token_tree(tt.syntax(), span_anchor, offset, &Default::default());
+            Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
         } else {
             None
         };
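`syntax_node_to_token_tree` grows three parameters here. Their roles, inferred from the call sites in this diff: the anchor the produced spans point at, the anchor node's absolute offset in its file (so ranges can be made relative), and an existing span map to consult. A sketch of the signature under those assumptions (the real function lives in `mbe`):

    use text_size::TextSize;

    // Sketch only; parameter roles are inferred from the call sites above.
    pub fn syntax_node_to_token_tree(
        node: &SyntaxNode,
        span_anchor: SpanAnchor,
        anchor_offset: TextSize, // absolute start of the anchor node in its file
        span_map: &SpanMap,      // spans to reuse for tokens that already have them
    ) -> tt::Subtree<SpanData> {
        // Every emitted leaf gets `SpanData { anchor: span_anchor, range }`,
        // where `range` is the token's range minus `anchor_offset`.
        unimplemented!("sketch of the new mbe API shape")
    }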
@@ -233,10 +250,12 @@ impl Attr {
         hygiene: &Hygiene,
         id: AttrId,
     ) -> Option<Attr> {
-        let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
+        // FIXME: Unecessary roundtrip tt -> ast -> tt
+        let (parse, _map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
         let ast = ast::Meta::cast(parse.syntax_node())?;

-        Self::from_src(db, ast, hygiene, id)
+        // FIXME: we discard spans here!
+        Self::from_src(db, SpanAnchor::DUMMY, ast, hygiene, id)
     }

     pub fn path(&self) -> &ModPath {

@@ -256,7 +275,7 @@ impl Attr {
     /// #[path(ident)]
     pub fn single_ident_value(&self) -> Option<&tt::Ident> {
         match self.input.as_deref()? {
-            AttrInput::TokenTree(tt) => match &*tt.0.token_trees {
+            AttrInput::TokenTree(tt) => match &*tt.token_trees {
                 [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
                 _ => None,
             },

@@ -267,7 +286,7 @@ impl Attr {
     /// #[path TokenTree]
     pub fn token_tree_value(&self) -> Option<&Subtree> {
         match self.input.as_deref()? {
-            AttrInput::TokenTree(tt) => Some(&tt.0),
+            AttrInput::TokenTree(tt) => Some(tt),
             _ => None,
         }
     }

@@ -1,5 +1,7 @@
 //! Builtin attributes.

+use ::tt::Span;
+
 use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};

 macro_rules! register_builtin {

@@ -98,7 +100,7 @@ fn derive_attr_expand(
 ) -> ExpandResult<tt::Subtree> {
     let loc = db.lookup_intern_macro_call(id);
     let derives = match &loc.kind {
-        MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0,
+        MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => attr_args,
         _ => return ExpandResult::ok(tt::Subtree::empty()),
     };
     pseudo_derive_attr_expansion(tt, derives)

@@ -112,7 +114,7 @@ pub fn pseudo_derive_attr_expansion(
         tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
             char,
             spacing: tt::Spacing::Alone,
-            span: tt::TokenId::unspecified(),
+            span: tt::SpanData::DUMMY,
         }))
     };

@@ -1,18 +1,20 @@
 //! Builtin derives.

-use ::tt::Ident;
+use ::tt::Span;
 use base_db::{CrateOrigin, LangCrateOrigin};
 use itertools::izip;
-use mbe::TokenMap;
 use rustc_hash::FxHashSet;
 use stdx::never;
 use tracing::debug;

 use crate::{
     name::{AsName, Name},
-    tt::{self, TokenId},
+    tt, SpanMap,
+};
+use syntax::{
+    ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds},
+    TextSize,
 };
-use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};

 use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};

@@ -29,7 +31,7 @@ macro_rules! register_builtin {
             db: &dyn ExpandDatabase,
             id: MacroCallId,
             tt: &ast::Adt,
-            token_map: &TokenMap,
+            token_map: &SpanMap,
         ) -> ExpandResult<tt::Subtree> {
             let expander = match *self {
                 $( BuiltinDeriveExpander::$trait => $expand, )*

@@ -71,7 +73,7 @@ enum VariantShape {
 }

 fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
-    (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
+    (0..n).map(|it| tt::Ident::new(format!("f{it}"), tt::SpanData::DUMMY))
 }

 impl VariantShape {
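Synthesized tokens like the `f0`, `f1` tuple-field names have no source text to point at, so `TokenId::unspecified()` becomes `SpanData::DUMMY` across these hunks. The `use ::tt::Span;` imports suggest the dummy is provided through a trait; a sketch under that assumption:

    // Sketch: a trait-level placeholder so generic token-tree code can conjure
    // a span for tokens that were never in any file. Names are assumptions.
    pub trait Span: Copy {
        /// Placeholder span for tokens synthesized during expansion.
        const DUMMY: Self;
    }

    // Usage, as in `tuple_field_iterator` above:
    //     tt::Ident::new(format!("f{it}"), tt::SpanData::DUMMY)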
@@ -117,7 +119,7 @@ impl VariantShape {
         }
     }

-    fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
+    fn from(tm: &SpanMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
         let r = match value {
             None => VariantShape::Unit,
             Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(

@@ -189,8 +191,8 @@ struct BasicAdtInfo {
     associated_types: Vec<tt::Subtree>,
 }

-fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
-    let (name, generic_param_list, shape) = match &adt {
+fn parse_adt(tm: &SpanMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
+    let (name, generic_param_list, shape) = match adt {
         ast::Adt::Struct(it) => (
             it.name(),
             it.generic_param_list(),

@@ -234,21 +236,44 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
         match this {
             Some(it) => {
                 param_type_set.insert(it.as_name());
-                mbe::syntax_node_to_token_tree(it.syntax()).0
+                mbe::syntax_node_to_token_tree(
+                    it.syntax(),
+                    tm.span_for_range(it.syntax().first_token().unwrap().text_range())
+                        .unwrap()
+                        .anchor,
+                    TextSize::from(0),
+                    tm,
+                )
             }
             None => tt::Subtree::empty(),
         }
     };
     let bounds = match &param {
-        ast::TypeOrConstParam::Type(it) => {
-            it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
-        }
+        ast::TypeOrConstParam::Type(it) => it.type_bound_list().map(|it| {
+            mbe::syntax_node_to_token_tree(
+                it.syntax(),
+                tm.span_for_range(it.syntax().first_token().unwrap().text_range())
+                    .unwrap()
+                    .anchor,
+                TextSize::from(0),
+                tm,
+            )
+        }),
         ast::TypeOrConstParam::Const(_) => None,
     };
     let ty = if let ast::TypeOrConstParam::Const(param) = param {
         let ty = param
             .ty()
-            .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0)
+            .map(|ty| {
+                mbe::syntax_node_to_token_tree(
+                    ty.syntax(),
+                    tm.span_for_range(ty.syntax().first_token().unwrap().text_range())
+                        .unwrap()
+                        .anchor,
+                    TextSize::from(0),
+                    tm,
+                )
+            })
             .unwrap_or_else(tt::Subtree::empty);
         Some(ty)
     } else {
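The conversion pattern repeated three times above, take the span of the node's first token, keep only its anchor, and convert relative to offset 0, could be captured in one helper. A sketch (hypothetical; the commit itself inlines it at each call site):

    // Hypothetical helper for the repeated idiom in `parse_adt`.
    fn node_to_token_tree_reusing_anchor(
        tm: &SpanMap,
        node: &SyntaxNode,
    ) -> tt::Subtree<SpanData> {
        // Reuse the anchor already recorded for this node's first token, so the
        // converted subtree stays attached to the same item as its surroundings.
        let anchor = tm
            .span_for_range(node.first_token().unwrap().text_range())
            .unwrap()
            .anchor;
        mbe::syntax_node_to_token_tree(node, anchor, TextSize::from(0), tm)
    }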
|
@ -282,20 +307,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
|
||||||
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
|
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
|
||||||
param_type_set.contains(&name).then_some(p)
|
param_type_set.contains(&name).then_some(p)
|
||||||
})
|
})
|
||||||
.map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
|
.map(|it| {
|
||||||
|
mbe::syntax_node_to_token_tree(
|
||||||
|
it.syntax(),
|
||||||
|
tm.span_for_range(it.syntax().first_token().unwrap().text_range()).unwrap().anchor,
|
||||||
|
TextSize::from(0),
|
||||||
|
tm,
|
||||||
|
)
|
||||||
|
})
|
||||||
.collect();
|
.collect();
|
||||||
let name_token = name_to_token(&tm, name)?;
|
let name_token = name_to_token(&tm, name)?;
|
||||||
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
|
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
|
||||||
}
|
}
|
||||||
|
|
||||||
fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
|
fn name_to_token(token_map: &SpanMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
|
||||||
let name = name.ok_or_else(|| {
|
let name = name.ok_or_else(|| {
|
||||||
debug!("parsed item has no name");
|
debug!("parsed item has no name");
|
||||||
ExpandError::other("missing name")
|
ExpandError::other("missing name")
|
||||||
})?;
|
})?;
|
||||||
let name_token_id =
|
let span = token_map.span_for_range(name.syntax().text_range()).unwrap();
|
||||||
token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
|
let name_token = tt::Ident { span, text: name.text().into() };
|
||||||
let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
|
|
||||||
Ok(name_token)
|
Ok(name_token)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -332,7 +363,7 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
|
||||||
/// therefore does not get bound by the derived trait.
|
/// therefore does not get bound by the derived trait.
|
||||||
fn expand_simple_derive(
|
fn expand_simple_derive(
|
||||||
tt: &ast::Adt,
|
tt: &ast::Adt,
|
||||||
tm: &TokenMap,
|
tm: &SpanMap,
|
||||||
trait_path: tt::Subtree,
|
trait_path: tt::Subtree,
|
||||||
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
|
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::Subtree> {
|
||||||
|
@ -393,7 +424,7 @@ fn copy_expand(
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
id: MacroCallId,
|
id: MacroCallId,
|
||||||
tt: &ast::Adt,
|
tt: &ast::Adt,
|
||||||
tm: &TokenMap,
|
tm: &SpanMap,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::Subtree> {
|
||||||
let krate = find_builtin_crate(db, id);
|
let krate = find_builtin_crate(db, id);
|
||||||
expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
|
expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
|
||||||
|
@ -403,16 +434,13 @@ fn clone_expand(
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
id: MacroCallId,
|
id: MacroCallId,
|
||||||
tt: &ast::Adt,
|
tt: &ast::Adt,
|
||||||
tm: &TokenMap,
|
tm: &SpanMap,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::Subtree> {
|
||||||
let krate = find_builtin_crate(db, id);
|
let krate = find_builtin_crate(db, id);
|
||||||
expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
|
expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
|
||||||
if matches!(adt.shape, AdtShape::Union) {
|
if matches!(adt.shape, AdtShape::Union) {
|
||||||
let star = tt::Punct {
|
let star =
|
||||||
char: '*',
|
tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY };
|
||||||
spacing: ::tt::Spacing::Alone,
|
|
||||||
span: tt::TokenId::unspecified(),
|
|
||||||
};
|
|
||||||
return quote! {
|
return quote! {
|
||||||
fn clone(&self) -> Self {
|
fn clone(&self) -> Self {
|
||||||
#star self
|
#star self
|
||||||
|
@ -420,11 +448,8 @@ fn clone_expand(
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
|
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
|
||||||
let star = tt::Punct {
|
let star =
|
||||||
char: '*',
|
tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY };
|
||||||
spacing: ::tt::Spacing::Alone,
|
|
||||||
span: tt::TokenId::unspecified(),
|
|
||||||
};
|
|
||||||
return quote! {
|
return quote! {
|
||||||
fn clone(&self) -> Self {
|
fn clone(&self) -> Self {
|
||||||
match #star self {}
|
match #star self {}
|
||||||
|
@ -452,16 +477,14 @@ fn clone_expand(
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This function exists since `quote! { => }` doesn't work.
|
/// This function exists since `quote! { => }` doesn't work.
|
||||||
fn fat_arrow() -> ::tt::Subtree<TokenId> {
|
fn fat_arrow() -> tt::Subtree {
|
||||||
let eq =
|
let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::SpanData::DUMMY };
|
||||||
tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
|
|
||||||
quote! { #eq> }
|
quote! { #eq> }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This function exists since `quote! { && }` doesn't work.
|
/// This function exists since `quote! { && }` doesn't work.
|
||||||
fn and_and() -> ::tt::Subtree<TokenId> {
|
fn and_and() -> tt::Subtree {
|
||||||
let and =
|
let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::SpanData::DUMMY };
|
||||||
tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
|
|
||||||
quote! { #and& }
|
quote! { #and& }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -469,7 +492,7 @@ fn default_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: &SpanMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
     expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {

@@ -509,7 +532,7 @@ fn debug_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: &SpanMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
     expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {

@@ -540,11 +563,8 @@ fn debug_expand(
             },
         };
         if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
-            let star = tt::Punct {
-                char: '*',
-                spacing: ::tt::Spacing::Alone,
-                span: tt::TokenId::unspecified(),
-            };
+            let star =
+                tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY };
             return quote! {
                 fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
                     match #star self {}

@@ -590,7 +610,7 @@ fn hash_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: &SpanMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
     expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {

@@ -599,11 +619,8 @@ fn hash_expand(
             return quote! {};
         }
         if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
-            let star = tt::Punct {
-                char: '*',
-                spacing: ::tt::Spacing::Alone,
-                span: tt::TokenId::unspecified(),
-            };
+            let star =
+                tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY };
             return quote! {
                 fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
                     match #star self {}

@@ -644,7 +661,7 @@ fn eq_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: &SpanMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = find_builtin_crate(db, id);
     expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})

@@ -654,7 +671,7 @@ fn partial_eq_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: &SpanMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = find_builtin_crate(db, id);
     expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {

@@ -674,14 +691,14 @@ fn partial_eq_expand(
                 }
                 [first, rest @ ..] => {
                     let rest = rest.iter().map(|it| {
-                        let t1 = Ident::new(format!("{}_self", it.text), it.span);
-                        let t2 = Ident::new(format!("{}_other", it.text), it.span);
+                        let t1 = tt::Ident::new(format!("{}_self", it.text), it.span);
+                        let t2 = tt::Ident::new(format!("{}_other", it.text), it.span);
                         let and_and = and_and();
                         quote!(#and_and #t1 .eq( #t2 ))
                     });
                     let first = {
-                        let t1 = Ident::new(format!("{}_self", first.text), first.span);
-                        let t2 = Ident::new(format!("{}_other", first.text), first.span);
+                        let t1 = tt::Ident::new(format!("{}_self", first.text), first.span);
+                        let t2 = tt::Ident::new(format!("{}_other", first.text), first.span);
                         quote!(#t1 .eq( #t2 ))
                     };
                     quote!(#first ##rest)

@@ -708,11 +725,11 @@ fn self_and_other_patterns(
     name: &tt::Ident,
 ) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
     let self_patterns = adt.shape.as_pattern_map(name, |it| {
-        let t = Ident::new(format!("{}_self", it.text), it.span);
+        let t = tt::Ident::new(format!("{}_self", it.text), it.span);
         quote!(#t)
     });
     let other_patterns = adt.shape.as_pattern_map(name, |it| {
-        let t = Ident::new(format!("{}_other", it.text), it.span);
+        let t = tt::Ident::new(format!("{}_other", it.text), it.span);
         quote!(#t)
     });
     (self_patterns, other_patterns)

@@ -722,7 +739,7 @@ fn ord_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: &SpanMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
     expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {

@@ -752,8 +769,8 @@ fn ord_expand(
                 |(pat1, pat2, fields)| {
                     let mut body = quote!(#krate::cmp::Ordering::Equal);
                     for f in fields.into_iter().rev() {
-                        let t1 = Ident::new(format!("{}_self", f.text), f.span);
-                        let t2 = Ident::new(format!("{}_other", f.text), f.span);
+                        let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
+                        let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
                         body = compare(krate, quote!(#t1), quote!(#t2), body);
                     }
                     let fat_arrow = fat_arrow();

@@ -784,7 +801,7 @@ fn partial_ord_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
     tt: &ast::Adt,
-    tm: &TokenMap,
+    tm: &SpanMap,
 ) -> ExpandResult<tt::Subtree> {
     let krate = &find_builtin_crate(db, id);
     expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {

@@ -817,8 +834,8 @@ fn partial_ord_expand(
                 |(pat1, pat2, fields)| {
                     let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
                     for f in fields.into_iter().rev() {
-                        let t1 = Ident::new(format!("{}_self", f.text), f.span);
-                        let t2 = Ident::new(format!("{}_other", f.text), f.span);
+                        let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
+                        let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
                         body = compare(krate, quote!(#t1), quote!(#t2), body);
                     }
                     let fat_arrow = fat_arrow();
@@ -1,17 +1,22 @@
 //! Builtin macro

-use base_db::{AnchoredPath, Edition, FileId};
+use base_db::{
+    span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
+    AnchoredPath, Edition, FileId,
+};
 use cfg::CfgExpr;
 use either::Either;
-use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap};
+use mbe::{parse_exprs_with_sep, parse_to_token_tree};
 use syntax::{
     ast::{self, AstToken},
     SmolStr,
 };

 use crate::{
-    db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId,
-    MacroCallLoc,
+    db::ExpandDatabase,
+    name, quote,
+    tt::{self, Span},
+    EagerCallInfo, ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc,
 };

 macro_rules! register_builtin {

@@ -110,7 +115,7 @@ register_builtin! {
 }

 const DOLLAR_CRATE: tt::Ident =
-    tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() };
+    tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::SpanData::DUMMY };

 fn module_path_expand(
     _db: &dyn ExpandDatabase,

@@ -131,7 +136,7 @@ fn line_expand(
         delimiter: tt::Delimiter::unspecified(),
         token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
             text: "0u32".into(),
-            span: tt::Span::UNSPECIFIED,
+            span: tt::SpanData::DUMMY,
         }))],
     })
 }

@@ -179,7 +184,7 @@ fn assert_expand(
         token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
             char: ',',
             spacing: tt::Spacing::Alone,
-            span: tt::TokenId::unspecified(),
+            span: tt::SpanData::DUMMY,
         }))],
     };
     let cond = cond.clone();

@@ -446,7 +451,7 @@ fn concat_bytes_expand(
             }
         }
     }
-    let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() };
+    let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::SpanData::DUMMY };
    ExpandResult { value: quote!([#ident]), err }
 }

@@ -494,7 +499,7 @@ fn concat_idents_expand(
             }
         }
     }
-    let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() };
+    let ident = tt::Ident { text: ident.into(), span: tt::SpanData::DUMMY };
     ExpandResult { value: quote!(#ident), err }
 }

@@ -533,15 +538,16 @@ fn include_expand(
     _tt: &tt::Subtree,
 ) -> ExpandResult<tt::Subtree> {
     match db.include_expand(arg_id) {
-        Ok((res, _)) => ExpandResult::ok(res.0.clone()),
+        Ok((res, _)) => ExpandResult::ok(res.as_ref().clone()),
         Err(e) => ExpandResult::new(tt::Subtree::empty(), e),
     }
 }

+// FIXME: Check if this is still needed now after the token map rewrite
 pub(crate) fn include_arg_to_tt(
     db: &dyn ExpandDatabase,
     arg_id: MacroCallId,
-) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
+) -> Result<(triomphe::Arc<tt::Subtree>, FileId), ExpandError> {
     let loc = db.lookup_intern_macro_call(arg_id);
     let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
         panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);

@@ -549,9 +555,12 @@ pub(crate) fn include_arg_to_tt(
     let path = parse_string(&arg.0)?;
     let file_id = relative_file(db, *arg_id, &path, false)?;

-    let (subtree, map) =
-        parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
-    Ok((triomphe::Arc::new((subtree, map)), file_id))
+    let subtree = parse_to_token_tree(
+        &db.file_text(file_id),
+        SpanAnchor { file_id: file_id.into(), ast_id: ROOT_ERASED_FILE_AST_ID },
+    )
+    .ok_or(mbe::ExpandError::ConversionError)?;
+    Ok((triomphe::Arc::new(subtree), file_id))
 }

 fn include_bytes_expand(
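Design note on the hunk above: `include!` pulls in an entire file, so there is no item to anchor to; the tokens are anchored to the file's root node instead, which is exactly what the always-allocated index 0 from the `AstIdMap` change guarantees. The call shape, repeated as a sketch:

    // Anchor an included file's token tree to the file itself (mirrors the
    // hunk above; `ROOT_ERASED_FILE_AST_ID` is arena index 0).
    let anchor = SpanAnchor { file_id: file_id.into(), ast_id: ROOT_ERASED_FILE_AST_ID };
    let subtree = parse_to_token_tree(&db.file_text(file_id), anchor)
        .ok_or(mbe::ExpandError::ConversionError)?;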
@@ -568,7 +577,7 @@ fn include_bytes_expand(
         delimiter: tt::Delimiter::unspecified(),
         token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
             text: r#"b"""#.into(),
-            span: tt::TokenId::unspecified(),
+            span: tt::SpanData::DUMMY,
         }))],
     };
     ExpandResult::ok(res)

@@ -1,22 +1,25 @@
 //! Defines database & queries for macro expansion.

-use base_db::{salsa, CrateId, Edition, SourceDatabase};
+use base_db::{
+    salsa,
+    span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
+    CrateId, Edition, SourceDatabase,
+};
 use either::Either;
 use limit::Limit;
-use mbe::{syntax_node_to_token_tree, ValueResult};
-use rustc_hash::FxHashSet;
+use mbe::{map_from_syntax_node, syntax_node_to_token_tree, ValueResult};
 use syntax::{
     ast::{self, HasAttrs, HasDocComments},
-    AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
+    AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, TextSize, T,
 };
 use triomphe::Arc;

 use crate::{
     ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
-    builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
+    builtin_fn_macro::EagerExpander, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
     BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
     ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
-    MacroDefKind, MacroFile, ProcMacroExpander,
+    MacroDefKind, MacroFile, ProcMacroExpander, SpanMap, SyntaxContext, SyntaxContextId,
 };

 /// Total limit on the number of tokens produced by any macro invocation.

@@ -30,8 +33,7 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
 #[derive(Debug, Clone, Eq, PartialEq)]
 /// Old-style `macro_rules` or the new macros 2.0
 pub struct DeclarativeMacroExpander {
-    pub mac: mbe::DeclarativeMacro,
-    pub def_site_token_map: mbe::TokenMap,
+    pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>,
 }

 impl DeclarativeMacroExpander {

@@ -41,21 +43,14 @@ impl DeclarativeMacroExpander {
                 tt::Subtree::empty(),
                 ExpandError::other(format!("invalid macro definition: {e}")),
             ),
-            None => self.mac.expand(tt).map_err(Into::into),
+            None => self.mac.expand(&tt).map_err(Into::into),
         }
     }
-
-    pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
-        self.mac.map_id_down(token_id)
-    }
-
-    pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
-        self.mac.map_id_up(token_id)
-    }
 }

 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
+    /// Old-style `macro_rules` or the new macros 2.0
     DeclarativeMacro(Arc<DeclarativeMacroExpander>),
     /// Stuff like `line!` and `file!`.
     BuiltIn(BuiltinFnLikeExpander),
@ -69,31 +64,6 @@ pub enum TokenExpander {
|
||||||
ProcMacro(ProcMacroExpander),
|
ProcMacro(ProcMacroExpander),
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME: Get rid of these methods
|
|
||||||
impl TokenExpander {
|
|
||||||
pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
|
|
||||||
match self {
|
|
||||||
TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
|
|
||||||
TokenExpander::BuiltIn(..)
|
|
||||||
| TokenExpander::BuiltInEager(..)
|
|
||||||
| TokenExpander::BuiltInAttr(..)
|
|
||||||
| TokenExpander::BuiltInDerive(..)
|
|
||||||
| TokenExpander::ProcMacro(..) => id,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
|
|
||||||
match self {
|
|
||||||
TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
|
|
||||||
TokenExpander::BuiltIn(..)
|
|
||||||
| TokenExpander::BuiltInEager(..)
|
|
||||||
| TokenExpander::BuiltInAttr(..)
|
|
||||||
| TokenExpander::BuiltInDerive(..)
|
|
||||||
| TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
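The deleted `map_id_down`/`map_id_up` plumbing existed only to translate expansion-local `TokenId`s back and forth between call site and definition site. Once tokens carry their spans, an output token copied from the input keeps that span, so "mapping up" degenerates to reading a field. A hypothetical, heavily simplified expander generic over its span type, in the spirit of the new `mbe::DeclarativeMacro<SpanData>` (none of these definitions are the real mbe ones):

    #[derive(Clone, Debug)]
    struct Token<S> {
        text: String,
        span: S, // generic over the span type
    }

    struct DeclarativeMacro<S> {
        _marker: std::marker::PhantomData<S>,
    }

    impl<S: Clone> DeclarativeMacro<S> {
        fn new() -> Self {
            DeclarativeMacro { _marker: std::marker::PhantomData }
        }

        // A toy "expansion" that duplicates its input. Every output token keeps
        // the span of the input token it was copied from, so no TokenId -> range
        // table has to be maintained on the side.
        fn expand(&self, input: &[Token<S>]) -> Vec<Token<S>> {
            input.iter().chain(input.iter()).cloned().collect()
        }
    }

    fn main() {
        let mac = DeclarativeMacro::<u32>::new();
        let input = vec![Token { text: "x".into(), span: 42u32 }];
        let output = mac.expand(&input);
        // Both copies still point at span 42; "map_id_up" is just `.span`.
        assert!(output.iter().all(|t| t.span == 42));
        println!("ok: {} tokens, spans preserved", output.len());
    }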
 #[salsa::query_group(ExpandDatabaseStorage)]
 pub trait ExpandDatabase: SourceDatabase {
     fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
@@ -109,7 +79,7 @@ pub trait ExpandDatabase: SourceDatabase {
     fn parse_macro_expansion(
         &self,
         macro_file: MacroFile,
-    ) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
+    ) -> ExpandResult<(Parse<SyntaxNode>, Arc<SpanMap>)>;

     /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
     /// reason why we use salsa at all.
@@ -118,23 +88,16 @@ pub trait ExpandDatabase: SourceDatabase {
     /// to be incremental.
     #[salsa::interned]
     fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
+    #[salsa::interned]
+    fn intern_syntax_context(&self, ctx: SyntaxContext) -> SyntaxContextId;

-    /// Lowers syntactic macro call to a token tree representation.
-    #[salsa::transparent]
+    /// Lowers syntactic macro call to a token tree representation. That's a firewall
+    /// query, only typing in the macro call itself changes the returned
+    /// subtree.
     fn macro_arg(
         &self,
         id: MacroCallId,
-    ) -> ValueResult<
-        Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
-        Arc<Box<[SyntaxError]>>,
-    >;
-    /// Extracts syntax node, corresponding to a macro call. That's a firewall
-    /// query, only typing in the macro call itself changes the returned
-    /// subtree.
-    fn macro_arg_node(
-        &self,
-        id: MacroCallId,
-    ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
+    ) -> ValueResult<Option<Arc<tt::Subtree>>, Arc<Box<[SyntaxError]>>>;
     /// Fetches the expander for this macro.
     #[salsa::transparent]
     fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
@@ -152,10 +115,7 @@ pub trait ExpandDatabase: SourceDatabase {
     fn include_expand(
         &self,
         arg_id: MacroCallId,
-    ) -> Result<
-        (triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
-        ExpandError,
-    >;
+    ) -> Result<(triomphe::Arc<tt::Subtree>, base_db::FileId), ExpandError>;
     /// Special case of the previous query for procedural macros. We can't LRU
     /// proc macros, since they are not deterministic in general, and
     /// non-determinism breaks salsa in a very, very, very bad way.
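Several of these queries return `mbe::ValueResult`, a value paired with an optional error, which lets a query hand back a best-effort result together with the syntax errors it tolerated instead of failing outright. A minimal stand-in with the `ok`/`new`/`only_err` constructors this diff calls (an assumed shape; the real type may differ in details such as the `Default` bound used here):

    #[derive(Debug)]
    struct ValueResult<V, E> {
        value: V,
        err: Option<E>,
    }

    impl<V: Default, E> ValueResult<V, E> {
        fn ok(value: V) -> Self {
            ValueResult { value, err: None }
        }
        fn new(value: V, err: E) -> Self {
            ValueResult { value, err: Some(err) }
        }
        fn only_err(err: E) -> Self {
            ValueResult { value: V::default(), err: Some(err) }
        }
    }

    fn main() {
        // A query can still produce a usable value while reporting problems,
        // which is what lets macro expansion degrade gracefully on broken input.
        let r: ValueResult<Option<&str>, String> =
            ValueResult::new(Some("partial expansion"), "unbalanced token tree".to_owned());
        println!("value = {:?}, err = {:?}", r.value, r.err);
        let e: ValueResult<Option<&str>, String> =
            ValueResult::only_err("missing token tree".into());
        println!("value = {:?}, err = {:?}", e.value, e.err);
    }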
@@ -181,21 +141,19 @@ pub fn expand_speculative(
     token_to_map: SyntaxToken,
 ) -> Option<(SyntaxNode, SyntaxToken)> {
     let loc = db.lookup_intern_macro_call(actual_macro_call);
-    let token_range = token_to_map.text_range();
+    let file_id = loc.kind.file_id();

     // Build the subtree and token mapping for the speculative args
-    let censor = censor_for_macro_input(&loc, speculative_args);
-    let mut fixups = fixup::fixup_syntax(speculative_args);
-    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
-    let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+    let _censor = censor_for_macro_input(&loc, speculative_args);
+    let mut tt = mbe::syntax_node_to_token_tree(
         speculative_args,
-        fixups.token_map,
-        fixups.next_id,
-        fixups.replace,
-        fixups.append,
+        // we don't leak these spans into any query so its fine to make them absolute
+        SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+        TextSize::new(0),
+        &Default::default(),
     );

-    let (attr_arg, token_id) = match loc.kind {
+    let attr_arg = match loc.kind {
         MacroCallKind::Attr { invoc_attr_index, .. } => {
             let attr = if loc.def.is_attribute_derive() {
                 // for pseudo-derive expansion we actually pass the attribute itself only
@@ -210,48 +168,27 @@ pub fn expand_speculative(
             }?;
             match attr.token_tree() {
                 Some(token_tree) => {
-                    let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
-                    tree.delimiter = tt::Delimiter::unspecified();
+                    let mut tree = syntax_node_to_token_tree(
+                        token_tree.syntax(),
+                        SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+                        TextSize::new(0),
+                        &Default::default(),
+                    );
+                    tree.delimiter = tt::Delimiter::UNSPECIFIED;

-                    let shift = mbe::Shift::new(&tt);
-                    shift.shift_all(&mut tree);
-
-                    let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
-                        let attr_input_start =
-                            token_tree.left_delimiter_token()?.text_range().start();
-                        let range = token_range.checked_sub(attr_input_start)?;
-                        let token_id = shift.shift(map.token_by_range(range)?);
-                        Some(token_id)
-                    } else {
-                        None
-                    };
-                    (Some(tree), token_id)
+                    Some(tree)
                 }
-                _ => (None, None),
-            }
-        }
-        _ => (None, None),
-    };
-    let token_id = match token_id {
-        Some(token_id) => token_id,
-        // token wasn't inside an attribute input so it has to be in the general macro input
-        None => {
-            let range = token_range.checked_sub(speculative_args.text_range().start())?;
-            let token_id = spec_args_tmap.token_by_range(range)?;
-            match loc.def.kind {
-                MacroDefKind::Declarative(it) => {
-                    db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
-                }
-                _ => token_id,
+                _ => None,
             }
         }
+        _ => None,
     };

     // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
     // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
-    let mut speculative_expansion = match loc.def.kind {
+    let speculative_expansion = match loc.def.kind {
         MacroDefKind::ProcMacro(expander, ..) => {
-            tt.delimiter = tt::Delimiter::unspecified();
+            tt.delimiter = tt::Delimiter::UNSPECIFIED;
             expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
         }
         MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
@@ -260,7 +197,17 @@ pub fn expand_speculative(
         MacroDefKind::BuiltInDerive(expander, ..) => {
             // this cast is a bit sus, can we avoid losing the typedness here?
             let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
-            expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
+            expander.expand(
+                db,
+                actual_macro_call,
+                &adt,
+                &map_from_syntax_node(
+                    speculative_args,
+                    // we don't leak these spans into any query so its fine to make them absolute
+                    SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+                    TextSize::new(0),
+                ),
+            )
         }
         MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
         MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
@@ -271,12 +218,14 @@ pub fn expand_speculative(
     };

     let expand_to = macro_expand_to(db, actual_macro_call);
-    fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
-    let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
+    let (node, rev_tmap) = token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to);

     let syntax_node = node.syntax_node();
     let token = rev_tmap
-        .ranges_by_token(token_id, token_to_map.kind())
+        .ranges_with_span(tt::SpanData {
+            range: token_to_map.text_range(),
+            anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+        })
         .filter_map(|range| syntax_node.covering_element(range).into_token())
         .min_by_key(|t| {
             // prefer tokens of the same kind and text
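Speculative expansion now maps the cursor token into the expansion by value rather than by id: it builds the `SpanData` the token would have carried and asks the reverse map for every output range tagged with exactly that span. A toy version of that reverse lookup; the `Vec<(range, span)>` layout mirrors the `span_map` field visible later in this diff, but everything else is a simplified stand-in:

    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    struct SpanData {
        anchor: u32,       // stand-in for SpanAnchor
        range: (u32, u32), // stand-in for TextRange within the anchor
    }

    struct TokenMap {
        // (range in the expansion output, span it was produced from)
        span_map: Vec<((u32, u32), SpanData)>,
    }

    impl TokenMap {
        // All output ranges whose tokens originate from `span`.
        fn ranges_with_span(&self, span: SpanData) -> impl Iterator<Item = (u32, u32)> + '_ {
            self.span_map.iter().filter(move |(_, s)| *s == span).map(|(r, _)| *r)
        }
    }

    fn main() {
        let query = SpanData { anchor: 1, range: (4, 9) };
        let map = TokenMap {
            span_map: vec![
                ((0, 5), SpanData { anchor: 1, range: (0, 3) }),
                ((6, 11), query),  // the input token shows up here in the output
                ((14, 19), query), // ... and, duplicated by the macro, here too
            ],
        };
        let hits: Vec<_> = map.ranges_with_span(query).collect();
        assert_eq!(hits, vec![(6, 11), (14, 19)]);
        println!("token maps to output ranges {hits:?}");
    }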
@@ -293,7 +242,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {

 fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
     match file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
+        HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
         HirFileIdRepr::MacroFile(macro_file) => {
             db.parse_macro_expansion(macro_file).value.0.syntax_node()
         }
@@ -315,7 +264,7 @@ fn parse_or_expand_with_err(
 fn parse_macro_expansion(
     db: &dyn ExpandDatabase,
     macro_file: MacroFile,
-) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
+) -> ExpandResult<(Parse<SyntaxNode>, Arc<SpanMap>)> {
     let _p = profile::span("parse_macro_expansion");
     let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);

@@ -324,7 +273,7 @@ fn parse_macro_expansion(
     tracing::debug!("expanded = {}", tt.as_debug_string());
     tracing::debug!("kind = {:?}", expand_to);

-    let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
+    let (parse, rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to);

     ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
 }
@@ -340,48 +289,119 @@ fn parse_macro_expansion_error(
 fn macro_arg(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-) -> ValueResult<
-    Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
-    Arc<Box<[SyntaxError]>>,
-> {
-    let loc = db.lookup_intern_macro_call(id);
-    if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
-        return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
+) -> ValueResult<Option<Arc<tt::Subtree>>, Arc<Box<[SyntaxError]>>> {
+    let mismatched_delimiters = |arg: &SyntaxNode| {
+        let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
+        let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
+        let well_formed_tt =
+            matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
+        if !well_formed_tt {
+            // Don't expand malformed (unbalanced) macro invocations. This is
+            // less than ideal, but trying to expand unbalanced macro calls
+            // sometimes produces pathological, deeply nested code which breaks
+            // all kinds of things.
+            //
+            // Some day, we'll have explicit recursion counters for all
+            // recursive things, at which point this code might be removed.
+            cov_mark::hit!(issue9358_bad_macro_stack_overflow);
+            Some(Arc::new(Box::new([SyntaxError::new(
+                "unbalanced token tree".to_owned(),
+                arg.text_range(),
+            )]) as Box<[_]>))
+        } else {
+            None
         }

-    let ValueResult { value, err } = db.macro_arg_node(id);
-    let Some(arg) = value else {
-        return ValueResult { value: None, err };
     };
+    let loc = db.lookup_intern_macro_call(id);
+    if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
+        .then(|| loc.eager.as_deref())
+        .flatten()
+    {
+        ValueResult::ok(Some(Arc::new(arg.0.clone())))
+    } else {
+        let (parse, map) = match loc.kind.file_id().repr() {
+            HirFileIdRepr::FileId(file_id) => {
+                (db.parse(file_id).to_syntax(), Arc::new(Default::default()))
+            }
+            HirFileIdRepr::MacroFile(macro_file) => {
+                let (parse, map) = db.parse_macro_expansion(macro_file).value;
+                (parse, map)
+            }
+        };
+        let root = parse.syntax_node();

-    let node = SyntaxNode::new_root(arg);
-    let censor = censor_for_macro_input(&loc, &node);
-    let mut fixups = fixup::fixup_syntax(&node);
-    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
-    let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
-        &node,
-        fixups.token_map,
-        fixups.next_id,
-        fixups.replace,
-        fixups.append,
+        let (syntax, offset, ast_id) = match loc.kind {
+            MacroCallKind::FnLike { ast_id, .. } => {
+                let node = &ast_id.to_ptr(db).to_node(&root);
+                let offset = node.syntax().text_range().start();
+                match node.token_tree().map(|it| it.syntax().clone()) {
+                    Some(tt) => {
+                        if let Some(e) = mismatched_delimiters(&tt) {
+                            return ValueResult::only_err(e);
+                        }
+                        (tt, offset, ast_id.value.erase())
+                    }
+                    None => {
+                        return ValueResult::only_err(Arc::new(Box::new([
+                            SyntaxError::new_at_offset("missing token tree".to_owned(), offset),
+                        ])));
+                    }
+                }
+            }
+            MacroCallKind::Derive { ast_id, .. } => {
+                let syntax_node = ast_id.to_ptr(db).to_node(&root).syntax().clone();
+                let offset = syntax_node.text_range().start();
+                (syntax_node, offset, ast_id.value.erase())
+            }
+            MacroCallKind::Attr { ast_id, .. } => {
+                let syntax_node = ast_id.to_ptr(db).to_node(&root).syntax().clone();
+                let offset = syntax_node.text_range().start();
+                (syntax_node, offset, ast_id.value.erase())
+            }
+        };
+        let censor = censor_for_macro_input(&loc, &syntax);
+        // let mut fixups = fixup::fixup_syntax(&node);
+        // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
+        // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+        //     &node,
+        //     fixups.token_map,
+        //     fixups.next_id,
+        //     fixups.replace,
+        //     fixups.append,
+        // );
+        let mut tt = mbe::syntax_node_to_token_tree_censored(
+            &syntax,
+            SpanAnchor { file_id: loc.kind.file_id(), ast_id },
+            offset,
+            &map,
+            censor,
         );

         if loc.def.is_proc_macro() {
             // proc macros expect their inputs without parentheses, MBEs expect it with them included
-    tt.delimiter = tt::Delimiter::unspecified();
+            tt.delimiter = tt::Delimiter::UNSPECIFIED;
+        }

+        if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
+            match parse.errors() {
+                [] => ValueResult::ok(Some(Arc::new(tt))),
+                errors => ValueResult::new(
+                    Some(Arc::new(tt)),
+                    // Box::<[_]>::from(res.errors()), not stable yet
+                    Arc::new(errors.to_vec().into_boxed_slice()),
+                ),
+            }
+        } else {
+            ValueResult::ok(Some(Arc::new(tt)))
         }
-    let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
-    match err {
-        Some(err) => ValueResult::new(val, err),
-        None => ValueResult::ok(val),
     }
 }

+// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
 /// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
 /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
 /// - attributes expect the invoking attribute to be stripped
-fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
+fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Vec<SyntaxNode> {
     // FIXME: handle `cfg_attr`
     (|| {
         let censor = match loc.kind {
@@ -417,103 +437,56 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
     .unwrap_or_default()
 }

-fn macro_arg_node(
-    db: &dyn ExpandDatabase,
-    id: MacroCallId,
-) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
-    let err = || -> Arc<Box<[_]>> {
-        Arc::new(Box::new([SyntaxError::new_at_offset(
-            "invalid macro call".to_owned(),
-            syntax::TextSize::from(0),
-        )]))
-    };
-    let loc = db.lookup_intern_macro_call(id);
-    let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
-        let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
-            Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
-        } else {
-            loc.kind
-                .arg(db)
-                .and_then(|arg| ast::TokenTree::cast(arg.value))
-                .map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
-        };
-        match res {
-            Some(res) if res.errors().is_empty() => res.syntax_node(),
-            Some(res) => {
-                return ValueResult::new(
-                    Some(res.syntax_node().green().into()),
-                    // Box::<[_]>::from(res.errors()), not stable yet
-                    Arc::new(res.errors().to_vec().into_boxed_slice()),
-                );
-            }
-            None => return ValueResult::only_err(err()),
-        }
-    } else {
-        match loc.kind.arg(db) {
-            Some(res) => res.value,
-            None => return ValueResult::only_err(err()),
-        }
-    };
-    if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
-        let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
-        let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
-        let well_formed_tt =
-            matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
-        if !well_formed_tt {
-            // Don't expand malformed (unbalanced) macro invocations. This is
-            // less than ideal, but trying to expand unbalanced macro calls
-            // sometimes produces pathological, deeply nested code which breaks
-            // all kinds of things.
-            //
-            // Some day, we'll have explicit recursion counters for all
-            // recursive things, at which point this code might be removed.
-            cov_mark::hit!(issue9358_bad_macro_stack_overflow);
-            return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
-                "unbalanced token tree".to_owned(),
-                arg.text_range(),
-            )])));
-        }
-    }
-    ValueResult::ok(Some(arg.green().into()))
-}
-
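`macro_arg` now picks an anchor node per call kind and lowers the input with an `offset`, so every span is stored relative to that anchor. Recovering an absolute file position is then "position of the anchor node" plus "relative range", which is what keeps spans stable when unrelated code above the macro call moves. A small arithmetic sketch of that idea, with hypothetical, simplified types:

    #[derive(Copy, Clone, Debug)]
    struct TextRange {
        start: u32,
        end: u32,
    }

    impl TextRange {
        fn shifted(self, by: u32) -> TextRange {
            TextRange { start: self.start + by, end: self.end + by }
        }
    }

    fn main() {
        // The macro call node starts at offset 120 in the file; a token of its
        // input occupies bytes 128..131. We store the range relative to the node:
        let anchor_start = 120u32;
        let absolute = TextRange { start: 128, end: 131 };
        let relative =
            TextRange { start: absolute.start - anchor_start, end: absolute.end - anchor_start };

        // Someone inserts 10 bytes of code *above* the macro call. The anchor
        // node moves, the stored relative span does not, and resolution still works:
        let new_anchor_start = anchor_start + 10;
        let resolved = relative.shifted(new_anchor_start);
        assert_eq!((resolved.start, resolved.end), (138, 141));
        println!("resolved to {}..{}", resolved.start, resolved.end);
    }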
 fn decl_macro_expander(
     db: &dyn ExpandDatabase,
     def_crate: CrateId,
     id: AstId<ast::Macro>,
 ) -> Arc<DeclarativeMacroExpander> {
     let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
-    let (mac, def_site_token_map) = match id.to_node(db) {
+    let (root, map) = match id.file_id.repr() {
+        HirFileIdRepr::FileId(file_id) => {
+            (db.parse(file_id).syntax_node(), Arc::new(Default::default()))
+        }
+        HirFileIdRepr::MacroFile(macro_file) => {
+            let (parse, map) = db.parse_macro_expansion(macro_file).value;
+            (parse.syntax_node(), map)
+        }
+    };
+    let mac = match id.to_ptr(db).to_node(&root) {
         ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
             Some(arg) => {
-                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+                let tt = mbe::syntax_node_to_token_tree(
+                    arg.syntax(),
+                    SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() },
+                    macro_rules.syntax().text_range().start(),
+                    &map,
+                );
                 let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
-                (mac, def_site_token_map)
+                mac
             }
-            None => (
-                mbe::DeclarativeMacro::from_err(
+            None => mbe::DeclarativeMacro::from_err(
                 mbe::ParseError::Expected("expected a token tree".into()),
                 is_2021,
             ),
-                Default::default(),
-            ),
         },
         ast::Macro::MacroDef(macro_def) => match macro_def.body() {
             Some(arg) => {
-                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+                let tt = mbe::syntax_node_to_token_tree(
+                    arg.syntax(),
+                    SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() },
+                    macro_def.syntax().text_range().start(),
+                    &map,
+                );
                 let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
-                (mac, def_site_token_map)
+                mac
             }
-            None => (
-                mbe::DeclarativeMacro::from_err(
+            None => mbe::DeclarativeMacro::from_err(
                 mbe::ParseError::Expected("expected a token tree".into()),
                 is_2021,
             ),
-                Default::default(),
-            ),
         },
     };
-    Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
+    Arc::new(DeclarativeMacroExpander { mac })
 }

 fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
@@ -536,25 +509,37 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
     let ExpandResult { value: tt, mut err } = match loc.def.kind {
         MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
         MacroDefKind::BuiltInDerive(expander, ..) => {
-            let arg = db.macro_arg_node(id).value.unwrap();
+            // FIXME: add firewall query for this?
+            let hir_file_id = loc.kind.file_id();
+            let (root, map) = match hir_file_id.repr() {
+                HirFileIdRepr::FileId(file_id) => (db.parse(file_id).syntax_node(), None),
+                HirFileIdRepr::MacroFile(macro_file) => {
+                    let (parse, map) = db.parse_macro_expansion(macro_file).value;
+                    (parse.syntax_node(), Some(map))
+                }
+            };
+            let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
+            let node = ast_id.to_ptr(db).to_node(&root);

-            let node = SyntaxNode::new_root(arg);
-            let censor = censor_for_macro_input(&loc, &node);
-            let mut fixups = fixup::fixup_syntax(&node);
-            fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
-            let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
+            // FIXME: we might need to remove the spans from the input to the derive macro here
+            let _censor = censor_for_macro_input(&loc, node.syntax());
+            let _t;
+            expander.expand(
+                db,
+                id,
                 &node,
-                fixups.token_map,
-                fixups.next_id,
-                fixups.replace,
-                fixups.append,
+                match &map {
+                    Some(map) => map,
+                    None => {
+                        _t = map_from_syntax_node(
+                            node.syntax(),
+                            SpanAnchor { file_id: hir_file_id, ast_id: ast_id.value.erase() },
+                            node.syntax().text_range().start(),
                         );
-            // this cast is a bit sus, can we avoid losing the typedness here?
-            let adt = ast::Adt::cast(node).unwrap();
-            let mut res = expander.expand(db, id, &adt, &tmap);
-            fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
-            res
+                        &_t
+                    }
+                },
+            )
         }
         _ => {
             let ValueResult { value, err } = db.macro_arg(id);
@@ -570,8 +555,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
             };
     };

-    let (arg, arg_tm, undo_info) = &*macro_arg;
-    let mut res = match loc.def.kind {
+    let arg = &*macro_arg;
+    match loc.def.kind {
         MacroDefKind::Declarative(id) => {
             db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
         }
@@ -583,11 +568,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
         // will end up going through here again, whereas we want to just want to inspect the raw input.
         // As such we just return the input subtree here.
         MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
-            let mut arg = arg.clone();
-            fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
-
             return ExpandResult {
-                value: Arc::new(arg),
+                value: Arc::new(arg.clone()),
                 err: err.map(|err| {
                     let mut buf = String::new();
                     for err in &**err {
@@ -603,9 +585,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
         MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
         MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
         _ => unreachable!(),
-    };
-    fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
-    res
+    }
     }
     };

@@ -626,6 +606,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
 }

 fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
+    // FIXME: Syntax fix ups
     let loc = db.lookup_intern_macro_call(id);
     let Some(macro_arg) = db.macro_arg(id).value else {
         return ExpandResult {
@@ -639,32 +620,24 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
         };
     };

-    let (arg_tt, arg_tm, undo_info) = &*macro_arg;
-
     let expander = match loc.def.kind {
         MacroDefKind::ProcMacro(expander, ..) => expander,
         _ => unreachable!(),
     };

     let attr_arg = match &loc.kind {
-        MacroCallKind::Attr { attr_args, .. } => {
-            let mut attr_args = attr_args.0.clone();
-            mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
-            Some(attr_args)
-        }
+        MacroCallKind::Attr { attr_args, .. } => Some(&**attr_args),
         _ => None,
     };

-    let ExpandResult { value: mut tt, err } =
-        expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
+    let ExpandResult { value: tt, err } =
+        expander.expand(db, loc.def.krate, loc.krate, &macro_arg, attr_arg);

     // Set a hard limit for the expanded tt
     if let Err(value) = check_tt_count(&tt) {
         return value;
     }

-    fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
-
     ExpandResult { value: Arc::new(tt), err }
 }

@@ -677,9 +650,10 @@ fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
 }

 fn token_tree_to_syntax_node(
+    db: &dyn ExpandDatabase,
     tt: &tt::Subtree,
     expand_to: ExpandTo,
-) -> (Parse<SyntaxNode>, mbe::TokenMap) {
+) -> (Parse<SyntaxNode>, SpanMap) {
     let entry_point = match expand_to {
         ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
         ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
@@ -687,7 +661,18 @@ fn token_tree_to_syntax_node(
         ExpandTo::Type => mbe::TopEntryPoint::Type,
         ExpandTo::Expr => mbe::TopEntryPoint::Expr,
     };
-    mbe::token_tree_to_syntax_node(tt, entry_point)
+    let mut tm = mbe::token_tree_to_syntax_node(tt, entry_point);
+    // now what the hell is going on here
+    tm.1.span_map.sort_by(|(_, a), (_, b)| {
+        a.anchor.file_id.cmp(&b.anchor.file_id).then_with(|| {
+            let map = db.ast_id_map(a.anchor.file_id);
+            map.get_raw(a.anchor.ast_id)
+                .text_range()
+                .start()
+                .cmp(&map.get_raw(b.anchor.ast_id).text_range().start())
+        })
+    });
+    tm
 }

 fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
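The sort at the end of `token_tree_to_syntax_node` orders the `(range, span)` entries by anchor file and then by the anchor node's position, which plausibly makes all entries for one anchor contiguous and range-scannable. A toy version of that sort-then-lookup pattern; the shapes are assumed, and the `ast_id` indirection is replaced by a precomputed anchor position:

    #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
    struct AnchorKey {
        file_id: u32,
        anchor_start: u32, // in the real code this comes from db.ast_id_map(..)
    }

    fn main() {
        let mut span_map: Vec<((u32, u32), AnchorKey)> = vec![
            ((10, 12), AnchorKey { file_id: 2, anchor_start: 40 }),
            ((0, 4), AnchorKey { file_id: 1, anchor_start: 100 }),
            ((5, 9), AnchorKey { file_id: 1, anchor_start: 20 }),
        ];
        // Same shape as the sort in the diff: file id first, anchor position second.
        span_map.sort_by(|(_, a), (_, b)| {
            a.file_id.cmp(&b.file_id).then_with(|| a.anchor_start.cmp(&b.anchor_start))
        });
        // All entries for one anchor are now contiguous and binary-searchable.
        let key = AnchorKey { file_id: 1, anchor_start: 100 };
        let start = span_map.partition_point(|(_, k)| *k < key);
        let end = span_map.partition_point(|(_, k)| *k <= key);
        println!("entries for {key:?}: {:?}", &span_map[start..end]);
    }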
@@ -18,8 +18,11 @@
 //!
 //!
 //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
-use base_db::CrateId;
-use rustc_hash::{FxHashMap, FxHashSet};
+use base_db::{
+    span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
+    CrateId,
+};
+use rustc_hash::FxHashMap;
 use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
 use triomphe::Arc;

@@ -29,7 +32,7 @@ use crate::{
     hygiene::Hygiene,
     mod_path::ModPath,
     EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
-    MacroCallLoc, MacroDefId, MacroDefKind,
+    MacroCallLoc, MacroDefId, MacroDefKind, SpanMap,
 };

 pub fn expand_eager_macro_input(
@@ -54,15 +57,15 @@ pub fn expand_eager_macro_input(
         eager: None,
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
     });
-    let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
+    let ExpandResult { value: (arg_exp, _arg_exp_map), err: parse_err } =
         db.parse_macro_expansion(arg_id.as_macro_file());
     // we need this map here as the expansion of the eager input fake file loses whitespace ...
-    let mut ws_mapping = FxHashMap::default();
-    if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
-        ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
-            Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
-        }));
-    }
+    // let mut ws_mapping = FxHashMap::default();
+    // if let Some((tm)) = db.macro_arg(arg_id).value.as_deref() {
+    //     ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
+    //         Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
+    //     }));
+    // }

     let ExpandResult { value: expanded_eager_input, err } = {
         eager_macro_recur(
@@ -75,49 +78,55 @@ pub fn expand_eager_macro_input(
     };
     let err = parse_err.or(err);

-    let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
+    let Some((expanded_eager_input, _mapping)) = expanded_eager_input else {
         return ExpandResult { value: None, err };
     };

-    let (mut subtree, expanded_eager_input_token_map) =
-        mbe::syntax_node_to_token_tree(&expanded_eager_input);
+    let mut subtree = mbe::syntax_node_to_token_tree(
+        &expanded_eager_input,
+        // is this right?
+        SpanAnchor { file_id: arg_id.as_file(), ast_id: ROOT_ERASED_FILE_AST_ID },
+        TextSize::new(0),
+        // FIXME: Spans! `eager_macro_recur` needs to fill out a span map for us
+        &Default::default(),
+    );

-    let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
-        let mut ids_used = FxHashSet::default();
-        let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
-        // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
-        // so we need to remap them to the original input of the eager macro.
-        subtree.visit_ids(&mut |id| {
-            // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
+    // let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
+    //     let mut ids_used = FxHashSet::default();
+    //     let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
+    //     // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
+    //     // so we need to remap them to the original input of the eager macro.
+    //     subtree.visit_ids(&mut |id| {
+    //         // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix

-            if let Some(range) = expanded_eager_input_token_map
-                .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
-            {
-                // remap from expanded eager input to eager input expansion
-                if let Some(og_range) = mapping.get(&range) {
-                    // remap from eager input expansion to original eager input
-                    if let Some(&og_range) = ws_mapping.get(og_range) {
-                        if let Some(og_token) = og_tmap.token_by_range(og_range) {
-                            ids_used.insert(og_token);
-                            return og_token;
-                        }
-                    }
-                }
-            }
-            tt::TokenId::UNSPECIFIED
-        });
-        og_tmap.filter(|id| ids_used.contains(&id));
-        og_tmap
-    } else {
-        Default::default()
-    };
-    subtree.delimiter = crate::tt::Delimiter::unspecified();
+    //         if let Some(range) = expanded_eager_input_token_map
+    //             .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
+    //         {
+    //             // remap from expanded eager input to eager input expansion
+    //             if let Some(og_range) = mapping.get(&range) {
+    //                 // remap from eager input expansion to original eager input
+    //                 if let Some(&og_range) = ws_mapping.get(og_range) {
+    //                     if let Some(og_token) = og_tmap.token_by_range(og_range) {
+    //                         ids_used.insert(og_token);
+    //                         return og_token;
+    //                     }
+    //                 }
+    //             }
+    //         }
+    //         tt::TokenId::UNSPECIFIED
+    //     });
+    //     og_tmap.filter(|id| ids_used.contains(&id));
+    //     og_tmap
+    // } else {
+    //     Default::default()
+    // };
+    subtree.delimiter = crate::tt::Delimiter::UNSPECIFIED;

     let loc = MacroCallLoc {
         def,
         krate,
         eager: Some(Box::new(EagerCallInfo {
-            arg: Arc::new((subtree, og_tmap)),
+            arg: Arc::new((subtree,)),
             arg_id,
             error: err.clone(),
         })),
@@ -132,7 +141,7 @@ fn lazy_expand(
     def: &MacroDefId,
     macro_call: InFile<ast::MacroCall>,
     krate: CrateId,
-) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
+) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<SpanMap>)> {
     let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);

     let expand_to = ExpandTo::from_call_site(&macro_call.value);
@@ -214,19 +223,19 @@ fn eager_macro_recur(
                 let ExpandResult { value, err: err2 } =
                     db.parse_macro_expansion(call_id.as_macro_file());

-                if let Some(tt) = call.token_tree() {
-                    let call_tt_start = tt.syntax().text_range().start();
-                    let call_start =
-                        apply_offset(call.syntax().text_range().start(), offset);
-                    if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
-                        mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
-                            value
-                                .1
-                                .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
-                                .map(|r| (r + call_start, range + call_tt_start))
-                        }));
-                    }
-                }
+                // if let Some(tt) = call.token_tree() {
+                //     let call_tt_start = tt.syntax().text_range().start();
+                //     let call_start =
+                //         apply_offset(call.syntax().text_range().start(), offset);
+                //     if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
+                //         mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+                //             value
+                //                 .1
+                //                 .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
+                //                 .map(|r| (r + call_start, range + call_tt_start))
+                //         }));
+                //     }
+                // }

                 ExpandResult {
                     value: Some(value.0.syntax_node().clone_for_update()),
@@ -241,13 +250,8 @@ fn eager_macro_recur(
             | MacroDefKind::BuiltInAttr(..)
             | MacroDefKind::BuiltInDerive(..)
             | MacroDefKind::ProcMacro(..) => {
-                let ExpandResult { value: (parse, tm), err } =
+                let ExpandResult { value: (parse, _tm), err } =
                     lazy_expand(db, &def, curr.with_value(call.clone()), krate);
-                let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
-                    Some(db.decl_macro_expander(def.krate, ast_id))
-                } else {
-                    None
-                };

                 // replace macro inside
                 let hygiene = Hygiene::new(db, parse.file_id);
@@ -261,24 +265,29 @@ fn eager_macro_recur(
                 );
                 let err = err.or(error);

-                if let Some(tt) = call.token_tree() {
-                    let call_tt_start = tt.syntax().text_range().start();
-                    let call_start = apply_offset(call.syntax().text_range().start(), offset);
-                    if let Some((_tt, arg_map, _)) = parse
-                        .file_id
-                        .macro_file()
-                        .and_then(|id| db.macro_arg(id.macro_call_id).value)
-                        .as_deref()
-                    {
-                        mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
-                            tm.first_range_by_token(
-                                decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
-                                syntax::SyntaxKind::TOMBSTONE,
-                            )
-                            .map(|r| (r + call_start, range + call_tt_start))
-                        }));
-                    }
-                }
+                // if let Some(tt) = call.token_tree() {
+                //     let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
+                //         Some(db.decl_macro_expander(def.krate, ast_id))
+                //     } else {
+                //         None
+                //     };
+                //     let call_tt_start = tt.syntax().text_range().start();
+                //     let call_start = apply_offset(call.syntax().text_range().start(), offset);
+                //     if let Some((_tt, arg_map, _)) = parse
+                //         .file_id
+                //         .macro_file()
+                //         .and_then(|id| db.macro_arg(id.macro_call_id).value)
+                //         .as_deref()
+                //     {
+                //         mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+                //             tm.first_range_by_token(
+                //                 decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
+                //                 syntax::SyntaxKind::TOMBSTONE,
+                //             )
+                //             .map(|r| (r + call_start, range + call_tt_start))
+                //         }));
+                //     }
+                // }
                 // FIXME: Do we need to re-use _m here?
                 ExpandResult { value: value.map(|(n, _m)| n), err }
             }
@@ -5,18 +5,16 @@
 use base_db::CrateId;
 use db::TokenExpander;
 use either::Either;
-use mbe::Origin;
 use syntax::{
     ast::{self, HasDocComments},
-    AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
+    AstNode, SyntaxNode, TextRange, TextSize,
 };
 use triomphe::Arc;

 use crate::{
     db::{self, ExpandDatabase},
-    fixup,
     name::{AsName, Name},
-    HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
+    HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, SpanMap,
 };

 #[derive(Clone, Debug)]
@@ -50,23 +48,25 @@ impl Hygiene {
         Either::Left(name_ref.as_name())
     }

-    pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
-        let mut token = path.syntax().first_token()?.text_range();
+    pub fn local_inner_macros(&self, _db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
+        let mut _token = path.syntax().first_token()?.text_range();
         let frames = self.frames.as_ref()?;
-        let mut current = &frames.0;
+        let mut _current = &frames.0;

-        loop {
-            let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
-            if origin == Origin::Def {
-                return if current.local_inner {
-                    frames.root_crate(db, path.syntax())
-                } else {
-                    None
-                };
-            }
-            current = current.call_site.as_ref()?;
-            token = mapped.value;
-        }
+        // FIXME: Hygiene ...
+        return None;
+        // loop {
+        //     let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
+        //     if origin == Origin::Def {
+        //         return if current.local_inner {
+        //             frames.root_crate(db, path.syntax())
+        //         } else {
+        //             None
+        //         };
+        //     }
+        //     current = current.call_site.as_ref()?;
+        //     token = mapped.value;
+        // }
     }
 }

@@ -92,31 +92,33 @@ impl HygieneFrames {
         HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
     }

-    fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
-        let mut token = node.first_token()?.text_range();
-        let mut result = self.0.krate;
-        let mut current = self.0.clone();
+    fn root_crate(&self, _db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
+        let mut _token = node.first_token()?.text_range();
+        let mut _result = self.0.krate;
+        let mut _current = self.0.clone();

-        while let Some((mapped, origin)) =
-            current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
-        {
-            result = current.krate;
+        return None;

-            let site = match origin {
-                Origin::Def => &current.def_site,
-                Origin::Call => &current.call_site,
-            };
+        // while let Some((mapped, origin)) =
+        //     current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
+        // {
+        //     result = current.krate;

-            let site = match site {
-                None => break,
-                Some(it) => it,
-            };
+        //     let site = match origin {
+        //         Origin::Def => &current.def_site,
+        //         Origin::Call => &current.call_site,
+        //     };

-            current = site.clone();
-            token = mapped.value;
-        }
+        //     let site = match site {
+        //         None => break,
+        //         Some(it) => it,
+        //     };

-        result
+        //     current = site.clone();
+        //     token = mapped.value;
+        // }
+
+        //     result
     }
 }

@@ -127,45 +129,18 @@ struct HygieneInfo {
     attr_input_or_mac_def_start: Option<InFile<TextSize>>,

     macro_def: TokenExpander,
-    macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
-    macro_arg_shift: mbe::Shift,
-    exp_map: Arc<mbe::TokenMap>,
+    macro_arg: Arc<crate::tt::Subtree>,
+    exp_map: Arc<SpanMap>,
 }

 impl HygieneInfo {
-    fn map_ident_up(
+    fn _map_ident_up(
         &self,
-        db: &dyn ExpandDatabase,
-        token: TextRange,
-    ) -> Option<(InFile<TextRange>, Origin)> {
-        let token_id = self.exp_map.token_by_range(token)?;
-        let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
-
-        let loc = db.lookup_intern_macro_call(self.file.macro_call_id);
-
-        let (token_map, tt) = match &loc.kind {
-            MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
-                Some(unshifted) => {
-                    token_id = unshifted;
-                    (&attr_args.1, self.attr_input_or_mac_def_start?)
-                }
-                None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
-            },
-            _ => match origin {
-                mbe::Origin::Call => {
-                    (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
-                }
-                mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
-                    (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
-                        (&expander.def_site_token_map, *tt)
-                    }
-                    _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
-                },
-            },
-        };
-
-        let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
-        Some((tt.with_value(range + tt.value), origin))
+        _db: &dyn ExpandDatabase,
+        _token: TextRange,
+    ) -> Option<InFile<TextRange>> {
+        // self.exp_map.token_by_range(token).map(|span| InFile::new(span.anchor, span.range))
+        None
     }
 }

@@ -197,18 +172,13 @@ fn make_hygiene_info(
     let macro_def = db.macro_expander(loc.def);
     let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
     let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
-        Arc::new((
-            tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
-            Default::default(),
-            Default::default(),
-        ))
+        Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() })
     });

     HygieneInfo {
         file: macro_file,
         attr_input_or_mac_def_start: attr_input_or_mac_def
             .map(|it| it.map(|tt| tt.syntax().text_range().start())),
-        macro_arg_shift: mbe::Shift::new(&macro_arg.0),
         macro_arg,
         macro_def,
         exp_map,
@@ -18,21 +18,13 @@ pub mod quote;
 pub mod eager;
 pub mod mod_path;
 pub mod attrs;
-mod fixup;
-
-use mbe::TokenMap;
-pub use mbe::{Origin, ValueResult};
-
-use ::tt::token_id as tt;
+// mod fixup;
 
 use triomphe::Arc;
 
 use std::{fmt, hash::Hash, iter};
 
-use base_db::{
-    impl_intern_key,
-    salsa::{self, InternId},
-    CrateId, FileId, FileRange, ProcMacroKind,
-};
+use base_db::{span::HirFileIdRepr, CrateId, FileId, FileRange, ProcMacroKind};
 use either::Either;
 use syntax::{
     algo::{self, skip_trivia_token},
@@ -51,6 +43,25 @@ use crate::{
     proc_macro::ProcMacroExpander,
 };
 
+pub use base_db::span::{HirFileId, MacroCallId, MacroFile};
+pub use mbe::ValueResult;
+
+pub type SpanMap = ::mbe::TokenMap<tt::SpanData>;
+pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::SpanData>;
+
+pub mod tt {
+    pub use base_db::span::SpanData;
+    pub use tt::{DelimiterKind, Spacing, Span};
+
+    pub type Delimiter = ::tt::Delimiter<SpanData>;
+    pub type Subtree = ::tt::Subtree<SpanData>;
+    pub type Leaf = ::tt::Leaf<SpanData>;
+    pub type Literal = ::tt::Literal<SpanData>;
+    pub type Punct = ::tt::Punct<SpanData>;
+    pub type Ident = ::tt::Ident<SpanData>;
+    pub type TokenTree = ::tt::TokenTree<SpanData>;
+}
+
 pub type ExpandResult<T> = ValueResult<T, ExpandError>;
 
 #[derive(Debug, PartialEq, Eq, Clone, Hash)]
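The `tt` alias module added above pins the generic token-tree types from the `tt` crate to the new `SpanData` payload. A minimal, self-contained sketch of the idea (the names below are illustrative stand-ins, not the real `tt` crate definitions): parameterizing the tree over its span type lets the exact same shape carry dummy spans, legacy token ids, or full anchored spans.

    // Hypothetical, simplified mirror of the generic token-tree shape this
    // commit switches to; the real definitions live in the `tt` crate.
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct SpanData {
        // anchor + relative range in the real code; plain u32s here
        anchor: u32,
        start: u32,
        end: u32,
    }

    #[derive(Clone, Debug)]
    enum TokenTree<S> {
        Ident { text: String, span: S },
        Subtree { open: S, close: S, token_trees: Vec<TokenTree<S>> },
    }

    fn main() {
        // The same tree shape works for any span payload `S`:
        let dummy = SpanData { anchor: 0, start: 0, end: 0 };
        let tree: TokenTree<SpanData> = TokenTree::Subtree {
            open: dummy,
            close: dummy,
            token_trees: vec![TokenTree::Ident { text: "hello".into(), span: dummy }],
        };
        println!("{tree:?}");
    }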
@@ -86,42 +97,43 @@ impl fmt::Display for ExpandError {
     }
 }
 
-/// Input to the analyzer is a set of files, where each file is identified by
-/// `FileId` and contains source code. However, another source of source code in
-/// Rust are macros: each macro can be thought of as producing a "temporary
-/// file". To assign an id to such a file, we use the id of the macro call that
-/// produced the file. So, a `HirFileId` is either a `FileId` (source code
-/// written by user), or a `MacroCallId` (source code produced by macro).
-///
-/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
-/// containing the call plus the offset of the macro call in the file. Note that
-/// this is a recursive definition! However, the size_of of `HirFileId` is
-/// finite (because everything bottoms out at the real `FileId`) and small
-/// (`MacroCallId` uses the location interning. You can check details here:
-/// <https://en.wikipedia.org/wiki/String_interning>).
-///
-/// The two variants are encoded in a single u32 which are differentiated by the MSB.
-/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
-/// `MacroCallId`.
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub struct HirFileId(u32);
-
-impl fmt::Debug for HirFileId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        self.repr().fmt(f)
-    }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct MacroFile {
-    pub macro_call_id: MacroCallId,
-}
-
 /// `MacroCallId` identifies a particular macro invocation, like
 /// `println!("Hello, {}", world)`.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct MacroCallId(salsa::InternId);
-impl_intern_key!(MacroCallId);
+pub struct SyntaxContextId(base_db::salsa::InternId);
+base_db::impl_intern_key!(SyntaxContextId);
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub struct SyntaxContext {
+    outer_expn: HirFileId,
+    outer_transparency: Transparency,
+    parent: SyntaxContextId,
+    /// This context, but with all transparent and semi-transparent expansions filtered away.
+    opaque: SyntaxContextId,
+    /// This context, but with all transparent expansions filtered away.
+    opaque_and_semitransparent: SyntaxContextId,
+    /// Name of the crate to which `$crate` with this context would resolve.
+    dollar_crate_name: name::Name,
+}
+
+/// A property of a macro expansion that determines how identifiers
+/// produced by that expansion are resolved.
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
+pub enum Transparency {
+    /// Identifier produced by a transparent expansion is always resolved at call-site.
+    /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
+    Transparent,
+    /// Identifier produced by a semi-transparent expansion may be resolved
+    /// either at call-site or at definition-site.
+    /// If it's a local variable, label or `$crate` then it's resolved at def-site.
+    /// Otherwise it's resolved at call-site.
+    /// `macro_rules` macros behave like this, built-in macros currently behave like this too,
+    /// but that's an implementation detail.
+    SemiTransparent,
+    /// Identifier produced by an opaque expansion is always resolved at definition-site.
+    /// Def-site spans in procedural macros, identifiers from `macro` by default use this.
+    Opaque,
+}
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MacroCallLoc {
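The doc comments on the new `Transparency` enum describe a resolution policy; a small illustrative sketch of how a resolver might act on it (all names below are hypothetical, not rust-analyzer API, and simply encode the rules the doc comments state):

    #[derive(Copy, Clone)]
    enum Transparency {
        Transparent,
        SemiTransparent,
        Opaque,
    }

    #[derive(Copy, Clone, Debug, PartialEq)]
    enum ResolveAt {
        CallSite,
        DefSite,
    }

    fn resolution_site(t: Transparency, is_local_or_dollar_crate: bool) -> ResolveAt {
        match t {
            // Transparent expansions always resolve at the call site.
            Transparency::Transparent => ResolveAt::CallSite,
            // Semi-transparent: locals, labels and `$crate` go to the def site,
            // everything else to the call site (the `macro_rules!` behaviour).
            Transparency::SemiTransparent if is_local_or_dollar_crate => ResolveAt::DefSite,
            Transparency::SemiTransparent => ResolveAt::CallSite,
            // Opaque expansions always resolve at the definition site.
            Transparency::Opaque => ResolveAt::DefSite,
        }
    }

    fn main() {
        assert_eq!(resolution_site(Transparency::SemiTransparent, true), ResolveAt::DefSite);
        assert_eq!(resolution_site(Transparency::SemiTransparent, false), ResolveAt::CallSite);
    }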
@@ -154,7 +166,7 @@ pub enum MacroDefKind {
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 struct EagerCallInfo {
     /// The expanded argument of the eager macro.
-    arg: Arc<(tt::Subtree, TokenMap)>,
+    arg: Arc<(tt::Subtree,)>,
     /// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
     arg_id: MacroCallId,
     error: Option<ExpandError>,
@@ -178,7 +190,7 @@ pub enum MacroCallKind {
     },
     Attr {
         ast_id: AstId<ast::Item>,
-        attr_args: Arc<(tt::Subtree, mbe::TokenMap)>,
+        attr_args: Arc<tt::Subtree>,
         /// Syntactical index of the invoking `#[attribute]`.
         ///
         /// Outer attributes are counted first, then inner attributes. This does not support
@@ -187,34 +199,40 @@ pub enum MacroCallKind {
     },
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-enum HirFileIdRepr {
-    FileId(FileId),
-    MacroFile(MacroFile),
-}
-
-impl From<FileId> for HirFileId {
-    fn from(FileId(id): FileId) -> Self {
-        assert!(id < Self::MAX_FILE_ID);
-        HirFileId(id)
-    }
-}
-
-impl From<MacroFile> for HirFileId {
-    fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self {
-        let id = id.as_u32();
-        assert!(id < Self::MAX_FILE_ID);
-        HirFileId(id | Self::MACRO_FILE_TAG_MASK)
-    }
-}
-
-impl HirFileId {
-    const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
-    const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
-
+pub trait HirFileIdExt {
     /// For macro-expansion files, returns the file original source file the
     /// expansion originated from.
-    pub fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
+    fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId;
+
+    fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32;
+
+    /// If this is a macro call, returns the syntax node of the call.
+    fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>>;
+
+    /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
+    fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)>;
+
+    /// Return expansion information if it is a macro-expansion file
+    fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo>;
+
+    fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase)
+        -> Option<InFile<ast::Attr>>;
+    fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
+    fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
+
+    /// Return whether this file is an include macro
+    fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool;
+
+    fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool;
+    /// Return whether this file is an attr macro
+    fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool;
+
+    /// Return whether this file is the pseudo expansion of the derive attribute.
+    /// See [`crate::builtin_attr_macro::derive_attr_expand`].
+    fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool;
+}
+
+impl HirFileIdExt for HirFileId {
+    fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
         let mut file_id = self;
         loop {
             match file_id.repr() {
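`HirFileId` keeps its old API through the new `HirFileIdExt` trait because the type itself now lives in `base_db` (re-exported further up), while these methods need `ExpandDatabase` from this crate. That is the standard extension-trait pattern, sketched here with made-up names:

    // Sketch of the extension-trait move: when a type migrates to a lower
    // crate, methods that need upper-crate context become a trait
    // implemented "from the outside". Everything here is illustrative.
    mod base {
        #[derive(Copy, Clone, Debug)]
        pub struct FileLikeId(pub u32);
    }

    mod expand {
        use crate::base::FileLikeId;

        pub trait FileLikeIdExt {
            // Needs `expand`-level context, so it cannot be an inherent
            // method on a type defined in `base`.
            fn describe(self) -> String;
        }

        impl FileLikeIdExt for FileLikeId {
            fn describe(self) -> String {
                format!("file-like id {}", self.0)
            }
        }
    }

    use expand::FileLikeIdExt;

    fn main() {
        let id = base::FileLikeId(3);
        // Callers now import the extension trait to get the old methods back.
        println!("{}", id.describe());
    }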
@@ -231,7 +249,7 @@ impl HirFileId {
         }
     }
 
-    pub fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
+    fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
         let mut level = 0;
         let mut curr = self;
         while let Some(macro_file) = curr.macro_file() {
@@ -243,15 +261,13 @@ impl HirFileId {
         level
     }
 
-    /// If this is a macro call, returns the syntax node of the call.
-    pub fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
+    fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
         let macro_file = self.macro_file()?;
         let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
         Some(loc.to_node(db))
     }
 
-    /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
-    pub fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> {
+    fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> {
         let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db);
         loop {
             match call.file_id.repr() {
@@ -264,12 +280,12 @@ impl HirFileId {
         }
     }
 
     /// Return expansion information if it is a macro-expansion file
-    pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
+    fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
         let macro_file = self.macro_file()?;
         ExpansionInfo::new(db, macro_file)
     }
 
-    pub fn as_builtin_derive_attr_node(
+    fn as_builtin_derive_attr_node(
         &self,
         db: &dyn db::ExpandDatabase,
     ) -> Option<InFile<ast::Attr>> {
@@ -282,7 +298,7 @@ impl HirFileId {
         Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
     }
 
-    pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
+    fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
         match self.macro_file() {
             Some(macro_file) => {
                 matches!(
@@ -294,7 +310,7 @@ impl HirFileId {
         }
     }
 
-    pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
+    fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
         match self.macro_file() {
             Some(macro_file) => {
                 matches!(
@@ -306,8 +322,7 @@ impl HirFileId {
         }
     }
 
-    /// Return whether this file is an include macro
-    pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
+    fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
         match self.macro_file() {
             Some(macro_file) => {
                 db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include()
@@ -316,7 +331,7 @@ impl HirFileId {
         }
     }
 
-    pub fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
+    fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
         match self.macro_file() {
             Some(macro_file) => {
                 let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@@ -326,8 +341,7 @@ impl HirFileId {
         }
     }
 
-    /// Return whether this file is an attr macro
-    pub fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
+    fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
         match self.macro_file() {
             Some(macro_file) => {
                 let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@@ -337,9 +351,7 @@ impl HirFileId {
         }
     }
 
-    /// Return whether this file is the pseudo expansion of the derive attribute.
-    /// See [`crate::builtin_attr_macro::derive_attr_expand`].
-    pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
+    fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
         match self.macro_file() {
             Some(macro_file) => {
                 let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@@ -348,38 +360,6 @@ impl HirFileId {
             None => false,
         }
     }
-
-    #[inline]
-    pub fn is_macro(self) -> bool {
-        self.0 & Self::MACRO_FILE_TAG_MASK != 0
-    }
-
-    #[inline]
-    pub fn macro_file(self) -> Option<MacroFile> {
-        match self.0 & Self::MACRO_FILE_TAG_MASK {
-            0 => None,
-            _ => Some(MacroFile {
-                macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
-            }),
-        }
-    }
-
-    #[inline]
-    pub fn file_id(self) -> Option<FileId> {
-        match self.0 & Self::MACRO_FILE_TAG_MASK {
-            0 => Some(FileId(self.0)),
-            _ => None,
-        }
-    }
-
-    fn repr(self) -> HirFileIdRepr {
-        match self.0 & Self::MACRO_FILE_TAG_MASK {
-            0 => HirFileIdRepr::FileId(FileId(self.0)),
-            _ => HirFileIdRepr::MacroFile(MacroFile {
-                macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
-            }),
-        }
-    }
 }
 
 impl MacroDefId {
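The inherent methods deleted here relied on the MSB-tagging scheme described in the doc comment removed further up: a single `u32` holds either a `FileId` (MSB clear) or a `MacroCallId` (MSB set, remaining 31 bits). A standalone sketch of that encoding, assuming 31-bit ids (the real version now lives in `base_db::span` rather than in this file):

    // Standalone sketch of the 1-bit tagging scheme the removed code used.
    const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

    #[derive(Debug, PartialEq)]
    enum Repr {
        FileId(u32),
        MacroCallId(u32),
    }

    fn pack_file(id: u32) -> u32 {
        assert!(id < MACRO_FILE_TAG_MASK);
        id
    }

    fn pack_macro_call(id: u32) -> u32 {
        assert!(id < MACRO_FILE_TAG_MASK);
        id | MACRO_FILE_TAG_MASK
    }

    fn repr(raw: u32) -> Repr {
        if raw & MACRO_FILE_TAG_MASK == 0 {
            Repr::FileId(raw)
        } else {
            Repr::MacroCallId(raw ^ MACRO_FILE_TAG_MASK)
        }
    }

    fn main() {
        assert_eq!(repr(pack_file(42)), Repr::FileId(42));
        assert_eq!(repr(pack_macro_call(7)), Repr::MacroCallId(7));
    }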
@@ -587,16 +567,6 @@ impl MacroCallKind {
     }
 }
 
-impl MacroCallId {
-    pub fn as_file(self) -> HirFileId {
-        MacroFile { macro_call_id: self }.into()
-    }
-
-    pub fn as_macro_file(self) -> MacroFile {
-        MacroFile { macro_call_id: self }
-    }
-}
-
 /// ExpansionInfo mainly describes how to map text range between src and expanded macro
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ExpansionInfo {
@@ -607,11 +577,8 @@ pub struct ExpansionInfo {
     attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
 
     macro_def: TokenExpander,
-    macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
-    /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
-    /// and as such we need to shift tokens if they are part of an attributes input instead of their item.
-    macro_arg_shift: mbe::Shift,
-    exp_map: Arc<mbe::TokenMap>,
+    macro_arg: Arc<tt::Subtree>,
+    exp_map: Arc<SpanMap>,
 }
 
 impl ExpansionInfo {
@@ -640,69 +607,33 @@ impl ExpansionInfo {
     pub fn map_token_down(
         &self,
         db: &dyn db::ExpandDatabase,
-        item: Option<ast::Item>,
         token: InFile<&SyntaxToken>,
         // FIXME: use this for range mapping, so that we can resolve inline format args
         _relative_token_offset: Option<TextSize>,
     ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
         assert_eq!(token.file_id, self.arg.file_id);
-        let token_id_in_attr_input = if let Some(item) = item {
-            // check if we are mapping down in an attribute input
-            // this is a special case as attributes can have two inputs
-            let call_id = self.expanded.file_id.macro_call_id;
-            let loc = db.lookup_intern_macro_call(call_id);
-
-            let token_range = token.value.text_range();
-            match &loc.kind {
-                MacroCallKind::Attr { attr_args, invoc_attr_index, .. } => {
-                    // FIXME: handle `cfg_attr`
-                    let attr = item
-                        .doc_comments_and_attrs()
-                        .nth(invoc_attr_index.ast_index())
-                        .and_then(Either::left)?;
-                    match attr.token_tree() {
-                        Some(token_tree)
-                            if token_tree.syntax().text_range().contains_range(token_range) =>
-                        {
-                            let attr_input_start =
-                                token_tree.left_delimiter_token()?.text_range().start();
-                            let relative_range =
-                                token.value.text_range().checked_sub(attr_input_start)?;
-                            // shift by the item's tree's max id
-                            let token_id = attr_args.1.token_by_range(relative_range)?;
-
-                            let token_id = if loc.def.is_attribute_derive() {
-                                // we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens
-                                token_id
-                            } else {
-                                self.macro_arg_shift.shift(token_id)
-                            };
-                            Some(token_id)
-                        }
-                        _ => None,
-                    }
-                }
-                _ => None,
-            }
-        } else {
-            None
-        };
-
-        let token_id = match token_id_in_attr_input {
-            Some(token_id) => token_id,
-            // the token is not inside an attribute's input so do the lookup in the macro_arg as usual
-            None => {
-                let relative_range =
-                    token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
-                let token_id = self.macro_arg.1.token_by_range(relative_range)?;
-                // conditionally shift the id by a declarative macro definition
-                self.macro_def.map_id_down(token_id)
-            }
-        };
-
-        let tokens = self
-            .exp_map
-            .ranges_by_token(token_id, token.value.kind())
+        let span_map = &self.exp_map.span_map;
+        let (start, end) = if span_map
+            .first()
+            .map_or(false, |(_, span)| span.anchor.file_id == token.file_id)
+        {
+            (0, span_map.partition_point(|a| a.1.anchor.file_id == token.file_id))
+        } else {
+            let start = span_map.partition_point(|a| a.1.anchor.file_id != token.file_id);
+            (
+                start,
+                start + span_map[start..].partition_point(|a| a.1.anchor.file_id == token.file_id),
+            )
+        };
+        let token_text_range = token.value.text_range();
+        let ast_id_map = db.ast_id_map(token.file_id);
+        let tokens = span_map[start..end]
+            .iter()
+            .filter_map(move |(range, span)| {
+                let offset = ast_id_map.get_raw(span.anchor.ast_id).text_range().start();
+                let abs_range = span.range + offset;
+                token_text_range.eq(&abs_range).then_some(*range)
+            })
             .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
 
         Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token)))
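The rewritten lookup above leans on `slice::partition_point`: the span map is assumed to keep all entries anchored to one file contiguous, so the run for `token.file_id` either starts at index 0 or begins right after a non-matching prefix. A reduced sketch of the same search over plain tuples, under that same contiguity assumption:

    // A minimal sketch of the grouped-run search, assuming (as the code
    // above does) that entries for a given file id are contiguous.
    fn file_subrange(span_map: &[(u32, &str)], file_id: u32) -> (usize, usize) {
        if span_map.first().map_or(false, |&(id, _)| id == file_id) {
            // Our file's run starts at the front; find where it stops.
            (0, span_map.partition_point(|&(id, _)| id == file_id))
        } else {
            // Skip the non-matching prefix, then measure our file's run.
            let start = span_map.partition_point(|&(id, _)| id != file_id);
            (start, start + span_map[start..].partition_point(|&(id, _)| id == file_id))
        }
    }

    fn main() {
        let map = [(1, "a"), (1, "b"), (2, "c"), (2, "d"), (2, "e")];
        assert_eq!(file_subrange(&map, 1), (0, 2));
        assert_eq!(file_subrange(&map, 2), (2, 5));
    }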
@@ -713,60 +644,18 @@ impl ExpansionInfo {
         &self,
         db: &dyn db::ExpandDatabase,
         token: InFile<&SyntaxToken>,
-    ) -> Option<(InFile<SyntaxToken>, Origin)> {
-        assert_eq!(token.file_id, self.expanded.file_id.into());
-        // Fetch the id through its text range,
-        let token_id = self.exp_map.token_by_range(token.value.text_range())?;
-        // conditionally unshifting the id to accommodate for macro-rules def site
-        let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
-
-        let call_id = self.expanded.file_id.macro_call_id;
-        let loc = db.lookup_intern_macro_call(call_id);
-
-        // Special case: map tokens from `include!` expansions to the included file
-        if loc.def.is_include() {
-            if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
-                let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
-                let source = db.parse(file_id);
-
-                let token = source.syntax_node().covering_element(range).into_token()?;
-
-                return Some((InFile::new(file_id.into(), token), Origin::Call));
-            }
-        }
-
-        // Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item.
-        let (token_map, tt) = match &loc.kind {
-            MacroCallKind::Attr { attr_args, .. } => {
-                if loc.def.is_attribute_derive() {
-                    (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
-                } else {
-                    // try unshifting the token id, if unshifting fails, the token resides in the non-item attribute input
-                    // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
-                    match self.macro_arg_shift.unshift(token_id) {
-                        Some(unshifted) => {
-                            token_id = unshifted;
-                            (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
-                        }
-                        None => (&self.macro_arg.1, self.arg.clone()),
-                    }
-                }
-            }
-            _ => match origin {
-                mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
-                mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) {
-                    (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
-                        (&expander.def_site_token_map, tt.syntax().cloned())
-                    }
-                    _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
-                },
-            },
-        };
-
-        let range = token_map.first_range_by_token(token_id, token.value.kind())?;
-        let token =
-            tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
-        Some((tt.with_value(token), origin))
+    ) -> Option<InFile<SyntaxToken>> {
+        self.exp_map.span_for_range(token.value.text_range()).and_then(|span| {
+            let anchor =
+                db.ast_id_map(span.anchor.file_id).get_raw(span.anchor.ast_id).text_range().start();
+            InFile::new(
+                span.anchor.file_id,
+                db.parse_or_expand(span.anchor.file_id)
+                    .covering_element(span.range + anchor)
+                    .into_token(),
+            )
+            .transpose()
+        })
     }
 
     fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option<ExpansionInfo> {
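Upmapping now needs no token ids at all: a span records its anchor node's `ast_id` plus a range relative to that anchor, so the absolute range is simply the anchor's start offset plus the stored range. In sketch form (types simplified to bare offsets):

    // Sketch of the anchored-span arithmetic in the new `map_token_up`.
    #[derive(Debug, PartialEq)]
    struct Range {
        start: u32,
        end: u32,
    }

    fn absolute_range(anchor_start: u32, relative: Range) -> Range {
        Range { start: relative.start + anchor_start, end: relative.end + anchor_start }
    }

    fn main() {
        // Anchor node starts at offset 100; the token sits at 5..10 inside it.
        assert_eq!(absolute_range(100, Range { start: 5, end: 10 }), Range { start: 105, end: 110 });
    }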
@@ -779,11 +668,7 @@ impl ExpansionInfo {
         let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
 
         let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
-            Arc::new((
-                tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
-                Default::default(),
-                Default::default(),
-            ))
+            Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() })
         });
 
         let def = loc.def.ast_id().left().and_then(|id| {
@@ -814,7 +699,6 @@ impl ExpansionInfo {
             expanded,
             arg: arg_tt,
             attr_input_or_mac_def,
-            macro_arg_shift: mbe::Shift::new(&macro_arg.0),
             macro_arg,
             macro_def,
             exp_map,
@@ -1018,7 +902,7 @@ impl InFile<&SyntaxNode> {
 impl InFile<SyntaxToken> {
     pub fn upmap(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxToken>> {
         let expansion = self.file_id.expansion_info(db)?;
-        expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it)
+        expansion.map_token_up(db, self.as_ref())
     }
 
     /// Falls back to the macro call range if the node cannot be mapped up fully.
@@ -1067,6 +951,7 @@ impl<T> From<InMacroFile<T>> for InFile<T> {
     }
 }
 
+// FIXME: Get rid of this
 fn ascend_node_border_tokens(
     db: &dyn db::ExpandDatabase,
     InFile { file_id, value: node }: InFile<&SyntaxNode>,
@@ -1090,13 +975,13 @@ fn ascend_call_token(
     token: InFile<SyntaxToken>,
 ) -> Option<InFile<SyntaxToken>> {
     let mut mapping = expansion.map_token_up(db, token.as_ref())?;
-    while let (mapped, Origin::Call) = mapping {
-        match mapped.file_id.expansion_info(db) {
-            Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?,
-            None => return Some(mapped),
+    loop {
+        match mapping.file_id.expansion_info(db) {
+            Some(info) => mapping = info.map_token_up(db, mapping.as_ref())?,
+            None => return Some(mapping),
         }
     }
-    None
 }
 
 impl<N: AstNode> InFile<N> {
@@ -18,8 +18,8 @@ macro_rules! __quote {
             crate::tt::Subtree {
                 delimiter: crate::tt::Delimiter {
                     kind: crate::tt::DelimiterKind::$delim,
-                    open: crate::tt::TokenId::unspecified(),
-                    close: crate::tt::TokenId::unspecified(),
+                    open: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
+                    close: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
                 },
                 token_trees: $crate::quote::IntoTt::to_tokens(children),
             }
@@ -32,7 +32,7 @@ macro_rules! __quote {
             crate::tt::Leaf::Punct(crate::tt::Punct {
                 char: $first,
                 spacing: crate::tt::Spacing::Alone,
-                span: crate::tt::TokenId::unspecified(),
+                span: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
             }).into()
         ]
     }
@@ -44,12 +44,12 @@ macro_rules! __quote {
             crate::tt::Leaf::Punct(crate::tt::Punct {
                 char: $first,
                 spacing: crate::tt::Spacing::Joint,
-                span: crate::tt::TokenId::unspecified(),
+                span: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
             }).into(),
             crate::tt::Leaf::Punct(crate::tt::Punct {
                 char: $sec,
                 spacing: crate::tt::Spacing::Alone,
-                span: crate::tt::TokenId::unspecified(),
+                span: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
             }).into()
         ]
     }
@@ -89,7 +89,7 @@ macro_rules! __quote {
         vec![ {
             crate::tt::Leaf::Ident(crate::tt::Ident {
                 text: stringify!($tt).into(),
-                span: crate::tt::TokenId::unspecified(),
+                span: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
             }).into()
         }]
     };
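`quote!` now mints placeholder spans through `<SpanData as Span>::DUMMY` instead of `TokenId::unspecified()`. The underlying pattern, sketched below with hypothetical stand-in types rather than the real `tt` API, is an associated const on the span trait, so generic token construction never has to know how to fabricate a concrete span:

    // Sketch of the `Span::DUMMY` pattern used by the new quoting code.
    trait Span: Copy {
        const DUMMY: Self;
    }

    #[derive(Copy, Clone, Debug)]
    struct SpanData {
        start: u32,
        end: u32,
    }

    impl Span for SpanData {
        const DUMMY: Self = SpanData { start: 0, end: 0 };
    }

    #[derive(Debug)]
    struct Ident<S> {
        text: String,
        span: S,
    }

    // Generic constructors conjure placeholder spans via the trait.
    fn mk_ident<S: Span>(text: &str) -> Ident<S> {
        Ident { text: text.into(), span: S::DUMMY }
    }

    fn main() {
        let ident: Ident<SpanData> = mk_ident("hello");
        println!("{ident:?}");
    }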
@@ -195,20 +195,22 @@ macro_rules! impl_to_to_tokentrees {
 }
 
 impl_to_to_tokentrees! {
-    u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
-    usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
-    i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
-    bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
+    u32 => self { crate::tt::Literal{text: self.to_string().into(), span: <crate::tt::SpanData as crate::tt::Span>::DUMMY} };
+    usize => self { crate::tt::Literal{text: self.to_string().into(), span: <crate::tt::SpanData as crate::tt::Span>::DUMMY} };
+    i32 => self { crate::tt::Literal{text: self.to_string().into(), span: <crate::tt::SpanData as crate::tt::Span>::DUMMY} };
+    bool => self { crate::tt::Ident{text: self.to_string().into(), span: <crate::tt::SpanData as crate::tt::Span>::DUMMY} };
     crate::tt::Leaf => self { self };
     crate::tt::Literal => self { self };
     crate::tt::Ident => self { self };
     crate::tt::Punct => self { self };
-    &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}};
-    String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}
+    &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: <crate::tt::SpanData as crate::tt::Span>::DUMMY}};
+    String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: <crate::tt::SpanData as crate::tt::Span>::DUMMY}}
 }
 
 #[cfg(test)]
 mod tests {
+    use expect_test::expect;
+
     #[test]
     fn test_quote_delimiters() {
         assert_eq!(quote!({}).to_string(), "{}");
@@ -231,7 +233,10 @@ mod tests {
     }
 
     fn mk_ident(name: &str) -> crate::tt::Ident {
-        crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() }
+        crate::tt::Ident {
+            text: name.into(),
+            span: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
+        }
     }
 
     #[test]
@@ -241,7 +246,9 @@ mod tests {
         let quoted = quote!(#a);
         assert_eq!(quoted.to_string(), "hello");
         let t = format!("{quoted:?}");
-        assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n  IDENT   hello 4294967295");
+        expect![[r#"
+            SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::<RustLanguage>>(0) } } SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::<RustLanguage>>(0) } }
+              IDENT   hello SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::<RustLanguage>>(0) } }"#]].assert_eq(&t);
     }
 
     #[test]
@@ -273,8 +280,8 @@ mod tests {
         let list = crate::tt::Subtree {
             delimiter: crate::tt::Delimiter {
                 kind: crate::tt::DelimiterKind::Brace,
-                open: crate::tt::TokenId::unspecified(),
-                close: crate::tt::TokenId::unspecified(),
+                open: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
+                close: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
             },
             token_trees: fields.collect(),
         };
@@ -24,7 +24,7 @@ use hir_def::{
 };
 use hir_expand::{
     name::{AsName, Name},
-    HirFileId,
+    HirFileId, HirFileIdExt,
 };
 use stdx::{always, never};
 use syntax::{
@@ -21,7 +21,7 @@ use hir_def::{
     AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
     StaticId, VariantId,
 };
-use hir_expand::{mod_path::ModPath, InFile};
+use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile};
 use intern::Interned;
 use la_arena::ArenaMap;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -6,7 +6,7 @@
 pub use hir_def::db::*;
 pub use hir_expand::db::{
     AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
-    ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery,
-    MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
+    ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgQuery, MacroExpandQuery,
+    ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
 };
 pub use hir_ty::db::*;
@@ -124,7 +124,7 @@ pub use {
     hir_expand::{
         attrs::{Attr, AttrId},
         name::{known, Name},
-        ExpandResult, HirFileId, InFile, MacroFile, Origin,
+        tt, ExpandResult, HirFileId, HirFileIdExt, InFile, MacroFile,
     },
     hir_ty::{
         display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
@@ -15,7 +15,7 @@ use hir_def::{
     type_ref::Mutability,
     AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
 };
-use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId};
+use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, HirFileIdExt, MacroCallId};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
@@ -549,7 +549,7 @@ impl<'db> SemanticsImpl<'db> {
         let mut mcache = self.macro_call_cache.borrow_mut();
 
         let mut process_expansion_for_token =
-            |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
+            |stack: &mut SmallVec<_>, macro_file, token: InFile<&_>| {
                 let expansion_info = cache
                     .entry(macro_file)
                     .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
@@ -562,7 +562,6 @@ impl<'db> SemanticsImpl<'db> {
 
                 let mapped_tokens = expansion_info.map_token_down(
                     self.db.upcast(),
-                    item,
                     token,
                     relative_token_offset,
                 )?;
@@ -587,17 +586,12 @@ impl<'db> SemanticsImpl<'db> {
                         // Don't force populate the dyn cache for items that don't have an attribute anyways
                         return None;
                     }
-                    Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
+                    Some(ctx.item_to_macro_call(token.with_value(item.clone()))?)
                 })
             });
-            if let Some((call_id, item)) = containing_attribute_macro_call {
+            if let Some(call_id) = containing_attribute_macro_call {
                 let file_id = call_id.as_file();
-                return process_expansion_for_token(
-                    &mut stack,
-                    file_id,
-                    Some(item),
-                    token.as_ref(),
-                );
+                return process_expansion_for_token(&mut stack, file_id, token.as_ref());
             }
 
             // Then check for token trees, that means we are either in a function-like macro or
@@ -622,7 +616,7 @@ impl<'db> SemanticsImpl<'db> {
                     it
                 }
             };
-            process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
+            process_expansion_for_token(&mut stack, file_id, token.as_ref())
         } else if let Some(meta) = ast::Meta::cast(parent) {
             // attribute we failed expansion for earlier, this might be a derive invocation
             // or derive helper attribute
@@ -647,7 +641,6 @@ impl<'db> SemanticsImpl<'db> {
                 return process_expansion_for_token(
                     &mut stack,
                     file_id,
-                    Some(adt.into()),
                     token.as_ref(),
                 );
             }
@@ -679,13 +672,11 @@ impl<'db> SemanticsImpl<'db> {
                 let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
                 let helpers =
                     def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
-                let item = Some(adt.into());
                 let mut res = None;
                 for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
                     res = res.or(process_expansion_for_token(
                         &mut stack,
                         derive.as_file(),
-                        item.clone(),
                         token.as_ref(),
                     ));
                 }
@@ -97,7 +97,7 @@ use hir_def::{
     FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId,
     StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
 };
-use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
+use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId};
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
 use stdx::{impl_from, never};
@@ -30,7 +30,7 @@ use hir_expand::{
     mod_path::path,
     name,
     name::{AsName, Name},
-    HirFileId, InFile,
+    HirFileId, HirFileIdExt, InFile,
 };
 use hir_ty::{
     diagnostics::{
@@ -3,7 +3,7 @@ use std::{
     iter,
 };
 
-use hir::{HasSource, ModuleSource};
+use hir::{HasSource, HirFileIdExt, ModuleSource};
 use ide_db::{
     assists::{AssistId, AssistKind},
     base_db::FileId,
@@ -1,4 +1,6 @@
-use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef};
+use hir::{
+    db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef,
+};
 use ide_db::base_db::FileId;
 use syntax::{
     ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _},
@@ -1,5 +1,5 @@
 use crate::assist_context::{AssistContext, Assists};
-use hir::{HasVisibility, HirDisplay, Module};
+use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module};
 use ide_db::{
     assists::{AssistId, AssistKind},
     base_db::{FileId, Upcast},
@@ -1,4 +1,4 @@
-use hir::{HasSource, HirDisplay, InFile};
+use hir::{HasSource, HirDisplay, HirFileIdExt, InFile};
 use ide_db::assists::{AssistId, AssistKind};
 use syntax::{
     ast::{self, make, HasArgList},
@@ -1,5 +1,6 @@
 use hir::{
-    Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, Type, TypeInfo,
+    Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics, Type,
+    TypeInfo,
 };
 use ide_db::{
     base_db::FileId,
@@ -510,7 +511,7 @@ fn assoc_fn_target_info(
 }
 
 fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
-    match &target {
+    match target {
        GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
        GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'),
    }
@@ -1,6 +1,6 @@
 use std::collections::{hash_map::Entry, HashMap};
 
-use hir::{InFile, Module, ModuleSource};
+use hir::{HirFileIdExt, InFile, Module, ModuleSource};
 use ide_db::{
     base_db::FileRange,
     defs::Definition,
@@ -1,4 +1,4 @@
-use hir::{InFile, ModuleDef};
+use hir::{HirFileIdExt, InFile, ModuleDef};
 use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
 use itertools::Itertools;
 use syntax::{
@@ -683,7 +683,7 @@ pub(super) fn complete_name_ref(
     ctx: &CompletionContext<'_>,
     NameRefContext { nameref, kind }: &NameRefContext,
 ) {
-    match kind {
+    match dbg!(kind) {
         NameRefKind::Path(path_ctx) => {
             flyimport::import_on_the_fly_path(acc, ctx, path_ctx);
 
@@ -2,7 +2,7 @@
 
 use std::iter;
 
-use hir::{Module, ModuleSource};
+use hir::{HirFileIdExt, Module, ModuleSource};
 use ide_db::{
     base_db::{SourceDatabaseExt, VfsPath},
     FxHashSet, RootDatabase, SymbolKind,
@@ -99,7 +99,7 @@ impl RootDatabase {
             hir::db::AstIdMapQuery
             hir::db::ParseMacroExpansionQuery
             hir::db::InternMacroCallQuery
-            hir::db::MacroArgNodeQuery
+            hir::db::MacroArgQuery
             hir::db::DeclMacroExpanderQuery
             hir::db::MacroExpandQuery
             hir::db::ExpandProcMacroQuery
@@ -204,7 +204,7 @@ impl RootDatabase {
             hir_db::AstIdMapQuery
             // hir_db::ParseMacroExpansionQuery
             // hir_db::InternMacroCallQuery
-            hir_db::MacroArgNodeQuery
+            hir_db::MacroArgQuery
             hir_db::DeclMacroExpanderQuery
             // hir_db::MacroExpandQuery
             hir_db::ExpandProcMacroQuery
@@ -24,7 +24,7 @@ use std::fmt;
 
 use base_db::{AnchoredPathBuf, FileId, FileRange};
 use either::Either;
-use hir::{FieldSource, HasSource, InFile, ModuleSource, Semantics};
+use hir::{FieldSource, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics};
 use stdx::never;
 use syntax::{
     ast::{self, HasName},
@@ -8,7 +8,8 @@ use std::mem;
 
 use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt};
 use hir::{
-    AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility,
+    AsAssocItem, DefWithBody, HasAttrs, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics,
+    Visibility,
 };
 use memchr::memmem::Finder;
 use nohash_hasher::IntMap;
@@ -21,10 +21,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STRUCT,
                     range: 83..119,
@@ -50,10 +48,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STRUCT,
                     range: 0..81,
@@ -79,10 +75,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STRUCT,
                     range: 0..81,
@@ -108,10 +102,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STRUCT,
                     range: 0..81,
@@ -137,10 +129,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STRUCT,
                     range: 0..81,
@@ -166,10 +156,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STRUCT,
                     range: 83..119,
@@ -195,10 +183,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STRUCT,
                     range: 0..81,
@@ -19,10 +19,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: TYPE_ALIAS,
                     range: 397..417,
@@ -46,10 +44,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: CONST,
                     range: 340..361,
@@ -73,10 +69,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: CONST,
                     range: 520..592,
@@ -102,10 +96,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: ENUM,
                     range: 185..207,
@@ -131,10 +123,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: USE_TREE,
                     range: 654..676,
@@ -160,10 +150,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: MACRO_DEF,
                     range: 153..168,
@@ -187,10 +175,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STATIC,
                     range: 362..396,
@@ -216,10 +202,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STRUCT,
                     range: 170..184,
@@ -245,12 +229,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: MacroFile(
-                    MacroFile {
-                        macro_call_id: MacroCallId(
-                            0,
-                        ),
-                    },
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STRUCT,
                     range: 0..22,
@@ -276,10 +256,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STRUCT,
                     range: 318..336,
@@ -307,10 +285,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STRUCT,
                     range: 555..581,
@@ -338,10 +314,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: STRUCT,
                     range: 479..507,
@@ -365,10 +339,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: TRAIT,
                     range: 261..300,
@@ -394,10 +366,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: USE_TREE,
                     range: 682..696,
@@ -423,10 +393,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
                 ptr: SyntaxNodePtr {
                     kind: UNION,
                     range: 208..222,
@@ -452,10 +420,8 @@
             ),
             loc: DeclarationLocation {
                 hir_file_id: FileId(
-                    FileId(
-                        0,
-                    ),
+                    0,
                 ),
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: MODULE,
|
kind: MODULE,
|
||||||
range: 419..457,
|
range: 419..457,
|
||||||
|
@ -481,10 +447,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
0,
|
0,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: MODULE,
|
kind: MODULE,
|
||||||
range: 594..604,
|
range: 594..604,
|
||||||
|
@ -510,10 +474,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
0,
|
0,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: MACRO_RULES,
|
kind: MACRO_RULES,
|
||||||
range: 51..131,
|
range: 51..131,
|
||||||
|
@ -537,10 +499,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
0,
|
0,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: FN,
|
kind: FN,
|
||||||
range: 242..257,
|
range: 242..257,
|
||||||
|
@ -566,10 +526,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
0,
|
0,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: MACRO_RULES,
|
kind: MACRO_RULES,
|
||||||
range: 1..48,
|
range: 1..48,
|
||||||
|
@ -593,10 +551,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
0,
|
0,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: FN,
|
kind: FN,
|
||||||
range: 302..338,
|
range: 302..338,
|
||||||
|
@ -622,10 +578,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
0,
|
0,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: USE_TREE,
|
kind: USE_TREE,
|
||||||
range: 611..648,
|
range: 611..648,
|
||||||
|
@ -649,10 +603,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
0,
|
0,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: FN,
|
kind: FN,
|
||||||
range: 279..298,
|
range: 279..298,
|
||||||
|
@ -691,10 +643,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
0,
|
0,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: STRUCT,
|
kind: STRUCT,
|
||||||
range: 435..455,
|
range: 435..455,
|
||||||
|
@ -731,10 +681,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
1,
|
1,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: USE_TREE,
|
kind: USE_TREE,
|
||||||
range: 111..143,
|
range: 111..143,
|
||||||
|
@ -760,10 +708,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
1,
|
1,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: STRUCT,
|
kind: STRUCT,
|
||||||
range: 0..20,
|
range: 0..20,
|
||||||
|
@ -789,10 +735,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
1,
|
1,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: USE_TREE,
|
kind: USE_TREE,
|
||||||
range: 25..59,
|
range: 25..59,
|
||||||
|
@ -818,10 +762,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
1,
|
1,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: USE_TREE,
|
kind: USE_TREE,
|
||||||
range: 65..105,
|
range: 65..105,
|
||||||
|
@ -847,10 +789,8 @@
|
||||||
),
|
),
|
||||||
loc: DeclarationLocation {
|
loc: DeclarationLocation {
|
||||||
hir_file_id: FileId(
|
hir_file_id: FileId(
|
||||||
FileId(
|
|
||||||
1,
|
1,
|
||||||
),
|
),
|
||||||
),
|
|
||||||
ptr: SyntaxNodePtr {
|
ptr: SyntaxNodePtr {
|
||||||
kind: USE_TREE,
|
kind: USE_TREE,
|
||||||
range: 65..105,
|
range: 65..105,
|
||||||
|
|
|
@@ -1,7 +1,7 @@
 use either::Either;
 use hir::{
     db::{ExpandDatabase, HirDatabase},
-    known, AssocItem, HirDisplay, InFile, Type,
+    known, AssocItem, HirDisplay, HirFileIdExt, InFile, Type,
 };
 use ide_db::{
     assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search,

@@ -1,4 +1,5 @@
 use hir::db::ExpandDatabase;
+use hir::HirFileIdExt;
 use ide_db::{assists::Assist, source_change::SourceChange};
 use syntax::{ast, SyntaxNode};
 use syntax::{match_ast, AstNode};

@@ -1,5 +1,5 @@
 use either::Either;
-use hir::{db::ExpandDatabase, HasSource, HirDisplay, Semantics};
+use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics};
 use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
 use syntax::{
     ast::{self, edit::IndentLevel, make},

@@ -1,4 +1,4 @@
-use hir::{db::ExpandDatabase, InFile};
+use hir::{db::ExpandDatabase, HirFileIdExt, InFile};
 use ide_db::source_change::SourceChange;
 use syntax::{
     ast::{self, HasArgList},

@@ -1,4 +1,4 @@
-use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, InFile, Type};
+use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type};
 use ide_db::{famous_defs::FamousDefs, source_change::SourceChange};
 use syntax::{
     ast::{self, BlockExpr, ExprStmt},

@@ -1,4 +1,4 @@
-use hir::db::ExpandDatabase;
+use hir::{db::ExpandDatabase, HirFileIdExt};
 use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit};
 use itertools::Itertools;
 use syntax::AstNode;
@@ -149,7 +149,7 @@ mod tests {

     fn check_hierarchy(
         ra_fixture: &str,
-        expected: Expect,
+        expected_nav: Expect,
         expected_incoming: Expect,
         expected_outgoing: Expect,
     ) {
@@ -158,7 +158,7 @@ mod tests {
         let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info;
         assert_eq!(navs.len(), 1);
         let nav = navs.pop().unwrap();
-        expected.assert_eq(&nav.debug_render());
+        expected_nav.assert_eq(&nav.debug_render());

         let item_pos =
             FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() };
@@ -1,4 +1,4 @@
-use hir::Semantics;
+use hir::{HirFileIdExt, Semantics};
 use ide_db::{
     base_db::FileId, helpers::pick_best_token,
     syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,

@@ -60,13 +60,13 @@ pub(crate) fn goto_definition(
         .into_iter()
         .filter_map(|token| {
             let parent = token.parent()?;
-            if let Some(tt) = ast::TokenTree::cast(parent) {
+            if let Some(tt) = ast::TokenTree::cast(parent.clone()) {
                 if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) {
                     return Some(vec![x]);
                 }
             }
             Some(
-                IdentClass::classify_token(sema, &token)?
+                IdentClass::classify_node(sema, &parent)?
                     .definitions()
                     .into_iter()
                     .flat_map(|def| {
@@ -392,6 +392,8 @@ fn bar() {
     );
 }

+// FIXME: We should emit two targets here, one for the identifier in the declaration, one for
+// the macro call
 #[test]
 fn goto_def_for_macro_defined_fn_no_arg() {
     check(
@@ -399,10 +401,10 @@ fn bar() {
 //- /lib.rs
 macro_rules! define_fn {
     () => (fn foo() {})
+    //^^^
 }

 define_fn!();
-//^^^^^^^^^^^^^

 fn bar() {
     $0foo();
@@ -4,7 +4,7 @@
 //! tests. This module also implements a couple of magic tricks, like renaming
 //! `self` and to `self` (to switch between associated function and method).

-use hir::{AsAssocItem, InFile, Semantics};
+use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics};
 use ide_db::{
     base_db::FileId,
     defs::{Definition, NameClass, NameRefClass},

@@ -2,7 +2,7 @@ use std::fmt;

 use ast::HasName;
 use cfg::CfgExpr;
-use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, Semantics};
+use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, HirFileIdExt, Semantics};
 use ide_assists::utils::test_related_attribute;
 use ide_db::{
     base_db::{FilePosition, FileRange},

@@ -3,7 +3,7 @@

 use std::collections::HashMap;

-use hir::{db::HirDatabase, Crate, Module};
+use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
 use ide_db::helpers::get_definition;
 use ide_db::{
     base_db::{FileId, FileRange, SourceDatabaseExt},

@@ -1,6 +1,6 @@
 //! Computes color for a single element.

-use hir::{AsAssocItem, HasVisibility, Semantics};
+use hir::{AsAssocItem, HasVisibility, HirFileIdExt, Semantics};
 use ide_db::{
     defs::{Definition, IdentClass, NameClass, NameRefClass},
     FxHashMap, RootDatabase, SymbolKind,
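The recurring one-line hunks above all add `HirFileIdExt` to existing `hir` imports. That is the standard Rust extension-trait pattern: when methods move from an inherent impl onto a trait, every call site must bring the trait into scope or the method calls stop resolving. A minimal, self-contained illustration of why the import is required; the method name and body here are invented for the sketch, not rust-analyzer's actual definitions:

```rust
mod hir {
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    pub struct HirFileId(pub u32);

    // Methods that used to be inherent now live on an extension trait.
    pub trait HirFileIdExt {
        fn original_file(self) -> u32;
    }

    impl HirFileIdExt for HirFileId {
        fn original_file(self) -> u32 {
            self.0
        }
    }
}

// Without importing the trait, `file.original_file()` fails to compile even
// though the impl exists: trait methods resolve only when the trait is in scope.
use hir::{HirFileId, HirFileIdExt};

fn main() {
    let file = HirFileId(0);
    assert_eq!(file.original_file(), 0);
}
```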
@ -4,13 +4,12 @@
|
||||||
// to run rust-analyzer as a library.
|
// to run rust-analyzer as a library.
|
||||||
use std::{collections::hash_map::Entry, mem, path::Path, sync};
|
use std::{collections::hash_map::Entry, mem, path::Path, sync};
|
||||||
|
|
||||||
use ::tt::token_id as tt;
|
|
||||||
use crossbeam_channel::{unbounded, Receiver};
|
use crossbeam_channel::{unbounded, Receiver};
|
||||||
use ide::{AnalysisHost, Change, SourceRoot};
|
use ide::{AnalysisHost, Change, SourceRoot};
|
||||||
use ide_db::{
|
use ide_db::{
|
||||||
base_db::{
|
base_db::{
|
||||||
CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
|
span::SpanData, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
|
||||||
ProcMacroLoadResult, ProcMacros,
|
ProcMacroKind, ProcMacroLoadResult, ProcMacros,
|
||||||
},
|
},
|
||||||
FxHashMap,
|
FxHashMap,
|
||||||
};
|
};
|
||||||
|
@ -374,16 +373,19 @@ struct Expander(proc_macro_api::ProcMacro);
|
||||||
impl ProcMacroExpander for Expander {
|
impl ProcMacroExpander for Expander {
|
||||||
fn expand(
|
fn expand(
|
||||||
&self,
|
&self,
|
||||||
subtree: &tt::Subtree,
|
subtree: &tt::Subtree<SpanData>,
|
||||||
attrs: Option<&tt::Subtree>,
|
attrs: Option<&tt::Subtree<SpanData>>,
|
||||||
env: &Env,
|
env: &Env,
|
||||||
) -> Result<tt::Subtree, ProcMacroExpansionError> {
|
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
|
||||||
let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
|
let _ = (subtree, attrs, env);
|
||||||
match self.0.expand(subtree, attrs, env) {
|
|
||||||
Ok(Ok(subtree)) => Ok(subtree),
|
// let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
|
||||||
Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
|
// match self.0.expand(subtree, attrs, env) {
|
||||||
Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
|
// Ok(Ok(subtree)) => Ok(subtree),
|
||||||
}
|
// Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
|
||||||
|
// Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
|
||||||
|
// }
|
||||||
|
todo!()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -394,10 +396,10 @@ struct IdentityExpander;
|
||||||
impl ProcMacroExpander for IdentityExpander {
|
impl ProcMacroExpander for IdentityExpander {
|
||||||
fn expand(
|
fn expand(
|
||||||
&self,
|
&self,
|
||||||
subtree: &tt::Subtree,
|
subtree: &tt::Subtree<SpanData>,
|
||||||
_: Option<&tt::Subtree>,
|
_: Option<&tt::Subtree<SpanData>>,
|
||||||
_: &Env,
|
_: &Env,
|
||||||
) -> Result<tt::Subtree, ProcMacroExpansionError> {
|
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
|
||||||
Ok(subtree.clone())
|
Ok(subtree.clone())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -409,10 +411,10 @@ struct EmptyExpander;
|
||||||
impl ProcMacroExpander for EmptyExpander {
|
impl ProcMacroExpander for EmptyExpander {
|
||||||
fn expand(
|
fn expand(
|
||||||
&self,
|
&self,
|
||||||
_: &tt::Subtree,
|
_: &tt::Subtree<SpanData>,
|
||||||
_: Option<&tt::Subtree>,
|
_: Option<&tt::Subtree<SpanData>>,
|
||||||
_: &Env,
|
_: &Env,
|
||||||
) -> Result<tt::Subtree, ProcMacroExpansionError> {
|
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
|
||||||
Ok(tt::Subtree::empty())
|
Ok(tt::Subtree::empty())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
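The hunks above replace the `TokenId`-specialized `tt::Subtree` alias with an explicit `tt::Subtree<SpanData>` in every `ProcMacroExpander` implementation. A minimal sketch of what an implementer looks like after that change, using simplified local stand-ins (the real `SpanData`, `Env`, and error type live in rust-analyzer's `tt` and `base-db` crates; only the shape is mirrored here):

```rust
// Illustrative stand-ins, not rust-analyzer's real types.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct SpanData {
    start: u32,
    end: u32,
}

#[derive(Clone, Debug)]
struct Subtree<S> {
    spans: Vec<S>, // stand-in for delimiter + token trees
}

#[allow(dead_code)]
#[derive(Debug)]
enum ExpansionError {
    Panic(String),
}

// Mirrors the post-change trait shape: the subtree is generic over the span
// type instead of being a `TokenId`-specialized alias.
trait Expander {
    fn expand(
        &self,
        subtree: &Subtree<SpanData>,
        attrs: Option<&Subtree<SpanData>>,
    ) -> Result<Subtree<SpanData>, ExpansionError>;
}

struct IdentityExpander;

impl Expander for IdentityExpander {
    fn expand(
        &self,
        subtree: &Subtree<SpanData>,
        _attrs: Option<&Subtree<SpanData>>,
    ) -> Result<Subtree<SpanData>, ExpansionError> {
        // Spans are carried through unchanged, so the expansion output stays
        // anchored to the input's text ranges.
        Ok(subtree.clone())
    }
}

fn main() {
    let input = Subtree { spans: vec![SpanData { start: 0, end: 4 }] };
    let out = IdentityExpander.expand(&input, None).unwrap();
    assert_eq!(out.spans, input.spans);
}
```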
@@ -6,13 +6,19 @@ use syntax::{
     AstNode, SmolStr,
 };
 use test_utils::{bench, bench_fixture, skip_slow_tests};
-use tt::{Span, TokenId};
+use tt::{Span, SpanData};

 use crate::{
     parser::{MetaVarKind, Op, RepeatKind, Separator},
     syntax_node_to_token_tree, DeclarativeMacro,
 };

+#[derive(PartialEq, Eq, Clone, Copy, Debug)]
+struct DummyFile;
+impl Span for DummyFile {
+    const DUMMY: Self = DummyFile;
+}
+
 #[test]
 fn benchmark_parse_macro_rules() {
     if skip_slow_tests() {
@@ -39,7 +45,7 @@ fn benchmark_expand_macro_rules() {
     invocations
         .into_iter()
         .map(|(id, tt)| {
-            let res = rules[&id].expand(tt);
+            let res = rules[&id].expand(&tt);
             assert!(res.err.is_none());
             res.value.token_trees.len()
         })
@@ -48,14 +54,14 @@ fn benchmark_expand_macro_rules() {
     assert_eq!(hash, 69413);
 }

-fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<SpanData<DummyFile>>> {
     macro_rules_fixtures_tt()
         .into_iter()
         .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
         .collect()
 }

-fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<TokenId>> {
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData<DummyFile>>> {
     let fixture = bench_fixture::numerous_macro_rules();
     let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();

@@ -65,7 +71,12 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<TokenId>> {
         .filter_map(ast::MacroRules::cast)
         .map(|rule| {
             let id = rule.name().unwrap().to_string();
-            let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax());
+            let def_tt = syntax_node_to_token_tree(
+                rule.token_tree().unwrap().syntax(),
+                DummyFile,
+                0.into(),
+                &Default::default(),
+            );
             (id, def_tt)
         })
         .collect()
@@ -73,8 +84,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<TokenId>> {

 /// Generate random invocation fixtures from rules
 fn invocation_fixtures(
-    rules: &FxHashMap<String, DeclarativeMacro>,
-) -> Vec<(String, tt::Subtree<TokenId>)> {
+    rules: &FxHashMap<String, DeclarativeMacro<SpanData<DummyFile>>>,
+) -> Vec<(String, tt::Subtree<SpanData<DummyFile>>)> {
     let mut seed = 123456789;
     let mut res = Vec::new();

@@ -96,8 +107,8 @@ fn invocation_fixtures(
         loop {
             let mut subtree = tt::Subtree {
                 delimiter: tt::Delimiter {
-                    open: tt::TokenId::DUMMY,
-                    close: tt::TokenId::DUMMY,
+                    open: SpanData::DUMMY,
+                    close: SpanData::DUMMY,
                     kind: tt::DelimiterKind::Invisible,
                 },
                 token_trees: vec![],
@@ -105,7 +116,7 @@ fn invocation_fixtures(
             for op in rule.lhs.iter() {
                 collect_from_op(op, &mut subtree, &mut seed);
             }
             if it.expand(subtree.clone()).err.is_none() {
-            if it.expand(subtree.clone()).err.is_none() {
+            if it.expand(&subtree).err.is_none() {
                 res.push((name.clone(), subtree));
                 break;
             }
@@ -119,7 +130,11 @@ fn invocation_fixtures(
     }
     return res;

-    fn collect_from_op(op: &Op<TokenId>, parent: &mut tt::Subtree<TokenId>, seed: &mut usize) {
+    fn collect_from_op(
+        op: &Op<SpanData<DummyFile>>,
+        parent: &mut tt::Subtree<SpanData<DummyFile>>,
+        seed: &mut usize,
+    ) {
         return match op {
             Op::Var { kind, .. } => match kind.as_ref() {
                 Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),
@@ -205,32 +220,22 @@ fn invocation_fixtures(
         *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
         *seed
     }
-    fn make_ident(ident: &str) -> tt::TokenTree<TokenId> {
-        tt::Leaf::Ident(tt::Ident { span: tt::TokenId::DUMMY, text: SmolStr::new(ident) })
+    fn make_ident(ident: &str) -> tt::TokenTree<SpanData<DummyFile>> {
+        tt::Leaf::Ident(tt::Ident { span: SpanData::DUMMY, text: SmolStr::new(ident) }).into()
+    }
+    fn make_punct(char: char) -> tt::TokenTree<SpanData<DummyFile>> {
+        tt::Leaf::Punct(tt::Punct { span: SpanData::DUMMY, char, spacing: tt::Spacing::Alone })
             .into()
     }
-    fn make_punct(char: char) -> tt::TokenTree<TokenId> {
-        tt::Leaf::Punct(tt::Punct {
-            span: tt::TokenId::DUMMY,
-            char,
-            spacing: tt::Spacing::Alone,
-        })
-        .into()
-    }
-    fn make_literal(lit: &str) -> tt::TokenTree<TokenId> {
-        tt::Leaf::Literal(tt::Literal { span: tt::TokenId::DUMMY, text: SmolStr::new(lit) })
-            .into()
+    fn make_literal(lit: &str) -> tt::TokenTree<SpanData<DummyFile>> {
+        tt::Leaf::Literal(tt::Literal { span: SpanData::DUMMY, text: SmolStr::new(lit) }).into()
     }
     fn make_subtree(
         kind: tt::DelimiterKind,
-        token_trees: Option<Vec<tt::TokenTree<TokenId>>>,
-    ) -> tt::TokenTree<TokenId> {
+        token_trees: Option<Vec<tt::TokenTree<SpanData<DummyFile>>>>,
+    ) -> tt::TokenTree<SpanData<DummyFile>> {
         tt::Subtree {
-            delimiter: tt::Delimiter {
-                open: tt::TokenId::DUMMY,
-                close: tt::TokenId::DUMMY,
-                kind,
-            },
+            delimiter: tt::Delimiter { open: SpanData::DUMMY, close: SpanData::DUMMY, kind },
             token_trees: token_trees.unwrap_or_default(),
         }
         .into()
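Because spans are now generic, this benchmark has to pick a concrete span type, so it defines a `DummyFile` anchor and instantiates `SpanData<DummyFile>` throughout. A compilable sketch of the same pattern, with minimal local definitions standing in for the `tt` crate's `Span`, `SpanData`, and leaf types:

```rust
use std::fmt::Debug;

// Local stand-ins mirroring the shapes used in the hunks above.
trait Span: Debug + Copy + Sized + Eq {
    const DUMMY: Self;
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct SpanData<Anchor> {
    range: (u32, u32),
    anchor: Anchor,
}

impl<Anchor: Span> Span for SpanData<Anchor> {
    const DUMMY: Self = SpanData { range: (0, 0), anchor: Anchor::DUMMY };
}

// Tests and benchmarks that do not care about real files can use a unit anchor.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct DummyFile;
impl Span for DummyFile {
    const DUMMY: Self = DummyFile;
}

#[derive(Debug)]
struct Ident<S> {
    text: String,
    span: S,
}

fn make_ident(text: &str) -> Ident<SpanData<DummyFile>> {
    // Every fabricated token gets the dummy span, exactly like the
    // `make_ident`/`make_punct`/`make_literal` helpers in the diff.
    Ident { text: text.to_string(), span: SpanData::DUMMY }
}

fn main() {
    let ident = make_ident("foo");
    assert_eq!(ident.span, SpanData::DUMMY);
    println!("{ident:?}");
}
```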
@@ -19,7 +19,7 @@ mod benchmark;
 mod token_map;

 use stdx::impl_from;
-use tt::{Span, TokenId};
+use tt::Span;

 use std::fmt;

@@ -34,10 +34,8 @@ pub use tt::{Delimiter, DelimiterKind, Punct};

 pub use crate::{
     syntax_bridge::{
-        parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_map,
-        syntax_node_to_token_map_with_modifications, syntax_node_to_token_tree,
-        syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken,
-        SyntheticTokenId,
+        map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+        syntax_node_to_token_tree_censored, token_tree_to_syntax_node,
     },
     token_map::TokenMap,
 };
@@ -125,10 +123,8 @@ impl fmt::Display for CountError {
 /// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident`
 /// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
 #[derive(Clone, Debug, PartialEq, Eq)]
-pub struct DeclarativeMacro {
-    rules: Box<[Rule<TokenId>]>,
-    /// Highest id of the token we have in TokenMap
-    shift: Shift,
+pub struct DeclarativeMacro<S> {
+    rules: Box<[Rule<S>]>,
     // This is used for correctly determining the behavior of the pat fragment
     // FIXME: This should be tracked by hygiene of the fragment identifier!
     is_2021: bool,
@@ -141,91 +137,13 @@ struct Rule<S> {
     rhs: MetaTemplate<S>,
 }

-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct Shift(u32);
-
-impl Shift {
-    pub fn new(tt: &tt::Subtree<TokenId>) -> Shift {
-        // Note that TokenId is started from zero,
-        // We have to add 1 to prevent duplication.
-        let value = max_id(tt).map_or(0, |it| it + 1);
-        return Shift(value);
-
-        // Find the max token id inside a subtree
-        fn max_id(subtree: &tt::Subtree<TokenId>) -> Option<u32> {
-            let filter =
-                |tt: &_| match tt {
-                    tt::TokenTree::Subtree(subtree) => {
-                        let tree_id = max_id(subtree);
-                        if subtree.delimiter.open != tt::TokenId::unspecified() {
-                            Some(tree_id.map_or(subtree.delimiter.open.0, |t| {
-                                t.max(subtree.delimiter.open.0)
-                            }))
-                        } else {
-                            tree_id
-                        }
-                    }
-                    tt::TokenTree::Leaf(leaf) => {
-                        let &(tt::Leaf::Ident(tt::Ident { span, .. })
-                        | tt::Leaf::Punct(tt::Punct { span, .. })
-                        | tt::Leaf::Literal(tt::Literal { span, .. })) = leaf;
-
-                        (span != tt::TokenId::unspecified()).then_some(span.0)
-                    }
-                };
-            subtree.token_trees.iter().filter_map(filter).max()
-        }
-    }
-
-    /// Shift given TokenTree token id
-    pub fn shift_all(self, tt: &mut tt::Subtree<TokenId>) {
-        for t in &mut tt.token_trees {
-            match t {
-                tt::TokenTree::Leaf(
-                    tt::Leaf::Ident(tt::Ident { span, .. })
-                    | tt::Leaf::Punct(tt::Punct { span, .. })
-                    | tt::Leaf::Literal(tt::Literal { span, .. }),
-                ) => *span = self.shift(*span),
-                tt::TokenTree::Subtree(tt) => {
-                    tt.delimiter.open = self.shift(tt.delimiter.open);
-                    tt.delimiter.close = self.shift(tt.delimiter.close);
-                    self.shift_all(tt)
-                }
-            }
-        }
-    }
-
-    pub fn shift(self, id: tt::TokenId) -> tt::TokenId {
-        if id == tt::TokenId::unspecified() {
-            id
-        } else {
-            tt::TokenId(id.0 + self.0)
-        }
-    }
-
-    pub fn unshift(self, id: tt::TokenId) -> Option<tt::TokenId> {
-        id.0.checked_sub(self.0).map(tt::TokenId)
-    }
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum Origin {
-    Def,
-    Call,
-}
-
-impl DeclarativeMacro {
-    pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro {
-        DeclarativeMacro {
-            rules: Box::default(),
-            shift: Shift(0),
-            is_2021,
-            err: Some(Box::new(err)),
-        }
+impl<S: Span> DeclarativeMacro<S> {
+    pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro<S> {
+        DeclarativeMacro { rules: Box::default(), is_2021, err: Some(Box::new(err)) }
     }

     /// The old, `macro_rules! m {}` flavor.
-    pub fn parse_macro_rules(tt: &tt::Subtree<TokenId>, is_2021: bool) -> DeclarativeMacro {
+    pub fn parse_macro_rules(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> {
         // Note: this parsing can be implemented using mbe machinery itself, by
         // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
         // manually seems easier.
@@ -257,11 +175,11 @@ impl DeclarativeMacro {
             }
         }

-        DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
+        DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err }
     }

     /// The new, unstable `macro m {}` flavor.
-    pub fn parse_macro2(tt: &tt::Subtree<TokenId>, is_2021: bool) -> DeclarativeMacro {
+    pub fn parse_macro2(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> {
         let mut src = TtIter::new(tt);
         let mut rules = Vec::new();
         let mut err = None;
@@ -308,31 +226,15 @@ impl DeclarativeMacro {
             }
         }

-        DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
-    }
-
-    pub fn expand(&self, mut tt: tt::Subtree<TokenId>) -> ExpandResult<tt::Subtree<TokenId>> {
-        self.shift.shift_all(&mut tt);
-        expander::expand_rules(&self.rules, &tt, self.is_2021)
+        DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err }
     }

     pub fn err(&self) -> Option<&ParseError> {
         self.err.as_deref()
     }

-    pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
-        self.shift.shift(id)
-    }
-
-    pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) {
-        match self.shift.unshift(id) {
-            Some(id) => (id, Origin::Call),
-            None => (id, Origin::Def),
-        }
-    }
-
-    pub fn shift(&self) -> Shift {
-        self.shift
+    pub fn expand(&self, tt: &tt::Subtree<S>) -> ExpandResult<tt::Subtree<S>> {
+        expander::expand_rules(&self.rules, &tt, self.is_2021)
     }
 }
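With `Shift` and its token-id renumbering gone, `DeclarativeMacro` is generic over the span type and `expand` simply borrows the invocation subtree. Roughly, the call-side flow after this hunk looks like the following sketch; the types are local stand-ins with simplified constructors, not the real `mbe`/`tt` items:

```rust
// Sketch of the new call-side flow with stand-in types.
#[derive(Clone, Debug)]
struct Subtree<S> {
    token_trees: Vec<S>,
}

struct ExpandResult<T> {
    value: T,
    err: Option<String>,
}

struct DeclarativeMacro<S> {
    rules: Vec<Subtree<S>>, // stand-in for the parsed rules
}

impl<S: Clone> DeclarativeMacro<S> {
    // Parsing is unchanged in spirit, but the result is generic over `S`.
    fn parse_macro_rules(tt: &Subtree<S>) -> DeclarativeMacro<S> {
        DeclarativeMacro { rules: vec![tt.clone()] }
    }

    // The old API took `mut tt: Subtree<TokenId>` and called
    // `self.shift.shift_all(&mut tt)` first; now the input is only borrowed.
    fn expand(&self, tt: &Subtree<S>) -> ExpandResult<Subtree<S>> {
        ExpandResult { value: tt.clone(), err: None }
    }
}

fn main() {
    let def: Subtree<u32> = Subtree { token_trees: vec![1, 2, 3] };
    let mac = DeclarativeMacro::parse_macro_rules(&def);
    let call = Subtree { token_trees: vec![4] };
    let res = mac.expand(&call);
    assert!(res.err.is_none());
    assert_eq!(res.value.token_trees.len(), 1);
}
```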

(File diff suppressed because it is too large)
@@ -4,24 +4,32 @@ use syntax::{ast, AstNode};
 use test_utils::extract_annotations;
 use tt::{
     buffer::{TokenBuffer, TokenTreeRef},
-    Leaf, Punct, Spacing,
+    Leaf, Punct, Spacing, Span,
 };

+use crate::syntax_bridge::SpanData;
+
 use super::syntax_node_to_token_tree;

 fn check_punct_spacing(fixture: &str) {
+    #[derive(PartialEq, Eq, Clone, Copy, Debug)]
+    struct DummyFile;
+    impl Span for DummyFile {
+        const DUMMY: Self = DummyFile;
+    }
+
     let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
-    let (subtree, token_map) = syntax_node_to_token_tree(source_file.syntax());
+    let subtree =
+        syntax_node_to_token_tree(source_file.syntax(), DummyFile, 0.into(), &Default::default());
     let mut annotations: HashMap<_, _> = extract_annotations(fixture)
         .into_iter()
         .map(|(range, annotation)| {
-            let token = token_map.token_by_range(range).expect("no token found");
             let spacing = match annotation.as_str() {
                 "Alone" => Spacing::Alone,
                 "Joint" => Spacing::Joint,
                 a => panic!("unknown annotation: {a}"),
             };
-            (token, spacing)
+            (range, spacing)
         })
         .collect();

@@ -29,8 +37,12 @@ fn check_punct_spacing(fixture: &str) {
     let mut cursor = buf.begin();
     while !cursor.eof() {
         while let Some(token_tree) = cursor.token_tree() {
-            if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, span, .. }), _) = token_tree {
-                if let Some(expected) = annotations.remove(span) {
+            if let TokenTreeRef::Leaf(
+                Leaf::Punct(Punct { spacing, span: SpanData { range, .. }, .. }),
+                _,
+            ) = token_tree
+            {
+                if let Some(expected) = annotations.remove(range) {
                     assert_eq!(expected, *spacing);
                 }
             }
@@ -2,123 +2,121 @@

 use std::hash::Hash;

-use parser::{SyntaxKind, T};
-use syntax::{TextRange, TextSize};
+use syntax::TextRange;
+use tt::Span;

-use crate::syntax_bridge::SyntheticTokenId;
+// pub type HirFile = u32;
+// pub type FileRange = (HirFile, TextRange);
+// Option<MacroCallId>, LocalSyntaxContet
+// pub type SyntaxContext = ();
+// pub type LocalSyntaxContext = u32;

-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-enum TokenTextRange {
-    Token(TextRange),
-    Delimiter(TextRange),
+/// Maps absolute text ranges for the corresponding file to the relevant span data.
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+// FIXME: Rename to SpanMap
+pub struct TokenMap<S> {
+    // FIXME: This needs to be sorted by (FileId, AstId)
+    // Then we can do a binary search on the file id,
+    // then a bin search on the ast id
+    pub span_map: Vec<(TextRange, S)>,
+    // span_map2: rustc_hash::FxHashMap<TextRange, usize>,
 }

-impl TokenTextRange {
-    fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
-        match self {
-            TokenTextRange::Token(it) => Some(it),
-            TokenTextRange::Delimiter(it) => match kind {
-                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
-                T!['}'] | T![')'] | T![']'] => {
-                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
-                }
-                _ => None,
-            },
-        }
+impl<S> Default for TokenMap<S> {
+    fn default() -> Self {
+        Self { span_map: Vec::new() }
     }
 }

-/// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
-pub struct TokenMap {
-    /// Maps `tt::TokenId` to the *relative* source range.
-    entries: Vec<(tt::TokenId, TokenTextRange)>,
-    pub synthetic_entries: Vec<(tt::TokenId, SyntheticTokenId)>,
-}
-
-impl TokenMap {
-    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
-            TokenTextRange::Token(it) => *it == relative_range,
-            TokenTextRange::Delimiter(it) => {
-                let open = TextRange::at(it.start(), 1.into());
-                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
-                open == relative_range || close == relative_range
-            }
-        })?;
-        Some(token_id)
-    }
-
-    pub fn ranges_by_token(
-        &self,
-        token_id: tt::TokenId,
-        kind: SyntaxKind,
-    ) -> impl Iterator<Item = TextRange> + '_ {
-        self.entries
-            .iter()
-            .filter(move |&&(tid, _)| tid == token_id)
-            .filter_map(move |(_, range)| range.by_kind(kind))
-    }
-
-    pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option<SyntheticTokenId> {
-        self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id)
-    }
-
-    pub fn first_range_by_token(
-        &self,
-        token_id: tt::TokenId,
-        kind: SyntaxKind,
-    ) -> Option<TextRange> {
-        self.ranges_by_token(token_id, kind).next()
-    }
-
+impl<S: Span> TokenMap<S> {
     pub(crate) fn shrink_to_fit(&mut self) {
-        self.entries.shrink_to_fit();
-        self.synthetic_entries.shrink_to_fit();
+        self.span_map.shrink_to_fit();
     }

-    pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
-        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
+    pub(crate) fn insert(&mut self, range: TextRange, span: S) {
+        self.span_map.push((range, span));
     }

-    pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) {
-        self.synthetic_entries.push((token_id, id));
+    pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ {
+        self.span_map.iter().filter_map(
+            move |(range, s)| {
+                if s == &span {
+                    Some(*range)
+                } else {
+                    None
+                }
+            },
+        )
     }

-    pub(crate) fn insert_delim(
-        &mut self,
-        token_id: tt::TokenId,
-        open_relative_range: TextRange,
-        close_relative_range: TextRange,
-    ) -> usize {
-        let res = self.entries.len();
-        let cover = open_relative_range.cover(close_relative_range);
-
-        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
-        res
+    pub fn span_for_range(&self, range: TextRange) -> Option<S> {
+        self.span_map.iter().find_map(|(r, s)| if r == &range { Some(s.clone()) } else { None })
     }

-    pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
-        let (_, token_text_range) = &mut self.entries[idx];
-        if let TokenTextRange::Delimiter(dim) = token_text_range {
-            let cover = dim.cover(close_relative_range);
-            *token_text_range = TokenTextRange::Delimiter(cover);
-        }
-    }
-
-    pub(crate) fn remove_delim(&mut self, idx: usize) {
-        // FIXME: This could be accidentally quadratic
-        self.entries.remove(idx);
-    }
-
-    pub fn entries(&self) -> impl Iterator<Item = (tt::TokenId, TextRange)> + '_ {
-        self.entries.iter().filter_map(|&(tid, tr)| match tr {
-            TokenTextRange::Token(range) => Some((tid, range)),
-            TokenTextRange::Delimiter(_) => None,
-        })
-    }
-
-    pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) {
-        self.entries.retain(|&(tid, _)| id(tid));
-    }
+    // pub fn ranges_by_token(
+    //     &self,
+    //     token_id: tt::TokenId,
+    //     kind: SyntaxKind,
+    // ) -> impl Iterator<Item = TextRange> + '_ {
+    //     self.entries
+    //         .iter()
+    //         .filter(move |&&(tid, _)| tid == token_id)
+    //         .filter_map(move |(_, range)| range.by_kind(kind))
+    // }
+
+    // pub(crate) fn remove_delim(&mut self, idx: usize) {
+    //     // FIXME: This could be accidentally quadratic
+    //     self.entries.remove(idx);
+    // }
+
+    // pub fn entries(&self) -> impl Iterator<Item = (tt::TokenId, TextRange)> + '_ {
+    //     self.entries.iter().filter_map(|&(tid, tr)| match tr {
+    //         TokenTextRange::Token(range) => Some((tid, range)),
+    //         TokenTextRange::Delimiter(_) => None,
+    //     })
+    // }
+
+    // pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) {
+    //     self.entries.retain(|&(tid, _)| id(tid));
+    // }
+    // pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option<SyntheticTokenId> {
+    //     self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id)
+    // }
+
+    // pub fn first_range_by_token(
+    //     &self,
+    //     token_id: tt::TokenId,
+    //     kind: SyntaxKind,
+    // ) -> Option<TextRange> {
+    //     self.ranges_by_token(token_id, kind).next()
+    // }
+
+    // pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
+    //     self.entries.push((token_id, TokenTextRange::Token(relative_range)));
+    // }
+
+    // pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) {
+    //     self.synthetic_entries.push((token_id, id));
+    // }
+
+    // pub(crate) fn insert_delim(
+    //     &mut self,
+    //     token_id: tt::TokenId,
+    //     open_relative_range: TextRange,
+    //     close_relative_range: TextRange,
+    // ) -> usize {
+    //     let res = self.entries.len();
+    //     let cover = open_relative_range.cover(close_relative_range);
+
+    //     self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
+    //     res
+    // }
+
+    // pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
+    //     let (_, token_text_range) = &mut self.entries[idx];
+    //     if let TokenTextRange::Delimiter(dim) = token_text_range {
+    //         let cover = dim.cover(close_relative_range);
+    //         *token_text_range = TokenTextRange::Delimiter(cover);
+    //     }
+    // }
 }
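This hunk is the heart of the commit: the id-based `TokenMap` becomes a plain list of `(TextRange, span)` pairs queried by exact range or by span. A self-contained sketch of that lookup behavior, using `(u32, u32)` tuples in place of `TextRange` so the example runs without the `text-size` crate:

```rust
// Minimal model of the new TokenMap: absolute text ranges mapped to spans.
#[derive(Default)]
struct TokenMap<S> {
    span_map: Vec<((u32, u32), S)>,
}

impl<S: Copy + Eq> TokenMap<S> {
    fn insert(&mut self, range: (u32, u32), span: S) {
        self.span_map.push((range, span));
    }

    // Reverse lookup: all ranges carrying a given span (used when mapping
    // expansion output back to the input).
    fn ranges_with_span(&self, span: S) -> impl Iterator<Item = (u32, u32)> + '_ {
        self.span_map.iter().filter_map(move |&(range, s)| (s == span).then_some(range))
    }

    // Forward lookup: the span recorded for an exact range.
    fn span_for_range(&self, range: (u32, u32)) -> Option<S> {
        self.span_map.iter().find_map(|&(r, s)| (r == range).then_some(s))
    }
}

fn main() {
    let mut map = TokenMap::default();
    map.insert((0, 3), 7u32);
    map.insert((4, 7), 8u32);
    assert_eq!(map.span_for_range((4, 7)), Some(8));
    assert_eq!(map.ranges_with_span(7).collect::<Vec<_>>(), vec![(0, 3)]);
}
```

Both lookups are linear scans here, which matches the FIXME comments in the hunk about eventually sorting the map and binary-searching it.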
@@ -31,5 +31,6 @@ paths.workspace = true
 tt.workspace = true
 stdx.workspace = true
 profile.workspace = true
+text-size.workspace = true
 # Intentionally *not* depend on anything salsa-related
 # base-db.workspace = true
@@ -38,6 +38,7 @@
 use std::collections::{HashMap, VecDeque};

 use serde::{Deserialize, Serialize};
+use text_size::TextRange;
 use tt::Span;

 use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, VARIABLE_SIZED_SPANS};
@@ -55,6 +56,19 @@ impl SerializableSpan<1> for tt::TokenId {
     }
 }

+impl<FileId> SerializableSpan<3> for tt::SpanData<FileId>
+where
+    FileId: From<u32> + Into<u32>,
+    Self: Span,
+{
+    fn into_u32(self) -> [u32; 3] {
+        [self.anchor.into(), self.range.start().into(), self.range.end().into()]
+    }
+    fn from_u32([file_id, start, end]: [u32; 3]) -> Self {
+        tt::SpanData { anchor: file_id.into(), range: TextRange::new(start.into(), end.into()) }
+    }
+}
+
 #[derive(Serialize, Deserialize, Debug)]
 pub struct FlatTree {
     subtree: Vec<u32>,
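The added impl flattens a `SpanData` into three `u32`s (anchor, range start, range end) for the proc-macro wire format, where the old `TokenId` spans needed only one. A quick round-trip sketch of that encoding with plain tuples (no serde, no `text-size` dependency):

```rust
// Encode a (file_id, start, end) span as [u32; 3] and back, mirroring the
// `into_u32`/`from_u32` pair added for `tt::SpanData` in the hunk above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct SpanData {
    anchor: u32,       // file id
    range: (u32, u32), // (start, end) offsets
}

impl SpanData {
    fn into_u32(self) -> [u32; 3] {
        [self.anchor, self.range.0, self.range.1]
    }

    fn from_u32([anchor, start, end]: [u32; 3]) -> Self {
        SpanData { anchor, range: (start, end) }
    }
}

fn main() {
    let span = SpanData { anchor: 1, range: (10, 14) };
    // Round-tripping through the flat encoding must be lossless.
    assert_eq!(SpanData::from_u32(span.into_u32()), span);
}
```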
@@ -209,17 +209,24 @@ mod tests {
     use super::*;

     use cfg::CfgExpr;
+    use hir::HirFileId;
+    use ide_db::base_db::span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID};
     use mbe::syntax_node_to_token_tree;
     use syntax::{
         ast::{self, AstNode},
-        SmolStr,
+        SmolStr, TextSize,
     };

     fn check(cfg: &str, expected_features: &[&str]) {
         let cfg_expr = {
             let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
             let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-            let (tt, _) = syntax_node_to_token_tree(tt.syntax());
+            let tt = syntax_node_to_token_tree(
+                tt.syntax(),
+                SpanAnchor { file_id: HirFileId::from(0), ast_id: ROOT_ERASED_FILE_AST_ID },
+                TextSize::new(0),
+                &Default::default(),
+            );
             CfgExpr::parse(&tt)
         };

@@ -8,7 +8,7 @@ use std::{

 use hir::{
     db::{DefDatabase, ExpandDatabase, HirDatabase},
-    Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ModuleDef, Name,
+    Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ModuleDef, Name,
 };
 use hir_def::{
     body::{BodySourceMap, SyntheticSyntax},

@@ -4,7 +4,7 @@
 use project_model::{CargoConfig, RustLibSource};
 use rustc_hash::FxHashSet;

-use hir::{db::HirDatabase, Crate, Module};
+use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
 use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity};
 use ide_db::base_db::SourceDatabaseExt;
 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
@@ -75,7 +75,7 @@ pub use smol_str::SmolStr;
 #[derive(Debug, PartialEq, Eq)]
 pub struct Parse<T> {
     green: GreenNode,
-    errors: Arc<[SyntaxError]>,
+    errors: Option<Arc<[SyntaxError]>>,
     _ty: PhantomData<fn() -> T>,
 }

@@ -87,14 +87,18 @@ impl<T> Clone for Parse<T> {

 impl<T> Parse<T> {
     fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
-        Parse { green, errors: errors.into(), _ty: PhantomData }
+        Parse {
+            green,
+            errors: if errors.is_empty() { None } else { Some(errors.into()) },
+            _ty: PhantomData,
+        }
     }

     pub fn syntax_node(&self) -> SyntaxNode {
         SyntaxNode::new_root(self.green.clone())
     }
     pub fn errors(&self) -> &[SyntaxError] {
-        &self.errors
+        self.errors.as_deref().unwrap_or_default()
     }
 }

@@ -108,10 +112,9 @@ impl<T: AstNode> Parse<T> {
     }

     pub fn ok(self) -> Result<T, Arc<[SyntaxError]>> {
-        if self.errors.is_empty() {
-            Ok(self.tree())
-        } else {
-            Err(self.errors)
+        match self.errors {
+            Some(e) => Err(e),
+            None => Ok(self.tree()),
         }
     }
 }
@@ -129,7 +132,7 @@ impl Parse<SyntaxNode> {
 impl Parse<SourceFile> {
     pub fn debug_dump(&self) -> String {
         let mut buf = format!("{:#?}", self.tree().syntax());
-        for err in self.errors.iter() {
+        for err in self.errors.as_deref().into_iter().flat_map(<[_]>::iter) {
             format_to!(buf, "error {:?}: {}\n", err.range(), err);
         }
         buf
@@ -141,13 +144,16 @@ impl Parse<SourceFile> {

     fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> {
         // FIXME: validation errors are not handled here
-        parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map(
-            |(green_node, errors, _reparsed_range)| Parse {
-                green: green_node,
-                errors: errors.into(),
-                _ty: PhantomData,
-            },
+        parsing::incremental_reparse(
+            self.tree().syntax(),
+            indel,
+            self.errors.as_deref().unwrap_or_default().iter().cloned(),
         )
+        .map(|(green_node, errors, _reparsed_range)| Parse {
+            green: green_node,
+            errors: if errors.is_empty() { None } else { Some(errors.into()) },
+            _ty: PhantomData,
+        })
     }

     fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> {
@@ -168,7 +174,11 @@ impl SourceFile {
         errors.extend(validation::validate(&root));

         assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
-        Parse { green, errors: errors.into(), _ty: PhantomData }
+        Parse {
+            green,
+            errors: if errors.is_empty() { None } else { Some(errors.into()) },
+            _ty: PhantomData,
+        }
     }
 }

@@ -275,7 +285,11 @@ impl ast::TokenTree {

         let (green, errors) = builder.finish_raw();

-        Parse { green, errors: errors.into(), _ty: PhantomData }
+        Parse {
+            green,
+            errors: if errors.is_empty() { None } else { Some(errors.into()) },
+            _ty: PhantomData,
+        }
     }
 }

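Every construction site above now stores `None` instead of an empty error slice. The point of `Option<Arc<[SyntaxError]>>` is that the overwhelmingly common error-free parse costs no allocation, while `errors()` keeps its old slice-returning shape for callers. A reduced sketch of the pattern (using `String` in place of `SyntaxError`):

```rust
use std::sync::Arc;

struct Parse {
    // `None` for the (common) error-free case avoids allocating an empty Arc slice.
    errors: Option<Arc<[String]>>,
}

impl Parse {
    fn new(errors: Vec<String>) -> Parse {
        Parse { errors: if errors.is_empty() { None } else { Some(errors.into()) } }
    }

    // Callers still see a plain slice, empty when there were no errors.
    fn errors(&self) -> &[String] {
        self.errors.as_deref().unwrap_or_default()
    }
}

fn main() {
    let clean = Parse::new(vec![]);
    assert!(clean.errors().is_empty());
    let broken = Parse::new(vec!["unexpected token".to_owned()]);
    assert_eq!(broken.errors().len(), 1);
}
```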
@@ -20,7 +20,7 @@ use crate::{
 pub(crate) fn incremental_reparse(
     node: &SyntaxNode,
     edit: &Indel,
-    errors: Vec<SyntaxError>,
+    errors: impl IntoIterator<Item = SyntaxError>,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
     if let Some((green, new_errors, old_range)) = reparse_token(node, edit) {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
@@ -147,7 +147,7 @@ fn is_balanced(lexed: &parser::LexedStr<'_>) -> bool {
 }

 fn merge_errors(
-    old_errors: Vec<SyntaxError>,
+    old_errors: impl IntoIterator<Item = SyntaxError>,
     new_errors: Vec<SyntaxError>,
     range_before_reparse: TextRange,
     edit: &Indel,
@@ -191,8 +191,12 @@ mod tests {
         let fully_reparsed = SourceFile::parse(&after);
         let incrementally_reparsed: Parse<SourceFile> = {
             let before = SourceFile::parse(&before);
-            let (green, new_errors, range) =
-                incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap();
+            let (green, new_errors, range) = incremental_reparse(
+                before.tree().syntax(),
+                &edit,
+                before.errors.as_deref().unwrap_or_default().iter().cloned(),
+            )
+            .unwrap();
             assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
             Parse::new(green, new_errors)
         };
@@ -38,7 +38,7 @@ fn benchmark_parser() {
     let tree = {
         let _b = bench("parsing");
         let p = SourceFile::parse(&data);
-        assert!(p.errors.is_empty());
+        assert!(p.errors.is_none());
         assert_eq!(p.tree().syntax.text_range().len(), 352474.into());
         p.tree()
     };
@@ -13,5 +13,6 @@ doctest = false
 
 [dependencies]
 smol_str.workspace = true
+text-size.workspace = true
 
 stdx.workspace = true
@@ -7,6 +7,7 @@
 use std::fmt;
 
 use stdx::impl_from;
+use text_size::{TextRange, TextSize};
 
 pub use smol_str::SmolStr;
@@ -31,36 +32,25 @@ impl TokenId {
         Self::UNSPECIFIED
     }
 }
 
-pub mod token_id {
-    pub use crate::{DelimiterKind, Spacing, TokenId};
-    pub type Span = crate::TokenId;
-    pub type Subtree = crate::Subtree<Span>;
-    pub type Punct = crate::Punct<Span>;
-    pub type Delimiter = crate::Delimiter<Span>;
-    pub type Leaf = crate::Leaf<Span>;
-    pub type Ident = crate::Ident<Span>;
-    pub type Literal = crate::Literal<Span>;
-    pub type TokenTree = crate::TokenTree<Span>;
-    pub mod buffer {
-        pub type TokenBuffer<'a> = crate::buffer::TokenBuffer<'a, super::Span>;
-        pub type Cursor<'a> = crate::buffer::Cursor<'a, super::Span>;
-        pub type TokenTreeRef<'a> = crate::buffer::TokenTreeRef<'a, super::Span>;
-    }
-}
-
-pub trait Span: std::fmt::Debug + Copy + Sized {
-    const DUMMY: Self;
-    fn is_dummy(&self) -> bool;
-}
 impl Span for TokenId {
     const DUMMY: Self = TokenId(!0);
-
-    fn is_dummy(&self) -> bool {
-        *self == Self::DUMMY
-    }
 }
 
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+pub struct SpanData<Anchor> {
+    /// The text range of this span, relative to the anchor.
+    pub range: TextRange,
+    pub anchor: Anchor,
+}
+
+impl<Anchor: Span> Span for SpanData<Anchor> {
+    const DUMMY: Self =
+        SpanData { range: TextRange::empty(TextSize::new(0)), anchor: Anchor::DUMMY };
+}
+
+pub trait Span: std::fmt::Debug + Copy + Sized + Eq {
+    const DUMMY: Self;
+}
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct SyntaxContext(pub u32);
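This hunk is the heart of the commit: `SpanData<Anchor>` replaces the opaque `TokenId` as the span carried by token trees, pairing a `TextRange` that is relative to some anchor with the anchor itself. A rough sketch of how such an anchored span might be built and queried, using a hypothetical `FileId` as the anchor (the real anchor types are defined elsewhere in this change):

use text_size::{TextRange, TextSize};

// Hypothetical anchor: in rust-analyzer the anchor identifies the file or
// macro call the range is relative to; a bare u32 id stands in here.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct SpanData<Anchor> {
    /// Text range relative to the anchor, not an opaque global id.
    range: TextRange,
    anchor: Anchor,
}

fn main() {
    // A token spanning bytes 4..7 of file 0.
    let span = SpanData {
        range: TextRange::new(TextSize::new(4), TextSize::new(7)),
        anchor: FileId(0),
    };
    assert_eq!(u32::from(span.range.len()), 3);
    assert_eq!(span.anchor, FileId(0));
}

The design advantage over id-based token maps is locality: a relative range stays valid as long as its anchor region is unchanged, so edits elsewhere do not invalidate it and no side-table lookup is needed to recover positions.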
@@ -134,7 +124,6 @@ impl<S: Span> Delimiter<S> {
     }
 }
 
-
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub enum DelimiterKind {
     Parenthesis,