commit 05f375eae2 (parent e36b3f7b8c)
https://github.com/rust-lang/rust-analyzer

hygiene 2.0

43 changed files with 758 additions and 458 deletions
@@ -12,7 +12,7 @@ authors = ["rust-analyzer team"]
 [profile.dev]
 # Disabling debug info speeds up builds a bunch,
 # and we don't rely on it for debugging that much.
-debug = 0
+debug = 1
 
 [profile.dev.package]
 # These speed up local tests.
@@ -17,9 +17,18 @@ pub struct SyntaxContextId(InternId);
 crate::impl_intern_key!(SyntaxContextId);
 
 impl SyntaxContext for SyntaxContextId {
+    const DUMMY: Self = Self::ROOT;
+    // veykril(HACK): salsa doesn't allow us fetching the id of the current input to be allocated so
+    // we need a special value that behaves as the current context.
+}
+// inherent trait impls please tyvm
+impl SyntaxContextId {
     // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context
     // currently (which kind of makes sense but we need it here!)
-    const DUMMY: Self = SyntaxContextId(unsafe { core::mem::transmute(1) });
+    pub const ROOT: Self = SyntaxContextId(unsafe { core::mem::transmute(1) });
+    // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context
+    // currently (which kind of makes sense but we need it here!)
+    pub const SELF_REF: Self = SyntaxContextId(unsafe { core::mem::transmute(!0u32) });
 }
 
 #[derive(Copy, Clone, PartialEq, Eq, Hash)]
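Aside: the `unsafe { core::mem::transmute(1) }` trick in the hunk above exists only because salsa's `InternId` cannot be constructed in a `const` context. A minimal sketch of the same idea, reserving fixed interner slots for special contexts, written safely; this is hypothetical standalone code, not part of the commit, and `NonZeroU32` merely stands in for salsa's `InternId`:

use std::num::NonZeroU32;

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct SyntaxContextId(NonZeroU32);

impl SyntaxContextId {
    // Assumes the interner hands out slot 1 first, so id 1 always means "root".
    const ROOT: Self = match NonZeroU32::new(1) {
        Some(id) => SyntaxContextId(id),
        None => unreachable!(),
    };
    // All-ones sentinel playing the role of SELF_REF above: "the context
    // currently being interned".
    const SELF_REF: Self = match NonZeroU32::new(u32::MAX) {
        Some(id) => SyntaxContextId(id),
        None => unreachable!(),
    };
}

fn main() {
    assert_ne!(SyntaxContextId::ROOT, SyntaxContextId::SELF_REF);
}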
@@ -706,7 +706,7 @@ impl<'a> AssocItemCollector<'a> {
                 }
                 AssocItem::MacroCall(call) => {
                     let file_id = self.expander.current_file_id();
-                    let MacroCall { ast_id, expand_to, ref path } = item_tree[call];
+                    let MacroCall { ast_id, expand_to, call_site, ref path } = item_tree[call];
                     let module = self.expander.module.local_id;
 
                     let resolver = |path| {
@@ -725,6 +725,7 @@ impl<'a> AssocItemCollector<'a> {
                     match macro_call_as_call_id(
                         self.db.upcast(),
                         &AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
+                        call_site,
                         expand_to,
                         self.expander.module.krate(),
                         resolver,
@@ -7,11 +7,12 @@ use base_db::{
 use cfg::CfgOptions;
 use drop_bomb::DropBomb;
 use hir_expand::{
-    attrs::RawAttrs, hygiene::Hygiene, mod_path::ModPath, ExpandError, ExpandResult, HirFileId,
-    InFile, MacroCallId, UnresolvedMacro,
+    attrs::RawAttrs, mod_path::ModPath, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId,
+    SpanMap, UnresolvedMacro,
 };
 use limit::Limit;
 use syntax::{ast, Parse, SyntaxNode};
+use triomphe::Arc;
 
 use crate::{
     attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall,
@@ -21,7 +22,7 @@ use crate::{
 #[derive(Debug)]
 pub struct Expander {
     cfg_options: CfgOptions,
-    hygiene: Hygiene,
+    hygiene: Arc<SpanMap>,
     krate: CrateId,
     pub(crate) current_file_id: HirFileId,
     pub(crate) module: ModuleId,
@@ -44,7 +45,7 @@ impl Expander {
             recursion_depth: 0,
             recursion_limit,
             cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
-            hygiene: Hygiene::new(db.upcast(), current_file_id),
+            hygiene: db.span_map(current_file_id),
             krate: module.krate,
         }
     }
@@ -98,7 +99,7 @@ impl Expander {
     }
 
     pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
-        self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
+        self.hygiene = db.span_map(mark.file_id);
         self.current_file_id = mark.file_id;
         if self.recursion_depth == u32::MAX {
             // Recursion limit has been reached somewhere in the macro expansion tree. Reset the
@@ -113,7 +114,7 @@ impl Expander {
     }
 
     pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> {
-        LowerCtx::new(db, &self.hygiene, self.current_file_id)
+        LowerCtx::new(db, self.hygiene.clone(), self.current_file_id)
     }
 
     pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
@@ -143,7 +144,7 @@ impl Expander {
     }
 
     pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
-        let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id);
+        let ctx = LowerCtx::new(db, self.hygiene.clone(), self.current_file_id);
         Path::from_src(path, &ctx)
     }
 
@@ -187,7 +188,7 @@ impl Expander {
         let parse = value.cast::<T>()?;
 
         self.recursion_depth += 1;
-        self.hygiene = Hygiene::new(db.upcast(), file_id);
+        self.hygiene = db.span_map(file_id);
         let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
         let mark = Mark {
             file_id: old_file_id,
@@ -586,7 +586,7 @@ fn find_local_import_locations(
 #[cfg(test)]
 mod tests {
     use base_db::fixture::WithFixture;
-    use hir_expand::hygiene::Hygiene;
+    use hir_expand::SpanMap;
     use syntax::ast::AstNode;
 
     use crate::test_db::TestDB;
@@ -608,7 +608,7 @@ mod tests {
         let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
         let ast_path =
             parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
-        let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap();
+        let mod_path = ModPath::from_src(&db, ast_path, &SpanMap::default()).unwrap();
 
         let def_map = module.def_map(&db);
         let resolved = def_map
@@ -44,14 +44,13 @@ use std::{
 
 use ast::{AstNode, HasName, StructKind};
 use base_db::{
-    span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
+    span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
     CrateId,
 };
 use either::Either;
 use hir_expand::{
     ast_id_map::{AstIdNode, FileAstId},
     attrs::RawAttrs,
-    hygiene::Hygiene,
     name::{name, AsName, Name},
     ExpandTo, HirFileId, InFile,
 };
@@ -122,7 +121,7 @@ impl ItemTree {
         let mut item_tree = match_ast! {
             match syntax {
                 ast::SourceFile(file) => {
-                    top_attrs = Some(RawAttrs::new(db.upcast(), SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, &file, ctx.hygiene()));
+                    top_attrs = Some(RawAttrs::new(db.upcast(), SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, &file, ctx.span_map()));
                     ctx.lower_module_items(&file)
                 },
                 ast::MacroItems(items) => {
@@ -750,6 +749,7 @@ pub struct MacroCall {
     pub path: Interned<ModPath>,
     pub ast_id: FileAstId<ast::MacroCall>,
     pub expand_to: ExpandTo,
+    pub call_site: SyntaxContextId,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
@@ -779,7 +779,7 @@ impl Use {
         // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
         let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
         let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
-        let hygiene = Hygiene::new(db.upcast(), file_id);
+        let hygiene = db.span_map(file_id);
         let (_, source_map) =
             lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree");
         source_map[index].clone()
@@ -794,7 +794,7 @@ impl Use {
         // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
         let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
         let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
-        let hygiene = Hygiene::new(db.upcast(), file_id);
+        let hygiene = db.span_map(file_id);
         lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree").1
     }
 }
@@ -3,7 +3,7 @@
 use std::collections::hash_map::Entry;
 
 use base_db::span::ErasedFileAstId;
-use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
+use hir_expand::{ast_id_map::AstIdMap, HirFileId, SpanMap};
 use syntax::ast::{self, HasModuleItem, HasTypeBounds};
 
 use crate::{
@@ -37,8 +37,8 @@ impl<'a> Ctx<'a> {
         }
     }
 
-    pub(super) fn hygiene(&self) -> &Hygiene {
-        self.body_ctx.hygiene()
+    pub(super) fn span_map(&self) -> &SpanMap {
+        self.body_ctx.span_map()
     }
 
     pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree {
@@ -90,7 +90,7 @@ impl<'a> Ctx<'a> {
                     ast_id: self.source_ast_id_map.ast_id(block).erase(),
                 },
                 block,
-                self.hygiene(),
+                self.span_map(),
             ),
         );
         self.tree.top_level = block
@@ -145,7 +145,7 @@ impl<'a> Ctx<'a> {
                 self.db.upcast(),
                 SpanAnchor { file_id: self.file, ast_id: mod_item.ast_id(&self.tree).erase() },
                 item,
-                self.hygiene(),
+                self.span_map(),
             );
             self.add_attrs(mod_item.into(), attrs);
 
@@ -174,7 +174,7 @@ impl<'a> Ctx<'a> {
                 self.db.upcast(),
                 SpanAnchor { file_id: self.file, ast_id: item.ast_id(&self.tree).erase() },
                 item_node,
-                self.hygiene(),
+                self.span_map(),
             );
             self.add_attrs(
                 match item {
@@ -227,7 +227,7 @@ impl<'a> Ctx<'a> {
                     self.db.upcast(),
                    SpanAnchor { file_id: self.file, ast_id },
                     &field,
-                    self.hygiene(),
+                    self.span_map(),
                 ),
             );
         }
@@ -260,7 +260,7 @@ impl<'a> Ctx<'a> {
                     self.db.upcast(),
                     SpanAnchor { file_id: self.file, ast_id },
                     &field,
-                    self.hygiene(),
+                    self.span_map(),
                 ),
             );
         }
@@ -314,7 +314,7 @@ impl<'a> Ctx<'a> {
                     self.db.upcast(),
                     SpanAnchor { file_id: self.file, ast_id },
                     &variant,
-                    self.hygiene(),
+                    self.span_map(),
                 ),
             );
         }
@@ -370,7 +370,7 @@ impl<'a> Ctx<'a> {
                     self.db.upcast(),
                     SpanAnchor { file_id: self.file, ast_id: ast_id.erase() },
                     &self_param,
-                    self.hygiene(),
+                    self.span_map(),
                 ),
             );
             has_self_param = true;
@@ -396,7 +396,7 @@ impl<'a> Ctx<'a> {
                     self.db.upcast(),
                     SpanAnchor { file_id: self.file, ast_id: ast_id.erase() },
                     &param,
-                    self.hygiene(),
+                    self.span_map(),
                 ),
             );
         }
@@ -585,7 +585,7 @@ impl<'a> Ctx<'a> {
     fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
         let visibility = self.lower_visibility(use_item);
         let ast_id = self.source_ast_id_map.ast_id(use_item);
-        let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?;
+        let (use_tree, _) = lower_use_tree(self.db, self.span_map(), use_item.use_tree()?)?;
 
         let res = Use { visibility, ast_id, use_tree };
         Some(id(self.data().uses.alloc(res)))
@@ -607,10 +607,18 @@ impl<'a> Ctx<'a> {
     }
 
     fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
-        let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, self.hygiene())?);
+        let span_map = self.span_map();
+        let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, span_map)?);
         let ast_id = self.source_ast_id_map.ast_id(m);
         let expand_to = hir_expand::ExpandTo::from_call_site(m);
-        let res = MacroCall { path, ast_id, expand_to };
+        let res = MacroCall {
+            path,
+            ast_id,
+            expand_to,
+            call_site: span_map
+                .span_for_range(m.syntax().text_range())
+                .map_or(SyntaxContextId::ROOT, |s| s.ctx),
+        };
         Some(id(self.data().macro_calls.alloc(res)))
     }
 
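The `span_for_range(..).map_or(SyntaxContextId::ROOT, |s| s.ctx)` pattern in `lower_macro_call` above is how the call-site context gets recorded throughout this commit. A minimal self-contained sketch of that lookup, using hypothetical stand-in types for `SpanMap` and `Span` (the real ones live in `base_db`/`hir-expand`):

use std::ops::Range;

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct SyntaxContextId(u32);

impl SyntaxContextId {
    const ROOT: Self = SyntaxContextId(1);
}

#[derive(Copy, Clone, Debug)]
struct Span {
    ctx: SyntaxContextId,
}

struct SpanMap {
    // Text ranges paired with the span metadata recorded for them.
    spans: Vec<(Range<u32>, Span)>,
}

impl SpanMap {
    fn span_for_range(&self, range: Range<u32>) -> Option<Span> {
        self.spans
            .iter()
            .find(|(r, _)| r.start <= range.start && range.end <= r.end)
            .map(|&(_, s)| s)
    }
}

// Mirrors the diff: tokens with no recorded span (plain source files)
// fall back to the root syntax context.
fn call_site(map: &SpanMap, macro_call_range: Range<u32>) -> SyntaxContextId {
    map.span_for_range(macro_call_range).map_or(SyntaxContextId::ROOT, |s| s.ctx)
}

fn main() {
    let map = SpanMap { spans: vec![(0..10, Span { ctx: SyntaxContextId(7) })] };
    assert_eq!(call_site(&map, 2..5), SyntaxContextId(7));
    assert_eq!(call_site(&map, 20..25), SyntaxContextId::ROOT);
}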
@@ -655,7 +663,7 @@ impl<'a> Ctx<'a> {
                 ast_id: mod_item.ast_id(&self.tree).erase(),
             },
             &item,
-            self.hygiene(),
+            self.span_map(),
         );
         self.add_attrs(mod_item.into(), attrs);
         Some(mod_item)
@@ -697,7 +705,7 @@ impl<'a> Ctx<'a> {
                 self.db.upcast(),
                 SpanAnchor { file_id: self.file, ast_id: owner_ast_id },
                 &param,
-                self.body_ctx.hygiene(),
+                self.body_ctx.span_map(),
             );
             // This is identical to the body of `Ctx::add_attrs()` but we can't call that here
             // because it requires `&mut self` and the call to `generics.fill()` below also
@@ -731,7 +739,7 @@ impl<'a> Ctx<'a> {
     }
 
     fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
-        let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.hygiene());
+        let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.span_map());
         self.data().vis.alloc(vis)
     }
 
@@ -809,7 +817,7 @@ fn lower_abi(abi: ast::Abi) -> Interned<str> {
 
 struct UseTreeLowering<'a> {
     db: &'a dyn DefDatabase,
-    hygiene: &'a Hygiene,
+    hygiene: &'a SpanMap,
     mapping: Arena<ast::UseTree>,
 }
 
@@ -877,7 +885,7 @@ impl UseTreeLowering<'_> {
 
 pub(crate) fn lower_use_tree(
     db: &dyn DefDatabase,
-    hygiene: &Hygiene,
+    hygiene: &SpanMap,
     tree: ast::UseTree,
 ) -> Option<(UseTree, Arena<ast::UseTree>)> {
     let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() };
@@ -457,7 +457,7 @@ impl Printer<'_> {
                 }
             }
             ModItem::MacroCall(it) => {
-                let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it];
+                let MacroCall { path, ast_id: _, expand_to: _, call_site: _ } = &self.tree[it];
                 wln!(self, "{}!(...);", path.display(self.db.upcast()));
             }
             ModItem::MacroRules(it) => {
@@ -63,7 +63,7 @@ use std::{
     panic::{RefUnwindSafe, UnwindSafe},
 };
 
-use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
+use base_db::{impl_intern_key, salsa, span::SyntaxContextId, CrateId, ProcMacroKind};
 use hir_expand::{
     ast_id_map::{AstIdNode, FileAstId},
     attrs::{Attr, AttrId, AttrInput},
@@ -72,7 +72,6 @@ use hir_expand::{
     builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
     db::ExpandDatabase,
     eager::expand_eager_macro_input,
-    hygiene::Hygiene,
     name::Name,
     proc_macro::ProcMacroExpander,
     AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
@@ -82,7 +81,7 @@ use item_tree::ExternBlock;
 use la_arena::Idx;
 use nameres::DefMap;
 use stdx::impl_from;
-use syntax::ast;
+use syntax::{ast, AstNode};
 
 pub use hir_expand::tt;
 
@@ -1166,16 +1165,21 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
     ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
         let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
         let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
-        let h = Hygiene::new(db, self.file_id);
-        let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &h));
+        let span_map = db.span_map(self.file_id);
+        let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &span_map));
 
         let Some(path) = path else {
            return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
         };
 
+        let call_site = span_map
+            .span_for_range(self.value.syntax().text_range())
+            .map_or(SyntaxContextId::ROOT, |s| s.ctx);
+
         macro_call_as_call_id_with_eager(
             db,
             &AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
+            call_site,
             expands_to,
             krate,
             resolver,
@@ -1200,17 +1204,19 @@ impl<T: AstIdNode> AstIdWithPath<T> {
 fn macro_call_as_call_id(
     db: &dyn ExpandDatabase,
     call: &AstIdWithPath<ast::MacroCall>,
+    call_site: SyntaxContextId,
     expand_to: ExpandTo,
     krate: CrateId,
     resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
 ) -> Result<Option<MacroCallId>, UnresolvedMacro> {
-    macro_call_as_call_id_with_eager(db, call, expand_to, krate, resolver, resolver)
+    macro_call_as_call_id_with_eager(db, call, call_site, expand_to, krate, resolver, resolver)
         .map(|res| res.value)
 }
 
 fn macro_call_as_call_id_with_eager(
     db: &dyn ExpandDatabase,
     call: &AstIdWithPath<ast::MacroCall>,
+    call_site: SyntaxContextId,
     expand_to: ExpandTo,
     krate: CrateId,
     resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
@@ -1231,6 +1237,7 @@ fn macro_call_as_call_id_with_eager(
             db,
             krate,
             MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
+            call_site,
         )),
         err: None,
     },
@@ -1329,6 +1336,8 @@ fn derive_macro_as_call_id(
             derive_index: derive_pos,
             derive_attr_index,
         },
+        //FIXME
+        SyntaxContextId::ROOT,
     );
     Ok((macro_id, def_id, call_id))
 }
@@ -1358,6 +1367,8 @@ fn attr_macro_as_call_id(
             attr_args: Arc::new(arg),
             invoc_attr_index: macro_attr.id,
         },
+        //FIXME
+        SyntaxContextId::ROOT,
     )
 }
 intern::impl_internable!(
@@ -3,8 +3,7 @@ use std::cell::OnceCell;
 
 use hir_expand::{
     ast_id_map::{AstIdMap, AstIdNode},
-    hygiene::Hygiene,
-    AstId, HirFileId, InFile,
+    AstId, HirFileId, InFile, SpanMap,
 };
 use syntax::ast;
 use triomphe::Arc;
@@ -13,28 +12,25 @@ use crate::{db::DefDatabase, path::Path};
 
 pub struct LowerCtx<'a> {
     pub db: &'a dyn DefDatabase,
-    hygiene: Hygiene,
+    hygiene: Arc<SpanMap>,
+    // FIXME: This optimization is probably pointless, ast id map should pretty much always exist anyways.
     ast_id_map: Option<(HirFileId, OnceCell<Arc<AstIdMap>>)>,
 }
 
 impl<'a> LowerCtx<'a> {
-    pub fn new(db: &'a dyn DefDatabase, hygiene: &Hygiene, file_id: HirFileId) -> Self {
-        LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: Some((file_id, OnceCell::new())) }
+    pub fn new(db: &'a dyn DefDatabase, hygiene: Arc<SpanMap>, file_id: HirFileId) -> Self {
+        LowerCtx { db, hygiene, ast_id_map: Some((file_id, OnceCell::new())) }
     }
 
     pub fn with_file_id(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self {
-        LowerCtx {
-            db,
-            hygiene: Hygiene::new(db.upcast(), file_id),
-            ast_id_map: Some((file_id, OnceCell::new())),
-        }
+        LowerCtx { db, hygiene: db.span_map(file_id), ast_id_map: Some((file_id, OnceCell::new())) }
     }
 
-    pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: &Hygiene) -> Self {
-        LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: None }
+    pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: Arc<SpanMap>) -> Self {
+        LowerCtx { db, hygiene, ast_id_map: None }
     }
 
-    pub(crate) fn hygiene(&self) -> &Hygiene {
+    pub(crate) fn span_map(&self) -> &SpanMap {
         &self.hygiene
     }
 
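`LowerCtx` now owns an `Arc<SpanMap>` (triomphe's `Arc` in the real code) instead of borrowing a `Hygiene`, so the `hygiene.clone()` calls threaded through `Expander::ctx` and `parse_path` earlier in this diff are reference-count bumps, not deep copies. A tiny sketch of that ownership change, using a stand-in `SpanMap` and `std::sync::Arc`; hypothetical code, not from the commit:

use std::sync::Arc;

#[derive(Default, Debug)]
struct SpanMap; // stand-in for the real span map

struct LowerCtx {
    span_map: Arc<SpanMap>,
}

impl LowerCtx {
    fn new(span_map: Arc<SpanMap>) -> Self {
        LowerCtx { span_map }
    }
}

fn main() {
    let map = Arc::new(SpanMap::default());
    // Cloning the Arc is cheap; both contexts share one underlying map.
    let a = LowerCtx::new(map.clone());
    let b = LowerCtx::new(map);
    assert!(Arc::ptr_eq(&a.span_map, &b.span_map));
}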
@@ -26,6 +26,7 @@ macro_rules! f {
 // +tokenids
 f!(struct MyTraitMap2);
 "#,
+        // FIXME: #SpanAnchor(FileId(0), 1)@91..92 why is there whitespace annotated with a span here?
         expect![[r#"
 macro_rules! f {
     ( struct $ident:ident ) => {
@@ -36,7 +37,7 @@ macro_rules! f {
     }
 }
 
 struct#SpanAnchor(FileId(0), 1)@58..64 MyTraitMap2#SpanAnchor(FileId(0), 2)@23..34 {#SpanAnchor(FileId(0), 1)@72..73
-    map#SpanAnchor(FileId(0), 1)@86..89:#SpanAnchor(FileId(0), 1)@89..90 ::std#SpanAnchor(FileId(0), 1)@93..96::collections#SpanAnchor(FileId(0), 1)@98..109::HashSet#SpanAnchor(FileId(0), 1)@111..118<#SpanAnchor(FileId(0), 1)@118..119(#SpanAnchor(FileId(0), 1)@119..120)#SpanAnchor(FileId(0), 1)@120..121>#SpanAnchor(FileId(0), 1)@121..122,#SpanAnchor(FileId(0), 1)@122..123
+    map#SpanAnchor(FileId(0), 1)@86..89:#SpanAnchor(FileId(0), 1)@89..90 #SpanAnchor(FileId(0), 1)@91..92::#SpanAnchor(FileId(0), 1)@92..93std#SpanAnchor(FileId(0), 1)@93..96::#SpanAnchor(FileId(0), 1)@97..98collections#SpanAnchor(FileId(0), 1)@98..109::#SpanAnchor(FileId(0), 1)@110..111HashSet#SpanAnchor(FileId(0), 1)@111..118<#SpanAnchor(FileId(0), 1)@118..119(#SpanAnchor(FileId(0), 1)@119..120)#SpanAnchor(FileId(0), 1)@120..121>#SpanAnchor(FileId(0), 1)@121..122,#SpanAnchor(FileId(0), 1)@122..123
 }#SpanAnchor(FileId(0), 1)@132..133
 "#]],
     );
@@ -938,9 +939,9 @@ macro_rules! vec {
 fn f() {
     {
         let mut v = Vec::new();
-        v.push(1);
-        v.push(2);
-        v.push(3);
+        v.push((1));
+        v.push((2));
+        v.push((3));
         v
     };
 }
@@ -1409,8 +1410,8 @@ macro_rules! matches {
     };
 }
 fn main() {
-    match 0 {
-        0|1 if true =>true , _=>false
+    match (0) {
+        0|1 if (true )=>true , _=>false
     };
 }
 "#]],
@@ -62,10 +62,10 @@ macro_rules !implement_methods {
 struct Foo;
 impl Foo {
     fn alpha() -> &'static[u32] {
-        &[1, 2, 3]
+        &[(1), (2), (3)]
     }
     fn beta() -> &'static[u32] {
-        &[1, 2, 3]
+        &[(1), (2), (3)]
     }
 }
 "#]],
@@ -39,8 +39,8 @@ fn main() {
     };
     {
         let mut v = Vec::new();
-        v.push(1u32);
-        v.push(2);
+        v.push((1u32));
+        v.push((2));
         v
     };
 }
@@ -192,9 +192,9 @@ macro_rules! constant {
     ($e:expr ;) => {$e};
 }
 
-const _: () = 0.0;
-const _: () = 0.;
-const _: () = 0e0;
+const _: () = (0.0);
+const _: () = (0.);
+const _: () = (0e0);
 "#]],
     );
 }
@@ -123,8 +123,9 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
     } else {
         assert!(
             parse.errors().is_empty(),
-            "parse errors in expansion: \n{:#?}",
-            parse.errors()
+            "parse errors in expansion: \n{:#?}\n```\n{}\n```",
+            parse.errors(),
+            parse.syntax_node(),
         );
     }
     let pp = pretty_print_macro_expansion(
@@ -6,7 +6,7 @@
 use std::{cmp::Ordering, iter, mem};
 
 use ::tt::Span;
-use base_db::{CrateId, Dependency, Edition, FileId};
+use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId};
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;
 use hir_expand::{
@@ -15,7 +15,6 @@ use hir_expand::{
     builtin_attr_macro::find_builtin_attr,
     builtin_derive_macro::find_builtin_derive,
     builtin_fn_macro::find_builtin_macro,
-    hygiene::Hygiene,
     name::{name, AsName, Name},
     proc_macro::ProcMacroExpander,
     ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc,
@@ -112,7 +111,6 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
         from_glob_import: Default::default(),
         skip_attrs: Default::default(),
         is_proc_macro,
-        hygienes: FxHashMap::default(),
     };
     if tree_id.is_block() {
         collector.seed_with_inner(tree_id);
@@ -212,9 +210,22 @@ struct MacroDirective {
 
 #[derive(Clone, Debug, Eq, PartialEq)]
 enum MacroDirectiveKind {
-    FnLike { ast_id: AstIdWithPath<ast::MacroCall>, expand_to: ExpandTo },
-    Derive { ast_id: AstIdWithPath<ast::Adt>, derive_attr: AttrId, derive_pos: usize },
-    Attr { ast_id: AstIdWithPath<ast::Item>, attr: Attr, mod_item: ModItem, tree: TreeId },
+    FnLike {
+        ast_id: AstIdWithPath<ast::MacroCall>,
+        expand_to: ExpandTo,
+        call_site: SyntaxContextId,
+    },
+    Derive {
+        ast_id: AstIdWithPath<ast::Adt>,
+        derive_attr: AttrId,
+        derive_pos: usize,
+    },
+    Attr {
+        ast_id: AstIdWithPath<ast::Item>,
+        attr: Attr,
+        mod_item: ModItem,
+        /* is this needed? */ tree: TreeId,
+    },
 }
 
 /// Walks the tree of module recursively
@@ -242,12 +253,6 @@ struct DefCollector<'a> {
     /// This also stores the attributes to skip when we resolve derive helpers and non-macro
     /// non-builtin attributes in general.
     skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
-    /// `Hygiene` cache, because `Hygiene` construction is expensive.
-    ///
-    /// Almost all paths should have been lowered to `ModPath` during `ItemTree` construction.
-    /// However, `DefCollector` still needs to lower paths in attributes, in particular those in
-    /// derive meta item list.
-    hygienes: FxHashMap<HirFileId, Hygiene>,
 }
 
 impl DefCollector<'_> {
@@ -315,9 +320,8 @@ impl DefCollector<'_> {
             }
 
             if *attr_name == hir_expand::name![feature] {
-                let hygiene = &Hygiene::new_unhygienic();
                 let features = attr
-                    .parse_path_comma_token_tree(self.db.upcast(), hygiene)
+                    .parse_path_comma_token_tree(self.db.upcast())
                     .into_iter()
                     .flatten()
                     .filter_map(|feat| match feat.segments() {
@@ -1119,10 +1123,11 @@ impl DefCollector<'_> {
         let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
 
         match &directive.kind {
-            MacroDirectiveKind::FnLike { ast_id, expand_to } => {
+            MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
                 let call_id = macro_call_as_call_id(
                     self.db.upcast(),
                     ast_id,
+                    *call_site,
                     *expand_to,
                     self.def_map.krate,
                     resolver_def_id,
@@ -1234,19 +1239,7 @@ impl DefCollector<'_> {
         };
         let ast_id = ast_id.with_value(ast_adt_id);
 
-        let extend_unhygenic;
-        let hygiene = if file_id.is_macro() {
-            self.hygienes
-                .entry(file_id)
-                .or_insert_with(|| Hygiene::new(self.db.upcast(), file_id))
-        } else {
-            // Avoid heap allocation (`Hygiene` embraces `Arc`) and hash map entry
-            // when we're in an oridinary (non-macro) file.
-            extend_unhygenic = Hygiene::new_unhygienic();
-            &extend_unhygenic
-        };
-
-        match attr.parse_path_comma_token_tree(self.db.upcast(), hygiene) {
+        match attr.parse_path_comma_token_tree(self.db.upcast()) {
             Some(derive_macros) => {
                 let mut len = 0;
                 for (idx, path) in derive_macros.enumerate() {
@@ -1414,11 +1407,12 @@ impl DefCollector<'_> {
 
         for directive in &self.unresolved_macros {
             match &directive.kind {
-                MacroDirectiveKind::FnLike { ast_id, expand_to } => {
+                MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
                     // FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
                     let macro_call_as_call_id = macro_call_as_call_id(
                         self.db.upcast(),
                         ast_id,
+                        *call_site,
                         *expand_to,
                         self.def_map.krate,
                         |path| {
@@ -1823,9 +1817,8 @@ impl ModCollector<'_, '_> {
             cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
 
             let mut single_imports = Vec::new();
-            let hygiene = Hygiene::new_unhygienic();
             for attr in macro_use_attrs {
-                let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else {
+                let Some(paths) = attr.parse_path_comma_token_tree(db.upcast()) else {
                     // `#[macro_use]` (without any paths) found, forget collected names and just import
                     // all visible macros.
                     self.def_collector.import_macros_from_extern_crate(
@@ -2209,8 +2202,12 @@ impl ModCollector<'_, '_> {
         }
     }
 
-    fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) {
-        let ast_id = AstIdWithPath::new(self.file_id(), mac.ast_id, ModPath::clone(&mac.path));
+    fn collect_macro_call(
+        &mut self,
+        &MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall,
+        container: ItemContainerId,
+    ) {
+        let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(&path));
         let db = self.def_collector.db;
 
         // FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define
@@ -2221,7 +2218,8 @@ impl ModCollector<'_, '_> {
         if let Ok(res) = macro_call_as_call_id_with_eager(
             db.upcast(),
             &ast_id,
-            mac.expand_to,
+            call_site,
+            expand_to,
             self.def_collector.def_map.krate,
             |path| {
                 path.as_ident().and_then(|name| {
@@ -2275,7 +2273,7 @@ impl ModCollector<'_, '_> {
         self.def_collector.unresolved_macros.push(MacroDirective {
             module_id: self.module_id,
             depth: self.macro_depth + 1,
-            kind: MacroDirectiveKind::FnLike { ast_id, expand_to: mac.expand_to },
+            kind: MacroDirectiveKind::FnLike { ast_id, expand_to: expand_to, call_site },
             container,
         });
     }
@@ -2362,7 +2360,6 @@ mod tests {
             from_glob_import: Default::default(),
             skip_attrs: Default::default(),
             is_proc_macro: false,
-            hygienes: FxHashMap::default(),
         };
         collector.seed_with_top_level();
         collector.collect();
@@ -4,8 +4,11 @@ use std::iter;
 
 use crate::{lower::LowerCtx, type_ref::ConstRef};
 
-use either::Either;
-use hir_expand::name::{name, AsName};
+use base_db::span::SyntaxContextId;
+use hir_expand::{
+    mod_path::resolve_crate_root,
+    name::{name, AsName},
+};
 use intern::Interned;
 use syntax::ast::{self, AstNode, HasTypeBounds};
 
@@ -14,14 +17,17 @@ use crate::{
     type_ref::{LifetimeRef, TypeBound, TypeRef},
 };
 
+// fn resolve_crate_root
+
 /// Converts an `ast::Path` to `Path`. Works with use trees.
 /// It correctly handles `$crate` based path from macro call.
+// FIXME: flip the params
 pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
     let mut kind = PathKind::Plain;
     let mut type_anchor = None;
     let mut segments = Vec::new();
     let mut generic_args = Vec::new();
-    let hygiene = ctx.hygiene();
+    let hygiene = ctx.span_map();
     loop {
         let segment = path.segment()?;
 
@@ -31,31 +37,34 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
 
         match segment.kind()? {
             ast::PathSegmentKind::Name(name_ref) => {
-                // FIXME: this should just return name
-                match hygiene.name_ref_to_name(ctx.db.upcast(), name_ref) {
-                    Either::Left(name) => {
-                        let args = segment
-                            .generic_arg_list()
-                            .and_then(|it| lower_generic_args(ctx, it))
-                            .or_else(|| {
-                                lower_generic_args_from_fn_path(
-                                    ctx,
-                                    segment.param_list(),
-                                    segment.ret_type(),
-                                )
-                            })
-                            .map(Interned::new);
-                        if let Some(_) = args {
-                            generic_args.resize(segments.len(), None);
-                            generic_args.push(args);
-                        }
-                        segments.push(name);
-                    }
-                    Either::Right(crate_id) => {
-                        kind = PathKind::DollarCrate(crate_id);
-                        break;
-                    }
+                let name = if name_ref.text() == "$crate" {
+                    kind = resolve_crate_root(
+                        ctx.db.upcast(),
+                        hygiene
+                            .span_for_range(name_ref.syntax().text_range())
+                            .map_or(SyntaxContextId::ROOT, |s| s.ctx),
+                    )
+                    .map(PathKind::DollarCrate)?;
+                    break;
+                } else {
+                    name_ref.as_name()
+                };
+                let args = segment
+                    .generic_arg_list()
+                    .and_then(|it| lower_generic_args(ctx, it))
+                    .or_else(|| {
+                        lower_generic_args_from_fn_path(
+                            ctx,
+                            segment.param_list(),
+                            segment.ret_type(),
+                        )
+                    })
+                    .map(Interned::new);
+                if let Some(_) = args {
+                    generic_args.resize(segments.len(), None);
+                    generic_args.push(args);
                 }
+                segments.push(name);
             }
             ast::PathSegmentKind::SelfTypeKw => {
                 segments.push(name![Self]);
@@ -151,8 +160,16 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
     // We follow what it did anyway :)
     if segments.len() == 1 && kind == PathKind::Plain {
         if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
-            if let Some(crate_id) = hygiene.local_inner_macros(ctx.db.upcast(), path) {
-                kind = PathKind::DollarCrate(crate_id);
+            let syn_ctxt = hygiene
+                .span_for_range(path.segment()?.syntax().text_range())
+                .map_or(SyntaxContextId::ROOT, |s| s.ctx);
+            if let Some(macro_call_id) = ctx.db.lookup_intern_syntax_context(syn_ctxt).outer_expn {
+                if ctx.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
+                    dbg!("local_inner_macros");
+                    if let Some(crate_root) = resolve_crate_root(ctx.db.upcast(), syn_ctxt) {
+                        kind = PathKind::DollarCrate(crate_root);
+                    }
+                }
             }
         }
     }
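In the rewritten `lower_path`, `$crate` and `local_inner_macros` are resolved from the segment's syntax context rather than from a `Hygiene` value: the context's `outer_expn` names the macro call that produced the token, and that call's definition determines the crate root. A heavily simplified sketch with hypothetical stand-in types (the real `resolve_crate_root` additionally walks chains of marks by transparency):

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct CrateId(u32);

struct MacroCallLoc {
    def_crate: CrateId, // crate that defines the expanded macro
    local_inner: bool,  // set for #[macro_export(local_inner_macros)]
}

struct SyntaxContextData {
    // The macro call whose expansion introduced tokens with this context;
    // None for tokens written directly in a source file.
    outer_expn: Option<MacroCallLoc>,
}

// `$crate` inside an expansion should name the macro definition's crate.
fn resolve_crate_root(ctx: &SyntaxContextData) -> Option<CrateId> {
    ctx.outer_expn.as_ref().map(|call| call.def_crate)
}

// Bare `foo!()` paths emitted by a local_inner_macros macro get rewritten to
// `$crate::foo!()`, mirroring the new branch at the end of lower_path.
fn local_inner_macros_root(ctx: &SyntaxContextData) -> Option<CrateId> {
    let call = ctx.outer_expn.as_ref()?;
    if call.local_inner {
        resolve_crate_root(ctx)
    } else {
        None
    }
}

fn main() {
    let plain = SyntaxContextData { outer_expn: None };
    let expanded = SyntaxContextData {
        outer_expn: Some(MacroCallLoc { def_crate: CrateId(3), local_inner: true }),
    };
    assert_eq!(resolve_crate_root(&plain), None);
    assert_eq!(local_inner_macros_root(&expanded), Some(CrateId(3)));
}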
@@ -7,7 +7,7 @@ use base_db::{
     AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition, SourceDatabase,
     Upcast,
 };
-use hir_expand::{db::ExpandDatabase, InFile};
+use hir_expand::{db::ExpandDatabase, hygiene::SyntaxContextData, InFile};
 use rustc_hash::FxHashSet;
 use syntax::{algo, ast, AstNode};
 use triomphe::Arc;
@@ -34,6 +34,7 @@ pub(crate) struct TestDB {
 impl Default for TestDB {
     fn default() -> Self {
         let mut this = Self { storage: Default::default(), events: Default::default() };
+        this.intern_syntax_context(SyntaxContextData::root());
         this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
         this
     }
@@ -2,7 +2,7 @@
 
 use std::iter;
 
-use hir_expand::{hygiene::Hygiene, InFile};
+use hir_expand::{InFile, SpanMap};
 use la_arena::ArenaMap;
 use syntax::ast;
 use triomphe::Arc;
@@ -34,13 +34,13 @@ impl RawVisibility {
         db: &dyn DefDatabase,
         node: InFile<Option<ast::Visibility>>,
     ) -> RawVisibility {
-        Self::from_ast_with_hygiene(db, node.value, &Hygiene::new(db.upcast(), node.file_id))
+        Self::from_ast_with_hygiene(db, node.value, &db.span_map(node.file_id))
     }
 
     pub(crate) fn from_ast_with_hygiene(
         db: &dyn DefDatabase,
         node: Option<ast::Visibility>,
-        hygiene: &Hygiene,
+        hygiene: &SpanMap,
     ) -> RawVisibility {
         Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene)
     }
@@ -49,7 +49,7 @@ impl RawVisibility {
         db: &dyn DefDatabase,
         node: Option<ast::Visibility>,
         default: RawVisibility,
-        hygiene: &Hygiene,
+        hygiene: &SpanMap,
     ) -> RawVisibility {
         let node = match node {
             None => return default,
@ -13,10 +13,9 @@ use triomphe::Arc;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
db::ExpandDatabase,
|
db::ExpandDatabase,
|
||||||
hygiene::Hygiene,
|
|
||||||
mod_path::ModPath,
|
mod_path::ModPath,
|
||||||
tt::{self, Subtree},
|
tt::{self, Subtree},
|
||||||
InFile,
|
InFile, SpanMap,
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Syntactical attributes, without filtering of `cfg_attr`s.
|
/// Syntactical attributes, without filtering of `cfg_attr`s.
|
||||||
|
@ -44,7 +43,7 @@ impl RawAttrs {
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
span_anchor: SpanAnchor,
|
span_anchor: SpanAnchor,
|
||||||
owner: &dyn ast::HasAttrs,
|
owner: &dyn ast::HasAttrs,
|
||||||
hygiene: &Hygiene,
|
hygiene: &SpanMap,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let entries = collect_attrs(owner)
|
let entries = collect_attrs(owner)
|
||||||
.filter_map(|(id, attr)| match attr {
|
.filter_map(|(id, attr)| match attr {
|
||||||
|
@ -69,8 +68,7 @@ impl RawAttrs {
|
||||||
span_anchor: SpanAnchor,
|
span_anchor: SpanAnchor,
|
||||||
owner: InFile<&dyn ast::HasAttrs>,
|
owner: InFile<&dyn ast::HasAttrs>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let hygiene = Hygiene::new(db, owner.file_id);
|
Self::new(db, span_anchor, owner.value, &db.span_map(owner.file_id))
|
||||||
Self::new(db, span_anchor, owner.value, &hygiene)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn merge(&self, other: Self) -> Self {
|
pub fn merge(&self, other: Self) -> Self {
|
||||||
|
@ -135,9 +133,7 @@ impl RawAttrs {
|
||||||
delimiter: tt::Delimiter::unspecified(),
|
delimiter: tt::Delimiter::unspecified(),
|
||||||
token_trees: attr.to_vec(),
|
token_trees: attr.to_vec(),
|
||||||
};
|
};
|
||||||
// FIXME hygiene
|
Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
|
||||||
let hygiene = Hygiene::new_unhygienic();
|
|
||||||
Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
|
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -220,7 +216,7 @@ impl Attr {
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
span_anchor: SpanAnchor,
|
span_anchor: SpanAnchor,
|
||||||
ast: ast::Meta,
|
ast: ast::Meta,
|
||||||
hygiene: &Hygiene,
|
hygiene: &SpanMap,
|
||||||
id: AttrId,
|
id: AttrId,
|
||||||
) -> Option<Attr> {
|
) -> Option<Attr> {
|
||||||
let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?);
|
let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?);
|
||||||
|
@ -234,9 +230,7 @@ impl Attr {
|
||||||
// FIXME: We could also allocate ids for attributes and use the attribute itself as an anchor
|
// FIXME: We could also allocate ids for attributes and use the attribute itself as an anchor
|
||||||
let offset =
|
let offset =
|
||||||
db.ast_id_map(span_anchor.file_id).get_raw(span_anchor.ast_id).text_range().start();
|
db.ast_id_map(span_anchor.file_id).get_raw(span_anchor.ast_id).text_range().start();
|
||||||
// FIXME: Spanmap
|
let tree = syntax_node_to_token_tree(tt.syntax(), span_anchor, offset, hygiene);
|
||||||
let tree =
|
|
||||||
syntax_node_to_token_tree(tt.syntax(), span_anchor, offset, &Default::default());
|
|
||||||
Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
|
Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
|
@ -244,18 +238,13 @@ impl Attr {
|
||||||
Some(Attr { id, path, input })
|
Some(Attr { id, path, input })
|
||||||
}
|
}
|
||||||
|
|
||||||
fn from_tt(
|
fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
|
||||||
db: &dyn ExpandDatabase,
|
|
||||||
tt: &tt::Subtree,
|
|
||||||
hygiene: &Hygiene,
|
|
||||||
id: AttrId,
|
|
||||||
) -> Option<Attr> {
|
|
||||||
// FIXME: Unecessary roundtrip tt -> ast -> tt
|
// FIXME: Unecessary roundtrip tt -> ast -> tt
|
||||||
let (parse, _map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
|
let (parse, _map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
|
||||||
let ast = ast::Meta::cast(parse.syntax_node())?;
|
let ast = ast::Meta::cast(parse.syntax_node())?;
|
||||||
|
|
||||||
// FIXME: we discard spans here!
|
// FIXME: we discard spans here!
|
||||||
Self::from_src(db, SpanAnchor::DUMMY, ast, hygiene, id)
|
Self::from_src(db, SpanAnchor::DUMMY, ast, &SpanMap::default(), id)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn path(&self) -> &ModPath {
|
pub fn path(&self) -> &ModPath {
|
||||||
|
@ -295,9 +284,9 @@ impl Attr {
|
||||||
pub fn parse_path_comma_token_tree<'a>(
|
pub fn parse_path_comma_token_tree<'a>(
|
||||||
&'a self,
|
&'a self,
|
||||||
db: &'a dyn ExpandDatabase,
|
db: &'a dyn ExpandDatabase,
|
||||||
hygiene: &'a Hygiene,
|
|
||||||
) -> Option<impl Iterator<Item = ModPath> + 'a> {
|
) -> Option<impl Iterator<Item = ModPath> + 'a> {
|
||||||
let args = self.token_tree_value()?;
|
let args = self.token_tree_value()?;
|
||||||
|
dbg!(args);
|
||||||
|
|
||||||
if args.delimiter.kind != DelimiterKind::Parenthesis {
|
if args.delimiter.kind != DelimiterKind::Parenthesis {
|
||||||
return None;
|
return None;
|
||||||
|
@ -309,12 +298,13 @@ impl Attr {
|
||||||
if tts.is_empty() {
|
if tts.is_empty() {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here.
|
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
|
||||||
|
// here.
|
||||||
let subtree = tt::Subtree {
|
let subtree = tt::Subtree {
|
||||||
delimiter: tt::Delimiter::unspecified(),
|
delimiter: tt::Delimiter::unspecified(),
|
||||||
token_trees: tts.into_iter().cloned().collect(),
|
token_trees: tts.to_vec(),
|
||||||
};
|
};
|
||||||
let (parse, _) =
|
let (parse, span_map) =
|
||||||
mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
|
mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
|
||||||
let meta = ast::Meta::cast(parse.syntax_node())?;
|
let meta = ast::Meta::cast(parse.syntax_node())?;
|
||||||
// Only simple paths are allowed.
|
// Only simple paths are allowed.
|
@@ -323,7 +313,7 @@ impl Attr
                 return None;
             }
             let path = meta.path()?;
-            ModPath::from_src(db, path, hygiene)
+            ModPath::from_src(db, path, &span_map)
         });

         Some(paths)
@@ -1,6 +1,6 @@
 //! Defines database & queries for macro expansion.

-use ::tt::SyntaxContext;
+use ::tt::{SpanAnchor as _, SyntaxContext};
 use base_db::{
     salsa,
     span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
@@ -17,9 +17,10 @@ use triomphe::Arc;

 use crate::{
     ast_id_map::AstIdMap,
+    attrs::RawAttrs,
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
-    hygiene::{self, HygieneFrame, SyntaxContextData},
+    hygiene::{self, SyntaxContextData, Transparency},
     tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
     ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
     MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, SpanMap,
@@ -37,16 +38,35 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
 /// Old-style `macro_rules` or the new macros 2.0
 pub struct DeclarativeMacroExpander {
     pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>,
+    pub transparency: Transparency,
 }

 impl DeclarativeMacroExpander {
-    pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
+    pub fn expand(
+        &self,
+        db: &dyn ExpandDatabase,
+        tt: tt::Subtree,
+        call_id: MacroCallId,
+    ) -> ExpandResult<tt::Subtree> {
         match self.mac.err() {
             Some(e) => ExpandResult::new(
                 tt::Subtree::empty(),
                 ExpandError::other(format!("invalid macro definition: {e}")),
             ),
-            None => self.mac.expand(&tt).map_err(Into::into),
+            None => self
+                .mac
+                .expand(&tt, |s| s.ctx = db.apply_mark(s.ctx, call_id, self.transparency))
+                .map_err(Into::into),
+        }
+    }
+
+    pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
+        match self.mac.err() {
+            Some(e) => ExpandResult::new(
+                tt::Subtree::empty(),
+                ExpandError::other(format!("invalid macro definition: {e}")),
+            ),
+            None => self.mac.expand(&tt, |_| ()).map_err(Into::into),
         }
     }
 }
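The hygienic path above threads a marker closure into `mbe` so that every span produced during transcription gets its syntax context rewritten through `apply_mark`, while the speculative path opts out with `|_| ()`. A minimal self-contained sketch of that shape (toy types, not rust-analyzer's real `SpanData`/`DeclarativeMacro`; all names here are illustrative assumptions):

```rust
// Toy model: a span carries a syntax-context id, and an expander lets the
// caller rewrite each output span through a marker callback.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    ctx: u32, // stand-in for SyntaxContextId
}

fn expand_with_marker(input: &[Span], marker: impl Fn(&mut Span)) -> Vec<Span> {
    input
        .iter()
        .map(|&s| {
            let mut s = s;
            // every span flowing out of the expansion is marked exactly once
            marker(&mut s);
            s
        })
        .collect()
}

fn main() {
    let tokens = vec![Span { ctx: 0 }, Span { ctx: 0 }];
    // analogous to `|s| s.ctx = db.apply_mark(s.ctx, call_id, transparency)`
    let marked = expand_with_marker(&tokens, |s| s.ctx += 1);
    assert!(marked.iter().all(|s| s.ctx == 1));
    // the unhygienic path passes `|_| ()` and leaves contexts untouched
    let unmarked = expand_with_marker(&tokens, |_| ());
    assert_eq!(unmarked, tokens);
}
```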
@@ -83,6 +103,9 @@ pub trait ExpandDatabase: SourceDatabase {
         &self,
         macro_file: MacroFile,
     ) -> ExpandResult<(Parse<SyntaxNode>, Arc<SpanMap>)>;
+    // TODO: transparent?
+    #[salsa::transparent]
+    fn span_map(&self, file_id: HirFileId) -> Arc<SpanMap>;

     /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
     /// reason why we use salsa at all.
@@ -97,8 +120,8 @@ pub trait ExpandDatabase: SourceDatabase {
     #[salsa::invoke(hygiene::apply_mark)]
     fn apply_mark(
         &self,
-        ctxt: SyntaxContextData,
-        file_id: HirFileId,
+        ctxt: SyntaxContextId,
+        call_id: MacroCallId,
         transparency: hygiene::Transparency,
     ) -> SyntaxContextId;
@@ -137,8 +160,13 @@ pub trait ExpandDatabase: SourceDatabase {
         &self,
         macro_call: MacroCallId,
     ) -> ExpandResult<Box<[SyntaxError]>>;
+}

-    fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
+fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<SpanMap> {
+    match file_id.repr() {
+        HirFileIdRepr::FileId(_) => Arc::new(Default::default()),
+        HirFileIdRepr::MacroFile(m) => db.parse_macro_expansion(m).value.1,
+    }
 }

 /// This expands the given macro call, but with different arguments. This is
@@ -220,7 +248,9 @@ pub fn expand_speculative(
                 ),
             )
         }
-        MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
+        MacroDefKind::Declarative(it) => {
+            db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt)
+        }
         MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
         MacroDefKind::BuiltInEager(it, _) => {
             it.expand(db, actual_macro_call, &tt).map_err(Into::into)
@@ -229,7 +259,9 @@ pub fn expand_speculative(
     };

     let expand_to = macro_expand_to(db, actual_macro_call);
-    let (node, rev_tmap) = token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to);
+    let (node, mut rev_tmap) =
+        token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to);
+    rev_tmap.real_file = false;

     let syntax_node = node.syntax_node();
     let token = rev_tmap
@@ -285,7 +317,8 @@ fn parse_macro_expansion(
     tracing::debug!("expanded = {}", tt.as_debug_string());
     tracing::debug!("kind = {:?}", expand_to);

-    let (parse, rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to);
+    let (parse, mut rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to);
+    rev_token_map.real_file = false;

     ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
 }
@@ -464,41 +497,70 @@ fn decl_macro_expander(
             (parse.syntax_node(), map)
         }
     };
-    let mac = match id.to_ptr(db).to_node(&root) {
-        ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
-            Some(arg) => {
-                let tt = mbe::syntax_node_to_token_tree(
-                    arg.syntax(),
-                    SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() },
-                    macro_rules.syntax().text_range().start(),
-                    &map,
-                );
-                let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
-                mac
-            }
-            None => mbe::DeclarativeMacro::from_err(
-                mbe::ParseError::Expected("expected a token tree".into()),
-                is_2021,
-            ),
-        },
-        ast::Macro::MacroDef(macro_def) => match macro_def.body() {
-            Some(arg) => {
-                let tt = mbe::syntax_node_to_token_tree(
-                    arg.syntax(),
-                    SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() },
-                    macro_def.syntax().text_range().start(),
-                    &map,
-                );
-                let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
-                mac
-            }
-            None => mbe::DeclarativeMacro::from_err(
-                mbe::ParseError::Expected("expected a token tree".into()),
-                is_2021,
-            ),
-        },
-    };
-    Arc::new(DeclarativeMacroExpander { mac })
+    let transparency = |node| {
+        // ... would be nice to have the item tree here
+        let attrs =
+            RawAttrs::new(db, SpanAnchor::DUMMY, node, &Default::default()).filter(db, def_crate);
+        match &*attrs
+            .iter()
+            .find(|it| {
+                it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
+            })?
+            .token_tree_value()?
+            .token_trees
+        {
+            [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
+                "transparent" => Some(Transparency::Transparent),
+                "semitransparent" => Some(Transparency::SemiTransparent),
+                "opaque" => Some(Transparency::Opaque),
+                _ => None,
+            },
+            _ => None,
+        }
+    };
+    let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
+        ast::Macro::MacroRules(macro_rules) => (
+            match macro_rules.token_tree() {
+                Some(arg) => {
+                    let tt = mbe::syntax_node_to_token_tree(
+                        arg.syntax(),
+                        SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() },
+                        macro_rules.syntax().text_range().start(),
+                        &map,
+                    );
+                    let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
+                    mac
+                }
+                None => mbe::DeclarativeMacro::from_err(
+                    mbe::ParseError::Expected("expected a token tree".into()),
+                    is_2021,
+                ),
+            },
+            transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
+        ),
+        ast::Macro::MacroDef(macro_def) => (
+            match macro_def.body() {
+                Some(arg) => {
+                    let tt = mbe::syntax_node_to_token_tree(
+                        arg.syntax(),
+                        SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() },
+                        macro_def.syntax().text_range().start(),
+                        &map,
+                    );
+                    let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
+                    mac
+                }
+                None => mbe::DeclarativeMacro::from_err(
+                    mbe::ParseError::Expected("expected a token tree".into()),
+                    is_2021,
+                ),
+            },
+            transparency(&macro_def).unwrap_or(Transparency::Opaque),
+        ),
+    };
+    Arc::new(DeclarativeMacroExpander { mac, transparency })
 }

 fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
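The transparency probe above mirrors rustc: a declarative macro defaults to `SemiTransparent` (`macro_rules!`) or `Opaque` (`macro`), and the unstable `rustc_macro_transparency` attribute overrides that. A hedged, nightly-only sketch of what such a definition looks like in source (illustrative; the attribute is internal to rustc and the exact feature gates are assumptions):

```rust
#![feature(decl_macro, rustc_attrs)]
#![allow(internal_features)]

// `macro` items default to Opaque; this override opts one into the
// semi-transparent behaviour of `macro_rules!`, which is exactly the value
// the transparency closure above would read back out of the attributes.
#[rustc_macro_transparency = "semitransparent"]
macro transparent_like_macro_rules() {}

fn main() {
    transparent_like_macro_rules!();
}
```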
@@ -514,12 +576,15 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
     }
 }

-fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
+fn macro_expand(
+    db: &dyn ExpandDatabase,
+    macro_call_id: MacroCallId,
+) -> ExpandResult<Arc<tt::Subtree>> {
     let _p = profile::span("macro_expand");
-    let loc = db.lookup_intern_macro_call(id);
+    let loc = db.lookup_intern_macro_call(macro_call_id);

     let ExpandResult { value: tt, mut err } = match loc.def.kind {
-        MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
+        MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id),
         MacroDefKind::BuiltInDerive(expander, ..) => {
             // FIXME: add firewall query for this?
             let hir_file_id = loc.kind.file_id();
@@ -538,7 +603,7 @@ fn macro_expand(
             let _t;
             expander.expand(
                 db,
-                id,
+                macro_call_id,
                 &node,
                 match &map {
                     Some(map) => map,
@@ -554,7 +619,7 @@ fn macro_expand(
             )
         }
         _ => {
-            let ValueResult { value, err } = db.macro_arg(id);
+            let ValueResult { value, err } = db.macro_arg(macro_call_id);
             let Some(macro_arg) = value else {
                 return ExpandResult {
                     value: Arc::new(tt::Subtree {
@@ -570,9 +635,11 @@ fn macro_expand(
             let arg = &*macro_arg;
             match loc.def.kind {
                 MacroDefKind::Declarative(id) => {
-                    db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
+                    db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
+                }
+                MacroDefKind::BuiltIn(it, _) => {
+                    it.expand(db, macro_call_id, &arg).map_err(Into::into)
                 }
-                MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
                 // This might look a bit odd, but we do not expand the inputs to eager macros here.
                 // Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
                 // That kind of expansion uses the ast id map of an eager macros input though which goes through
@@ -594,8 +661,10 @@ fn macro_expand(
                     }),
                 };
             }
-            MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
-            MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
+            MacroDefKind::BuiltInEager(it, _) => {
+                it.expand(db, macro_call_id, &arg).map_err(Into::into)
+            }
+            MacroDefKind::BuiltInAttr(it, _) => it.expand(db, macro_call_id, &arg),
             _ => unreachable!(),
         }
     }
@@ -653,10 +722,6 @@ fn expand_proc_macro(
     ExpandResult { value: Arc::new(tt), err }
 }

-fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
-    Arc::new(HygieneFrame::new(db, file_id))
-}
-
 fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
     db.lookup_intern_macro_call(id).expand_to()
 }
@@ -19,7 +19,7 @@
 //!
 //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
 use base_db::{
-    span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
+    span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
     CrateId,
 };
 use rustc_hash::FxHashMap;
@@ -29,7 +29,6 @@ use triomphe::Arc;
 use crate::{
     ast::{self, AstNode},
     db::ExpandDatabase,
-    hygiene::Hygiene,
     mod_path::ModPath,
     EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
     MacroCallLoc, MacroDefId, MacroDefKind, SpanMap,
@@ -56,8 +55,10 @@ pub fn expand_eager_macro_input(
         krate,
         eager: None,
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
+        // FIXME
+        call_site: SyntaxContextId::ROOT,
     });
-    let ExpandResult { value: (arg_exp, _arg_exp_map), err: parse_err } =
+    let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
         db.parse_macro_expansion(arg_id.as_macro_file());
     // we need this map here as the expansion of the eager input fake file loses whitespace ...
     // let mut ws_mapping = FxHashMap::default();
@@ -70,7 +71,7 @@ pub fn expand_eager_macro_input(
     let ExpandResult { value: expanded_eager_input, err } = {
         eager_macro_recur(
             db,
-            &Hygiene::new(db, macro_call.file_id),
+            &arg_exp_map,
             InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
             krate,
             resolver,
@@ -131,6 +132,8 @@ pub fn expand_eager_macro_input(
             error: err.clone(),
         })),
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
+        // FIXME
+        call_site: SyntaxContextId::ROOT,
     };

     ExpandResult { value: Some(db.intern_macro_call(loc)), err }
@@ -146,7 +149,13 @@ fn lazy_expand(

     let expand_to = ExpandTo::from_call_site(&macro_call.value);
     let ast_id = macro_call.with_value(ast_id);
-    let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
+    let id = def.as_lazy_macro(
+        db,
+        krate,
+        MacroCallKind::FnLike { ast_id, expand_to },
+        // FIXME
+        SyntaxContextId::ROOT,
+    );
     let macro_file = id.as_macro_file();

     db.parse_macro_expansion(macro_file)
@@ -155,7 +164,7 @@

 fn eager_macro_recur(
     db: &dyn ExpandDatabase,
-    hygiene: &Hygiene,
+    hygiene: &SpanMap,
     curr: InFile<SyntaxNode>,
     krate: CrateId,
     macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
@@ -250,14 +259,13 @@ fn eager_macro_recur(
                 | MacroDefKind::BuiltInAttr(..)
                 | MacroDefKind::BuiltInDerive(..)
                 | MacroDefKind::ProcMacro(..) => {
-                    let ExpandResult { value: (parse, _tm), err } =
+                    let ExpandResult { value: (parse, tm), err } =
                         lazy_expand(db, &def, curr.with_value(call.clone()), krate);

                     // replace macro inside
-                    let hygiene = Hygiene::new(db, parse.file_id);
                     let ExpandResult { value, err: error } = eager_macro_recur(
                         db,
-                        &hygiene,
+                        &tm,
                         // FIXME: We discard parse errors here
                         parse.as_ref().map(|it| it.syntax_node()),
                         krate,
@@ -2,32 +2,31 @@
 //!
 //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
 //! this moment, this is horribly incomplete and handles only `$crate`.
-use base_db::{span::SyntaxContextId, CrateId};
-use either::Either;
-use syntax::{
-    ast::{self},
-    TextRange,
-};
-use triomphe::Arc;
-
-use crate::{
-    db::ExpandDatabase,
-    name::{AsName, Name},
-    HirFileId, InFile,
-};
+use base_db::span::{MacroCallId, SyntaxContextId};
+
+use crate::db::ExpandDatabase;

 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct SyntaxContextData {
-    // FIXME: This might only need to be Option<MacroCallId>?
-    outer_expn: HirFileId,
-    outer_transparency: Transparency,
-    parent: SyntaxContextId,
+    pub outer_expn: Option<MacroCallId>,
+    pub outer_transparency: Transparency,
+    pub parent: SyntaxContextId,
     /// This context, but with all transparent and semi-transparent expansions filtered away.
-    opaque: SyntaxContextId,
+    pub opaque: SyntaxContextId,
     /// This context, but with all transparent expansions filtered away.
-    opaque_and_semitransparent: SyntaxContextId,
-    /// Name of the crate to which `$crate` with this context would resolve.
-    dollar_crate_name: Name,
+    pub opaque_and_semitransparent: SyntaxContextId,
+}
+
+impl SyntaxContextData {
+    pub fn root() -> Self {
+        SyntaxContextData {
+            outer_expn: None,
+            outer_transparency: Transparency::Opaque,
+            parent: SyntaxContextId::ROOT,
+            opaque: SyntaxContextId::ROOT,
+            opaque_and_semitransparent: SyntaxContextId::ROOT,
+        }
+    }
 }

 /// A property of a macro expansion that determines how identifiers
@@ -50,12 +49,130 @@ pub enum Transparency {
 }

 pub(super) fn apply_mark(
-    _db: &dyn ExpandDatabase,
-    _ctxt: SyntaxContextData,
-    _file_id: HirFileId,
-    _transparency: Transparency,
+    db: &dyn ExpandDatabase,
+    ctxt: SyntaxContextId,
+    call_id: MacroCallId,
+    transparency: Transparency,
 ) -> SyntaxContextId {
-    _db.intern_syntax_context(_ctxt)
+    if transparency == Transparency::Opaque {
+        return apply_mark_internal(db, ctxt, Some(call_id), transparency);
+    }
+
+    let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site;
+    let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
+        call_site_ctxt.normalize_to_macros_2_0(db)
+    } else {
+        call_site_ctxt.normalize_to_macro_rules(db)
+    };
+
+    if call_site_ctxt.is_root(db) {
+        return apply_mark_internal(db, ctxt, Some(call_id), transparency);
+    }
+
+    // Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
+    // macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition.
+    //
+    // In this case, the tokens from the macros 1.0 definition inherit the hygiene
+    // at their invocation. That is, we pretend that the macros 1.0 definition
+    // was defined at its invocation (i.e., inside the macros 2.0 definition)
+    // so that the macros 2.0 definition remains hygienic.
+    //
+    // See the example at `test/ui/hygiene/legacy_interaction.rs`.
+    for (call_id, transparency) in ctxt.marks(db) {
+        call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency);
+    }
+    apply_mark_internal(db, call_site_ctxt, Some(call_id), transparency)
+}
+
+fn apply_mark_internal(
+    db: &dyn ExpandDatabase,
+    ctxt: SyntaxContextId,
+    call_id: Option<MacroCallId>,
+    transparency: Transparency,
+) -> SyntaxContextId {
+    let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
+    let mut opaque = syntax_context_data.opaque;
+    let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
+
+    if transparency >= Transparency::Opaque {
+        let parent = opaque;
+        let new_opaque = SyntaxContextId::SELF_REF;
+        // But we can't just grab the to be allocated ID either as that would not deduplicate
+        // things!
+        // So we need a new salsa store type here ...
+        opaque = db.intern_syntax_context(SyntaxContextData {
+            outer_expn: call_id,
+            outer_transparency: transparency,
+            parent,
+            opaque: new_opaque,
+            opaque_and_semitransparent: new_opaque,
+        });
+    }
+
+    if transparency >= Transparency::SemiTransparent {
+        let parent = opaque_and_semitransparent;
+        let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
+        opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
+            outer_expn: call_id,
+            outer_transparency: transparency,
+            parent,
+            opaque,
+            opaque_and_semitransparent: new_opaque_and_semitransparent,
+        });
+    }
+
+    let parent = ctxt;
+    db.intern_syntax_context(SyntaxContextData {
+        outer_expn: call_id,
+        outer_transparency: transparency,
+        parent,
+        opaque,
+        opaque_and_semitransparent,
+    })
+}
+
+pub trait SyntaxContextExt {
+    fn is_root(self, db: &dyn ExpandDatabase) -> bool;
+    fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
+    fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
+    fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
+    fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
+    fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)>;
+}
+
+#[inline(always)]
+fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId {
+    match n {
+        SyntaxContextId::SELF_REF => p,
+        _ => n,
+    }
+}
+
+impl SyntaxContextExt for SyntaxContextId {
+    fn is_root(self, db: &dyn ExpandDatabase) -> bool {
+        db.lookup_intern_syntax_context(self).outer_expn.is_none()
+    }
+    fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
+        handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent)
+    }
+    fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
+        handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque)
+    }
+    fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
+        db.lookup_intern_syntax_context(self).parent
+    }
+    fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
+        let data = db.lookup_intern_syntax_context(self);
+        (data.outer_expn, data.outer_transparency)
+    }
+    fn marks(mut self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)> {
+        let mut marks = Vec::new();
+        while self != SyntaxContextId::ROOT {
+            marks.push(self.outer_mark(db));
+            self = self.parent_ctxt(db);
+        }
+        marks.reverse();
+        marks
+    }
 }

 // pub(super) fn with_ctxt_from_mark(db: &ExpandDatabase, file_id: HirFileId) {
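`apply_mark_internal` builds a persistent chain: each interned context stores its parent plus one (macro call, transparency) mark, and `marks` recovers the outside-in mark list by walking parents and reversing. A self-contained toy model of that chain and walk (plain vectors instead of salsa interning; names are illustrative only):

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Transparency {
    Transparent,
    SemiTransparent,
    Opaque,
}

// Toy interner: index 0 plays the role of SyntaxContextId::ROOT.
struct CtxStore {
    parent: Vec<usize>,
    mark: Vec<Option<(u32, Transparency)>>, // (macro call id, transparency)
}

impl CtxStore {
    fn new() -> Self {
        CtxStore { parent: vec![0], mark: vec![None] }
    }

    // counterpart of apply_mark_internal: child context = parent + one mark
    fn apply_mark(&mut self, ctx: usize, call: u32, t: Transparency) -> usize {
        self.parent.push(ctx);
        self.mark.push(Some((call, t)));
        self.parent.len() - 1
    }

    // counterpart of SyntaxContextExt::marks: outside-in list of marks
    fn marks(&self, mut ctx: usize) -> Vec<(u32, Transparency)> {
        let mut out = Vec::new();
        while ctx != 0 {
            out.push(self.mark[ctx].unwrap());
            ctx = self.parent[ctx];
        }
        out.reverse();
        out
    }
}

fn main() {
    let mut store = CtxStore::new();
    let a = store.apply_mark(0, 1, Transparency::SemiTransparent);
    let b = store.apply_mark(a, 2, Transparency::Opaque);
    assert_eq!(
        store.marks(b),
        vec![(1, Transparency::SemiTransparent), (2, Transparency::Opaque)]
    );
}
```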
@@ -64,50 +181,3 @@ pub(super) fn apply_mark(
 // pub(super) fn with_call_site_ctxt(db: &ExpandDatabase, file_id: HirFileId) {
 //     self.with_ctxt_from_mark(expn_id, Transparency::Transparent)
 // }
-
-#[derive(Clone, Debug)]
-pub struct Hygiene {}
-
-impl Hygiene {
-    pub fn new(_: &dyn ExpandDatabase, _: HirFileId) -> Hygiene {
-        Hygiene {}
-    }
-
-    pub fn new_unhygienic() -> Hygiene {
-        Hygiene {}
-    }
-
-    // FIXME: this should just return name
-    pub fn name_ref_to_name(
-        &self,
-        _: &dyn ExpandDatabase,
-        name_ref: ast::NameRef,
-    ) -> Either<Name, CrateId> {
-        Either::Left(name_ref.as_name())
-    }
-
-    pub fn local_inner_macros(&self, _: &dyn ExpandDatabase, _: ast::Path) -> Option<CrateId> {
-        None
-    }
-}
-
-#[derive(Clone, Debug)]
-struct HygieneFrames(Arc<HygieneFrame>);
-
-#[derive(Clone, Debug, Eq, PartialEq)]
-pub struct HygieneFrame {}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-struct HygieneInfo {}
-
-impl HygieneInfo {
-    fn _map_ident_up(&self, _: &dyn ExpandDatabase, _: TextRange) -> Option<InFile<TextRange>> {
-        None
-    }
-}
-
-impl HygieneFrame {
-    pub(crate) fn new(_: &dyn ExpandDatabase, _: HirFileId) -> HygieneFrame {
-        HygieneFrame {}
-    }
-}
@@ -24,7 +24,10 @@ use triomphe::Arc;

 use std::{fmt, hash::Hash, iter};

-use base_db::{span::HirFileIdRepr, CrateId, FileId, FileRange, ProcMacroKind};
+use base_db::{
+    span::{HirFileIdRepr, SyntaxContextId},
+    CrateId, FileId, FileRange, ProcMacroKind,
+};
 use either::Either;
 use syntax::{
     algo::{self, skip_trivia_token},
@@ -105,6 +108,7 @@ pub struct MacroCallLoc {
     /// for the eager input macro file.
     eager: Option<Box<EagerCallInfo>>,
     pub kind: MacroCallKind,
+    pub call_site: SyntaxContextId,
 }

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -330,8 +334,9 @@ impl MacroDefId {
         db: &dyn db::ExpandDatabase,
         krate: CrateId,
         kind: MacroCallKind,
+        call_site: SyntaxContextId,
     ) -> MacroCallId {
-        db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind })
+        db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site })
     }

     pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {
@@ -7,11 +7,11 @@ use std::{

 use crate::{
     db::ExpandDatabase,
-    hygiene::Hygiene,
-    name::{known, Name},
+    hygiene::{SyntaxContextExt, Transparency},
+    name::{known, AsName, Name},
+    SpanMap,
 };
-use base_db::CrateId;
-use either::Either;
+use base_db::{span::SyntaxContextId, CrateId};
 use smallvec::SmallVec;
 use syntax::{ast, AstNode};

@@ -38,6 +38,7 @@ pub enum PathKind {
     Crate,
     /// Absolute path (::foo)
     Abs,
+    // FIXME: Remove this
     /// `$crate` from macro expansion
     DollarCrate(CrateId),
 }
@@ -46,7 +47,7 @@ impl ModPath {
     pub fn from_src(
         db: &dyn ExpandDatabase,
         path: ast::Path,
-        hygiene: &Hygiene,
+        hygiene: &SpanMap,
     ) -> Option<ModPath> {
         convert_path(db, None, path, hygiene)
     }
@@ -193,7 +194,7 @@ fn convert_path(
     db: &dyn ExpandDatabase,
     prefix: Option<ModPath>,
     path: ast::Path,
-    hygiene: &Hygiene,
+    hygiene: &SpanMap,
 ) -> Option<ModPath> {
     let prefix = match path.qualifier() {
         Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
@@ -203,23 +204,26 @@ fn convert_path(
     let segment = path.segment()?;
     let mut mod_path = match segment.kind()? {
         ast::PathSegmentKind::Name(name_ref) => {
-            match hygiene.name_ref_to_name(db, name_ref) {
-                Either::Left(name) => {
-                    // no type args in use
-                    let mut res = prefix.unwrap_or_else(|| {
-                        ModPath::from_kind(
-                            segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
-                        )
-                    });
-                    res.segments.push(name);
-                    res
-                }
-                Either::Right(crate_id) => {
-                    return Some(ModPath::from_segments(
-                        PathKind::DollarCrate(crate_id),
-                        iter::empty(),
-                    ))
-                }
+            if name_ref.text() == "$crate" {
+                if prefix.is_some() {
+                    return None;
+                }
+                resolve_crate_root(
+                    db,
+                    hygiene
+                        .span_for_range(name_ref.syntax().text_range())
+                        .map_or(SyntaxContextId::ROOT, |s| s.ctx),
+                )
+                .map(PathKind::DollarCrate)
+                .map(ModPath::from_kind)?
+            } else {
+                let mut res = prefix.unwrap_or_else(|| {
+                    ModPath::from_kind(
+                        segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
+                    )
+                });
+                res.segments.push(name_ref.as_name());
+                res
             }
         }
         ast::PathSegmentKind::SelfTypeKw => {
@@ -261,8 +265,15 @@ fn convert_path(
     // We follow what it did anyway :)
     if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
         if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
-            if let Some(crate_id) = hygiene.local_inner_macros(db, path) {
-                mod_path.kind = PathKind::DollarCrate(crate_id);
+            let syn_ctx = hygiene
+                .span_for_range(segment.syntax().text_range())
+                .map_or(SyntaxContextId::ROOT, |s| s.ctx);
+            if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
+                if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
+                    if let Some(crate_root) = resolve_crate_root(db, syn_ctx) {
+                        mod_path.kind = PathKind::DollarCrate(crate_root);
+                    }
+                }
             }
         }
     }
@@ -270,6 +281,40 @@ fn convert_path(
     Some(mod_path)
 }

+pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
+    // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
+    // we don't want to pretend that the `macro_rules!` definition is in the `macro`
+    // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
+    // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
+    // definitions actually produced by `macro` and `macro` definitions produced by
+    // `macro_rules!`, but at least such configurations are not stable yet.
+    ctxt = ctxt.normalize_to_macro_rules(db);
+    let mut iter = ctxt.marks(db).into_iter().rev().peekable();
+    let mut result_mark = None;
+    // Find the last opaque mark from the end if it exists.
+    while let Some(&(mark, transparency)) = iter.peek() {
+        if transparency == Transparency::Opaque {
+            result_mark = Some(mark);
+            iter.next();
+        } else {
+            break;
+        }
+    }
+    // Then find the last semi-transparent mark from the end if it exists.
+    for (mark, transparency) in iter {
+        if transparency == Transparency::SemiTransparent {
+            result_mark = Some(mark);
+        } else {
+            break;
+        }
+    }
+
+    match result_mark {
+        Some(Some(call)) => Some(db.lookup_intern_macro_call(call.into()).def.krate),
+        Some(None) | None => None,
+    }
+}
+
 pub use crate::name as __name;

 #[macro_export]
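`resolve_crate_root` scans the mark list from the back: a trailing run of opaque marks (macros 2.0) wins first, then a run of semi-transparent marks (`macro_rules!`); the macro that applied the chosen mark determines which crate `$crate` names. A toy rerun of that scan over a plain mark list (simplified types assumed, crout of rust-analyzer's db machinery):

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Transparency {
    Transparent,
    SemiTransparent,
    Opaque,
}

// Each mark: (crate id of the defining macro, transparency of the mark),
// listed outside-in as SyntaxContextExt::marks would produce it.
fn resolve_crate_root(marks: &[(u32, Transparency)]) -> Option<u32> {
    let mut iter = marks.iter().rev().peekable();
    let mut result = None;
    // last run of opaque marks from the end wins first ...
    while let Some(&&(krate, t)) = iter.peek() {
        if t == Transparency::Opaque {
            result = Some(krate);
            iter.next();
        } else {
            break;
        }
    }
    // ... then the last run of semi-transparent marks
    for &(krate, t) in iter {
        if t == Transparency::SemiTransparent {
            result = Some(krate);
        } else {
            break;
        }
    }
    result
}

fn main() {
    // a macro_rules! (semi-transparent, defined in crate 7) expanded inside a
    // macros 2.0 expansion (opaque, crate 3): its `$crate` resolves to crate 7
    let marks = [(3, Transparency::Opaque), (7, Transparency::SemiTransparent)];
    assert_eq!(resolve_crate_root(&marks), Some(7));
    // no expansion marks at all: `$crate` has nothing to resolve to
    assert_eq!(resolve_crate_root(&[]), None);
}
```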
@@ -470,6 +470,7 @@ pub mod known {
     pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");

     pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static");
+    pub const DOLLAR_CRATE: super::Name = super::Name::new_inline("$crate");

     #[macro_export]
     macro_rules! name {
@@ -23,7 +23,7 @@ use hir_def::{
     EnumVariantId, HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId,
     TraitId,
 };
-use hir_expand::{hygiene::Hygiene, name::Name};
+use hir_expand::name::Name;
 use intern::{Internable, Interned};
 use itertools::Itertools;
 use la_arena::ArenaMap;
@@ -1732,11 +1732,11 @@ impl HirDisplay for TypeRef {
                 f.write_joined(bounds, " + ")?;
             }
             TypeRef::Macro(macro_call) => {
-                let macro_call = macro_call.to_node(f.db.upcast());
                 let ctx = hir_def::lower::LowerCtx::with_hygiene(
                     f.db.upcast(),
-                    &Hygiene::new_unhygienic(),
+                    f.db.span_map(macro_call.file_id),
                 );
+                let macro_call = macro_call.to_node(f.db.upcast());
                 match macro_call.path() {
                     Some(path) => match Path::from_src(path, &ctx) {
                         Some(path) => path.hir_fmt(f)?,
@@ -7,7 +7,7 @@ use base_db::{
     AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
 };
 use hir_def::{db::DefDatabase, ModuleId};
-use hir_expand::db::ExpandDatabase;
+use hir_expand::{db::ExpandDatabase, hygiene::SyntaxContextData};
 use nohash_hasher::IntMap;
 use rustc_hash::FxHashSet;
 use syntax::TextRange;
@@ -30,6 +30,7 @@ pub(crate) struct TestDB {
 impl Default for TestDB {
     fn default() -> Self {
         let mut this = Self { storage: Default::default(), events: Default::default() };
+        this.intern_syntax_context(SyntaxContextData::root());
         this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
         this
     }
@@ -8,7 +8,7 @@ use hir_def::{
     resolver::{HasResolver, Resolver, TypeNs},
     AssocItemId, AttrDefId, ModuleDefId,
 };
-use hir_expand::{hygiene::Hygiene, name::Name};
+use hir_expand::name::Name;
 use hir_ty::db::HirDatabase;
 use syntax::{ast, AstNode};

@@ -234,7 +234,7 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
         if ast_path.syntax().text() != link {
             return None;
         }
-        ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())
+        ModPath::from_src(db.upcast(), ast_path, &Default::default())
     };

     let full = try_get_modpath(link);
@@ -6,7 +6,7 @@
 pub use hir_def::db::*;
 pub use hir_expand::db::{
     AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
-    ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgQuery, MacroExpandQuery,
+    ExpandProcMacroQuery, InternMacroCallQuery, MacroArgQuery, MacroExpandQuery,
     ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
 };
 pub use hir_ty::db::*;
@@ -137,10 +137,7 @@ pub use {
 // These are negative re-exports: pub using these names is forbidden, they
 // should remain private to hir internals.
 #[allow(unused)]
-use {
-    hir_def::path::Path,
-    hir_expand::{hygiene::Hygiene, name::AsName},
-};
+use {hir_def::path::Path, hir_expand::name::AsName};

 /// hir::Crate describes a single crate. It's the main interface with which
 /// a crate's dependencies interact. Mostly, it should be just a proxy for the
@@ -842,8 +842,8 @@ impl<'db> SemanticsImpl<'db> {

     pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
         let analyze = self.analyze(path.syntax())?;
-        let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
-        let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
+        let hygiene = self.db.span_map(analyze.file_id);
+        let ctx = LowerCtx::with_hygiene(self.db.upcast(), hygiene);
         let hir_path = Path::from_src(path.clone(), &ctx)?;
         match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
             TypeNs::TraitId(id) => Some(Trait { id }),
@@ -26,7 +26,6 @@ use hir_def::{
 };
 use hir_expand::{
     builtin_fn_macro::BuiltinFnLikeExpander,
-    hygiene::Hygiene,
     mod_path::path,
     name,
     name::{AsName, Name},
@@ -596,8 +595,7 @@ impl SourceAnalyzer {
         }

         // This must be a normal source file rather than macro file.
-        let hygiene = Hygiene::new(db.upcast(), self.file_id);
-        let ctx = LowerCtx::with_hygiene(db.upcast(), &hygiene);
+        let ctx = LowerCtx::with_hygiene(db.upcast(), db.span_map(self.file_id));
         let hir_path = Path::from_src(path.clone(), &ctx)?;

         // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
@@ -103,7 +103,6 @@ impl RootDatabase {
             hir::db::DeclMacroExpanderQuery
             hir::db::MacroExpandQuery
             hir::db::ExpandProcMacroQuery
-            hir::db::HygieneFrameQuery

             // DefDatabase
             hir::db::FileItemTreeQuery
@@ -208,7 +208,6 @@ impl RootDatabase {
         hir_db::DeclMacroExpanderQuery
         // hir_db::MacroExpandQuery
         hir_db::ExpandProcMacroQuery
-        hir_db::HygieneFrameQuery
         hir_db::ParseMacroExpansionErrorQuery

         // DefDatabase
@@ -53,7 +53,7 @@ fn benchmark_expand_macro_rules() {
     invocations
         .into_iter()
         .map(|(id, tt)| {
-            let res = rules[&id].expand(&tt);
+            let res = rules[&id].expand(&tt, |_| ());
             assert!(res.err.is_none());
             res.value.token_trees.len()
         })
@@ -124,7 +124,7 @@ fn invocation_fixtures(
         for op in rule.lhs.iter() {
             collect_from_op(op, &mut subtree, &mut seed);
         }
-        if it.expand(&subtree).err.is_none() {
+        if it.expand(&subtree, |_| ()).err.is_none() {
            res.push((name.clone(), subtree));
            break;
        }
@@ -14,6 +14,7 @@ use crate::{parser::MetaVarKind, ExpandError, ExpandResult};
 pub(crate) fn expand_rules<S: Span>(
     rules: &[crate::Rule<S>],
     input: &tt::Subtree<S>,
+    marker: impl Fn(&mut S) + Copy,
     is_2021: bool,
 ) -> ExpandResult<tt::Subtree<S>> {
     let mut match_: Option<(matcher::Match<S>, &crate::Rule<S>)> = None;
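The new `marker: impl Fn(&mut S) + Copy` parameter is forwarded by value into every recursive transcription call, which is why the `Copy` bound is required rather than a reference. A tiny self-contained illustration of that design choice (generic names here are illustrative, not the real mbe API):

```rust
// Why `impl Fn(&mut S) + Copy`: the marker is handed off by value at each
// call site, so it must be freely duplicable.
fn apply_twice<S, F: Fn(&mut S) + Copy>(a: &mut S, b: &mut S, marker: F) {
    inner(a, marker); // passing by value ...
    inner(b, marker); // ... works a second time only because F: Copy
}

fn inner<S>(s: &mut S, marker: impl Fn(&mut S)) {
    marker(s);
}

fn main() {
    let (mut x, mut y) = (0u32, 10u32);
    apply_twice(&mut x, &mut y, |n: &mut u32| *n += 1);
    assert_eq!((x, y), (1, 11));
    // the no-op marker that tests and benchmarks in this diff pass
    apply_twice(&mut x, &mut y, |_| ());
    assert_eq!((x, y), (1, 11));
}
```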
@@ -25,7 +26,7 @@ pub(crate) fn expand_rules<S: Span>(
             // Unconditionally returning the transcription here makes the
             // `test_repeat_bad_var` test fail.
             let ExpandResult { value, err: transcribe_err } =
-                transcriber::transcribe(&rule.rhs, &new_match.bindings);
+                transcriber::transcribe(&rule.rhs, &new_match.bindings, marker);
             if transcribe_err.is_none() {
                 return ExpandResult::ok(value);
             }
@@ -44,7 +45,7 @@ pub(crate) fn expand_rules<S: Span>(
     if let Some((match_, rule)) = match_ {
         // if we got here, there was no match without errors
         let ExpandResult { value, err: transcribe_err } =
-            transcriber::transcribe(&rule.rhs, &match_.bindings);
+            transcriber::transcribe(&rule.rhs, &match_.bindings, marker);
         ExpandResult { value, err: match_.err.or(transcribe_err) }
     } else {
         ExpandResult::new(
@@ -129,7 +130,7 @@ enum Fragment<S> {
     /// At one point in time, we tried to use "fake" delimiters here à la
     /// proc-macro delimiter=none. As we later discovered, "none" delimiters are
     /// tricky to handle in the parser, and rustc doesn't handle those either.
-    Expr(tt::TokenTree<S>),
+    Expr(tt::Subtree<S>),
     /// There are roughly two types of paths: paths in expression context, where a
     /// separator `::` between an identifier and its following generic argument list
     /// is mandatory, and paths in type context, where `::` can be omitted.
@@ -139,5 +140,5 @@ enum Fragment<S> {
     /// and is trasncribed as an expression-context path, verbatim transcription
     /// would cause a syntax error. We need to fix it up just before transcribing;
     /// see `transcriber::fix_up_and_push_path_tt()`.
-    Path(tt::TokenTree<S>),
+    Path(tt::Subtree<S>),
 }
@@ -765,7 +765,7 @@ fn match_meta_var<S: Span>(
         MetaVarKind::Path => {
             return input
                 .expect_fragment(parser::PrefixEntryPoint::Path)
                .map(|it| it.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Path));
         }
         MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
         MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop,
@@ -793,7 +793,7 @@ fn match_meta_var<S: Span>(
         };
         return input
             .expect_fragment(parser::PrefixEntryPoint::Expr)
-            .map(|tt| tt.map(Fragment::Expr));
+            .map(|tt| tt.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Expr));
     }
     MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
         let tt_result = match kind {
@@ -11,21 +11,19 @@ use crate::{
 };

 impl<S: Span> Bindings<S> {
-    fn contains(&self, name: &str) -> bool {
-        self.inner.contains_key(name)
-    }
-
     fn get(&self, name: &str) -> Result<&Binding<S>, ExpandError> {
         match self.inner.get(name) {
             Some(binding) => Ok(binding),
-            None => Err(ExpandError::binding_error(format!("could not find binding `{name}`"))),
+            None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name)))),
         }
     }

     fn get_fragment(
         &self,
         name: &str,
+        mut span: S,
         nesting: &mut [NestingState],
+        marker: impl Fn(&mut S),
     ) -> Result<Fragment<S>, ExpandError> {
         macro_rules! binding_err {
             ($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) };
@@ -48,54 +46,75 @@ impl<S: Span> Bindings<S> {
             };
         }
         match b {
-            Binding::Fragment(it) => Ok(it.clone()),
+            Binding::Fragment(f @ (Fragment::Path(sub) | Fragment::Expr(sub))) => {
+                let tt::Subtree { delimiter, token_trees } = sub;
+                marker(&mut span);
+                let subtree = tt::Subtree {
+                    delimiter: tt::Delimiter {
+                        // TODO split span
+                        open: span,
+                        close: span,
+                        kind: delimiter.kind,
+                    },
+                    token_trees: token_trees.clone(),
+                };
+                Ok(match f {
+                    Fragment::Tokens(_) => unreachable!(),
+                    Fragment::Expr(_) => Fragment::Expr,
+                    Fragment::Path(_) => Fragment::Path,
+                }(subtree))
+            }
+            Binding::Fragment(it @ Fragment::Tokens(_)) => Ok(it.clone()),
             // emit some reasonable default expansion for missing bindings,
             // this gives better recovery than emitting the `$fragment-name` verbatim
-            Binding::Missing(it) => Ok(match it {
-                MetaVarKind::Stmt => {
-                    Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
-                        span: S::DUMMY,
-                        char: ';',
-                        spacing: tt::Spacing::Alone,
-                    })))
-                }
-                MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
-                    delimiter: tt::Delimiter {
-                        open: S::DUMMY,
-                        close: S::DUMMY,
-                        kind: tt::DelimiterKind::Brace,
-                    },
-                    token_trees: vec![],
-                })),
-                // FIXME: Meta and Item should get proper defaults
-                MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {
-                    Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
-                        delimiter: tt::Delimiter::UNSPECIFIED,
-                        token_trees: vec![],
-                    }))
-                }
-                MetaVarKind::Path
-                | MetaVarKind::Ty
-                | MetaVarKind::Pat
-                | MetaVarKind::PatParam
-                | MetaVarKind::Expr
-                | MetaVarKind::Ident => {
-                    Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                        text: SmolStr::new_inline("missing"),
-                        span: S::DUMMY,
-                    })))
-                }
-                MetaVarKind::Lifetime => {
-                    Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                        text: SmolStr::new_inline("'missing"),
-                        span: S::DUMMY,
-                    })))
-                }
-                MetaVarKind::Literal => {
-                    Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                        text: SmolStr::new_inline("\"missing\""),
-                        span: S::DUMMY,
-                    })))
-                }
-            }),
+            Binding::Missing(it) => Ok({
+                marker(&mut span);
+                match it {
+                    MetaVarKind::Stmt => {
+                        Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+                            span,
+                            char: ';',
+                            spacing: tt::Spacing::Alone,
+                        })))
+                    }
+                    MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
+                        delimiter: tt::Delimiter {
+                            open: S::DUMMY,
+                            close: S::DUMMY,
+                            kind: tt::DelimiterKind::Brace,
+                        },
+                        token_trees: vec![],
+                    })),
+                    // FIXME: Meta and Item should get proper defaults
+                    MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {
+                        Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
+                            delimiter: tt::Delimiter::UNSPECIFIED,
+                            token_trees: vec![],
+                        }))
+                    }
+                    MetaVarKind::Path
+                    | MetaVarKind::Ty
+                    | MetaVarKind::Pat
+                    | MetaVarKind::PatParam
+                    | MetaVarKind::Expr
+                    | MetaVarKind::Ident => {
+                        Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                            text: SmolStr::new_inline("missing"),
+                            span,
+                        })))
+                    }
+                    MetaVarKind::Lifetime => {
+                        Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                            text: SmolStr::new_inline("'missing"),
+                            span,
+                        })))
+                    }
+                    MetaVarKind::Literal => {
+                        Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                            text: SmolStr::new_inline("\"missing\""),
+                            span,
+                        })))
+                    }
+                }
+            }),
             Binding::Nested(_) => {
||||||
|
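Note: the re-spanning above transplants the transcription-site span (after marker stamps the macro call's syntax context onto it) onto the wrapping delimiters of a substituted $expr/$path fragment, while the fragment's inner tokens keep their original spans. A toy model of that behavior; all types here are stand-ins, not rust-analyzer's:

// Toy model of the re-spanning in `get_fragment`.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { ctx: u32 }

#[derive(Clone, Debug)]
struct Subtree {
    open: Span,
    close: Span,
    tokens: Vec<Span>, // spans of contained tokens, simplified
}

fn respan(sub: &Subtree, mut span: Span, marker: impl Fn(&mut Span)) -> Subtree {
    marker(&mut span); // stamp the macro call's syntax context onto the span
    Subtree { open: span, close: span, tokens: sub.tokens.clone() }
}

fn main() {
    let bound = Subtree { open: Span { ctx: 0 }, close: Span { ctx: 0 }, tokens: vec![Span { ctx: 0 }] };
    let marked = respan(&bound, Span { ctx: 0 }, |s| s.ctx = 42);
    assert_eq!((marked.open.ctx, marked.close.ctx), (42, 42));
    assert_eq!(marked.tokens[0].ctx, 0); // inner tokens are untouched
}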
@@ -111,10 +130,11 @@ impl<S: Span> Bindings<S> {
 pub(super) fn transcribe<S: Span>(
     template: &MetaTemplate<S>,
     bindings: &Bindings<S>,
+    marker: impl Fn(&mut S) + Copy,
 ) -> ExpandResult<tt::Subtree<S>> {
     let mut ctx = ExpandCtx { bindings, nesting: Vec::new() };
     let mut arena: Vec<tt::TokenTree<S>> = Vec::new();
-    expand_subtree(&mut ctx, template, None, &mut arena)
+    expand_subtree(&mut ctx, template, None, &mut arena, marker)
 }

 #[derive(Debug)]
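Note: transcribe now threads a marker callback — a Copy closure that rewrites one span in place — so the transcriber stays agnostic of how hygiene marks are represented; Copy is what lets the same closure be handed down into the recursive expand_subtree calls. A self-contained sketch of that callback shape (TestSpan and its ctx field are illustrative stand-ins):

// Sketch of the `impl Fn(&mut S) + Copy` marker threaded through `transcribe`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct TestSpan { ctx: u32 }

// Stands in for the transcriber calling `marker` on every span it emits.
fn mark_all(spans: &mut [TestSpan], marker: impl Fn(&mut TestSpan) + Copy) {
    spans.iter_mut().for_each(marker);
}

fn main() {
    let mut spans = [TestSpan { ctx: 0 }, TestSpan { ctx: 0 }];
    let macro_call_ctx = 7; // pretend context allocated for this macro call
    mark_all(&mut spans, |s| s.ctx = macro_call_ctx);
    assert!(spans.iter().all(|s| s.ctx == 7));
}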
@@ -139,40 +159,65 @@ fn expand_subtree<S: Span>(
     template: &MetaTemplate<S>,
     delimiter: Option<Delimiter<S>>,
     arena: &mut Vec<tt::TokenTree<S>>,
+    marker: impl Fn(&mut S) + Copy,
 ) -> ExpandResult<tt::Subtree<S>> {
     // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation
     let start_elements = arena.len();
     let mut err = None;
     'ops: for op in template.iter() {
         match op {
-            Op::Literal(it) => arena.push(tt::Leaf::from(it.clone()).into()),
-            Op::Ident(it) => arena.push(tt::Leaf::from(it.clone()).into()),
+            Op::Literal(it) => arena.push(
+                tt::Leaf::from({
+                    let mut it = it.clone();
+                    marker(&mut it.span);
+                    it
+                })
+                .into(),
+            ),
+            Op::Ident(it) => arena.push(
+                tt::Leaf::from({
+                    let mut it = it.clone();
+                    marker(&mut it.span);
+                    it
+                })
+                .into(),
+            ),
             Op::Punct(puncts) => {
                 for punct in puncts {
-                    arena.push(tt::Leaf::from(*punct).into());
+                    arena.push(
+                        tt::Leaf::from({
+                            let mut it = punct.clone();
+                            marker(&mut it.span);
+                            it
+                        })
+                        .into(),
+                    );
                 }
             }
             Op::Subtree { tokens, delimiter } => {
+                let mut delimiter = *delimiter;
+                marker(&mut delimiter.open);
+                marker(&mut delimiter.close);
                 let ExpandResult { value: tt, err: e } =
-                    expand_subtree(ctx, tokens, Some(*delimiter), arena);
+                    expand_subtree(ctx, tokens, Some(delimiter), arena, marker);
                 err = err.or(e);
                 arena.push(tt.into());
             }
             Op::Var { name, id, .. } => {
-                let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id);
+                let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id, marker);
                 err = err.or(e);
                 push_fragment(arena, fragment);
             }
             Op::Repeat { tokens: subtree, kind, separator } => {
                 let ExpandResult { value: fragment, err: e } =
-                    expand_repeat(ctx, subtree, *kind, separator, arena);
+                    expand_repeat(ctx, subtree, *kind, separator, arena, marker);
                 err = err.or(e);
                 push_fragment(arena, fragment)
             }
             Op::Ignore { name, id } => {
                 // Expand the variable, but ignore the result. This registers the repetition count.
                 // FIXME: Any emitted errors are dropped.
-                expand_var(ctx, name, *id);
+                expand_var(ctx, name, *id, marker);
             }
             Op::Index { depth } => {
                 let index =
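Note: the three leaf arms above each repeat the same clone-then-mark dance. A sketch of how that could be factored into a helper — the helper is hypothetical, the diff keeps the pattern inline:

// Hypothetical helper for the repetition in `Op::Literal`/`Op::Ident`/`Op::Punct`.
#[derive(Clone, Copy, Debug)]
struct Span { ctx: u32 }

#[derive(Clone, Debug)]
struct Leaf { text: String, span: Span }

fn cloned_marked(leaf: &Leaf, marker: impl Fn(&mut Span)) -> Leaf {
    let mut leaf = leaf.clone();
    marker(&mut leaf.span); // stamp the expansion mark onto the copy
    leaf
}

fn main() {
    let template_leaf = Leaf { text: "x".into(), span: Span { ctx: 0 } };
    let emitted = cloned_marked(&template_leaf, |s| s.ctx = 1);
    assert_eq!(emitted.span.ctx, 1);
    assert_eq!(template_leaf.span.ctx, 0); // the template itself is untouched
}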
@@ -258,42 +303,42 @@ fn expand_var<S: Span>(
     ctx: &mut ExpandCtx<'_, S>,
     v: &SmolStr,
     id: S,
+    marker: impl Fn(&mut S),
 ) -> ExpandResult<Fragment<S>> {
     // We already handle $crate case in mbe parser
     debug_assert!(v != "crate");

-    if !ctx.bindings.contains(v) {
-        // Note that it is possible to have a `$var` inside a macro which is not bound.
-        // For example:
-        // ```
-        // macro_rules! foo {
-        //     ($a:ident, $b:ident, $c:tt) => {
-        //         macro_rules! bar {
-        //             ($bi:ident) => {
-        //                 fn $bi() -> u8 {$c}
-        //             }
-        //         }
-        //     }
-        // }
-        // ```
-        // We just treat it a normal tokens
-        let tt = tt::Subtree {
-            delimiter: tt::Delimiter::UNSPECIFIED,
-            token_trees: vec![
-                tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
-                    .into(),
-                tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(),
-            ],
-        }
-        .into();
-        ExpandResult::ok(Fragment::Tokens(tt))
-    } else {
-        ctx.bindings.get_fragment(v, &mut ctx.nesting).map_or_else(
-            |e| ExpandResult {
-                value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty())),
-                err: Some(e),
-            },
-            ExpandResult::ok,
-        )
-    }
+    match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) {
+        Ok(it) => ExpandResult::ok(it),
+        Err(ExpandError::UnresolvedBinding(_)) => {
+            // Note that it is possible to have a `$var` inside a macro which is not bound.
+            // For example:
+            // ```
+            // macro_rules! foo {
+            //     ($a:ident, $b:ident, $c:tt) => {
+            //         macro_rules! bar {
+            //             ($bi:ident) => {
+            //                 fn $bi() -> u8 {$c}
+            //             }
+            //         }
+            //     }
+            // }
+            // ```
+            // We just treat it a normal tokens
+            let tt = tt::Subtree {
+                delimiter: tt::Delimiter::UNSPECIFIED,
+                token_trees: vec![
+                    tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
+                        .into(),
+                    tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(),
+                ],
+            }
+            .into();
+            ExpandResult::ok(Fragment::Tokens(tt))
+        }
+        Err(e) => ExpandResult {
+            value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty())),
+            err: Some(e),
+        },
+    }
 }
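Note: expand_var recovers rather than aborts — on a hard binding error it still yields a value (an empty subtree) and records the error alongside it. A minimal stand-in for that value-plus-error shape (the real ExpandResult lives elsewhere in the crate; names here are simplified):

// Minimal stand-in for the `ExpandResult` pattern used above.
#[derive(Debug)]
struct ExpandResult<T> {
    value: T,
    err: Option<String>,
}

impl<T> ExpandResult<T> {
    fn ok(value: T) -> Self {
        ExpandResult { value, err: None }
    }
}

fn main() {
    let clean: ExpandResult<Vec<u8>> = ExpandResult::ok(vec![1]);
    // A hard binding error still yields a usable (empty) value; the caller can
    // render the tokens and report the error side by side.
    let recovered = ExpandResult { value: Vec::<u8>::new(), err: Some("bad binding".into()) };
    println!("{clean:?} / {recovered:?}");
}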
@@ -303,6 +348,7 @@ fn expand_repeat<S: Span>(
     kind: RepeatKind,
     separator: &Option<Separator<S>>,
     arena: &mut Vec<tt::TokenTree<S>>,
+    marker: impl Fn(&mut S) + Copy,
 ) -> ExpandResult<Fragment<S>> {
     let mut buf: Vec<tt::TokenTree<S>> = Vec::new();
     ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
@@ -314,7 +360,8 @@ fn expand_repeat<S: Span>(
     let mut err = None;

     loop {
-        let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, None, arena);
+        let ExpandResult { value: mut t, err: e } =
+            expand_subtree(ctx, template, None, arena, marker);
         let nesting_state = ctx.nesting.last_mut().unwrap();
         if nesting_state.at_end || !nesting_state.hit {
             break;
@@ -391,7 +438,7 @@ fn expand_repeat<S: Span>(
 fn push_fragment<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, fragment: Fragment<S>) {
     match fragment {
         Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
-        Fragment::Expr(tt::TokenTree::Subtree(mut tt)) => {
+        Fragment::Expr(mut tt) => {
             if tt.delimiter.kind == tt::DelimiterKind::Invisible {
                 tt.delimiter = tt::Delimiter {
                     open: S::DUMMY,
@@ -401,8 +448,8 @@ fn push_fragment<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, fragment: Fragment<S>
             }
             buf.push(tt.into())
         }
-        Fragment::Path(tt::TokenTree::Subtree(tt)) => fix_up_and_push_path_tt(buf, tt),
-        Fragment::Tokens(tt) | Fragment::Expr(tt) | Fragment::Path(tt) => buf.push(tt),
+        Fragment::Path(tt) => fix_up_and_push_path_tt(buf, tt),
+        Fragment::Tokens(tt) => buf.push(tt),
     }
 }

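Note: these arms imply that Fragment::Expr and Fragment::Path now carry a tt::Subtree directly rather than a general tt::TokenTree — the matcher guarantees this via subtree_or_wrap — so the old catch-all arm disappears. A reconstructed sketch of that shape; the payload types below are simplified guesses, not the crate's definitions:

// Reconstructed (not from the diff): the shape `Fragment` presumably has now.
enum TokenTree { Leaf(char), Subtree(Subtree) }
struct Subtree { token_trees: Vec<TokenTree> }

enum Fragment {
    Tokens(TokenTree), // any token tree
    Expr(Subtree),     // always a subtree after `subtree_or_wrap`
    Path(Subtree),     // always a subtree, fixed up before pushing
}

fn main() {
    let expr = Fragment::Expr(Subtree { token_trees: vec![TokenTree::Leaf('1')] });
    // With the payload statically a subtree, no catch-all arm is needed:
    match expr {
        Fragment::Tokens(_) => println!("tokens"),
        Fragment::Expr(s) | Fragment::Path(s) => println!("{} token(s)", s.token_trees.len()),
    }
}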
@@ -72,6 +72,7 @@ impl fmt::Display for ParseError {
 #[derive(Debug, PartialEq, Eq, Clone, Hash)]
 pub enum ExpandError {
     BindingError(Box<Box<str>>),
+    UnresolvedBinding(Box<Box<str>>),
     LeftoverTokens,
     ConversionError,
     LimitExceeded,
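Note: both string-carrying variants use Box&lt;Box&lt;str&gt;&gt; rather than Box&lt;str&gt;. Box&lt;str&gt; is a fat pointer (pointer plus length, two words), while boxing it again yields a thin one-word payload, which keeps ExpandError itself small. This can be checked directly:

// Demonstration of the double-boxing trick used by the error variants.
fn main() {
    // Thin pointer: one word.
    assert_eq!(std::mem::size_of::<Box<Box<str>>>(), std::mem::size_of::<usize>());
    // Fat pointer: two words (pointer + length).
    assert_eq!(std::mem::size_of::<Box<str>>(), 2 * std::mem::size_of::<usize>());
}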
@@ -94,6 +95,10 @@ impl fmt::Display for ExpandError {
             ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"),
             ExpandError::UnexpectedToken => f.write_str("unexpected token in input"),
             ExpandError::BindingError(e) => f.write_str(e),
+            ExpandError::UnresolvedBinding(binding) => {
+                f.write_str("could not find binding ")?;
+                f.write_str(binding)
+            }
             ExpandError::ConversionError => f.write_str("could not convert tokens"),
             ExpandError::LimitExceeded => f.write_str("Expand exceed limit"),
             ExpandError::LeftoverTokens => f.write_str("leftover tokens"),
@@ -233,8 +238,12 @@ impl<S: Span> DeclarativeMacro<S> {
         self.err.as_deref()
     }

-    pub fn expand(&self, tt: &tt::Subtree<S>) -> ExpandResult<tt::Subtree<S>> {
-        expander::expand_rules(&self.rules, &tt, self.is_2021)
+    pub fn expand(
+        &self,
+        tt: &tt::Subtree<S>,
+        marker: impl Fn(&mut S) + Copy,
+    ) -> ExpandResult<tt::Subtree<S>> {
+        expander::expand_rules(&self.rules, &tt, marker, self.is_2021)
     }
 }

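Note: with this signature every caller of DeclarativeMacro::expand chooses its own marker. A toy illustration of the two kinds of callback a caller might pass — only the `impl Fn(&mut S) + Copy` shape comes from the diff; the types are stand-ins:

// Stand-in demonstrating identity vs. hygiene-aware markers.
#[derive(Clone, Copy, Debug, PartialEq)]
struct S { ctx: u32 }

// Stands in for an expansion that calls `marker` on every span it produces.
fn expand_with(spans: &mut [S], marker: impl Fn(&mut S) + Copy) {
    spans.iter_mut().for_each(marker);
}

fn main() {
    let mut plain = [S { ctx: 0 }];
    expand_with(&mut plain, |_span| {}); // identity marker: no hygiene marking
    assert_eq!(plain[0].ctx, 0);

    let call_ctx = 9; // pretend syntax context allocated for this macro call
    let mut marked = [S { ctx: 0 }];
    expand_with(&mut marked, |span| span.ctx = call_ctx); // hygiene-aware marker
    assert_eq!(marked[0].ctx, 9);
}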
@@ -155,10 +155,7 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::

         res.push(match expanded.value {
             None => break,
-            Some(tt @ tt::TokenTree::Leaf(_)) => {
-                tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt] }
-            }
-            Some(tt::TokenTree::Subtree(tt)) => tt,
+            Some(tt) => tt.subtree_or_wrap(),
         });

         let mut fork = iter.clone();
@@ -720,6 +717,7 @@ where
     /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
     /// This occurs when a float literal is used as a field access.
     fn float_split(&mut self, has_pseudo_dot: bool) {
+        // TODO: FIXME this breaks the hygiene map
         let (text, _span) = match self.cursor.token_tree() {
             Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => {
                 (lit.text.as_str(), lit.span)

@@ -20,11 +20,12 @@ pub struct TokenMap<S> {
     // then a bin search on the ast id
     pub span_map: Vec<(TextRange, S)>,
     // span_map2: rustc_hash::FxHashMap<TextRange, usize>,
+    pub real_file: bool,
 }

 impl<S> Default for TokenMap<S> {
     fn default() -> Self {
-        Self { span_map: Vec::new() }
+        Self { span_map: Vec::new(), real_file: true }
     }
 }
@@ -49,8 +50,21 @@ impl<S: Span> TokenMap<S> {
         )
     }

+    // FIXME: Should be infallible
     pub fn span_for_range(&self, range: TextRange) -> Option<S> {
-        self.span_map.iter().find_map(|(r, s)| if r == &range { Some(s.clone()) } else { None })
+        // TODO FIXME: make this proper
+        self.span_map
+            .iter()
+            .filter_map(|(r, s)| Some((r, s, r.intersect(range)?)))
+            .max_by_key(|(_, _, intersection)| intersection.len())
+            .map(|(_, &s, _)| s)
+            .or_else(|| {
+                if self.real_file {
+                    None
+                } else {
+                    panic!("no span for range {range:?} in {:#?}", self.span_map)
+                }
+            })
     }

     // pub fn ranges_by_token(
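Note: span_for_range no longer requires an exact range match — it returns the entry whose mapped range overlaps the query the most, and only panics (for macro files, where real_file is false) when nothing overlaps. The selection technique as a standalone sketch over plain half-open (start, end) ranges:

// Standalone sketch of the largest-intersection lookup used above.
fn intersect(a: (u32, u32), b: (u32, u32)) -> Option<(u32, u32)> {
    let (start, end) = (a.0.max(b.0), a.1.min(b.1));
    (start <= end).then_some((start, end))
}

fn span_for_range(map: &[((u32, u32), &'static str)], query: (u32, u32)) -> Option<&'static str> {
    map.iter()
        .filter_map(|(r, s)| Some((s, intersect(*r, query)?)))
        // prefer the entry overlapping the query the most
        .max_by_key(|(_, (start, end))| end - start)
        .map(|(s, _)| *s)
}

fn main() {
    let map = [((0, 5), "a"), ((5, 20), "b")];
    assert_eq!(span_for_range(&map, (4, 12)), Some("b")); // overlap 7 beats 1
    assert_eq!(span_for_range(&map, (30, 40)), None);
}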
@@ -56,6 +56,7 @@ pub trait SpanAnchor: std::fmt::Debug + Copy + Sized + Eq {
     const DUMMY: Self;
 }

+// FIXME: Get rid of this trait?
 pub trait Span: std::fmt::Debug + Copy + Sized + Eq {
     const DUMMY: Self;
 }
@@ -72,7 +73,16 @@ pub enum TokenTree<S> {
 impl_from!(Leaf<S>, Subtree<S> for TokenTree);
 impl<S: Span> TokenTree<S> {
     pub const fn empty() -> Self {
-        Self::Subtree(Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] })
+        Self::Subtree(Subtree { delimiter: Delimiter::UNSPECIFIED, token_trees: vec![] })
+    }
+
+    pub fn subtree_or_wrap(self) -> Subtree<S> {
+        match self {
+            TokenTree::Leaf(_) => {
+                Subtree { delimiter: Delimiter::UNSPECIFIED, token_trees: vec![self] }
+            }
+            TokenTree::Subtree(s) => s,
+        }
     }
 }
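Note: subtree_or_wrap gives callers a uniform Subtree view — a lone leaf is wrapped in an unspecified (invisible) delimiter, an existing subtree passes through unchanged. A standalone sketch with simplified types:

// Sketch of the `subtree_or_wrap` contract with toy types.
#[derive(Debug, Clone, PartialEq)]
enum TokenTree {
    Leaf(String),
    Subtree(Vec<TokenTree>),
}

impl TokenTree {
    fn subtree_or_wrap(self) -> Vec<TokenTree> {
        match self {
            TokenTree::Leaf(_) => vec![self], // wrap the leaf
            TokenTree::Subtree(s) => s,       // pass a subtree through
        }
    }
}

fn main() {
    let leaf = TokenTree::Leaf("x".into());
    assert_eq!(leaf.clone().subtree_or_wrap(), vec![leaf]);

    let sub = TokenTree::Subtree(vec![TokenTree::Leaf("y".into())]);
    assert_eq!(sub.subtree_or_wrap(), vec![TokenTree::Leaf("y".into())]);
}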