Commit 8eb5148af7: Merge branch 'master' into feature/themes
Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-30 15:03:42 +00:00)
20 changed files with 323 additions and 342 deletions
@@ -5,13 +5,14 @@

mod generated;

-use hir::mock::MockDatabase;
-use ra_db::FileRange;
+use ra_db::{fixture::WithFixture, FileRange};
use test_utils::{assert_eq_text, extract_range_or_offset};

+use crate::test_db::TestDB;
+
fn check(assist_id: &str, before: &str, after: &str) {
    let (selection, before) = extract_range_or_offset(before);
-   let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
+   let (db, file_id) = TestDB::with_single_file(&before);
    let frange = FileRange { file_id, range: selection.into() };

    let (_assist_id, action) = crate::assists(&db, frange)
@@ -9,6 +9,8 @@ mod assist_ctx;
mod marks;
#[cfg(test)]
mod doc_tests;
+#[cfg(test)]
+mod test_db;

use hir::db::HirDatabase;
use ra_db::FileRange;
@@ -146,20 +148,19 @@ mod assists {

#[cfg(test)]
mod helpers {
-   use hir::mock::MockDatabase;
-   use ra_db::FileRange;
+   use ra_db::{fixture::WithFixture, FileRange};
    use ra_syntax::TextRange;
    use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range};

-   use crate::{Assist, AssistCtx};
+   use crate::{test_db::TestDB, Assist, AssistCtx};

    pub(crate) fn check_assist(
-       assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
+       assist: fn(AssistCtx<TestDB>) -> Option<Assist>,
        before: &str,
        after: &str,
    ) {
        let (before_cursor_pos, before) = extract_offset(before);
-       let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
+       let (db, file_id) = TestDB::with_single_file(&before);
        let frange =
            FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
        let assist =
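For orientation, a hedged sketch of how an individual assist's test would drive the rewritten helper; `add_derive` and the before/after strings are invented for illustration and are not part of this commit:

#[cfg(test)]
mod tests {
    use super::add_derive; // hypothetical assist: fn(AssistCtx<TestDB>) -> Option<Assist>
    use crate::helpers::check_assist;

    #[test]
    fn adds_derive_attribute() {
        // `<|>` marks the cursor, as in the fixtures elsewhere in this diff.
        check_assist(
            add_derive,
            "struct Foo { <|>bar: u32 }",
            "#[derive()]\nstruct Foo { <|>bar: u32 }",
        );
    }
}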
@@ -182,12 +183,12 @@ mod helpers {
    }

    pub(crate) fn check_assist_range(
-       assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
+       assist: fn(AssistCtx<TestDB>) -> Option<Assist>,
        before: &str,
        after: &str,
    ) {
        let (range, before) = extract_range(before);
-       let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
+       let (db, file_id) = TestDB::with_single_file(&before);
        let frange = FileRange { file_id, range };
        let assist =
            AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable");
@@ -204,12 +205,12 @@ mod helpers {
    }

    pub(crate) fn check_assist_target(
-       assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
+       assist: fn(AssistCtx<TestDB>) -> Option<Assist>,
        before: &str,
        target: &str,
    ) {
        let (before_cursor_pos, before) = extract_offset(before);
-       let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
+       let (db, file_id) = TestDB::with_single_file(&before);
        let frange =
            FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
        let assist =
@@ -224,12 +225,12 @@ mod helpers {
    }

    pub(crate) fn check_assist_range_target(
-       assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
+       assist: fn(AssistCtx<TestDB>) -> Option<Assist>,
        before: &str,
        target: &str,
    ) {
        let (range, before) = extract_range(before);
-       let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
+       let (db, file_id) = TestDB::with_single_file(&before);
        let frange = FileRange { file_id, range };
        let assist =
            AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable");
@@ -243,11 +244,11 @@ mod helpers {
    }

    pub(crate) fn check_assist_not_applicable(
-       assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
+       assist: fn(AssistCtx<TestDB>) -> Option<Assist>,
        before: &str,
    ) {
        let (before_cursor_pos, before) = extract_offset(before);
-       let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
+       let (db, file_id) = TestDB::with_single_file(&before);
        let frange =
            FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
        let assist = AssistCtx::with_ctx(&db, frange, true, assist);
@@ -255,11 +256,11 @@ mod helpers {
    }

    pub(crate) fn check_assist_range_not_applicable(
-       assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
+       assist: fn(AssistCtx<TestDB>) -> Option<Assist>,
        before: &str,
    ) {
        let (range, before) = extract_range(before);
-       let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
+       let (db, file_id) = TestDB::with_single_file(&before);
        let frange = FileRange { file_id, range };
        let assist = AssistCtx::with_ctx(&db, frange, true, assist);
        assert!(assist.is_none());
@@ -268,16 +269,17 @@ mod helpers {

#[cfg(test)]
mod tests {
-   use hir::mock::MockDatabase;
-   use ra_db::FileRange;
+   use ra_db::{fixture::WithFixture, FileRange};
    use ra_syntax::TextRange;
    use test_utils::{extract_offset, extract_range};

+   use crate::test_db::TestDB;
+
    #[test]
    fn assist_order_field_struct() {
        let before = "struct Foo { <|>bar: u32 }";
        let (before_cursor_pos, before) = extract_offset(before);
-       let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
+       let (db, file_id) = TestDB::with_single_file(&before);
        let frange =
            FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
        let assists = super::assists(&db, frange);
@@ -298,7 +300,7 @@ mod tests {
        }
    }";
        let (range, before) = extract_range(before);
-       let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
+       let (db, file_id) = TestDB::with_single_file(&before);
        let frange = FileRange { file_id, range };
        let assists = super::assists(&db, frange);
        let mut assists = assists.iter();
crates/ra_assists/src/test_db.rs (new file, 45 lines)
@@ -0,0 +1,45 @@
//! Database used for testing `ra_assists`.

use std::sync::Arc;

use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath};

#[salsa::database(
    ra_db::SourceDatabaseExtStorage,
    ra_db::SourceDatabaseStorage,
    hir::db::InternDatabaseStorage,
    hir::db::AstDatabaseStorage,
    hir::db::DefDatabaseStorage,
    hir::db::DefDatabase2Storage,
    hir::db::HirDatabaseStorage
)]
#[derive(Debug, Default)]
pub struct TestDB {
    runtime: salsa::Runtime<TestDB>,
}

impl salsa::Database for TestDB {
    fn salsa_runtime(&self) -> &salsa::Runtime<Self> {
        &self.runtime
    }
}

impl std::panic::RefUnwindSafe for TestDB {}

impl FileLoader for TestDB {
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        FileLoaderDelegate(self).file_text(file_id)
    }
    fn resolve_relative_path(
        &self,
        anchor: FileId,
        relative_path: &RelativePath,
    ) -> Option<FileId> {
        FileLoaderDelegate(self).resolve_relative_path(anchor, relative_path)
    }
    fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> {
        FileLoaderDelegate(self).relevant_crates(file_id)
    }
}

impl hir::debug::HirDebugHelper for TestDB {}
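A minimal usage sketch of the new database, assuming only the calls that appear elsewhere in this diff (`with_single_file`, `TextRange::offset_len`, `crate::assists`); the fixture string is invented:

use ra_db::{fixture::WithFixture, FileRange};
use ra_syntax::TextRange;

use crate::test_db::TestDB;

// `WithFixture` supplies `with_single_file` for any database with the right
// salsa storage, which is why TestDB itself can stay this small.
fn smoke_test() {
    let (db, file_id) = TestDB::with_single_file("fn main() { let _x = 92; }");
    let frange = FileRange { file_id, range: TextRange::offset_len(0.into(), 0.into()) };
    let _assists = crate::assists(&db, frange);
}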
@@ -30,8 +30,8 @@ use crate::{
    impl_block::ImplBlock,
    resolve::{Resolver, Scope, TypeNs},
    traits::TraitData,
-   ty::{InferenceResult, TraitRef},
-   Either, HasSource, Name, ScopeDef, Ty, {ImportId, Namespace},
+   ty::{InferenceResult, Namespace, TraitRef},
+   Either, HasSource, ImportId, Name, ScopeDef, Ty,
};

/// hir::Crate describes a single crate. It's the main interface with which
@@ -14,10 +14,10 @@ use crate::{
    traits::TraitData,
    ty::{
        method_resolution::CrateImplBlocks, traits::Impl, CallableDef, FnSig, GenericPredicate,
-       InferenceResult, Substs, Ty, TypableDef, TypeCtor,
+       InferenceResult, Namespace, Substs, Ty, TypableDef, TypeCtor,
    },
    type_alias::TypeAliasData,
-   Const, ConstData, Crate, DefWithBody, ExprScopes, FnData, Function, Module, Namespace, Static,
+   Const, ConstData, Crate, DefWithBody, ExprScopes, FnData, Function, Module, Static,
    StructField, Trait, TypeAlias,
};
@@ -174,11 +174,11 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope

#[cfg(test)]
mod tests {
-   use ra_db::SourceDatabase;
+   use ra_db::{fixture::WithFixture, SourceDatabase};
    use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
    use test_utils::{assert_eq_text, extract_offset};

-   use crate::{mock::MockDatabase, source_binder::SourceAnalyzer};
+   use crate::{source_binder::SourceAnalyzer, test_db::TestDB};

    fn do_check(code: &str, expected: &[&str]) {
        let (off, code) = extract_offset(code);
@@ -191,7 +191,7 @@ mod tests {
            buf
        };

-       let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
+       let (db, file_id) = TestDB::with_single_file(&code);
        let file = db.parse(file_id).ok().unwrap();
        let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
        let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);
@@ -288,7 +288,7 @@ mod tests {
    fn do_check_local_name(code: &str, expected_offset: u32) {
        let (off, code) = extract_offset(code);

-       let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
+       let (db, file_id) = TestDB::with_single_file(&code);
        let file = db.parse(file_id).ok().unwrap();
        let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
            .expect("failed to find a name at the target offset");
@@ -29,8 +29,6 @@ macro_rules! impl_froms {
pub mod debug;

pub mod db;
-#[macro_use]
-pub mod mock;
pub mod source_binder;

mod ids;
@@ -51,6 +49,8 @@ mod code_model;

pub mod from_source;

+#[cfg(test)]
+mod test_db;
#[cfg(test)]
mod marks;
@@ -81,10 +81,7 @@ pub use crate::{

pub use hir_def::{
    builtin_type::BuiltinType,
-   nameres::{
-       per_ns::{Namespace, PerNs},
-       raw::ImportId,
-   },
+   nameres::{per_ns::PerNs, raw::ImportId},
    path::{Path, PathKind},
    type_ref::Mutability,
};
@@ -1,262 +0,0 @@ (whole file deleted; its former contents follow)
//! FIXME: write short doc here
|
||||
|
||||
use std::{panic, sync::Arc};
|
||||
|
||||
use hir_expand::diagnostics::DiagnosticSink;
|
||||
use parking_lot::Mutex;
|
||||
use ra_cfg::CfgOptions;
|
||||
use ra_db::{
|
||||
salsa, CrateGraph, CrateId, Edition, FileId, FileLoader, FileLoaderDelegate, FilePosition,
|
||||
RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt, SourceRoot, SourceRootId,
|
||||
};
|
||||
use rustc_hash::FxHashMap;
|
||||
use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER};
|
||||
|
||||
use crate::{db, debug::HirDebugHelper};
|
||||
|
||||
pub const WORKSPACE: SourceRootId = SourceRootId(0);
|
||||
|
||||
#[salsa::database(
|
||||
ra_db::SourceDatabaseExtStorage,
|
||||
ra_db::SourceDatabaseStorage,
|
||||
db::InternDatabaseStorage,
|
||||
db::AstDatabaseStorage,
|
||||
db::DefDatabaseStorage,
|
||||
db::DefDatabase2Storage,
|
||||
db::HirDatabaseStorage
|
||||
)]
|
||||
#[derive(Debug)]
|
||||
pub struct MockDatabase {
|
||||
events: Mutex<Option<Vec<salsa::Event<MockDatabase>>>>,
|
||||
runtime: salsa::Runtime<MockDatabase>,
|
||||
files: FxHashMap<String, FileId>,
|
||||
crate_names: Arc<FxHashMap<CrateId, String>>,
|
||||
file_paths: Arc<FxHashMap<FileId, String>>,
|
||||
}
|
||||
|
||||
impl panic::RefUnwindSafe for MockDatabase {}
|
||||
|
||||
impl FileLoader for MockDatabase {
|
||||
fn file_text(&self, file_id: FileId) -> Arc<String> {
|
||||
FileLoaderDelegate(self).file_text(file_id)
|
||||
}
|
||||
fn resolve_relative_path(
|
||||
&self,
|
||||
anchor: FileId,
|
||||
relative_path: &RelativePath,
|
||||
) -> Option<FileId> {
|
||||
FileLoaderDelegate(self).resolve_relative_path(anchor, relative_path)
|
||||
}
|
||||
fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> {
|
||||
FileLoaderDelegate(self).relevant_crates(file_id)
|
||||
}
|
||||
}
|
||||
|
||||
impl HirDebugHelper for MockDatabase {
|
||||
fn crate_name(&self, krate: CrateId) -> Option<String> {
|
||||
self.crate_names.get(&krate).cloned()
|
||||
}
|
||||
|
||||
fn file_path(&self, file_id: FileId) -> Option<String> {
|
||||
self.file_paths.get(&file_id).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
impl MockDatabase {
|
||||
pub fn with_files(fixture: &str) -> MockDatabase {
|
||||
let (db, position) = MockDatabase::from_fixture(fixture);
|
||||
assert!(position.is_none());
|
||||
db
|
||||
}
|
||||
|
||||
pub fn with_single_file(text: &str) -> (MockDatabase, SourceRoot, FileId) {
|
||||
let mut db = MockDatabase::default();
|
||||
let mut source_root = SourceRoot::default();
|
||||
let file_id = db.add_file(WORKSPACE, "/", &mut source_root, "/main.rs", text);
|
||||
db.set_source_root(WORKSPACE, Arc::new(source_root.clone()));
|
||||
(db, source_root, file_id)
|
||||
}
|
||||
|
||||
pub fn file_id_of(&self, path: &str) -> FileId {
|
||||
match self.files.get(path) {
|
||||
Some(it) => *it,
|
||||
None => panic!("unknown file: {:?}\nexisting files:\n{:#?}", path, self.files),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn diagnostics(&self) -> String {
|
||||
let mut buf = String::new();
|
||||
let mut files: Vec<FileId> = self.files.values().copied().collect();
|
||||
files.sort();
|
||||
for file in files {
|
||||
let src = crate::Source {
|
||||
file_id: file.into(),
|
||||
ast: crate::ModuleSource::new(self, Some(file), None),
|
||||
};
|
||||
let module = crate::Module::from_definition(self, src).unwrap();
|
||||
module.diagnostics(
|
||||
self,
|
||||
&mut DiagnosticSink::new(|d| {
|
||||
buf += &format!("{:?}: {}\n", d.syntax_node(self).text(), d.message());
|
||||
}),
|
||||
)
|
||||
}
|
||||
buf
|
||||
}
|
||||
|
||||
fn from_fixture(fixture: &str) -> (MockDatabase, Option<FilePosition>) {
|
||||
let mut db = MockDatabase::default();
|
||||
|
||||
let pos = db.add_fixture(fixture);
|
||||
|
||||
(db, pos)
|
||||
}
|
||||
|
||||
fn add_fixture(&mut self, fixture: &str) -> Option<FilePosition> {
|
||||
let mut position = None;
|
||||
let mut source_root = SourceRoot::default();
|
||||
let mut source_root_id = WORKSPACE;
|
||||
let mut source_root_prefix = "/".to_string();
|
||||
for entry in parse_fixture(fixture) {
|
||||
if entry.meta.starts_with("root") {
|
||||
self.set_source_root(source_root_id, Arc::new(source_root));
|
||||
source_root = SourceRoot::default();
|
||||
|
||||
source_root_id = SourceRootId(source_root_id.0 + 1);
|
||||
source_root_prefix = entry.meta["root".len()..].trim().to_string();
|
||||
continue;
|
||||
}
|
||||
if entry.text.contains(CURSOR_MARKER) {
|
||||
assert!(position.is_none(), "only one marker (<|>) per fixture is allowed");
|
||||
position = Some(self.add_file_with_position(
|
||||
source_root_id,
|
||||
&source_root_prefix,
|
||||
&mut source_root,
|
||||
&entry.meta,
|
||||
&entry.text,
|
||||
));
|
||||
} else {
|
||||
self.add_file(
|
||||
source_root_id,
|
||||
&source_root_prefix,
|
||||
&mut source_root,
|
||||
&entry.meta,
|
||||
&entry.text,
|
||||
);
|
||||
}
|
||||
}
|
||||
self.set_source_root(source_root_id, Arc::new(source_root));
|
||||
position
|
||||
}
|
||||
|
||||
fn add_file(
|
||||
&mut self,
|
||||
source_root_id: SourceRootId,
|
||||
source_root_prefix: &str,
|
||||
source_root: &mut SourceRoot,
|
||||
path: &str,
|
||||
text: &str,
|
||||
) -> FileId {
|
||||
assert!(source_root_prefix.starts_with('/'));
|
||||
assert!(source_root_prefix.ends_with('/'));
|
||||
assert!(path.starts_with(source_root_prefix));
|
||||
let rel_path = RelativePathBuf::from_path(&path[source_root_prefix.len()..]).unwrap();
|
||||
|
||||
let is_crate_root = rel_path == "lib.rs" || rel_path == "/main.rs";
|
||||
|
||||
let file_id = FileId(self.files.len() as u32);
|
||||
|
||||
let prev = self.files.insert(path.to_string(), file_id);
|
||||
assert!(prev.is_none(), "duplicate files in the text fixture");
|
||||
Arc::make_mut(&mut self.file_paths).insert(file_id, path.to_string());
|
||||
|
||||
let text = Arc::new(text.to_string());
|
||||
self.set_file_text(file_id, text);
|
||||
self.set_file_relative_path(file_id, rel_path.clone());
|
||||
self.set_file_source_root(file_id, source_root_id);
|
||||
source_root.insert_file(rel_path, file_id);
|
||||
|
||||
if is_crate_root {
|
||||
let mut crate_graph = CrateGraph::default();
|
||||
crate_graph.add_crate_root(file_id, Edition::Edition2018, CfgOptions::default());
|
||||
self.set_crate_graph(Arc::new(crate_graph));
|
||||
}
|
||||
file_id
|
||||
}
|
||||
|
||||
fn add_file_with_position(
|
||||
&mut self,
|
||||
source_root_id: SourceRootId,
|
||||
source_root_prefix: &str,
|
||||
source_root: &mut SourceRoot,
|
||||
path: &str,
|
||||
text: &str,
|
||||
) -> FilePosition {
|
||||
let (offset, text) = extract_offset(text);
|
||||
let file_id = self.add_file(source_root_id, source_root_prefix, source_root, path, &text);
|
||||
FilePosition { file_id, offset }
|
||||
}
|
||||
}
|
||||
|
||||
impl salsa::Database for MockDatabase {
|
||||
fn salsa_runtime(&self) -> &salsa::Runtime<MockDatabase> {
|
||||
&self.runtime
|
||||
}
|
||||
|
||||
fn salsa_event(&self, event: impl Fn() -> salsa::Event<MockDatabase>) {
|
||||
let mut events = self.events.lock();
|
||||
if let Some(events) = &mut *events {
|
||||
events.push(event());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for MockDatabase {
|
||||
fn default() -> MockDatabase {
|
||||
let mut db = MockDatabase {
|
||||
events: Default::default(),
|
||||
runtime: salsa::Runtime::default(),
|
||||
files: FxHashMap::default(),
|
||||
crate_names: Default::default(),
|
||||
file_paths: Default::default(),
|
||||
};
|
||||
db.set_crate_graph(Default::default());
|
||||
db
|
||||
}
|
||||
}
|
||||
|
||||
impl salsa::ParallelDatabase for MockDatabase {
|
||||
fn snapshot(&self) -> salsa::Snapshot<MockDatabase> {
|
||||
salsa::Snapshot::new(MockDatabase {
|
||||
events: Default::default(),
|
||||
runtime: self.runtime.snapshot(self),
|
||||
// only the root database can be used to get file_id by path.
|
||||
files: FxHashMap::default(),
|
||||
file_paths: Arc::clone(&self.file_paths),
|
||||
crate_names: Arc::clone(&self.crate_names),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl MockDatabase {
|
||||
pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event<MockDatabase>> {
|
||||
*self.events.lock() = Some(Vec::new());
|
||||
f();
|
||||
self.events.lock().take().unwrap()
|
||||
}
|
||||
|
||||
pub fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
|
||||
let events = self.log(f);
|
||||
events
|
||||
.into_iter()
|
||||
.filter_map(|e| match e.kind {
|
||||
// This pretty horrible, but `Debug` is the only way to inspect
|
||||
// QueryDescriptor at the moment.
|
||||
salsa::EventKind::WillExecute { database_key } => {
|
||||
Some(format!("{:?}", database_key))
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
}
|
crates/ra_hir/src/test_db.rs (new file, 120 lines)
@@ -0,0 +1,120 @@
//! Database used for testing `hir`.
|
||||
|
||||
use std::{panic, sync::Arc};
|
||||
|
||||
use hir_def::{db::DefDatabase2, ModuleId};
|
||||
use hir_expand::diagnostics::DiagnosticSink;
|
||||
use parking_lot::Mutex;
|
||||
use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath, SourceDatabase};
|
||||
|
||||
use crate::{db, debug::HirDebugHelper};
|
||||
|
||||
#[salsa::database(
|
||||
ra_db::SourceDatabaseExtStorage,
|
||||
ra_db::SourceDatabaseStorage,
|
||||
db::InternDatabaseStorage,
|
||||
db::AstDatabaseStorage,
|
||||
db::DefDatabaseStorage,
|
||||
db::DefDatabase2Storage,
|
||||
db::HirDatabaseStorage
|
||||
)]
|
||||
#[derive(Debug, Default)]
|
||||
pub struct TestDB {
|
||||
events: Mutex<Option<Vec<salsa::Event<TestDB>>>>,
|
||||
runtime: salsa::Runtime<TestDB>,
|
||||
}
|
||||
|
||||
impl salsa::Database for TestDB {
|
||||
fn salsa_runtime(&self) -> &salsa::Runtime<TestDB> {
|
||||
&self.runtime
|
||||
}
|
||||
|
||||
fn salsa_event(&self, event: impl Fn() -> salsa::Event<TestDB>) {
|
||||
let mut events = self.events.lock();
|
||||
if let Some(events) = &mut *events {
|
||||
events.push(event());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl salsa::ParallelDatabase for TestDB {
|
||||
fn snapshot(&self) -> salsa::Snapshot<TestDB> {
|
||||
salsa::Snapshot::new(TestDB {
|
||||
events: Default::default(),
|
||||
runtime: self.runtime.snapshot(self),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl panic::RefUnwindSafe for TestDB {}
|
||||
|
||||
impl FileLoader for TestDB {
|
||||
fn file_text(&self, file_id: FileId) -> Arc<String> {
|
||||
FileLoaderDelegate(self).file_text(file_id)
|
||||
}
|
||||
fn resolve_relative_path(
|
||||
&self,
|
||||
anchor: FileId,
|
||||
relative_path: &RelativePath,
|
||||
) -> Option<FileId> {
|
||||
FileLoaderDelegate(self).resolve_relative_path(anchor, relative_path)
|
||||
}
|
||||
fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> {
|
||||
FileLoaderDelegate(self).relevant_crates(file_id)
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: improve `WithFixture` to bring useful hir debugging back
|
||||
impl HirDebugHelper for TestDB {
|
||||
fn crate_name(&self, _krate: CrateId) -> Option<String> {
|
||||
None
|
||||
}
|
||||
|
||||
fn file_path(&self, _file_id: FileId) -> Option<String> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl TestDB {
|
||||
pub fn diagnostics(&self) -> String {
|
||||
let mut buf = String::new();
|
||||
let crate_graph = self.crate_graph();
|
||||
for krate in crate_graph.iter().next() {
|
||||
let crate_def_map = self.crate_def_map(krate);
|
||||
for (module_id, _) in crate_def_map.modules.iter() {
|
||||
let module_id = ModuleId { krate, module_id };
|
||||
let module = crate::Module::from(module_id);
|
||||
module.diagnostics(
|
||||
self,
|
||||
&mut DiagnosticSink::new(|d| {
|
||||
buf += &format!("{:?}: {}\n", d.syntax_node(self).text(), d.message());
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
buf
|
||||
}
|
||||
}
|
||||
|
||||
impl TestDB {
|
||||
pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event<TestDB>> {
|
||||
*self.events.lock() = Some(Vec::new());
|
||||
f();
|
||||
self.events.lock().take().unwrap()
|
||||
}
|
||||
|
||||
pub fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
|
||||
let events = self.log(f);
|
||||
events
|
||||
.into_iter()
|
||||
.filter_map(|e| match e.kind {
|
||||
// This pretty horrible, but `Debug` is the only way to inspect
|
||||
// QueryDescriptor at the moment.
|
||||
salsa::EventKind::WillExecute { database_key } => {
|
||||
Some(format!("{:?}", database_key))
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
}
|
|
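A hedged usage sketch of the hir-side TestDB defined above, mirroring the `with_position`, `with_files`, and `diagnostics` calls that appear in the test hunks below; the fixture text is invented:

use ra_db::fixture::WithFixture;

use crate::test_db::TestDB;

fn sketch() {
    // `<|>` marks a position; `//- /path` headers split the fixture into files.
    let (_db, _pos) = TestDB::with_position(
        r#"
//- /main.rs
fn main() { let x = 1 + 2<|>; }
"#,
    );
    // Whole-fixture diagnostics, as used by no_such_field_diagnostics further down.
    let _report = TestDB::with_files(
        r#"
//- /lib.rs
struct S { foo: i32 }
"#,
    )
    .diagnostics();
}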
@@ -27,7 +27,7 @@ pub(crate) use infer::{infer_query, InferTy, InferenceResult};
pub use lower::CallableDef;
pub(crate) use lower::{
    callable_item_sig, generic_defaults_query, generic_predicates_for_param_query,
-   generic_predicates_query, type_for_def, type_for_field, TypableDef,
+   generic_predicates_query, type_for_def, type_for_field, Namespace, TypableDef,
};
pub(crate) use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment};
@@ -12,10 +12,10 @@ use crate::{
    expr::{self, Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
    generics::{GenericParams, HasGenericParams},
    ty::{
-       autoderef, method_resolution, op, primitive, CallableDef, InferTy, Mutability, Obligation,
-       ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk,
+       autoderef, method_resolution, op, primitive, CallableDef, InferTy, Mutability, Namespace,
+       Obligation, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk,
    },
-   Adt, Name, Namespace,
+   Adt, Name,
};

impl<'a, D: HirDatabase> InferenceContext<'a, D> {
@@ -6,8 +6,8 @@ use super::{ExprOrPatId, InferenceContext, TraitRef};
use crate::{
    db::HirDatabase,
    resolve::{ResolveValueResult, Resolver, TypeNs, ValueNs},
-   ty::{method_resolution, Substs, Ty, TypableDef, TypeWalk},
-   AssocItem, Container, HasGenericParams, Name, Namespace, Path,
+   ty::{method_resolution, Namespace, Substs, Ty, TypableDef, TypeWalk},
+   AssocItem, Container, HasGenericParams, Name, Path,
};

impl<'a, D: HirDatabase> InferenceContext<'a, D> {
@@ -29,10 +29,21 @@ use crate::{
        Adt,
    },
    util::make_mut_slice,
-   Const, Enum, EnumVariant, Function, ModuleDef, Namespace, Path, Static, Struct, StructField,
-   Trait, TypeAlias, Union,
+   Const, Enum, EnumVariant, Function, ModuleDef, Path, Static, Struct, StructField, Trait,
+   TypeAlias, Union,
};

+// FIXME: this is only really used in `type_for_def`, which contains a bunch of
+// impossible cases. Perhaps we should recombine `TypeableDef` and `Namespace`
+// into a `AsTypeDef`, `AsValueDef` enums?
+#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum Namespace {
+    Types,
+    Values,
+    // Note that only type inference uses this enum, and it doesn't care about macros.
+    // Macro,
+}
+
impl Ty {
    pub(crate) fn from_hir(db: &impl HirDatabase, resolver: &Resolver, type_ref: &TypeRef) -> Self {
        match type_ref {
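As a standalone illustration (not the hir API) of why the relocated enum distinguishes namespaces: the same name can resolve to different items in the type and value namespaces, and type inference has to say which one it is asking about.

// Self-contained sketch: a unit struct's name lives in both namespaces.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Namespace { Types, Values }

fn resolve(name: &str, ns: Namespace) -> Option<&'static str> {
    match (name, ns) {
        ("S", Namespace::Types) => Some("the type `S`"),
        ("S", Namespace::Values) => Some("the constructor value `S`"),
        _ => None,
    }
}

fn main() {
    assert_eq!(resolve("S", Namespace::Types), Some("the type `S`"));
    assert_eq!(resolve("S", Namespace::Values), Some("the constructor value `S`"));
}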
@@ -11,7 +11,7 @@ use ra_syntax::{
use test_utils::covers;

use crate::{
-   expr::BodySourceMap, mock::MockDatabase, ty::display::HirDisplay, ty::InferenceResult,
+   expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult,
    SourceAnalyzer,
};
@@ -24,7 +24,7 @@ mod coercion;

#[test]
fn cfg_impl_block() {
-   let (db, pos) = MockDatabase::with_position(
+   let (db, pos) = TestDB::with_position(
        r#"
//- /main.rs crate:main deps:foo cfg:test
use foo::S as T;
@@ -64,7 +64,7 @@ impl S {

#[test]
fn infer_await() {
-   let (db, pos) = MockDatabase::with_position(
+   let (db, pos) = TestDB::with_position(
        r#"
//- /main.rs crate:main deps:std
@@ -95,7 +95,7 @@ mod future {

#[test]
fn infer_box() {
-   let (db, pos) = MockDatabase::with_position(
+   let (db, pos) = TestDB::with_position(
        r#"
//- /main.rs crate:main deps:std
@@ -122,7 +122,7 @@ mod boxed {

#[test]
fn infer_adt_self() {
-   let (db, pos) = MockDatabase::with_position(
+   let (db, pos) = TestDB::with_position(
        r#"
//- /main.rs
enum Nat { Succ(Self), Demo(Nat), Zero }
@@ -141,7 +141,7 @@ fn test() {

#[test]
fn infer_try() {
-   let (db, pos) = MockDatabase::with_position(
+   let (db, pos) = TestDB::with_position(
        r#"
//- /main.rs crate:main deps:std
@@ -181,7 +181,7 @@ mod result {

#[test]
fn infer_for_loop() {
-   let (db, pos) = MockDatabase::with_position(
+   let (db, pos) = TestDB::with_position(
        r#"
//- /main.rs crate:main deps:std
@@ -223,7 +223,7 @@ mod collections {
#[test]
fn infer_while_let() {
    covers!(infer_while_let);
-   let (db, pos) = MockDatabase::with_position(
+   let (db, pos) = TestDB::with_position(
        r#"
//- /main.rs
enum Option<T> { Some(T), None }
@@ -2484,7 +2484,7 @@ pub fn main_loop() {

#[test]
fn cross_crate_associated_method_call() {
-   let (db, pos) = MockDatabase::with_position(
+   let (db, pos) = TestDB::with_position(
        r#"
//- /main.rs crate:main deps:other_crate
fn test() {
@@ -3378,7 +3378,7 @@ fn test() { S.foo()<|>; }

#[test]
fn infer_macro_with_dollar_crate_is_correct_in_expr() {
-   let (db, pos) = MockDatabase::with_position(
+   let (db, pos) = TestDB::with_position(
        r#"
//- /main.rs crate:main deps:foo
fn test() {
@@ -3482,7 +3482,7 @@ fn test() { (&S).foo()<|>; }

#[test]
fn method_resolution_trait_from_prelude() {
-   let (db, pos) = MockDatabase::with_position(
+   let (db, pos) = TestDB::with_position(
        r#"
//- /main.rs crate:main deps:other_crate
struct S;
@@ -4651,7 +4651,7 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
    assert_eq!(t, "{unknown}");
}

-fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String {
+fn type_at_pos(db: &TestDB, pos: FilePosition) -> String {
    let file = db.parse(pos.file_id).ok().unwrap();
    let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
    let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset));
@@ -4660,12 +4660,12 @@ fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String {
}

fn type_at(content: &str) -> String {
-   let (db, file_pos) = MockDatabase::with_position(content);
+   let (db, file_pos) = TestDB::with_position(content);
    type_at_pos(&db, file_pos)
}

fn infer(content: &str) -> String {
-   let (db, _, file_id) = MockDatabase::with_single_file(content);
+   let (db, file_id) = TestDB::with_single_file(content);
    let source_file = db.parse(file_id).ok().unwrap();

    let mut acc = String::new();
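A hedged example of how `type_at` is typically driven by the tests in this file; the fixture and expected type below are invented for illustration:

#[test]
fn infer_simple_addition() {
    // `<|>` sits right after the expression whose type we want.
    let t = type_at("fn test() { 1u32 + 1u32<|>; }");
    assert_eq!(t, "u32");
}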
@@ -4748,7 +4748,7 @@ fn ellipsize(mut text: String, max_len: usize) -> String {

#[test]
fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
-   let (mut db, pos) = MockDatabase::with_position(
+   let (mut db, pos) = TestDB::with_position(
        "
//- /lib.rs
fn foo() -> i32 {
@@ -4788,7 +4788,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {

#[test]
fn no_such_field_diagnostics() {
-   let diagnostics = MockDatabase::with_files(
+   let diagnostics = TestDB::with_files(
        r"
//- /lib.rs
struct S { foo: i32, bar: () }
@@ -18,7 +18,10 @@ use crate::{
    db::HirDatabase,
    generics::GenericDef,
    ty::display::HirDisplay,
-   ty::{ApplicationTy, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk},
+   ty::{
+       ApplicationTy, GenericPredicate, Namespace, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
+       TypeWalk,
+   },
    AssocItem, Crate, HasGenericParams, ImplBlock, Trait, TypeAlias,
};
@@ -652,7 +655,7 @@ fn impl_block_datum(
        })
        .filter_map(|t| {
            let assoc_ty = trait_.associated_type_by_name(db, &t.name(db))?;
-           let ty = db.type_for_def(t.into(), crate::Namespace::Types).subst(&bound_vars);
+           let ty = db.type_for_def(t.into(), Namespace::Types).subst(&bound_vars);
            Some(chalk_rust_ir::AssociatedTyValue {
                impl_id,
                associated_ty_id: assoc_ty.to_chalk(db),
@@ -50,8 +50,8 @@
// FIXME: review privacy of submodules
pub mod raw;
pub mod per_ns;
-pub mod collector;
-pub mod mod_resolution;
+mod collector;
+mod mod_resolution;

#[cfg(test)]
mod tests;
@@ -196,7 +196,7 @@ pub struct Resolution {
}

impl Resolution {
-   pub(crate) fn from_macro(macro_: MacroDefId) -> Self {
+   fn from_macro(macro_: MacroDefId) -> Self {
        Resolution { def: PerNs::macros(macro_), import: None }
    }
}
@@ -460,7 +460,7 @@ impl CrateDefMap {
        from_crate_root.or(from_extern_prelude)
    }

-   pub(crate) fn resolve_name_in_module(
+   fn resolve_name_in_module(
        &self,
        db: &impl DefDatabase2,
        module: CrateModuleId,
@@ -6,7 +6,7 @@ use ra_syntax::SmolStr;
use crate::{db::DefDatabase2, HirFileId};

#[derive(Clone, Debug)]
-pub struct ModDir {
+pub(super) struct ModDir {
    /// `.` for `mod.rs`, `lib.rs`
    /// `./foo` for `foo.rs`
    /// `./foo/bar` for `mod bar { mod x; }` nested in `foo.rs`
@@ -16,11 +16,15 @@ pub struct ModDir {
}

impl ModDir {
-   pub fn root() -> ModDir {
+   pub(super) fn root() -> ModDir {
        ModDir { path: RelativePathBuf::default(), root_non_dir_owner: false }
    }

-   pub fn descend_into_definition(&self, name: &Name, attr_path: Option<&SmolStr>) -> ModDir {
+   pub(super) fn descend_into_definition(
+       &self,
+       name: &Name,
+       attr_path: Option<&SmolStr>,
+   ) -> ModDir {
        let mut path = self.path.clone();
        match attr_to_path(attr_path) {
            None => path.push(&name.to_string()),
@@ -34,7 +38,7 @@ impl ModDir {
        ModDir { path, root_non_dir_owner: false }
    }

-   pub fn resolve_declaration(
+   pub(super) fn resolve_declaration(
        &self,
        db: &impl DefDatabase2,
        file_id: HirFileId,
@@ -4,14 +4,6 @@ use hir_expand::MacroDefId;

use crate::ModuleDefId;

-#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub enum Namespace {
-    Types,
-    Values,
-    // Note that only type inference uses this enum, and it doesn't care about macros.
-    // Macro,
-}
-
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct PerNs {
    pub types: Option<ModuleDefId>,
@@ -42,6 +42,8 @@ pub use crate::syntax_bridge::{
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MacroRules {
    pub(crate) rules: Vec<Rule>,
+   /// Highest id of the token we have in TokenMap
+   pub(crate) shift: u32,
}

#[derive(Clone, Debug, PartialEq, Eq)]
@@ -50,6 +52,38 @@ pub(crate) struct Rule {
    pub(crate) rhs: tt::Subtree,
}

+// Find the max token id inside a subtree
+fn max_id(subtree: &tt::Subtree) -> Option<u32> {
+    subtree
+        .token_trees
+        .iter()
+        .filter_map(|tt| match tt {
+            tt::TokenTree::Subtree(subtree) => max_id(subtree),
+            tt::TokenTree::Leaf(tt::Leaf::Ident(ident))
+                if ident.id != tt::TokenId::unspecified() =>
+            {
+                Some(ident.id.0)
+            }
+            _ => None,
+        })
+        .max()
+}
+
+/// Shift given TokenTree token id
+fn shift_subtree(tt: &mut tt::Subtree, shift: u32) {
+    for t in tt.token_trees.iter_mut() {
+        match t {
+            tt::TokenTree::Leaf(leaf) => match leaf {
+                tt::Leaf::Ident(ident) if ident.id != tt::TokenId::unspecified() => {
+                    ident.id.0 += shift;
+                }
+                _ => (),
+            },
+            tt::TokenTree::Subtree(tt) => shift_subtree(tt, shift),
+        }
+    }
+}
+
impl MacroRules {
    pub fn parse(tt: &tt::Subtree) -> Result<MacroRules, ParseError> {
        // Note: this parsing can be implemented using mbe machinery itself, by
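A standalone toy model (not the real `tt` types) of the scheme introduced above: find the largest id already present, then offset every definition-side id by `max + 1` so definition-side and call-site id spaces cannot collide.

// Toy token tree with ids, mirroring the max_id / shift_subtree pair above.
enum Tok {
    Ident(u32),
    Subtree(Vec<Tok>),
}

fn max_id(toks: &[Tok]) -> Option<u32> {
    toks.iter()
        .filter_map(|t| match t {
            Tok::Ident(id) => Some(*id),
            Tok::Subtree(inner) => max_id(inner),
        })
        .max()
}

fn shift(toks: &mut [Tok], by: u32) {
    for t in toks {
        match t {
            Tok::Ident(id) => *id += by,
            Tok::Subtree(inner) => shift(inner, by),
        }
    }
}

fn main() {
    let mut def = vec![Tok::Ident(0), Tok::Subtree(vec![Tok::Ident(1)])];
    let call_site_max = 4; // ids 0..=4 are already taken by the call site
    shift(&mut def, call_site_max + 1); // definition ids now start at 5
    assert_eq!(max_id(&def), Some(6));
}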
@@ -72,10 +106,17 @@ impl MacroRules {
            validate(&rule.lhs)?;
        }

-       Ok(MacroRules { rules })
+       // Note that TokenId is started from zero,
+       // We have to add 1 to prevent duplication.
+       let shift = max_id(tt).map_or(0, |it| it + 1);
+       Ok(MacroRules { rules, shift })
    }

    pub fn expand(&self, tt: &tt::Subtree) -> Result<tt::Subtree, ExpandError> {
-       mbe_expander::expand(self, tt)
+       // apply shift
+       let mut tt = tt.clone();
+       shift_subtree(&mut tt, self.shift);
+       mbe_expander::expand(self, &tt)
    }
}
@@ -58,6 +58,33 @@ mod rule_parsing {
    // * Port the test to rust and add it to this module
    // * Make it pass :-)

+   #[test]
+   fn test_token_id_shift() {
+       let macro_definition = r#"
+macro_rules! foobar {
+    ($e:ident) => { foo bar $e }
+}
+"#;
+       let rules = create_rules(macro_definition);
+       let expansion = expand(&rules, "foobar!(baz);");
+
+       fn get_id(t: &tt::TokenTree) -> Option<u32> {
+           if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = t {
+               return Some(ident.id.0);
+           }
+           None
+       }
+
+       assert_eq!(expansion.token_trees.len(), 3);
+       // ($e:ident) => { foo bar $e }
+       //   0          1   2   3   4
+       assert_eq!(get_id(&expansion.token_trees[0]), Some(2));
+       assert_eq!(get_id(&expansion.token_trees[1]), Some(3));
+
+       // So baz should be 5
+       assert_eq!(get_id(&expansion.token_trees[2]), Some(5));
+   }
+
    #[test]
    fn test_convert_tt() {
        let macro_definition = r#"