Encode edition within FileId in the hir layer

This commit is contained in:
Lukas Wirth 2024-07-17 17:35:40 +02:00
parent 92268627a8
commit 5264f86242
160 changed files with 1117 additions and 824 deletions

View file

@ -12,7 +12,7 @@ use cfg::CfgOptions;
use intern::Symbol;
use la_arena::{Arena, Idx, RawIdx};
use rustc_hash::{FxHashMap, FxHashSet};
use span::Edition;
use span::{Edition, EditionedFileId};
use triomphe::Arc;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
@ -662,6 +662,10 @@ impl CrateData {
fn add_dep(&mut self, dep: Dependency) {
self.dependencies.push(dep)
}
/// Returns this crate's root file id tagged with the crate's edition,
/// pairing `self.root_file_id` and `self.edition` into one value.
pub fn root_file_id(&self) -> EditionedFileId {
EditionedFileId::new(self.root_file_id, self.edition)
}
}
impl Extend<(String, String)> for Env {

View file

@ -6,8 +6,10 @@ mod input;
use std::panic;
use salsa::Durability;
use span::EditionedFileId;
use syntax::{ast, Parse, SourceFile, SyntaxError};
use triomphe::Arc;
use vfs::FileId;
pub use crate::{
change::FileChange,
@ -18,8 +20,7 @@ pub use crate::{
},
};
pub use salsa::{self, Cancelled};
pub use span::{FilePosition, FileRange};
pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath};
pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, VfsPath};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
@ -58,10 +59,10 @@ pub trait FileLoader {
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
/// Parses the file into the syntax tree.
fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
/// Returns the set of errors obtained from parsing the file including validation errors.
fn parse_errors(&self, file_id: FileId) -> Option<Arc<[SyntaxError]>>;
fn parse_errors(&self, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>>;
/// The crate graph.
#[salsa::input]
@ -82,14 +83,14 @@ fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseC
db.toolchain(krate).as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
}
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
fn parse(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
let _p = tracing::info_span!("parse", ?file_id).entered();
let (file_id, edition) = file_id.unpack();
let text = db.file_text(file_id);
// FIXME: Edition based parsing
SourceFile::parse(&text, span::Edition::CURRENT)
SourceFile::parse(&text, edition)
}
fn parse_errors(db: &dyn SourceDatabase, file_id: FileId) -> Option<Arc<[SyntaxError]>> {
fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>> {
let errors = db.parse(file_id).errors();
match &*errors {
[] => None,

View file

@ -657,11 +657,12 @@ mod tests {
//! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
use intern::Symbol;
use span::EditionedFileId;
use triomphe::Arc;
use base_db::FileId;
use hir_expand::span_map::{RealSpanMap, SpanMap};
use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode};
use span::FileId;
use syntax::{ast, AstNode, TextRange};
use crate::attr::{DocAtom, DocExpr};
@ -669,7 +670,9 @@ mod tests {
fn assert_parse_result(input: &str, expected: DocExpr) {
let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(
EditionedFileId::current_edition(FileId::from_raw(0)),
)));
let tt = syntax_node_to_token_tree(
tt.syntax(),
map.as_ref(),

View file

@ -288,8 +288,9 @@ fn compute_expr_scopes(
#[cfg(test)]
mod tests {
use base_db::{FileId, SourceDatabase};
use base_db::SourceDatabase;
use hir_expand::{name::AsName, InFile};
use span::FileId;
use syntax::{algo::find_node_at_offset, ast, AstNode};
use test_fixture::WithFixture;
use test_utils::{assert_eq_text, extract_offset};
@ -325,7 +326,7 @@ mod tests {
let file_syntax = db.parse(file_id).syntax_node();
let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap();
let function = find_function(&db, file_id);
let function = find_function(&db, file_id.file_id());
let scopes = db.expr_scopes(function.into());
let (_body, source_map) = db.body_with_source_map(function.into());
@ -480,7 +481,7 @@ fn foo() {
.expect("failed to find a name at the target offset");
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap();
let function = find_function(&db, file_id);
let function = find_function(&db, file_id.file_id());
let scopes = db.expr_scopes(function.into());
let (body, source_map) = db.body_with_source_map(function.into());

View file

@ -1,10 +1,10 @@
//! Defines database & queries for name resolution.
use base_db::{salsa, CrateId, FileId, SourceDatabase, Upcast};
use base_db::{salsa, CrateId, SourceDatabase, Upcast};
use either::Either;
use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId};
use intern::{sym, Interned};
use la_arena::ArenaMap;
use span::MacroCallId;
use span::{EditionedFileId, MacroCallId};
use syntax::{ast, AstPtr};
use triomphe::Arc;
@ -239,11 +239,14 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;
fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, FileId)>;
fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, EditionedFileId)>;
}
// return: macro call id and include file id
fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId, FileId)> {
fn include_macro_invoc(
db: &dyn DefDatabase,
krate: CrateId,
) -> Vec<(MacroCallId, EditionedFileId)> {
db.crate_def_map(krate)
.modules
.values()
@ -257,7 +260,7 @@ fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId
}
fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
let file = db.crate_graph()[crate_id].root_file_id;
let file = db.crate_graph()[crate_id].root_file_id();
let item_tree = db.file_item_tree(file.into());
let attrs = item_tree.raw_attrs(AttrOwner::TopLevel);
for attr in &**attrs {

View file

@ -59,7 +59,7 @@ mod tests;
use std::ops::Deref;
use base_db::{CrateId, FileId};
use base_db::CrateId;
use hir_expand::{
name::Name, proc_macro::ProcMacroKind, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId,
};
@ -67,7 +67,7 @@ use intern::Symbol;
use itertools::Itertools;
use la_arena::Arena;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, FileAstId, ROOT_ERASED_FILE_AST_ID};
use span::{Edition, EditionedFileId, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID};
use stdx::format_to;
use syntax::{ast, SmolStr};
use triomphe::Arc;
@ -244,14 +244,14 @@ impl std::ops::Index<LocalModuleId> for DefMap {
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum ModuleOrigin {
CrateRoot {
definition: FileId,
definition: EditionedFileId,
},
/// Note that non-inline modules, by definition, live inside a non-macro file.
File {
is_mod_rs: bool,
declaration: FileAstId<ast::Module>,
declaration_tree_id: ItemTreeId<Mod>,
definition: FileId,
definition: EditionedFileId,
},
Inline {
definition_tree_id: ItemTreeId<Mod>,
@ -277,7 +277,7 @@ impl ModuleOrigin {
}
}
pub fn file_id(&self) -> Option<FileId> {
pub fn file_id(&self) -> Option<EditionedFileId> {
match self {
ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
Some(*definition)
@ -339,7 +339,7 @@ impl DefMap {
let _p = tracing::info_span!("crate_def_map_query", ?name).entered();
let module_data = ModuleData::new(
ModuleOrigin::CrateRoot { definition: krate.root_file_id },
ModuleOrigin::CrateRoot { definition: krate.root_file_id() },
Visibility::Public,
);
@ -350,7 +350,7 @@ impl DefMap {
None,
);
let def_map =
collector::collect_defs(db, def_map, TreeId::new(krate.root_file_id.into(), None));
collector::collect_defs(db, def_map, TreeId::new(krate.root_file_id().into(), None));
Arc::new(def_map)
}
@ -433,7 +433,9 @@ impl DefMap {
pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ {
self.modules
.iter()
.filter(move |(_id, data)| data.origin.file_id() == Some(file_id))
.filter(move |(_id, data)| {
data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id)
})
.map(|(id, _data)| id)
}

View file

@ -5,7 +5,7 @@
use std::{cmp::Ordering, iter, mem, ops::Not};
use base_db::{CrateId, CrateOrigin, Dependency, FileId, LangCrateOrigin};
use base_db::{CrateId, CrateOrigin, Dependency, LangCrateOrigin};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
@ -22,7 +22,7 @@ use itertools::{izip, Itertools};
use la_arena::Idx;
use limit::Limit;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, ErasedFileAstId, FileAstId, SyntaxContextId};
use span::{Edition, EditionedFileId, ErasedFileAstId, FileAstId, SyntaxContextId};
use syntax::ast;
use triomphe::Arc;
@ -272,7 +272,7 @@ impl DefCollector<'_> {
let _p = tracing::info_span!("seed_with_top_level").entered();
let crate_graph = self.db.crate_graph();
let file_id = crate_graph[self.def_map.krate].root_file_id;
let file_id = crate_graph[self.def_map.krate].root_file_id();
let item_tree = self.db.file_item_tree(file_id.into());
let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
@ -2003,7 +2003,7 @@ impl ModCollector<'_, '_> {
&mut self,
name: Name,
declaration: FileAstId<ast::Module>,
definition: Option<(FileId, bool)>,
definition: Option<(EditionedFileId, bool)>,
visibility: &crate::visibility::RawVisibility,
mod_tree_id: FileItemTreeId<Mod>,
) -> LocalModuleId {

View file

@ -1,8 +1,9 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
use base_db::{AnchoredPath, FileId};
use base_db::AnchoredPath;
use hir_expand::{name::Name, HirFileIdExt, MacroFileIdExt};
use limit::Limit;
use span::EditionedFileId;
use syntax::ToSmolStr as _;
use crate::{db::DefDatabase, HirFileId};
@ -64,7 +65,7 @@ impl ModDir {
file_id: HirFileId,
name: &Name,
attr_path: Option<&str>,
) -> Result<(FileId, bool, ModDir), Box<[String]>> {
) -> Result<(EditionedFileId, bool, ModDir), Box<[String]>> {
let name = name.unescaped();
let mut candidate_files = ArrayVec::<_, 2>::new();
@ -92,7 +93,7 @@ impl ModDir {
let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
for candidate in candidate_files.iter() {
let path = AnchoredPath { anchor: orig_file_id, path: candidate.as_str() };
let path = AnchoredPath { anchor: orig_file_id.file_id(), path: candidate.as_str() };
if let Some(file_id) = db.resolve_path(path) {
let is_mod_rs = candidate.ends_with("/mod.rs");
@ -103,7 +104,12 @@ impl ModDir {
DirPath::new(format!("{}/", name.display(db.upcast())))
};
if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
return Ok((file_id, is_mod_rs, mod_dir));
return Ok((
// FIXME: Edition, is this right?
EditionedFileId::new(file_id, orig_file_id.edition()),
is_mod_rs,
mod_dir,
));
}
}
}

View file

@ -16,7 +16,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
});
assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
}
db.set_file_text(pos.file_id, ra_fixture_change);
db.set_file_text(pos.file_id.file_id(), ra_fixture_change);
{
let events = db.log_executed(|| {
@ -266,7 +266,7 @@ fn quux() { 92 }
m!(Y);
m!(Z);
"#;
db.set_file_text(pos.file_id, new_text);
db.set_file_text(pos.file_id.file_id(), new_text);
{
let events = db.log_executed(|| {

View file

@ -4,10 +4,10 @@ use std::{fmt, panic, sync::Mutex};
use base_db::{
salsa::{self, Durability},
AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition, SourceDatabase,
Upcast,
AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
};
use hir_expand::{db::ExpandDatabase, InFile};
use hir_expand::{db::ExpandDatabase, files::FilePosition, InFile};
use span::{EditionedFileId, FileId};
use syntax::{algo, ast, AstNode};
use triomphe::Arc;
@ -85,7 +85,7 @@ impl TestDB {
for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate);
for (local_id, data) in crate_def_map.modules() {
if data.origin.file_id() == Some(file_id) {
if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) {
return crate_def_map.module_id(local_id);
}
}
@ -94,7 +94,7 @@ impl TestDB {
}
pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
let file_module = self.module_for_file(position.file_id);
let file_module = self.module_for_file(position.file_id.file_id());
let mut def_map = file_module.def_map(self);
let module = self.mod_at_position(&def_map, position);
@ -122,7 +122,7 @@ impl TestDB {
let mut res = DefMap::ROOT;
for (module, data) in def_map.modules() {
let src = data.definition_source(self);
if src.file_id != position.file_id.into() {
if src.file_id != position.file_id {
continue;
}
@ -163,7 +163,7 @@ impl TestDB {
let mut fn_def = None;
for (_, module) in def_map.modules() {
let file_id = module.definition_source(self).file_id;
if file_id != position.file_id.into() {
if file_id != position.file_id {
continue;
}
for decl in module.scope.declarations() {

View file

@ -209,7 +209,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
let (parsed, tm) = &mbe::token_tree_to_syntax_node(
tt,
mbe::TopEntryPoint::MacroItems,
parser::Edition::CURRENT,
parser::Edition::CURRENT_FIXME,
);
let macro_items = ast::MacroItems::cast(parsed.syntax_node())
.ok_or_else(|| ExpandError::other("invalid item definition"))?;

View file

@ -1,11 +1,11 @@
//! Builtin macro
use base_db::{AnchoredPath, FileId};
use base_db::AnchoredPath;
use cfg::CfgExpr;
use either::Either;
use intern::{sym, Symbol};
use mbe::{parse_exprs_with_sep, parse_to_token_tree};
use span::{Edition, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use span::{Edition, EditionedFileId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use stdx::format_to;
use syntax::{
format_smolstr,
@ -228,7 +228,7 @@ fn assert_expand(
span: Span,
) -> ExpandResult<tt::Subtree> {
let call_site_span = span_with_call_site_ctxt(db, span, id);
let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT);
let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT_FIXME);
let dollar_crate = dollar_crate(span);
let expanded = match &*args {
[cond, panic_args @ ..] => {
@ -686,8 +686,9 @@ fn relative_file(
call_id: MacroCallId,
path_str: &str,
allow_recursion: bool,
) -> Result<FileId, ExpandError> {
let call_site = call_id.as_macro_file().parent(db).original_file_respecting_includes(db);
) -> Result<EditionedFileId, ExpandError> {
let call_site =
call_id.as_macro_file().parent(db).original_file_respecting_includes(db).file_id();
let path = AnchoredPath { anchor: call_site, path: path_str };
let res = db
.resolve_path(path)
@ -696,7 +697,7 @@ fn relative_file(
if res == call_site && !allow_recursion {
Err(ExpandError::other(format!("recursive inclusion of `{path_str}`")))
} else {
Ok(res)
Ok(EditionedFileId::new(res, Edition::CURRENT_FIXME))
}
}
@ -728,11 +729,10 @@ fn include_expand(
}
};
match parse_to_token_tree(
// FIXME
Edition::CURRENT,
file_id.edition(),
SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
SyntaxContextId::ROOT,
&db.file_text(file_id),
&db.file_text(file_id.file_id()),
) {
Some(it) => ExpandResult::ok(it),
None => ExpandResult::new(
@ -746,7 +746,7 @@ pub fn include_input_to_file_id(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
arg: &tt::Subtree,
) -> Result<FileId, ExpandError> {
) -> Result<EditionedFileId, ExpandError> {
relative_file(db, arg_id, parse_string(arg)?.0.as_str(), false)
}
@ -793,7 +793,7 @@ fn include_str_expand(
}
};
let text = db.file_text(file_id);
let text = db.file_text(file_id.file_id());
let text = &*text;
ExpandResult::ok(quote!(span =>#text))

View file

@ -1,11 +1,11 @@
//! Defines database & queries for macro expansion.
use base_db::{salsa, CrateId, FileId, SourceDatabase};
use base_db::{salsa, CrateId, SourceDatabase};
use either::Either;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, MatchedArmIndex};
use rustc_hash::FxHashSet;
use span::{AstIdMap, Span, SyntaxContextData, SyntaxContextId};
use span::{AstIdMap, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
use triomphe::Arc;
@ -78,7 +78,7 @@ pub trait ExpandDatabase: SourceDatabase {
#[salsa::invoke(crate::span_map::expansion_span_map)]
fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
#[salsa::invoke(crate::span_map::real_span_map)]
fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
fn real_span_map(&self, file_id: EditionedFileId) -> Arc<RealSpanMap>;
/// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
/// reason why we use salsa at all.

View file

@ -3,8 +3,8 @@ use std::borrow::Borrow;
use either::Either;
use span::{
AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr,
MacroFileId, SyntaxContextId,
AstIdNode, EditionedFileId, ErasedFileAstId, FileAstId, HirFileId, HirFileIdRepr, MacroFileId,
SyntaxContextId,
};
use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize};
@ -27,7 +27,36 @@ pub struct InFileWrapper<FileKind, T> {
}
pub type InFile<T> = InFileWrapper<HirFileId, T>;
pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
pub type InRealFile<T> = InFileWrapper<FileId, T>;
pub type InRealFile<T> = InFileWrapper<EditionedFileId, T>;
/// A text offset paired with some notion of a file id; the `FileKind`
/// parameter selects which flavor of file id the position refers to.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct FilePositionWrapper<FileKind> {
// The file this position points into.
pub file_id: FileKind,
// Byte offset within that file's text.
pub offset: TextSize,
}
pub type HirFilePosition = FilePositionWrapper<HirFileId>;
pub type MacroFilePosition = FilePositionWrapper<MacroFileId>;
pub type FilePosition = FilePositionWrapper<EditionedFileId>;
// Converts an edition-tagged position into the plain `span::FileId`
// flavor by dropping the edition (via `EditionedFileId`'s `Into`).
impl From<FilePositionWrapper<EditionedFileId>> for FilePositionWrapper<span::FileId> {
fn from(value: FilePositionWrapper<EditionedFileId>) -> Self {
FilePositionWrapper { file_id: value.file_id.into(), offset: value.offset }
}
}
/// A text range paired with some notion of a file id; the `FileKind`
/// parameter selects which flavor of file id the range refers to.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct FileRangeWrapper<FileKind> {
// The file this range points into.
pub file_id: FileKind,
// The text range within that file.
pub range: TextRange,
}
pub type HirFileRange = FileRangeWrapper<HirFileId>;
pub type MacroFileRange = FileRangeWrapper<MacroFileId>;
pub type FileRange = FileRangeWrapper<EditionedFileId>;
// Converts an edition-tagged range into the plain `span::FileId`
// flavor by dropping the edition (via `EditionedFileId`'s `Into`).
impl From<FileRangeWrapper<EditionedFileId>> for FileRangeWrapper<span::FileId> {
fn from(value: FileRangeWrapper<EditionedFileId>) -> Self {
FileRangeWrapper { file_id: value.file_id.into(), range: value.range }
}
}
/// `AstId` points to an AST node in any file.
///
@ -128,7 +157,7 @@ trait FileIdToSyntax: Copy {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode;
}
impl FileIdToSyntax for FileId {
impl FileIdToSyntax for EditionedFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse(self).syntax_node()
}

View file

@ -433,9 +433,9 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
#[cfg(test)]
mod tests {
use base_db::FileId;
use expect_test::{expect, Expect};
use mbe::DocCommentDesugarMode;
use span::{Edition, EditionedFileId, FileId};
use syntax::TextRange;
use triomphe::Arc;
@ -473,7 +473,10 @@ mod tests {
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture, span::Edition::CURRENT);
let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(EditionedFileId::new(
FileId::from_raw(0),
Edition::CURRENT,
))));
let fixups = super::fixup_syntax(
span_map.as_ref(),
&parsed.syntax_node(),

View file

@ -30,10 +30,10 @@ use triomphe::Arc;
use std::{fmt, hash::Hash};
use base_db::{salsa::InternValueTrivial, CrateId, FileId};
use base_db::{salsa::InternValueTrivial, CrateId};
use either::Either;
use span::{
Edition, ErasedFileAstId, FileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor,
Edition, EditionedFileId, ErasedFileAstId, FileAstId, HirFileIdRepr, Span, SpanAnchor,
SyntaxContextData, SyntaxContextId,
};
use syntax::{
@ -52,7 +52,7 @@ use crate::{
span_map::{ExpansionSpanMap, SpanMap},
};
pub use crate::files::{AstId, ErasedAstId, InFile, InMacroFile, InRealFile};
pub use crate::files::{AstId, ErasedAstId, FileRange, InFile, InMacroFile, InRealFile};
pub use mbe::{DeclarativeMacro, ValueResult};
pub use span::{HirFileId, MacroCallId, MacroFileId};
@ -243,11 +243,11 @@ pub enum MacroCallKind {
pub trait HirFileIdExt {
/// Returns the original file of this macro call hierarchy.
fn original_file(self, db: &dyn ExpandDatabase) -> FileId;
fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId;
/// Returns the original file of this macro call hierarchy while going into the included file if
/// one of the calls comes from an `include!``.
fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> FileId;
fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> EditionedFileId;
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
@ -256,7 +256,7 @@ pub trait HirFileIdExt {
}
impl HirFileIdExt for HirFileId {
fn original_file(self, db: &dyn ExpandDatabase) -> FileId {
fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
let mut file_id = self;
loop {
match file_id.repr() {
@ -268,7 +268,7 @@ impl HirFileIdExt for HirFileId {
}
}
fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> FileId {
fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId {
loop {
match self.repr() {
HirFileIdRepr::FileId(id) => break id,
@ -568,7 +568,7 @@ impl MacroCallLoc {
&self,
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> Option<FileId> {
) -> Option<EditionedFileId> {
if self.def.is_include() {
if let MacroCallKind::FnLike { eager: Some(eager), .. } = &self.kind {
if let Ok(it) =

View file

@ -227,7 +227,6 @@ impl_to_to_tokentrees! {
mod tests {
use crate::tt;
use ::tt::IdentIsRaw;
use base_db::FileId;
use expect_test::expect;
use intern::Symbol;
use span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
@ -235,7 +234,13 @@ mod tests {
const DUMMY: tt::Span = tt::Span {
range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor { file_id: FileId::from_raw(0xe4e4e), ast_id: ROOT_ERASED_FILE_AST_ID },
anchor: SpanAnchor {
file_id: span::EditionedFileId::new(
span::FileId::from_raw(0xe4e4e),
span::Edition::CURRENT,
),
ast_id: ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
};

View file

@ -1,6 +1,6 @@
//! Span maps for real files and macro expansions.
use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContextId};
use span::{EditionedFileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContextId};
use stdx::TupleExt;
use syntax::{ast, AstNode, TextRange};
use triomphe::Arc;
@ -79,7 +79,7 @@ impl SpanMapRef<'_> {
}
}
pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: EditionedFileId) -> Arc<RealSpanMap> {
use syntax::ast::HasModuleItem;
let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(file_id.into());

View file

@ -1,10 +1,10 @@
use base_db::FileId;
use chalk_ir::Substitution;
use hir_def::db::DefDatabase;
use rustc_apfloat::{
ieee::{Half as f16, Quad as f128},
Float,
};
use span::EditionedFileId;
use test_fixture::WithFixture;
use test_utils::skip_slow_tests;
@ -102,8 +102,8 @@ fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
err
}
fn eval_goal(db: &TestDB, file_id: FileId) -> Result<Const, ConstEvalError> {
let module_id = db.module_for_file(file_id);
fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const, ConstEvalError> {
let module_id = db.module_for_file(file_id.file_id());
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;
let const_id = scope

View file

@ -35,7 +35,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
let adt_or_type_alias_id = file_ids
.into_iter()
.find_map(|file_id| {
let module_id = db.module_for_file(file_id);
let module_id = db.module_for_file(file_id.file_id());
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let adt_or_type_alias_id = scope.declarations().find_map(|x| match x {
@ -87,7 +87,7 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
);
let (db, file_id) = TestDB::with_single_file(&ra_fixture);
let module_id = db.module_for_file(file_id);
let module_id = db.module_for_file(file_id.file_id());
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let function_id = scope

View file

@ -2,7 +2,7 @@
use std::{borrow::Cow, cell::RefCell, fmt::Write, iter, mem, ops::Range};
use base_db::{CrateId, FileId};
use base_db::CrateId;
use chalk_ir::{cast::Cast, Mutability};
use either::Either;
use hir_def::{
@ -23,6 +23,7 @@ use rustc_apfloat::{
Float,
};
use rustc_hash::{FxHashMap, FxHashSet};
use span::FileId;
use stdx::never;
use syntax::{SyntaxNodePtr, TextRange};
use triomphe::Arc;
@ -395,7 +396,7 @@ impl MirEvalError {
};
let file_id = span.file_id.original_file(db.upcast());
let text_range = span.value.text_range();
writeln!(f, "{}", span_formatter(file_id, text_range))?;
writeln!(f, "{}", span_formatter(file_id.file_id(), text_range))?;
}
}
match err {

View file

@ -1,5 +1,5 @@
use base_db::FileId;
use hir_def::db::DefDatabase;
use span::EditionedFileId;
use syntax::{TextRange, TextSize};
use test_fixture::WithFixture;
@ -7,7 +7,7 @@ use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution};
use super::{interpret_mir, MirEvalError};
fn eval_main(db: &TestDB, file_id: FileId) -> Result<(String, String), MirEvalError> {
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> {
let module_id = db.module_for_file(file_id);
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;

View file

@ -2,7 +2,7 @@
use std::{fmt::Write, iter, mem};
use base_db::{salsa::Cycle, FileId};
use base_db::salsa::Cycle;
use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
use hir_def::{
body::Body,
@ -21,6 +21,7 @@ use hir_expand::name::Name;
use la_arena::ArenaMap;
use rustc_apfloat::Float;
use rustc_hash::FxHashMap;
use span::FileId;
use syntax::TextRange;
use triomphe::Arc;

View file

@ -4,11 +4,12 @@ use std::{fmt, panic, sync::Mutex};
use base_db::{
salsa::{self, Durability},
AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
};
use hir_def::{db::DefDatabase, ModuleId};
use hir_expand::db::ExpandDatabase;
use nohash_hasher::IntMap;
use rustc_hash::FxHashMap;
use span::{EditionedFileId, FileId};
use syntax::TextRange;
use test_utils::extract_annotations;
use triomphe::Arc;
@ -86,11 +87,12 @@ impl FileLoader for TestDB {
}
impl TestDB {
pub(crate) fn module_for_file_opt(&self, file_id: FileId) -> Option<ModuleId> {
pub(crate) fn module_for_file_opt(&self, file_id: impl Into<FileId>) -> Option<ModuleId> {
let file_id = file_id.into();
for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate);
for (local_id, data) in crate_def_map.modules() {
if data.origin.file_id() == Some(file_id) {
if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) {
return Some(crate_def_map.module_id(local_id));
}
}
@ -98,11 +100,13 @@ impl TestDB {
None
}
pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
self.module_for_file_opt(file_id).unwrap()
pub(crate) fn module_for_file(&self, file_id: impl Into<FileId>) -> ModuleId {
self.module_for_file_opt(file_id.into()).unwrap()
}
pub(crate) fn extract_annotations(&self) -> IntMap<FileId, Vec<(TextRange, String)>> {
pub(crate) fn extract_annotations(
&self,
) -> FxHashMap<EditionedFileId, Vec<(TextRange, String)>> {
let mut files = Vec::new();
let crate_graph = self.crate_graph();
for krate in crate_graph.iter() {
@ -115,7 +119,7 @@ impl TestDB {
files
.into_iter()
.filter_map(|file_id| {
let text = self.file_text(file_id);
let text = self.file_text(file_id.file_id());
let annotations = extract_annotations(&text);
if annotations.is_empty() {
return None;

View file

@ -12,7 +12,7 @@ mod traits;
use std::env;
use base_db::{FileRange, SourceDatabaseExt2 as _};
use base_db::SourceDatabaseExt2 as _;
use expect_test::Expect;
use hir_def::{
body::{Body, BodySourceMap, SyntheticSyntax},
@ -23,7 +23,7 @@ use hir_def::{
src::HasSource,
AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId,
};
use hir_expand::{db::ExpandDatabase, InFile};
use hir_expand::{db::ExpandDatabase, FileRange, InFile};
use once_cell::race::OnceBool;
use rustc_hash::FxHashMap;
use stdx::format_to;
@ -344,7 +344,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
} else {
(node.value.text_range(), node.value.text().to_string().replace('\n', " "))
};
let macro_prefix = if node.file_id != file_id.into() { "!" } else { "" };
let macro_prefix = if node.file_id != file_id { "!" } else { "" };
format_to!(
buf,
"{}{:?} '{}': {}\n",
@ -361,7 +361,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
});
for (src_ptr, mismatch) in &mismatches {
let range = src_ptr.value.text_range();
let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
let macro_prefix = if src_ptr.file_id != file_id { "!" } else { "" };
format_to!(
buf,
"{}{:?}: expected {}, got {}\n",
@ -584,7 +584,7 @@ fn salsa_bug() {
}
";
db.set_file_text(pos.file_id, new_text);
db.set_file_text(pos.file_id.file_id(), new_text);
let module = db.module_for_file(pos.file_id);
let crate_def_map = module.def_map(&db);

View file

@ -16,7 +16,7 @@ fn foo() -> i32 {
);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id);
let module = db.module_for_file(pos.file_id.file_id());
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(def);
@ -32,11 +32,11 @@ fn foo() -> i32 {
1
}";
db.set_file_text(pos.file_id, new_text);
db.set_file_text(pos.file_id.file_id(), new_text);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id);
let module = db.module_for_file(pos.file_id.file_id());
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(def);
@ -63,7 +63,7 @@ fn baz() -> i32 {
);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id);
let module = db.module_for_file(pos.file_id.file_id());
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(def);
@ -84,11 +84,11 @@ fn baz() -> i32 {
}
";
db.set_file_text(pos.file_id, new_text);
db.set_file_text(pos.file_id.file_id(), new_text);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id);
let module = db.module_for_file(pos.file_id.file_id());
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(def);

View file

@ -1,6 +1,5 @@
//! Provides set of implementation for hir's objects that allows get back location in file.
use base_db::FileId;
use either::Either;
use hir_def::{
nameres::{ModuleOrigin, ModuleSource},
@ -9,6 +8,7 @@ use hir_def::{
};
use hir_expand::{HirFileId, InFile};
use hir_ty::db::InternedClosure;
use span::EditionedFileId;
use syntax::ast;
use tt::TextRange;
@ -58,7 +58,7 @@ impl Module {
}
}
pub fn as_source_file_id(self, db: &dyn HirDatabase) -> Option<FileId> {
pub fn as_source_file_id(self, db: &dyn HirDatabase) -> Option<EditionedFileId> {
let def_map = self.id.def_map(db.upcast());
match def_map[self.id.local_id].origin {
ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition, .. } => {

View file

@ -37,7 +37,7 @@ mod display;
use std::{mem::discriminant, ops::ControlFlow};
use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateId, CrateOrigin, FileId};
use base_db::{CrateDisplayName, CrateId, CrateOrigin};
use either::Either;
use hir_def::{
body::{BodyDiagnostic, SyntheticSyntax},
@ -78,7 +78,7 @@ use hir_ty::{
use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind;
use rustc_hash::FxHashSet;
use span::{Edition, MacroCallId};
use span::{Edition, EditionedFileId, FileId, MacroCallId};
use stdx::{impl_from, never};
use syntax::{
ast::{self, HasAttrs as _, HasName},
@ -129,12 +129,16 @@ pub use {
hir_expand::{
attrs::{Attr, AttrId},
change::ChangeWithProcMacros,
files::{
FilePosition, FilePositionWrapper, FileRange, FileRangeWrapper, HirFilePosition,
HirFileRange, InFile, InFileWrapper, InMacroFile, InRealFile, MacroFilePosition,
MacroFileRange,
},
hygiene::{marks_rev, SyntaxContextExt},
inert_attr_macro::AttributeTemplate,
name::Name,
proc_macro::ProcMacros,
tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId,
MacroFileIdExt,
tt, ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt,
},
hir_ty::{
consteval::ConstEvalError,
@ -3200,7 +3204,7 @@ impl LocalSource {
}
}
pub fn original_file(&self, db: &dyn HirDatabase) -> FileId {
pub fn original_file(&self, db: &dyn HirDatabase) -> EditionedFileId {
self.source.file_id.original_file(db.upcast())
}

View file

@ -8,7 +8,6 @@ use std::{
ops::{self, ControlFlow, Not},
};
use base_db::{FileId, FileRange};
use either::Either;
use hir_def::{
hir::Expr,
@ -24,12 +23,12 @@ use hir_expand::{
db::ExpandDatabase,
files::InRealFile,
name::AsName,
InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
use span::{Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use span::{EditionedFileId, FileId, Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
@ -225,12 +224,12 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.resolve_variant(record_lit).map(VariantDef::from)
}
pub fn file_to_module_def(&self, file: FileId) -> Option<Module> {
self.imp.file_to_module_defs(file).next()
pub fn file_to_module_def(&self, file: impl Into<FileId>) -> Option<Module> {
self.imp.file_to_module_defs(file.into()).next()
}
pub fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
self.imp.file_to_module_defs(file)
pub fn file_to_module_defs(&self, file: impl Into<FileId>) -> impl Iterator<Item = Module> {
self.imp.file_to_module_defs(file.into())
}
pub fn to_adt_def(&self, a: &ast::Adt) -> Option<Adt> {
@ -300,7 +299,23 @@ impl<'db> SemanticsImpl<'db> {
}
}
pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), file_id.into());
tree
}
pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> {
Some(EditionedFileId::new(
file,
self.file_to_module_defs(file).next()?.krate().edition(self.db),
))
}
pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
let file_id = self
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), file_id.into());
tree
@ -757,7 +772,7 @@ impl<'db> SemanticsImpl<'db> {
// iterate related crates and find all include! invocations that include_file_id matches
for (invoc, _) in self
.db
.relevant_crates(file_id)
.relevant_crates(file_id.file_id())
.iter()
.flat_map(|krate| self.db.include_macro_invoc(*krate))
.filter(|&(_, include_file_id)| include_file_id == file_id)
@ -1089,6 +1104,7 @@ impl<'db> SemanticsImpl<'db> {
node.original_file_range_opt(self.db.upcast())
.filter(|(_, ctx)| ctx.is_root())
.map(TupleExt::head)
.map(Into::into)
}
/// Attempts to map the node out of macro expanded files.

View file

@ -85,7 +85,6 @@
//! active crate for a given position, and then provide an API to resolve all
//! syntax nodes against this specific crate.
use base_db::FileId;
use either::Either;
use hir_def::{
child_by_source::ChildBySource,
@ -103,7 +102,7 @@ use hir_expand::{
};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::MacroFileId;
use span::{FileId, MacroFileId};
use stdx::impl_from;
use syntax::{
ast::{self, HasName},
@ -162,7 +161,7 @@ impl SourceToDefCtx<'_, '_> {
}
None => {
let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).first().copied()
self.file_to_def(file_id.file_id()).first().copied()
}
}?;
@ -175,7 +174,7 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn source_file_to_def(&mut self, src: InFile<&ast::SourceFile>) -> Option<ModuleId> {
let _p = tracing::info_span!("source_file_to_def").entered();
let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).first().copied()
self.file_to_def(file_id.file_id()).first().copied()
}
pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option<TraitId> {
@ -412,7 +411,10 @@ impl SourceToDefCtx<'_, '_> {
return Some(def);
}
let def = self.file_to_def(src.file_id.original_file(self.db.upcast())).first().copied()?;
let def = self
.file_to_def(src.file_id.original_file(self.db.upcast()).file_id())
.first()
.copied()?;
Some(def.into())
}

View file

@ -1,6 +1,5 @@
//! File symbol extraction.
use base_db::FileRange;
use hir_def::{
db::DefDatabase,
item_scope::ItemInNs,
@ -8,7 +7,7 @@ use hir_def::{
AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId,
TraitId,
};
use hir_expand::{HirFileId, InFile};
use hir_expand::HirFileId;
use hir_ty::{db::HirDatabase, display::HirDisplay};
use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr};
@ -42,25 +41,6 @@ impl DeclarationLocation {
let root = sema.parse_or_expand(self.hir_file_id);
self.ptr.to_node(&root)
}
pub fn original_range(&self, db: &dyn HirDatabase) -> FileRange {
if let Some(file_id) = self.hir_file_id.file_id() {
// fast path to prevent parsing
return FileRange { file_id, range: self.ptr.text_range() };
}
let node = resolve_node(db, self.hir_file_id, &self.ptr);
node.as_ref().original_file_range_rooted(db.upcast())
}
}
fn resolve_node(
db: &dyn HirDatabase,
file_id: HirFileId,
ptr: &SyntaxNodePtr,
) -> InFile<SyntaxNode> {
let root = db.parse_or_expand(file_id);
let node = ptr.to_node(&root);
InFile::new(file_id, node)
}
/// Represents an outstanding module that the symbol collector must collect symbols from.

View file

@ -1,8 +1,8 @@
//! See [`AssistContext`].
use hir::Semantics;
use ide_db::base_db::{FileId, FileRange};
use ide_db::{label::Label, RootDatabase};
use hir::{FileRange, Semantics};
use ide_db::EditionedFileId;
use ide_db::{label::Label, FileId, RootDatabase};
use syntax::{
algo::{self, find_node_at_offset, find_node_at_range},
AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange,
@ -90,7 +90,7 @@ impl<'a> AssistContext<'a> {
self.frange.range.start()
}
pub(crate) fn file_id(&self) -> FileId {
pub(crate) fn file_id(&self) -> EditionedFileId {
self.frange.file_id
}
@ -139,7 +139,7 @@ impl Assists {
pub(crate) fn new(ctx: &AssistContext<'_>, resolve: AssistResolveStrategy) -> Assists {
Assists {
resolve,
file: ctx.frange.file_id,
file: ctx.frange.file_id.file_id(),
buf: Vec::new(),
allowed: ctx.config.allowed.clone(),
}

View file

@ -288,8 +288,8 @@ fn module_distance_heuristic(db: &dyn HirDatabase, current: &Module, item: &Modu
mod tests {
use super::*;
use hir::Semantics;
use ide_db::{assists::AssistResolveStrategy, base_db::FileRange, RootDatabase};
use hir::{FileRange, Semantics};
use ide_db::{assists::AssistResolveStrategy, RootDatabase};
use test_fixture::WithFixture;
use crate::tests::{

View file

@ -46,7 +46,7 @@ pub(crate) fn bind_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
&format!("Bind as `let _ = {};`", &ident_pat),
param.syntax().text_range(),
|builder| {
let line_index = ctx.db().line_index(ctx.file_id());
let line_index = ctx.db().line_index(ctx.file_id().into());
let indent = func.indent_level();
let text_indent = indent + 1;

View file

@ -208,7 +208,7 @@ fn replace_usages(
delayed_mutations: &mut Vec<(ImportScope, ast::Path)>,
) {
for (file_id, references) in usages {
edit.edit_file(file_id);
edit.edit_file(file_id.file_id());
let refs_with_imports = augment_references_with_imports(ctx, references, target_module);

View file

@ -143,7 +143,7 @@ fn edit_struct_references(
let usages = strukt_def.usages(&ctx.sema).include_self_refs().all();
for (file_id, refs) in usages {
edit.edit_file(file_id);
edit.edit_file(file_id.file_id());
for r in refs {
process_struct_name_reference(ctx, r, edit);
}
@ -221,7 +221,7 @@ fn edit_field_references(
let def = Definition::Field(field);
let usages = def.usages(&ctx.sema).all();
for (file_id, refs) in usages {
edit.edit_file(file_id);
edit.edit_file(file_id.file_id());
for r in refs {
if let Some(name_ref) = r.name.as_name_ref() {
// Only edit the field reference if it's part of a `.field` access

View file

@ -105,7 +105,7 @@ fn replace_usages(
target_module: &hir::Module,
) {
for (file_id, references) in usages.iter() {
edit.edit_file(*file_id);
edit.edit_file(file_id.file_id());
let refs_with_imports =
augment_references_with_imports(edit, ctx, references, struct_name, target_module);

View file

@ -188,7 +188,7 @@ fn edit_struct_references(
};
for (file_id, refs) in usages {
edit.edit_file(file_id);
edit.edit_file(file_id.file_id());
for r in refs {
for node in r.name.syntax().ancestors() {
if edit_node(edit, node).is_some() {
@ -213,7 +213,7 @@ fn edit_field_references(
let def = Definition::Field(field);
let usages = def.usages(&ctx.sema).all();
for (file_id, refs) in usages {
edit.edit_file(file_id);
edit.edit_file(file_id.file_id());
for r in refs {
if let Some(name_ref) = r.name.as_name_ref() {
edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text());

View file

@ -224,7 +224,7 @@ fn edit_tuple_usages(
// tree mutation in the same file breaks when `builder.edit_file`
// is called
if let Some((_, refs)) = usages.iter().find(|(file_id, _)| **file_id == ctx.file_id()) {
if let Some((_, refs)) = usages.iter().find(|(file_id, _)| *file_id == ctx.file_id()) {
current_file_usages = Some(
refs.iter()
.filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
@ -233,11 +233,11 @@ fn edit_tuple_usages(
}
for (file_id, refs) in usages.iter() {
if *file_id == ctx.file_id() {
if file_id == ctx.file_id() {
continue;
}
edit.edit_file(*file_id);
edit.edit_file(file_id.file_id());
let tuple_edits = refs
.iter()

View file

@ -110,7 +110,7 @@ pub(crate) fn extract_expressions_from_format_string(
Arg::Expr(s) => {
// insert arg
// FIXME: use the crate's edition for parsing
let expr = ast::Expr::parse(&s, syntax::Edition::CURRENT).syntax_node();
let expr = ast::Expr::parse(&s, syntax::Edition::CURRENT_FIXME).syntax_node();
let mut expr_tt = utils::tt_from_syntax(expr);
new_tt_bits.append(&mut expr_tt);
}

View file

@ -4,10 +4,9 @@ use either::Either;
use hir::{HasSource, HirFileIdExt, ModuleSource};
use ide_db::{
assists::{AssistId, AssistKind},
base_db::FileId,
defs::{Definition, NameClass, NameRefClass},
search::{FileReference, SearchScope},
FxHashMap, FxHashSet,
FileId, FxHashMap, FxHashSet,
};
use itertools::Itertools;
use smallvec::SmallVec;
@ -364,7 +363,7 @@ impl Module {
None
});
refs_in_files.entry(file_id).or_default().extend(usages);
refs_in_files.entry(file_id.file_id()).or_default().extend(usages);
}
}
@ -477,8 +476,13 @@ impl Module {
}
}
let (def_in_mod, def_out_sel) =
check_def_in_mod_and_out_sel(def, ctx, curr_parent_module, selection_range, file_id);
let (def_in_mod, def_out_sel) = check_def_in_mod_and_out_sel(
def,
ctx,
curr_parent_module,
selection_range,
file_id.file_id(),
);
// Find use stmt that use def in current file
let use_stmt: Option<ast::Use> = usage_res

View file

@ -72,7 +72,7 @@ pub(crate) fn extract_struct_from_enum_variant(
def_file_references = Some(references);
continue;
}
builder.edit_file(file_id);
builder.edit_file(file_id.file_id());
let processed = process_references(
ctx,
builder,

View file

@ -1,7 +1,7 @@
use hir::{
db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef,
};
use ide_db::base_db::FileId;
use ide_db::FileId;
use syntax::{
ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _},
AstNode, TextRange, ToSmolStr,
@ -128,7 +128,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
);
acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| {
edit.edit_file(target_file);
edit.edit_file(target_file.file_id());
let vis_owner = edit.make_mut(vis_owner);
vis_owner.set_visibility(Some(missing_visibility.clone_for_update()));
@ -155,7 +155,11 @@ fn target_data_for_def(
let in_file_syntax = source.syntax();
let file_id = in_file_syntax.file_id;
let range = in_file_syntax.value.text_range();
Some((ast::AnyHasVisibility::new(source.value), range, file_id.original_file(db.upcast())))
Some((
ast::AnyHasVisibility::new(source.value),
range,
file_id.original_file(db.upcast()).file_id(),
))
}
let target_name;
@ -197,7 +201,7 @@ fn target_data_for_def(
let in_file_source = m.declaration_source(db)?;
let file_id = in_file_source.file_id.original_file(db.upcast());
let range = in_file_source.value.syntax().text_range();
(ast::AnyHasVisibility::new(in_file_source.value), range, file_id)
(ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id())
}
// FIXME
hir::ModuleDef::Macro(_) => return None,

View file

@ -2,8 +2,9 @@ use crate::assist_context::{AssistContext, Assists};
use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module};
use ide_db::{
assists::{AssistId, AssistKind},
base_db::{FileId, Upcast},
base_db::Upcast,
defs::{Definition, NameRefClass},
FileId,
};
use syntax::{
ast::{self, edit::IndentLevel, NameRef},
@ -139,9 +140,9 @@ fn target_data_for_generate_constant(
.any(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains('\n'));
let post_string =
if siblings_has_newline { format!("{indent}") } else { format!("\n{indent}") };
Some((offset, indent + 1, Some(file_id), post_string))
Some((offset, indent + 1, Some(file_id.file_id()), post_string))
}
_ => Some((TextSize::from(0), 0.into(), Some(file_id), "\n".into())),
_ => Some((TextSize::from(0), 0.into(), Some(file_id.file_id()), "\n".into())),
}
}

View file

@ -121,7 +121,7 @@ fn add_variant_to_accumulator(
"Generate variant",
target,
|builder| {
builder.edit_file(file_id);
builder.edit_file(file_id.file_id());
let node = builder.make_mut(enum_node);
let variant = make_variant(ctx, name_ref, parent);
if let Some(it) = node.variant_list() {

View file

@ -3,13 +3,12 @@ use hir::{
StructKind, Type, TypeInfo,
};
use ide_db::{
base_db::FileId,
defs::{Definition, NameRefClass},
famous_defs::FamousDefs,
helpers::is_editable_crate,
path_transform::PathTransform,
source_change::SourceChangeBuilder,
FxHashMap, FxHashSet, RootDatabase, SnippetCap,
FileId, FxHashMap, FxHashSet, RootDatabase, SnippetCap,
};
use itertools::Itertools;
use stdx::to_lower_snake_case;
@ -208,7 +207,8 @@ fn get_adt_source(
let file = ctx.sema.parse(range.file_id);
let adt_source =
ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()]).map(|impl_| (impl_, range.file_id))
find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()])
.map(|impl_| (impl_, range.file_id.file_id()))
}
struct FunctionBuilder {
@ -482,7 +482,7 @@ fn get_fn_target(
target_module: Option<Module>,
call: CallExpr,
) -> Option<(GeneratedFunctionTarget, FileId)> {
let mut file = ctx.file_id();
let mut file = ctx.file_id().into();
let target = match target_module {
Some(target_module) => {
let (in_file, target) = next_space_for_fn_in_module(ctx.db(), target_module);
@ -1168,7 +1168,7 @@ fn next_space_for_fn_in_module(
}
};
(file, assist_item)
(file.file_id(), assist_item)
}
#[derive(Clone, Copy)]

View file

@ -2,16 +2,15 @@ use std::collections::BTreeSet;
use ast::make;
use either::Either;
use hir::{db::HirDatabase, sym, PathResolution, Semantics, TypeInfo};
use hir::{db::HirDatabase, sym, FileRange, PathResolution, Semantics, TypeInfo};
use ide_db::{
base_db::{FileId, FileRange},
defs::Definition,
imports::insert_use::remove_path_if_in_use_stmt,
path_transform::PathTransform,
search::{FileReference, FileReferenceNode, SearchScope},
source_change::SourceChangeBuilder,
syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
RootDatabase,
EditionedFileId, RootDatabase,
};
use itertools::{izip, Itertools};
use syntax::{
@ -304,7 +303,7 @@ fn get_fn_params(
fn inline(
sema: &Semantics<'_, RootDatabase>,
function_def_file_id: FileId,
function_def_file_id: EditionedFileId,
function: hir::Function,
fn_body: &ast::BlockExpr,
params: &[(ast::Pat, Option<ast::Type>, hir::Param)],

View file

@ -1,9 +1,8 @@
use hir::{PathResolution, Semantics};
use ide_db::{
base_db::FileId,
defs::Definition,
search::{FileReference, FileReferenceNode, UsageSearchResult},
RootDatabase,
EditionedFileId, RootDatabase,
};
use syntax::{
ast::{self, AstNode, AstToken, HasName},
@ -150,7 +149,7 @@ fn inline_let(
sema: &Semantics<'_, RootDatabase>,
let_stmt: ast::LetStmt,
range: TextRange,
file_id: FileId,
file_id: EditionedFileId,
) -> Option<InlineData> {
let bind_pat = match let_stmt.pat()? {
ast::Pat::IdentPat(pat) => pat,
@ -185,7 +184,7 @@ fn inline_usage(
sema: &Semantics<'_, RootDatabase>,
path_expr: ast::PathExpr,
range: TextRange,
file_id: FileId,
file_id: EditionedFileId,
) -> Option<InlineData> {
let path = path_expr.path()?;
let name = path.as_single_name_ref()?;

View file

@ -92,7 +92,7 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>)
};
for (file_id, refs) in usages.into_iter() {
inline_refs_for_file(file_id, refs);
inline_refs_for_file(file_id.file_id(), refs);
}
if !definition_deleted {
builder.edit_file(ctx.file_id());

View file

@ -1,5 +1,5 @@
use hir::{AsAssocItem, AssocItemContainer, HasCrate, HasSource};
use ide_db::{assists::AssistId, base_db::FileRange, defs::Definition, search::SearchScope};
use hir::{AsAssocItem, AssocItemContainer, FileRange, HasCrate, HasSource};
use ide_db::{assists::AssistId, defs::Definition, search::SearchScope};
use syntax::{
ast::{self, edit::IndentLevel, edit_in_place::Indent, AstNode},
SyntaxKind,

View file

@ -41,7 +41,7 @@ pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let target = source_file.syntax().text_range();
let module_name = module.name(ctx.db())?.display(ctx.db()).to_string();
let path = format!("../{module_name}.rs");
let dst = AnchoredPathBuf { anchor: ctx.file_id(), path };
let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path };
acc.add(
AssistId("move_from_mod_rs", AssistKind::Refactor),
format!("Convert {module_name}/mod.rs to {module_name}.rs"),

View file

@ -104,7 +104,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
buf,
);
let dst = AnchoredPathBuf { anchor: ctx.file_id(), path };
let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path };
builder.create_file(dst, contents);
},
)

View file

@ -41,7 +41,7 @@ pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let target = source_file.syntax().text_range();
let module_name = module.name(ctx.db())?.display(ctx.db()).to_string();
let path = format!("./{module_name}/mod.rs");
let dst = AnchoredPathBuf { anchor: ctx.file_id(), path };
let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path };
acc.add(
AssistId("move_to_mod_rs", AssistKind::Refactor),
format!("Convert {module_name}.rs to {module_name}/mod.rs"),

View file

@ -1,8 +1,7 @@
use std::collections::hash_map::Entry;
use hir::{HirFileIdExt, InFile, InRealFile, Module, ModuleSource};
use hir::{FileRange, HirFileIdExt, InFile, InRealFile, Module, ModuleSource};
use ide_db::{
base_db::FileRange,
defs::Definition,
search::{FileReference, ReferenceCategory, SearchScope},
FxHashMap, RootDatabase,

View file

@ -1,4 +1,4 @@
use ide_db::{base_db::FileId, defs::Definition, search::FileReference};
use ide_db::{defs::Definition, search::FileReference, EditionedFileId};
use syntax::{
algo::find_node_at_range,
ast::{self, HasArgList},
@ -90,7 +90,7 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) ->
fn process_usages(
ctx: &AssistContext<'_>,
builder: &mut SourceChangeBuilder,
file_id: FileId,
file_id: EditionedFileId,
references: Vec<FileReference>,
arg_to_remove: usize,
is_self_present: bool,

View file

@ -1,9 +1,8 @@
use hir::Semantics;
use hir::{FileRange, Semantics};
use ide_db::{
base_db::{FileId, FileRange},
defs::Definition,
search::{SearchScope, UsageSearchResult},
RootDatabase,
EditionedFileId, RootDatabase,
};
use syntax::{
ast::{
@ -157,7 +156,7 @@ fn find_usages(
sema: &Semantics<'_, RootDatabase>,
fn_: &ast::Fn,
type_param_def: Definition,
file_id: FileId,
file_id: EditionedFileId,
) -> UsageSearchResult {
let file_range = FileRange { file_id, range: fn_.syntax().text_range() };
type_param_def.usages(sema).in_scope(&SearchScope::file_range(file_range)).all()

View file

@ -1,9 +1,9 @@
use ide_db::{
assists::{AssistId, AssistKind},
base_db::FileId,
defs::Definition,
search::{FileReference, FileReferenceNode},
syntax_helpers::node_ext::full_path_of_name_ref,
EditionedFileId,
};
use syntax::{
ast::{self, NameRef},
@ -95,7 +95,7 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
fn find_all_references(
ctx: &AssistContext<'_>,
def: &Definition,
) -> impl Iterator<Item = (FileId, FileReference)> {
) -> impl Iterator<Item = (EditionedFileId, FileReference)> {
def.usages(&ctx.sema).all().into_iter().flat_map(|(file_id, references)| {
references.into_iter().map(move |reference| (file_id, reference))
})

View file

@ -65,7 +65,7 @@ mod tests;
pub mod utils;
use hir::Semantics;
use ide_db::{base_db::FileRange, RootDatabase};
use ide_db::{EditionedFileId, RootDatabase};
use syntax::TextRange;
pub(crate) use crate::assist_context::{AssistContext, Assists};
@ -83,10 +83,13 @@ pub fn assists(
db: &RootDatabase,
config: &AssistConfig,
resolve: AssistResolveStrategy,
range: FileRange,
range: ide_db::FileRange,
) -> Vec<Assist> {
let sema = Semantics::new(db);
let ctx = AssistContext::new(sema, config, range);
let file_id = sema
.attach_first_edition(range.file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(range.file_id));
let ctx = AssistContext::new(sema, config, hir::FileRange { file_id, range: range.range });
let mut acc = Assists::new(&ctx, resolve);
handlers::all().iter().for_each(|handler| {
handler(&mut acc, &ctx);

View file

@ -1,12 +1,12 @@
mod generated;
use expect_test::expect;
use hir::Semantics;
use hir::{FileRange, Semantics};
use ide_db::{
base_db::{FileId, FileRange, SourceDatabaseExt},
base_db::SourceDatabaseExt,
imports::insert_use::{ImportGranularity, InsertUseConfig},
source_change::FileSystemEdit,
RootDatabase, SnippetCap,
EditionedFileId, RootDatabase, SnippetCap,
};
use stdx::{format_to, trim_indent};
use syntax::TextRange;
@ -72,7 +72,7 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
term_search_borrowck: true,
};
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, EditionedFileId) {
RootDatabase::with_single_file(text)
}
@ -165,17 +165,17 @@ pub(crate) fn check_assist_unresolved(assist: Handler, ra_fixture: &str) {
fn check_doc_test(assist_id: &str, before: &str, after: &str) {
let after = trim_indent(after);
let (db, file_id, selection) = RootDatabase::with_range_or_offset(before);
let before = db.file_text(file_id).to_string();
let before = db.file_text(file_id.file_id()).to_string();
let frange = FileRange { file_id, range: selection.into() };
let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange)
let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange.into())
.into_iter()
.find(|assist| assist.id.0 == assist_id)
.unwrap_or_else(|| {
panic!(
"\n\nAssist is not applicable: {}\nAvailable assists: {}",
assist_id,
assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange)
assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into())
.into_iter()
.map(|assist| assist.id.0)
.collect::<Vec<_>>()
@ -190,7 +190,7 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) {
.expect("Assist did not contain any source changes");
let mut actual = before;
if let Some((source_file_edit, snippet_edit)) =
source_change.get_source_and_snippet_edit(file_id)
source_change.get_source_and_snippet_edit(file_id.file_id())
{
source_file_edit.apply(&mut actual);
if let Some(snippet_edit) = snippet_edit {
@ -224,7 +224,7 @@ fn check_with_config(
) {
let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
db.enable_proc_attr_macros();
let text_without_caret = db.file_text(file_with_caret_id).to_string();
let text_without_caret = db.file_text(file_with_caret_id.into()).to_string();
let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
@ -331,7 +331,7 @@ fn assist_order_field_struct() {
let (before_cursor_pos, before) = extract_offset(before);
let (db, file_id) = with_single_file(&before);
let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) };
let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange);
let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into());
let mut assists = assists.iter();
assert_eq!(assists.next().expect("expected assist").label, "Change visibility to pub(crate)");
@ -357,7 +357,7 @@ pub fn test_some_range(a: int) -> bool {
"#,
);
let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange);
let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into());
let expected = labels(&assists);
expect![[r#"
@ -386,7 +386,7 @@ pub fn test_some_range(a: int) -> bool {
let mut cfg = TEST_CONFIG;
cfg.allowed = Some(vec![AssistKind::Refactor]);
let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange);
let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
let expected = labels(&assists);
expect![[r#"
@ -401,7 +401,7 @@ pub fn test_some_range(a: int) -> bool {
{
let mut cfg = TEST_CONFIG;
cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange);
let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
let expected = labels(&assists);
expect![[r#"
@ -414,7 +414,7 @@ pub fn test_some_range(a: int) -> bool {
{
let mut cfg = TEST_CONFIG;
cfg.allowed = Some(vec![AssistKind::QuickFix]);
let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange);
let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
let expected = labels(&assists);
expect![[r#""#]].assert_eq(&expected);
@ -439,7 +439,7 @@ pub fn test_some_range(a: int) -> bool {
cfg.allowed = Some(vec![AssistKind::RefactorExtract]);
{
let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange);
let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into());
assert_eq!(2, assists.len());
let mut assists = assists.into_iter();
@ -484,7 +484,7 @@ pub fn test_some_range(a: int) -> bool {
assist_id: "SOMETHING_MISMATCHING".to_owned(),
assist_kind: AssistKind::RefactorExtract,
}),
frange,
frange.into(),
);
assert_eq!(2, assists.len());
let mut assists = assists.into_iter();
@ -530,7 +530,7 @@ pub fn test_some_range(a: int) -> bool {
assist_id: "extract_variable".to_owned(),
assist_kind: AssistKind::RefactorExtract,
}),
frange,
frange.into(),
);
assert_eq!(2, assists.len());
let mut assists = assists.into_iter();
@ -612,7 +612,7 @@ pub fn test_some_range(a: int) -> bool {
}
{
let assists = assists(&db, &cfg, AssistResolveStrategy::All, frange);
let assists = assists(&db, &cfg, AssistResolveStrategy::All, frange.into());
assert_eq!(2, assists.len());
let mut assists = assists.into_iter();

View file

@ -184,7 +184,7 @@ fn normalize(name: &str) -> Option<String> {
fn is_valid_name(name: &str) -> bool {
matches!(
ide_db::syntax_helpers::LexedStr::single_token(syntax::Edition::CURRENT, name),
ide_db::syntax_helpers::LexedStr::single_token(syntax::Edition::CURRENT_FIXME, name),
Some((syntax::SyntaxKind::IDENT, _error))
)
}
@ -319,7 +319,7 @@ fn from_field_name(expr: &ast::Expr) -> Option<String> {
#[cfg(test)]
mod tests {
use ide_db::base_db::FileRange;
use hir::FileRange;
use test_fixture::WithFixture;
use super::*;

View file

@ -7,6 +7,7 @@ use ide_db::{
base_db::{SourceDatabaseExt, VfsPath},
FxHashSet, RootDatabase, SymbolKind,
};
use stdx::IsNoneOr;
use syntax::{ast, AstNode, SyntaxKind, ToSmolStr};
use crate::{context::CompletionContext, CompletionItem, Completions};
@ -43,11 +44,11 @@ pub(crate) fn complete_mod(
let module_definition_file =
current_module.definition_source_file_id(ctx.db).original_file(ctx.db);
let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file));
let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file.file_id()));
let directory_to_look_for_submodules = directory_to_look_for_submodules(
current_module,
ctx.db,
source_root.path_for_file(&module_definition_file)?,
source_root.path_for_file(&module_definition_file.file_id())?,
)?;
let existing_mod_declarations = current_module
@ -63,9 +64,9 @@ pub(crate) fn complete_mod(
source_root
.iter()
.filter(|submodule_candidate_file| submodule_candidate_file != &module_definition_file)
.filter(|submodule_candidate_file| {
Some(submodule_candidate_file) != module_declaration_file.as_ref()
.filter(|&submodule_candidate_file| submodule_candidate_file != module_definition_file)
.filter(|&submodule_candidate_file| {
IsNoneOr::is_none_or(module_declaration_file, |it| it != submodule_candidate_file)
})
.filter_map(|submodule_file| {
let submodule_path = source_root.path_for_file(&submodule_file)?;

View file

@ -10,14 +10,12 @@ use hir::{
HasAttrs, Local, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo,
};
use ide_db::{
base_db::{FilePosition, SourceDatabase},
famous_defs::FamousDefs,
helpers::is_editable_crate,
base_db::SourceDatabase, famous_defs::FamousDefs, helpers::is_editable_crate, FilePosition,
FxHashMap, FxHashSet, RootDatabase,
};
use syntax::{
ast::{self, AttrKind, NameOrNameRef},
AstNode, Edition, SmolStr,
AstNode, SmolStr,
SyntaxKind::{self, *},
SyntaxToken, TextRange, TextSize, T,
};
@ -660,6 +658,7 @@ impl<'a> CompletionContext<'a> {
let _p = tracing::info_span!("CompletionContext::new").entered();
let sema = Semantics::new(db);
let file_id = sema.attach_first_edition(file_id)?;
let original_file = sema.parse(file_id);
// Insert a fake ident to get a valid parse tree. We will use this file
@ -668,8 +667,7 @@ impl<'a> CompletionContext<'a> {
let file_with_fake_ident = {
let parse = db.parse(file_id);
let edit = Indel::insert(offset, COMPLETION_MARKER.to_owned());
// FIXME: Edition
parse.reparse(&edit, Edition::CURRENT).tree()
parse.reparse(&edit, file_id.edition()).tree()
};
// always pick the token to the immediate left of the cursor, as that is what we are actually

View file

@ -12,13 +12,12 @@ mod tests;
use hir::ImportPathConfig;
use ide_db::{
base_db::FilePosition,
helpers::mod_path_to_ast,
imports::{
import_assets::NameToImport,
insert_use::{self, ImportScope},
},
items_locator, RootDatabase,
items_locator, FilePosition, RootDatabase,
};
use syntax::algo;
use text_edit::TextEdit;
@ -239,7 +238,7 @@ pub fn resolve_completion_edits(
let _p = tracing::info_span!("resolve_completion_edits").entered();
let sema = hir::Semantics::new(db);
let original_file = sema.parse(file_id);
let original_file = sema.parse(sema.attach_first_edition(file_id)?);
let original_token =
syntax::AstNode::syntax(&original_file).token_at_offset(offset).left_biased()?;
let position_for_import = &original_token.parent()?;

View file

@ -206,10 +206,11 @@ fn validate_snippet(
) -> Option<(Box<[GreenNode]>, String, Option<Box<str>>)> {
let mut imports = Vec::with_capacity(requires.len());
for path in requires.iter() {
let use_path = ast::SourceFile::parse(&format!("use {path};"), syntax::Edition::CURRENT)
.syntax_node()
.descendants()
.find_map(ast::Path::cast)?;
let use_path =
ast::SourceFile::parse(&format!("use {path};"), syntax::Edition::CURRENT_FIXME)
.syntax_node()
.descendants()
.find_map(ast::Path::cast)?;
if use_path.syntax().text() != path.as_str() {
return None;
}

View file

@ -26,9 +26,9 @@ mod visibility;
use expect_test::Expect;
use hir::PrefixKind;
use ide_db::{
base_db::{FileLoader, FilePosition},
base_db::FileLoader,
imports::insert_use::{ImportGranularity, InsertUseConfig},
RootDatabase, SnippetCap,
FilePosition, RootDatabase, SnippetCap,
};
use itertools::Itertools;
use stdx::{format_to, trim_indent};
@ -131,7 +131,7 @@ pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
database.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
(database, FilePosition { file_id, offset })
(database, FilePosition { file_id: file_id.file_id(), offset })
}
pub(crate) fn do_completion(code: &str, kind: CompletionItemKind) -> Vec<CompletionItem> {

View file

@ -2,8 +2,9 @@
use std::collections::VecDeque;
use base_db::{FileId, SourceDatabaseExt};
use base_db::SourceDatabaseExt;
use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics};
use span::FileId;
use syntax::{
ast::{self, make},
AstToken, SyntaxKind, SyntaxToken, ToSmolStr, TokenAtOffset,

View file

@ -176,7 +176,7 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
pub fn insert_use_as_alias(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
let text: &str = "use foo as _";
let parse = syntax::SourceFile::parse(text, span::Edition::CURRENT);
let parse = syntax::SourceFile::parse(text, span::Edition::CURRENT_FIXME);
let node = parse
.tree()
.syntax()

View file

@ -48,10 +48,13 @@ use std::{fmt, mem::ManuallyDrop};
use base_db::{
salsa::{self, Durability},
AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
DEFAULT_FILE_TEXT_LRU_CAP,
};
use hir::db::{DefDatabase, ExpandDatabase, HirDatabase};
use hir::{
db::{DefDatabase, ExpandDatabase, HirDatabase},
FilePositionWrapper, FileRangeWrapper,
};
use triomphe::Arc;
use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
@ -61,11 +64,15 @@ pub use ::line_index;
/// `base_db` is normally also needed in places where `ide_db` is used, so this re-export is for convenience.
pub use base_db;
pub use span::{EditionedFileId, FileId};
pub type FxIndexSet<T> = indexmap::IndexSet<T, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
pub type FxIndexMap<K, V> =
indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
pub type FilePosition = FilePositionWrapper<FileId>;
pub type FileRange = FileRangeWrapper<FileId>;
#[salsa::database(
base_db::SourceDatabaseExtStorage,
base_db::SourceDatabaseStorage,

View file

@ -22,10 +22,10 @@
//! Our current behavior is ¯\_(ツ)_/¯.
use std::fmt;
use base_db::{AnchoredPathBuf, FileId, FileRange};
use base_db::AnchoredPathBuf;
use either::Either;
use hir::{FieldSource, HirFileIdExt, InFile, ModuleSource, Semantics};
use span::{Edition, SyntaxContextId};
use hir::{FieldSource, FileRange, HirFileIdExt, InFile, ModuleSource, Semantics};
use span::{Edition, EditionedFileId, FileId, SyntaxContextId};
use stdx::{never, TupleExt};
use syntax::{
ast::{self, HasName},
@ -241,7 +241,7 @@ fn rename_mod(
let InFile { file_id, value: def_source } = module.definition_source(sema.db);
if let ModuleSource::SourceFile(..) = def_source {
let new_name = new_name.trim_start_matches("r#");
let anchor = file_id.original_file(sema.db);
let anchor = file_id.original_file(sema.db).file_id();
let is_mod_rs = module.is_mod_rs(sema.db);
let has_detached_child = module.children(sema.db).any(|child| !child.is_inline(sema.db));
@ -290,7 +290,7 @@ fn rename_mod(
.map(TupleExt::head)
{
source_change.insert_source_edit(
file_id,
file_id.file_id(),
TextEdit::replace(file_range.range, new_name.to_owned()),
)
};
@ -301,8 +301,8 @@ fn rename_mod(
let def = Definition::Module(module);
let usages = def.usages(sema).all();
let ref_edits = usages.iter().map(|(&file_id, references)| {
(file_id, source_edit_from_references(references, def, new_name))
let ref_edits = usages.iter().map(|(file_id, references)| {
(EditionedFileId::file_id(file_id), source_edit_from_references(references, def, new_name))
});
source_change.extend(ref_edits);
@ -350,8 +350,8 @@ fn rename_reference(
bail!("Cannot rename reference to `_` as it is being referenced multiple times");
}
let mut source_change = SourceChange::default();
source_change.extend(usages.iter().map(|(&file_id, references)| {
(file_id, source_edit_from_references(references, def, new_name))
source_change.extend(usages.iter().map(|(file_id, references)| {
(EditionedFileId::file_id(file_id), source_edit_from_references(references, def, new_name))
}));
let mut insert_def_edit = |def| {
@ -584,7 +584,7 @@ fn source_edit_from_def(
}
}
let Some(file_id) = file_id else { bail!("No file available to rename") };
return Ok((file_id, edit.finish()));
return Ok((EditionedFileId::file_id(file_id), edit.finish()));
}
let FileRange { file_id, range } = def
.range_for_rename(sema)
@ -600,7 +600,7 @@ fn source_edit_from_def(
_ => (range, new_name.to_owned()),
};
edit.replace(range, new_name);
Ok((file_id, edit.finish()))
Ok((file_id.file_id(), edit.finish()))
}
#[derive(Copy, Clone, Debug, PartialEq)]

View file

@ -6,15 +6,16 @@
use std::mem;
use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use base_db::{salsa::Database, SourceDatabase, SourceDatabaseExt};
use hir::{
sym, AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile,
InRealFile, ModuleSource, PathResolution, Semantics, Visibility,
sym, AsAssocItem, DefWithBody, DescendPreference, FileRange, HasAttrs, HasSource, HirFileIdExt,
InFile, InRealFile, ModuleSource, PathResolution, Semantics, Visibility,
};
use memchr::memmem::Finder;
use nohash_hasher::IntMap;
use once_cell::unsync::Lazy;
use parser::SyntaxKind;
use rustc_hash::FxHashMap;
use span::EditionedFileId;
use syntax::{ast, match_ast, AstNode, AstToken, SyntaxElement, TextRange, TextSize, ToSmolStr};
use triomphe::Arc;
@ -26,7 +27,7 @@ use crate::{
#[derive(Debug, Default, Clone)]
pub struct UsageSearchResult {
pub references: IntMap<FileId, Vec<FileReference>>,
pub references: FxHashMap<EditionedFileId, Vec<FileReference>>,
}
impl UsageSearchResult {
@ -38,8 +39,8 @@ impl UsageSearchResult {
self.references.len()
}
pub fn iter(&self) -> impl Iterator<Item = (&FileId, &[FileReference])> + '_ {
self.references.iter().map(|(file_id, refs)| (file_id, &**refs))
pub fn iter(&self) -> impl Iterator<Item = (EditionedFileId, &[FileReference])> + '_ {
self.references.iter().map(|(&file_id, refs)| (file_id, &**refs))
}
pub fn file_ranges(&self) -> impl Iterator<Item = FileRange> + '_ {
@ -50,8 +51,8 @@ impl UsageSearchResult {
}
impl IntoIterator for UsageSearchResult {
type Item = (FileId, Vec<FileReference>);
type IntoIter = <IntMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;
type Item = (EditionedFileId, Vec<FileReference>);
type IntoIter = <FxHashMap<EditionedFileId, Vec<FileReference>> as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.references.into_iter()
@ -142,36 +143,40 @@ bitflags::bitflags! {
/// e.g. for things like local variables.
#[derive(Clone, Debug)]
pub struct SearchScope {
entries: IntMap<FileId, Option<TextRange>>,
entries: FxHashMap<EditionedFileId, Option<TextRange>>,
}
impl SearchScope {
fn new(entries: IntMap<FileId, Option<TextRange>>) -> SearchScope {
fn new(entries: FxHashMap<EditionedFileId, Option<TextRange>>) -> SearchScope {
SearchScope { entries }
}
/// Build a search scope spanning the entire crate graph of files.
fn crate_graph(db: &RootDatabase) -> SearchScope {
let mut entries = IntMap::default();
let mut entries = FxHashMap::default();
let graph = db.crate_graph();
for krate in graph.iter() {
let root_file = graph[krate].root_file_id;
let source_root_id = db.file_source_root(root_file);
let source_root = db.source_root(source_root_id);
entries.extend(source_root.iter().map(|id| (id, None)));
entries.extend(
source_root.iter().map(|id| (EditionedFileId::new(id, graph[krate].edition), None)),
);
}
SearchScope { entries }
}
/// Build a search scope spanning all the reverse dependencies of the given crate.
fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope {
let mut entries = IntMap::default();
let mut entries = FxHashMap::default();
for rev_dep in of.transitive_reverse_dependencies(db) {
let root_file = rev_dep.root_file(db);
let source_root_id = db.file_source_root(root_file);
let source_root = db.source_root(source_root_id);
entries.extend(source_root.iter().map(|id| (id, None)));
entries.extend(
source_root.iter().map(|id| (EditionedFileId::new(id, rev_dep.edition(db)), None)),
);
}
SearchScope { entries }
}
@ -181,12 +186,17 @@ impl SearchScope {
let root_file = of.root_file(db);
let source_root_id = db.file_source_root(root_file);
let source_root = db.source_root(source_root_id);
SearchScope { entries: source_root.iter().map(|id| (id, None)).collect() }
SearchScope {
entries: source_root
.iter()
.map(|id| (EditionedFileId::new(id, of.edition(db)), None))
.collect(),
}
}
/// Build a search scope spanning the given module and all its submodules.
pub fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
let mut entries = IntMap::default();
let mut entries = FxHashMap::default();
let (file_id, range) = {
let InFile { file_id, value } = module.definition_source_range(db);
@ -211,11 +221,11 @@ impl SearchScope {
/// Build an empty search scope.
pub fn empty() -> SearchScope {
SearchScope::new(IntMap::default())
SearchScope::new(FxHashMap::default())
}
/// Build a empty search scope spanning the given file.
pub fn single_file(file: FileId) -> SearchScope {
pub fn single_file(file: EditionedFileId) -> SearchScope {
SearchScope::new(std::iter::once((file, None)).collect())
}
@ -225,7 +235,7 @@ impl SearchScope {
}
/// Build a empty search scope spanning the given files.
pub fn files(files: &[FileId]) -> SearchScope {
pub fn files(files: &[EditionedFileId]) -> SearchScope {
SearchScope::new(files.iter().map(|f| (*f, None)).collect())
}
@ -256,8 +266,8 @@ impl SearchScope {
}
impl IntoIterator for SearchScope {
type Item = (FileId, Option<TextRange>);
type IntoIter = std::collections::hash_map::IntoIter<FileId, Option<TextRange>>;
type Item = (EditionedFileId, Option<TextRange>);
type IntoIter = std::collections::hash_map::IntoIter<EditionedFileId, Option<TextRange>>;
fn into_iter(self) -> Self::IntoIter {
self.entries.into_iter()
@ -432,7 +442,7 @@ impl<'a> FindUsages<'a> {
res
}
pub fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
pub fn search(&self, sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool) {
let _p = tracing::info_span!("FindUsages:search").entered();
let sema = self.sema;
@ -497,13 +507,13 @@ impl<'a> FindUsages<'a> {
})
}
// for<'a> |scope: &'a SearchScope| -> impl Iterator<Item = (Arc<String>, FileId, TextRange)> + 'a { ... }
// for<'a> |scope: &'a SearchScope| -> impl Iterator<Item = (Arc<String>, EditionedFileId, TextRange)> + 'a { ... }
fn scope_files<'a>(
sema: &'a Semantics<'_, RootDatabase>,
scope: &'a SearchScope,
) -> impl Iterator<Item = (Arc<str>, FileId, TextRange)> + 'a {
) -> impl Iterator<Item = (Arc<str>, EditionedFileId, TextRange)> + 'a {
scope.entries.iter().map(|(&file_id, &search_range)| {
let text = sema.db.file_text(file_id);
let text = sema.db.file_text(file_id.file_id());
let search_range =
search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text)));
@ -627,7 +637,7 @@ impl<'a> FindUsages<'a> {
return;
};
let text = sema.db.file_text(file_id);
let text = sema.db.file_text(file_id.file_id());
let search_range =
search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text)));
@ -651,7 +661,7 @@ impl<'a> FindUsages<'a> {
&self,
self_ty: &hir::Type,
name_ref: &ast::NameRef,
sink: &mut dyn FnMut(FileId, FileReference) -> bool,
sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool,
) -> bool {
match NameRefClass::classify(self.sema, name_ref) {
Some(NameRefClass::Definition(Definition::SelfType(impl_)))
@ -672,7 +682,7 @@ impl<'a> FindUsages<'a> {
fn found_self_module_name_ref(
&self,
name_ref: &ast::NameRef,
sink: &mut dyn FnMut(FileId, FileReference) -> bool,
sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool,
) -> bool {
match NameRefClass::classify(self.sema, name_ref) {
Some(NameRefClass::Definition(def @ Definition::Module(_))) if def == self.def => {
@ -695,11 +705,11 @@ impl<'a> FindUsages<'a> {
fn found_format_args_ref(
&self,
file_id: FileId,
file_id: EditionedFileId,
range: TextRange,
token: ast::String,
res: Option<PathResolution>,
sink: &mut dyn FnMut(FileId, FileReference) -> bool,
sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool,
) -> bool {
match res.map(Definition::from) {
Some(def) if def == self.def => {
@ -717,7 +727,7 @@ impl<'a> FindUsages<'a> {
fn found_lifetime(
&self,
lifetime: &ast::Lifetime,
sink: &mut dyn FnMut(FileId, FileReference) -> bool,
sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool,
) -> bool {
match NameRefClass::classify_lifetime(self.sema, lifetime) {
Some(NameRefClass::Definition(def)) if def == self.def => {
@ -736,7 +746,7 @@ impl<'a> FindUsages<'a> {
fn found_name_ref(
&self,
name_ref: &ast::NameRef,
sink: &mut dyn FnMut(FileId, FileReference) -> bool,
sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool,
) -> bool {
match NameRefClass::classify(self.sema, name_ref) {
Some(NameRefClass::Definition(def))
@ -810,7 +820,7 @@ impl<'a> FindUsages<'a> {
fn found_name(
&self,
name: &ast::Name,
sink: &mut dyn FnMut(FileId, FileReference) -> bool,
sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool,
) -> bool {
match NameClass::classify(self.sema, name) {
Some(NameClass::PatFieldShorthand { local_def: _, field_ref })

View file

@ -6,9 +6,10 @@
use std::{collections::hash_map::Entry, iter, mem};
use crate::{assists::Command, SnippetCap};
use base_db::{AnchoredPathBuf, FileId};
use base_db::AnchoredPathBuf;
use itertools::Itertools;
use nohash_hasher::IntMap;
use span::FileId;
use stdx::never;
use syntax::{
algo, AstNode, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize,
@ -32,28 +33,28 @@ impl SourceChange {
SourceChange { source_file_edits, file_system_edits, is_snippet: false }
}
pub fn from_text_edit(file_id: FileId, edit: TextEdit) -> Self {
pub fn from_text_edit(file_id: impl Into<FileId>, edit: TextEdit) -> Self {
SourceChange {
source_file_edits: iter::once((file_id, (edit, None))).collect(),
source_file_edits: iter::once((file_id.into(), (edit, None))).collect(),
..Default::default()
}
}
/// Inserts a [`TextEdit`] for the given [`FileId`]. This properly handles merging existing
/// edits for a file if some already exist.
pub fn insert_source_edit(&mut self, file_id: FileId, edit: TextEdit) {
self.insert_source_and_snippet_edit(file_id, edit, None)
pub fn insert_source_edit(&mut self, file_id: impl Into<FileId>, edit: TextEdit) {
self.insert_source_and_snippet_edit(file_id.into(), edit, None)
}
/// Inserts a [`TextEdit`] and potentially a [`SnippetEdit`] for the given [`FileId`].
/// This properly handles merging existing edits for a file if some already exist.
pub fn insert_source_and_snippet_edit(
&mut self,
file_id: FileId,
file_id: impl Into<FileId>,
edit: TextEdit,
snippet_edit: Option<SnippetEdit>,
) {
match self.source_file_edits.entry(file_id) {
match self.source_file_edits.entry(file_id.into()) {
Entry::Occupied(mut entry) => {
let value = entry.get_mut();
never!(value.0.union(edit).is_err(), "overlapping edits for same file");
@ -231,10 +232,10 @@ impl TreeMutator {
}
impl SourceChangeBuilder {
pub fn new(file_id: FileId) -> SourceChangeBuilder {
pub fn new(file_id: impl Into<FileId>) -> SourceChangeBuilder {
SourceChangeBuilder {
edit: TextEdit::builder(),
file_id,
file_id: file_id.into(),
source_change: SourceChange::default(),
command: None,
mutated_tree: None,
@ -242,9 +243,9 @@ impl SourceChangeBuilder {
}
}
pub fn edit_file(&mut self, file_id: FileId) {
pub fn edit_file(&mut self, file_id: impl Into<FileId>) {
self.commit();
self.file_id = file_id;
self.file_id = file_id.into();
}
fn commit(&mut self) {
@ -300,8 +301,8 @@ impl SourceChangeBuilder {
let file_system_edit = FileSystemEdit::CreateFile { dst, initial_contents: content.into() };
self.source_change.push_file_system_edit(file_system_edit);
}
pub fn move_file(&mut self, src: FileId, dst: AnchoredPathBuf) {
let file_system_edit = FileSystemEdit::MoveFile { src, dst };
pub fn move_file(&mut self, src: impl Into<FileId>, dst: AnchoredPathBuf) {
let file_system_edit = FileSystemEdit::MoveFile { src: src.into(), dst };
self.source_change.push_file_system_edit(file_system_edit);
}

View file

@ -113,8 +113,8 @@ fn assoc_item_of_trait(
#[cfg(test)]
mod tests {
use base_db::FilePosition;
use expect_test::{expect, Expect};
use hir::FilePosition;
use hir::Semantics;
use syntax::ast::{self, AstNode};
use test_fixture::ChangeFixture;

View file

@ -1,16 +1,17 @@
//! Suggests shortening `Foo { field: field }` to `Foo { field }` in both
//! expressions and patterns.
use ide_db::{
base_db::{FileId, FileRange},
source_change::SourceChange,
};
use ide_db::{source_change::SourceChange, EditionedFileId, FileRange};
use syntax::{ast, match_ast, AstNode, SyntaxNode};
use text_edit::TextEdit;
use crate::{fix, Diagnostic, DiagnosticCode};
pub(crate) fn field_shorthand(acc: &mut Vec<Diagnostic>, file_id: FileId, node: &SyntaxNode) {
pub(crate) fn field_shorthand(
acc: &mut Vec<Diagnostic>,
file_id: EditionedFileId,
node: &SyntaxNode,
) {
match_ast! {
match node {
ast::RecordExpr(it) => check_expr_field_shorthand(acc, file_id, it),
@ -22,7 +23,7 @@ pub(crate) fn field_shorthand(acc: &mut Vec<Diagnostic>, file_id: FileId, node:
fn check_expr_field_shorthand(
acc: &mut Vec<Diagnostic>,
file_id: FileId,
file_id: EditionedFileId,
record_expr: ast::RecordExpr,
) {
let record_field_list = match record_expr.record_expr_field_list() {
@ -52,7 +53,7 @@ fn check_expr_field_shorthand(
Diagnostic::new(
DiagnosticCode::Clippy("redundant_field_names"),
"Shorthand struct initialization",
FileRange { file_id, range: field_range },
FileRange { file_id: file_id.into(), range: field_range },
)
.with_fixes(Some(vec![fix(
"use_expr_field_shorthand",
@ -66,7 +67,7 @@ fn check_expr_field_shorthand(
fn check_pat_field_shorthand(
acc: &mut Vec<Diagnostic>,
file_id: FileId,
file_id: EditionedFileId,
record_pat: ast::RecordPat,
) {
let record_pat_field_list = match record_pat.record_pat_field_list() {
@ -96,7 +97,7 @@ fn check_pat_field_shorthand(
Diagnostic::new(
DiagnosticCode::Clippy("redundant_field_names"),
"Shorthand struct pattern",
FileRange { file_id, range: field_range },
FileRange { file_id: file_id.into(), range: field_range },
)
.with_fixes(Some(vec![fix(
"use_pat_field_shorthand",

View file

@ -3,11 +3,10 @@
use hir::{ImportPathConfig, PathResolution, Semantics};
use ide_db::{
base_db::{FileId, FileRange},
helpers::mod_path_to_ast,
imports::insert_use::{insert_use, ImportScope},
source_change::SourceChangeBuilder,
FxHashMap, RootDatabase,
EditionedFileId, FileRange, FxHashMap, RootDatabase,
};
use itertools::Itertools;
use stdx::{format_to, never};
@ -102,7 +101,7 @@ impl State {
pub(crate) fn json_in_items(
sema: &Semantics<'_, RootDatabase>,
acc: &mut Vec<Diagnostic>,
file_id: FileId,
file_id: EditionedFileId,
node: &SyntaxNode,
config: &DiagnosticsConfig,
) {
@ -132,7 +131,7 @@ pub(crate) fn json_in_items(
Diagnostic::new(
DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning),
"JSON syntax is not valid as a Rust item",
FileRange { file_id, range },
FileRange { file_id: file_id.into(), range },
)
.with_fixes(Some(vec![{
let mut scb = SourceChangeBuilder::new(file_id);

View file

@ -1,6 +1,6 @@
use either::Either;
use hir::InFile;
use ide_db::base_db::FileRange;
use ide_db::FileRange;
use syntax::{
ast::{self, HasArgList},
AstNode, AstPtr,

View file

@ -1,6 +1,6 @@
use either::Either;
use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics, VariantId};
use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
use ide_db::{source_change::SourceChange, EditionedFileId, RootDatabase};
use syntax::{
ast::{self, edit::IndentLevel, make},
AstNode,
@ -51,7 +51,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assis
fn missing_record_expr_field_fixes(
sema: &Semantics<'_, RootDatabase>,
usage_file_id: FileId,
usage_file_id: EditionedFileId,
record_expr_field: &ast::RecordExprField,
) -> Option<Vec<Assist>> {
let record_lit = ast::RecordExpr::cast(record_expr_field.syntax().parent()?.parent()?)?;

View file

@ -1,5 +1,5 @@
use hir::{db::ExpandDatabase, diagnostics::RemoveTrailingReturn};
use ide_db::{assists::Assist, base_db::FileRange, source_change::SourceChange};
use hir::{db::ExpandDatabase, diagnostics::RemoveTrailingReturn, FileRange};
use ide_db::{assists::Assist, source_change::SourceChange};
use syntax::{ast, AstNode};
use text_edit::TextEdit;

View file

@ -4,9 +4,9 @@ use std::iter;
use hir::{db::DefDatabase, DefMap, InFile, ModuleSource};
use ide_db::{
base_db::{FileId, FileLoader, FileRange, SourceDatabase, SourceDatabaseExt},
base_db::{FileLoader, SourceDatabaseExt},
source_change::SourceChange,
RootDatabase,
FileId, FileRange, LineIndexDatabase,
};
use paths::Utf8Component;
use syntax::{
@ -26,7 +26,8 @@ pub(crate) fn unlinked_file(
acc: &mut Vec<Diagnostic>,
file_id: FileId,
) {
let fixes = fixes(ctx, file_id);
let mut range = TextRange::up_to(ctx.sema.db.line_index(file_id).len());
let fixes = fixes(ctx, file_id, range);
// FIXME: This is a hack for the vscode extension to notice whether there is an autofix or not before having to resolve diagnostics.
// This is to prevent project linking popups from appearing when there is an autofix. https://github.com/rust-lang/rust-analyzer/issues/14523
let message = if fixes.is_none() {
@ -37,7 +38,6 @@ pub(crate) fn unlinked_file(
let message = format!("{message}\n\nIf you're intentionally working on unowned files, you can silence this warning by adding \"unlinked-file\" to rust-analyzer.diagnostics.disabled in your settings.");
let mut range = ctx.sema.db.parse(file_id).syntax_node().text_range();
let mut unused = true;
if fixes.is_none() {
@ -70,7 +70,11 @@ pub(crate) fn unlinked_file(
);
}
fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
fn fixes(
ctx: &DiagnosticsContext<'_>,
file_id: FileId,
trigger_range: TextRange,
) -> Option<Vec<Assist>> {
// If there's an existing module that could add `mod` or `pub mod` items to include the unlinked file,
// suggest that as a fix.
@ -94,7 +98,9 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
let root_module = &crate_def_map[DefMap::ROOT];
let Some(root_file_id) = root_module.origin.file_id() else { continue };
let Some(crate_root_path) = source_root.path_for_file(&root_file_id) else { continue };
let Some(crate_root_path) = source_root.path_for_file(&root_file_id.file_id()) else {
continue;
};
let Some(rel) = parent.strip_prefix(&crate_root_path.parent()?) else { continue };
// try resolving the relative difference of the paths as inline modules
@ -119,7 +125,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
let InFile { file_id: parent_file_id, value: source } =
current.definition_source(ctx.sema.db);
let parent_file_id = parent_file_id.file_id()?;
return make_fixes(ctx.sema.db, parent_file_id, source, &module_name, file_id);
return make_fixes(parent_file_id.file_id(), source, &module_name, trigger_range);
}
// if we aren't adding to a crate root, walk backwards such that we support `#[path = ...]` overrides if possible
@ -139,18 +145,17 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
'crates: for &krate in ctx.sema.db.relevant_crates(parent_id).iter() {
let crate_def_map = ctx.sema.db.crate_def_map(krate);
let Some((_, module)) = crate_def_map.modules().find(|(_, module)| {
module.origin.file_id() == Some(parent_id) && !module.origin.is_inline()
module.origin.file_id().map(Into::into) == Some(parent_id) && !module.origin.is_inline()
}) else {
continue;
};
if stack.is_empty() {
return make_fixes(
ctx.sema.db,
parent_id,
module.definition_source(ctx.sema.db).value,
&module_name,
file_id,
trigger_range,
);
} else {
// direct parent file is missing,
@ -174,7 +179,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
let InFile { file_id: parent_file_id, value: source } =
current.definition_source(ctx.sema.db);
let parent_file_id = parent_file_id.file_id()?;
return make_fixes(ctx.sema.db, parent_file_id, source, &module_name, file_id);
return make_fixes(parent_file_id.file_id(), source, &module_name, trigger_range);
}
}
@ -182,11 +187,10 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
}
fn make_fixes(
db: &RootDatabase,
parent_file_id: FileId,
source: ModuleSource,
new_mod_name: &str,
added_file_id: FileId,
trigger_range: TextRange,
) -> Option<Vec<Assist>> {
fn is_outline_mod(item: &ast::Item) -> bool {
matches!(item, ast::Item::Module(m) if m.item_list().is_none())
@ -257,7 +261,6 @@ fn make_fixes(
}
}
let trigger_range = db.parse(added_file_id).tree().syntax().text_range();
Some(vec![
fix(
"add_mod_declaration",

View file

@ -1,9 +1,8 @@
use std::iter;
use hir::{db::ExpandDatabase, Adt, HasSource, HirDisplay, InFile, Struct, Union};
use hir::{db::ExpandDatabase, Adt, FileRange, HasSource, HirDisplay, InFile, Struct, Union};
use ide_db::{
assists::{Assist, AssistId, AssistKind},
base_db::FileRange,
helpers::is_editable_crate,
label::Label,
source_change::{SourceChange, SourceChangeBuilder},

View file

@ -1,7 +1,6 @@
use hir::{db::ExpandDatabase, AssocItem, HirDisplay, InFile};
use hir::{db::ExpandDatabase, AssocItem, FileRange, HirDisplay, InFile};
use ide_db::{
assists::{Assist, AssistId, AssistKind},
base_db::FileRange,
label::Label,
source_change::SourceChange,
};
@ -105,8 +104,8 @@ fn field_fix(
group: None,
target: range,
source_change: Some(SourceChange::from_iter([
(file_id, TextEdit::insert(range.start(), "(".to_owned())),
(file_id, TextEdit::insert(range.end(), ")".to_owned())),
(file_id.into(), TextEdit::insert(range.start(), "(".to_owned())),
(file_id.into(), TextEdit::insert(range.end(), ")".to_owned())),
])),
command: None,
})

View file

@ -43,7 +43,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option<Vec<
&format!("Create module at `{candidate}`"),
FileSystemEdit::CreateFile {
dst: AnchoredPathBuf {
anchor: d.decl.file_id.original_file(ctx.sema.db),
anchor: d.decl.file_id.original_file(ctx.sema.db).file_id(),
path: candidate.clone(),
},
initial_contents: "".to_owned(),

View file

@ -1,10 +1,9 @@
use hir::Name;
use ide_db::{
assists::{Assist, AssistId, AssistKind},
base_db::FileRange,
label::Label,
source_change::SourceChange,
RootDatabase,
FileRange, RootDatabase,
};
use syntax::TextRange;
use text_edit::TextEdit;
@ -43,7 +42,7 @@ pub(crate) fn unused_variables(
ast,
)
.with_fixes(name_range.and_then(|it| {
fixes(ctx.sema.db, var_name, it.range, diagnostic_range, ast.file_id.is_macro())
fixes(ctx.sema.db, var_name, it.range, diagnostic_range.into(), ast.file_id.is_macro())
}))
.experimental(),
)

View file

@ -1,8 +1,5 @@
use hir::InFile;
use ide_db::{
base_db::{FileId, FileRange},
source_change::SourceChange,
};
use ide_db::{source_change::SourceChange, EditionedFileId, FileRange};
use itertools::Itertools;
use syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr};
use text_edit::TextEdit;
@ -14,7 +11,7 @@ use crate::{fix, Diagnostic, DiagnosticCode};
// Diagnostic for unnecessary braces in `use` items.
pub(crate) fn useless_braces(
acc: &mut Vec<Diagnostic>,
file_id: FileId,
file_id: EditionedFileId,
node: &SyntaxNode,
) -> Option<()> {
let use_tree_list = ast::UseTreeList::cast(node.clone())?;
@ -41,7 +38,7 @@ pub(crate) fn useless_braces(
Diagnostic::new(
DiagnosticCode::RustcLint("unused_braces"),
"Unnecessary braces in use statement".to_owned(),
FileRange { file_id, range: use_range },
FileRange { file_id: file_id.into(), range: use_range },
)
.with_main_node(InFile::new(file_id.into(), SyntaxNodePtr::new(node)))
.with_fixes(Some(vec![fix(

View file

@ -78,13 +78,13 @@ mod tests;
use hir::{diagnostics::AnyDiagnostic, InFile, Semantics};
use ide_db::{
assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
base_db::{FileId, FileRange, SourceDatabase},
base_db::SourceDatabase,
generated::lints::{LintGroup, CLIPPY_LINT_GROUPS, DEFAULT_LINT_GROUPS},
imports::insert_use::InsertUseConfig,
label::Label,
source_change::SourceChange,
syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
FxHashMap, FxHashSet, RootDatabase, SnippetCap,
EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, SnippetCap,
};
use once_cell::sync::Lazy;
use stdx::never;
@ -144,12 +144,16 @@ pub struct Diagnostic {
}
impl Diagnostic {
fn new(code: DiagnosticCode, message: impl Into<String>, range: FileRange) -> Diagnostic {
fn new(
code: DiagnosticCode,
message: impl Into<String>,
range: impl Into<FileRange>,
) -> Diagnostic {
let message = message.into();
Diagnostic {
code,
message,
range,
range: range.into(),
severity: match code {
DiagnosticCode::RustcHardError(_) => Severity::Error,
// FIXME: Rustc lints are not always warning, but the ones that are currently implemented are all warnings.
@ -290,6 +294,7 @@ impl DiagnosticsContext<'_> {
}
})()
.unwrap_or_else(|| sema.diagnostics_display_range(*node))
.into()
}
}
@ -303,6 +308,9 @@ pub fn diagnostics(
) -> Vec<Diagnostic> {
let _p = tracing::info_span!("diagnostics").entered();
let sema = Semantics::new(db);
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let mut res = Vec::new();
// [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
@ -310,7 +318,7 @@ pub fn diagnostics(
Diagnostic::new(
DiagnosticCode::RustcHardError("syntax-error"),
format!("Syntax Error: {err}"),
FileRange { file_id, range: err.range() },
FileRange { file_id: file_id.into(), range: err.range() },
)
}));
let parse_errors = res.len();
@ -336,7 +344,7 @@ pub fn diagnostics(
// file, so we skip semantic diagnostics so we can show these faster.
Some(m) if parse_errors < 16 => m.diagnostics(db, &mut diags, config.style_lints),
Some(_) => (),
None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id),
None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id.file_id()),
}
for diag in diags {
@ -627,4 +635,5 @@ fn adjusted_display_range<N: AstNode>(
diag_ptr
.with_value(adj(node).unwrap_or_else(|| diag_ptr.value.text_range()))
.original_node_file_range_rooted(ctx.sema.db)
.into()
}

View file

@ -60,7 +60,7 @@ fn check_nth_fix_with_config(
let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
let diagnostic =
super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_position.file_id)
super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_position.file_id.into())
.pop()
.expect("no diagnostics");
let fix = &diagnostic
@ -102,34 +102,37 @@ pub(crate) fn check_has_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
let mut conf = DiagnosticsConfig::test_sample();
conf.expr_fill_default = ExprFillDefaultMode::Default;
let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
.into_iter()
.find(|d| {
d.fixes
.as_ref()
.and_then(|fixes| {
fixes.iter().find(|fix| {
if !fix.target.contains_inclusive(file_position.offset) {
return false;
}
let actual = {
let source_change = fix.source_change.as_ref().unwrap();
let file_id = *source_change.source_file_edits.keys().next().unwrap();
let mut actual = db.file_text(file_id).to_string();
for (edit, snippet_edit) in source_change.source_file_edits.values() {
edit.apply(&mut actual);
if let Some(snippet_edit) = snippet_edit {
snippet_edit.apply(&mut actual);
}
let fix =
super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id.into())
.into_iter()
.find(|d| {
d.fixes
.as_ref()
.and_then(|fixes| {
fixes.iter().find(|fix| {
if !fix.target.contains_inclusive(file_position.offset) {
return false;
}
actual
};
after == actual
let actual = {
let source_change = fix.source_change.as_ref().unwrap();
let file_id =
*source_change.source_file_edits.keys().next().unwrap();
let mut actual = db.file_text(file_id).to_string();
for (edit, snippet_edit) in source_change.source_file_edits.values()
{
edit.apply(&mut actual);
if let Some(snippet_edit) = snippet_edit {
snippet_edit.apply(&mut actual);
}
}
actual
};
after == actual
})
})
})
.is_some()
});
.is_some()
});
assert!(fix.is_some(), "no diagnostic with desired fix");
}
@ -141,35 +144,38 @@ pub(crate) fn check_has_single_fix(ra_fixture_before: &str, ra_fixture_after: &s
let mut conf = DiagnosticsConfig::test_sample();
conf.expr_fill_default = ExprFillDefaultMode::Default;
let mut n_fixes = 0;
let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
.into_iter()
.find(|d| {
d.fixes
.as_ref()
.and_then(|fixes| {
n_fixes += fixes.len();
fixes.iter().find(|fix| {
if !fix.target.contains_inclusive(file_position.offset) {
return false;
}
let actual = {
let source_change = fix.source_change.as_ref().unwrap();
let file_id = *source_change.source_file_edits.keys().next().unwrap();
let mut actual = db.file_text(file_id).to_string();
for (edit, snippet_edit) in source_change.source_file_edits.values() {
edit.apply(&mut actual);
if let Some(snippet_edit) = snippet_edit {
snippet_edit.apply(&mut actual);
}
let fix =
super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id.into())
.into_iter()
.find(|d| {
d.fixes
.as_ref()
.and_then(|fixes| {
n_fixes += fixes.len();
fixes.iter().find(|fix| {
if !fix.target.contains_inclusive(file_position.offset) {
return false;
}
actual
};
after == actual
let actual = {
let source_change = fix.source_change.as_ref().unwrap();
let file_id =
*source_change.source_file_edits.keys().next().unwrap();
let mut actual = db.file_text(file_id).to_string();
for (edit, snippet_edit) in source_change.source_file_edits.values()
{
edit.apply(&mut actual);
if let Some(snippet_edit) = snippet_edit {
snippet_edit.apply(&mut actual);
}
}
actual
};
after == actual
})
})
})
.is_some()
});
.is_some()
});
assert!(fix.is_some(), "no diagnostic with desired fix");
assert!(n_fixes == 1, "Too many fixes suggested");
}
@ -181,7 +187,7 @@ pub(crate) fn check_no_fix(ra_fixture: &str) {
&db,
&DiagnosticsConfig::test_sample(),
&AssistResolveStrategy::All,
file_position.file_id,
file_position.file_id.into(),
)
.pop()
.unwrap();
@ -209,8 +215,9 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
.iter()
.copied()
.flat_map(|file_id| {
super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id).into_iter().map(
|d| {
super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.into())
.into_iter()
.map(|d| {
let mut annotation = String::new();
if let Some(fixes) = &d.fixes {
assert!(!fixes.is_empty());
@ -225,12 +232,12 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
annotation.push_str(": ");
annotation.push_str(&d.message);
(d.range, annotation)
},
)
})
})
.map(|(diagnostic, annotation)| (diagnostic.file_id, (diagnostic.range, annotation)))
.into_group_map();
for file_id in files {
let file_id = file_id.into();
let line_index = db.line_index(file_id);
let mut actual = annotations.remove(&file_id).unwrap_or_default();
@ -268,6 +275,7 @@ fn test_disabled_diagnostics() {
config.disabled.insert("E0583".into());
let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
let file_id = file_id.into();
let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
assert!(diagnostics.is_empty());

View file

@ -1,10 +1,7 @@
//! This module allows building an SSR MatchFinder by parsing the SSR rule
//! from a comment.
use ide_db::{
base_db::{FilePosition, FileRange, SourceDatabase},
RootDatabase,
};
use ide_db::{base_db::SourceDatabase, EditionedFileId, FilePosition, FileRange, RootDatabase};
use syntax::{
ast::{self, AstNode, AstToken},
TextRange,
@ -20,7 +17,7 @@ pub fn ssr_from_comment(
frange: FileRange,
) -> Option<(MatchFinder<'_>, TextRange)> {
let comment = {
let file = db.parse(frange.file_id);
let file = db.parse(EditionedFileId::current_edition(frange.file_id));
file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast)
}?;
let comment_text_without_prefix = comment.text().strip_prefix(comment.prefix()).unwrap();

View file

@ -83,9 +83,8 @@ mod tests;
pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Match};
use crate::{errors::bail, matching::MatchFailureReason};
use hir::Semantics;
use ide_db::base_db::{FileId, FilePosition, FileRange};
use nohash_hasher::IntMap;
use hir::{FileRange, Semantics};
use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase};
use resolving::ResolvedRule;
use syntax::{ast, AstNode, SyntaxNode, TextRange};
use text_edit::TextEdit;
@ -116,21 +115,27 @@ pub struct MatchFinder<'db> {
sema: Semantics<'db, ide_db::RootDatabase>,
rules: Vec<ResolvedRule>,
resolution_scope: resolving::ResolutionScope<'db>,
restrict_ranges: Vec<FileRange>,
restrict_ranges: Vec<ide_db::FileRange>,
}
impl<'db> MatchFinder<'db> {
/// Constructs a new instance where names will be looked up as if they appeared at
/// `lookup_context`.
pub fn in_context(
db: &'db ide_db::RootDatabase,
lookup_context: FilePosition,
mut restrict_ranges: Vec<FileRange>,
db: &'db RootDatabase,
lookup_context: ide_db::FilePosition,
mut restrict_ranges: Vec<ide_db::FileRange>,
) -> Result<MatchFinder<'db>, SsrError> {
restrict_ranges.retain(|range| !range.range.is_empty());
let sema = Semantics::new(db);
let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context)
.ok_or_else(|| SsrError("no resolution scope for file".into()))?;
let file_id = sema
.attach_first_edition(lookup_context.file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(lookup_context.file_id));
let resolution_scope = resolving::ResolutionScope::new(
&sema,
hir::FilePosition { file_id, offset: lookup_context.offset },
)
.ok_or_else(|| SsrError("no resolution scope for file".into()))?;
Ok(MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges })
}
@ -143,7 +148,7 @@ impl<'db> MatchFinder<'db> {
{
MatchFinder::in_context(
db,
FilePosition { file_id: first_file_id, offset: 0.into() },
ide_db::FilePosition { file_id: first_file_id, offset: 0.into() },
vec![],
)
} else {
@ -166,12 +171,12 @@ impl<'db> MatchFinder<'db> {
}
/// Finds matches for all added rules and returns edits for all found matches.
pub fn edits(&self) -> IntMap<FileId, TextEdit> {
pub fn edits(&self) -> FxHashMap<FileId, TextEdit> {
use ide_db::base_db::SourceDatabaseExt;
let mut matches_by_file = IntMap::default();
let mut matches_by_file = FxHashMap::default();
for m in self.matches().matches {
matches_by_file
.entry(m.range.file_id)
.entry(m.range.file_id.file_id())
.or_insert_with(SsrMatches::default)
.matches
.push(m);
@ -218,11 +223,15 @@ impl<'db> MatchFinder<'db> {
/// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
/// them, while recording reasons why they don't match. This API is useful for command
/// line-based debugging where providing a range is difficult.
pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
pub fn debug_where_text_equal(
&self,
file_id: EditionedFileId,
snippet: &str,
) -> Vec<MatchDebugInfo> {
use ide_db::base_db::SourceDatabaseExt;
let file = self.sema.parse(file_id);
let mut res = Vec::new();
let file_text = self.sema.db.file_text(file_id);
let file_text = self.sema.db.file_text(file_id.into());
let mut remaining_text = &*file_text;
let mut base = 0;
let len = snippet.len() as u32;
@ -349,7 +358,7 @@ impl std::error::Error for SsrError {}
#[cfg(test)]
impl MatchDebugInfo {
pub(crate) fn match_failure_reason(&self) -> Option<&str> {
pub fn match_failure_reason(&self) -> Option<&str> {
self.matched.as_ref().err().map(|r| r.reason.as_str())
}
}

View file

@ -6,8 +6,8 @@ use crate::{
resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo},
SsrMatches,
};
use hir::{ImportPathConfig, Semantics};
use ide_db::{base_db::FileRange, FxHashMap};
use hir::{FileRange, ImportPathConfig, Semantics};
use ide_db::FxHashMap;
use std::{cell::Cell, iter::Peekable};
use syntax::{
ast::{self, AstNode, AstToken, HasGenericArgs},
@ -801,7 +801,12 @@ mod tests {
let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
let (db, position, selections) = crate::tests::single_file(input);
let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
let mut match_finder = MatchFinder::in_context(
&db,
position.into(),
selections.into_iter().map(Into::into).collect(),
)
.unwrap();
match_finder.add_rule(rule).unwrap();
let matches = match_finder.matches();
assert_eq!(matches.matches.len(), 1);
@ -810,7 +815,7 @@ mod tests {
let edits = match_finder.edits();
assert_eq!(edits.len(), 1);
let edit = &edits[&position.file_id];
let edit = &edits[&position.file_id.into()];
let mut after = input.to_owned();
edit.apply(&mut after);
assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");

View file

@ -1,7 +1,7 @@
//! This module is responsible for resolving paths within rules.
use hir::AsAssocItem;
use ide_db::{base_db::FilePosition, FxHashMap};
use ide_db::FxHashMap;
use parsing::Placeholder;
use syntax::{
ast::{self, HasGenericArgs},
@ -195,7 +195,7 @@ impl Resolver<'_, '_> {
impl<'db> ResolutionScope<'db> {
pub(crate) fn new(
sema: &hir::Semantics<'db, ide_db::RootDatabase>,
resolve_context: FilePosition,
resolve_context: hir::FilePosition,
) -> Option<ResolutionScope<'db>> {
use syntax::ast::AstNode;
let file = sema.parse(resolve_context.file_id);

View file

@ -5,11 +5,11 @@ use crate::{
resolving::{ResolvedPath, ResolvedPattern, ResolvedRule},
Match, MatchFinder,
};
use hir::FileRange;
use ide_db::{
base_db::{FileId, FileRange},
defs::Definition,
search::{SearchScope, UsageSearchResult},
FxHashSet,
EditionedFileId, FileId, FxHashSet,
};
use syntax::{ast, AstNode, SyntaxKind, SyntaxNode};
@ -136,14 +136,18 @@ impl MatchFinder<'_> {
// seems to get put into a single source root.
let mut files = Vec::new();
self.search_files_do(|file_id| {
files.push(file_id);
files.push(
self.sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id)),
);
});
SearchScope::files(&files)
}
fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
self.search_files_do(|file_id| {
let file = self.sema.parse(file_id);
let file = self.sema.parse_guess_edition(file_id);
let code = file.syntax();
self.slow_scan_node(code, rule, &None, matches_out);
})

View file

@ -1,7 +1,8 @@
use expect_test::{expect, Expect};
use hir::{FilePosition, FileRange};
use ide_db::{
base_db::{salsa::Durability, FileId, FilePosition, FileRange, SourceDatabaseExt},
FxHashSet,
base_db::{salsa::Durability, SourceDatabaseExt},
EditionedFileId, FxHashSet,
};
use test_utils::RangeOrOffset;
use triomphe::Arc;
@ -97,7 +98,12 @@ fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
let (db, position, selections) = single_file(input);
let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
let mut match_finder = MatchFinder::in_context(
&db,
position.into(),
selections.into_iter().map(Into::into).collect(),
)
.unwrap();
for rule in rules {
let rule: SsrRule = rule.parse().unwrap();
match_finder.add_rule(rule).unwrap();
@ -108,13 +114,13 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
}
// Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters
// stuff.
let mut actual = db.file_text(position.file_id).to_string();
edits[&position.file_id].apply(&mut actual);
let mut actual = db.file_text(position.file_id.into()).to_string();
edits[&position.file_id.into()].apply(&mut actual);
expected.assert_eq(&actual);
}
#[allow(clippy::print_stdout)]
fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: FileId, snippet: &str) {
fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: EditionedFileId, snippet: &str) {
let debug_info = match_finder.debug_where_text_equal(file_id, snippet);
println!(
"Match debug info: {} nodes had text exactly equal to '{}'",
@ -128,7 +134,12 @@ fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: FileId, snipp
fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
let (db, position, selections) = single_file(code);
let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
let mut match_finder = MatchFinder::in_context(
&db,
position.into(),
selections.into_iter().map(Into::into).collect(),
)
.unwrap();
match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
let matched_strings: Vec<String> =
match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
@ -140,7 +151,12 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
fn assert_no_match(pattern: &str, code: &str) {
let (db, position, selections) = single_file(code);
let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
let mut match_finder = MatchFinder::in_context(
&db,
position.into(),
selections.into_iter().map(Into::into).collect(),
)
.unwrap();
match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
let matches = match_finder.matches().flattened().matches;
if !matches.is_empty() {
@ -151,7 +167,12 @@ fn assert_no_match(pattern: &str, code: &str) {
fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
let (db, position, selections) = single_file(code);
let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
let mut match_finder = MatchFinder::in_context(
&db,
position.into(),
selections.into_iter().map(Into::into).collect(),
)
.unwrap();
match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
let mut reasons = Vec::new();
for d in match_finder.debug_where_text_equal(position.file_id, snippet) {
@ -452,7 +473,7 @@ fn match_struct_instantiation() {
fn match_path() {
let code = r#"
mod foo {
pub fn bar() {}
pub(crate) fn bar() {}
}
fn f() {foo::bar(42)}"#;
assert_matches("foo::bar", code, &["foo::bar"]);
@ -471,8 +492,8 @@ fn match_pattern() {
fn match_fully_qualified_fn_path() {
let code = r#"
mod a {
pub mod b {
pub fn c(_: i32) {}
pub(crate) mod b {
pub(crate) fn c(_: i32) {}
}
}
use a::b::c;
@ -487,8 +508,8 @@ fn match_fully_qualified_fn_path() {
fn match_resolved_type_name() {
let code = r#"
mod m1 {
pub mod m2 {
pub trait Foo<T> {}
pub(crate) mod m2 {
pub(crate) trait Foo<T> {}
}
}
mod m3 {
@ -508,9 +529,9 @@ fn type_arguments_within_path() {
cov_mark::check!(type_arguments_within_path);
let code = r#"
mod foo {
pub struct Bar<T> {t: T}
pub(crate) struct Bar<T> {t: T}
impl<T> Bar<T> {
pub fn baz() {}
pub(crate) fn baz() {}
}
}
fn f1() {foo::Bar::<i32>::baz();}
@ -659,9 +680,9 @@ fn replace_associated_trait_default_function_call() {
"Bar2::foo() ==>> Bar2::foo2()",
r#"
trait Foo { fn foo() {} }
pub struct Bar {}
pub(crate) struct Bar {}
impl Foo for Bar {}
pub struct Bar2 {}
pub(crate) struct Bar2 {}
impl Foo for Bar2 {}
impl Bar2 { fn foo2() {} }
fn main() {
@ -671,9 +692,9 @@ fn replace_associated_trait_default_function_call() {
"#,
expect![[r#"
trait Foo { fn foo() {} }
pub struct Bar {}
pub(crate) struct Bar {}
impl Foo for Bar {}
pub struct Bar2 {}
pub(crate) struct Bar2 {}
impl Foo for Bar2 {}
impl Bar2 { fn foo2() {} }
fn main() {
@ -691,9 +712,9 @@ fn replace_associated_trait_constant() {
"Bar2::VALUE ==>> Bar2::VALUE_2222",
r#"
trait Foo { const VALUE: i32; const VALUE_2222: i32; }
pub struct Bar {}
pub(crate) struct Bar {}
impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
pub struct Bar2 {}
pub(crate) struct Bar2 {}
impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
impl Bar2 { fn foo2() {} }
fn main() {
@ -703,9 +724,9 @@ fn replace_associated_trait_constant() {
"#,
expect![[r#"
trait Foo { const VALUE: i32; const VALUE_2222: i32; }
pub struct Bar {}
pub(crate) struct Bar {}
impl Foo for Bar { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
pub struct Bar2 {}
pub(crate) struct Bar2 {}
impl Foo for Bar2 { const VALUE: i32 = 1; const VALUE_2222: i32 = 2; }
impl Bar2 { fn foo2() {} }
fn main() {
@ -726,10 +747,10 @@ fn replace_path_in_different_contexts() {
"c::foo() ==>> c::bar()",
r#"
mod a {
pub mod b {$0
pub mod c {
pub fn foo() {}
pub fn bar() {}
pub(crate) mod b {$0
pub(crate) mod c {
pub(crate) fn foo() {}
pub(crate) fn bar() {}
fn f1() { foo() }
}
fn f2() { c::foo() }
@ -741,10 +762,10 @@ fn replace_path_in_different_contexts() {
"#,
expect![[r#"
mod a {
pub mod b {
pub mod c {
pub fn foo() {}
pub fn bar() {}
pub(crate) mod b {
pub(crate) mod c {
pub(crate) fn foo() {}
pub(crate) fn bar() {}
fn f1() { bar() }
}
fn f2() { c::bar() }
@ -763,15 +784,15 @@ fn replace_associated_function_with_generics() {
"c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()",
r#"
mod c {
pub struct Foo<T> {v: T}
impl<T> Foo<T> { pub fn new() {} }
pub(crate) struct Foo<T> {v: T}
impl<T> Foo<T> { pub(crate) fn new() {} }
fn f1() {
Foo::<i32>::new();
}
}
mod d {
pub struct Bar<T> {v: T}
impl<T> Bar<T> { pub fn default() {} }
pub(crate) struct Bar<T> {v: T}
impl<T> Bar<T> { pub(crate) fn default() {} }
fn f1() {
super::c::Foo::<i32>::new();
}
@ -779,15 +800,15 @@ fn replace_associated_function_with_generics() {
"#,
expect![[r#"
mod c {
pub struct Foo<T> {v: T}
impl<T> Foo<T> { pub fn new() {} }
pub(crate) struct Foo<T> {v: T}
impl<T> Foo<T> { pub(crate) fn new() {} }
fn f1() {
crate::d::Bar::<i32>::default();
}
}
mod d {
pub struct Bar<T> {v: T}
impl<T> Bar<T> { pub fn default() {} }
pub(crate) struct Bar<T> {v: T}
impl<T> Bar<T> { pub(crate) fn default() {} }
fn f1() {
Bar::<i32>::default();
}
@ -1029,14 +1050,14 @@ fn use_declaration_with_braces() {
assert_ssr_transform(
"foo::bar ==>> foo2::bar2",
r#"
mod foo { pub fn bar() {} pub fn baz() {} }
mod foo2 { pub fn bar2() {} }
mod foo { pub(crate) fn bar() {} pub(crate) fn baz() {} }
mod foo2 { pub(crate) fn bar2() {} }
use foo::{baz, bar};
fn main() { bar() }
"#,
expect![["
mod foo { pub fn bar() {} pub fn baz() {} }
mod foo2 { pub fn bar2() {} }
mod foo { pub(crate) fn bar() {} pub(crate) fn baz() {} }
mod foo2 { pub(crate) fn bar2() {} }
use foo::{baz, bar};
fn main() { foo2::bar2() }
"]],
@ -1266,9 +1287,9 @@ fn match_trait_method_call() {
// `Bar::foo` and `Bar2::foo` resolve to the same function. Make sure we only match if the type
// matches what's in the pattern. Also checks that we handle autoderef.
let code = r#"
pub struct Bar {}
pub struct Bar2 {}
pub trait Foo {
pub(crate) struct Bar {}
pub(crate) struct Bar2 {}
pub(crate) trait Foo {
fn foo(&self, _: i32) {}
}
impl Foo for Bar {}

View file

@ -1,9 +1,7 @@
use hir::{HasSource, InFile, InRealFile, Semantics};
use ide_db::{
base_db::{FileId, FilePosition, FileRange},
defs::Definition,
helpers::visit_file_defs,
FxHashSet, RootDatabase,
defs::Definition, helpers::visit_file_defs, FileId, FilePosition, FileRange, FxHashSet,
RootDatabase,
};
use itertools::Itertools;
use syntax::{ast::HasName, AstNode, TextRange};

View file

@ -13,7 +13,7 @@ pub(super) fn find_all_methods(
file_id: FileId,
) -> Vec<(TextRange, Option<TextRange>)> {
let sema = Semantics::new(db);
let source_file = sema.parse(file_id);
let source_file = sema.parse_guess_edition(file_id);
source_file.syntax().descendants().filter_map(method_range).collect()
}

View file

@ -7,9 +7,8 @@ use ide_db::{
defs::{Definition, NameClass, NameRefClass},
helpers::pick_best_token,
search::FileReference,
FxIndexMap, RootDatabase,
FileRange, FxIndexMap, RootDatabase,
};
use span::FileRange;
use syntax::{ast, AstNode, SyntaxKind::IDENT};
use crate::{goto_definition, FilePosition, NavigationTarget, RangeInfo, TryToNav};
@ -33,7 +32,7 @@ pub(crate) fn incoming_calls(
) -> Option<Vec<CallItem>> {
let sema = &Semantics::new(db);
let file = sema.parse(file_id);
let file = sema.parse_guess_edition(file_id);
let file = file.syntax();
let mut calls = CallLocations::default();
@ -63,9 +62,9 @@ pub(crate) fn incoming_calls(
});
if let Some(nav) = nav {
let range = sema.original_range(name.syntax());
calls.add(nav.call_site, range);
calls.add(nav.call_site, range.into());
if let Some(other) = nav.def_site {
calls.add(other, range);
calls.add(other, range.into());
}
}
}
@ -79,7 +78,7 @@ pub(crate) fn outgoing_calls(
FilePosition { file_id, offset }: FilePosition,
) -> Option<Vec<CallItem>> {
let sema = Semantics::new(db);
let file = sema.parse(file_id);
let file = sema.parse_guess_edition(file_id);
let file = file.syntax();
let token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
IDENT => 1,
@ -121,7 +120,7 @@ pub(crate) fn outgoing_calls(
Some(nav_target.into_iter().zip(iter::repeat(range)))
})
.flatten()
.for_each(|(nav, range)| calls.add(nav, range));
.for_each(|(nav, range)| calls.add(nav, range.into()));
Some(calls.into_items())
}
@ -144,7 +143,7 @@ impl CallLocations {
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use ide_db::base_db::FilePosition;
use ide_db::FilePosition;
use itertools::Itertools;
use crate::fixture;

View file

@ -137,7 +137,7 @@ pub(crate) fn external_docs(
sysroot: Option<&str>,
) -> Option<DocumentationLinks> {
let sema = &Semantics::new(db);
let file = sema.parse(file_id).syntax().clone();
let file = sema.parse_guess_edition(file_id).syntax().clone();
let token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
IDENT | INT_NUMBER | T![self] => 3,
T!['('] | T![')'] => 2,

View file

@ -3,10 +3,9 @@ use std::iter;
use expect_test::{expect, Expect};
use hir::Semantics;
use ide_db::{
base_db::{FilePosition, FileRange},
defs::Definition,
documentation::{Documentation, HasDocs},
RootDatabase,
FilePosition, FileRange, RootDatabase,
};
use itertools::Itertools;
use syntax::{ast, match_ast, AstNode, SyntaxNode};
@ -80,7 +79,7 @@ fn def_under_cursor(
position: &FilePosition,
) -> (Definition, Documentation) {
let (docs, def) = sema
.parse(position.file_id)
.parse_guess_edition(position.file_id)
.syntax()
.token_at_offset(position.offset)
.left_biased()

View file

@ -1,7 +1,7 @@
use hir::{DescendPreference, InFile, MacroFileIdExt, Semantics};
use ide_db::{
base_db::FileId, helpers::pick_best_token,
syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,
helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into, FileId,
RootDatabase,
};
use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T};
@ -25,7 +25,7 @@ pub struct ExpandedMacro {
// image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[]
pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
let sema = Semantics::new(db);
let file = sema.parse(position.file_id);
let file = sema.parse_guess_edition(position.file_id);
let tok = pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
SyntaxKind::IDENT => 1,

View file

@ -26,7 +26,7 @@ use crate::FileRange;
// image::https://user-images.githubusercontent.com/48062697/113020651-b42fc800-917a-11eb-8a4f-cf1a07859fac.gif[]
pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
let sema = Semantics::new(db);
let src = sema.parse(frange.file_id);
let src = sema.parse_guess_edition(frange.file_id);
try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range)
}

View file

@ -1,6 +1,6 @@
use ide_db::{
base_db::{CrateOrigin, FileId, SourceDatabase},
FxIndexSet, RootDatabase,
base_db::{CrateOrigin, SourceDatabase},
FileId, FxIndexSet, RootDatabase,
};
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]

View file

@ -10,7 +10,7 @@ pub(crate) fn file(ra_fixture: &str) -> (Analysis, FileId) {
let change_fixture = ChangeFixture::parse(ra_fixture);
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
(host.analysis(), change_fixture.files[0])
(host.analysis(), change_fixture.files[0].into())
}
/// Creates analysis from a multi-file fixture, returns positions marked with $0.
@ -21,7 +21,7 @@ pub(crate) fn position(ra_fixture: &str) -> (Analysis, FilePosition) {
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
(host.analysis(), FilePosition { file_id, offset })
(host.analysis(), FilePosition { file_id: file_id.into(), offset })
}
/// Creates analysis for a single file, returns range marked with a pair of $0.
@ -32,7 +32,7 @@ pub(crate) fn range(ra_fixture: &str) -> (Analysis, FileRange) {
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
let range = range_or_offset.expect_range();
(host.analysis(), FileRange { file_id, range })
(host.analysis(), FileRange { file_id: file_id.into(), range })
}
/// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0.
@ -42,7 +42,7 @@ pub(crate) fn range_or_position(ra_fixture: &str) -> (Analysis, FileId, RangeOrO
host.db.enable_proc_attr_macros();
host.db.apply_change(change_fixture.change);
let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
(host.analysis(), file_id, range_or_offset)
(host.analysis(), file_id.into(), range_or_offset)
}
/// Creates analysis from a multi-file fixture, returns positions marked with $0.
@ -58,12 +58,14 @@ pub(crate) fn annotations(ra_fixture: &str) -> (Analysis, FilePosition, Vec<(Fil
.files
.iter()
.flat_map(|&file_id| {
let file_text = host.analysis().file_text(file_id).unwrap();
let file_text = host.analysis().file_text(file_id.into()).unwrap();
let annotations = extract_annotations(&file_text);
annotations.into_iter().map(move |(range, data)| (FileRange { file_id, range }, data))
annotations
.into_iter()
.map(move |(range, data)| (FileRange { file_id: file_id.into(), range }, data))
})
.collect();
(host.analysis(), FilePosition { file_id, offset }, annotations)
(host.analysis(), FilePosition { file_id: file_id.into(), offset }, annotations)
}
/// Creates analysis from a multi-file fixture with annotations without $0
@ -77,9 +79,11 @@ pub(crate) fn annotations_without_marker(ra_fixture: &str) -> (Analysis, Vec<(Fi
.files
.iter()
.flat_map(|&file_id| {
let file_text = host.analysis().file_text(file_id).unwrap();
let file_text = host.analysis().file_text(file_id.into()).unwrap();
let annotations = extract_annotations(&file_text);
annotations.into_iter().map(move |(range, data)| (FileRange { file_id, range }, data))
annotations
.into_iter()
.map(move |(range, data)| (FileRange { file_id: file_id.into(), range }, data))
})
.collect();
(host.analysis(), annotations)

Some files were not shown because too many files have changed in this diff Show more