Auto merge of #3635 - RalfJung:rustup, r=RalfJung

Rustup
bors 2024-05-27 14:45:56 +00:00
commit 3d6f7fb62a
92 changed files with 1971 additions and 1318 deletions


@ -12,9 +12,6 @@ use std::sync::OnceLock;
 use rustc_hash::FxHashMap;
-/// Ignored attribute namespaces used by tools.
-pub const TOOL_MODULES: &[&str] = &["rustfmt", "clippy"];
 pub struct BuiltinAttribute {
     pub name: &'static str,
     pub template: AttributeTemplate,


@ -17,7 +17,7 @@ use syntax::{
         self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName,
         RangeItem, SlicePatComponents,
     },
-    AstNode, AstPtr, SyntaxNodePtr,
+    AstNode, AstPtr, AstToken as _, SyntaxNodePtr,
 };
 use triomphe::Arc;
@ -1577,7 +1577,13 @@ impl ExprCollector<'_> {
             });
         });
         let template = f.template();
-        let fmt_snippet = template.as_ref().map(ToString::to_string);
+        let fmt_snippet = template.as_ref().and_then(|it| match it {
+            ast::Expr::Literal(literal) => match literal.kind() {
+                ast::LiteralKind::String(s) => Some(s.text().to_owned()),
+                _ => None,
+            },
+            _ => None,
+        });
         let mut mappings = vec![];
         let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) {
             Some((s, is_direct_literal)) => format_args::parse(
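
The `fmt_snippet` change above means a reusable snippet is only captured when the format template is a plain string literal; any other template expression (for example a macro call) now yields `None`. A minimal standalone sketch of that shape, using stand-in enums rather than the real `ast` types:

#[derive(Debug)]
enum Expr {
    Literal(Literal),
    MacroCall(&'static str),
}

#[derive(Debug)]
enum Literal {
    String(String),
    Int(i64),
}

fn fmt_snippet(template: Option<&Expr>) -> Option<String> {
    // Mirror of the new and_then/match shape: only a string literal
    // produces a snippet; everything else is dropped.
    template.and_then(|it| match it {
        Expr::Literal(Literal::String(s)) => Some(s.clone()),
        _ => None,
    })
}

fn main() {
    let lit = Expr::Literal(Literal::String("hello {name}".into()));
    let mac = Expr::MacroCall("concat!(\"a\", \"b\")");
    assert_eq!(fmt_snippet(Some(&lit)).as_deref(), Some("hello {name}"));
    assert_eq!(fmt_snippet(Some(&mac)), None);
}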


@ -150,7 +150,7 @@ fn desugar_builtin_format_args() {
 fn main() {
     let are = "are";
     let count = 10;
-    builtin#format_args("hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!");
+    builtin#format_args("\u{1b}hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!");
 }
 "#,
     );
@ -161,7 +161,7 @@ fn main() {
     let count = 10;
     builtin#lang(Arguments::new_v1_formatted)(
         &[
-            "hello ", " ", " friends, we ", " ", "",
+            "\u{1b}hello ", " ", " friends, we ", " ", "",
         ],
         &[
             builtin#lang(Argument::new_display)(


@ -528,3 +528,65 @@ fn f() {$0
"#]], "#]],
) )
} }
#[test]
fn resolve_extern_prelude_in_block() {
check_at(
r#"
//- /main.rs crate:main deps:core
fn main() {
mod f {
use core::S;
$0
}
}
//- /core.rs crate:core
pub struct S;
"#,
expect![[r#"
block scope
f: t
block scope::f
S: ti vi
crate
main: v
"#]],
)
}
#[test]
fn shadow_extern_prelude_in_block() {
check_at(
r#"
//- /main.rs crate:main deps:core
fn main() {
mod core { pub struct S; }
{
fn inner() {} // forces a block def map
use core::S; // should resolve to the local one
$0
}
}
//- /core.rs crate:core
pub const S;
"#,
expect![[r#"
block scope
S: ti vi
inner: v
block scope
core: t
block scope::core
S: t v
crate
main: v
"#]],
)
}
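
A plain-Rust illustration of the rule these two new tests pin down (assuming the usual shadowing behaviour of rustc; this is not rust-analyzer code): an item with the same name as an extern-prelude crate wins when resolving `use` paths. The tests above exercise the same rule inside block scopes.

// A locally declared `mod core` shadows the `core` crate from the
// extern prelude, so this `use` resolves to the struct below.
mod core {
    pub struct S;
}

use core::S;

fn main() {
    let _value: S = S;
}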

File diff suppressed because it is too large.


@ -175,7 +175,13 @@ pub(crate) fn parse(
     mut synth: impl FnMut(Name) -> ExprId,
     mut record_usage: impl FnMut(Name, Option<TextRange>),
 ) -> FormatArgs {
-    let text = s.text_without_quotes();
+    let Ok(text) = s.value() else {
+        return FormatArgs {
+            template: Default::default(),
+            arguments: args.finish(),
+            orphans: vec![],
+        };
+    };
     let str_style = match s.quote_offsets() {
         Some(offsets) => {
             let raw = usize::from(offsets.quotes.0.len()) - 1;
@ -186,7 +192,7 @@ pub(crate) fn parse(
     };
     let mut parser =
-        parse::Parser::new(text, str_style, fmt_snippet, false, parse::ParseMode::Format);
+        parse::Parser::new(&text, str_style, fmt_snippet, false, parse::ParseMode::Format);
     let mut pieces = Vec::new();
     while let Some(piece) = parser.next() {
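
The switch from `text_without_quotes()` to `s.value()` means the parser now sees the *unescaped* string value (and bails out early if the literal contains invalid escapes), which is also why the test fixture above gained a `\u{1b}` escape. A small standalone illustration of the difference:

fn main() {
    // What the raw source text of the literal looks like (8 characters)...
    let source_text = r"\u{1b}hi";
    // ...versus the unescaped value the parser now receives (3 characters).
    let value = "\u{1b}hi";

    assert_eq!(source_text.chars().count(), 8);
    assert_eq!(value.chars().count(), 3);
    println!("source: {source_text:?}, value: {value:?}");
}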


@ -1,13 +1,12 @@
 //! A map of all publicly exported items in a crate.
 
-use std::{fmt, hash::BuildHasherDefault};
+use std::fmt;
 
 use base_db::CrateId;
 use fst::{raw::IndexedValue, Automaton, Streamer};
 use hir_expand::name::Name;
-use indexmap::IndexMap;
 use itertools::Itertools;
-use rustc_hash::{FxHashSet, FxHasher};
+use rustc_hash::FxHashSet;
 use smallvec::SmallVec;
 use stdx::{format_to, TupleExt};
 use triomphe::Arc;
@ -17,7 +16,7 @@ use crate::{
     item_scope::{ImportOrExternCrate, ItemInNs},
     nameres::DefMap,
     visibility::Visibility,
-    AssocItemId, ModuleDefId, ModuleId, TraitId,
+    AssocItemId, FxIndexMap, ModuleDefId, ModuleId, TraitId,
 };
 /// Item import details stored in the `ImportMap`.
@ -58,7 +57,6 @@ enum IsTraitAssocItem {
     No,
 }
-type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
 type ImportMapIndex = FxIndexMap<ItemInNs, (SmallVec<[ImportInfo; 1]>, IsTraitAssocItem)>;
 impl ImportMap {
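
For reference, the `FxIndexMap` alias that several files defined locally (and that this commit centralizes in `hir_def`) combines `indexmap`'s insertion-ordered map with the fast `FxHasher`. A standalone sketch of why it is preferred over `FxHashMap` where deterministic order matters, such as the extern prelude below (requires the `indexmap` and `rustc-hash` crates):

use std::hash::BuildHasherDefault;

use indexmap::IndexMap;
use rustc_hash::FxHasher;

type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;

fn main() {
    let mut map: FxIndexMap<&str, u32> = FxIndexMap::default();
    map.insert("beta", 2);
    map.insert("alpha", 1);

    // Iteration follows insertion order, unlike HashMap/FxHashMap,
    // so anything derived from the map is deterministic.
    let keys: Vec<_> = map.keys().copied().collect();
    assert_eq!(keys, ["beta", "alpha"]);
}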


@ -295,7 +295,7 @@ impl ItemScope {
     pub(crate) fn names_of<T>(
         &self,
         item: ItemInNs,
-        mut cb: impl FnMut(&Name, Visibility, bool) -> Option<T>,
+        mut cb: impl FnMut(&Name, Visibility, /*declared*/ bool) -> Option<T>,
     ) -> Option<T> {
         match item {
             ItemInNs::Macros(def) => self


@ -106,6 +106,18 @@ use crate::{
     },
 };
 
+type FxIndexMap<K, V> =
+    indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
+
+/// A wrapper around two booleans, [`ImportPathConfig::prefer_no_std`] and [`ImportPathConfig::prefer_prelude`].
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
+pub struct ImportPathConfig {
+    /// If true, prefer to unconditionally use imports of the `core` and `alloc` crate
+    /// over the std.
+    pub prefer_no_std: bool,
+    /// If true, prefer import paths containing a prelude module.
+    pub prefer_prelude: bool,
+}
+
 #[derive(Debug)]
 pub struct ItemLoc<N: ItemTreeNode> {
     pub container: ModuleId,
@ -455,6 +467,26 @@ impl ModuleId {
     pub fn is_block_module(self) -> bool {
         self.block.is_some() && self.local_id == DefMap::ROOT
     }
+
+    pub fn is_within_block(self) -> bool {
+        self.block.is_some()
+    }
+
+    pub fn as_crate_root(&self) -> Option<CrateRootModuleId> {
+        if self.local_id == DefMap::ROOT && self.block.is_none() {
+            Some(CrateRootModuleId { krate: self.krate })
+        } else {
+            None
+        }
+    }
+
+    pub fn derive_crate_root(&self) -> CrateRootModuleId {
+        CrateRootModuleId { krate: self.krate }
+    }
+
+    fn is_crate_root(&self) -> bool {
+        self.local_id == DefMap::ROOT && self.block.is_none()
+    }
 }
 impl PartialEq<CrateRootModuleId> for ModuleId {
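
`ImportPathConfig` replaces the pair of positional `bool`s that callers used to thread through every path-finding call; named fields make call sites self-describing and harder to get wrong (the old `auto_import` call further down actually passed `prefer_no_std` twice). A minimal sketch of how callers now build it, with the struct copied from the diff and the surrounding API left out:

#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
pub struct ImportPathConfig {
    pub prefer_no_std: bool,
    pub prefer_prelude: bool,
}

fn main() {
    // One named-field value instead of two easily swapped booleans.
    let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true };
    assert!(!cfg.prefer_no_std && cfg.prefer_prelude);
    println!("{cfg:?}");
}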


@ -81,9 +81,17 @@ use crate::{
     per_ns::PerNs,
     visibility::{Visibility, VisibilityExplicitness},
     AstId, BlockId, BlockLoc, CrateRootModuleId, EnumId, EnumVariantId, ExternCrateId, FunctionId,
-    LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId,
+    FxIndexMap, LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId,
 };
 
+const PREDEFINED_TOOLS: &[SmolStr] = &[
+    SmolStr::new_static("clippy"),
+    SmolStr::new_static("rustfmt"),
+    SmolStr::new_static("diagnostic"),
+    SmolStr::new_static("miri"),
+    SmolStr::new_static("rust_analyzer"),
+];
+
 /// Contains the results of (early) name resolution.
 ///
 /// A `DefMap` stores the module tree and the definitions that are in scope in every module after
@ -129,7 +137,7 @@ pub struct DefMap {
 #[derive(Clone, Debug, PartialEq, Eq)]
 struct DefMapCrateData {
     /// The extern prelude which contains all root modules of external crates that are in scope.
-    extern_prelude: FxHashMap<Name, (CrateRootModuleId, Option<ExternCrateId>)>,
+    extern_prelude: FxIndexMap<Name, (CrateRootModuleId, Option<ExternCrateId>)>,
     /// Side table for resolving derive helpers.
     exported_derives: FxHashMap<MacroDefId, Box<[Name]>>,
@ -155,12 +163,12 @@ struct DefMapCrateData {
 impl DefMapCrateData {
     fn new(edition: Edition) -> Self {
         Self {
-            extern_prelude: FxHashMap::default(),
+            extern_prelude: FxIndexMap::default(),
             exported_derives: FxHashMap::default(),
             fn_proc_macro_mapping: FxHashMap::default(),
             proc_macro_loading_error: None,
             registered_attrs: Vec::new(),
-            registered_tools: Vec::new(),
+            registered_tools: PREDEFINED_TOOLS.into(),
             unstable_features: FxHashSet::default(),
             rustc_coherence_is_core: false,
             no_core: false,
@ -578,7 +586,8 @@ impl DefMap {
     pub(crate) fn extern_prelude(
         &self,
-    ) -> impl Iterator<Item = (&Name, (CrateRootModuleId, Option<ExternCrateId>))> + '_ {
+    ) -> impl DoubleEndedIterator<Item = (&Name, (CrateRootModuleId, Option<ExternCrateId>))> + '_
+    {
         self.data.extern_prelude.iter().map(|(name, &def)| (name, def))
     }


@ -10,7 +10,7 @@ use syntax::{ast, SmolStr};
 use triomphe::Arc;
 use crate::{
-    attr::builtin::{find_builtin_attr_idx, TOOL_MODULES},
+    attr::builtin::find_builtin_attr_idx,
     db::DefDatabase,
     item_scope::BuiltinShadowMode,
     nameres::path_resolution::ResolveMode,
@ -82,8 +82,7 @@ impl DefMap {
         let name = name.to_smol_str();
         let pred = |n: &_| *n == name;
-        let registered = self.data.registered_tools.iter().map(SmolStr::as_str);
-        let is_tool = TOOL_MODULES.iter().copied().chain(registered).any(pred);
+        let is_tool = self.data.registered_tools.iter().map(SmolStr::as_str).any(pred);
         // FIXME: tool modules can be shadowed by actual modules
         if is_tool {
             return true;
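
With the predefined tool namespaces seeded into `registered_tools` up front (the `PREDEFINED_TOOLS` constant above), the attribute-resolution check shrinks to a single `any` over one list. A standalone sketch of that lookup shape, using plain strings instead of `SmolStr`:

const PREDEFINED_TOOLS: &[&str] = &["clippy", "rustfmt", "diagnostic", "miri", "rust_analyzer"];

fn is_tool(registered_tools: &[String], name: &str) -> bool {
    // One list, one pass: no more chaining a builtin list at lookup time.
    registered_tools.iter().any(|tool| tool == name)
}

fn main() {
    // Seed with the predefined tools, then extend from `#![register_tool(...)]`.
    let mut registered: Vec<String> =
        PREDEFINED_TOOLS.iter().map(|tool| tool.to_string()).collect();
    registered.push("my_custom_tool".to_string());

    assert!(is_tool(&registered, "clippy"));
    assert!(is_tool(&registered, "my_custom_tool"));
    assert!(!is_tool(&registered, "serde"));
}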


@ -395,6 +395,8 @@ impl DefCollector<'_> {
             .cfg()
             .map_or(true, |cfg| self.cfg_options.check(&cfg) != Some(false));
         if is_cfg_enabled {
+            self.inject_prelude();
+
             ModCollector {
                 def_collector: self,
                 macro_depth: 0,


@ -221,7 +221,7 @@ impl DefMap {
                 None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
             };
             tracing::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
-            self.resolve_name_in_crate_root_or_extern_prelude(db, segment)
+            self.resolve_name_in_crate_root_or_extern_prelude(db, original_module, segment)
         }
         PathKind::Plain => {
             let (_, segment) = match segments.next() {
@ -470,9 +470,9 @@ impl DefMap {
         };
 
         let extern_prelude = || {
-            if self.block.is_some() {
-                // Don't resolve extern prelude in block `DefMap`s, defer it to the crate def map so
-                // that blocks can properly shadow them
+            if self.block.is_some() && module == DefMap::ROOT {
+                // Don't resolve extern prelude in pseudo-modules of blocks, because
+                // they might been shadowed by local names.
                 return PerNs::none();
             }
             self.data.extern_prelude.get(name).map_or(PerNs::none(), |&(it, extern_crate)| {
@ -505,6 +505,7 @@ impl DefMap {
     fn resolve_name_in_crate_root_or_extern_prelude(
         &self,
         db: &dyn DefDatabase,
+        module: LocalModuleId,
         name: &Name,
     ) -> PerNs {
         let from_crate_root = match self.block {
@ -515,8 +516,8 @@ impl DefMap {
             None => self[Self::ROOT].scope.get(name),
         };
         let from_extern_prelude = || {
-            if self.block.is_some() {
-                // Don't resolve extern prelude in block `DefMap`s.
+            if self.block.is_some() && module == DefMap::ROOT {
+                // Don't resolve extern prelude in pseudo-module of a block.
                 return PerNs::none();
             }
             self.data.extern_prelude.get(name).copied().map_or(
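
The two `resolve_*` changes above refine when extern-prelude lookup is skipped: only in the root pseudo-module of a block `DefMap`, so that items declared in the block can shadow extern crates while modules nested inside the block still see the prelude. A tiny stand-in sketch of the predicate (types and constants here are stand-ins, not the real `DefMap` API):

type LocalModuleId = usize;
const ROOT: LocalModuleId = 0;

fn skip_extern_prelude(is_block_def_map: bool, module: LocalModuleId) -> bool {
    // New rule: defer to local names only in the block scope itself.
    is_block_def_map && module == ROOT
}

fn main() {
    assert!(skip_extern_prelude(true, ROOT));   // the block scope defers to its own items
    assert!(!skip_extern_prelude(true, 1));     // `mod f { ... }` inside a block does not
    assert!(!skip_extern_prelude(false, ROOT)); // crate-level DefMaps never skip
}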


@ -1,12 +1,11 @@
 //! Name resolution façade.
 
-use std::{fmt, hash::BuildHasherDefault, iter, mem};
+use std::{fmt, iter, mem};
 use base_db::CrateId;
 use hir_expand::{
     name::{name, Name},
     MacroDefId,
 };
-use indexmap::IndexMap;
 use intern::Interned;
 use rustc_hash::FxHashSet;
 use smallvec::{smallvec, SmallVec};
@ -27,10 +26,10 @@ use crate::{
     type_ref::LifetimeRef,
     visibility::{RawVisibility, Visibility},
     AdtId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId,
-    ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId,
-    ItemContainerId, ItemTreeLoc, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId,
-    MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId,
-    TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId,
+    ExternBlockId, ExternCrateId, FunctionId, FxIndexMap, GenericDefId, GenericParamId, HasModule,
+    ImplId, ItemContainerId, ItemTreeLoc, LifetimeParamId, LocalModuleId, Lookup, Macro2Id,
+    MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId,
+    TraitId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId,
 };
 #[derive(Debug, Clone)]
@ -957,7 +956,6 @@ fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option<ImportOrExternCrate>)> {
     Some((res, import))
 }
-type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
 #[derive(Default)]
 struct ScopeNames {
     map: FxIndexMap<Name, SmallVec<[ScopeDef; 1]>>,


@ -13,7 +13,7 @@ use either::Either;
 use hir_def::{
     data::adt::VariantData,
     db::DefDatabase,
-    find_path,
+    find_path::{self, PrefixKind},
     generics::{TypeOrConstParamData, TypeParamProvenance},
     item_scope::ItemInNs,
     lang_item::{LangItem, LangItemTarget},
@ -21,7 +21,8 @@ use hir_def::{
     path::{Path, PathKind},
     type_ref::{TraitBoundModifier, TypeBound, TypeRef},
     visibility::Visibility,
-    HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId, TraitId,
+    HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId,
+    TraitId,
 };
 use hir_expand::name::Name;
 use intern::{Internable, Interned};
@ -999,8 +1000,9 @@ impl HirDisplay for Ty {
                     db.upcast(),
                     ItemInNs::Types((*def_id).into()),
                     module_id,
+                    PrefixKind::Plain,
                     false,
-                    true,
+                    ImportPathConfig { prefer_no_std: false, prefer_prelude: true },
                 ) {
                     write!(f, "{}", path.display(f.db.upcast()))?;
                 } else {


@ -933,8 +933,24 @@ impl InferenceContext<'_> {
         let prev_ret_coercion =
             mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone())));
 
+        // FIXME: We should handle async blocks like we handle closures
+        let expected = &Expectation::has_type(ret_ty);
         let (_, inner_ty) = self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
-            this.infer_block(tgt_expr, *id, statements, *tail, None, &Expectation::has_type(ret_ty))
+            let ty = this.infer_block(tgt_expr, *id, statements, *tail, None, expected);
+            if let Some(target) = expected.only_has_type(&mut this.table) {
+                match this.coerce(Some(tgt_expr), &ty, &target) {
+                    Ok(res) => res,
+                    Err(_) => {
+                        this.result.type_mismatches.insert(
+                            tgt_expr.into(),
+                            TypeMismatch { expected: target.clone(), actual: ty.clone() },
+                        );
+                        target
+                    }
+                }
+            } else {
+                ty
+            }
         });
 
         self.diverges = prev_diverges;
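
The new async-block handling follows a common inference pattern: try to coerce the inferred body type to the expected return type, and if that fails, record a type mismatch but keep going with the expected type. A simplified standalone sketch of that control flow with stand-in types (not the real `InferenceContext`):

#[derive(Debug, Clone, PartialEq)]
struct Ty(&'static str);

#[derive(Debug)]
struct TypeMismatch {
    expected: Ty,
    actual: Ty,
}

// Stand-in coercion: succeeds only on exact equality.
fn coerce(actual: &Ty, expected: &Ty) -> Result<Ty, ()> {
    if actual == expected {
        Ok(expected.clone())
    } else {
        Err(())
    }
}

fn check_block(inferred: Ty, expected: Option<Ty>, mismatches: &mut Vec<TypeMismatch>) -> Ty {
    match expected {
        Some(target) => match coerce(&inferred, &target) {
            Ok(ty) => ty,
            Err(()) => {
                // Record the mismatch, then continue with the expected type
                // so the rest of inference stays usable.
                mismatches.push(TypeMismatch { expected: target.clone(), actual: inferred });
                target
            }
        },
        None => inferred,
    }
}

fn main() {
    let mut mismatches = Vec::new();
    let ty = check_block(Ty("i32"), Some(Ty("u32")), &mut mismatches);
    assert_eq!(ty, Ty("u32"));
    println!("recorded mismatches: {mismatches:?}");
}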


@ -797,19 +797,22 @@ impl<'a> InferenceTable<'a> {
             })
             .build();
 
-        let projection = {
-            let b = TyBuilder::subst_for_def(self.db, fn_once_trait, None);
-            if b.remaining() != 2 {
-                return None;
-            }
-            let fn_once_subst = b.push(ty.clone()).push(arg_ty).build();
-
-            TyBuilder::assoc_type_projection(self.db, output_assoc_type, Some(fn_once_subst))
-                .build()
+        let b = TyBuilder::trait_ref(self.db, fn_once_trait);
+        if b.remaining() != 2 {
+            return None;
+        }
+        let mut trait_ref = b.push(ty.clone()).push(arg_ty).build();
+
+        let projection = {
+            TyBuilder::assoc_type_projection(
+                self.db,
+                output_assoc_type,
+                Some(trait_ref.substitution.clone()),
+            )
+            .build()
         };
 
         let trait_env = self.trait_env.env.clone();
-        let mut trait_ref = projection.trait_ref(self.db);
         let obligation = InEnvironment {
             goal: trait_ref.clone().cast(Interner),
             environment: trait_env.clone(),


@ -1,7 +1,7 @@
 use chalk_ir::{AdtId, TyKind};
 use either::Either;
 use hir_def::db::DefDatabase;
-use project_model::target_data_layout::RustcDataLayoutConfig;
+use project_model::{target_data_layout::RustcDataLayoutConfig, Sysroot};
 use rustc_hash::FxHashMap;
 use test_fixture::WithFixture;
 use triomphe::Arc;
@ -17,7 +17,7 @@ mod closure;
 fn current_machine_data_layout() -> String {
     project_model::target_data_layout::get(
-        RustcDataLayoutConfig::Rustc(None),
+        RustcDataLayoutConfig::Rustc(&Sysroot::empty()),
         None,
         &FxHashMap::default(),
     )


@ -570,6 +570,10 @@ impl CallableSig {
         }
     }
 
+    pub fn abi(&self) -> FnAbi {
+        self.abi
+    }
+
     pub fn params(&self) -> &[Ty] {
         &self.params_and_return[0..self.params_and_return.len() - 1]
     }
@ -892,20 +896,16 @@ where
     Canonical { value, binders: chalk_ir::CanonicalVarKinds::from_iter(Interner, kinds) }
 }
 
-pub fn callable_sig_from_fnonce(
-    mut self_ty: &Ty,
-    env: Arc<TraitEnvironment>,
+pub fn callable_sig_from_fn_trait(
+    self_ty: &Ty,
+    trait_env: Arc<TraitEnvironment>,
     db: &dyn HirDatabase,
-) -> Option<CallableSig> {
-    if let Some((ty, _, _)) = self_ty.as_reference() {
-        // This will happen when it implements fn or fn mut, since we add a autoborrow adjustment
-        self_ty = ty;
-    }
-    let krate = env.krate;
+) -> Option<(FnTrait, CallableSig)> {
+    let krate = trait_env.krate;
     let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?;
     let output_assoc_type = db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
 
-    let mut table = InferenceTable::new(db, env);
+    let mut table = InferenceTable::new(db, trait_env.clone());
     let b = TyBuilder::trait_ref(db, fn_once_trait);
     if b.remaining() != 2 {
         return None;
@ -915,23 +915,56 @@ pub fn callable_sig_from_fnonce(
     // - Self: FnOnce<?args_ty>
     // - <Self as FnOnce<?args_ty>>::Output == ?ret_ty
     let args_ty = table.new_type_var();
-    let trait_ref = b.push(self_ty.clone()).push(args_ty.clone()).build();
+    let mut trait_ref = b.push(self_ty.clone()).push(args_ty.clone()).build();
     let projection = TyBuilder::assoc_type_projection(
         db,
         output_assoc_type,
         Some(trait_ref.substitution.clone()),
     )
     .build();
 
-    table.register_obligation(trait_ref.cast(Interner));
-    let ret_ty = table.normalize_projection_ty(projection);
-
-    let ret_ty = table.resolve_completely(ret_ty);
-    let args_ty = table.resolve_completely(args_ty);
-
-    let params =
-        args_ty.as_tuple()?.iter(Interner).map(|it| it.assert_ty_ref(Interner)).cloned().collect();
-
-    Some(CallableSig::from_params_and_return(params, ret_ty, false, Safety::Safe, FnAbi::RustCall))
+    let block = trait_env.block;
+    let trait_env = trait_env.env.clone();
+    let obligation =
+        InEnvironment { goal: trait_ref.clone().cast(Interner), environment: trait_env.clone() };
+    let canonical = table.canonicalize(obligation.clone());
+    if db.trait_solve(krate, block, canonical.cast(Interner)).is_some() {
+        table.register_obligation(obligation.goal);
+        let return_ty = table.normalize_projection_ty(projection);
+        for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] {
+            let fn_x_trait = fn_x.get_id(db, krate)?;
+            trait_ref.trait_id = to_chalk_trait_id(fn_x_trait);
+            let obligation: chalk_ir::InEnvironment<chalk_ir::Goal<Interner>> = InEnvironment {
+                goal: trait_ref.clone().cast(Interner),
+                environment: trait_env.clone(),
+            };
+            let canonical = table.canonicalize(obligation.clone());
+            if db.trait_solve(krate, block, canonical.cast(Interner)).is_some() {
+                let ret_ty = table.resolve_completely(return_ty);
+                let args_ty = table.resolve_completely(args_ty);
+                let params = args_ty
+                    .as_tuple()?
+                    .iter(Interner)
+                    .map(|it| it.assert_ty_ref(Interner))
+                    .cloned()
+                    .collect();
+                return Some((
+                    fn_x,
+                    CallableSig::from_params_and_return(
+                        params,
+                        ret_ty,
+                        false,
+                        Safety::Safe,
+                        FnAbi::RustCall,
+                    ),
+                ));
+            }
+        }
+        unreachable!("It should at least implement FnOnce at this point");
+    } else {
+        None
+    }
 }
 
 struct PlaceholderCollector<'db> {
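
`callable_sig_from_fn_trait` now probes `Fn`, then `FnMut`, then `FnOnce`, returning the first trait the receiver actually implements together with its signature; the `unreachable!` is justified because anything that solved the initial `FnOnce` obligation implements at least `FnOnce`. As a reminder of the hierarchy that ordering relies on, a small plain-Rust example (not rust-analyzer code):

// Every `Fn` closure is also `FnMut` and `FnOnce`; every `FnMut` closure is
// also `FnOnce`. Probing Fn -> FnMut -> FnOnce therefore reports the most
// capable trait first.
fn call_once<F: FnOnce() -> i32>(f: F) -> i32 {
    f()
}

fn main() {
    let shared = 1;
    let fn_closure = move || shared; // Fn + FnMut + FnOnce

    let mut counter = 0;
    let fn_mut_closure = move || {   // FnMut + FnOnce, not Fn
        counter += 1;
        counter
    };

    let owned = String::from("three");
    let fn_once_closure = move || owned.into_bytes().len() as i32; // only FnOnce

    assert_eq!(call_once(fn_closure), 1);
    assert_eq!(call_once(fn_mut_closure), 1);
    assert_eq!(call_once(fn_once_closure), 5);
}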


@ -1,5 +1,6 @@
 //! Trait solving using Chalk.
 
+use core::fmt;
 use std::env::var;
 
 use chalk_ir::{fold::TypeFoldable, DebruijnIndex, GoalData};
@ -209,7 +210,25 @@ pub enum FnTrait {
     Fn,
 }
 
+impl fmt::Display for FnTrait {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            FnTrait::FnOnce => write!(f, "FnOnce"),
+            FnTrait::FnMut => write!(f, "FnMut"),
+            FnTrait::Fn => write!(f, "Fn"),
+        }
+    }
+}
+
 impl FnTrait {
+    pub const fn function_name(&self) -> &'static str {
+        match self {
+            FnTrait::FnOnce => "call_once",
+            FnTrait::FnMut => "call_mut",
+            FnTrait::Fn => "call",
+        }
+    }
+
     const fn lang_item(self) -> LangItem {
         match self {
             FnTrait::FnOnce => LangItem::FnOnce,
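
The two additions to `FnTrait` are small but feed the new callable handling above: `Display` lets a diagnostic or label name the trait, and `function_name` gives the lang-item method a call through that trait lowers to. A self-contained copy with a trivial usage (the enum here is a stand-in for the real one):

use std::fmt;

#[derive(Clone, Copy)]
enum FnTrait {
    FnOnce,
    FnMut,
    Fn,
}

impl fmt::Display for FnTrait {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            FnTrait::FnOnce => write!(f, "FnOnce"),
            FnTrait::FnMut => write!(f, "FnMut"),
            FnTrait::Fn => write!(f, "Fn"),
        }
    }
}

impl FnTrait {
    const fn function_name(self) -> &'static str {
        match self {
            FnTrait::FnOnce => "call_once",
            FnTrait::FnMut => "call_mut",
            FnTrait::Fn => "call",
        }
    }
}

fn main() {
    // e.g. a completion label like "impl FnMut, called via call_mut"
    let t = FnTrait::FnMut;
    println!("impl {t}, called via {}", t.function_name());
}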


@ -35,7 +35,7 @@ pub mod term_search;
mod display; mod display;
use std::{iter, mem::discriminant, ops::ControlFlow}; use std::{mem::discriminant, ops::ControlFlow};
use arrayvec::ArrayVec; use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateId, CrateOrigin, FileId}; use base_db::{CrateDisplayName, CrateId, CrateOrigin, FileId};
@ -52,7 +52,6 @@ use hir_def::{
path::ImportAlias, path::ImportAlias,
per_ns::PerNs, per_ns::PerNs,
resolver::{HasResolver, Resolver}, resolver::{HasResolver, Resolver},
src::HasSource as _,
AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId,
EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule, EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule,
ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, MacroExpander, ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, MacroExpander,
@ -69,7 +68,7 @@ use hir_ty::{
diagnostics::BodyValidationDiagnostic, diagnostics::BodyValidationDiagnostic,
error_lifetime, known_const_to_ast, error_lifetime, known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding}, layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
method_resolution::{self, TyFingerprint}, method_resolution::{self},
mir::{interpret_mir, MutBorrowKind}, mir::{interpret_mir, MutBorrowKind},
primitive::UintTy, primitive::UintTy,
traits::FnTrait, traits::FnTrait,
@ -100,6 +99,7 @@ pub use crate::{
VisibleTraits, VisibleTraits,
}, },
}; };
pub use hir_ty::method_resolution::TyFingerprint;
// Be careful with these re-exports. // Be careful with these re-exports.
// //
@ -123,6 +123,7 @@ pub use {
per_ns::Namespace, per_ns::Namespace,
type_ref::{Mutability, TypeRef}, type_ref::{Mutability, TypeRef},
visibility::Visibility, visibility::Visibility,
ImportPathConfig,
// FIXME: This is here since some queries take it as input that are used // FIXME: This is here since some queries take it as input that are used
// outside of hir. // outside of hir.
{AdtId, MacroId, ModuleDefId}, {AdtId, MacroId, ModuleDefId},
@ -141,7 +142,7 @@ pub use {
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite}, display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
layout::LayoutError, layout::LayoutError,
mir::{MirEvalError, MirLowerError}, mir::{MirEvalError, MirLowerError},
PointerCast, Safety, FnAbi, PointerCast, Safety,
}, },
// FIXME: Properly encapsulate mir // FIXME: Properly encapsulate mir
hir_ty::{mir, Interner as ChalkTyInterner}, hir_ty::{mir, Interner as ChalkTyInterner},
@ -789,40 +790,32 @@ impl Module {
     /// Finds a path that can be used to refer to the given item from within
     /// this module, if possible.
-    pub fn find_use_path(
+    pub fn find_path(
         self,
         db: &dyn DefDatabase,
         item: impl Into<ItemInNs>,
-        prefer_no_std: bool,
-        prefer_prelude: bool,
+        cfg: ImportPathConfig,
     ) -> Option<ModPath> {
         hir_def::find_path::find_path(
             db,
             item.into().into(),
             self.into(),
-            prefer_no_std,
-            prefer_prelude,
+            PrefixKind::Plain,
+            false,
+            cfg,
         )
     }
 
     /// Finds a path that can be used to refer to the given item from within
     /// this module, if possible. This is used for returning import paths for use-statements.
-    pub fn find_use_path_prefixed(
+    pub fn find_use_path(
         self,
         db: &dyn DefDatabase,
         item: impl Into<ItemInNs>,
         prefix_kind: PrefixKind,
-        prefer_no_std: bool,
-        prefer_prelude: bool,
+        cfg: ImportPathConfig,
     ) -> Option<ModPath> {
-        hir_def::find_path::find_path_prefixed(
-            db,
-            item.into().into(),
-            self.into(),
-            prefix_kind,
-            prefer_no_std,
-            prefer_prelude,
-        )
+        hir_def::find_path::find_path(db, item.into().into(), self.into(), prefix_kind, true, cfg)
     }
 }
@ -1965,7 +1958,7 @@ impl Function {
.enumerate() .enumerate()
.map(|(idx, ty)| { .map(|(idx, ty)| {
let ty = Type { env: environment.clone(), ty: ty.clone() }; let ty = Type { env: environment.clone(), ty: ty.clone() };
Param { func: self, ty, idx } Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
}) })
.collect() .collect()
} }
@ -1991,7 +1984,7 @@ impl Function {
.skip(skip) .skip(skip)
.map(|(idx, ty)| { .map(|(idx, ty)| {
let ty = Type { env: environment.clone(), ty: ty.clone() }; let ty = Type { env: environment.clone(), ty: ty.clone() };
Param { func: self, ty, idx } Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
}) })
.collect() .collect()
} }
@ -2037,7 +2030,7 @@ impl Function {
.skip(skip) .skip(skip)
.map(|(idx, ty)| { .map(|(idx, ty)| {
let ty = Type { env: environment.clone(), ty: ty.clone() }; let ty = Type { env: environment.clone(), ty: ty.clone() };
Param { func: self, ty, idx } Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
}) })
.collect() .collect()
} }
@ -2167,17 +2160,24 @@ impl From<hir_ty::Mutability> for Access {
#[derive(Clone, PartialEq, Eq, Hash, Debug)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct Param { pub struct Param {
func: Function, func: Callee,
/// The index in parameter list, including self parameter. /// The index in parameter list, including self parameter.
idx: usize, idx: usize,
ty: Type, ty: Type,
} }
impl Param { impl Param {
pub fn parent_fn(&self) -> Function { pub fn parent_fn(&self) -> Option<Function> {
self.func match self.func {
Callee::Def(CallableDefId::FunctionId(f)) => Some(f.into()),
_ => None,
}
} }
// pub fn parent_closure(&self) -> Option<Closure> {
// self.func.as_ref().right().cloned()
// }
pub fn index(&self) -> usize { pub fn index(&self) -> usize {
self.idx self.idx
} }
@ -2191,7 +2191,11 @@ impl Param {
} }
pub fn as_local(&self, db: &dyn HirDatabase) -> Option<Local> { pub fn as_local(&self, db: &dyn HirDatabase) -> Option<Local> {
let parent = DefWithBodyId::FunctionId(self.func.into()); let parent = match self.func {
Callee::Def(CallableDefId::FunctionId(it)) => DefWithBodyId::FunctionId(it),
Callee::Closure(closure, _) => db.lookup_intern_closure(closure.into()).0,
_ => return None,
};
let body = db.body(parent); let body = db.body(parent);
if let Some(self_param) = body.self_param.filter(|_| self.idx == 0) { if let Some(self_param) = body.self_param.filter(|_| self.idx == 0) {
Some(Local { parent, binding_id: self_param }) Some(Local { parent, binding_id: self_param })
@ -2205,18 +2209,45 @@ impl Param {
} }
pub fn pattern_source(&self, db: &dyn HirDatabase) -> Option<ast::Pat> { pub fn pattern_source(&self, db: &dyn HirDatabase) -> Option<ast::Pat> {
self.source(db).and_then(|p| p.value.pat()) self.source(db).and_then(|p| p.value.right()?.pat())
} }
pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::Param>> { pub fn source(
let InFile { file_id, value } = self.func.source(db)?; &self,
let params = value.param_list()?; db: &dyn HirDatabase,
if params.self_param().is_some() { ) -> Option<InFile<Either<ast::SelfParam, ast::Param>>> {
params.params().nth(self.idx.checked_sub(params.self_param().is_some() as usize)?) match self.func {
} else { Callee::Def(CallableDefId::FunctionId(func)) => {
params.params().nth(self.idx) let InFile { file_id, value } = Function { id: func }.source(db)?;
let params = value.param_list()?;
if let Some(self_param) = params.self_param() {
if let Some(idx) = self.idx.checked_sub(1) {
params.params().nth(idx).map(Either::Right)
} else {
Some(Either::Left(self_param))
}
} else {
params.params().nth(self.idx).map(Either::Right)
}
.map(|value| InFile { file_id, value })
}
Callee::Closure(closure, _) => {
let InternedClosure(owner, expr_id) = db.lookup_intern_closure(closure.into());
let (_, source_map) = db.body_with_source_map(owner);
let ast @ InFile { file_id, value } = source_map.expr_syntax(expr_id).ok()?;
let root = db.parse_or_expand(file_id);
match value.to_node(&root) {
ast::Expr::ClosureExpr(it) => it
.param_list()?
.params()
.nth(self.idx)
.map(Either::Right)
.map(|value| InFile { file_id: ast.file_id, value }),
_ => None,
}
}
_ => None,
} }
.map(|value| InFile { file_id, value })
} }
} }
@ -3372,34 +3403,21 @@ impl BuiltinAttr {
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ToolModule { pub struct ToolModule {
krate: Option<CrateId>, krate: CrateId,
idx: u32, idx: u32,
} }
impl ToolModule { impl ToolModule {
// FIXME: consider crates\hir_def\src\nameres\attr_resolution.rs?
pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> { pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
if let builtin @ Some(_) = Self::builtin(name) { let krate = krate.id;
return builtin;
}
let idx = let idx =
db.crate_def_map(krate.id).registered_tools().iter().position(|it| it == name)? as u32; db.crate_def_map(krate).registered_tools().iter().position(|it| it == name)? as u32;
Some(ToolModule { krate: Some(krate.id), idx }) Some(ToolModule { krate, idx })
}
fn builtin(name: &str) -> Option<Self> {
hir_def::attr::builtin::TOOL_MODULES
.iter()
.position(|&tool| tool == name)
.map(|idx| ToolModule { krate: None, idx: idx as u32 })
} }
pub fn name(&self, db: &dyn HirDatabase) -> SmolStr { pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
// FIXME: Return a `Name` here // FIXME: Return a `Name` here
match self.krate { db.crate_def_map(self.krate).registered_tools()[self.idx as usize].clone()
Some(krate) => db.crate_def_map(krate).registered_tools()[self.idx as usize].clone(),
None => SmolStr::new(hir_def::attr::builtin::TOOL_MODULES[self.idx as usize]),
}
} }
} }
@ -4292,27 +4310,37 @@ impl Type {
} }
pub fn as_callable(&self, db: &dyn HirDatabase) -> Option<Callable> { pub fn as_callable(&self, db: &dyn HirDatabase) -> Option<Callable> {
let mut the_ty = &self.ty;
let callee = match self.ty.kind(Interner) { let callee = match self.ty.kind(Interner) {
TyKind::Ref(_, _, ty) if ty.as_closure().is_some() => { TyKind::Closure(id, subst) => Callee::Closure(*id, subst.clone()),
the_ty = ty;
Callee::Closure(ty.as_closure().unwrap())
}
TyKind::Closure(id, _) => Callee::Closure(*id),
TyKind::Function(_) => Callee::FnPtr, TyKind::Function(_) => Callee::FnPtr,
TyKind::FnDef(..) => Callee::Def(self.ty.callable_def(db)?), TyKind::FnDef(..) => Callee::Def(self.ty.callable_def(db)?),
_ => { kind => {
let sig = hir_ty::callable_sig_from_fnonce(&self.ty, self.env.clone(), db)?; // This will happen when it implements fn or fn mut, since we add an autoborrow adjustment
let (ty, kind) = if let TyKind::Ref(_, _, ty) = kind {
(ty, ty.kind(Interner))
} else {
(&self.ty, kind)
};
if let TyKind::Closure(closure, subst) = kind {
let sig = ty.callable_sig(db)?;
return Some(Callable {
ty: self.clone(),
sig,
callee: Callee::Closure(*closure, subst.clone()),
is_bound_method: false,
});
}
let (fn_trait, sig) = hir_ty::callable_sig_from_fn_trait(ty, self.env.clone(), db)?;
return Some(Callable { return Some(Callable {
ty: self.clone(), ty: self.clone(),
sig, sig,
callee: Callee::Other, callee: Callee::FnImpl(fn_trait),
is_bound_method: false, is_bound_method: false,
}); });
} }
}; };
let sig = the_ty.callable_sig(db)?; let sig = self.ty.callable_sig(db)?;
Some(Callable { ty: self.clone(), sig, callee, is_bound_method: false }) Some(Callable { ty: self.clone(), sig, callee, is_bound_method: false })
} }
@ -4929,37 +4957,39 @@ pub struct Callable {
sig: CallableSig, sig: CallableSig,
callee: Callee, callee: Callee,
/// Whether this is a method that was called with method call syntax. /// Whether this is a method that was called with method call syntax.
pub(crate) is_bound_method: bool, is_bound_method: bool,
} }
#[derive(Debug)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
enum Callee { enum Callee {
Def(CallableDefId), Def(CallableDefId),
Closure(ClosureId), Closure(ClosureId, Substitution),
FnPtr, FnPtr,
Other, FnImpl(FnTrait),
} }
pub enum CallableKind { pub enum CallableKind {
Function(Function), Function(Function),
TupleStruct(Struct), TupleStruct(Struct),
TupleEnumVariant(Variant), TupleEnumVariant(Variant),
Closure, Closure(Closure),
FnPtr, FnPtr,
/// Some other type that implements `FnOnce`. FnImpl(FnTrait),
Other,
} }
impl Callable { impl Callable {
pub fn kind(&self) -> CallableKind { pub fn kind(&self) -> CallableKind {
use Callee::*;
match self.callee { match self.callee {
Def(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()), Callee::Def(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()),
Def(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()), Callee::Def(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()),
Def(CallableDefId::EnumVariantId(it)) => CallableKind::TupleEnumVariant(it.into()), Callee::Def(CallableDefId::EnumVariantId(it)) => {
Closure(_) => CallableKind::Closure, CallableKind::TupleEnumVariant(it.into())
FnPtr => CallableKind::FnPtr, }
Other => CallableKind::Other, Callee::Closure(id, ref subst) => {
CallableKind::Closure(Closure { id, subst: subst.clone() })
}
Callee::FnPtr => CallableKind::FnPtr,
Callee::FnImpl(fn_) => CallableKind::FnImpl(fn_),
} }
} }
pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<(SelfParam, Type)> { pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<(SelfParam, Type)> {
@ -4973,43 +5003,15 @@ impl Callable {
pub fn n_params(&self) -> usize { pub fn n_params(&self) -> usize {
self.sig.params().len() - if self.is_bound_method { 1 } else { 0 } self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
} }
pub fn params( pub fn params(&self) -> Vec<Param> {
&self, self.sig
db: &dyn HirDatabase,
) -> Vec<(Option<Either<ast::SelfParam, ast::Pat>>, Type)> {
let types = self
.sig
.params() .params()
.iter() .iter()
.enumerate()
.skip(if self.is_bound_method { 1 } else { 0 }) .skip(if self.is_bound_method { 1 } else { 0 })
.map(|ty| self.ty.derived(ty.clone())); .map(|(idx, ty)| (idx, self.ty.derived(ty.clone())))
let map_param = |it: ast::Param| it.pat().map(Either::Right); .map(|(idx, ty)| Param { func: self.callee.clone(), idx, ty })
let patterns = match self.callee { .collect()
Callee::Def(CallableDefId::FunctionId(func)) => {
let src = func.lookup(db.upcast()).source(db.upcast());
src.value.param_list().map(|param_list| {
param_list
.self_param()
.map(|it| Some(Either::Left(it)))
.filter(|_| !self.is_bound_method)
.into_iter()
.chain(param_list.params().map(map_param))
})
}
Callee::Closure(closure_id) => match closure_source(db, closure_id) {
Some(src) => src.param_list().map(|param_list| {
param_list
.self_param()
.map(|it| Some(Either::Left(it)))
.filter(|_| !self.is_bound_method)
.into_iter()
.chain(param_list.params().map(map_param))
}),
None => None,
},
_ => None,
};
patterns.into_iter().flatten().chain(iter::repeat(None)).zip(types).collect()
} }
pub fn return_type(&self) -> Type { pub fn return_type(&self) -> Type {
self.ty.derived(self.sig.ret().clone()) self.ty.derived(self.sig.ret().clone())
@ -5017,17 +5019,9 @@ impl Callable {
pub fn sig(&self) -> &CallableSig { pub fn sig(&self) -> &CallableSig {
&self.sig &self.sig
} }
}
fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option<ast::ClosureExpr> { pub fn ty(&self) -> &Type {
let InternedClosure(owner, expr_id) = db.lookup_intern_closure(closure.into()); &self.ty
let (_, source_map) = db.body_with_source_map(owner);
let ast = source_map.expr_syntax(expr_id).ok()?;
let root = ast.file_syntax(db.upcast());
let expr = ast.value.to_node(&root);
match expr {
ast::Expr::ClosureExpr(it) => Some(it),
_ => None,
} }
} }
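
The reworked `Param::source` returns `Either<ast::SelfParam, ast::Param>` because index 0 of a method's parameter list may be the `self` parameter. A simplified standalone sketch of the index bookkeeping, using the `either` crate and stand-in AST types:

use either::Either;

// Stand-ins for ast::SelfParam and ast::Param.
#[derive(Debug)]
struct SelfParam;
#[derive(Debug)]
struct Param(&'static str);

fn nth_param(
    self_param: Option<SelfParam>,
    params: Vec<Param>,
    idx: usize,
) -> Option<Either<SelfParam, Param>> {
    if let Some(self_param) = self_param {
        match idx.checked_sub(1) {
            // Index 0 is the `self` parameter.
            None => Some(Either::Left(self_param)),
            // Later indices shift down by one into the ordinary parameter list.
            Some(i) => params.into_iter().nth(i).map(Either::Right),
        }
    } else {
        params.into_iter().nth(idx).map(Either::Right)
    }
}

fn main() {
    let source = nth_param(Some(SelfParam), vec![Param("a"), Param("b")], 2);
    println!("{source:?}"); // Right(Param("b"))

    let source = nth_param(None, vec![Param("a"), Param("b")], 0);
    println!("{source:?}"); // Right(Param("a"))
}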


@ -307,7 +307,8 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         call: &ast::Expr,
     ) -> Option<Callable> {
-        self.type_of_expr(db, &call.clone())?.0.as_callable(db)
+        let (orig, adjusted) = self.type_of_expr(db, &call.clone())?;
+        adjusted.unwrap_or(orig).as_callable(db)
     }
 
     pub(crate) fn resolve_field(


@ -1,6 +1,6 @@
//! Type tree for term search //! Type tree for term search
use hir_def::find_path::PrefixKind; use hir_def::ImportPathConfig;
use hir_expand::mod_path::ModPath; use hir_expand::mod_path::ModPath;
use hir_ty::{ use hir_ty::{
db::HirDatabase, db::HirDatabase,
@ -17,42 +17,20 @@ use crate::{
fn mod_item_path( fn mod_item_path(
sema_scope: &SemanticsScope<'_>, sema_scope: &SemanticsScope<'_>,
def: &ModuleDef, def: &ModuleDef,
prefer_no_std: bool, cfg: ImportPathConfig,
prefer_prelude: bool,
) -> Option<ModPath> { ) -> Option<ModPath> {
let db = sema_scope.db; let db = sema_scope.db;
// Account for locals shadowing items from module
let name_hit_count = def.name(db).map(|def_name| {
let mut name_hit_count = 0;
sema_scope.process_all_names(&mut |name, _| {
if name == def_name {
name_hit_count += 1;
}
});
name_hit_count
});
let m = sema_scope.module(); let m = sema_scope.module();
match name_hit_count { m.find_path(db.upcast(), *def, cfg)
Some(0..=1) | None => m.find_use_path(db.upcast(), *def, prefer_no_std, prefer_prelude),
Some(_) => m.find_use_path_prefixed(
db.upcast(),
*def,
PrefixKind::ByCrate,
prefer_no_std,
prefer_prelude,
),
}
} }
/// Helper function to get path to `ModuleDef` as string /// Helper function to get path to `ModuleDef` as string
fn mod_item_path_str( fn mod_item_path_str(
sema_scope: &SemanticsScope<'_>, sema_scope: &SemanticsScope<'_>,
def: &ModuleDef, def: &ModuleDef,
prefer_no_std: bool, cfg: ImportPathConfig,
prefer_prelude: bool,
) -> Result<String, DisplaySourceCodeError> { ) -> Result<String, DisplaySourceCodeError> {
let path = mod_item_path(sema_scope, def, prefer_no_std, prefer_prelude); let path = mod_item_path(sema_scope, def, cfg);
path.map(|it| it.display(sema_scope.db.upcast()).to_string()) path.map(|it| it.display(sema_scope.db.upcast()).to_string())
.ok_or(DisplaySourceCodeError::PathNotFound) .ok_or(DisplaySourceCodeError::PathNotFound)
} }
@ -61,8 +39,7 @@ fn mod_item_path_str(
fn type_path( fn type_path(
sema_scope: &SemanticsScope<'_>, sema_scope: &SemanticsScope<'_>,
ty: &Type, ty: &Type,
prefer_no_std: bool, cfg: ImportPathConfig,
prefer_prelude: bool,
) -> Result<String, DisplaySourceCodeError> { ) -> Result<String, DisplaySourceCodeError> {
let db = sema_scope.db; let db = sema_scope.db;
let m = sema_scope.module(); let m = sema_scope.module();
@ -71,9 +48,7 @@ fn type_path(
Some(adt) => { Some(adt) => {
let ty_name = ty.display_source_code(db, m.id, true)?; let ty_name = ty.display_source_code(db, m.id, true)?;
let mut path = let mut path = mod_item_path(sema_scope, &ModuleDef::Adt(adt), cfg).unwrap();
mod_item_path(sema_scope, &ModuleDef::Adt(adt), prefer_no_std, prefer_prelude)
.unwrap();
path.pop_segment(); path.pop_segment();
let path = path.display(db.upcast()).to_string(); let path = path.display(db.upcast()).to_string();
let res = match path.is_empty() { let res = match path.is_empty() {
@ -158,11 +133,10 @@ impl Expr {
&self, &self,
sema_scope: &SemanticsScope<'_>, sema_scope: &SemanticsScope<'_>,
many_formatter: &mut dyn FnMut(&Type) -> String, many_formatter: &mut dyn FnMut(&Type) -> String,
prefer_no_std: bool, cfg: ImportPathConfig,
prefer_prelude: bool,
) -> Result<String, DisplaySourceCodeError> { ) -> Result<String, DisplaySourceCodeError> {
let db = sema_scope.db; let db = sema_scope.db;
let mod_item_path_str = |s, def| mod_item_path_str(s, def, prefer_no_std, prefer_prelude); let mod_item_path_str = |s, def| mod_item_path_str(s, def, cfg);
match self { match self {
Expr::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)), Expr::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)), Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
@ -172,9 +146,7 @@ impl Expr {
Expr::Function { func, params, .. } => { Expr::Function { func, params, .. } => {
let args = params let args = params
.iter() .iter()
.map(|f| { .map(|f| f.gen_source_code(sema_scope, many_formatter, cfg))
f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude)
})
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()? .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter() .into_iter()
.join(", "); .join(", ");
@ -188,14 +160,10 @@ impl Expr {
crate::AssocItemContainer::Impl(imp) => { crate::AssocItemContainer::Impl(imp) => {
let self_ty = imp.self_ty(db); let self_ty = imp.self_ty(db);
// Should it be guaranteed that `mod_item_path` always exists? // Should it be guaranteed that `mod_item_path` always exists?
match self_ty.as_adt().and_then(|adt| { match self_ty
mod_item_path( .as_adt()
sema_scope, .and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg))
&adt.into(), {
prefer_no_std,
prefer_prelude,
)
}) {
Some(path) => path.display(sema_scope.db.upcast()).to_string(), Some(path) => path.display(sema_scope.db.upcast()).to_string(),
None => self_ty.display(db).to_string(), None => self_ty.display(db).to_string(),
} }
@ -217,17 +185,10 @@ impl Expr {
let func_name = func.name(db).display(db.upcast()).to_string(); let func_name = func.name(db).display(db.upcast()).to_string();
let self_param = func.self_param(db).unwrap(); let self_param = func.self_param(db).unwrap();
let target_str = target.gen_source_code( let target_str = target.gen_source_code(sema_scope, many_formatter, cfg)?;
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude,
)?;
let args = params let args = params
.iter() .iter()
.map(|f| { .map(|f| f.gen_source_code(sema_scope, many_formatter, cfg))
f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude)
})
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()? .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter() .into_iter()
.join(", "); .join(", ");
@ -259,7 +220,7 @@ impl Expr {
false => { false => {
let generics = generics let generics = generics
.iter() .iter()
.map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude)) .map(|it| type_path(sema_scope, it, cfg))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()? .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter() .into_iter()
.join(", "); .join(", ");
@ -270,14 +231,7 @@ impl Expr {
StructKind::Tuple => { StructKind::Tuple => {
let args = params let args = params
.iter() .iter()
.map(|f| { .map(|f| f.gen_source_code(sema_scope, many_formatter, cfg))
f.gen_source_code(
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude,
)
})
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()? .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter() .into_iter()
.join(", "); .join(", ");
@ -292,12 +246,7 @@ impl Expr {
let tmp = format!( let tmp = format!(
"{}: {}", "{}: {}",
f.name(db).display(db.upcast()), f.name(db).display(db.upcast()),
a.gen_source_code( a.gen_source_code(sema_scope, many_formatter, cfg)?
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude
)?
); );
Ok(tmp) Ok(tmp)
}) })
@ -318,14 +267,7 @@ impl Expr {
StructKind::Tuple => { StructKind::Tuple => {
let args = params let args = params
.iter() .iter()
.map(|a| { .map(|a| a.gen_source_code(sema_scope, many_formatter, cfg))
a.gen_source_code(
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude,
)
})
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()? .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter() .into_iter()
.join(", "); .join(", ");
@ -340,12 +282,7 @@ impl Expr {
let tmp = format!( let tmp = format!(
"{}: {}", "{}: {}",
f.name(db).display(db.upcast()), f.name(db).display(db.upcast()),
a.gen_source_code( a.gen_source_code(sema_scope, many_formatter, cfg)?
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude
)?
); );
Ok(tmp) Ok(tmp)
}) })
@ -359,7 +296,7 @@ impl Expr {
false => { false => {
let generics = generics let generics = generics
.iter() .iter()
.map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude)) .map(|it| type_path(sema_scope, it, cfg))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()? .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter() .into_iter()
.join(", "); .join(", ");
@ -374,9 +311,7 @@ impl Expr {
Expr::Tuple { params, .. } => { Expr::Tuple { params, .. } => {
let args = params let args = params
.iter() .iter()
.map(|a| { .map(|a| a.gen_source_code(sema_scope, many_formatter, cfg))
a.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude)
})
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()? .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter() .into_iter()
.join(", "); .join(", ");
@ -388,12 +323,7 @@ impl Expr {
return Ok(many_formatter(&expr.ty(db))); return Ok(many_formatter(&expr.ty(db)));
} }
let strukt = expr.gen_source_code( let strukt = expr.gen_source_code(sema_scope, many_formatter, cfg)?;
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude,
)?;
let field = field.name(db).display(db.upcast()).to_string(); let field = field.name(db).display(db.upcast()).to_string();
Ok(format!("{strukt}.{field}")) Ok(format!("{strukt}.{field}"))
} }
@ -402,12 +332,7 @@ impl Expr {
return Ok(many_formatter(&expr.ty(db))); return Ok(many_formatter(&expr.ty(db)));
} }
let inner = expr.gen_source_code( let inner = expr.gen_source_code(sema_scope, many_formatter, cfg)?;
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude,
)?;
Ok(format!("&{inner}")) Ok(format!("&{inner}"))
} }
Expr::Many(ty) => Ok(many_formatter(ty)), Expr::Many(ty) => Ok(many_formatter(ty)),


@ -1,7 +1,7 @@
use std::iter::{self, Peekable}; use std::iter::{self, Peekable};
use either::Either; use either::Either;
use hir::{Adt, Crate, HasAttrs, HasSource, ModuleDef, Semantics}; use hir::{Adt, Crate, HasAttrs, HasSource, ImportPathConfig, ModuleDef, Semantics};
use ide_db::RootDatabase; use ide_db::RootDatabase;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast}; use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
use itertools::Itertools; use itertools::Itertools;
@ -71,6 +71,11 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.filter(|pat| !matches!(pat, Pat::WildcardPat(_))) .filter(|pat| !matches!(pat, Pat::WildcardPat(_)))
.collect(); .collect();
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
let module = ctx.sema.scope(expr.syntax())?.module(); let module = ctx.sema.scope(expr.syntax())?.module();
let (mut missing_pats, is_non_exhaustive, has_hidden_variants): ( let (mut missing_pats, is_non_exhaustive, has_hidden_variants): (
Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>, Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>,
@ -88,13 +93,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.into_iter() .into_iter()
.filter_map(|variant| { .filter_map(|variant| {
Some(( Some((
build_pat( build_pat(ctx.db(), module, variant, cfg)?,
ctx.db(),
module,
variant,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?,
variant.should_be_hidden(ctx.db(), module.krate()), variant.should_be_hidden(ctx.db(), module.krate()),
)) ))
}) })
@ -145,15 +144,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
let is_hidden = variants let is_hidden = variants
.iter() .iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate())); .any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns = variants.into_iter().filter_map(|variant| { let patterns = variants
build_pat( .into_iter()
ctx.db(), .filter_map(|variant| build_pat(ctx.db(), module, variant, cfg));
module,
variant,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
});
(ast::Pat::from(make::tuple_pat(patterns)), is_hidden) (ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
}) })
@ -184,15 +177,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
let is_hidden = variants let is_hidden = variants
.iter() .iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate())); .any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns = variants.into_iter().filter_map(|variant| { let patterns = variants
build_pat( .into_iter()
ctx.db(), .filter_map(|variant| build_pat(ctx.db(), module, variant, cfg));
module,
variant,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
});
(ast::Pat::from(make::slice_pat(patterns)), is_hidden) (ast::Pat::from(make::slice_pat(patterns)), is_hidden)
}) })
.filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat)); .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
@ -457,18 +444,11 @@ fn build_pat(
db: &RootDatabase, db: &RootDatabase,
module: hir::Module, module: hir::Module,
var: ExtendedVariant, var: ExtendedVariant,
prefer_no_std: bool, cfg: ImportPathConfig,
prefer_prelude: bool,
) -> Option<ast::Pat> { ) -> Option<ast::Pat> {
match var { match var {
ExtendedVariant::Variant(var) => { ExtendedVariant::Variant(var) => {
let path = mod_path_to_ast(&module.find_use_path( let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?);
db,
ModuleDef::from(var),
prefer_no_std,
prefer_prelude,
)?);
// FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though // FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though
Some(match var.source(db)?.value.kind() { Some(match var.source(db)?.value.kind() {
ast::StructKind::Tuple(field_list) => { ast::StructKind::Tuple(field_list) => {


@ -1,6 +1,6 @@
 use std::cmp::Reverse;
 
-use hir::{db::HirDatabase, Module};
+use hir::{db::HirDatabase, ImportPathConfig, Module};
 use ide_db::{
     helpers::mod_path_to_ast,
     imports::{
@ -90,14 +90,14 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
 // # pub mod std { pub mod collections { pub struct HashMap { } } }
 // ```
 pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let cfg = ImportPathConfig {
+        prefer_no_std: ctx.config.prefer_no_std,
+        prefer_prelude: ctx.config.prefer_prelude,
+    };
+
     let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
     let mut proposed_imports: Vec<_> = import_assets
-        .search_for_imports(
-            &ctx.sema,
-            ctx.config.insert_use.prefix_kind,
-            ctx.config.prefer_no_std,
-            ctx.config.prefer_no_std,
-        )
+        .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind)
         .collect();
     if proposed_imports.is_empty() {
         return None;


@ -1,4 +1,4 @@
use hir::ModuleDef; use hir::{ImportPathConfig, ModuleDef};
use ide_db::{ use ide_db::{
assists::{AssistId, AssistKind}, assists::{AssistId, AssistKind},
defs::Definition, defs::Definition,
@ -326,6 +326,11 @@ fn augment_references_with_imports(
) -> Vec<FileReferenceWithImport> { ) -> Vec<FileReferenceWithImport> {
let mut visited_modules = FxHashSet::default(); let mut visited_modules = FxHashSet::default();
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
references references
.into_iter() .into_iter()
.filter_map(|FileReference { range, name, .. }| { .filter_map(|FileReference { range, name, .. }| {
@ -341,12 +346,11 @@ fn augment_references_with_imports(
let import_scope = ImportScope::find_insert_use_container(name.syntax(), &ctx.sema); let import_scope = ImportScope::find_insert_use_container(name.syntax(), &ctx.sema);
let path = ref_module let path = ref_module
.find_use_path_prefixed( .find_use_path(
ctx.sema.db, ctx.sema.db,
ModuleDef::Module(*target_module), ModuleDef::Module(*target_module),
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, cfg,
ctx.config.prefer_prelude,
) )
.map(|mod_path| { .map(|mod_path| {
make::path_concat(mod_path_to_ast(&mod_path), make::path_from_text("Bool")) make::path_concat(mod_path_to_ast(&mod_path), make::path_from_text("Bool"))
@ -1521,7 +1525,7 @@ mod foo {
} }
"#, "#,
r#" r#"
use crate::foo::Bool; use foo::Bool;
fn main() { fn main() {
use foo::FOO; use foo::FOO;
@ -1602,7 +1606,7 @@ pub mod bar {
"#, "#,
r#" r#"
//- /main.rs //- /main.rs
use crate::foo::bar::Bool; use foo::bar::Bool;
mod foo; mod foo;

View file

@ -1,3 +1,4 @@
use hir::ImportPathConfig;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast, traits::resolve_target_trait}; use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast, traits::resolve_target_trait};
use syntax::ast::{self, AstNode, HasName}; use syntax::ast::{self, AstNode, HasName};
@ -43,19 +44,18 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -
return None; return None;
} }
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
let src_type_path = { let src_type_path = {
let src_type_path = src_type.syntax().descendants().find_map(ast::Path::cast)?; let src_type_path = src_type.syntax().descendants().find_map(ast::Path::cast)?;
let src_type_def = match ctx.sema.resolve_path(&src_type_path) { let src_type_def = match ctx.sema.resolve_path(&src_type_path) {
Some(hir::PathResolution::Def(module_def)) => module_def, Some(hir::PathResolution::Def(module_def)) => module_def,
_ => return None, _ => return None,
}; };
mod_path_to_ast(&module.find_path(ctx.db(), src_type_def, cfg)?)
mod_path_to_ast(&module.find_use_path(
ctx.db(),
src_type_def,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?)
}; };
let dest_type = match &ast_trait { let dest_type = match &ast_trait {

View file

@ -1,5 +1,5 @@
use either::Either; use either::Either;
use hir::ModuleDef; use hir::{ImportPathConfig, ModuleDef};
use ide_db::{ use ide_db::{
assists::{AssistId, AssistKind}, assists::{AssistId, AssistKind},
defs::Definition, defs::Definition,
@ -183,6 +183,11 @@ fn augment_references_with_imports(
) -> Vec<(ast::NameLike, Option<(ImportScope, ast::Path)>)> { ) -> Vec<(ast::NameLike, Option<(ImportScope, ast::Path)>)> {
let mut visited_modules = FxHashSet::default(); let mut visited_modules = FxHashSet::default();
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
references references
.iter() .iter()
.filter_map(|FileReference { name, .. }| { .filter_map(|FileReference { name, .. }| {
@ -201,12 +206,11 @@ fn augment_references_with_imports(
let import_scope = let import_scope =
ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema); ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema);
let path = ref_module let path = ref_module
.find_use_path_prefixed( .find_use_path(
ctx.sema.db, ctx.sema.db,
ModuleDef::Module(*target_module), ModuleDef::Module(*target_module),
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, cfg,
ctx.config.prefer_prelude,
) )
.map(|mod_path| { .map(|mod_path| {
make::path_concat( make::path_concat(
@ -811,7 +815,7 @@ pub mod bar {
"#, "#,
r#" r#"
//- /main.rs //- /main.rs
use crate::foo::bar::BarResult; use foo::bar::BarResult;
mod foo; mod foo;

View file

@ -1,4 +1,4 @@
use hir::HasVisibility; use hir::{HasVisibility, ImportPathConfig};
use ide_db::{ use ide_db::{
assists::{AssistId, AssistKind}, assists::{AssistId, AssistKind},
defs::Definition, defs::Definition,
@ -87,15 +87,15 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
let ty = ctx.sema.type_of_binding_in_pat(&ident_pat)?; let ty = ctx.sema.type_of_binding_in_pat(&ident_pat)?;
let hir::Adt::Struct(struct_type) = ty.strip_references().as_adt()? else { return None }; let hir::Adt::Struct(struct_type) = ty.strip_references().as_adt()? else { return None };
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
let module = ctx.sema.scope(ident_pat.syntax())?.module(); let module = ctx.sema.scope(ident_pat.syntax())?.module();
let struct_def = hir::ModuleDef::from(struct_type); let struct_def = hir::ModuleDef::from(struct_type);
let kind = struct_type.kind(ctx.db()); let kind = struct_type.kind(ctx.db());
let struct_def_path = module.find_use_path( let struct_def_path = module.find_path(ctx.db(), struct_def, cfg)?;
ctx.db(),
struct_def,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?;
let is_non_exhaustive = struct_def.attrs(ctx.db())?.by_key("non_exhaustive").exists(); let is_non_exhaustive = struct_def.attrs(ctx.db())?.by_key("non_exhaustive").exists();
let is_foreign_crate = let is_foreign_crate =

View file

@ -3,8 +3,8 @@ use std::{iter, ops::RangeInclusive};
use ast::make; use ast::make;
use either::Either; use either::Either;
use hir::{ use hir::{
DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, DescendPreference, HasSource, HirDisplay, ImportPathConfig, InFile, Local, LocalSource,
PathResolution, Semantics, TypeInfo, TypeParam, ModuleDef, PathResolution, Semantics, TypeInfo, TypeParam,
}; };
use ide_db::{ use ide_db::{
defs::{Definition, NameRefClass}, defs::{Definition, NameRefClass},
@ -209,12 +209,14 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
FamousDefs(&ctx.sema, module.krate()).core_ops_ControlFlow(); FamousDefs(&ctx.sema, module.krate()).core_ops_ControlFlow();
if let Some(control_flow_enum) = control_flow_enum { if let Some(control_flow_enum) = control_flow_enum {
let mod_path = module.find_use_path_prefixed( let mod_path = module.find_use_path(
ctx.sema.db, ctx.sema.db,
ModuleDef::from(control_flow_enum), ModuleDef::from(control_flow_enum),
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
); );
if let Some(mod_path) = mod_path { if let Some(mod_path) = mod_path {

View file

@ -1,7 +1,7 @@
use std::iter; use std::iter;
use either::Either; use either::Either;
use hir::{Module, ModuleDef, Name, Variant}; use hir::{ImportPathConfig, Module, ModuleDef, Name, Variant};
use ide_db::{ use ide_db::{
defs::Definition, defs::Definition,
helpers::mod_path_to_ast, helpers::mod_path_to_ast,
@ -386,12 +386,14 @@ fn process_references(
let segment = builder.make_mut(segment); let segment = builder.make_mut(segment);
let scope_node = builder.make_syntax_mut(scope_node); let scope_node = builder.make_syntax_mut(scope_node);
if !visited_modules.contains(&module) { if !visited_modules.contains(&module) {
let mod_path = module.find_use_path_prefixed( let mod_path = module.find_use_path(
ctx.sema.db, ctx.sema.db,
*enum_module_def, *enum_module_def,
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
); );
if let Some(mut mod_path) = mod_path { if let Some(mut mod_path) = mod_path {
mod_path.pop_segment(); mod_path.pop_segment();
@ -881,7 +883,7 @@ fn another_fn() {
r#"use my_mod::my_other_mod::MyField; r#"use my_mod::my_other_mod::MyField;
mod my_mod { mod my_mod {
use self::my_other_mod::MyField; use my_other_mod::MyField;
fn another_fn() { fn another_fn() {
let m = my_other_mod::MyEnum::MyField(MyField(1, 1)); let m = my_other_mod::MyEnum::MyField(MyField(1, 1));

View file

@ -1,6 +1,6 @@
use std::fmt::Display; use std::fmt::Display;
use hir::{ModPath, ModuleDef}; use hir::{ImportPathConfig, ModPath, ModuleDef};
use ide_db::{famous_defs::FamousDefs, RootDatabase}; use ide_db::{famous_defs::FamousDefs, RootDatabase};
use syntax::{ use syntax::{
ast::{self, HasName}, ast::{self, HasName},
@ -58,11 +58,13 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
let module = ctx.sema.to_def(&strukt)?.module(ctx.db()); let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?; let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
let trait_path = module.find_use_path( let trait_path = module.find_path(
ctx.db(), ctx.db(),
ModuleDef::Trait(trait_), ModuleDef::Trait(trait_),
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)?; )?;
let field_type = field.ty()?; let field_type = field.ty()?;
@ -103,11 +105,13 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
let module = ctx.sema.to_def(&strukt)?.module(ctx.db()); let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?; let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
let trait_path = module.find_use_path( let trait_path = module.find_path(
ctx.db(), ctx.db(),
ModuleDef::Trait(trait_), ModuleDef::Trait(trait_),
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)?; )?;
let field_type = field.ty()?; let field_type = field.ty()?;

View file

@ -1,3 +1,4 @@
use hir::ImportPathConfig;
use ide_db::{ use ide_db::{
imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor, imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor,
}; };
@ -58,11 +59,13 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?)); let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
let type_path = current_module.find_use_path( let type_path = current_module.find_path(
ctx.sema.db, ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?, item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)?; )?;
let expr = use_trivial_constructor( let expr = use_trivial_constructor(

View file

@ -1,4 +1,7 @@
use hir::{db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, ItemInNs, ModuleDef}; use hir::{
db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, ImportPathConfig, ItemInNs,
ModuleDef,
};
use ide_db::assists::{AssistId, AssistKind}; use ide_db::assists::{AssistId, AssistKind};
use syntax::{ast, AstNode}; use syntax::{ast, AstNode};
@ -44,11 +47,13 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let current_module = ctx.sema.scope(call.syntax())?.module(); let current_module = ctx.sema.scope(call.syntax())?.module();
let target_module_def = ModuleDef::from(resolved_call); let target_module_def = ModuleDef::from(resolved_call);
let item_in_ns = ItemInNs::from(target_module_def); let item_in_ns = ItemInNs::from(target_module_def);
let receiver_path = current_module.find_use_path( let receiver_path = current_module.find_path(
ctx.sema.db, ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?, item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)?; )?;
let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call); let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call);

View file

@ -1,6 +1,6 @@
use std::iter; use std::iter;
use hir::AsAssocItem; use hir::{AsAssocItem, ImportPathConfig};
use ide_db::RootDatabase; use ide_db::RootDatabase;
use ide_db::{ use ide_db::{
helpers::mod_path_to_ast, helpers::mod_path_to_ast,
@ -37,9 +37,13 @@ use crate::{
// ``` // ```
pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (import_assets, syntax_under_caret) = find_importable_node(ctx)?; let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
let mut proposed_imports: Vec<_> = import_assets let cfg = ImportPathConfig {
.search_for_relative_paths(&ctx.sema, ctx.config.prefer_no_std, ctx.config.prefer_prelude) prefer_no_std: ctx.config.prefer_no_std,
.collect(); prefer_prelude: ctx.config.prefer_prelude,
};
let mut proposed_imports: Vec<_> =
import_assets.search_for_relative_paths(&ctx.sema, cfg).collect();
if proposed_imports.is_empty() { if proposed_imports.is_empty() {
return None; return None;
} }

View file

@ -776,6 +776,40 @@ mod z {
); );
} }
#[test]
fn remove_unused_fixes_nested_self() {
check_assist(
remove_unused_imports,
r#"
mod inner {
pub struct X();
pub struct Y();
}
mod z {
use super::inner::{self, X}$0;
fn f() {
let y = inner::Y();
}
}
"#,
r#"mod inner {
pub struct X();
pub struct Y();
}
mod z {
use super::inner::{self};
fn f() {
let y = inner::Y();
}
}
"#,
);
}
#[test] #[test]
fn dont_remove_used_glob() { fn dont_remove_used_glob() {
check_assist_not_applicable( check_assist_not_applicable(

View file

@ -1,4 +1,4 @@
use hir::{InFile, MacroFileIdExt, ModuleDef}; use hir::{ImportPathConfig, InFile, MacroFileIdExt, ModuleDef};
use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator}; use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
use itertools::Itertools; use itertools::Itertools;
use syntax::{ use syntax::{
@ -83,11 +83,13 @@ pub(crate) fn replace_derive_with_manual_impl(
}) })
.flat_map(|trait_| { .flat_map(|trait_| {
current_module current_module
.find_use_path( .find_path(
ctx.sema.db, ctx.sema.db,
hir::ModuleDef::Trait(trait_), hir::ModuleDef::Trait(trait_),
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
) )
.as_ref() .as_ref()
.map(mod_path_to_ast) .map(mod_path_to_ast)

View file

@ -114,10 +114,10 @@ pub(crate) fn replace_with_eager_method(acc: &mut Assists, ctx: &AssistContext<'
let callable = ctx.sema.resolve_method_call_as_callable(&call)?; let callable = ctx.sema.resolve_method_call_as_callable(&call)?;
let (_, receiver_ty) = callable.receiver_param(ctx.sema.db)?; let (_, receiver_ty) = callable.receiver_param(ctx.sema.db)?;
let n_params = callable.n_params() + 1; let n_params = callable.n_params() + 1;
let params = callable.params(ctx.sema.db); let params = callable.params();
// FIXME: Check that the arg is of the form `() -> T` // FIXME: Check that the arg is of the form `() -> T`
if !params.first()?.1.impls_fnonce(ctx.sema.db) { if !params.first()?.ty().impls_fnonce(ctx.sema.db) {
return None; return None;
} }

View file

@ -1,4 +1,4 @@
use hir::AsAssocItem; use hir::{AsAssocItem, ImportPathConfig};
use ide_db::{ use ide_db::{
helpers::mod_path_to_ast, helpers::mod_path_to_ast,
imports::insert_use::{insert_use, ImportScope}, imports::insert_use::{insert_use, ImportScope},
@ -63,12 +63,14 @@ pub(crate) fn replace_qualified_name_with_use(
); );
let path_to_qualifier = starts_with_name_ref let path_to_qualifier = starts_with_name_ref
.then(|| { .then(|| {
ctx.sema.scope(path.syntax())?.module().find_use_path_prefixed( ctx.sema.scope(path.syntax())?.module().find_use_path(
ctx.sema.db, ctx.sema.db,
module, module,
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
) )
}) })
.flatten(); .flatten();

View file

@ -1,5 +1,8 @@
//! Term search assist //! Term search assist
use hir::term_search::{TermSearchConfig, TermSearchCtx}; use hir::{
term_search::{TermSearchConfig, TermSearchCtx},
ImportPathConfig,
};
use ide_db::{ use ide_db::{
assists::{AssistId, AssistKind, GroupLabel}, assists::{AssistId, AssistKind, GroupLabel},
famous_defs::FamousDefs, famous_defs::FamousDefs,
@ -50,8 +53,10 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
path.gen_source_code( path.gen_source_code(
&scope, &scope,
&mut formatter, &mut formatter,
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
) )
.ok() .ok()
}) })

View file

@ -253,11 +253,8 @@ fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<St
}; };
let (idx, _) = arg_list.args().find_position(|it| it == expr).unwrap(); let (idx, _) = arg_list.args().find_position(|it| it == expr).unwrap();
let (pat, _) = func.params(sema.db).into_iter().nth(idx)?; let param = func.params().into_iter().nth(idx)?;
let pat = match pat? { let pat = param.source(sema.db)?.value.right()?.pat()?;
either::Either::Right(pat) => pat,
_ => return None,
};
let name = var_name_from_pat(&pat)?; let name = var_name_from_pat(&pat)?;
normalize(&name.to_string()) normalize(&name.to_string())
} }

View file

@ -24,7 +24,7 @@ pub(crate) mod vis;
use std::iter; use std::iter;
use hir::{known, HasAttrs, ScopeDef, Variant}; use hir::{known, HasAttrs, ImportPathConfig, ScopeDef, Variant};
use ide_db::{imports::import_assets::LocatedImport, RootDatabase, SymbolKind}; use ide_db::{imports::import_assets::LocatedImport, RootDatabase, SymbolKind};
use syntax::{ast, SmolStr}; use syntax::{ast, SmolStr};
@ -633,11 +633,13 @@ fn enum_variants_with_paths(
} }
for variant in variants { for variant in variants {
if let Some(path) = ctx.module.find_use_path( if let Some(path) = ctx.module.find_path(
ctx.db, ctx.db,
hir::ModuleDef::from(variant), hir::ModuleDef::from(variant),
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
) { ) {
// Variants with trivial paths are already added by the existing completion logic, // Variants with trivial paths are already added by the existing completion logic,
// so we should avoid adding these twice // so we should avoid adding these twice

View file

@ -1,6 +1,6 @@
//! Completion of names from the current scope in expression position. //! Completion of names from the current scope in expression position.
use hir::ScopeDef; use hir::{ImportPathConfig, ScopeDef};
use syntax::ast; use syntax::ast;
use crate::{ use crate::{
@ -171,11 +171,13 @@ pub(crate) fn complete_expr_path(
hir::Adt::Struct(strukt) => { hir::Adt::Struct(strukt) => {
let path = ctx let path = ctx
.module .module
.find_use_path( .find_path(
ctx.db, ctx.db,
hir::ModuleDef::from(strukt), hir::ModuleDef::from(strukt),
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
) )
.filter(|it| it.len() > 1); .filter(|it| it.len() > 1);
@ -194,11 +196,13 @@ pub(crate) fn complete_expr_path(
hir::Adt::Union(un) => { hir::Adt::Union(un) => {
let path = ctx let path = ctx
.module .module
.find_use_path( .find_path(
ctx.db, ctx.db,
hir::ModuleDef::from(un), hir::ModuleDef::from(un),
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
) )
.filter(|it| it.len() > 1); .filter(|it| it.len() > 1);

View file

@ -1,5 +1,5 @@
//! See [`import_on_the_fly`]. //! See [`import_on_the_fly`].
use hir::{ItemInNs, ModuleDef}; use hir::{ImportPathConfig, ItemInNs, ModuleDef};
use ide_db::imports::{ use ide_db::imports::{
import_assets::{ImportAssets, LocatedImport}, import_assets::{ImportAssets, LocatedImport},
insert_use::ImportScope, insert_use::ImportScope,
@ -257,13 +257,13 @@ fn import_on_the_fly(
}; };
let user_input_lowercased = potential_import_name.to_lowercase(); let user_input_lowercased = potential_import_name.to_lowercase();
let import_cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
import_assets import_assets
.search_for_imports( .search_for_imports(&ctx.sema, import_cfg, ctx.config.insert_use.prefix_kind)
&ctx.sema,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
.filter(ns_filter) .filter(ns_filter)
.filter(|import| { .filter(|import| {
let original_item = &import.original_item; let original_item = &import.original_item;
@ -308,13 +308,13 @@ fn import_on_the_fly_pat_(
}; };
let user_input_lowercased = potential_import_name.to_lowercase(); let user_input_lowercased = potential_import_name.to_lowercase();
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
import_assets import_assets
.search_for_imports( .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind)
&ctx.sema,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
.filter(ns_filter) .filter(ns_filter)
.filter(|import| { .filter(|import| {
let original_item = &import.original_item; let original_item = &import.original_item;
@ -355,13 +355,13 @@ fn import_on_the_fly_method(
let user_input_lowercased = potential_import_name.to_lowercase(); let user_input_lowercased = potential_import_name.to_lowercase();
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
import_assets import_assets
.search_for_imports( .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind)
&ctx.sema,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
.filter(|import| { .filter(|import| {
!ctx.is_item_hidden(&import.item_to_import) !ctx.is_item_hidden(&import.item_to_import)
&& !ctx.is_item_hidden(&import.original_item) && !ctx.is_item_hidden(&import.original_item)

View file

@ -2,7 +2,7 @@
mod format_like; mod format_like;
use hir::ItemInNs; use hir::{ImportPathConfig, ItemInNs};
use ide_db::{ use ide_db::{
documentation::{Documentation, HasDocs}, documentation::{Documentation, HasDocs},
imports::insert_use::ImportScope, imports::insert_use::ImportScope,
@ -60,15 +60,17 @@ pub(crate) fn complete_postfix(
None => return, None => return,
}; };
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() {
if receiver_ty.impls_trait(ctx.db, drop_trait, &[]) { if receiver_ty.impls_trait(ctx.db, drop_trait, &[]) {
if let Some(drop_fn) = ctx.famous_defs().core_mem_drop() { if let Some(drop_fn) = ctx.famous_defs().core_mem_drop() {
if let Some(path) = ctx.module.find_use_path( if let Some(path) =
ctx.db, ctx.module.find_path(ctx.db, ItemInNs::Values(drop_fn.into()), cfg)
ItemInNs::Values(drop_fn.into()), {
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
) {
cov_mark::hit!(postfix_drop_completion); cov_mark::hit!(postfix_drop_completion);
let mut item = postfix_snippet( let mut item = postfix_snippet(
"drop", "drop",

View file

@ -12,6 +12,7 @@ mod snippet;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;
use hir::ImportPathConfig;
use ide_db::{ use ide_db::{
base_db::FilePosition, base_db::FilePosition,
helpers::mod_path_to_ast, helpers::mod_path_to_ast,
@ -251,6 +252,11 @@ pub fn resolve_completion_edits(
let new_ast = scope.clone_for_update(); let new_ast = scope.clone_for_update();
let mut import_insert = TextEdit::builder(); let mut import_insert = TextEdit::builder();
let cfg = ImportPathConfig {
prefer_no_std: config.prefer_no_std,
prefer_prelude: config.prefer_prelude,
};
imports.into_iter().for_each(|(full_import_path, imported_name)| { imports.into_iter().for_each(|(full_import_path, imported_name)| {
let items_with_name = items_locator::items_with_name( let items_with_name = items_locator::items_with_name(
&sema, &sema,
@ -260,13 +266,7 @@ pub fn resolve_completion_edits(
); );
let import = items_with_name let import = items_with_name
.filter_map(|candidate| { .filter_map(|candidate| {
current_module.find_use_path_prefixed( current_module.find_use_path(db, candidate, config.insert_use.prefix_kind, cfg)
db,
candidate,
config.insert_use.prefix_kind,
config.prefer_no_std,
config.prefer_prelude,
)
}) })
.find(|mod_path| mod_path.display(db).to_string() == full_import_path); .find(|mod_path| mod_path.display(db).to_string() == full_import_path);
if let Some(import_path) = import { if let Some(import_path) = import {

View file

@ -10,7 +10,7 @@ pub(crate) mod type_alias;
pub(crate) mod union_literal; pub(crate) mod union_literal;
pub(crate) mod variant; pub(crate) mod variant;
use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type}; use hir::{AsAssocItem, HasAttrs, HirDisplay, ImportPathConfig, ModuleDef, ScopeDef, Type};
use ide_db::{ use ide_db::{
documentation::{Documentation, HasDocs}, documentation::{Documentation, HasDocs},
helpers::item_name, helpers::item_name,
@ -295,14 +295,12 @@ pub(crate) fn render_expr(
.unwrap_or_else(|| String::from("...")) .unwrap_or_else(|| String::from("..."))
}; };
let label = expr let cfg = ImportPathConfig {
.gen_source_code( prefer_no_std: ctx.config.prefer_no_std,
&ctx.scope, prefer_prelude: ctx.config.prefer_prelude,
&mut label_formatter, };
ctx.config.prefer_no_std,
ctx.config.prefer_prelude, let label = expr.gen_source_code(&ctx.scope, &mut label_formatter, cfg).ok()?;
)
.ok()?;
let source_range = match ctx.original_token.parent() { let source_range = match ctx.original_token.parent() {
Some(node) => match node.ancestors().find_map(ast::Path::cast) { Some(node) => match node.ancestors().find_map(ast::Path::cast) {
@ -314,16 +312,8 @@ pub(crate) fn render_expr(
let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label); let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label);
let snippet = format!( let snippet =
"{}$0", format!("{}$0", expr.gen_source_code(&ctx.scope, &mut snippet_formatter, cfg).ok()?);
expr.gen_source_code(
&ctx.scope,
&mut snippet_formatter,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude
)
.ok()?
);
let edit = TextEdit::replace(source_range, snippet); let edit = TextEdit::replace(source_range, snippet);
item.snippet_edit(ctx.config.snippet_cap?, edit); item.snippet_edit(ctx.config.snippet_cap?, edit);
item.documentation(Documentation::new(String::from("Autogenerated expression by term search"))); item.documentation(Documentation::new(String::from("Autogenerated expression by term search")));
@ -333,12 +323,7 @@ pub(crate) fn render_expr(
}); });
for trait_ in expr.traits_used(ctx.db) { for trait_ in expr.traits_used(ctx.db) {
let trait_item = hir::ItemInNs::from(hir::ModuleDef::from(trait_)); let trait_item = hir::ItemInNs::from(hir::ModuleDef::from(trait_));
let Some(path) = ctx.module.find_use_path( let Some(path) = ctx.module.find_path(ctx.db, trait_item, cfg) else {
ctx.db,
trait_item,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
) else {
continue; continue;
}; };

View file

@ -100,6 +100,7 @@
// } // }
// ---- // ----
use hir::ImportPathConfig;
use ide_db::imports::import_assets::LocatedImport; use ide_db::imports::import_assets::LocatedImport;
use itertools::Itertools; use itertools::Itertools;
use syntax::{ast, AstNode, GreenNode, SyntaxNode}; use syntax::{ast, AstNode, GreenNode, SyntaxNode};
@ -168,18 +169,22 @@ impl Snippet {
} }
fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<Vec<LocatedImport>> { fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<Vec<LocatedImport>> {
let import_cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
let resolve = |import: &GreenNode| { let resolve = |import: &GreenNode| {
let path = ast::Path::cast(SyntaxNode::new_root(import.clone()))?; let path = ast::Path::cast(SyntaxNode::new_root(import.clone()))?;
let item = match ctx.scope.speculative_resolve(&path)? { let item = match ctx.scope.speculative_resolve(&path)? {
hir::PathResolution::Def(def) => def.into(), hir::PathResolution::Def(def) => def.into(),
_ => return None, _ => return None,
}; };
let path = ctx.module.find_use_path_prefixed( let path = ctx.module.find_use_path(
ctx.db, ctx.db,
item, item,
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, import_cfg,
ctx.config.prefer_prelude,
)?; )?;
Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item))) Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item)))
}; };

View file

@ -66,11 +66,10 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
enable_self_on_the_fly: true, enable_self_on_the_fly: true,
enable_private_editable: false, enable_private_editable: false,
enable_term_search: true, enable_term_search: true,
term_search_fuel: 200,
full_function_signatures: false, full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments), callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true), snippet_cap: SnippetCap::new(true),
prefer_no_std: false,
prefer_prelude: true,
insert_use: InsertUseConfig { insert_use: InsertUseConfig {
granularity: ImportGranularity::Crate, granularity: ImportGranularity::Crate,
prefix_kind: PrefixKind::Plain, prefix_kind: PrefixKind::Plain,
@ -78,9 +77,10 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
group: true, group: true,
skip_glob_imports: true, skip_glob_imports: true,
}, },
prefer_no_std: false,
prefer_prelude: true,
snippets: Vec::new(), snippets: Vec::new(),
limit: None, limit: None,
term_search_fuel: 200,
}; };
pub(crate) fn completion_list(ra_fixture: &str) -> String { pub(crate) fn completion_list(ra_fixture: &str) -> String {

View file

@ -472,6 +472,47 @@ fn main() {
); );
} }
#[test]
fn trait_completions_handle_associated_types() {
let fixture = r#"
//- /foo.rs crate:foo
pub trait NotInScope {
fn not_in_scope(&self);
}
pub trait Wrapper {
type Inner: NotInScope;
fn inner(&self) -> Self::Inner;
}
//- /main.rs crate:main deps:foo
use foo::Wrapper;
fn completion<T: Wrapper>(whatever: T) {
whatever.inner().$0
}
"#;
check(
fixture,
expect![[r#"
me not_in_scope() (use foo::NotInScope) fn(&self)
"#]],
);
check_edit(
"not_in_scope",
fixture,
r#"
use foo::{NotInScope, Wrapper};
fn completion<T: Wrapper>(whatever: T) {
whatever.inner().not_in_scope()$0
}
"#,
);
}
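
For readers outside the completion machinery, the fixture in `trait_completions_handle_associated_types` relies on the associated-type bound `type Inner: NotInScope` to make the method call type-check. Below is a plain-Rust rendering of the same shape, with toy impls added so it compiles and runs; it mirrors the `TyFingerprint::Unnameable` allowance added in `import_assets` further down in this commit.

trait NotInScope {
    fn not_in_scope(&self) -> &'static str;
}
trait Wrapper {
    type Inner: NotInScope;
    fn inner(&self) -> Self::Inner;
}

struct Unit;
impl NotInScope for Unit {
    fn not_in_scope(&self) -> &'static str { "resolved" }
}
struct W;
impl Wrapper for W {
    type Inner = Unit;
    fn inner(&self) -> Unit { Unit }
}

// The receiver's concrete type is unknown at this call site; only the
// `Inner: NotInScope` bound on the associated type guarantees the method
// exists, which is the implied bound the completion has to honor.
fn completion<T: Wrapper>(whatever: T) -> &'static str {
    whatever.inner().not_in_scope()
}

fn main() {
    assert_eq!(completion(W), "resolved");
}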
#[test] #[test]
fn trait_method_fuzzy_completion_aware_of_unit_type() { fn trait_method_fuzzy_completion_aware_of_unit_type() {
let fixture = r#" let fixture = r#"

View file

@ -1,7 +1,7 @@
//! This module provides functionality for querying callable information about a token. //! This module provides functionality for querying callable information about a token.
use either::Either; use either::Either;
use hir::{Semantics, Type}; use hir::{InFile, Semantics, Type};
use parser::T; use parser::T;
use syntax::{ use syntax::{
ast::{self, HasArgList, HasName}, ast::{self, HasArgList, HasName},
@ -13,7 +13,7 @@ use crate::RootDatabase;
#[derive(Debug)] #[derive(Debug)]
pub struct ActiveParameter { pub struct ActiveParameter {
pub ty: Type, pub ty: Type,
pub pat: Option<Either<ast::SelfParam, ast::Pat>>, pub src: Option<InFile<Either<ast::SelfParam, ast::Param>>>,
} }
impl ActiveParameter { impl ActiveParameter {
@ -22,18 +22,18 @@ impl ActiveParameter {
let (signature, active_parameter) = callable_for_token(sema, token)?; let (signature, active_parameter) = callable_for_token(sema, token)?;
let idx = active_parameter?; let idx = active_parameter?;
let mut params = signature.params(sema.db); let mut params = signature.params();
if idx >= params.len() { if idx >= params.len() {
cov_mark::hit!(too_many_arguments); cov_mark::hit!(too_many_arguments);
return None; return None;
} }
let (pat, ty) = params.swap_remove(idx); let param = params.swap_remove(idx);
Some(ActiveParameter { ty, pat }) Some(ActiveParameter { ty: param.ty().clone(), src: param.source(sema.db) })
} }
pub fn ident(&self) -> Option<ast::Name> { pub fn ident(&self) -> Option<ast::Name> {
self.pat.as_ref().and_then(|param| match param { self.src.as_ref().and_then(|param| match param.value.as_ref().right()?.pat()? {
Either::Right(ast::Pat::IdentPat(ident)) => ident.name(), ast::Pat::IdentPat(ident) => ident.name(),
_ => None, _ => None,
}) })
} }
@ -60,10 +60,7 @@ pub fn callable_for_node(
token: &SyntaxToken, token: &SyntaxToken,
) -> Option<(hir::Callable, Option<usize>)> { ) -> Option<(hir::Callable, Option<usize>)> {
let callable = match calling_node { let callable = match calling_node {
ast::CallableExpr::Call(call) => { ast::CallableExpr::Call(call) => sema.resolve_expr_as_callable(&call.expr()?),
let expr = call.expr()?;
sema.type_of_expr(&expr)?.adjusted().as_callable(sema.db)
}
ast::CallableExpr::MethodCall(call) => sema.resolve_method_call_as_callable(call), ast::CallableExpr::MethodCall(call) => sema.resolve_method_call_as_callable(call),
}?; }?;
let active_param = calling_node.arg_list().map(|arg_list| { let active_param = calling_node.arg_list().map(|arg_list| {

View file

@ -1,9 +1,9 @@
//! Look up accessible paths for items. //! Look up accessible paths for items.
use hir::{ use hir::{
db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, Crate, HasCrate, ItemInNs, db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, Crate, HasCrate, ImportPathConfig,
ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics, ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics,
SemanticsScope, Trait, Type, SemanticsScope, Trait, TyFingerprint, Type,
}; };
use itertools::{EitherOrBoth, Itertools}; use itertools::{EitherOrBoth, Itertools};
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
@ -205,24 +205,22 @@ impl ImportAssets {
pub fn search_for_imports( pub fn search_for_imports(
&self, &self,
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
cfg: ImportPathConfig,
prefix_kind: PrefixKind, prefix_kind: PrefixKind,
prefer_no_std: bool,
prefer_prelude: bool,
) -> impl Iterator<Item = LocatedImport> { ) -> impl Iterator<Item = LocatedImport> {
let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::search_for_imports").entered(); let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::search_for_imports").entered();
self.search_for(sema, Some(prefix_kind), prefer_no_std, prefer_prelude) self.search_for(sema, Some(prefix_kind), cfg)
} }
/// This may return non-absolute paths if a part of the returned path is already imported into scope. /// This may return non-absolute paths if a part of the returned path is already imported into scope.
pub fn search_for_relative_paths( pub fn search_for_relative_paths(
&self, &self,
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
prefer_no_std: bool, cfg: ImportPathConfig,
prefer_prelude: bool,
) -> impl Iterator<Item = LocatedImport> { ) -> impl Iterator<Item = LocatedImport> {
let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::search_for_relative_paths") let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::search_for_relative_paths")
.entered(); .entered();
self.search_for(sema, None, prefer_no_std, prefer_prelude) self.search_for(sema, None, cfg)
} }
/// Requires imports to be by prefix instead of fuzzily. /// Requires imports to be by prefix instead of fuzzily.

@ -259,8 +257,7 @@ impl ImportAssets {
&self, &self,
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
prefixed: Option<PrefixKind>, prefixed: Option<PrefixKind>,
prefer_no_std: bool, cfg: ImportPathConfig,
prefer_prelude: bool,
) -> impl Iterator<Item = LocatedImport> { ) -> impl Iterator<Item = LocatedImport> {
let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::search_for").entered(); let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::search_for").entered();
@ -277,8 +274,7 @@ impl ImportAssets {
item_for_path_search(sema.db, item)?, item_for_path_search(sema.db, item)?,
&self.module_with_candidate, &self.module_with_candidate,
prefixed, prefixed,
prefer_no_std, cfg,
prefer_prelude,
) )
.filter(|path| path.len() > 1) .filter(|path| path.len() > 1)
}; };
@ -549,6 +545,15 @@ fn trait_applicable_items(
let Some(receiver) = trait_candidate.receiver_ty.fingerprint_for_trait_impl() else { let Some(receiver) = trait_candidate.receiver_ty.fingerprint_for_trait_impl() else {
return false; return false;
}; };
// in order to handle implied bounds through an associated type, keep any
// method receiver that matches `TyFingerprint::Unnameable`. this receiver
// won't be in `TraitImpls` anyways, as `TraitImpls` only contains actual
// implementations.
if matches!(receiver, TyFingerprint::Unnameable) {
return true;
}
let definitions_exist_in_trait_crate = db let definitions_exist_in_trait_crate = db
.trait_impls_in_crate(defining_crate_for_trait.into()) .trait_impls_in_crate(defining_crate_for_trait.into())
.has_impls_for_trait_and_self_ty(candidate_trait_id, receiver); .has_impls_for_trait_and_self_ty(candidate_trait_id, receiver);
@ -634,19 +639,12 @@ fn get_mod_path(
item_to_search: ItemInNs, item_to_search: ItemInNs,
module_with_candidate: &Module, module_with_candidate: &Module,
prefixed: Option<PrefixKind>, prefixed: Option<PrefixKind>,
prefer_no_std: bool, cfg: ImportPathConfig,
prefer_prelude: bool,
) -> Option<ModPath> { ) -> Option<ModPath> {
if let Some(prefix_kind) = prefixed { if let Some(prefix_kind) = prefixed {
module_with_candidate.find_use_path_prefixed( module_with_candidate.find_use_path(db, item_to_search, prefix_kind, cfg)
db,
item_to_search,
prefix_kind,
prefer_no_std,
prefer_prelude,
)
} else { } else {
module_with_candidate.find_use_path(db, item_to_search, prefer_no_std, prefer_prelude) module_with_candidate.find_path(db, item_to_search, cfg)
} }
} }

View file

@ -2,7 +2,7 @@
use crate::helpers::mod_path_to_ast; use crate::helpers::mod_path_to_ast;
use either::Either; use either::Either;
use hir::{AsAssocItem, HirDisplay, ModuleDef, SemanticsScope}; use hir::{AsAssocItem, HirDisplay, ImportPathConfig, ModuleDef, SemanticsScope};
use itertools::Itertools; use itertools::Itertools;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use syntax::{ use syntax::{
@ -308,11 +308,12 @@ impl Ctx<'_> {
parent.segment()?.name_ref()?, parent.segment()?.name_ref()?,
) )
.and_then(|trait_ref| { .and_then(|trait_ref| {
let found_path = self.target_module.find_use_path( let cfg =
ImportPathConfig { prefer_no_std: false, prefer_prelude: true };
let found_path = self.target_module.find_path(
self.source_scope.db.upcast(), self.source_scope.db.upcast(),
hir::ModuleDef::Trait(trait_ref), hir::ModuleDef::Trait(trait_ref),
false, cfg,
true,
)?; )?;
match make::ty_path(mod_path_to_ast(&found_path)) { match make::ty_path(mod_path_to_ast(&found_path)) {
ast::Type::PathType(path_ty) => Some(path_ty), ast::Type::PathType(path_ty) => Some(path_ty),
@ -347,12 +348,9 @@ impl Ctx<'_> {
} }
} }
let found_path = self.target_module.find_use_path( let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true };
self.source_scope.db.upcast(), let found_path =
def, self.target_module.find_path(self.source_scope.db.upcast(), def, cfg)?;
false,
true,
)?;
let res = mod_path_to_ast(&found_path).clone_for_update(); let res = mod_path_to_ast(&found_path).clone_for_update();
if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) { if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
if let Some(segment) = res.segment() { if let Some(segment) = res.segment() {
@ -385,11 +383,11 @@ impl Ctx<'_> {
if let Some(adt) = ty.as_adt() { if let Some(adt) = ty.as_adt() {
if let ast::Type::PathType(path_ty) = &ast_ty { if let ast::Type::PathType(path_ty) = &ast_ty {
let found_path = self.target_module.find_use_path( let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true };
let found_path = self.target_module.find_path(
self.source_scope.db.upcast(), self.source_scope.db.upcast(),
ModuleDef::from(adt), ModuleDef::from(adt),
false, cfg,
true,
)?; )?;
if let Some(qual) = mod_path_to_ast(&found_path).qualifier() { if let Some(qual) = mod_path_to_ast(&found_path).qualifier() {

View file

@ -41,6 +41,7 @@ pub enum FormatSpecifier {
Escape, Escape,
} }
// FIXME: Remove this, we can use rustc_format_parse instead
pub fn lex_format_specifiers( pub fn lex_format_specifiers(
string: &ast::String, string: &ast::String,
mut callback: &mut dyn FnMut(TextRange, FormatSpecifier), mut callback: &mut dyn FnMut(TextRange, FormatSpecifier),

View file

@ -1,7 +1,7 @@
//! This diagnostic provides an assist for creating a struct definition from a JSON //! This diagnostic provides an assist for creating a struct definition from a JSON
//! example. //! example.
use hir::{PathResolution, Semantics}; use hir::{ImportPathConfig, PathResolution, Semantics};
use ide_db::{ use ide_db::{
base_db::{FileId, FileRange}, base_db::{FileId, FileRange},
helpers::mod_path_to_ast, helpers::mod_path_to_ast,
@ -142,14 +142,19 @@ pub(crate) fn json_in_items(
ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)), ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)),
}; };
let current_module = semantics_scope.module(); let current_module = semantics_scope.module();
let cfg = ImportPathConfig {
prefer_no_std: config.prefer_no_std,
prefer_prelude: config.prefer_prelude,
};
if !scope_has("Serialize") { if !scope_has("Serialize") {
if let Some(PathResolution::Def(it)) = serialize_resolved { if let Some(PathResolution::Def(it)) = serialize_resolved {
if let Some(it) = current_module.find_use_path_prefixed( if let Some(it) = current_module.find_use_path(
sema.db, sema.db,
it, it,
config.insert_use.prefix_kind, config.insert_use.prefix_kind,
config.prefer_no_std, cfg,
config.prefer_prelude,
) { ) {
insert_use(&scope, mod_path_to_ast(&it), &config.insert_use); insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
} }
@ -157,12 +162,11 @@ pub(crate) fn json_in_items(
} }
if !scope_has("Deserialize") { if !scope_has("Deserialize") {
if let Some(PathResolution::Def(it)) = deserialize_resolved { if let Some(PathResolution::Def(it)) = deserialize_resolved {
if let Some(it) = current_module.find_use_path_prefixed( if let Some(it) = current_module.find_use_path(
sema.db, sema.db,
it, it,
config.insert_use.prefix_kind, config.insert_use.prefix_kind,
config.prefer_no_std, cfg,
config.prefer_prelude,
) { ) {
insert_use(&scope, mod_path_to_ast(&it), &config.insert_use); insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
} }

View file

@ -1,7 +1,7 @@
use either::Either; use either::Either;
use hir::{ use hir::{
db::{ExpandDatabase, HirDatabase}, db::{ExpandDatabase, HirDatabase},
known, AssocItem, HirDisplay, HirFileIdExt, InFile, Type, known, AssocItem, HirDisplay, HirFileIdExt, ImportPathConfig, InFile, Type,
}; };
use ide_db::{ use ide_db::{
assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search, assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search,
@ -122,11 +122,13 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
let expr = (|| -> Option<ast::Expr> { let expr = (|| -> Option<ast::Expr> {
let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?)); let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
let type_path = current_module?.find_use_path( let type_path = current_module?.find_path(
ctx.sema.db, ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?, item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)?; )?;
use_trivial_constructor( use_trivial_constructor(

View file

@ -1,7 +1,7 @@
use hir::{ use hir::{
db::ExpandDatabase, db::ExpandDatabase,
term_search::{term_search, TermSearchConfig, TermSearchCtx}, term_search::{term_search, TermSearchConfig, TermSearchCtx},
ClosureStyle, HirDisplay, ClosureStyle, HirDisplay, ImportPathConfig,
}; };
use ide_db::{ use ide_db::{
assists::{Assist, AssistId, AssistKind, GroupLabel}, assists::{Assist, AssistId, AssistKind, GroupLabel},
@ -59,8 +59,10 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>
path.gen_source_code( path.gen_source_code(
&scope, &scope,
&mut formatter, &mut formatter,
ctx.config.prefer_no_std, ImportPathConfig {
ctx.config.prefer_prelude, prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
) )
.ok() .ok()
}) })
@ -368,6 +370,7 @@ fn main() {
); );
} }
// FIXME
#[test] #[test]
fn local_shadow_fn() { fn local_shadow_fn() {
check_fixes_unordered( check_fixes_unordered(
@ -385,7 +388,7 @@ fn f() {
r#" r#"
fn f() { fn f() {
let f: i32 = 0; let f: i32 = 0;
crate::f() f()
}"#, }"#,
], ],
); );

View file

@ -6,7 +6,7 @@ use crate::{
resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo}, resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo},
SsrMatches, SsrMatches,
}; };
use hir::Semantics; use hir::{ImportPathConfig, Semantics};
use ide_db::{base_db::FileRange, FxHashMap}; use ide_db::{base_db::FileRange, FxHashMap};
use std::{cell::Cell, iter::Peekable}; use std::{cell::Cell, iter::Peekable};
use syntax::{ use syntax::{
@ -663,10 +663,10 @@ impl Match {
.module(); .module();
for (path, resolved_path) in &template.resolved_paths { for (path, resolved_path) in &template.resolved_paths {
if let hir::PathResolution::Def(module_def) = resolved_path.resolution { if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
let mod_path = let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true };
module.find_use_path(sema.db, module_def, false, true).ok_or_else(|| { let mod_path = module.find_path(sema.db, module_def, cfg).ok_or_else(|| {
match_error!("Failed to render template path `{}` at match location") match_error!("Failed to render template path `{}` at match location")
})?; })?;
self.rendered_template_paths.insert(path.clone(), mod_path); self.rendered_template_paths.insert(path.clone(), mod_path);
} }
} }

View file

@ -109,12 +109,12 @@ pub(crate) fn outgoing_calls(
let expr = call.expr()?; let expr = call.expr()?;
let callable = sema.type_of_expr(&expr)?.original.as_callable(db)?; let callable = sema.type_of_expr(&expr)?.original.as_callable(db)?;
match callable.kind() { match callable.kind() {
hir::CallableKind::Function(it) => { hir::CallableKind::Function(it) => it.try_to_nav(db),
let range = expr.syntax().text_range(); hir::CallableKind::TupleEnumVariant(it) => it.try_to_nav(db),
it.try_to_nav(db).zip(Some(range)) hir::CallableKind::TupleStruct(it) => it.try_to_nav(db),
}
_ => None, _ => None,
} }
.zip(Some(expr.syntax().text_range()))
} }
ast::CallableExpr::MethodCall(expr) => { ast::CallableExpr::MethodCall(expr) => {
let range = expr.name_ref()?.syntax().text_range(); let range = expr.name_ref()?.syntax().text_range();

View file

@ -487,19 +487,23 @@ fn get_doc_base_urls(
let system_doc = sysroot let system_doc = sysroot
.map(|sysroot| format!("file:///{sysroot}/share/doc/rust/html/")) .map(|sysroot| format!("file:///{sysroot}/share/doc/rust/html/"))
.and_then(|it| Url::parse(&it).ok()); .and_then(|it| Url::parse(&it).ok());
let krate = def.krate(db);
let channel = krate
.and_then(|krate| db.toolchain_channel(krate.into()))
.unwrap_or(ReleaseChannel::Nightly)
.as_str();
// special case base url of `BuiltinType` to core // special case base url of `BuiltinType` to core
// https://github.com/rust-lang/rust-analyzer/issues/12250 // https://github.com/rust-lang/rust-analyzer/issues/12250
if let Definition::BuiltinType(..) = def { if let Definition::BuiltinType(..) = def {
let web_link = Url::parse("https://doc.rust-lang.org/nightly/core/").ok(); let web_link = Url::parse(&format!("https://doc.rust-lang.org/{channel}/core/")).ok();
let system_link = system_doc.and_then(|it| it.join("core/").ok()); let system_link = system_doc.and_then(|it| it.join("core/").ok());
return (web_link, system_link); return (web_link, system_link);
}; };
let Some(krate) = def.krate(db) else { return Default::default() }; let Some(krate) = krate else { return Default::default() };
let Some(display_name) = krate.display_name(db) else { return Default::default() }; let Some(display_name) = krate.display_name(db) else { return Default::default() };
let crate_data = &db.crate_graph()[krate.into()]; let crate_data = &db.crate_graph()[krate.into()];
let channel = db.toolchain_channel(krate.into()).unwrap_or(ReleaseChannel::Nightly).as_str();
let (web_base, local_base) = match &crate_data.origin { let (web_base, local_base) = match &crate_data.origin {
// std and co do not specify `html_root_url` any longer so we gotta handwrite this ourselves. // std and co do not specify `html_root_url` any longer so we gotta handwrite this ourselves.

View file

@ -1120,4 +1120,30 @@ fn test() {
"#, "#,
); );
} }
#[test]
fn type_hints_async_block() {
check_types(
r#"
//- minicore: future
async fn main() {
let _x = async { 8_i32 };
//^^ impl Future<Output = i32>
}"#,
);
}
#[test]
fn type_hints_async_block_with_tail_return_exp() {
check_types(
r#"
//- minicore: future
async fn main() {
let _x = async {
//^^ impl Future<Output = i32>
return 8_i32;
};
}"#,
);
}
} }
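
The two new inlay-hint tests above expect `impl Future<Output = i32>` for async blocks, both with an implicit tail expression and with an explicit `return`. A standalone check of that typing rule in plain Rust (no executor needed; the blocks are only type-checked, never polled):

use std::future::Future;

// Compiles only if the argument is a future yielding `i32`.
fn assert_future_i32(_fut: impl Future<Output = i32>) {}

fn main() {
    // The tail expression determines the output type.
    assert_future_i32(async { 8_i32 });
    // An explicit `return` inside the async block does the same.
    assert_future_i32(async { return 8_i32; });
}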

View file

@ -24,34 +24,29 @@ pub(super) fn hints(
let (callable, arg_list) = get_callable(sema, &expr)?; let (callable, arg_list) = get_callable(sema, &expr)?;
let hints = callable let hints = callable
.params(sema.db) .params()
.into_iter() .into_iter()
.zip(arg_list.args()) .zip(arg_list.args())
.filter_map(|((param, _ty), arg)| { .filter_map(|(p, arg)| {
// Only annotate hints for expressions that exist in the original file // Only annotate hints for expressions that exist in the original file
let range = sema.original_range_opt(arg.syntax())?; let range = sema.original_range_opt(arg.syntax())?;
let (param_name, name_syntax) = match param.as_ref()? { let source = p.source(sema.db)?;
let (param_name, name_syntax) = match source.value.as_ref() {
Either::Left(pat) => (pat.name()?, pat.name()), Either::Left(pat) => (pat.name()?, pat.name()),
Either::Right(pat) => match pat { Either::Right(param) => match param.pat()? {
ast::Pat::IdentPat(it) => (it.name()?, it.name()), ast::Pat::IdentPat(it) => (it.name()?, it.name()),
_ => return None, _ => return None,
}, },
}; };
// make sure the file is cached so we can map out of macros
sema.parse_or_expand(source.file_id);
Some((name_syntax, param_name, arg, range)) Some((name_syntax, param_name, arg, range))
}) })
.filter(|(_, param_name, arg, _)| { .filter(|(_, param_name, arg, _)| {
!should_hide_param_name_hint(sema, &callable, &param_name.text(), arg) !should_hide_param_name_hint(sema, &callable, &param_name.text(), arg)
}) })
.map(|(param, param_name, _, FileRange { range, .. })| { .map(|(param, param_name, _, FileRange { range, .. })| {
let mut linked_location = None; let linked_location = param.and_then(|name| sema.original_range_opt(name.syntax()));
if let Some(name) = param {
if let hir::CallableKind::Function(f) = callable.kind() {
// assert the file is cached so we can map out of macros
if sema.source(f).is_some() {
linked_location = sema.original_range_opt(name.syntax());
}
}
}
let colon = if config.render_colons { ":" } else { "" }; let colon = if config.render_colons { ":" } else { "" };
let label = let label =

View file

@ -79,7 +79,7 @@ impl RunnableKind {
impl Runnable { impl Runnable {
// test package::module::testname // test package::module::testname
pub fn label(&self, target: Option<String>) -> String { pub fn label(&self, target: Option<&str>) -> String {
match &self.kind { match &self.kind {
RunnableKind::Test { test_id, .. } => format!("test {test_id}"), RunnableKind::Test { test_id, .. } => format!("test {test_id}"),
RunnableKind::TestMod { path } => format!("test-mod {path}"), RunnableKind::TestMod { path } => format!("test-mod {path}"),

View file

@ -201,7 +201,21 @@ fn signature_help_for_call(
variant.name(db).display(db) variant.name(db).display(db)
); );
} }
hir::CallableKind::Closure | hir::CallableKind::FnPtr | hir::CallableKind::Other => (), hir::CallableKind::Closure(closure) => {
let fn_trait = closure.fn_trait(db);
format_to!(res.signature, "impl {fn_trait}")
}
hir::CallableKind::FnPtr => format_to!(res.signature, "fn"),
hir::CallableKind::FnImpl(fn_trait) => match callable.ty().as_adt() {
// FIXME: Render docs of the concrete trait impl function
Some(adt) => format_to!(
res.signature,
"<{} as {fn_trait}>::{}",
adt.name(db).display(db),
fn_trait.function_name()
),
None => format_to!(res.signature, "impl {fn_trait}"),
},
} }
res.signature.push('('); res.signature.push('(');
@ -210,12 +224,15 @@ fn signature_help_for_call(
format_to!(res.signature, "{}", self_param.display(db)) format_to!(res.signature, "{}", self_param.display(db))
} }
let mut buf = String::new(); let mut buf = String::new();
for (idx, (pat, ty)) in callable.params(db).into_iter().enumerate() { for (idx, p) in callable.params().into_iter().enumerate() {
buf.clear(); buf.clear();
if let Some(pat) = pat { if let Some(param) = p.source(sema.db) {
match pat { match param.value {
Either::Left(_self) => format_to!(buf, "self: "), Either::Right(param) => match param.pat() {
Either::Right(pat) => format_to!(buf, "{}: ", pat), Some(pat) => format_to!(buf, "{}: ", pat),
None => format_to!(buf, "?: "),
},
Either::Left(_) => format_to!(buf, "self: "),
} }
} }
// APITs (argument position `impl Trait`s) are inferred as {unknown} as the user is // APITs (argument position `impl Trait`s) are inferred as {unknown} as the user is
@ -223,9 +240,9 @@ fn signature_help_for_call(
// In that case, fall back to render definitions of the respective parameters. // In that case, fall back to render definitions of the respective parameters.
// This is overly conservative: we do not substitute known type vars // This is overly conservative: we do not substitute known type vars
// (see FIXME in tests::impl_trait) and falling back on any unknowns. // (see FIXME in tests::impl_trait) and falling back on any unknowns.
match (ty.contains_unknown(), fn_params.as_deref()) { match (p.ty().contains_unknown(), fn_params.as_deref()) {
(true, Some(fn_params)) => format_to!(buf, "{}", fn_params[idx].ty().display(db)), (true, Some(fn_params)) => format_to!(buf, "{}", fn_params[idx].ty().display(db)),
_ => format_to!(buf, "{}", ty.display(db)), _ => format_to!(buf, "{}", p.ty().display(db)),
} }
res.push_call_param(&buf); res.push_call_param(&buf);
} }
@ -242,9 +259,9 @@ fn signature_help_for_call(
render(func.ret_type(db)) render(func.ret_type(db))
} }
hir::CallableKind::Function(_) hir::CallableKind::Function(_)
| hir::CallableKind::Closure | hir::CallableKind::Closure(_)
| hir::CallableKind::FnPtr | hir::CallableKind::FnPtr
| hir::CallableKind::Other => render(callable.return_type()), | hir::CallableKind::FnImpl(_) => render(callable.return_type()),
hir::CallableKind::TupleStruct(_) | hir::CallableKind::TupleEnumVariant(_) => {} hir::CallableKind::TupleStruct(_) | hir::CallableKind::TupleEnumVariant(_) => {}
} }
Some(res) Some(res)
@ -1345,15 +1362,43 @@ fn test() { S.foo($0); }
r#" r#"
struct S; struct S;
fn foo(s: S) -> i32 { 92 } fn foo(s: S) -> i32 { 92 }
fn main() {
let _move = S;
(|s| {{_move}; foo(s)})($0)
}
"#,
expect![[r#"
impl FnOnce(s: S) -> i32
^^^^
"#]],
);
check(
r#"
struct S;
fn foo(s: S) -> i32 { 92 }
fn main() { fn main() {
(|s| foo(s))($0) (|s| foo(s))($0)
} }
"#, "#,
expect![[r#" expect![[r#"
(s: S) -> i32 impl Fn(s: S) -> i32
^^^^ ^^^^
"#]], "#]],
) );
check(
r#"
struct S;
fn foo(s: S) -> i32 { 92 }
fn main() {
let mut mutate = 0;
(|s| { mutate = 1; foo(s) })($0)
}
"#,
expect![[r#"
impl FnMut(s: S) -> i32
^^^^
"#]],
);
} }
#[test] #[test]
@ -1383,12 +1428,81 @@ fn main(f: fn(i32, f64) -> char) {
} }
"#, "#,
expect![[r#" expect![[r#"
(i32, f64) -> char fn(i32, f64) -> char
--- ^^^ --- ^^^
"#]], "#]],
) )
} }
#[test]
fn call_info_for_fn_impl() {
check(
r#"
struct S;
impl core::ops::FnOnce<(i32, f64)> for S {
type Output = char;
}
impl core::ops::FnMut<(i32, f64)> for S {}
impl core::ops::Fn<(i32, f64)> for S {}
fn main() {
S($0);
}
"#,
expect![[r#"
<S as Fn>::call(i32, f64) -> char
^^^ ---
"#]],
);
check(
r#"
struct S;
impl core::ops::FnOnce<(i32, f64)> for S {
type Output = char;
}
impl core::ops::FnMut<(i32, f64)> for S {}
impl core::ops::Fn<(i32, f64)> for S {}
fn main() {
S(1, $0);
}
"#,
expect![[r#"
<S as Fn>::call(i32, f64) -> char
--- ^^^
"#]],
);
check(
r#"
struct S;
impl core::ops::FnOnce<(i32, f64)> for S {
type Output = char;
}
impl core::ops::FnOnce<(char, char)> for S {
type Output = f64;
}
fn main() {
S($0);
}
"#,
expect![""],
);
check(
r#"
struct S;
impl core::ops::FnOnce<(i32, f64)> for S {
type Output = char;
}
impl core::ops::FnOnce<(char, char)> for S {
type Output = f64;
}
fn main() {
// FIXME: The ide layer loses the calling info here so we get an ambiguous trait solve result
S(0i32, $0);
}
"#,
expect![""],
);
}
#[test] #[test]
fn call_info_for_unclosed_call() { fn call_info_for_unclosed_call() {
check( check(
@ -1794,19 +1908,19 @@ fn f<F: FnOnce(u8, u16) -> i32>(f: F) {
} }
"#, "#,
expect![[r#" expect![[r#"
(u8, u16) -> i32 impl FnOnce(u8, u16) -> i32
^^ --- ^^ ---
"#]], "#]],
); );
check( check(
r#" r#"
fn f<T, F: FnOnce(&T, u16) -> &T>(f: F) { fn f<T, F: FnMut(&T, u16) -> &T>(f: F) {
f($0) f($0)
} }
"#, "#,
expect![[r#" expect![[r#"
(&T, u16) -> &T impl FnMut(&T, u16) -> &T
^^ --- ^^ ---
"#]], "#]],
); );
} }
@ -1826,7 +1940,7 @@ fn take<C, Error>(
} }
"#, "#,
expect![[r#" expect![[r#"
() -> i32 impl Fn() -> i32
"#]], "#]],
); );
} }
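// Editor's illustrative sketch, not part of the diff: the new signature-help labels
// (`impl Fn...` / `impl FnMut...` / `impl FnOnce...`) exercised by the tests above follow
// the standard closure-capture rules. Helper names below are made up for the example.
fn call_once(f: impl FnOnce() -> i32) -> i32 { f() }
fn call_mut(mut f: impl FnMut() -> i32) -> i32 { f() }
fn call(f: impl Fn() -> i32) -> i32 { f() }

fn main() {
    let s = String::from("captured");
    // Moves `s` out when called, so this closure is only FnOnce.
    let once = move || { drop(s); 1 };

    // Mutates a captured variable, so this closure is FnMut but not Fn.
    let mut counter = 0;
    let counting = || { counter += 1; counter };

    // Only reads its environment, so this closure is Fn.
    let x = 41;
    let reading = move || x + 1;

    assert_eq!(call_once(once), 1);
    assert_eq!(call_mut(counting), 1);
    assert_eq!(call(reading), 42);
}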

View file

@ -9,8 +9,9 @@ pub(super) fn highlight_escape_string<T: IsString>(
string: &T, string: &T,
start: TextSize, start: TextSize,
) { ) {
let text = string.text();
string.escaped_char_ranges(&mut |piece_range, char| { string.escaped_char_ranges(&mut |piece_range, char| {
if string.text()[piece_range.start().into()..].starts_with('\\') { if text[piece_range.start().into()..].starts_with('\\') {
let highlight = match char { let highlight = match char {
Ok(_) => HlTag::EscapeSequence, Ok(_) => HlTag::EscapeSequence,
Err(_) => HlTag::InvalidEscapeSequence, Err(_) => HlTag::InvalidEscapeSequence,
@ -33,17 +34,15 @@ pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char, start:
} }
let text = char.text(); let text = char.text();
if !text.starts_with('\'') || !text.ends_with('\'') { let Some(text) = text
.strip_prefix('\'')
.and_then(|it| it.strip_suffix('\''))
.filter(|it| it.starts_with('\\'))
else {
return; return;
} };
let text = &text[1..text.len() - 1]; let range = TextRange::at(start + TextSize::from(1), TextSize::from(text.len() as u32));
if !text.starts_with('\\') {
return;
}
let range =
TextRange::new(start + TextSize::from(1), start + TextSize::from(text.len() as u32 + 1));
stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None }) stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
} }
@ -54,16 +53,14 @@ pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte, start:
} }
let text = byte.text(); let text = byte.text();
if !text.starts_with("b'") || !text.ends_with('\'') { let Some(text) = text
.strip_prefix("b'")
.and_then(|it| it.strip_suffix('\''))
.filter(|it| it.starts_with('\\'))
else {
return; return;
} };
let text = &text[2..text.len() - 1]; let range = TextRange::at(start + TextSize::from(2), TextSize::from(text.len() as u32));
if !text.starts_with('\\') {
return;
}
let range =
TextRange::new(start + TextSize::from(2), start + TextSize::from(text.len() as u32 + 2));
stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None }) stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
} }
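// Editor's illustrative sketch, not part of the diff: the let-else chains introduced above
// replace manual starts_with/ends_with checks plus index slicing with one fallible
// pipeline. Std-only, with a hypothetical helper name; only the string pattern is mirrored.
fn escaped_char_body(text: &str) -> Option<&str> {
    // Old shape: check starts_with('\'') and ends_with('\''), slice [1..len - 1],
    // then check starts_with('\\'). New shape: strip, strip, filter.
    text.strip_prefix('\'')
        .and_then(|it| it.strip_suffix('\''))
        .filter(|it| it.starts_with('\\'))
}

fn main() {
    assert_eq!(escaped_char_body(r"'\n'"), Some(r"\n")); // an escape sequence
    assert_eq!(escaped_char_body("'a'"), None);          // quoted, but not an escape
    assert_eq!(escaped_char_body(r"\n"), None);          // not quoted at all
}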

View file

@ -65,7 +65,7 @@ impl WorkspaceBuildScripts {
allowed_features: &FxHashSet<String>, allowed_features: &FxHashSet<String>,
manifest_path: &ManifestPath, manifest_path: &ManifestPath,
toolchain: Option<&Version>, toolchain: Option<&Version>,
sysroot: Option<&Sysroot>, sysroot: &Sysroot,
) -> io::Result<Command> { ) -> io::Result<Command> {
const RUST_1_75: Version = Version::new(1, 75, 0); const RUST_1_75: Version = Version::new(1, 75, 0);
let mut cmd = match config.run_build_script_command.as_deref() { let mut cmd = match config.run_build_script_command.as_deref() {
@ -75,7 +75,7 @@ impl WorkspaceBuildScripts {
cmd cmd
} }
_ => { _ => {
let mut cmd = Sysroot::tool(sysroot, Tool::Cargo); let mut cmd = sysroot.tool(Tool::Cargo);
cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
cmd.args(&config.extra_args); cmd.args(&config.extra_args);
@ -149,7 +149,7 @@ impl WorkspaceBuildScripts {
workspace: &CargoWorkspace, workspace: &CargoWorkspace,
progress: &dyn Fn(String), progress: &dyn Fn(String),
toolchain: Option<&Version>, toolchain: Option<&Version>,
sysroot: Option<&Sysroot>, sysroot: &Sysroot,
) -> io::Result<WorkspaceBuildScripts> { ) -> io::Result<WorkspaceBuildScripts> {
let current_dir = match &config.invocation_location { let current_dir = match &config.invocation_location {
InvocationLocation::Root(root) if config.run_build_script_command.is_some() => { InvocationLocation::Root(root) if config.run_build_script_command.is_some() => {
@ -195,7 +195,7 @@ impl WorkspaceBuildScripts {
// This is not gonna be used anyways, so just construct a dummy here // This is not gonna be used anyways, so just construct a dummy here
&ManifestPath::try_from(workspace_root.clone()).unwrap(), &ManifestPath::try_from(workspace_root.clone()).unwrap(),
None, None,
None, &Sysroot::empty(),
)?; )?;
// NB: Cargo.toml could have been modified between `cargo metadata` and // NB: Cargo.toml could have been modified between `cargo metadata` and
// `cargo check`. We shouldn't assume that package ids we see here are // `cargo check`. We shouldn't assume that package ids we see here are
@ -412,7 +412,7 @@ impl WorkspaceBuildScripts {
rustc: &CargoWorkspace, rustc: &CargoWorkspace,
current_dir: &AbsPath, current_dir: &AbsPath,
extra_env: &FxHashMap<String, String>, extra_env: &FxHashMap<String, String>,
sysroot: Option<&Sysroot>, sysroot: &Sysroot,
) -> Self { ) -> Self {
let mut bs = WorkspaceBuildScripts::default(); let mut bs = WorkspaceBuildScripts::default();
for p in rustc.packages() { for p in rustc.packages() {
@ -420,7 +420,7 @@ impl WorkspaceBuildScripts {
} }
let res = (|| { let res = (|| {
let target_libdir = (|| { let target_libdir = (|| {
let mut cargo_config = Sysroot::tool(sysroot, Tool::Cargo); let mut cargo_config = sysroot.tool(Tool::Cargo);
cargo_config.envs(extra_env); cargo_config.envs(extra_env);
cargo_config cargo_config
.current_dir(current_dir) .current_dir(current_dir)
@ -429,7 +429,7 @@ impl WorkspaceBuildScripts {
if let Ok(it) = utf8_stdout(cargo_config) { if let Ok(it) = utf8_stdout(cargo_config) {
return Ok(it); return Ok(it);
} }
let mut cmd = Sysroot::tool(sysroot, Tool::Rustc); let mut cmd = sysroot.tool(Tool::Rustc);
cmd.envs(extra_env); cmd.envs(extra_env);
cmd.args(["--print", "target-libdir"]); cmd.args(["--print", "target-libdir"]);
utf8_stdout(cmd) utf8_stdout(cmd)
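// Editor's illustrative sketch, not part of the diff: assuming `utf8_stdout` behaves roughly
// like this — spawn the prepared Command, fail on a non-zero exit status, and decode stdout
// as UTF-8. A minimal std-only stand-in:
use std::io;
use std::process::Command;

fn utf8_stdout(mut cmd: Command) -> io::Result<String> {
    let output = cmd.output()?;
    if !output.status.success() {
        return Err(io::Error::new(
            io::ErrorKind::Other,
            format!("{cmd:?} failed: {}", output.status),
        ));
    }
    String::from_utf8(output.stdout)
        .map(|it| it.trim().to_owned())
        .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))
}

fn main() -> io::Result<()> {
    // Mirrors the `--print target-libdir` query issued above.
    let mut rustc = Command::new("rustc");
    rustc.args(["--print", "target-libdir"]);
    println!("{}", utf8_stdout(rustc)?);
    Ok(())
}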

View file

@ -258,12 +258,12 @@ impl CargoWorkspace {
cargo_toml: &ManifestPath, cargo_toml: &ManifestPath,
current_dir: &AbsPath, current_dir: &AbsPath,
config: &CargoConfig, config: &CargoConfig,
sysroot: Option<&Sysroot>, sysroot: &Sysroot,
progress: &dyn Fn(String), progress: &dyn Fn(String),
) -> anyhow::Result<cargo_metadata::Metadata> { ) -> anyhow::Result<cargo_metadata::Metadata> {
let targets = find_list_of_build_targets(config, cargo_toml, sysroot); let targets = find_list_of_build_targets(config, cargo_toml, sysroot);
let cargo = Sysroot::tool(sysroot, Tool::Cargo); let cargo = sysroot.tool(Tool::Cargo);
let mut meta = MetadataCommand::new(); let mut meta = MetadataCommand::new();
meta.cargo_path(cargo.get_program()); meta.cargo_path(cargo.get_program());
cargo.get_envs().for_each(|(var, val)| _ = meta.env(var, val.unwrap_or_default())); cargo.get_envs().for_each(|(var, val)| _ = meta.env(var, val.unwrap_or_default()));
@ -536,7 +536,7 @@ impl CargoWorkspace {
fn find_list_of_build_targets( fn find_list_of_build_targets(
config: &CargoConfig, config: &CargoConfig,
cargo_toml: &ManifestPath, cargo_toml: &ManifestPath,
sysroot: Option<&Sysroot>, sysroot: &Sysroot,
) -> Vec<String> { ) -> Vec<String> {
if let Some(target) = &config.target { if let Some(target) = &config.target {
return [target.into()].to_vec(); return [target.into()].to_vec();
@ -553,9 +553,9 @@ fn find_list_of_build_targets(
fn rustc_discover_host_triple( fn rustc_discover_host_triple(
cargo_toml: &ManifestPath, cargo_toml: &ManifestPath,
extra_env: &FxHashMap<String, String>, extra_env: &FxHashMap<String, String>,
sysroot: Option<&Sysroot>, sysroot: &Sysroot,
) -> Option<String> { ) -> Option<String> {
let mut rustc = Sysroot::tool(sysroot, Tool::Rustc); let mut rustc = sysroot.tool(Tool::Rustc);
rustc.envs(extra_env); rustc.envs(extra_env);
rustc.current_dir(cargo_toml.parent()).arg("-vV"); rustc.current_dir(cargo_toml.parent()).arg("-vV");
tracing::debug!("Discovering host platform by {:?}", rustc); tracing::debug!("Discovering host platform by {:?}", rustc);
@ -581,9 +581,9 @@ fn rustc_discover_host_triple(
fn cargo_config_build_target( fn cargo_config_build_target(
cargo_toml: &ManifestPath, cargo_toml: &ManifestPath,
extra_env: &FxHashMap<String, String>, extra_env: &FxHashMap<String, String>,
sysroot: Option<&Sysroot>, sysroot: &Sysroot,
) -> Vec<String> { ) -> Vec<String> {
let mut cargo_config = Sysroot::tool(sysroot, Tool::Cargo); let mut cargo_config = sysroot.tool(Tool::Cargo);
cargo_config.envs(extra_env); cargo_config.envs(extra_env);
cargo_config cargo_config
.current_dir(cargo_toml.parent()) .current_dir(cargo_toml.parent())
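// Editor's illustrative sketch, not part of the diff: `rustc_discover_host_triple` above
// shells out to `rustc -vV` and picks out the `host:` line. A minimal std-only version of
// that parsing step:
use std::process::Command;

fn host_triple() -> Option<String> {
    let out = Command::new("rustc").arg("-vV").output().ok()?;
    let stdout = String::from_utf8(out.stdout).ok()?;
    stdout
        .lines()
        .find_map(|line| line.strip_prefix("host: "))
        .map(|host| host.trim().to_owned())
}

fn main() {
    // Prints e.g. Some("x86_64-unknown-linux-gnu") when rustc is installed.
    println!("host triple: {:?}", host_triple());
}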

View file

@ -62,9 +62,9 @@ pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: Targe
pub(crate) fn cargo_config_env( pub(crate) fn cargo_config_env(
manifest: &ManifestPath, manifest: &ManifestPath,
extra_env: &FxHashMap<String, String>, extra_env: &FxHashMap<String, String>,
sysroot: Option<&Sysroot>, sysroot: &Sysroot,
) -> FxHashMap<String, String> { ) -> FxHashMap<String, String> {
let mut cargo_config = Sysroot::tool(sysroot, Tool::Cargo); let mut cargo_config = sysroot.tool(Tool::Cargo);
cargo_config.envs(extra_env); cargo_config.envs(extra_env);
cargo_config cargo_config
.current_dir(manifest.parent()) .current_dir(manifest.parent())

View file

@ -10,10 +10,10 @@ use crate::{cfg::CfgFlag, utf8_stdout, ManifestPath, Sysroot};
pub(crate) enum RustcCfgConfig<'a> { pub(crate) enum RustcCfgConfig<'a> {
/// Use `rustc --print cfg`, either from with the binary from the sysroot or by discovering via /// Use `rustc --print cfg`, either from with the binary from the sysroot or by discovering via
/// [`toolchain::rustc`]. /// [`toolchain::rustc`].
Rustc(Option<&'a Sysroot>), Rustc(&'a Sysroot),
/// Use `cargo --print cfg`, either from with the binary from the sysroot or by discovering via /// Use `cargo --print cfg`, either from with the binary from the sysroot or by discovering via
/// [`toolchain::cargo`]. /// [`toolchain::cargo`].
Cargo(Option<&'a Sysroot>, &'a ManifestPath), Cargo(&'a Sysroot, &'a ManifestPath),
} }
pub(crate) fn get( pub(crate) fn get(
@ -65,7 +65,7 @@ fn get_rust_cfgs(
) -> anyhow::Result<String> { ) -> anyhow::Result<String> {
let sysroot = match config { let sysroot = match config {
RustcCfgConfig::Cargo(sysroot, cargo_toml) => { RustcCfgConfig::Cargo(sysroot, cargo_toml) => {
let mut cmd = Sysroot::tool(sysroot, Tool::Cargo); let mut cmd = sysroot.tool(Tool::Cargo);
cmd.envs(extra_env); cmd.envs(extra_env);
cmd.current_dir(cargo_toml.parent()) cmd.current_dir(cargo_toml.parent())
@ -86,7 +86,7 @@ fn get_rust_cfgs(
RustcCfgConfig::Rustc(sysroot) => sysroot, RustcCfgConfig::Rustc(sysroot) => sysroot,
}; };
let mut cmd = Sysroot::tool(sysroot, Tool::Rustc); let mut cmd = sysroot.tool(Tool::Rustc);
cmd.envs(extra_env); cmd.envs(extra_env);
cmd.args(["--print", "cfg", "-O"]); cmd.args(["--print", "cfg", "-O"]);
if let Some(target) = target { if let Some(target) = target {
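// Editor's illustrative sketch, not part of the diff: `rustc --print cfg` emits one cfg per
// line, either a bare atom (`unix`) or a key="value" pair (`target_os="linux"`). The enum
// and parser below are simplified stand-ins for the real CfgFlag handling.
#[derive(Debug, PartialEq)]
enum CfgFlag {
    Atom(String),
    KeyValue { key: String, value: String },
}

fn parse_cfg_line(line: &str) -> CfgFlag {
    match line.split_once('=') {
        Some((key, value)) => CfgFlag::KeyValue {
            key: key.to_owned(),
            value: value.trim_matches('"').to_owned(),
        },
        None => CfgFlag::Atom(line.to_owned()),
    }
}

fn main() {
    assert_eq!(parse_cfg_line("unix"), CfgFlag::Atom("unix".into()));
    assert_eq!(
        parse_cfg_line(r#"target_os="linux""#),
        CfgFlag::KeyValue { key: "target_os".into(), value: "linux".into() }
    );
}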

View file

@ -4,7 +4,7 @@
//! but we can't process `.rlib` and need source code instead. The source code //! but we can't process `.rlib` and need source code instead. The source code
//! is typically installed with `rustup component add rust-src` command. //! is typically installed with `rustup component add rust-src` command.
use std::{env, fs, ops, process::Command, sync::Arc}; use std::{env, fs, ops, process::Command};
use anyhow::{format_err, Result}; use anyhow::{format_err, Result};
use base_db::CrateName; use base_db::CrateName;
@ -16,30 +16,19 @@ use toolchain::{probe_for_binary, Tool};
use crate::{utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath}; use crate::{utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath};
#[derive(Debug, Clone)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct Sysroot { pub struct Sysroot {
root: AbsPathBuf, root: Option<AbsPathBuf>,
src_root: Option<Result<AbsPathBuf, Arc<anyhow::Error>>>, src_root: Option<AbsPathBuf>,
mode: SysrootMode, mode: SysrootMode,
} error: Option<String>,
impl Eq for Sysroot {}
impl PartialEq for Sysroot {
fn eq(&self, other: &Self) -> bool {
self.root == other.root
&& self.mode == other.mode
&& match (&self.src_root, &other.src_root) {
(Some(Ok(this)), Some(Ok(other))) => this == other,
(None, None) | (Some(Err(_)), Some(Err(_))) => true,
_ => false,
}
}
} }
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) enum SysrootMode { pub(crate) enum SysrootMode {
Workspace(CargoWorkspace), Workspace(CargoWorkspace),
Stitched(Stitched), Stitched(Stitched),
Empty,
} }
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
@ -89,70 +78,40 @@ pub(crate) struct SysrootCrateData {
} }
impl Sysroot { impl Sysroot {
pub const fn empty() -> Sysroot {
Sysroot { root: None, src_root: None, mode: SysrootMode::Empty, error: None }
}
/// Returns sysroot "root" directory, where `bin/`, `etc/`, `lib/`, `libexec/` /// Returns sysroot "root" directory, where `bin/`, `etc/`, `lib/`, `libexec/`
/// subfolder live, like: /// subfolder live, like:
/// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu` /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu`
pub fn root(&self) -> &AbsPath { pub fn root(&self) -> Option<&AbsPath> {
&self.root self.root.as_deref()
} }
/// Returns the sysroot "source" directory, where stdlib sources are located, like: /// Returns the sysroot "source" directory, where stdlib sources are located, like:
/// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library` /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library`
pub fn src_root(&self) -> Option<&AbsPath> { pub fn src_root(&self) -> Option<&AbsPath> {
self.src_root.as_ref()?.as_deref().ok() self.src_root.as_deref()
} }
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
match &self.mode { match &self.mode {
SysrootMode::Workspace(ws) => ws.packages().next().is_none(), SysrootMode::Workspace(ws) => ws.packages().next().is_none(),
SysrootMode::Stitched(stitched) => stitched.crates.is_empty(), SysrootMode::Stitched(stitched) => stitched.crates.is_empty(),
SysrootMode::Empty => true,
} }
} }
pub fn loading_warning(&self) -> Option<String> { pub fn error(&self) -> Option<&str> {
let src_root = match &self.src_root { self.error.as_deref()
None => return Some(format!("sysroot at `{}` has no library sources", self.root)),
Some(Ok(src_root)) => src_root,
Some(Err(e)) => return Some(e.to_string()),
};
let has_core = match &self.mode {
SysrootMode::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
SysrootMode::Stitched(stitched) => stitched.by_name("core").is_some(),
};
if !has_core {
let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
" (`RUST_SRC_PATH` might be incorrect, try unsetting it)"
} else {
" try running `rustup component add rust-src` to possible fix this"
};
Some(format!("could not find libcore in loaded sysroot at `{}`{var_note}", src_root,))
} else {
None
}
}
pub fn check_has_core(&self) -> Result<(), String> {
let Some(Ok(src_root)) = &self.src_root else { return Ok(()) };
let has_core = match &self.mode {
SysrootMode::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
SysrootMode::Stitched(stitched) => stitched.by_name("core").is_some(),
};
if !has_core {
let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
" (`RUST_SRC_PATH` might be incorrect, try unsetting it)"
} else {
" try running `rustup component add rust-src` to possible fix this"
};
Err(format!("could not find libcore in loaded sysroot at `{}`{var_note}", src_root,))
} else {
Ok(())
}
} }
pub fn num_packages(&self) -> usize { pub fn num_packages(&self) -> usize {
match &self.mode { match &self.mode {
SysrootMode::Workspace(ws) => ws.packages().count(), SysrootMode::Workspace(ws) => ws.packages().count(),
SysrootMode::Stitched(c) => c.crates().count(), SysrootMode::Stitched(c) => c.crates().count(),
SysrootMode::Empty => 0,
} }
} }
@ -168,63 +127,50 @@ impl Sysroot {
dir: &AbsPath, dir: &AbsPath,
extra_env: &FxHashMap<String, String>, extra_env: &FxHashMap<String, String>,
metadata: bool, metadata: bool,
) -> Result<Sysroot> { ) -> Sysroot {
tracing::debug!("discovering sysroot for {dir}"); let sysroot_dir = discover_sysroot_dir(dir, extra_env);
let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; let sysroot_src_dir = sysroot_dir.as_ref().ok().map(|sysroot_dir| {
let sysroot_src_dir = discover_sysroot_src_dir_or_add_component(sysroot_dir, dir, extra_env)
discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env); });
Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), metadata)) Sysroot::load_core_check(Some(sysroot_dir), sysroot_src_dir, metadata)
}
pub fn discover_no_source(
dir: &AbsPath,
extra_env: &FxHashMap<String, String>,
) -> Result<Sysroot> {
tracing::debug!("discovering sysroot for {dir}");
let sysroot_dir = discover_sysroot_dir(dir, extra_env)?;
let sysroot_src_dir =
discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env);
Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), false))
} }
pub fn discover_with_src_override( pub fn discover_with_src_override(
current_dir: &AbsPath, current_dir: &AbsPath,
extra_env: &FxHashMap<String, String>, extra_env: &FxHashMap<String, String>,
src: AbsPathBuf, sysroot_src_dir: AbsPathBuf,
metadata: bool, metadata: bool,
) -> Result<Sysroot> { ) -> Sysroot {
tracing::debug!("discovering sysroot for {current_dir}"); let sysroot_dir = discover_sysroot_dir(current_dir, extra_env);
let sysroot_dir = discover_sysroot_dir(current_dir, extra_env)?; Sysroot::load_core_check(Some(sysroot_dir), Some(Ok(sysroot_src_dir)), metadata)
Ok(Sysroot::load(sysroot_dir, Some(Ok(src)), metadata)) }
pub fn discover_sysroot_src_dir(sysroot_dir: AbsPathBuf, metadata: bool) -> Sysroot {
let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir)
.ok_or_else(|| format_err!("can't find standard library sources in {sysroot_dir}"));
Sysroot::load_core_check(Some(Ok(sysroot_dir)), Some(sysroot_src_dir), metadata)
} }
pub fn discover_rustc_src(&self) -> Option<ManifestPath> { pub fn discover_rustc_src(&self) -> Option<ManifestPath> {
get_rustc_src(&self.root) get_rustc_src(self.root()?)
}
pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf, metadata: bool) -> Result<Sysroot> {
let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| {
format_err!("can't load standard library from sysroot path {sysroot_dir}")
});
Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), metadata))
} }
/// Returns a command to run a tool preferring the cargo proxies if the sysroot exists. /// Returns a command to run a tool preferring the cargo proxies if the sysroot exists.
pub fn tool(sysroot: Option<&Self>, tool: Tool) -> Command { pub fn tool(&self, tool: Tool) -> Command {
match sysroot { match self.root() {
Some(sysroot) => { Some(root) => {
// special case rustc, we can look that up directly in the sysroot's bin folder // special case rustc, we can look that up directly in the sysroot's bin folder
// as it should never invoke another cargo binary // as it should never invoke another cargo binary
if let Tool::Rustc = tool { if let Tool::Rustc = tool {
if let Some(path) = if let Some(path) =
probe_for_binary(sysroot.root.join("bin").join(Tool::Rustc.name()).into()) probe_for_binary(root.join("bin").join(Tool::Rustc.name()).into())
{ {
return Command::new(path); return Command::new(path);
} }
} }
let mut cmd = Command::new(tool.prefer_proxy()); let mut cmd = Command::new(tool.prefer_proxy());
cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(&sysroot.root)); cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(root));
cmd cmd
} }
_ => Command::new(tool.path()), _ => Command::new(tool.path()),
@ -232,35 +178,89 @@ impl Sysroot {
} }
pub fn discover_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> { pub fn discover_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
let Some(root) = self.root() else {
return Err(anyhow::format_err!("no sysroot",));
};
["libexec", "lib"] ["libexec", "lib"]
.into_iter() .into_iter()
.map(|segment| self.root().join(segment).join("rust-analyzer-proc-macro-srv")) .map(|segment| root.join(segment).join("rust-analyzer-proc-macro-srv"))
.find_map(|server_path| probe_for_binary(server_path.into())) .find_map(|server_path| probe_for_binary(server_path.into()))
.map(AbsPathBuf::assert) .map(AbsPathBuf::assert)
.ok_or_else(|| { .ok_or_else(|| {
anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", self.root()) anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", root)
}) })
} }
pub fn load( pub fn load(
sysroot_dir: AbsPathBuf, sysroot_dir: Option<AbsPathBuf>,
sysroot_src_dir: Option<AbsPathBuf>,
metadata: bool,
) -> Sysroot {
Self::load_core_check(sysroot_dir.map(Ok), sysroot_src_dir.map(Ok), metadata)
}
fn load_core_check(
sysroot_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>, sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
metadata: bool, metadata: bool,
) -> Sysroot { ) -> Sysroot {
let mut sysroot = Self::load_(sysroot_dir, sysroot_src_dir, metadata);
if sysroot.error.is_none() {
if let Some(src_root) = &sysroot.src_root {
let has_core = match &sysroot.mode {
SysrootMode::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
SysrootMode::Stitched(stitched) => stitched.by_name("core").is_some(),
SysrootMode::Empty => true,
};
if !has_core {
let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
" (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)"
} else {
", try running `rustup component add rust-src` to possibly fix this"
};
sysroot.error = Some(format!(
"sysroot at `{}` is missing a `core` library{var_note}",
src_root,
));
}
}
}
sysroot
}
fn load_(
sysroot_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
metadata: bool,
) -> Sysroot {
let sysroot_dir = match sysroot_dir {
Some(Ok(sysroot_dir)) => Some(sysroot_dir),
Some(Err(e)) => {
return Sysroot {
root: None,
src_root: None,
mode: SysrootMode::Empty,
error: Some(e.to_string()),
}
}
None => None,
};
let sysroot_src_dir = match sysroot_src_dir { let sysroot_src_dir = match sysroot_src_dir {
Some(Ok(sysroot_src_dir)) => sysroot_src_dir, Some(Ok(sysroot_src_dir)) => sysroot_src_dir,
Some(Err(e)) => { Some(Err(e)) => {
return Sysroot { return Sysroot {
root: sysroot_dir, root: sysroot_dir,
src_root: Some(Err(Arc::new(e))), src_root: None,
mode: SysrootMode::Stitched(Stitched { crates: Arena::default() }), mode: SysrootMode::Empty,
error: Some(e.to_string()),
} }
} }
None => { None => {
return Sysroot { return Sysroot {
root: sysroot_dir, root: sysroot_dir,
src_root: None, src_root: None,
mode: SysrootMode::Stitched(Stitched { crates: Arena::default() }), mode: SysrootMode::Empty,
error: None,
} }
} }
}; };
@ -284,7 +284,7 @@ impl Sysroot {
&sysroot_cargo_toml, &sysroot_cargo_toml,
&current_dir, &current_dir,
&cargo_config, &cargo_config,
None, &Sysroot::empty(),
&|_| (), &|_| (),
) )
.map_err(|e| { .map_err(|e| {
@ -368,8 +368,9 @@ impl Sysroot {
let cargo_workspace = CargoWorkspace::new(res, sysroot_cargo_toml); let cargo_workspace = CargoWorkspace::new(res, sysroot_cargo_toml);
Some(Sysroot { Some(Sysroot {
root: sysroot_dir.clone(), root: sysroot_dir.clone(),
src_root: Some(Ok(sysroot_src_dir.clone())), src_root: Some(sysroot_src_dir.clone()),
mode: SysrootMode::Workspace(cargo_workspace), mode: SysrootMode::Workspace(cargo_workspace),
error: None,
}) })
})(); })();
if let Some(sysroot) = sysroot { if let Some(sysroot) = sysroot {
@ -420,8 +421,9 @@ impl Sysroot {
} }
Sysroot { Sysroot {
root: sysroot_dir, root: sysroot_dir,
src_root: Some(Ok(sysroot_src_dir)), src_root: Some(sysroot_src_dir),
mode: SysrootMode::Stitched(stitched), mode: SysrootMode::Stitched(stitched),
error: None,
} }
} }
} }
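// Editor's illustrative sketch, not part of the diff: the refactor above replaces threading
// `Option<&Sysroot>` everywhere with an always-present value plus an explicit empty state,
// so callers write `sysroot.tool(..)` instead of the old `Sysroot::tool(Option<&Sysroot>, ..)`
// associated function. The types below are simplified stand-ins, not the real ones.
use std::path::PathBuf;
use std::process::Command;

struct Sysroot {
    root: Option<PathBuf>,
}

impl Sysroot {
    const fn empty() -> Sysroot {
        Sysroot { root: None }
    }

    fn tool(&self, name: &str) -> Command {
        match &self.root {
            // With a known sysroot root, pin the toolchain for the spawned tool.
            Some(root) => {
                let mut cmd = Command::new(name);
                cmd.env("RUSTUP_TOOLCHAIN", root);
                cmd
            }
            // Otherwise fall back to whatever is on PATH.
            None => Command::new(name),
        }
    }
}

fn main() {
    // Callers no longer branch on Option; an absent sysroot is just `Sysroot::empty()`.
    let cargo = Sysroot::empty().tool("cargo");
    println!("{cargo:?}");
}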

View file

@ -9,10 +9,10 @@ use crate::{utf8_stdout, ManifestPath, Sysroot};
pub enum RustcDataLayoutConfig<'a> { pub enum RustcDataLayoutConfig<'a> {
/// Use `rustc --print target-spec-json`, either from with the binary from the sysroot or by discovering via /// Use `rustc --print target-spec-json`, either from with the binary from the sysroot or by discovering via
/// [`toolchain::rustc`]. /// [`toolchain::rustc`].
Rustc(Option<&'a Sysroot>), Rustc(&'a Sysroot),
/// Use `cargo --print target-spec-json`, either from with the binary from the sysroot or by discovering via /// Use `cargo --print target-spec-json`, either from with the binary from the sysroot or by discovering via
/// [`toolchain::cargo`]. /// [`toolchain::cargo`].
Cargo(Option<&'a Sysroot>, &'a ManifestPath), Cargo(&'a Sysroot, &'a ManifestPath),
} }
pub fn get( pub fn get(
@ -28,7 +28,7 @@ pub fn get(
}; };
let sysroot = match config { let sysroot = match config {
RustcDataLayoutConfig::Cargo(sysroot, cargo_toml) => { RustcDataLayoutConfig::Cargo(sysroot, cargo_toml) => {
let mut cmd = Sysroot::tool(sysroot, Tool::Cargo); let mut cmd = sysroot.tool(Tool::Cargo);
cmd.envs(extra_env); cmd.envs(extra_env);
cmd.current_dir(cargo_toml.parent()) cmd.current_dir(cargo_toml.parent())
.args([ .args([

View file

@ -34,7 +34,7 @@ fn load_cargo_with_overrides(
cargo_config_extra_env: Default::default(), cargo_config_extra_env: Default::default(),
}, },
cfg_overrides, cfg_overrides,
sysroot: Err(None), sysroot: Sysroot::empty(),
rustc_cfg: Vec::new(), rustc_cfg: Vec::new(),
toolchain: None, toolchain: None,
target_layout: Err("target_data_layout not loaded".into()), target_layout: Err("target_data_layout not loaded".into()),
@ -57,7 +57,7 @@ fn load_cargo_with_fake_sysroot(
rustc: Err(None), rustc: Err(None),
cargo_config_extra_env: Default::default(), cargo_config_extra_env: Default::default(),
}, },
sysroot: Ok(get_fake_sysroot()), sysroot: get_fake_sysroot(),
rustc_cfg: Vec::new(), rustc_cfg: Vec::new(),
cfg_overrides: Default::default(), cfg_overrides: Default::default(),
toolchain: None, toolchain: None,
@ -77,7 +77,7 @@ fn load_cargo_with_fake_sysroot(
fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) { fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) {
let data = get_test_json_file(file); let data = get_test_json_file(file);
let project = rooted_project_json(data); let project = rooted_project_json(data);
let sysroot = Ok(get_fake_sysroot()); let sysroot = get_fake_sysroot();
let project_workspace = ProjectWorkspace { let project_workspace = ProjectWorkspace {
kind: ProjectWorkspaceKind::Json(project), kind: ProjectWorkspaceKind::Json(project),
sysroot, sysroot,
@ -144,7 +144,7 @@ fn get_fake_sysroot() -> Sysroot {
// fake sysroot, so we give them both the same path: // fake sysroot, so we give them both the same path:
let sysroot_dir = AbsPathBuf::assert(sysroot_path); let sysroot_dir = AbsPathBuf::assert(sysroot_path);
let sysroot_src_dir = sysroot_dir.clone(); let sysroot_src_dir = sysroot_dir.clone();
Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false) Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir), false)
} }
fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
@ -281,12 +281,11 @@ fn smoke_test_real_sysroot_cargo() {
let manifest_path = let manifest_path =
ManifestPath::try_from(AbsPathBuf::try_from(meta.workspace_root.clone()).unwrap()).unwrap(); ManifestPath::try_from(AbsPathBuf::try_from(meta.workspace_root.clone()).unwrap()).unwrap();
let cargo_workspace = CargoWorkspace::new(meta, manifest_path); let cargo_workspace = CargoWorkspace::new(meta, manifest_path);
let sysroot = Ok(Sysroot::discover( let sysroot = Sysroot::discover(
AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))), AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))),
&Default::default(), &Default::default(),
true, true,
) );
.unwrap());
let project_workspace = ProjectWorkspace { let project_workspace = ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo { kind: ProjectWorkspaceKind::Cargo {

View file

@ -48,7 +48,7 @@ pub struct PackageRoot {
pub struct ProjectWorkspace { pub struct ProjectWorkspace {
pub kind: ProjectWorkspaceKind, pub kind: ProjectWorkspaceKind,
/// The sysroot loaded for this workspace. /// The sysroot loaded for this workspace.
pub sysroot: Result<Sysroot, Option<String>>, pub sysroot: Sysroot,
/// Holds cfg flags for the current target. We get those by running /// Holds cfg flags for the current target. We get those by running
/// `rustc --print cfg`. /// `rustc --print cfg`.
// FIXME: make this a per-crate map, as, eg, build.rs might have a // FIXME: make this a per-crate map, as, eg, build.rs might have a
@ -112,7 +112,7 @@ impl fmt::Debug for ProjectWorkspace {
.debug_struct("Cargo") .debug_struct("Cargo")
.field("root", &cargo.workspace_root().file_name()) .field("root", &cargo.workspace_root().file_name())
.field("n_packages", &cargo.packages().len()) .field("n_packages", &cargo.packages().len())
.field("sysroot", &sysroot.is_ok()) .field("n_sysroot_crates", &sysroot.num_packages())
.field( .field(
"n_rustc_compiler_crates", "n_rustc_compiler_crates",
&rustc.as_ref().map(|a| a.as_ref()).map_or(0, |(rc, _)| rc.packages().len()), &rustc.as_ref().map(|a| a.as_ref()).map_or(0, |(rc, _)| rc.packages().len()),
@ -125,11 +125,9 @@ impl fmt::Debug for ProjectWorkspace {
.finish(), .finish(),
ProjectWorkspaceKind::Json(project) => { ProjectWorkspaceKind::Json(project) => {
let mut debug_struct = f.debug_struct("Json"); let mut debug_struct = f.debug_struct("Json");
debug_struct.field("n_crates", &project.n_crates());
if let Ok(sysroot) = sysroot {
debug_struct.field("n_sysroot_crates", &sysroot.num_packages());
}
debug_struct debug_struct
.field("n_crates", &project.n_crates())
.field("n_sysroot_crates", &sysroot.num_packages())
.field("n_rustc_cfg", &rustc_cfg.len()) .field("n_rustc_cfg", &rustc_cfg.len())
.field("toolchain", &toolchain) .field("toolchain", &toolchain)
.field("data_layout", &target_layout) .field("data_layout", &target_layout)
@ -144,7 +142,7 @@ impl fmt::Debug for ProjectWorkspace {
.debug_struct("DetachedFiles") .debug_struct("DetachedFiles")
.field("file", &file) .field("file", &file)
.field("cargo_script", &cargo_script.is_some()) .field("cargo_script", &cargo_script.is_some())
.field("sysroot", &sysroot.is_ok()) .field("n_sysroot_crates", &sysroot.num_packages())
.field("cargo_script", &cargo_script.is_some()) .field("cargo_script", &cargo_script.is_some())
.field("n_rustc_cfg", &rustc_cfg.len()) .field("n_rustc_cfg", &rustc_cfg.len())
.field("toolchain", &toolchain) .field("toolchain", &toolchain)
@ -158,7 +156,7 @@ impl fmt::Debug for ProjectWorkspace {
fn get_toolchain_version( fn get_toolchain_version(
current_dir: &AbsPath, current_dir: &AbsPath,
sysroot: Option<&Sysroot>, sysroot: &Sysroot,
tool: Tool, tool: Tool,
extra_env: &FxHashMap<String, String>, extra_env: &FxHashMap<String, String>,
prefix: &str, prefix: &str,
@ -213,41 +211,37 @@ impl ProjectWorkspace {
} }
ProjectManifest::CargoToml(cargo_toml) => { ProjectManifest::CargoToml(cargo_toml) => {
let sysroot = match (&config.sysroot, &config.sysroot_src) { let sysroot = match (&config.sysroot, &config.sysroot_src) {
(Some(RustLibSource::Path(path)), None) => { (Some(RustLibSource::Discover), None) => Sysroot::discover(
Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata).map_err(|e| { cargo_toml.parent(),
Some(format!("Failed to find sysroot at {path}:{e}")) &config.extra_env,
}) config.sysroot_query_metadata,
} ),
(Some(RustLibSource::Discover), None) => {
Sysroot::discover(cargo_toml.parent(), &config.extra_env, config.sysroot_query_metadata).map_err(|e| {
Some(format!("Failed to find sysroot for Cargo.toml file {cargo_toml}. Is rust-src installed? {e}"))
})
}
(Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => {
Ok(Sysroot::load(sysroot.clone(), Some(Ok(sysroot_src.clone())), config.sysroot_query_metadata))
}
(Some(RustLibSource::Discover), Some(sysroot_src)) => { (Some(RustLibSource::Discover), Some(sysroot_src)) => {
Sysroot::discover_with_src_override( Sysroot::discover_with_src_override(
cargo_toml.parent(), cargo_toml.parent(),
&config.extra_env, &config.extra_env,
sysroot_src.clone(), config.sysroot_query_metadata, sysroot_src.clone(),
).map_err(|e| { config.sysroot_query_metadata,
Some(format!("Failed to find sysroot for Cargo.toml file {cargo_toml}. Is rust-src installed? {e}")) )
})
} }
(None, _) => Err(None), (Some(RustLibSource::Path(path)), None) => Sysroot::discover_sysroot_src_dir(
path.clone(),
config.sysroot_query_metadata,
),
(Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => Sysroot::load(
Some(sysroot.clone()),
Some(sysroot_src.clone()),
config.sysroot_query_metadata,
),
(None, _) => Sysroot::empty(),
}; };
let sysroot_ref = sysroot.as_ref().ok(); tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = ?sysroot.root(), "Using sysroot");
if let Ok(sysroot) = &sysroot {
tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = %sysroot.root(), "Using sysroot");
}
let rustc_dir = match &config.rustc_source { let rustc_dir = match &config.rustc_source {
Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone()) Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
.map_err(|p| Some(format!("rustc source path is not absolute: {p}"))), .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))),
Some(RustLibSource::Discover) => { Some(RustLibSource::Discover) => {
sysroot_ref.and_then(Sysroot::discover_rustc_src).ok_or_else(|| { sysroot.discover_rustc_src().ok_or_else(|| {
Some("Failed to discover rustc source for sysroot.".to_owned()) Some("Failed to discover rustc source for sysroot.".to_owned())
}) })
} }
@ -263,7 +257,7 @@ impl ProjectWorkspace {
features: crate::CargoFeatures::default(), features: crate::CargoFeatures::default(),
..config.clone() ..config.clone()
}, },
sysroot_ref, &sysroot,
progress, progress,
) { ) {
Ok(meta) => { Ok(meta) => {
@ -272,7 +266,7 @@ impl ProjectWorkspace {
&workspace, &workspace,
cargo_toml.parent(), cargo_toml.parent(),
&config.extra_env, &config.extra_env,
sysroot_ref &sysroot
); );
Ok(Box::new((workspace, buildscripts))) Ok(Box::new((workspace, buildscripts)))
} }
@ -290,7 +284,7 @@ impl ProjectWorkspace {
let toolchain = get_toolchain_version( let toolchain = get_toolchain_version(
cargo_toml.parent(), cargo_toml.parent(),
sysroot_ref, &sysroot,
Tool::Cargo, Tool::Cargo,
&config.extra_env, &config.extra_env,
"cargo ", "cargo ",
@ -298,12 +292,12 @@ impl ProjectWorkspace {
let rustc_cfg = rustc_cfg::get( let rustc_cfg = rustc_cfg::get(
config.target.as_deref(), config.target.as_deref(),
&config.extra_env, &config.extra_env,
RustcCfgConfig::Cargo(sysroot_ref, cargo_toml), RustcCfgConfig::Cargo(&sysroot, cargo_toml),
); );
let cfg_overrides = config.cfg_overrides.clone(); let cfg_overrides = config.cfg_overrides.clone();
let data_layout = target_data_layout::get( let data_layout = target_data_layout::get(
RustcDataLayoutConfig::Cargo(sysroot_ref, cargo_toml), RustcDataLayoutConfig::Cargo(&sysroot, cargo_toml),
config.target.as_deref(), config.target.as_deref(),
&config.extra_env, &config.extra_env,
); );
@ -315,7 +309,7 @@ impl ProjectWorkspace {
cargo_toml, cargo_toml,
cargo_toml.parent(), cargo_toml.parent(),
config, config,
sysroot_ref, &sysroot,
progress, progress,
) )
.with_context(|| { .with_context(|| {
@ -326,7 +320,7 @@ impl ProjectWorkspace {
let cargo = CargoWorkspace::new(meta, cargo_toml.clone()); let cargo = CargoWorkspace::new(meta, cargo_toml.clone());
let cargo_config_extra_env = let cargo_config_extra_env =
cargo_config_env(cargo_toml, &config.extra_env, sysroot_ref); cargo_config_env(cargo_toml, &config.extra_env, &sysroot);
ProjectWorkspace { ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo { kind: ProjectWorkspaceKind::Cargo {
cargo, cargo,
@ -354,32 +348,13 @@ impl ProjectWorkspace {
extra_env: &FxHashMap<String, String>, extra_env: &FxHashMap<String, String>,
cfg_overrides: &CfgOverrides, cfg_overrides: &CfgOverrides,
) -> ProjectWorkspace { ) -> ProjectWorkspace {
let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) { let sysroot =
(Some(sysroot), Some(sysroot_src)) => { Sysroot::load(project_json.sysroot.clone(), project_json.sysroot_src.clone(), false);
Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false)) let cfg_config = RustcCfgConfig::Rustc(&sysroot);
} let data_layout_config = RustcDataLayoutConfig::Rustc(&sysroot);
(Some(sysroot), None) => {
// assume sysroot is structured like rustup's and guess `sysroot_src`
let sysroot_src =
sysroot.join("lib").join("rustlib").join("src").join("rust").join("library");
Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false))
}
(None, Some(sysroot_src)) => {
// assume sysroot is structured like rustup's and guess `sysroot`
let mut sysroot = sysroot_src.clone();
for _ in 0..5 {
sysroot.pop();
}
Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false))
}
(None, None) => Err(None),
};
let sysroot_ref = sysroot.as_ref().ok();
let cfg_config = RustcCfgConfig::Rustc(sysroot_ref);
let data_layout_config = RustcDataLayoutConfig::Rustc(sysroot_ref);
let toolchain = match get_toolchain_version( let toolchain = match get_toolchain_version(
project_json.path(), project_json.path(),
sysroot_ref, &sysroot,
Tool::Rustc, Tool::Rustc,
extra_env, extra_env,
"rustc ", "rustc ",
@ -410,24 +385,16 @@ impl ProjectWorkspace {
let dir = detached_file.parent(); let dir = detached_file.parent();
let sysroot = match &config.sysroot { let sysroot = match &config.sysroot {
Some(RustLibSource::Path(path)) => { Some(RustLibSource::Path(path)) => {
Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata) Sysroot::discover_sysroot_src_dir(path.clone(), config.sysroot_query_metadata)
.map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}")))
} }
Some(RustLibSource::Discover) => Sysroot::discover( Some(RustLibSource::Discover) => {
dir, Sysroot::discover(dir, &config.extra_env, config.sysroot_query_metadata)
&config.extra_env, }
config.sysroot_query_metadata, None => Sysroot::empty(),
)
.map_err(|e| {
Some(format!("Failed to find sysroot for {dir}. Is rust-src installed? {e}"))
}),
None => Err(None),
}; };
let sysroot_ref = sysroot.as_ref().ok();
let toolchain = let toolchain =
match get_toolchain_version(dir, sysroot_ref, Tool::Rustc, &config.extra_env, "rustc ") match get_toolchain_version(dir, &sysroot, Tool::Rustc, &config.extra_env, "rustc ") {
{
Ok(it) => it, Ok(it) => it,
Err(e) => { Err(e) => {
tracing::error!("{e}"); tracing::error!("{e}");
@ -435,25 +402,24 @@ impl ProjectWorkspace {
} }
}; };
let rustc_cfg = rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(sysroot_ref)); let rustc_cfg = rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(&sysroot));
let data_layout = target_data_layout::get( let data_layout = target_data_layout::get(
RustcDataLayoutConfig::Rustc(sysroot_ref), RustcDataLayoutConfig::Rustc(&sysroot),
None, None,
&config.extra_env, &config.extra_env,
); );
let cargo_script = let cargo_script =
CargoWorkspace::fetch_metadata(detached_file, dir, config, sysroot_ref, &|_| ()) CargoWorkspace::fetch_metadata(detached_file, dir, config, &sysroot, &|_| ()).ok().map(
.ok() |ws| {
.map(|ws| {
( (
CargoWorkspace::new(ws, detached_file.clone()), CargoWorkspace::new(ws, detached_file.clone()),
WorkspaceBuildScripts::default(), WorkspaceBuildScripts::default(),
) )
}); },
);
let cargo_config_extra_env = let cargo_config_extra_env = cargo_config_env(detached_file, &config.extra_env, &sysroot);
cargo_config_env(detached_file, &config.extra_env, sysroot_ref);
Ok(ProjectWorkspace { Ok(ProjectWorkspace {
kind: ProjectWorkspaceKind::DetachedFile { kind: ProjectWorkspaceKind::DetachedFile {
file: detached_file.to_owned(), file: detached_file.to_owned(),
@ -489,7 +455,7 @@ impl ProjectWorkspace {
cargo, cargo,
progress, progress,
self.toolchain.as_ref(), self.toolchain.as_ref(),
self.sysroot.as_ref().ok(), &self.sysroot,
) )
.with_context(|| { .with_context(|| {
format!("Failed to run build scripts for {}", cargo.workspace_root()) format!("Failed to run build scripts for {}", cargo.workspace_root())
@ -562,17 +528,7 @@ impl ProjectWorkspace {
} }
pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> { pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
match &self.sysroot { self.sysroot.discover_proc_macro_srv()
Ok(sysroot) => sysroot.discover_proc_macro_srv(),
Err(None) => Err(anyhow::format_err!(
"cannot find proc-macro server, the workspace `{}` is missing a sysroot",
self.manifest_or_root()
)),
Err(Some(e)) => Err(anyhow::format_err!(
"cannot find proc-macro server, the workspace `{}` is missing a sysroot: {e}",
self.manifest_or_root()
)),
}
} }
/// Returns the roots for the current `ProjectWorkspace` /// Returns the roots for the current `ProjectWorkspace`
@ -580,39 +536,37 @@ impl ProjectWorkspace {
/// the root is a member of the current workspace /// the root is a member of the current workspace
pub fn to_roots(&self) -> Vec<PackageRoot> { pub fn to_roots(&self) -> Vec<PackageRoot> {
let mk_sysroot = || { let mk_sysroot = || {
self.sysroot.as_ref().into_iter().flat_map(move |sysroot: &Sysroot| { let mut r = match self.sysroot.mode() {
let mut r = match sysroot.mode() { SysrootMode::Workspace(ws) => ws
SysrootMode::Workspace(ws) => ws .packages()
.packages() .filter_map(|pkg| {
.filter_map(|pkg| { if ws[pkg].is_local {
if ws[pkg].is_local { // the local ones are included in the main `PackageRoot`` below
// the local ones are included in the main `PackageRoot`` below return None;
return None; }
} let pkg_root = ws[pkg].manifest.parent().to_path_buf();
let pkg_root = ws[pkg].manifest.parent().to_path_buf();
let include = vec![pkg_root.clone()]; let include = vec![pkg_root.clone()];
let exclude = vec![ let exclude = vec![
pkg_root.join(".git"), pkg_root.join(".git"),
pkg_root.join("target"), pkg_root.join("target"),
pkg_root.join("tests"), pkg_root.join("tests"),
pkg_root.join("examples"), pkg_root.join("examples"),
pkg_root.join("benches"), pkg_root.join("benches"),
]; ];
Some(PackageRoot { is_local: false, include, exclude }) Some(PackageRoot { is_local: false, include, exclude })
}) })
.collect(), .collect(),
SysrootMode::Stitched(_) => vec![], SysrootMode::Stitched(_) | SysrootMode::Empty => vec![],
}; };
r.push(PackageRoot { r.push(PackageRoot {
is_local: false, is_local: false,
include: sysroot.src_root().map(|it| it.to_path_buf()).into_iter().collect(), include: self.sysroot.src_root().map(|it| it.to_path_buf()).into_iter().collect(),
exclude: Vec::new(), exclude: Vec::new(),
}); });
r r
})
}; };
match &self.kind { match &self.kind {
ProjectWorkspaceKind::Json(project) => project ProjectWorkspaceKind::Json(project) => project
@ -731,19 +685,15 @@ impl ProjectWorkspace {
} }
pub fn n_packages(&self) -> usize { pub fn n_packages(&self) -> usize {
let sysroot_package_len = self.sysroot.num_packages();
match &self.kind { match &self.kind {
ProjectWorkspaceKind::Json(project) => { ProjectWorkspaceKind::Json(project) => sysroot_package_len + project.n_crates(),
let sysroot_package_len = self.sysroot.as_ref().map_or(0, |it| it.num_packages());
sysroot_package_len + project.n_crates()
}
ProjectWorkspaceKind::Cargo { cargo, rustc, .. } => { ProjectWorkspaceKind::Cargo { cargo, rustc, .. } => {
let rustc_package_len = let rustc_package_len =
rustc.as_ref().map(|a| a.as_ref()).map_or(0, |(it, _)| it.packages().len()); rustc.as_ref().map(|a| a.as_ref()).map_or(0, |(it, _)| it.packages().len());
let sysroot_package_len = self.sysroot.as_ref().map_or(0, |it| it.num_packages());
cargo.packages().len() + sysroot_package_len + rustc_package_len cargo.packages().len() + sysroot_package_len + rustc_package_len
} }
ProjectWorkspaceKind::DetachedFile { cargo: cargo_script, .. } => { ProjectWorkspaceKind::DetachedFile { cargo: cargo_script, .. } => {
let sysroot_package_len = self.sysroot.as_ref().map_or(0, |it| it.num_packages());
sysroot_package_len sysroot_package_len
+ cargo_script.as_ref().map_or(1, |(cargo, _)| cargo.packages().len()) + cargo_script.as_ref().map_or(1, |(cargo, _)| cargo.packages().len())
} }
@ -764,7 +714,7 @@ impl ProjectWorkspace {
rustc_cfg.clone(), rustc_cfg.clone(),
load, load,
project, project,
sysroot.as_ref().ok(), sysroot,
extra_env, extra_env,
cfg_overrides, cfg_overrides,
), ),
@ -780,7 +730,7 @@ impl ProjectWorkspace {
load, load,
rustc.as_ref().map(|a| a.as_ref()).ok(), rustc.as_ref().map(|a| a.as_ref()).ok(),
cargo, cargo,
sysroot.as_ref().ok(), sysroot,
rustc_cfg.clone(), rustc_cfg.clone(),
cfg_overrides, cfg_overrides,
build_scripts, build_scripts,
@ -793,7 +743,7 @@ impl ProjectWorkspace {
&mut |path| load(path), &mut |path| load(path),
None, None,
cargo, cargo,
sysroot.as_ref().ok(), sysroot,
rustc_cfg.clone(), rustc_cfg.clone(),
cfg_overrides, cfg_overrides,
build_scripts, build_scripts,
@ -803,7 +753,7 @@ impl ProjectWorkspace {
rustc_cfg.clone(), rustc_cfg.clone(),
load, load,
file, file,
sysroot.as_ref().ok(), sysroot,
cfg_overrides, cfg_overrides,
) )
}, },
@ -811,9 +761,7 @@ impl ProjectWorkspace {
), ),
}; };
if matches!(sysroot.as_ref().map(|it| it.mode()), Ok(SysrootMode::Stitched(_))) if matches!(sysroot.mode(), SysrootMode::Stitched(_)) && crate_graph.patch_cfg_if() {
&& crate_graph.patch_cfg_if()
{
tracing::debug!("Patched std to depend on cfg-if") tracing::debug!("Patched std to depend on cfg-if")
} else { } else {
tracing::debug!("Did not patch std to depend on cfg-if") tracing::debug!("Did not patch std to depend on cfg-if")
@ -892,15 +840,14 @@ fn project_json_to_crate_graph(
rustc_cfg: Vec<CfgFlag>, rustc_cfg: Vec<CfgFlag>,
load: FileLoader<'_>, load: FileLoader<'_>,
project: &ProjectJson, project: &ProjectJson,
sysroot: Option<&Sysroot>, sysroot: &Sysroot,
extra_env: &FxHashMap<String, String>, extra_env: &FxHashMap<String, String>,
override_cfg: &CfgOverrides, override_cfg: &CfgOverrides,
) -> (CrateGraph, ProcMacroPaths) { ) -> (CrateGraph, ProcMacroPaths) {
let mut res = (CrateGraph::default(), ProcMacroPaths::default()); let mut res = (CrateGraph::default(), ProcMacroPaths::default());
let (crate_graph, proc_macros) = &mut res; let (crate_graph, proc_macros) = &mut res;
let sysroot_deps = sysroot let (public_deps, libproc_macro) =
.as_ref() sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load);
.map(|sysroot| sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load));
let r_a_cfg_flag = CfgFlag::Atom("rust_analyzer".to_owned()); let r_a_cfg_flag = CfgFlag::Atom("rust_analyzer".to_owned());
let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default(); let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
@ -978,11 +925,9 @@ fn project_json_to_crate_graph(
for (from_idx, krate) in project.crates() { for (from_idx, krate) in project.crates() {
if let Some(&from) = idx_to_crate_id.get(&from_idx) { if let Some(&from) = idx_to_crate_id.get(&from_idx) {
if let Some((public_deps, libproc_macro)) = &sysroot_deps { public_deps.add_to_crate_graph(crate_graph, from);
public_deps.add_to_crate_graph(crate_graph, from); if let Some(proc_macro) = libproc_macro {
if let Some(proc_macro) = libproc_macro { add_proc_macro_dep(crate_graph, from, proc_macro, krate.is_proc_macro);
add_proc_macro_dep(crate_graph, from, *proc_macro, krate.is_proc_macro);
}
} }
for dep in &krate.deps { for dep in &krate.deps {
@ -999,7 +944,7 @@ fn cargo_to_crate_graph(
load: FileLoader<'_>, load: FileLoader<'_>,
rustc: Option<&(CargoWorkspace, WorkspaceBuildScripts)>, rustc: Option<&(CargoWorkspace, WorkspaceBuildScripts)>,
cargo: &CargoWorkspace, cargo: &CargoWorkspace,
sysroot: Option<&Sysroot>, sysroot: &Sysroot,
rustc_cfg: Vec<CfgFlag>, rustc_cfg: Vec<CfgFlag>,
override_cfg: &CfgOverrides, override_cfg: &CfgOverrides,
build_scripts: &WorkspaceBuildScripts, build_scripts: &WorkspaceBuildScripts,
@ -1008,10 +953,8 @@ fn cargo_to_crate_graph(
let mut res = (CrateGraph::default(), ProcMacroPaths::default()); let mut res = (CrateGraph::default(), ProcMacroPaths::default());
let crate_graph = &mut res.0; let crate_graph = &mut res.0;
let proc_macros = &mut res.1; let proc_macros = &mut res.1;
let (public_deps, libproc_macro) = match sysroot { let (public_deps, libproc_macro) =
Some(sysroot) => sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load), sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load);
None => (SysrootPublicDeps::default(), None),
};
let cfg_options = CfgOptions::from_iter(rustc_cfg); let cfg_options = CfgOptions::from_iter(rustc_cfg);
@ -1188,15 +1131,13 @@ fn detached_file_to_crate_graph(
rustc_cfg: Vec<CfgFlag>, rustc_cfg: Vec<CfgFlag>,
load: FileLoader<'_>, load: FileLoader<'_>,
detached_file: &ManifestPath, detached_file: &ManifestPath,
sysroot: Option<&Sysroot>, sysroot: &Sysroot,
override_cfg: &CfgOverrides, override_cfg: &CfgOverrides,
) -> (CrateGraph, ProcMacroPaths) { ) -> (CrateGraph, ProcMacroPaths) {
let _p = tracing::span!(tracing::Level::INFO, "detached_file_to_crate_graph").entered(); let _p = tracing::span!(tracing::Level::INFO, "detached_file_to_crate_graph").entered();
let mut crate_graph = CrateGraph::default(); let mut crate_graph = CrateGraph::default();
let (public_deps, _libproc_macro) = match sysroot { let (public_deps, _libproc_macro) =
Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load), sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load);
None => (SysrootPublicDeps::default(), None),
};
let mut cfg_options = CfgOptions::from_iter(rustc_cfg); let mut cfg_options = CfgOptions::from_iter(rustc_cfg);
cfg_options.insert_atom("test".into()); cfg_options.insert_atom("test".into());
@ -1431,7 +1372,7 @@ fn sysroot_to_crate_graph(
load, load,
None, None,
cargo, cargo,
None, &Sysroot::empty(),
rustc_cfg, rustc_cfg,
&CfgOverrides { &CfgOverrides {
global: CfgDiff::new( global: CfgDiff::new(
@ -1554,6 +1495,7 @@ fn sysroot_to_crate_graph(
stitched.proc_macro().and_then(|it| sysroot_crates.get(&it).copied()); stitched.proc_macro().and_then(|it| sysroot_crates.get(&it).copied());
(public_deps, libproc_macro) (public_deps, libproc_macro)
} }
SysrootMode::Empty => (SysrootPublicDeps { deps: vec![] }, None),
} }
} }
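// Editor's illustrative sketch, not part of the diff: with the sysroot always present, the
// per-arm `self.sysroot.as_ref().map_or(0, ..)` dances in `n_packages` collapse into one
// hoisted call, as in this simplified stand-in.
struct Sysroot { packages: Vec<String> }
impl Sysroot {
    fn empty() -> Self { Sysroot { packages: Vec::new() } }
    fn num_packages(&self) -> usize { self.packages.len() }
}

enum WorkspaceKind { Json { n_crates: usize }, Cargo { n_packages: usize } }

struct ProjectWorkspace { kind: WorkspaceKind, sysroot: Sysroot }

impl ProjectWorkspace {
    fn n_packages(&self) -> usize {
        // Hoisted once instead of repeated (and Option-unwrapped) in every match arm.
        let sysroot_package_len = self.sysroot.num_packages();
        match &self.kind {
            WorkspaceKind::Json { n_crates } => sysroot_package_len + n_crates,
            WorkspaceKind::Cargo { n_packages } => sysroot_package_len + n_packages,
        }
    }
}

fn main() {
    let ws = ProjectWorkspace {
        kind: WorkspaceKind::Json { n_crates: 3 },
        sysroot: Sysroot::empty(),
    };
    assert_eq!(ws.n_packages(), 3);
}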

View file

@ -8,7 +8,8 @@ use std::{
use hir::{ use hir::{
db::{DefDatabase, ExpandDatabase, HirDatabase}, db::{DefDatabase, ExpandDatabase, HirDatabase},
Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ModuleDef, Name, Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ImportPathConfig,
ModuleDef, Name,
}; };
use hir_def::{ use hir_def::{
body::{BodySourceMap, SyntheticSyntax}, body::{BodySourceMap, SyntheticSyntax},
@ -438,8 +439,13 @@ impl flags::AnalysisStats {
let mut formatter = |_: &hir::Type| todo.clone(); let mut formatter = |_: &hir::Type| todo.clone();
let mut syntax_hit_found = false; let mut syntax_hit_found = false;
for term in found_terms { for term in found_terms {
let generated = let generated = term
term.gen_source_code(&scope, &mut formatter, false, true).unwrap(); .gen_source_code(
&scope,
&mut formatter,
ImportPathConfig { prefer_no_std: false, prefer_prelude: true },
)
.unwrap();
syntax_hit_found |= trim(&original_text) == trim(&generated); syntax_hit_found |= trim(&original_text) == trim(&generated);
// Validate if type-checks // Validate if type-checks

View file

@ -69,11 +69,9 @@ impl Tester {
let cargo_config = let cargo_config =
CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
let sysroot = let sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env, false);
Ok(Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env, false)
.unwrap());
let data_layout = target_data_layout::get( let data_layout = target_data_layout::get(
RustcDataLayoutConfig::Rustc(sysroot.as_ref().ok()), RustcDataLayoutConfig::Rustc(&sysroot),
None, None,
&cargo_config.extra_env, &cargo_config.extra_env,
); );

View file

@ -1031,6 +1031,8 @@ impl Config {
&& completion_item_edit_resolve(&self.caps), && completion_item_edit_resolve(&self.caps),
enable_self_on_the_fly: self.completion_autoself_enable(source_root).to_owned(), enable_self_on_the_fly: self.completion_autoself_enable(source_root).to_owned(),
enable_private_editable: self.completion_privateEditable_enable(source_root).to_owned(), enable_private_editable: self.completion_privateEditable_enable(source_root).to_owned(),
enable_term_search: self.completion_termSearch_enable(source_root).to_owned(),
term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64,
full_function_signatures: self full_function_signatures: self
.completion_fullFunctionSignatures_enable(source_root) .completion_fullFunctionSignatures_enable(source_root)
.to_owned(), .to_owned(),
@ -1039,8 +1041,6 @@ impl Config {
CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses), CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses),
CallableCompletionDef::None => None, CallableCompletionDef::None => None,
}, },
insert_use: self.insert_use_config(source_root),
prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
snippet_cap: SnippetCap::new(try_or_def!( snippet_cap: SnippetCap::new(try_or_def!(
self.caps self.caps
.text_document .text_document
@ -1051,11 +1051,11 @@ impl Config {
.as_ref()? .as_ref()?
.snippet_support? .snippet_support?
)), )),
insert_use: self.insert_use_config(source_root),
prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
snippets: self.snippets.clone().to_vec(), snippets: self.snippets.clone().to_vec(),
limit: self.completion_limit(source_root).to_owned(), limit: self.completion_limit(source_root).to_owned(),
enable_term_search: self.completion_termSearch_enable(source_root).to_owned(),
term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64,
prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
} }
} }
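// Editor's illustrative sketch, not part of the diff: `term_search_fuel` above is a budget
// knob. The generic pattern is a search that decrements a shared fuel counter per step and
// bails out at zero; this is not rust-analyzer's actual term-search algorithm.
fn search(depth: u32, fuel: &mut u64, found: &mut Vec<u32>) {
    if *fuel == 0 || depth > 6 {
        return; // out of budget or past the depth cap
    }
    *fuel -= 1;
    found.push(depth);
    // Explore two branches per node; total work is capped by the fuel, not the tree size.
    search(depth + 1, fuel, found);
    search(depth + 1, fuel, found);
}

fn main() {
    let mut fuel = 200; // example budget
    let mut found = Vec::new();
    search(0, &mut fuel, &mut found);
    assert!(found.len() <= 200);
    println!("explored {} nodes, fuel left {}", found.len(), fuel);
}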

View file

@ -87,6 +87,7 @@ pub(crate) struct GlobalState {
pub(crate) flycheck_sender: Sender<flycheck::Message>, pub(crate) flycheck_sender: Sender<flycheck::Message>,
pub(crate) flycheck_receiver: Receiver<flycheck::Message>, pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
pub(crate) last_flycheck_error: Option<String>, pub(crate) last_flycheck_error: Option<String>,
pub(crate) diagnostics_received: bool,
// Test explorer // Test explorer
pub(crate) test_run_session: Option<Vec<flycheck::CargoTestHandle>>, pub(crate) test_run_session: Option<Vec<flycheck::CargoTestHandle>>,
@ -224,6 +225,7 @@ impl GlobalState {
flycheck_sender, flycheck_sender,
flycheck_receiver, flycheck_receiver,
last_flycheck_error: None, last_flycheck_error: None,
diagnostics_received: false,
test_run_session: None, test_run_session: None,
test_run_sender, test_run_sender,


@ -860,6 +860,11 @@ pub(crate) fn handle_runnables(
if cmd == "run" && spec.target_kind != TargetKind::Bin { if cmd == "run" && spec.target_kind != TargetKind::Bin {
continue; continue;
} }
let cwd = if cmd != "test" || spec.target_kind == TargetKind::Bin {
spec.workspace_root.clone()
} else {
spec.cargo_toml.parent().to_path_buf()
};
let mut cargo_args = let mut cargo_args =
vec![cmd.to_owned(), "--package".to_owned(), spec.package.clone()]; vec![cmd.to_owned(), "--package".to_owned(), spec.package.clone()];
let all_targets = cmd != "run" && !is_crate_no_std; let all_targets = cmd != "run" && !is_crate_no_std;
@ -876,6 +881,7 @@ pub(crate) fn handle_runnables(
kind: lsp_ext::RunnableKind::Cargo, kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable { args: lsp_ext::CargoRunnable {
workspace_root: Some(spec.workspace_root.clone().into()), workspace_root: Some(spec.workspace_root.clone().into()),
cwd: Some(cwd.into()),
override_cargo: config.override_cargo.clone(), override_cargo: config.override_cargo.clone(),
cargo_args, cargo_args,
cargo_extra_args: config.cargo_extra_args.clone(), cargo_extra_args: config.cargo_extra_args.clone(),
@ -893,6 +899,7 @@ pub(crate) fn handle_runnables(
kind: lsp_ext::RunnableKind::Cargo, kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable { args: lsp_ext::CargoRunnable {
workspace_root: None, workspace_root: None,
cwd: None,
override_cargo: config.override_cargo, override_cargo: config.override_cargo,
cargo_args: vec!["check".to_owned(), "--workspace".to_owned()], cargo_args: vec!["check".to_owned(), "--workspace".to_owned()],
cargo_extra_args: config.cargo_extra_args, cargo_extra_args: config.cargo_extra_args,
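
The `cwd` chosen above follows a single rule: anything that is not a test, and any binary target, runs from the workspace root, while tests of non-binary targets run from the package directory owning their `Cargo.toml`. A minimal sketch of that rule, with names and types simplified (not the actual `handle_runnables` code; `package_dir` stands in for `spec.cargo_toml.parent()` and `is_bin_target` for `spec.target_kind == TargetKind::Bin`):

```rust
// Simplified model of the cwd selection above.
fn runnable_cwd(cmd: &str, is_bin_target: bool, workspace_root: &str, package_dir: &str) -> String {
    if cmd != "test" || is_bin_target {
        workspace_root.to_owned()
    } else {
        package_dir.to_owned()
    }
}

fn main() {
    // `cargo run` on a binary: workspace root.
    assert_eq!(runnable_cwd("run", true, "/ws", "/ws/pkg"), "/ws");
    // `cargo test` on a library target: the package directory.
    assert_eq!(runnable_cwd("test", false, "/ws", "/ws/pkg"), "/ws/pkg");
}
```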
@ -1783,18 +1790,18 @@ pub(crate) fn handle_open_docs(
let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match &ws.kind { let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match &ws.kind {
ProjectWorkspaceKind::Cargo { cargo, .. } ProjectWorkspaceKind::Cargo { cargo, .. }
| ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. } => { | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. } => {
Some((cargo, ws.sysroot.as_ref().ok())) Some((cargo, &ws.sysroot))
} }
ProjectWorkspaceKind::Json { .. } => None, ProjectWorkspaceKind::Json { .. } => None,
ProjectWorkspaceKind::DetachedFile { .. } => None, ProjectWorkspaceKind::DetachedFile { .. } => None,
}); });
let (cargo, sysroot) = match ws_and_sysroot { let (cargo, sysroot) = match ws_and_sysroot {
Some((ws, sysroot)) => (Some(ws), sysroot), Some((ws, sysroot)) => (Some(ws), Some(sysroot)),
_ => (None, None), _ => (None, None),
}; };
let sysroot = sysroot.map(|p| p.root().as_str()); let sysroot = sysroot.and_then(|p| p.root()).map(|it| it.as_str());
let target_dir = cargo.map(|cargo| cargo.target_directory()).map(|p| p.as_str()); let target_dir = cargo.map(|cargo| cargo.target_directory()).map(|p| p.as_str());
let Ok(remote_urls) = snap.analysis.external_docs(position, target_dir, sysroot) else { let Ok(remote_urls) = snap.analysis.external_docs(position, target_dir, sysroot) else {


@ -139,6 +139,7 @@ fn integrated_completion_benchmark() {
enable_self_on_the_fly: true, enable_self_on_the_fly: true,
enable_private_editable: true, enable_private_editable: true,
enable_term_search: true, enable_term_search: true,
term_search_fuel: 200,
full_function_signatures: false, full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments), callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true), snippet_cap: SnippetCap::new(true),
@ -149,11 +150,10 @@ fn integrated_completion_benchmark() {
group: true, group: true,
skip_glob_imports: true, skip_glob_imports: true,
}, },
snippets: Vec::new(),
prefer_no_std: false, prefer_no_std: false,
prefer_prelude: true, prefer_prelude: true,
snippets: Vec::new(),
limit: None, limit: None,
term_search_fuel: 200,
}; };
let position = let position =
FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@ -184,6 +184,7 @@ fn integrated_completion_benchmark() {
enable_self_on_the_fly: true, enable_self_on_the_fly: true,
enable_private_editable: true, enable_private_editable: true,
enable_term_search: true, enable_term_search: true,
term_search_fuel: 200,
full_function_signatures: false, full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments), callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true), snippet_cap: SnippetCap::new(true),
@ -194,11 +195,10 @@ fn integrated_completion_benchmark() {
group: true, group: true,
skip_glob_imports: true, skip_glob_imports: true,
}, },
snippets: Vec::new(),
prefer_no_std: false, prefer_no_std: false,
prefer_prelude: true, prefer_prelude: true,
snippets: Vec::new(),
limit: None, limit: None,
term_search_fuel: 200,
}; };
let position = let position =
FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@ -227,6 +227,7 @@ fn integrated_completion_benchmark() {
enable_self_on_the_fly: true, enable_self_on_the_fly: true,
enable_private_editable: true, enable_private_editable: true,
enable_term_search: true, enable_term_search: true,
term_search_fuel: 200,
full_function_signatures: false, full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments), callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true), snippet_cap: SnippetCap::new(true),
@ -237,11 +238,10 @@ fn integrated_completion_benchmark() {
group: true, group: true,
skip_glob_imports: true, skip_glob_imports: true,
}, },
snippets: Vec::new(),
prefer_no_std: false, prefer_no_std: false,
prefer_prelude: true, prefer_prelude: true,
snippets: Vec::new(),
limit: None, limit: None,
term_search_fuel: 200,
}; };
let position = let position =
FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };


@ -441,6 +441,8 @@ pub struct CargoRunnable {
pub override_cargo: Option<String>, pub override_cargo: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub workspace_root: Option<PathBuf>, pub workspace_root: Option<PathBuf>,
#[serde(skip_serializing_if = "Option::is_none")]
pub cwd: Option<PathBuf>,
// command, --package and --lib stuff // command, --package and --lib stuff
pub cargo_args: Vec<String>, pub cargo_args: Vec<String>,
// user-specified additional cargo args, like `--release`. // user-specified additional cargo args, like `--release`.
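
Because of `#[serde(skip_serializing_if = "Option::is_none")]`, the new `cwd` key only shows up in the JSON payload when a value is set. A reduced, hypothetical version of the struct illustrating that behaviour (assumes the `serde`/`serde_json` crates and the camelCase keys seen in the runnables tests below):

```rust
use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CargoRunnableSketch {
    #[serde(skip_serializing_if = "Option::is_none")]
    workspace_root: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    cwd: Option<String>,
    cargo_args: Vec<String>,
}

fn main() {
    let r = CargoRunnableSketch {
        workspace_root: None,
        cwd: Some("/ws/foo".into()),
        cargo_args: vec!["check".into()],
    };
    // workspace_root is omitted entirely; cwd and cargoArgs are serialized:
    // {"cwd":"/ws/foo","cargoArgs":["check"]}
    println!("{}", serde_json::to_string(&r).unwrap());
}
```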
@ -500,7 +502,6 @@ pub struct ServerStatusParams {
pub health: Health, pub health: Health,
pub quiescent: bool, pub quiescent: bool,
pub message: Option<String>, pub message: Option<String>,
pub workspace_info: Option<String>,
} }
#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq)] #[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq)]


@ -1360,10 +1360,14 @@ pub(crate) fn runnable(
let config = snap.config.runnables(); let config = snap.config.runnables();
let spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id)?; let spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id)?;
let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone()); let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone());
let target = spec.as_ref().map(|s| s.target.clone()); let cwd = match runnable.kind {
ide::RunnableKind::Bin { .. } => workspace_root.clone().map(|it| it.into()),
_ => spec.as_ref().map(|it| it.cargo_toml.parent().into()),
};
let target = spec.as_ref().map(|s| s.target.as_str());
let label = runnable.label(target);
let (cargo_args, executable_args) = let (cargo_args, executable_args) =
CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg); CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg);
let label = runnable.label(target);
let location = location_link(snap, None, runnable.nav)?; let location = location_link(snap, None, runnable.nav)?;
Ok(lsp_ext::Runnable { Ok(lsp_ext::Runnable {
@ -1372,6 +1376,7 @@ pub(crate) fn runnable(
kind: lsp_ext::RunnableKind::Cargo, kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable { args: lsp_ext::CargoRunnable {
workspace_root: workspace_root.map(|it| it.into()), workspace_root: workspace_root.map(|it| it.into()),
cwd,
override_cargo: config.override_cargo, override_cargo: config.override_cargo,
cargo_args, cargo_args,
cargo_extra_args: config.cargo_extra_args, cargo_extra_args: config.cargo_extra_args,


@ -804,6 +804,10 @@ impl GlobalState {
fn handle_flycheck_msg(&mut self, message: flycheck::Message) { fn handle_flycheck_msg(&mut self, message: flycheck::Message) {
match message { match message {
flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => { flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => {
if !self.diagnostics_received {
self.diagnostics.clear_check(id);
self.diagnostics_received = true;
}
let snap = self.snapshot(); let snap = self.snapshot();
let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
&self.config.diagnostics_map(), &self.config.diagnostics_map(),
@ -832,7 +836,7 @@ impl GlobalState {
flycheck::Message::Progress { id, progress } => { flycheck::Message::Progress { id, progress } => {
let (state, message) = match progress { let (state, message) = match progress {
flycheck::Progress::DidStart => { flycheck::Progress::DidStart => {
self.diagnostics.clear_check(id); self.diagnostics_received = false;
(Progress::Begin, None) (Progress::Begin, None)
} }
flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)), flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
@ -848,6 +852,9 @@ impl GlobalState {
flycheck::Progress::DidFinish(result) => { flycheck::Progress::DidFinish(result) => {
self.last_flycheck_error = self.last_flycheck_error =
result.err().map(|err| format!("cargo check failed to start: {err}")); result.err().map(|err| format!("cargo check failed to start: {err}"));
if !self.diagnostics_received {
self.diagnostics.clear_check(id);
}
(Progress::End, None) (Progress::End, None)
} }
}; };
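
The net effect of the `diagnostics_received` flag: stale diagnostics from the previous flycheck run are no longer dropped the moment a new check starts, but lazily, either when the first fresh diagnostic arrives or, if none arrive, when the check finishes. A minimal stand-alone model of that flow (the real code also tracks a per-check `id` and maps each diagnostic to LSP):

```rust
// Simplified model of the clearing strategy above.
#[derive(Default)]
struct FlycheckDiagnostics {
    received_in_current_run: bool,
    published: Vec<String>,
}

impl FlycheckDiagnostics {
    fn on_start(&mut self) {
        // Keep the old diagnostics visible for now; just reset the flag.
        self.received_in_current_run = false;
    }
    fn on_diagnostic(&mut self, diagnostic: String) {
        // The first fresh diagnostic replaces the stale set.
        if !self.received_in_current_run {
            self.published.clear();
            self.received_in_current_run = true;
        }
        self.published.push(diagnostic);
    }
    fn on_finish(&mut self) {
        // A clean run clears the stale set too.
        if !self.received_in_current_run {
            self.published.clear();
        }
    }
}

fn main() {
    let mut d = FlycheckDiagnostics::default();
    d.on_start();
    d.on_diagnostic("error[E0308]: mismatched types".to_owned());
    d.on_finish();
    assert_eq!(d.published.len(), 1);

    d.on_start();
    d.on_finish(); // clean run: previously published diagnostics are cleared
    assert!(d.published.is_empty());
}
```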


@ -103,7 +103,6 @@ impl GlobalState {
health: lsp_ext::Health::Ok, health: lsp_ext::Health::Ok,
quiescent: self.is_quiescent(), quiescent: self.is_quiescent(),
message: None, message: None,
workspace_info: None,
}; };
let mut message = String::new(); let mut message = String::new();
@ -164,53 +163,37 @@ impl GlobalState {
let proc_macro_clients = let proc_macro_clients =
self.proc_macro_clients.iter().map(Some).chain(iter::repeat_with(|| None)); self.proc_macro_clients.iter().map(Some).chain(iter::repeat_with(|| None));
let mut workspace_info = "Loaded workspaces:\n".to_owned();
for (ws, proc_macro_client) in self.workspaces.iter().zip(proc_macro_clients) { for (ws, proc_macro_client) in self.workspaces.iter().zip(proc_macro_clients) {
format_to!(workspace_info, "- `{}`\n", ws.manifest_or_root()); if let Some(err) = ws.sysroot.error() {
format_to!(workspace_info, " - sysroot:");
match ws.sysroot.as_ref() {
Err(None) => format_to!(workspace_info, " None"),
Err(Some(e)) => {
status.health |= lsp_ext::Health::Warning;
format_to!(workspace_info, " {e}");
}
Ok(s) => {
format_to!(workspace_info, " `{}`", s.root().to_string());
if let Some(err) = s
.check_has_core()
.err()
.inspect(|_| status.health |= lsp_ext::Health::Warning)
{
format_to!(workspace_info, " ({err})");
}
if let Some(src_root) = s.src_root() {
format_to!(
workspace_info,
"\n - sysroot source: `{}`",
src_root
);
}
format_to!(workspace_info, "\n");
}
}
if let ProjectWorkspaceKind::Cargo { rustc: Err(Some(e)), .. } = &ws.kind {
status.health |= lsp_ext::Health::Warning; status.health |= lsp_ext::Health::Warning;
format_to!(workspace_info, " - rustc workspace: {e}\n"); format_to!(
message,
"Workspace `{}` has sysroot errors: ",
ws.manifest_or_root()
);
message.push_str(err);
message.push_str("\n\n");
}
if let ProjectWorkspaceKind::Cargo { rustc: Err(Some(err)), .. } = &ws.kind {
status.health |= lsp_ext::Health::Warning;
format_to!(
message,
"Failed loading rustc_private crates for workspace `{}`: ",
ws.manifest_or_root()
);
message.push_str(err);
message.push_str("\n\n");
}; };
if let Some(proc_macro_client) = proc_macro_client { if let Some(Err(err)) = proc_macro_client {
format_to!(workspace_info, " - proc-macro server: "); status.health |= lsp_ext::Health::Warning;
match proc_macro_client { format_to!(
Ok(it) => format_to!(workspace_info, "`{}`\n", it.path()), message,
Err(e) => { "Failed spawning proc-macro server for workspace `{}`: {err}",
status.health |= lsp_ext::Health::Warning; ws.manifest_or_root()
format_to!(workspace_info, "{e}\n") );
} message.push_str("\n\n");
}
} }
} }
status.workspace_info = Some(workspace_info);
} }
if !message.is_empty() { if !message.is_empty() {
@ -534,8 +517,8 @@ impl GlobalState {
.map(|(a, b)| (a.clone(), b.clone())) .map(|(a, b)| (a.clone(), b.clone()))
.chain( .chain(
ws.sysroot ws.sysroot
.as_ref() .root()
.map(|it| ("RUSTUP_TOOLCHAIN".to_owned(), it.root().to_string())), .map(|it| ("RUSTUP_TOOLCHAIN".to_owned(), it.to_string())),
) )
.collect(), .collect(),
@ -719,7 +702,7 @@ impl GlobalState {
} }
ProjectWorkspaceKind::DetachedFile { .. } => return None, ProjectWorkspaceKind::DetachedFile { .. } => return None,
}, },
ws.sysroot.as_ref().ok().map(|sysroot| sysroot.root().to_owned()), ws.sysroot.root().map(ToOwned::to_owned),
)) ))
}) })
.map(|(id, (root, manifest_path), sysroot_root)| { .map(|(id, (root, manifest_path), sysroot_root)| {


@ -21,7 +21,7 @@ fn load_cargo_with_fake_sysroot(file: &str) -> ProjectWorkspace {
rustc: Err(None), rustc: Err(None),
cargo_config_extra_env: Default::default(), cargo_config_extra_env: Default::default(),
}, },
sysroot: Ok(get_fake_sysroot()), sysroot: get_fake_sysroot(),
rustc_cfg: Vec::new(), rustc_cfg: Vec::new(),
cfg_overrides: Default::default(), cfg_overrides: Default::default(),
toolchain: None, toolchain: None,
@ -69,7 +69,7 @@ fn get_fake_sysroot() -> Sysroot {
// fake sysroot, so we give them both the same path: // fake sysroot, so we give them both the same path:
let sysroot_dir = AbsPathBuf::assert_utf8(sysroot_path); let sysroot_dir = AbsPathBuf::assert_utf8(sysroot_path);
let sysroot_src_dir = sysroot_dir.clone(); let sysroot_src_dir = sysroot_dir.clone();
Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false) Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir), false)
} }
#[test] #[test]


@ -260,6 +260,7 @@ fn main() {}
"executableArgs": ["test_eggs", "--exact", "--show-output"], "executableArgs": ["test_eggs", "--exact", "--show-output"],
"cargoExtraArgs": [], "cargoExtraArgs": [],
"overrideCargo": null, "overrideCargo": null,
"cwd": server.path().join("foo"),
"workspaceRoot": server.path().join("foo") "workspaceRoot": server.path().join("foo")
}, },
"kind": "cargo", "kind": "cargo",
@ -279,6 +280,7 @@ fn main() {}
{ {
"args": { "args": {
"overrideCargo": null, "overrideCargo": null,
"cwd": server.path().join("foo"),
"workspaceRoot": server.path().join("foo"), "workspaceRoot": server.path().join("foo"),
"cargoArgs": [ "cargoArgs": [
"test", "test",
@ -325,6 +327,7 @@ fn main() {}
"executableArgs": [], "executableArgs": [],
"cargoExtraArgs": [], "cargoExtraArgs": [],
"overrideCargo": null, "overrideCargo": null,
"cwd": server.path().join("foo"),
"workspaceRoot": server.path().join("foo") "workspaceRoot": server.path().join("foo")
}, },
"kind": "cargo", "kind": "cargo",
@ -336,6 +339,7 @@ fn main() {}
"executableArgs": [], "executableArgs": [],
"cargoExtraArgs": [], "cargoExtraArgs": [],
"overrideCargo": null, "overrideCargo": null,
"cwd": server.path().join("foo"),
"workspaceRoot": server.path().join("foo") "workspaceRoot": server.path().join("foo")
}, },
"kind": "cargo", "kind": "cargo",
@ -415,6 +419,7 @@ mod tests {
"args": { "args": {
"overrideCargo": null, "overrideCargo": null,
"workspaceRoot": server.path().join(runnable), "workspaceRoot": server.path().join(runnable),
"cwd": server.path().join(runnable),
"cargoArgs": [ "cargoArgs": [
"test", "test",
"--package", "--package",
@ -432,6 +437,94 @@ mod tests {
} }
} }
// The main fn in packages should be run from the workspace root
#[test]
fn test_runnables_cwd() {
if skip_slow_tests() {
return;
}
let server = Project::with_fixture(
r#"
//- /foo/Cargo.toml
[workspace]
members = ["mainpkg", "otherpkg"]
//- /foo/mainpkg/Cargo.toml
[package]
name = "mainpkg"
version = "0.1.0"
//- /foo/mainpkg/src/main.rs
fn main() {}
//- /foo/otherpkg/Cargo.toml
[package]
name = "otherpkg"
version = "0.1.0"
//- /foo/otherpkg/src/lib.rs
#[test]
fn otherpkg() {}
"#,
)
.root("foo")
.server()
.wait_until_workspace_is_loaded();
server.request::<Runnables>(
RunnablesParams { text_document: server.doc_id("foo/mainpkg/src/main.rs"), position: None },
json!([
"{...}",
{
"label": "cargo test -p mainpkg --all-targets",
"kind": "cargo",
"args": {
"overrideCargo": null,
"workspaceRoot": server.path().join("foo"),
"cwd": server.path().join("foo"),
"cargoArgs": [
"test",
"--package",
"mainpkg",
"--all-targets"
],
"cargoExtraArgs": [],
"executableArgs": []
},
},
"{...}",
"{...}"
]),
);
server.request::<Runnables>(
RunnablesParams { text_document: server.doc_id("foo/otherpkg/src/lib.rs"), position: None },
json!([
"{...}",
{
"label": "cargo test -p otherpkg --all-targets",
"kind": "cargo",
"args": {
"overrideCargo": null,
"workspaceRoot": server.path().join("foo"),
"cwd": server.path().join("foo").join("otherpkg"),
"cargoArgs": [
"test",
"--package",
"otherpkg",
"--all-targets"
],
"cargoExtraArgs": [],
"executableArgs": []
},
},
"{...}",
"{...}"
]),
);
}
#[test] #[test]
fn test_format_document() { fn test_format_document() {
if skip_slow_tests() { if skip_slow_tests() {
@ -1059,11 +1152,11 @@ fn resolve_proc_macro() {
return; return;
} }
let sysroot = project_model::Sysroot::discover_no_source( let sysroot = project_model::Sysroot::discover(
&AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()), &AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()),
&Default::default(), &Default::default(),
) false,
.unwrap(); );
let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap(); let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap();


@ -378,9 +378,26 @@ impl ast::UseTreeList {
/// Remove the unnecessary braces in current `UseTreeList` /// Remove the unnecessary braces in current `UseTreeList`
pub fn remove_unnecessary_braces(mut self) { pub fn remove_unnecessary_braces(mut self) {
// Returns true iff there is a single subtree and it is not the self keyword. The braces in
// `use x::{self};` are necessary and so we should not remove them.
let has_single_subtree_that_is_not_self = |u: &ast::UseTreeList| {
if let Some((single_subtree,)) = u.use_trees().collect_tuple() {
// We have a single subtree, check whether it is self.
let is_self = single_subtree.path().as_ref().map_or(false, |path| {
path.segment().and_then(|seg| seg.self_token()).is_some()
&& path.qualifier().is_none()
});
!is_self
} else {
// Not a single subtree
false
}
};
let remove_brace_in_use_tree_list = |u: &ast::UseTreeList| { let remove_brace_in_use_tree_list = |u: &ast::UseTreeList| {
let use_tree_count = u.use_trees().count(); if has_single_subtree_that_is_not_self(u) {
if use_tree_count == 1 {
if let Some(a) = u.l_curly_token() { if let Some(a) = u.l_curly_token() {
ted::remove(a) ted::remove(a)
} }
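
The comment above captures the whole rule: braces around exactly one use-tree are redundant unless that tree is the bare `self` keyword. A small self-contained illustration of the decision, operating on plain strings rather than `ast::UseTreeList` (so it ignores qualifiers and other syntax-tree details):

```rust
// Simplified stand-in for `has_single_subtree_that_is_not_self`.
fn can_remove_braces(subtrees: &[&str]) -> bool {
    match subtrees {
        [single] => *single != "self",
        _ => false,
    }
}

fn main() {
    assert!(can_remove_braces(&["Ordering"]));          // use std::cmp::{Ordering}; -> use std::cmp::Ordering;
    assert!(!can_remove_braces(&["self"]));             // use std::cmp::{self};     -> braces must stay
    assert!(!can_remove_braces(&["self", "Ordering"])); // multiple trees            -> braces must stay
}
```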


@ -8,6 +8,7 @@ use std::{
use rustc_lexer::unescape::{ use rustc_lexer::unescape::{
unescape_byte, unescape_char, unescape_mixed, unescape_unicode, EscapeError, MixedUnit, Mode, unescape_byte, unescape_char, unescape_mixed, unescape_unicode, EscapeError, MixedUnit, Mode,
}; };
use stdx::always;
use crate::{ use crate::{
ast::{self, AstToken}, ast::{self, AstToken},
@ -181,25 +182,25 @@ pub trait IsString: AstToken {
self.quote_offsets().map(|it| it.quotes.1) self.quote_offsets().map(|it| it.quotes.1)
} }
fn escaped_char_ranges(&self, cb: &mut dyn FnMut(TextRange, Result<char, EscapeError>)) { fn escaped_char_ranges(&self, cb: &mut dyn FnMut(TextRange, Result<char, EscapeError>)) {
let text_range_no_quotes = match self.text_range_between_quotes() { let Some(text_range_no_quotes) = self.text_range_between_quotes() else { return };
Some(it) => it,
None => return,
};
let start = self.syntax().text_range().start(); let start = self.syntax().text_range().start();
let text = &self.text()[text_range_no_quotes - start]; let text = &self.text()[text_range_no_quotes - start];
let offset = text_range_no_quotes.start() - start; let offset = text_range_no_quotes.start() - start;
unescape_unicode(text, Self::MODE, &mut |range, unescaped_char| { unescape_unicode(text, Self::MODE, &mut |range, unescaped_char| {
let text_range = if let Some((s, e)) = range.start.try_into().ok().zip(range.end.try_into().ok()) {
TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap()); cb(TextRange::new(s, e) + offset, unescaped_char);
cb(text_range + offset, unescaped_char); }
}); });
} }
fn map_range_up(&self, range: TextRange) -> Option<TextRange> { fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
let contents_range = self.text_range_between_quotes()?; let contents_range = self.text_range_between_quotes()?;
assert!(TextRange::up_to(contents_range.len()).contains_range(range)); if always!(TextRange::up_to(contents_range.len()).contains_range(range)) {
Some(range + contents_range.start()) Some(range + contents_range.start())
} else {
None
}
} }
} }


@ -1,5 +1,5 @@
<!--- <!---
lsp/ext.rs hash: a39009c351009d16 lsp/ext.rs hash: 1babf76a3c2cef3b
If you need to change the above hash to make the test pass, please check if you If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue: need to adjust this doc as well and ping this issue:
@ -377,6 +377,7 @@ rust-analyzer supports only one `kind`, `"cargo"`. The `args` for `"cargo"` look
```typescript ```typescript
{ {
workspaceRoot?: string; workspaceRoot?: string;
cwd?: string;
cargoArgs: string[]; cargoArgs: string[];
cargoExtraArgs: string[]; cargoExtraArgs: string[];
executableArgs: string[]; executableArgs: string[];


@ -472,12 +472,6 @@ export class Ctx implements RustAnalyzerExtensionApi {
if (status.message) { if (status.message) {
statusBar.tooltip.appendText(status.message); statusBar.tooltip.appendText(status.message);
} }
if (status.workspaceInfo) {
if (statusBar.tooltip.value) {
statusBar.tooltip.appendMarkdown("\n\n---\n\n");
}
statusBar.tooltip.appendMarkdown(status.workspaceInfo);
}
if (statusBar.tooltip.value) { if (statusBar.tooltip.value) {
statusBar.tooltip.appendMarkdown("\n\n---\n\n"); statusBar.tooltip.appendMarkdown("\n\n---\n\n");
} }


@ -3,7 +3,7 @@ import * as vscode from "vscode";
import * as path from "path"; import * as path from "path";
import type * as ra from "./lsp_ext"; import type * as ra from "./lsp_ext";
import { Cargo, type ExecutableInfo, getRustcId, getSysroot } from "./toolchain"; import { Cargo, getRustcId, getSysroot } from "./toolchain";
import type { Ctx } from "./ctx"; import type { Ctx } from "./ctx";
import { prepareEnv } from "./run"; import { prepareEnv } from "./run";
import { unwrapUndefinable } from "./undefinable"; import { unwrapUndefinable } from "./undefinable";
@ -12,7 +12,6 @@ const debugOutput = vscode.window.createOutputChannel("Debug");
type DebugConfigProvider = ( type DebugConfigProvider = (
config: ra.Runnable, config: ra.Runnable,
executable: string, executable: string,
cargoWorkspace: string,
env: Record<string, string>, env: Record<string, string>,
sourceFileMap?: Record<string, string>, sourceFileMap?: Record<string, string>,
) => vscode.DebugConfiguration; ) => vscode.DebugConfiguration;
@ -134,7 +133,7 @@ async function getDebugConfiguration(
} }
const env = prepareEnv(runnable, ctx.config.runnablesExtraEnv); const env = prepareEnv(runnable, ctx.config.runnablesExtraEnv);
const { executable, workspace: cargoWorkspace } = await getDebugExecutableInfo(runnable, env); const executable = await getDebugExecutable(runnable, env);
let sourceFileMap = debugOptions.sourceFileMap; let sourceFileMap = debugOptions.sourceFileMap;
if (sourceFileMap === "auto") { if (sourceFileMap === "auto") {
// let's try to use the default toolchain // let's try to use the default toolchain
@ -148,13 +147,7 @@ async function getDebugConfiguration(
} }
const provider = unwrapUndefinable(knownEngines[debugEngine.id]); const provider = unwrapUndefinable(knownEngines[debugEngine.id]);
const debugConfig = provider( const debugConfig = provider(runnable, simplifyPath(executable), env, sourceFileMap);
runnable,
simplifyPath(executable),
cargoWorkspace,
env,
sourceFileMap,
);
if (debugConfig.type in debugOptions.engineSettings) { if (debugConfig.type in debugOptions.engineSettings) {
const settingsMap = (debugOptions.engineSettings as any)[debugConfig.type]; const settingsMap = (debugOptions.engineSettings as any)[debugConfig.type];
for (var key in settingsMap) { for (var key in settingsMap) {
@ -176,21 +169,20 @@ async function getDebugConfiguration(
return debugConfig; return debugConfig;
} }
async function getDebugExecutableInfo( async function getDebugExecutable(
runnable: ra.Runnable, runnable: ra.Runnable,
env: Record<string, string>, env: Record<string, string>,
): Promise<ExecutableInfo> { ): Promise<string> {
const cargo = new Cargo(runnable.args.workspaceRoot || ".", debugOutput, env); const cargo = new Cargo(runnable.args.workspaceRoot || ".", debugOutput, env);
const executableInfo = await cargo.executableInfoFromArgs(runnable.args.cargoArgs); const executable = await cargo.executableFromArgs(runnable.args.cargoArgs);
// if we are here, there were no compilation errors. // if we are here, there were no compilation errors.
return executableInfo; return executable;
} }
function getCCppDebugConfig( function getCCppDebugConfig(
runnable: ra.Runnable, runnable: ra.Runnable,
executable: string, executable: string,
cargoWorkspace: string,
env: Record<string, string>, env: Record<string, string>,
sourceFileMap?: Record<string, string>, sourceFileMap?: Record<string, string>,
): vscode.DebugConfiguration { ): vscode.DebugConfiguration {
@ -200,9 +192,12 @@ function getCCppDebugConfig(
name: runnable.label, name: runnable.label,
program: executable, program: executable,
args: runnable.args.executableArgs, args: runnable.args.executableArgs,
cwd: cargoWorkspace || runnable.args.workspaceRoot, cwd: runnable.args.cwd || runnable.args.workspaceRoot || ".",
sourceFileMap, sourceFileMap,
env, environment: Object.entries(env).map((entry) => ({
name: entry[0],
value: entry[1],
})),
// See https://github.com/rust-lang/rust-analyzer/issues/16901#issuecomment-2024486941 // See https://github.com/rust-lang/rust-analyzer/issues/16901#issuecomment-2024486941
osx: { osx: {
MIMode: "lldb", MIMode: "lldb",
@ -213,7 +208,6 @@ function getCCppDebugConfig(
function getCodeLldbDebugConfig( function getCodeLldbDebugConfig(
runnable: ra.Runnable, runnable: ra.Runnable,
executable: string, executable: string,
cargoWorkspace: string,
env: Record<string, string>, env: Record<string, string>,
sourceFileMap?: Record<string, string>, sourceFileMap?: Record<string, string>,
): vscode.DebugConfiguration { ): vscode.DebugConfiguration {
@ -223,7 +217,7 @@ function getCodeLldbDebugConfig(
name: runnable.label, name: runnable.label,
program: executable, program: executable,
args: runnable.args.executableArgs, args: runnable.args.executableArgs,
cwd: cargoWorkspace || runnable.args.workspaceRoot, cwd: runnable.args.cwd || runnable.args.workspaceRoot || ".",
sourceMap: sourceFileMap, sourceMap: sourceFileMap,
sourceLanguages: ["rust"], sourceLanguages: ["rust"],
env, env,
@ -233,7 +227,6 @@ function getCodeLldbDebugConfig(
function getNativeDebugConfig( function getNativeDebugConfig(
runnable: ra.Runnable, runnable: ra.Runnable,
executable: string, executable: string,
cargoWorkspace: string,
env: Record<string, string>, env: Record<string, string>,
_sourceFileMap?: Record<string, string>, _sourceFileMap?: Record<string, string>,
): vscode.DebugConfiguration { ): vscode.DebugConfiguration {
@ -244,7 +237,7 @@ function getNativeDebugConfig(
target: executable, target: executable,
// See https://github.com/WebFreak001/code-debug/issues/359 // See https://github.com/WebFreak001/code-debug/issues/359
arguments: quote(runnable.args.executableArgs), arguments: quote(runnable.args.executableArgs),
cwd: cargoWorkspace || runnable.args.workspaceRoot, cwd: runnable.args.cwd || runnable.args.workspaceRoot || ".",
env, env,
valuesFormatting: "prettyPrinters", valuesFormatting: "prettyPrinters",
}; };


@ -226,6 +226,7 @@ export type Runnable = {
kind: "cargo"; kind: "cargo";
args: { args: {
workspaceRoot?: string; workspaceRoot?: string;
cwd?: string;
cargoArgs: string[]; cargoArgs: string[];
cargoExtraArgs: string[]; cargoExtraArgs: string[];
executableArgs: string[]; executableArgs: string[];
@ -241,7 +242,6 @@ export type ServerStatusParams = {
health: "ok" | "warning" | "error"; health: "ok" | "warning" | "error";
quiescent: boolean; quiescent: boolean;
message?: string; message?: string;
workspaceInfo?: string;
}; };
export type SsrParams = { export type SsrParams = {
query: string; query: string;


@ -9,17 +9,11 @@ import { unwrapUndefinable } from "./undefinable";
interface CompilationArtifact { interface CompilationArtifact {
fileName: string; fileName: string;
workspace: string;
name: string; name: string;
kind: string; kind: string;
isTest: boolean; isTest: boolean;
} }
export interface ExecutableInfo {
executable: string;
workspace: string;
}
export interface ArtifactSpec { export interface ArtifactSpec {
cargoArgs: string[]; cargoArgs: string[];
filter?: (artifacts: CompilationArtifact[]) => CompilationArtifact[]; filter?: (artifacts: CompilationArtifact[]) => CompilationArtifact[];
@ -74,7 +68,6 @@ export class Cargo {
artifacts.push({ artifacts.push({
fileName: message.executable, fileName: message.executable,
name: message.target.name, name: message.target.name,
workspace: path.dirname(message.manifest_path),
kind: message.target.kind[0], kind: message.target.kind[0],
isTest: message.profile.test, isTest: message.profile.test,
}); });
@ -93,7 +86,7 @@ export class Cargo {
return spec.filter?.(artifacts) ?? artifacts; return spec.filter?.(artifacts) ?? artifacts;
} }
async executableInfoFromArgs(args: readonly string[]): Promise<ExecutableInfo> { async executableFromArgs(args: readonly string[]): Promise<string> {
const artifacts = await this.getArtifacts(Cargo.artifactSpec(args)); const artifacts = await this.getArtifacts(Cargo.artifactSpec(args));
if (artifacts.length === 0) { if (artifacts.length === 0) {
@ -103,10 +96,7 @@ export class Cargo {
} }
const artifact = unwrapUndefinable(artifacts[0]); const artifact = unwrapUndefinable(artifacts[0]);
return { return artifact.fileName;
executable: artifact.fileName,
workspace: artifact.workspace,
};
} }
private async runCargo( private async runCargo(