Merge from rustc

Ralf Jung 2024-05-27 16:42:12 +02:00
commit c0adbe2e85
92 changed files with 1971 additions and 1318 deletions

View file

@ -12,9 +12,6 @@ use std::sync::OnceLock;
use rustc_hash::FxHashMap;
/// Ignored attribute namespaces used by tools.
pub const TOOL_MODULES: &[&str] = &["rustfmt", "clippy"];
pub struct BuiltinAttribute {
pub name: &'static str,
pub template: AttributeTemplate,

View file

@ -17,7 +17,7 @@ use syntax::{
self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName,
RangeItem, SlicePatComponents,
},
AstNode, AstPtr, SyntaxNodePtr,
AstNode, AstPtr, AstToken as _, SyntaxNodePtr,
};
use triomphe::Arc;
@ -1577,7 +1577,13 @@ impl ExprCollector<'_> {
});
});
let template = f.template();
let fmt_snippet = template.as_ref().map(ToString::to_string);
let fmt_snippet = template.as_ref().and_then(|it| match it {
ast::Expr::Literal(literal) => match literal.kind() {
ast::LiteralKind::String(s) => Some(s.text().to_owned()),
_ => None,
},
_ => None,
});
let mut mappings = vec![];
let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) {
Some((s, is_direct_literal)) => format_args::parse(

View file

@ -150,7 +150,7 @@ fn desugar_builtin_format_args() {
fn main() {
let are = "are";
let count = 10;
builtin#format_args("hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!");
builtin#format_args("\u{1b}hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!");
}
"#,
);
@ -161,7 +161,7 @@ fn main() {
let count = 10;
builtin#lang(Arguments::new_v1_formatted)(
&[
"hello ", " ", " friends, we ", " ", "",
"\u{1b}hello ", " ", " friends, we ", " ", "",
],
&[
builtin#lang(Argument::new_display)(

View file

@ -528,3 +528,65 @@ fn f() {$0
"#]],
)
}
#[test]
fn resolve_extern_prelude_in_block() {
check_at(
r#"
//- /main.rs crate:main deps:core
fn main() {
mod f {
use core::S;
$0
}
}
//- /core.rs crate:core
pub struct S;
"#,
expect![[r#"
block scope
f: t
block scope::f
S: ti vi
crate
main: v
"#]],
)
}
#[test]
fn shadow_extern_prelude_in_block() {
check_at(
r#"
//- /main.rs crate:main deps:core
fn main() {
mod core { pub struct S; }
{
fn inner() {} // forces a block def map
use core::S; // should resolve to the local one
$0
}
}
//- /core.rs crate:core
pub const S;
"#,
expect![[r#"
block scope
S: ti vi
inner: v
block scope
core: t
block scope::core
S: t v
crate
main: v
"#]],
)
}
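Stripped of the fixture markup, the second test above models roughly this source (a sketch; the `$0` caret and the crate annotations are test-fixture syntax and are omitted here):

fn main() {
    mod core { pub struct S; }      // block-local module named like the extern crate
    {
        fn inner() {}               // forces a block def map, as in the fixture
        use core::S;                // rust-analyzer now resolves this to the local
                                    // `mod core`, not to the `core` crate from the
                                    // extern prelude
        let _ = S;
    }
}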

File diff suppressed because it is too large.

View file

@ -175,7 +175,13 @@ pub(crate) fn parse(
mut synth: impl FnMut(Name) -> ExprId,
mut record_usage: impl FnMut(Name, Option<TextRange>),
) -> FormatArgs {
let text = s.text_without_quotes();
let Ok(text) = s.value() else {
return FormatArgs {
template: Default::default(),
arguments: args.finish(),
orphans: vec![],
};
};
let str_style = match s.quote_offsets() {
Some(offsets) => {
let raw = usize::from(offsets.quotes.0.len()) - 1;
@ -186,7 +192,7 @@ pub(crate) fn parse(
};
let mut parser =
parse::Parser::new(text, str_style, fmt_snippet, false, parse::ParseMode::Format);
parse::Parser::new(&text, str_style, fmt_snippet, false, parse::ParseMode::Format);
let mut pieces = Vec::new();
while let Some(piece) = parser.next() {
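The switch from `text_without_quotes()` to `s.value()` above hands the format-string parser the unescaped literal content, and bails out early when unescaping fails; the desugaring test earlier in this commit now exercises a `\u{1b}` escape in the template for exactly this path. A standalone illustration of the difference (plain Rust, not the rust-analyzer API):

fn main() {
    let as_written = r"\u{1b}hello {}"; // the raw text between the quotes spells out the escape
    let unescaped = "\u{1b}hello {}";   // the value contains a single ESC (0x1b) character
    assert_eq!(unescaped.chars().next(), Some('\u{1b}'));
    assert_ne!(as_written.len(), unescaped.len());
}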

View file

@ -1,13 +1,12 @@
//! A map of all publicly exported items in a crate.
use std::{fmt, hash::BuildHasherDefault};
use std::fmt;
use base_db::CrateId;
use fst::{raw::IndexedValue, Automaton, Streamer};
use hir_expand::name::Name;
use indexmap::IndexMap;
use itertools::Itertools;
use rustc_hash::{FxHashSet, FxHasher};
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use stdx::{format_to, TupleExt};
use triomphe::Arc;
@ -17,7 +16,7 @@ use crate::{
item_scope::{ImportOrExternCrate, ItemInNs},
nameres::DefMap,
visibility::Visibility,
AssocItemId, ModuleDefId, ModuleId, TraitId,
AssocItemId, FxIndexMap, ModuleDefId, ModuleId, TraitId,
};
/// Item import details stored in the `ImportMap`.
@ -58,7 +57,6 @@ enum IsTraitAssocItem {
No,
}
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
type ImportMapIndex = FxIndexMap<ItemInNs, (SmallVec<[ImportInfo; 1]>, IsTraitAssocItem)>;
impl ImportMap {

View file

@ -295,7 +295,7 @@ impl ItemScope {
pub(crate) fn names_of<T>(
&self,
item: ItemInNs,
mut cb: impl FnMut(&Name, Visibility, bool) -> Option<T>,
mut cb: impl FnMut(&Name, Visibility, /*declared*/ bool) -> Option<T>,
) -> Option<T> {
match item {
ItemInNs::Macros(def) => self

View file

@ -106,6 +106,18 @@ use crate::{
},
};
type FxIndexMap<K, V> =
indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
/// A wrapper around two booleans, [`ImportPathConfig::prefer_no_std`] and [`ImportPathConfig::prefer_prelude`].
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
pub struct ImportPathConfig {
/// If true, prefer to unconditionally use imports of the `core` and `alloc` crates
/// over `std`.
pub prefer_no_std: bool,
/// If true, prefer import paths containing a prelude module.
pub prefer_prelude: bool,
}
#[derive(Debug)]
pub struct ItemLoc<N: ItemTreeNode> {
pub container: ModuleId,
@ -455,6 +467,26 @@ impl ModuleId {
pub fn is_block_module(self) -> bool {
self.block.is_some() && self.local_id == DefMap::ROOT
}
pub fn is_within_block(self) -> bool {
self.block.is_some()
}
pub fn as_crate_root(&self) -> Option<CrateRootModuleId> {
if self.local_id == DefMap::ROOT && self.block.is_none() {
Some(CrateRootModuleId { krate: self.krate })
} else {
None
}
}
pub fn derive_crate_root(&self) -> CrateRootModuleId {
CrateRootModuleId { krate: self.krate }
}
fn is_crate_root(&self) -> bool {
self.local_id == DefMap::ROOT && self.block.is_none()
}
}
impl PartialEq<CrateRootModuleId> for ModuleId {
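The new `ImportPathConfig` bundles the `prefer_no_std`/`prefer_prelude` flags that the path-finding APIs used to take as two loose booleans; the rest of this commit threads it through `Module::find_path`/`find_use_path` and the assist/completion call sites. A sketch of the resulting call shape, assuming the rust-analyzer workspace (the function and its locals are illustrative, not part of the codebase):

use hir::{db::DefDatabase, ImportPathConfig, ItemInNs, ModPath, Module};

fn path_for(
    db: &dyn DefDatabase,
    module: Module,
    item: ItemInNs,
    prefer_no_std: bool,
    prefer_prelude: bool,
) -> Option<ModPath> {
    // one value instead of two positional booleans at every call site
    let cfg = ImportPathConfig { prefer_no_std, prefer_prelude };
    module.find_path(db, item, cfg)
}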

View file

@ -81,9 +81,17 @@ use crate::{
per_ns::PerNs,
visibility::{Visibility, VisibilityExplicitness},
AstId, BlockId, BlockLoc, CrateRootModuleId, EnumId, EnumVariantId, ExternCrateId, FunctionId,
LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId,
FxIndexMap, LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId,
};
const PREDEFINED_TOOLS: &[SmolStr] = &[
SmolStr::new_static("clippy"),
SmolStr::new_static("rustfmt"),
SmolStr::new_static("diagnostic"),
SmolStr::new_static("miri"),
SmolStr::new_static("rust_analyzer"),
];
/// Contains the results of (early) name resolution.
///
/// A `DefMap` stores the module tree and the definitions that are in scope in every module after
@ -129,7 +137,7 @@ pub struct DefMap {
#[derive(Clone, Debug, PartialEq, Eq)]
struct DefMapCrateData {
/// The extern prelude which contains all root modules of external crates that are in scope.
extern_prelude: FxHashMap<Name, (CrateRootModuleId, Option<ExternCrateId>)>,
extern_prelude: FxIndexMap<Name, (CrateRootModuleId, Option<ExternCrateId>)>,
/// Side table for resolving derive helpers.
exported_derives: FxHashMap<MacroDefId, Box<[Name]>>,
@ -155,12 +163,12 @@ struct DefMapCrateData {
impl DefMapCrateData {
fn new(edition: Edition) -> Self {
Self {
extern_prelude: FxHashMap::default(),
extern_prelude: FxIndexMap::default(),
exported_derives: FxHashMap::default(),
fn_proc_macro_mapping: FxHashMap::default(),
proc_macro_loading_error: None,
registered_attrs: Vec::new(),
registered_tools: Vec::new(),
registered_tools: PREDEFINED_TOOLS.into(),
unstable_features: FxHashSet::default(),
rustc_coherence_is_core: false,
no_core: false,
@ -578,7 +586,8 @@ impl DefMap {
pub(crate) fn extern_prelude(
&self,
) -> impl Iterator<Item = (&Name, (CrateRootModuleId, Option<ExternCrateId>))> + '_ {
) -> impl DoubleEndedIterator<Item = (&Name, (CrateRootModuleId, Option<ExternCrateId>))> + '_
{
self.data.extern_prelude.iter().map(|(name, &def)| (name, def))
}

View file

@ -10,7 +10,7 @@ use syntax::{ast, SmolStr};
use triomphe::Arc;
use crate::{
attr::builtin::{find_builtin_attr_idx, TOOL_MODULES},
attr::builtin::find_builtin_attr_idx,
db::DefDatabase,
item_scope::BuiltinShadowMode,
nameres::path_resolution::ResolveMode,
@ -82,8 +82,7 @@ impl DefMap {
let name = name.to_smol_str();
let pred = |n: &_| *n == name;
let registered = self.data.registered_tools.iter().map(SmolStr::as_str);
let is_tool = TOOL_MODULES.iter().copied().chain(registered).any(pred);
let is_tool = self.data.registered_tools.iter().map(SmolStr::as_str).any(pred);
// FIXME: tool modules can be shadowed by actual modules
if is_tool {
return true;
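Because `registered_tools` is now pre-seeded with `PREDEFINED_TOOLS` (earlier in this commit), the `is_tool` check above no longer consults the separate `TOOL_MODULES` constant. At the user level this is roughly what keeps tool attributes like the following resolving (plain Rust, compiles as-is):

#[rustfmt::skip] // `rustfmt` is one of the pre-registered tool namespaces,
fn hand_formatted(  x :i32 ) ->i32 { x } // alongside clippy, diagnostic, miri and rust_analyzer

fn main() {
    let _ = hand_formatted(1);
}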

View file

@ -395,6 +395,8 @@ impl DefCollector<'_> {
.cfg()
.map_or(true, |cfg| self.cfg_options.check(&cfg) != Some(false));
if is_cfg_enabled {
self.inject_prelude();
ModCollector {
def_collector: self,
macro_depth: 0,

View file

@ -221,7 +221,7 @@ impl DefMap {
None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
};
tracing::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
self.resolve_name_in_crate_root_or_extern_prelude(db, segment)
self.resolve_name_in_crate_root_or_extern_prelude(db, original_module, segment)
}
PathKind::Plain => {
let (_, segment) = match segments.next() {
@ -470,9 +470,9 @@ impl DefMap {
};
let extern_prelude = || {
if self.block.is_some() {
// Don't resolve extern prelude in block `DefMap`s, defer it to the crate def map so
// that blocks can properly shadow them
if self.block.is_some() && module == DefMap::ROOT {
// Don't resolve extern prelude in pseudo-modules of blocks, because
// they might be shadowed by local names.
return PerNs::none();
}
self.data.extern_prelude.get(name).map_or(PerNs::none(), |&(it, extern_crate)| {
@ -505,6 +505,7 @@ impl DefMap {
fn resolve_name_in_crate_root_or_extern_prelude(
&self,
db: &dyn DefDatabase,
module: LocalModuleId,
name: &Name,
) -> PerNs {
let from_crate_root = match self.block {
@ -515,8 +516,8 @@ impl DefMap {
None => self[Self::ROOT].scope.get(name),
};
let from_extern_prelude = || {
if self.block.is_some() {
// Don't resolve extern prelude in block `DefMap`s.
if self.block.is_some() && module == DefMap::ROOT {
// Don't resolve extern prelude in pseudo-module of a block.
return PerNs::none();
}
self.data.extern_prelude.get(name).copied().map_or(
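The two hunks above narrow the old blanket rule: a block `DefMap` now skips the extern prelude only when resolving in the block's own root pseudo-module (where block-local items may shadow it), while modules nested inside a block see the extern prelude again, which is what the `resolve_extern_prelude_in_block` test added earlier pins down. A plain-Rust sketch of that nested-module case:

fn main() {
    mod f {
        // inside a module nested in a block, extern-prelude names resolve,
        // so `core` here is the extern crate:
        use core::primitive::u8 as Byte;
        pub fn zero() -> Byte { 0 }
    }
    let _ = f::zero();
}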

View file

@ -1,12 +1,11 @@
//! Name resolution façade.
use std::{fmt, hash::BuildHasherDefault, iter, mem};
use std::{fmt, iter, mem};
use base_db::CrateId;
use hir_expand::{
name::{name, Name},
MacroDefId,
};
use indexmap::IndexMap;
use intern::Interned;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};
@ -27,10 +26,10 @@ use crate::{
type_ref::LifetimeRef,
visibility::{RawVisibility, Visibility},
AdtId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId,
ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId,
ItemContainerId, ItemTreeLoc, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId,
MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId,
TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId,
ExternBlockId, ExternCrateId, FunctionId, FxIndexMap, GenericDefId, GenericParamId, HasModule,
ImplId, ItemContainerId, ItemTreeLoc, LifetimeParamId, LocalModuleId, Lookup, Macro2Id,
MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId,
TraitId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId,
};
#[derive(Debug, Clone)]
@ -957,7 +956,6 @@ fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option<ImportOrExternCrate>)> {
Some((res, import))
}
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
#[derive(Default)]
struct ScopeNames {
map: FxIndexMap<Name, SmallVec<[ScopeDef; 1]>>,

View file

@ -13,7 +13,7 @@ use either::Either;
use hir_def::{
data::adt::VariantData,
db::DefDatabase,
find_path,
find_path::{self, PrefixKind},
generics::{TypeOrConstParamData, TypeParamProvenance},
item_scope::ItemInNs,
lang_item::{LangItem, LangItemTarget},
@ -21,7 +21,8 @@ use hir_def::{
path::{Path, PathKind},
type_ref::{TraitBoundModifier, TypeBound, TypeRef},
visibility::Visibility,
HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId, TraitId,
HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId,
TraitId,
};
use hir_expand::name::Name;
use intern::{Internable, Interned};
@ -999,8 +1000,9 @@ impl HirDisplay for Ty {
db.upcast(),
ItemInNs::Types((*def_id).into()),
module_id,
PrefixKind::Plain,
false,
true,
ImportPathConfig { prefer_no_std: false, prefer_prelude: true },
) {
write!(f, "{}", path.display(f.db.upcast()))?;
} else {

View file

@ -933,8 +933,24 @@ impl InferenceContext<'_> {
let prev_ret_coercion =
mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone())));
// FIXME: We should handle async blocks like we handle closures
let expected = &Expectation::has_type(ret_ty);
let (_, inner_ty) = self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
this.infer_block(tgt_expr, *id, statements, *tail, None, &Expectation::has_type(ret_ty))
let ty = this.infer_block(tgt_expr, *id, statements, *tail, None, expected);
if let Some(target) = expected.only_has_type(&mut this.table) {
match this.coerce(Some(tgt_expr), &ty, &target) {
Ok(res) => res,
Err(_) => {
this.result.type_mismatches.insert(
tgt_expr.into(),
TypeMismatch { expected: target.clone(), actual: ty.clone() },
);
target
}
}
} else {
ty
}
});
self.diverges = prev_diverges;

View file

@ -797,19 +797,22 @@ impl<'a> InferenceTable<'a> {
})
.build();
let projection = {
let b = TyBuilder::subst_for_def(self.db, fn_once_trait, None);
if b.remaining() != 2 {
return None;
}
let fn_once_subst = b.push(ty.clone()).push(arg_ty).build();
let b = TyBuilder::trait_ref(self.db, fn_once_trait);
if b.remaining() != 2 {
return None;
}
let mut trait_ref = b.push(ty.clone()).push(arg_ty).build();
TyBuilder::assoc_type_projection(self.db, output_assoc_type, Some(fn_once_subst))
.build()
let projection = {
TyBuilder::assoc_type_projection(
self.db,
output_assoc_type,
Some(trait_ref.substitution.clone()),
)
.build()
};
let trait_env = self.trait_env.env.clone();
let mut trait_ref = projection.trait_ref(self.db);
let obligation = InEnvironment {
goal: trait_ref.clone().cast(Interner),
environment: trait_env.clone(),

View file

@ -1,7 +1,7 @@
use chalk_ir::{AdtId, TyKind};
use either::Either;
use hir_def::db::DefDatabase;
use project_model::target_data_layout::RustcDataLayoutConfig;
use project_model::{target_data_layout::RustcDataLayoutConfig, Sysroot};
use rustc_hash::FxHashMap;
use test_fixture::WithFixture;
use triomphe::Arc;
@ -17,7 +17,7 @@ mod closure;
fn current_machine_data_layout() -> String {
project_model::target_data_layout::get(
RustcDataLayoutConfig::Rustc(None),
RustcDataLayoutConfig::Rustc(&Sysroot::empty()),
None,
&FxHashMap::default(),
)

View file

@ -570,6 +570,10 @@ impl CallableSig {
}
}
pub fn abi(&self) -> FnAbi {
self.abi
}
pub fn params(&self) -> &[Ty] {
&self.params_and_return[0..self.params_and_return.len() - 1]
}
@ -892,20 +896,16 @@ where
Canonical { value, binders: chalk_ir::CanonicalVarKinds::from_iter(Interner, kinds) }
}
pub fn callable_sig_from_fnonce(
mut self_ty: &Ty,
env: Arc<TraitEnvironment>,
pub fn callable_sig_from_fn_trait(
self_ty: &Ty,
trait_env: Arc<TraitEnvironment>,
db: &dyn HirDatabase,
) -> Option<CallableSig> {
if let Some((ty, _, _)) = self_ty.as_reference() {
// This will happen when it implements fn or fn mut, since we add an autoborrow adjustment
self_ty = ty;
}
let krate = env.krate;
) -> Option<(FnTrait, CallableSig)> {
let krate = trait_env.krate;
let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?;
let output_assoc_type = db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
let mut table = InferenceTable::new(db, env);
let mut table = InferenceTable::new(db, trait_env.clone());
let b = TyBuilder::trait_ref(db, fn_once_trait);
if b.remaining() != 2 {
return None;
@ -915,23 +915,56 @@ pub fn callable_sig_from_fnonce(
// - Self: FnOnce<?args_ty>
// - <Self as FnOnce<?args_ty>>::Output == ?ret_ty
let args_ty = table.new_type_var();
let trait_ref = b.push(self_ty.clone()).push(args_ty.clone()).build();
let mut trait_ref = b.push(self_ty.clone()).push(args_ty.clone()).build();
let projection = TyBuilder::assoc_type_projection(
db,
output_assoc_type,
Some(trait_ref.substitution.clone()),
)
.build();
table.register_obligation(trait_ref.cast(Interner));
let ret_ty = table.normalize_projection_ty(projection);
let ret_ty = table.resolve_completely(ret_ty);
let args_ty = table.resolve_completely(args_ty);
let block = trait_env.block;
let trait_env = trait_env.env.clone();
let obligation =
InEnvironment { goal: trait_ref.clone().cast(Interner), environment: trait_env.clone() };
let canonical = table.canonicalize(obligation.clone());
if db.trait_solve(krate, block, canonical.cast(Interner)).is_some() {
table.register_obligation(obligation.goal);
let return_ty = table.normalize_projection_ty(projection);
for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] {
let fn_x_trait = fn_x.get_id(db, krate)?;
trait_ref.trait_id = to_chalk_trait_id(fn_x_trait);
let obligation: chalk_ir::InEnvironment<chalk_ir::Goal<Interner>> = InEnvironment {
goal: trait_ref.clone().cast(Interner),
environment: trait_env.clone(),
};
let canonical = table.canonicalize(obligation.clone());
if db.trait_solve(krate, block, canonical.cast(Interner)).is_some() {
let ret_ty = table.resolve_completely(return_ty);
let args_ty = table.resolve_completely(args_ty);
let params = args_ty
.as_tuple()?
.iter(Interner)
.map(|it| it.assert_ty_ref(Interner))
.cloned()
.collect();
let params =
args_ty.as_tuple()?.iter(Interner).map(|it| it.assert_ty_ref(Interner)).cloned().collect();
Some(CallableSig::from_params_and_return(params, ret_ty, false, Safety::Safe, FnAbi::RustCall))
return Some((
fn_x,
CallableSig::from_params_and_return(
params,
ret_ty,
false,
Safety::Safe,
FnAbi::RustCall,
),
));
}
}
unreachable!("It should at least implement FnOnce at this point");
} else {
None
}
}
struct PlaceholderCollector<'db> {
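`callable_sig_from_fnonce` becomes `callable_sig_from_fn_trait` above: it probes `Fn`, `FnMut` and `FnOnce` in that order and returns which trait matched along with the signature. The `hir` layer surfaces that through the new `CallableKind::FnImpl(FnTrait)` variant later in this commit; a consumer-side sketch, assuming the rust-analyzer workspace (the `describe` helper is illustrative):

use hir::CallableKind;

fn describe(kind: &CallableKind) -> String {
    match kind {
        // the new variant records which fn-trait the callee implements; `FnTrait`
        // also gains a `Display` impl ("Fn" / "FnMut" / "FnOnce") in this commit
        CallableKind::FnImpl(fn_trait) => format!("callable via {fn_trait}"),
        _ => "function, tuple struct, enum variant, closure or fn pointer".to_owned(),
    }
}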

View file

@ -1,5 +1,6 @@
//! Trait solving using Chalk.
use core::fmt;
use std::env::var;
use chalk_ir::{fold::TypeFoldable, DebruijnIndex, GoalData};
@ -209,7 +210,25 @@ pub enum FnTrait {
Fn,
}
impl fmt::Display for FnTrait {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
FnTrait::FnOnce => write!(f, "FnOnce"),
FnTrait::FnMut => write!(f, "FnMut"),
FnTrait::Fn => write!(f, "Fn"),
}
}
}
impl FnTrait {
pub const fn function_name(&self) -> &'static str {
match self {
FnTrait::FnOnce => "call_once",
FnTrait::FnMut => "call_mut",
FnTrait::Fn => "call",
}
}
const fn lang_item(self) -> LangItem {
match self {
FnTrait::FnOnce => LangItem::FnOnce,

View file

@ -35,7 +35,7 @@ pub mod term_search;
mod display;
use std::{iter, mem::discriminant, ops::ControlFlow};
use std::{mem::discriminant, ops::ControlFlow};
use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateId, CrateOrigin, FileId};
@ -52,7 +52,6 @@ use hir_def::{
path::ImportAlias,
per_ns::PerNs,
resolver::{HasResolver, Resolver},
src::HasSource as _,
AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId,
EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule,
ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, MacroExpander,
@ -69,7 +68,7 @@ use hir_ty::{
diagnostics::BodyValidationDiagnostic,
error_lifetime, known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
method_resolution::{self, TyFingerprint},
method_resolution::{self},
mir::{interpret_mir, MutBorrowKind},
primitive::UintTy,
traits::FnTrait,
@ -100,6 +99,7 @@ pub use crate::{
VisibleTraits,
},
};
pub use hir_ty::method_resolution::TyFingerprint;
// Be careful with these re-exports.
//
@ -123,6 +123,7 @@ pub use {
per_ns::Namespace,
type_ref::{Mutability, TypeRef},
visibility::Visibility,
ImportPathConfig,
// FIXME: This is here since some queries take it as input that are used
// outside of hir.
{AdtId, MacroId, ModuleDefId},
@ -141,7 +142,7 @@ pub use {
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
layout::LayoutError,
mir::{MirEvalError, MirLowerError},
PointerCast, Safety,
FnAbi, PointerCast, Safety,
},
// FIXME: Properly encapsulate mir
hir_ty::{mir, Interner as ChalkTyInterner},
@ -789,40 +790,32 @@ impl Module {
/// Finds a path that can be used to refer to the given item from within
/// this module, if possible.
pub fn find_use_path(
pub fn find_path(
self,
db: &dyn DefDatabase,
item: impl Into<ItemInNs>,
prefer_no_std: bool,
prefer_prelude: bool,
cfg: ImportPathConfig,
) -> Option<ModPath> {
hir_def::find_path::find_path(
db,
item.into().into(),
self.into(),
prefer_no_std,
prefer_prelude,
PrefixKind::Plain,
false,
cfg,
)
}
/// Finds a path that can be used to refer to the given item from within
/// this module, if possible. This is used for returning import paths for use-statements.
pub fn find_use_path_prefixed(
pub fn find_use_path(
self,
db: &dyn DefDatabase,
item: impl Into<ItemInNs>,
prefix_kind: PrefixKind,
prefer_no_std: bool,
prefer_prelude: bool,
cfg: ImportPathConfig,
) -> Option<ModPath> {
hir_def::find_path::find_path_prefixed(
db,
item.into().into(),
self.into(),
prefix_kind,
prefer_no_std,
prefer_prelude,
)
hir_def::find_path::find_path(db, item.into().into(), self.into(), prefix_kind, true, cfg)
}
}
@ -1965,7 +1958,7 @@ impl Function {
.enumerate()
.map(|(idx, ty)| {
let ty = Type { env: environment.clone(), ty: ty.clone() };
Param { func: self, ty, idx }
Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
})
.collect()
}
@ -1991,7 +1984,7 @@ impl Function {
.skip(skip)
.map(|(idx, ty)| {
let ty = Type { env: environment.clone(), ty: ty.clone() };
Param { func: self, ty, idx }
Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
})
.collect()
}
@ -2037,7 +2030,7 @@ impl Function {
.skip(skip)
.map(|(idx, ty)| {
let ty = Type { env: environment.clone(), ty: ty.clone() };
Param { func: self, ty, idx }
Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
})
.collect()
}
@ -2167,17 +2160,24 @@ impl From<hir_ty::Mutability> for Access {
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct Param {
func: Function,
func: Callee,
/// The index in parameter list, including self parameter.
idx: usize,
ty: Type,
}
impl Param {
pub fn parent_fn(&self) -> Function {
self.func
pub fn parent_fn(&self) -> Option<Function> {
match self.func {
Callee::Def(CallableDefId::FunctionId(f)) => Some(f.into()),
_ => None,
}
}
// pub fn parent_closure(&self) -> Option<Closure> {
// self.func.as_ref().right().cloned()
// }
pub fn index(&self) -> usize {
self.idx
}
@ -2191,7 +2191,11 @@ impl Param {
}
pub fn as_local(&self, db: &dyn HirDatabase) -> Option<Local> {
let parent = DefWithBodyId::FunctionId(self.func.into());
let parent = match self.func {
Callee::Def(CallableDefId::FunctionId(it)) => DefWithBodyId::FunctionId(it),
Callee::Closure(closure, _) => db.lookup_intern_closure(closure.into()).0,
_ => return None,
};
let body = db.body(parent);
if let Some(self_param) = body.self_param.filter(|_| self.idx == 0) {
Some(Local { parent, binding_id: self_param })
@ -2205,18 +2209,45 @@ impl Param {
}
pub fn pattern_source(&self, db: &dyn HirDatabase) -> Option<ast::Pat> {
self.source(db).and_then(|p| p.value.pat())
self.source(db).and_then(|p| p.value.right()?.pat())
}
pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::Param>> {
let InFile { file_id, value } = self.func.source(db)?;
let params = value.param_list()?;
if params.self_param().is_some() {
params.params().nth(self.idx.checked_sub(params.self_param().is_some() as usize)?)
} else {
params.params().nth(self.idx)
pub fn source(
&self,
db: &dyn HirDatabase,
) -> Option<InFile<Either<ast::SelfParam, ast::Param>>> {
match self.func {
Callee::Def(CallableDefId::FunctionId(func)) => {
let InFile { file_id, value } = Function { id: func }.source(db)?;
let params = value.param_list()?;
if let Some(self_param) = params.self_param() {
if let Some(idx) = self.idx.checked_sub(1) {
params.params().nth(idx).map(Either::Right)
} else {
Some(Either::Left(self_param))
}
} else {
params.params().nth(self.idx).map(Either::Right)
}
.map(|value| InFile { file_id, value })
}
Callee::Closure(closure, _) => {
let InternedClosure(owner, expr_id) = db.lookup_intern_closure(closure.into());
let (_, source_map) = db.body_with_source_map(owner);
let ast @ InFile { file_id, value } = source_map.expr_syntax(expr_id).ok()?;
let root = db.parse_or_expand(file_id);
match value.to_node(&root) {
ast::Expr::ClosureExpr(it) => it
.param_list()?
.params()
.nth(self.idx)
.map(Either::Right)
.map(|value| InFile { file_id: ast.file_id, value }),
_ => None,
}
}
_ => None,
}
.map(|value| InFile { file_id, value })
}
}
@ -3372,34 +3403,21 @@ impl BuiltinAttr {
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ToolModule {
krate: Option<CrateId>,
krate: CrateId,
idx: u32,
}
impl ToolModule {
// FIXME: consider crates\hir_def\src\nameres\attr_resolution.rs?
pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
if let builtin @ Some(_) = Self::builtin(name) {
return builtin;
}
let krate = krate.id;
let idx =
db.crate_def_map(krate.id).registered_tools().iter().position(|it| it == name)? as u32;
Some(ToolModule { krate: Some(krate.id), idx })
}
fn builtin(name: &str) -> Option<Self> {
hir_def::attr::builtin::TOOL_MODULES
.iter()
.position(|&tool| tool == name)
.map(|idx| ToolModule { krate: None, idx: idx as u32 })
db.crate_def_map(krate).registered_tools().iter().position(|it| it == name)? as u32;
Some(ToolModule { krate, idx })
}
pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
// FIXME: Return a `Name` here
match self.krate {
Some(krate) => db.crate_def_map(krate).registered_tools()[self.idx as usize].clone(),
None => SmolStr::new(hir_def::attr::builtin::TOOL_MODULES[self.idx as usize]),
}
db.crate_def_map(self.krate).registered_tools()[self.idx as usize].clone()
}
}
@ -4292,27 +4310,37 @@ impl Type {
}
pub fn as_callable(&self, db: &dyn HirDatabase) -> Option<Callable> {
let mut the_ty = &self.ty;
let callee = match self.ty.kind(Interner) {
TyKind::Ref(_, _, ty) if ty.as_closure().is_some() => {
the_ty = ty;
Callee::Closure(ty.as_closure().unwrap())
}
TyKind::Closure(id, _) => Callee::Closure(*id),
TyKind::Closure(id, subst) => Callee::Closure(*id, subst.clone()),
TyKind::Function(_) => Callee::FnPtr,
TyKind::FnDef(..) => Callee::Def(self.ty.callable_def(db)?),
_ => {
let sig = hir_ty::callable_sig_from_fnonce(&self.ty, self.env.clone(), db)?;
kind => {
// This will happen when it implements fn or fn mut, since we add an autoborrow adjustment
let (ty, kind) = if let TyKind::Ref(_, _, ty) = kind {
(ty, ty.kind(Interner))
} else {
(&self.ty, kind)
};
if let TyKind::Closure(closure, subst) = kind {
let sig = ty.callable_sig(db)?;
return Some(Callable {
ty: self.clone(),
sig,
callee: Callee::Closure(*closure, subst.clone()),
is_bound_method: false,
});
}
let (fn_trait, sig) = hir_ty::callable_sig_from_fn_trait(ty, self.env.clone(), db)?;
return Some(Callable {
ty: self.clone(),
sig,
callee: Callee::Other,
callee: Callee::FnImpl(fn_trait),
is_bound_method: false,
});
}
};
let sig = the_ty.callable_sig(db)?;
let sig = self.ty.callable_sig(db)?;
Some(Callable { ty: self.clone(), sig, callee, is_bound_method: false })
}
@ -4929,37 +4957,39 @@ pub struct Callable {
sig: CallableSig,
callee: Callee,
/// Whether this is a method that was called with method call syntax.
pub(crate) is_bound_method: bool,
is_bound_method: bool,
}
#[derive(Debug)]
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
enum Callee {
Def(CallableDefId),
Closure(ClosureId),
Closure(ClosureId, Substitution),
FnPtr,
Other,
FnImpl(FnTrait),
}
pub enum CallableKind {
Function(Function),
TupleStruct(Struct),
TupleEnumVariant(Variant),
Closure,
Closure(Closure),
FnPtr,
/// Some other type that implements `FnOnce`.
Other,
FnImpl(FnTrait),
}
impl Callable {
pub fn kind(&self) -> CallableKind {
use Callee::*;
match self.callee {
Def(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()),
Def(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()),
Def(CallableDefId::EnumVariantId(it)) => CallableKind::TupleEnumVariant(it.into()),
Closure(_) => CallableKind::Closure,
FnPtr => CallableKind::FnPtr,
Other => CallableKind::Other,
Callee::Def(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()),
Callee::Def(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()),
Callee::Def(CallableDefId::EnumVariantId(it)) => {
CallableKind::TupleEnumVariant(it.into())
}
Callee::Closure(id, ref subst) => {
CallableKind::Closure(Closure { id, subst: subst.clone() })
}
Callee::FnPtr => CallableKind::FnPtr,
Callee::FnImpl(fn_) => CallableKind::FnImpl(fn_),
}
}
pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<(SelfParam, Type)> {
@ -4973,43 +5003,15 @@ impl Callable {
pub fn n_params(&self) -> usize {
self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
}
pub fn params(
&self,
db: &dyn HirDatabase,
) -> Vec<(Option<Either<ast::SelfParam, ast::Pat>>, Type)> {
let types = self
.sig
pub fn params(&self) -> Vec<Param> {
self.sig
.params()
.iter()
.enumerate()
.skip(if self.is_bound_method { 1 } else { 0 })
.map(|ty| self.ty.derived(ty.clone()));
let map_param = |it: ast::Param| it.pat().map(Either::Right);
let patterns = match self.callee {
Callee::Def(CallableDefId::FunctionId(func)) => {
let src = func.lookup(db.upcast()).source(db.upcast());
src.value.param_list().map(|param_list| {
param_list
.self_param()
.map(|it| Some(Either::Left(it)))
.filter(|_| !self.is_bound_method)
.into_iter()
.chain(param_list.params().map(map_param))
})
}
Callee::Closure(closure_id) => match closure_source(db, closure_id) {
Some(src) => src.param_list().map(|param_list| {
param_list
.self_param()
.map(|it| Some(Either::Left(it)))
.filter(|_| !self.is_bound_method)
.into_iter()
.chain(param_list.params().map(map_param))
}),
None => None,
},
_ => None,
};
patterns.into_iter().flatten().chain(iter::repeat(None)).zip(types).collect()
.map(|(idx, ty)| (idx, self.ty.derived(ty.clone())))
.map(|(idx, ty)| Param { func: self.callee.clone(), idx, ty })
.collect()
}
pub fn return_type(&self) -> Type {
self.ty.derived(self.sig.ret().clone())
@ -5017,17 +5019,9 @@ impl Callable {
pub fn sig(&self) -> &CallableSig {
&self.sig
}
}
fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option<ast::ClosureExpr> {
let InternedClosure(owner, expr_id) = db.lookup_intern_closure(closure.into());
let (_, source_map) = db.body_with_source_map(owner);
let ast = source_map.expr_syntax(expr_id).ok()?;
let root = ast.file_syntax(db.upcast());
let expr = ast.value.to_node(&root);
match expr {
ast::Expr::ClosureExpr(it) => Some(it),
_ => None,
pub fn ty(&self) -> &Type {
&self.ty
}
}
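Downstream of the `Param`/`Callee` rework above, `Callable::params()` no longer takes the database or returns pre-resolved AST patterns: callers get `hir::Param` values and go through `Param::ty()` and `Param::source()` themselves, as the ide-assist hunks later in this commit do. A sketch, assuming the rust-analyzer workspace (the helper is illustrative):

use hir::{Callable, Semantics};
use ide_db::RootDatabase;
use syntax::ast;

// Pattern of the first parameter, if it is a regular (non-self) parameter.
fn first_param_pattern(
    sema: &Semantics<'_, RootDatabase>,
    callable: &Callable,
) -> Option<ast::Pat> {
    let params = callable.params();           // Vec<hir::Param>, no db argument anymore
    let first = params.first()?;
    let _ = first.ty().impls_fnonce(sema.db); // parameter types are available directly
    // Param::source now yields an Either<ast::SelfParam, ast::Param>, covering closures too
    first.source(sema.db)?.value.right()?.pat()
}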

View file

@ -307,7 +307,8 @@ impl SourceAnalyzer {
db: &dyn HirDatabase,
call: &ast::Expr,
) -> Option<Callable> {
self.type_of_expr(db, &call.clone())?.0.as_callable(db)
let (orig, adjusted) = self.type_of_expr(db, &call.clone())?;
adjusted.unwrap_or(orig).as_callable(db)
}
pub(crate) fn resolve_field(

View file

@ -1,6 +1,6 @@
//! Type tree for term search
use hir_def::find_path::PrefixKind;
use hir_def::ImportPathConfig;
use hir_expand::mod_path::ModPath;
use hir_ty::{
db::HirDatabase,
@ -17,42 +17,20 @@ use crate::{
fn mod_item_path(
sema_scope: &SemanticsScope<'_>,
def: &ModuleDef,
prefer_no_std: bool,
prefer_prelude: bool,
cfg: ImportPathConfig,
) -> Option<ModPath> {
let db = sema_scope.db;
// Account for locals shadowing items from module
let name_hit_count = def.name(db).map(|def_name| {
let mut name_hit_count = 0;
sema_scope.process_all_names(&mut |name, _| {
if name == def_name {
name_hit_count += 1;
}
});
name_hit_count
});
let m = sema_scope.module();
match name_hit_count {
Some(0..=1) | None => m.find_use_path(db.upcast(), *def, prefer_no_std, prefer_prelude),
Some(_) => m.find_use_path_prefixed(
db.upcast(),
*def,
PrefixKind::ByCrate,
prefer_no_std,
prefer_prelude,
),
}
m.find_path(db.upcast(), *def, cfg)
}
/// Helper function to get path to `ModuleDef` as string
fn mod_item_path_str(
sema_scope: &SemanticsScope<'_>,
def: &ModuleDef,
prefer_no_std: bool,
prefer_prelude: bool,
cfg: ImportPathConfig,
) -> Result<String, DisplaySourceCodeError> {
let path = mod_item_path(sema_scope, def, prefer_no_std, prefer_prelude);
let path = mod_item_path(sema_scope, def, cfg);
path.map(|it| it.display(sema_scope.db.upcast()).to_string())
.ok_or(DisplaySourceCodeError::PathNotFound)
}
@ -61,8 +39,7 @@ fn mod_item_path_str(
fn type_path(
sema_scope: &SemanticsScope<'_>,
ty: &Type,
prefer_no_std: bool,
prefer_prelude: bool,
cfg: ImportPathConfig,
) -> Result<String, DisplaySourceCodeError> {
let db = sema_scope.db;
let m = sema_scope.module();
@ -71,9 +48,7 @@ fn type_path(
Some(adt) => {
let ty_name = ty.display_source_code(db, m.id, true)?;
let mut path =
mod_item_path(sema_scope, &ModuleDef::Adt(adt), prefer_no_std, prefer_prelude)
.unwrap();
let mut path = mod_item_path(sema_scope, &ModuleDef::Adt(adt), cfg).unwrap();
path.pop_segment();
let path = path.display(db.upcast()).to_string();
let res = match path.is_empty() {
@ -158,11 +133,10 @@ impl Expr {
&self,
sema_scope: &SemanticsScope<'_>,
many_formatter: &mut dyn FnMut(&Type) -> String,
prefer_no_std: bool,
prefer_prelude: bool,
cfg: ImportPathConfig,
) -> Result<String, DisplaySourceCodeError> {
let db = sema_scope.db;
let mod_item_path_str = |s, def| mod_item_path_str(s, def, prefer_no_std, prefer_prelude);
let mod_item_path_str = |s, def| mod_item_path_str(s, def, cfg);
match self {
Expr::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
@ -172,9 +146,7 @@ impl Expr {
Expr::Function { func, params, .. } => {
let args = params
.iter()
.map(|f| {
f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude)
})
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter()
.join(", ");
@ -188,14 +160,10 @@ impl Expr {
crate::AssocItemContainer::Impl(imp) => {
let self_ty = imp.self_ty(db);
// Should it be guaranteed that `mod_item_path` always exists?
match self_ty.as_adt().and_then(|adt| {
mod_item_path(
sema_scope,
&adt.into(),
prefer_no_std,
prefer_prelude,
)
}) {
match self_ty
.as_adt()
.and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg))
{
Some(path) => path.display(sema_scope.db.upcast()).to_string(),
None => self_ty.display(db).to_string(),
}
@ -217,17 +185,10 @@ impl Expr {
let func_name = func.name(db).display(db.upcast()).to_string();
let self_param = func.self_param(db).unwrap();
let target_str = target.gen_source_code(
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude,
)?;
let target_str = target.gen_source_code(sema_scope, many_formatter, cfg)?;
let args = params
.iter()
.map(|f| {
f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude)
})
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter()
.join(", ");
@ -259,7 +220,7 @@ impl Expr {
false => {
let generics = generics
.iter()
.map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude))
.map(|it| type_path(sema_scope, it, cfg))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter()
.join(", ");
@ -270,14 +231,7 @@ impl Expr {
StructKind::Tuple => {
let args = params
.iter()
.map(|f| {
f.gen_source_code(
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude,
)
})
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter()
.join(", ");
@ -292,12 +246,7 @@ impl Expr {
let tmp = format!(
"{}: {}",
f.name(db).display(db.upcast()),
a.gen_source_code(
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude
)?
a.gen_source_code(sema_scope, many_formatter, cfg)?
);
Ok(tmp)
})
@ -318,14 +267,7 @@ impl Expr {
StructKind::Tuple => {
let args = params
.iter()
.map(|a| {
a.gen_source_code(
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude,
)
})
.map(|a| a.gen_source_code(sema_scope, many_formatter, cfg))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter()
.join(", ");
@ -340,12 +282,7 @@ impl Expr {
let tmp = format!(
"{}: {}",
f.name(db).display(db.upcast()),
a.gen_source_code(
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude
)?
a.gen_source_code(sema_scope, many_formatter, cfg)?
);
Ok(tmp)
})
@ -359,7 +296,7 @@ impl Expr {
false => {
let generics = generics
.iter()
.map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude))
.map(|it| type_path(sema_scope, it, cfg))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter()
.join(", ");
@ -374,9 +311,7 @@ impl Expr {
Expr::Tuple { params, .. } => {
let args = params
.iter()
.map(|a| {
a.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude)
})
.map(|a| a.gen_source_code(sema_scope, many_formatter, cfg))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter()
.join(", ");
@ -388,12 +323,7 @@ impl Expr {
return Ok(many_formatter(&expr.ty(db)));
}
let strukt = expr.gen_source_code(
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude,
)?;
let strukt = expr.gen_source_code(sema_scope, many_formatter, cfg)?;
let field = field.name(db).display(db.upcast()).to_string();
Ok(format!("{strukt}.{field}"))
}
@ -402,12 +332,7 @@ impl Expr {
return Ok(many_formatter(&expr.ty(db)));
}
let inner = expr.gen_source_code(
sema_scope,
many_formatter,
prefer_no_std,
prefer_prelude,
)?;
let inner = expr.gen_source_code(sema_scope, many_formatter, cfg)?;
Ok(format!("&{inner}"))
}
Expr::Many(ty) => Ok(many_formatter(ty)),

View file

@ -1,7 +1,7 @@
use std::iter::{self, Peekable};
use either::Either;
use hir::{Adt, Crate, HasAttrs, HasSource, ModuleDef, Semantics};
use hir::{Adt, Crate, HasAttrs, HasSource, ImportPathConfig, ModuleDef, Semantics};
use ide_db::RootDatabase;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
use itertools::Itertools;
@ -71,6 +71,11 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.filter(|pat| !matches!(pat, Pat::WildcardPat(_)))
.collect();
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
let module = ctx.sema.scope(expr.syntax())?.module();
let (mut missing_pats, is_non_exhaustive, has_hidden_variants): (
Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>,
@ -88,13 +93,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.into_iter()
.filter_map(|variant| {
Some((
build_pat(
ctx.db(),
module,
variant,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?,
build_pat(ctx.db(), module, variant, cfg)?,
variant.should_be_hidden(ctx.db(), module.krate()),
))
})
@ -145,15 +144,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
let is_hidden = variants
.iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns = variants.into_iter().filter_map(|variant| {
build_pat(
ctx.db(),
module,
variant,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
});
let patterns = variants
.into_iter()
.filter_map(|variant| build_pat(ctx.db(), module, variant, cfg));
(ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
})
@ -184,15 +177,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
let is_hidden = variants
.iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns = variants.into_iter().filter_map(|variant| {
build_pat(
ctx.db(),
module,
variant,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
});
let patterns = variants
.into_iter()
.filter_map(|variant| build_pat(ctx.db(), module, variant, cfg));
(ast::Pat::from(make::slice_pat(patterns)), is_hidden)
})
.filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
@ -457,18 +444,11 @@ fn build_pat(
db: &RootDatabase,
module: hir::Module,
var: ExtendedVariant,
prefer_no_std: bool,
prefer_prelude: bool,
cfg: ImportPathConfig,
) -> Option<ast::Pat> {
match var {
ExtendedVariant::Variant(var) => {
let path = mod_path_to_ast(&module.find_use_path(
db,
ModuleDef::from(var),
prefer_no_std,
prefer_prelude,
)?);
let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?);
// FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though
Some(match var.source(db)?.value.kind() {
ast::StructKind::Tuple(field_list) => {

View file

@ -1,6 +1,6 @@
use std::cmp::Reverse;
use hir::{db::HirDatabase, Module};
use hir::{db::HirDatabase, ImportPathConfig, Module};
use ide_db::{
helpers::mod_path_to_ast,
imports::{
@ -90,14 +90,14 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
// # pub mod std { pub mod collections { pub struct HashMap { } } }
// ```
pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
let mut proposed_imports: Vec<_> = import_assets
.search_for_imports(
&ctx.sema,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_no_std,
)
.search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind)
.collect();
if proposed_imports.is_empty() {
return None;

View file

@ -1,4 +1,4 @@
use hir::ModuleDef;
use hir::{ImportPathConfig, ModuleDef};
use ide_db::{
assists::{AssistId, AssistKind},
defs::Definition,
@ -326,6 +326,11 @@ fn augment_references_with_imports(
) -> Vec<FileReferenceWithImport> {
let mut visited_modules = FxHashSet::default();
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
references
.into_iter()
.filter_map(|FileReference { range, name, .. }| {
@ -341,12 +346,11 @@ fn augment_references_with_imports(
let import_scope = ImportScope::find_insert_use_container(name.syntax(), &ctx.sema);
let path = ref_module
.find_use_path_prefixed(
.find_use_path(
ctx.sema.db,
ModuleDef::Module(*target_module),
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
cfg,
)
.map(|mod_path| {
make::path_concat(mod_path_to_ast(&mod_path), make::path_from_text("Bool"))
@ -1521,7 +1525,7 @@ mod foo {
}
"#,
r#"
use crate::foo::Bool;
use foo::Bool;
fn main() {
use foo::FOO;
@ -1602,7 +1606,7 @@ pub mod bar {
"#,
r#"
//- /main.rs
use crate::foo::bar::Bool;
use foo::bar::Bool;
mod foo;

View file

@ -1,3 +1,4 @@
use hir::ImportPathConfig;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast, traits::resolve_target_trait};
use syntax::ast::{self, AstNode, HasName};
@ -43,19 +44,18 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -
return None;
}
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
let src_type_path = {
let src_type_path = src_type.syntax().descendants().find_map(ast::Path::cast)?;
let src_type_def = match ctx.sema.resolve_path(&src_type_path) {
Some(hir::PathResolution::Def(module_def)) => module_def,
_ => return None,
};
mod_path_to_ast(&module.find_use_path(
ctx.db(),
src_type_def,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?)
mod_path_to_ast(&module.find_path(ctx.db(), src_type_def, cfg)?)
};
let dest_type = match &ast_trait {

View file

@ -1,5 +1,5 @@
use either::Either;
use hir::ModuleDef;
use hir::{ImportPathConfig, ModuleDef};
use ide_db::{
assists::{AssistId, AssistKind},
defs::Definition,
@ -183,6 +183,11 @@ fn augment_references_with_imports(
) -> Vec<(ast::NameLike, Option<(ImportScope, ast::Path)>)> {
let mut visited_modules = FxHashSet::default();
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
references
.iter()
.filter_map(|FileReference { name, .. }| {
@ -201,12 +206,11 @@ fn augment_references_with_imports(
let import_scope =
ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema);
let path = ref_module
.find_use_path_prefixed(
.find_use_path(
ctx.sema.db,
ModuleDef::Module(*target_module),
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
cfg,
)
.map(|mod_path| {
make::path_concat(
@ -811,7 +815,7 @@ pub mod bar {
"#,
r#"
//- /main.rs
use crate::foo::bar::BarResult;
use foo::bar::BarResult;
mod foo;

View file

@ -1,4 +1,4 @@
use hir::HasVisibility;
use hir::{HasVisibility, ImportPathConfig};
use ide_db::{
assists::{AssistId, AssistKind},
defs::Definition,
@ -87,15 +87,15 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
let ty = ctx.sema.type_of_binding_in_pat(&ident_pat)?;
let hir::Adt::Struct(struct_type) = ty.strip_references().as_adt()? else { return None };
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
let module = ctx.sema.scope(ident_pat.syntax())?.module();
let struct_def = hir::ModuleDef::from(struct_type);
let kind = struct_type.kind(ctx.db());
let struct_def_path = module.find_use_path(
ctx.db(),
struct_def,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?;
let struct_def_path = module.find_path(ctx.db(), struct_def, cfg)?;
let is_non_exhaustive = struct_def.attrs(ctx.db())?.by_key("non_exhaustive").exists();
let is_foreign_crate =

View file

@ -3,8 +3,8 @@ use std::{iter, ops::RangeInclusive};
use ast::make;
use either::Either;
use hir::{
DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef,
PathResolution, Semantics, TypeInfo, TypeParam,
DescendPreference, HasSource, HirDisplay, ImportPathConfig, InFile, Local, LocalSource,
ModuleDef, PathResolution, Semantics, TypeInfo, TypeParam,
};
use ide_db::{
defs::{Definition, NameRefClass},
@ -209,12 +209,14 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
FamousDefs(&ctx.sema, module.krate()).core_ops_ControlFlow();
if let Some(control_flow_enum) = control_flow_enum {
let mod_path = module.find_use_path_prefixed(
let mod_path = module.find_use_path(
ctx.sema.db,
ModuleDef::from(control_flow_enum),
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
);
if let Some(mod_path) = mod_path {

View file

@ -1,7 +1,7 @@
use std::iter;
use either::Either;
use hir::{Module, ModuleDef, Name, Variant};
use hir::{ImportPathConfig, Module, ModuleDef, Name, Variant};
use ide_db::{
defs::Definition,
helpers::mod_path_to_ast,
@ -386,12 +386,14 @@ fn process_references(
let segment = builder.make_mut(segment);
let scope_node = builder.make_syntax_mut(scope_node);
if !visited_modules.contains(&module) {
let mod_path = module.find_use_path_prefixed(
let mod_path = module.find_use_path(
ctx.sema.db,
*enum_module_def,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
);
if let Some(mut mod_path) = mod_path {
mod_path.pop_segment();
@ -881,7 +883,7 @@ fn another_fn() {
r#"use my_mod::my_other_mod::MyField;
mod my_mod {
use self::my_other_mod::MyField;
use my_other_mod::MyField;
fn another_fn() {
let m = my_other_mod::MyEnum::MyField(MyField(1, 1));

View file

@ -1,6 +1,6 @@
use std::fmt::Display;
use hir::{ModPath, ModuleDef};
use hir::{ImportPathConfig, ModPath, ModuleDef};
use ide_db::{famous_defs::FamousDefs, RootDatabase};
use syntax::{
ast::{self, HasName},
@ -58,11 +58,13 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
let trait_path = module.find_use_path(
let trait_path = module.find_path(
ctx.db(),
ModuleDef::Trait(trait_),
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)?;
let field_type = field.ty()?;
@ -103,11 +105,13 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
let trait_path = module.find_use_path(
let trait_path = module.find_path(
ctx.db(),
ModuleDef::Trait(trait_),
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)?;
let field_type = field.ty()?;

View file

@ -1,3 +1,4 @@
use hir::ImportPathConfig;
use ide_db::{
imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor,
};
@ -58,11 +59,13 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
let type_path = current_module.find_use_path(
let type_path = current_module.find_path(
ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)?;
let expr = use_trivial_constructor(

View file

@ -1,4 +1,7 @@
use hir::{db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, ItemInNs, ModuleDef};
use hir::{
db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, ImportPathConfig, ItemInNs,
ModuleDef,
};
use ide_db::assists::{AssistId, AssistKind};
use syntax::{ast, AstNode};
@ -44,11 +47,13 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let current_module = ctx.sema.scope(call.syntax())?.module();
let target_module_def = ModuleDef::from(resolved_call);
let item_in_ns = ItemInNs::from(target_module_def);
let receiver_path = current_module.find_use_path(
let receiver_path = current_module.find_path(
ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)?;
let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call);

View file

@ -1,6 +1,6 @@
use std::iter;
use hir::AsAssocItem;
use hir::{AsAssocItem, ImportPathConfig};
use ide_db::RootDatabase;
use ide_db::{
helpers::mod_path_to_ast,
@ -37,9 +37,13 @@ use crate::{
// ```
pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
let mut proposed_imports: Vec<_> = import_assets
.search_for_relative_paths(&ctx.sema, ctx.config.prefer_no_std, ctx.config.prefer_prelude)
.collect();
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
let mut proposed_imports: Vec<_> =
import_assets.search_for_relative_paths(&ctx.sema, cfg).collect();
if proposed_imports.is_empty() {
return None;
}

View file

@ -776,6 +776,40 @@ mod z {
);
}
#[test]
fn remove_unused_fixes_nested_self() {
check_assist(
remove_unused_imports,
r#"
mod inner {
pub struct X();
pub struct Y();
}
mod z {
use super::inner::{self, X}$0;
fn f() {
let y = inner::Y();
}
}
"#,
r#"mod inner {
pub struct X();
pub struct Y();
}
mod z {
use super::inner::{self};
fn f() {
let y = inner::Y();
}
}
"#,
);
}
#[test]
fn dont_remove_used_glob() {
check_assist_not_applicable(

View file

@ -1,4 +1,4 @@
use hir::{InFile, MacroFileIdExt, ModuleDef};
use hir::{ImportPathConfig, InFile, MacroFileIdExt, ModuleDef};
use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
use itertools::Itertools;
use syntax::{
@ -83,11 +83,13 @@ pub(crate) fn replace_derive_with_manual_impl(
})
.flat_map(|trait_| {
current_module
.find_use_path(
.find_path(
ctx.sema.db,
hir::ModuleDef::Trait(trait_),
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)
.as_ref()
.map(mod_path_to_ast)

View file

@ -114,10 +114,10 @@ pub(crate) fn replace_with_eager_method(acc: &mut Assists, ctx: &AssistContext<'
let callable = ctx.sema.resolve_method_call_as_callable(&call)?;
let (_, receiver_ty) = callable.receiver_param(ctx.sema.db)?;
let n_params = callable.n_params() + 1;
let params = callable.params(ctx.sema.db);
let params = callable.params();
// FIXME: Check that the arg is of the form `() -> T`
if !params.first()?.1.impls_fnonce(ctx.sema.db) {
if !params.first()?.ty().impls_fnonce(ctx.sema.db) {
return None;
}

View file

@ -1,4 +1,4 @@
use hir::AsAssocItem;
use hir::{AsAssocItem, ImportPathConfig};
use ide_db::{
helpers::mod_path_to_ast,
imports::insert_use::{insert_use, ImportScope},
@ -63,12 +63,14 @@ pub(crate) fn replace_qualified_name_with_use(
);
let path_to_qualifier = starts_with_name_ref
.then(|| {
ctx.sema.scope(path.syntax())?.module().find_use_path_prefixed(
ctx.sema.scope(path.syntax())?.module().find_use_path(
ctx.sema.db,
module,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)
})
.flatten();

View file

@ -1,5 +1,8 @@
//! Term search assist
use hir::term_search::{TermSearchConfig, TermSearchCtx};
use hir::{
term_search::{TermSearchConfig, TermSearchCtx},
ImportPathConfig,
};
use ide_db::{
assists::{AssistId, AssistKind, GroupLabel},
famous_defs::FamousDefs,
@ -50,8 +53,10 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
path.gen_source_code(
&scope,
&mut formatter,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)
.ok()
})

View file

@ -253,11 +253,8 @@ fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<St
};
let (idx, _) = arg_list.args().find_position(|it| it == expr).unwrap();
let (pat, _) = func.params(sema.db).into_iter().nth(idx)?;
let pat = match pat? {
either::Either::Right(pat) => pat,
_ => return None,
};
let param = func.params().into_iter().nth(idx)?;
let pat = param.source(sema.db)?.value.right()?.pat()?;
let name = var_name_from_pat(&pat)?;
normalize(&name.to_string())
}

View file

@ -24,7 +24,7 @@ pub(crate) mod vis;
use std::iter;
use hir::{known, HasAttrs, ScopeDef, Variant};
use hir::{known, HasAttrs, ImportPathConfig, ScopeDef, Variant};
use ide_db::{imports::import_assets::LocatedImport, RootDatabase, SymbolKind};
use syntax::{ast, SmolStr};
@ -633,11 +633,13 @@ fn enum_variants_with_paths(
}
for variant in variants {
if let Some(path) = ctx.module.find_use_path(
if let Some(path) = ctx.module.find_path(
ctx.db,
hir::ModuleDef::from(variant),
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
) {
// Variants with trivial paths are already added by the existing completion logic,
// so we should avoid adding these twice

View file

@ -1,6 +1,6 @@
//! Completion of names from the current scope in expression position.
use hir::ScopeDef;
use hir::{ImportPathConfig, ScopeDef};
use syntax::ast;
use crate::{
@ -171,11 +171,13 @@ pub(crate) fn complete_expr_path(
hir::Adt::Struct(strukt) => {
let path = ctx
.module
.find_use_path(
.find_path(
ctx.db,
hir::ModuleDef::from(strukt),
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)
.filter(|it| it.len() > 1);
@ -194,11 +196,13 @@ pub(crate) fn complete_expr_path(
hir::Adt::Union(un) => {
let path = ctx
.module
.find_use_path(
.find_path(
ctx.db,
hir::ModuleDef::from(un),
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)
.filter(|it| it.len() > 1);

View file

@ -1,5 +1,5 @@
//! See [`import_on_the_fly`].
use hir::{ItemInNs, ModuleDef};
use hir::{ImportPathConfig, ItemInNs, ModuleDef};
use ide_db::imports::{
import_assets::{ImportAssets, LocatedImport},
insert_use::ImportScope,
@ -257,13 +257,13 @@ fn import_on_the_fly(
};
let user_input_lowercased = potential_import_name.to_lowercase();
let import_cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
import_assets
.search_for_imports(
&ctx.sema,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
.search_for_imports(&ctx.sema, import_cfg, ctx.config.insert_use.prefix_kind)
.filter(ns_filter)
.filter(|import| {
let original_item = &import.original_item;
@ -308,13 +308,13 @@ fn import_on_the_fly_pat_(
};
let user_input_lowercased = potential_import_name.to_lowercase();
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
import_assets
.search_for_imports(
&ctx.sema,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
.search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind)
.filter(ns_filter)
.filter(|import| {
let original_item = &import.original_item;
@ -355,13 +355,13 @@ fn import_on_the_fly_method(
let user_input_lowercased = potential_import_name.to_lowercase();
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
import_assets
.search_for_imports(
&ctx.sema,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
.search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind)
.filter(|import| {
!ctx.is_item_hidden(&import.item_to_import)
&& !ctx.is_item_hidden(&import.original_item)

View file

@ -2,7 +2,7 @@
mod format_like;
use hir::ItemInNs;
use hir::{ImportPathConfig, ItemInNs};
use ide_db::{
documentation::{Documentation, HasDocs},
imports::insert_use::ImportScope,
@ -60,15 +60,17 @@ pub(crate) fn complete_postfix(
None => return,
};
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() {
if receiver_ty.impls_trait(ctx.db, drop_trait, &[]) {
if let Some(drop_fn) = ctx.famous_defs().core_mem_drop() {
if let Some(path) = ctx.module.find_use_path(
ctx.db,
ItemInNs::Values(drop_fn.into()),
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
) {
if let Some(path) =
ctx.module.find_path(ctx.db, ItemInNs::Values(drop_fn.into()), cfg)
{
cov_mark::hit!(postfix_drop_completion);
let mut item = postfix_snippet(
"drop",

View file

@ -12,6 +12,7 @@ mod snippet;
#[cfg(test)]
mod tests;
use hir::ImportPathConfig;
use ide_db::{
base_db::FilePosition,
helpers::mod_path_to_ast,
@ -251,6 +252,11 @@ pub fn resolve_completion_edits(
let new_ast = scope.clone_for_update();
let mut import_insert = TextEdit::builder();
let cfg = ImportPathConfig {
prefer_no_std: config.prefer_no_std,
prefer_prelude: config.prefer_prelude,
};
imports.into_iter().for_each(|(full_import_path, imported_name)| {
let items_with_name = items_locator::items_with_name(
&sema,
@ -260,13 +266,7 @@ pub fn resolve_completion_edits(
);
let import = items_with_name
.filter_map(|candidate| {
current_module.find_use_path_prefixed(
db,
candidate,
config.insert_use.prefix_kind,
config.prefer_no_std,
config.prefer_prelude,
)
current_module.find_use_path(db, candidate, config.insert_use.prefix_kind, cfg)
})
.find(|mod_path| mod_path.display(db).to_string() == full_import_path);
if let Some(import_path) = import {

View file

@ -10,7 +10,7 @@ pub(crate) mod type_alias;
pub(crate) mod union_literal;
pub(crate) mod variant;
use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
use hir::{AsAssocItem, HasAttrs, HirDisplay, ImportPathConfig, ModuleDef, ScopeDef, Type};
use ide_db::{
documentation::{Documentation, HasDocs},
helpers::item_name,
@ -295,14 +295,12 @@ pub(crate) fn render_expr(
.unwrap_or_else(|| String::from("..."))
};
let label = expr
.gen_source_code(
&ctx.scope,
&mut label_formatter,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
.ok()?;
let cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
let label = expr.gen_source_code(&ctx.scope, &mut label_formatter, cfg).ok()?;
let source_range = match ctx.original_token.parent() {
Some(node) => match node.ancestors().find_map(ast::Path::cast) {
@ -314,16 +312,8 @@ pub(crate) fn render_expr(
let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label);
let snippet = format!(
"{}$0",
expr.gen_source_code(
&ctx.scope,
&mut snippet_formatter,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude
)
.ok()?
);
let snippet =
format!("{}$0", expr.gen_source_code(&ctx.scope, &mut snippet_formatter, cfg).ok()?);
let edit = TextEdit::replace(source_range, snippet);
item.snippet_edit(ctx.config.snippet_cap?, edit);
item.documentation(Documentation::new(String::from("Autogenerated expression by term search")));
@ -333,12 +323,7 @@ pub(crate) fn render_expr(
});
for trait_ in expr.traits_used(ctx.db) {
let trait_item = hir::ItemInNs::from(hir::ModuleDef::from(trait_));
let Some(path) = ctx.module.find_use_path(
ctx.db,
trait_item,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
) else {
let Some(path) = ctx.module.find_path(ctx.db, trait_item, cfg) else {
continue;
};

View file

@ -100,6 +100,7 @@
// }
// ----
use hir::ImportPathConfig;
use ide_db::imports::import_assets::LocatedImport;
use itertools::Itertools;
use syntax::{ast, AstNode, GreenNode, SyntaxNode};
@ -168,18 +169,22 @@ impl Snippet {
}
fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<Vec<LocatedImport>> {
let import_cfg = ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
};
let resolve = |import: &GreenNode| {
let path = ast::Path::cast(SyntaxNode::new_root(import.clone()))?;
let item = match ctx.scope.speculative_resolve(&path)? {
hir::PathResolution::Def(def) => def.into(),
_ => return None,
};
let path = ctx.module.find_use_path_prefixed(
let path = ctx.module.find_use_path(
ctx.db,
item,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
import_cfg,
)?;
Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item)))
};

View file

@ -66,11 +66,10 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
enable_self_on_the_fly: true,
enable_private_editable: false,
enable_term_search: true,
term_search_fuel: 200,
full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
prefer_no_std: false,
prefer_prelude: true,
insert_use: InsertUseConfig {
granularity: ImportGranularity::Crate,
prefix_kind: PrefixKind::Plain,
@ -78,9 +77,10 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
group: true,
skip_glob_imports: true,
},
prefer_no_std: false,
prefer_prelude: true,
snippets: Vec::new(),
limit: None,
term_search_fuel: 200,
};
pub(crate) fn completion_list(ra_fixture: &str) -> String {

View file

@ -472,6 +472,47 @@ fn main() {
);
}
#[test]
fn trait_completions_handle_associated_types() {
let fixture = r#"
//- /foo.rs crate:foo
pub trait NotInScope {
fn not_in_scope(&self);
}
pub trait Wrapper {
type Inner: NotInScope;
fn inner(&self) -> Self::Inner;
}
//- /main.rs crate:main deps:foo
use foo::Wrapper;
fn completion<T: Wrapper>(whatever: T) {
whatever.inner().$0
}
"#;
check(
fixture,
expect![[r#"
me not_in_scope() (use foo::NotInScope) fn(&self)
"#]],
);
check_edit(
"not_in_scope",
fixture,
r#"
use foo::{NotInScope, Wrapper};
fn completion<T: Wrapper>(whatever: T) {
whatever.inner().not_in_scope()$0
}
"#,
);
}
#[test]
fn trait_method_fuzzy_completion_aware_of_unit_type() {
let fixture = r#"

View file

@ -1,7 +1,7 @@
//! This module provides functionality for querying callable information about a token.
use either::Either;
use hir::{Semantics, Type};
use hir::{InFile, Semantics, Type};
use parser::T;
use syntax::{
ast::{self, HasArgList, HasName},
@ -13,7 +13,7 @@ use crate::RootDatabase;
#[derive(Debug)]
pub struct ActiveParameter {
pub ty: Type,
pub pat: Option<Either<ast::SelfParam, ast::Pat>>,
pub src: Option<InFile<Either<ast::SelfParam, ast::Param>>>,
}
impl ActiveParameter {
@ -22,18 +22,18 @@ impl ActiveParameter {
let (signature, active_parameter) = callable_for_token(sema, token)?;
let idx = active_parameter?;
let mut params = signature.params(sema.db);
let mut params = signature.params();
if idx >= params.len() {
cov_mark::hit!(too_many_arguments);
return None;
}
let (pat, ty) = params.swap_remove(idx);
Some(ActiveParameter { ty, pat })
let param = params.swap_remove(idx);
Some(ActiveParameter { ty: param.ty().clone(), src: param.source(sema.db) })
}
pub fn ident(&self) -> Option<ast::Name> {
self.pat.as_ref().and_then(|param| match param {
Either::Right(ast::Pat::IdentPat(ident)) => ident.name(),
self.src.as_ref().and_then(|param| match param.value.as_ref().right()?.pat()? {
ast::Pat::IdentPat(ident) => ident.name(),
_ => None,
})
}
@ -60,10 +60,7 @@ pub fn callable_for_node(
token: &SyntaxToken,
) -> Option<(hir::Callable, Option<usize>)> {
let callable = match calling_node {
ast::CallableExpr::Call(call) => {
let expr = call.expr()?;
sema.type_of_expr(&expr)?.adjusted().as_callable(sema.db)
}
ast::CallableExpr::Call(call) => sema.resolve_expr_as_callable(&call.expr()?),
ast::CallableExpr::MethodCall(call) => sema.resolve_method_call_as_callable(call),
}?;
let active_param = calling_node.arg_list().map(|arg_list| {
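`ActiveParameter` above now keeps the parameter's source (`InFile<Either<SelfParam, Param>>`) instead of a pre-extracted pattern, and `ident()` walks source → param → pattern inside one `and_then`. A std-only sketch of that extraction chain, with a hand-rolled `Either` and a stand-in `Param` (the real code uses the `either` crate and syntax-tree types):

```rust
// Everything here is a stand-in; the point is the `?`-chain inside `and_then`.
enum Either<L, R> {
    Left(L),
    Right(R),
}

impl<L, R> Either<L, R> {
    fn right(self) -> Option<R> {
        match self {
            Either::Right(right) => Some(right),
            Either::Left(_) => None,
        }
    }
}

struct Param {
    pat: Option<String>, // stand-in for `ast::Param::pat()` yielding an identifier
}

struct ActiveParameter {
    src: Option<Either<(), Param>>, // `Left(())` models a `self` parameter
}

impl ActiveParameter {
    fn ident(self) -> Option<String> {
        // mirrors the rewritten `ident()` above: self param and pattern-less
        // params both fall out as `None`
        self.src.and_then(|param| param.right()?.pat)
    }
}

fn main() {
    let named = ActiveParameter {
        src: Some(Either::Right(Param { pat: Some("count".to_owned()) })),
    };
    assert_eq!(named.ident(), Some("count".to_owned()));

    let self_param = ActiveParameter { src: Some(Either::Left(())) };
    assert_eq!(self_param.ident(), None);
}
```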

View file

@ -1,9 +1,9 @@
//! Look up accessible paths for items.
use hir::{
db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, Crate, HasCrate, ItemInNs,
ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics,
SemanticsScope, Trait, Type,
db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, Crate, HasCrate, ImportPathConfig,
ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics,
SemanticsScope, Trait, TyFingerprint, Type,
};
use itertools::{EitherOrBoth, Itertools};
use rustc_hash::{FxHashMap, FxHashSet};
@ -205,24 +205,22 @@ impl ImportAssets {
pub fn search_for_imports(
&self,
sema: &Semantics<'_, RootDatabase>,
cfg: ImportPathConfig,
prefix_kind: PrefixKind,
prefer_no_std: bool,
prefer_prelude: bool,
) -> impl Iterator<Item = LocatedImport> {
let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::search_for_imports").entered();
self.search_for(sema, Some(prefix_kind), prefer_no_std, prefer_prelude)
self.search_for(sema, Some(prefix_kind), cfg)
}
/// This may return non-absolute paths if a part of the returned path is already imported into scope.
pub fn search_for_relative_paths(
&self,
sema: &Semantics<'_, RootDatabase>,
prefer_no_std: bool,
prefer_prelude: bool,
cfg: ImportPathConfig,
) -> impl Iterator<Item = LocatedImport> {
let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::search_for_relative_paths")
.entered();
self.search_for(sema, None, prefer_no_std, prefer_prelude)
self.search_for(sema, None, cfg)
}
/// Requires imports to be resolved by prefix instead of fuzzily.
@ -259,8 +257,7 @@ impl ImportAssets {
&self,
sema: &Semantics<'_, RootDatabase>,
prefixed: Option<PrefixKind>,
prefer_no_std: bool,
prefer_prelude: bool,
cfg: ImportPathConfig,
) -> impl Iterator<Item = LocatedImport> {
let _p = tracing::span!(tracing::Level::INFO, "ImportAssets::search_for").entered();
@ -277,8 +274,7 @@ impl ImportAssets {
item_for_path_search(sema.db, item)?,
&self.module_with_candidate,
prefixed,
prefer_no_std,
prefer_prelude,
cfg,
)
.filter(|path| path.len() > 1)
};
@ -549,6 +545,15 @@ fn trait_applicable_items(
let Some(receiver) = trait_candidate.receiver_ty.fingerprint_for_trait_impl() else {
return false;
};
// in order to handle implied bounds through an associated type, keep any
// method receiver that matches `TyFingerprint::Unnameable`. this receiver
// won't be in `TraitImpls` anyways, as `TraitImpls` only contains actual
// implementations.
if matches!(receiver, TyFingerprint::Unnameable) {
return true;
}
let definitions_exist_in_trait_crate = db
.trait_impls_in_crate(defining_crate_for_trait.into())
.has_impls_for_trait_and_self_ty(candidate_trait_id, receiver);
@ -634,19 +639,12 @@ fn get_mod_path(
item_to_search: ItemInNs,
module_with_candidate: &Module,
prefixed: Option<PrefixKind>,
prefer_no_std: bool,
prefer_prelude: bool,
cfg: ImportPathConfig,
) -> Option<ModPath> {
if let Some(prefix_kind) = prefixed {
module_with_candidate.find_use_path_prefixed(
db,
item_to_search,
prefix_kind,
prefer_no_std,
prefer_prelude,
)
module_with_candidate.find_use_path(db, item_to_search, prefix_kind, cfg)
} else {
module_with_candidate.find_use_path(db, item_to_search, prefer_no_std, prefer_prelude)
module_with_candidate.find_path(db, item_to_search, cfg)
}
}
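The comment in `trait_applicable_items` above explains the one behavioural change in this file: receivers whose type has no nameable fingerprint are kept instead of being filtered through the per-crate impl index. A minimal stand-in sketch of that filter (hypothetical types, not the real `TraitImpls` API):

```rust
// Stand-in types only; `TyFingerprint`/`TraitImpls` are sketched, not real.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum TyFingerprint {
    Adt(&'static str),
    Unnameable, // e.g. an associated type projection behind a generic
}

struct TraitImpls {
    self_tys: Vec<TyFingerprint>, // fingerprints that actually have impls
}

impl TraitImpls {
    fn has_impls_for_self_ty(&self, fp: TyFingerprint) -> bool {
        self.self_tys.contains(&fp)
    }
}

fn keep_candidate(impls: &TraitImpls, receiver: TyFingerprint) -> bool {
    // unnameable receivers can never appear in the index, so keep them and let
    // later, more precise checks decide
    if matches!(receiver, TyFingerprint::Unnameable) {
        return true;
    }
    impls.has_impls_for_self_ty(receiver)
}

fn main() {
    let impls = TraitImpls { self_tys: vec![TyFingerprint::Adt("S")] };
    assert!(keep_candidate(&impls, TyFingerprint::Unnameable));
    assert!(keep_candidate(&impls, TyFingerprint::Adt("S")));
    assert!(!keep_candidate(&impls, TyFingerprint::Adt("T")));
}
```

This is what makes the new `trait_completions_handle_associated_types` completion test earlier in this commit pass: the `Self::Inner` receiver has no nameable fingerprint, yet the trait method should still be offered.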

View file

@ -2,7 +2,7 @@
use crate::helpers::mod_path_to_ast;
use either::Either;
use hir::{AsAssocItem, HirDisplay, ModuleDef, SemanticsScope};
use hir::{AsAssocItem, HirDisplay, ImportPathConfig, ModuleDef, SemanticsScope};
use itertools::Itertools;
use rustc_hash::FxHashMap;
use syntax::{
@ -308,11 +308,12 @@ impl Ctx<'_> {
parent.segment()?.name_ref()?,
)
.and_then(|trait_ref| {
let found_path = self.target_module.find_use_path(
let cfg =
ImportPathConfig { prefer_no_std: false, prefer_prelude: true };
let found_path = self.target_module.find_path(
self.source_scope.db.upcast(),
hir::ModuleDef::Trait(trait_ref),
false,
true,
cfg,
)?;
match make::ty_path(mod_path_to_ast(&found_path)) {
ast::Type::PathType(path_ty) => Some(path_ty),
@ -347,12 +348,9 @@ impl Ctx<'_> {
}
}
let found_path = self.target_module.find_use_path(
self.source_scope.db.upcast(),
def,
false,
true,
)?;
let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true };
let found_path =
self.target_module.find_path(self.source_scope.db.upcast(), def, cfg)?;
let res = mod_path_to_ast(&found_path).clone_for_update();
if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
if let Some(segment) = res.segment() {
@ -385,11 +383,11 @@ impl Ctx<'_> {
if let Some(adt) = ty.as_adt() {
if let ast::Type::PathType(path_ty) = &ast_ty {
let found_path = self.target_module.find_use_path(
let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true };
let found_path = self.target_module.find_path(
self.source_scope.db.upcast(),
ModuleDef::from(adt),
false,
true,
cfg,
)?;
if let Some(qual) = mod_path_to_ast(&found_path).qualifier() {

View file

@ -41,6 +41,7 @@ pub enum FormatSpecifier {
Escape,
}
// FIXME: Remove this, we can use rustc_format_parse instead
pub fn lex_format_specifiers(
string: &ast::String,
mut callback: &mut dyn FnMut(TextRange, FormatSpecifier),

View file

@ -1,7 +1,7 @@
//! This diagnostic provides an assist for creating a struct definition from a JSON
//! example.
use hir::{PathResolution, Semantics};
use hir::{ImportPathConfig, PathResolution, Semantics};
use ide_db::{
base_db::{FileId, FileRange},
helpers::mod_path_to_ast,
@ -142,14 +142,19 @@ pub(crate) fn json_in_items(
ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)),
};
let current_module = semantics_scope.module();
let cfg = ImportPathConfig {
prefer_no_std: config.prefer_no_std,
prefer_prelude: config.prefer_prelude,
};
if !scope_has("Serialize") {
if let Some(PathResolution::Def(it)) = serialize_resolved {
if let Some(it) = current_module.find_use_path_prefixed(
if let Some(it) = current_module.find_use_path(
sema.db,
it,
config.insert_use.prefix_kind,
config.prefer_no_std,
config.prefer_prelude,
cfg,
) {
insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
}
@ -157,12 +162,11 @@ pub(crate) fn json_in_items(
}
if !scope_has("Deserialize") {
if let Some(PathResolution::Def(it)) = deserialize_resolved {
if let Some(it) = current_module.find_use_path_prefixed(
if let Some(it) = current_module.find_use_path(
sema.db,
it,
config.insert_use.prefix_kind,
config.prefer_no_std,
config.prefer_prelude,
cfg,
) {
insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
}

View file

@ -1,7 +1,7 @@
use either::Either;
use hir::{
db::{ExpandDatabase, HirDatabase},
known, AssocItem, HirDisplay, HirFileIdExt, InFile, Type,
known, AssocItem, HirDisplay, HirFileIdExt, ImportPathConfig, InFile, Type,
};
use ide_db::{
assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search,
@ -122,11 +122,13 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
let expr = (|| -> Option<ast::Expr> {
let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
let type_path = current_module?.find_use_path(
let type_path = current_module?.find_path(
ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)?;
use_trivial_constructor(

View file

@ -1,7 +1,7 @@
use hir::{
db::ExpandDatabase,
term_search::{term_search, TermSearchConfig, TermSearchCtx},
ClosureStyle, HirDisplay,
ClosureStyle, HirDisplay, ImportPathConfig,
};
use ide_db::{
assists::{Assist, AssistId, AssistKind, GroupLabel},
@ -59,8 +59,10 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>
path.gen_source_code(
&scope,
&mut formatter,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
ImportPathConfig {
prefer_no_std: ctx.config.prefer_no_std,
prefer_prelude: ctx.config.prefer_prelude,
},
)
.ok()
})
@ -368,6 +370,7 @@ fn main() {
);
}
// FIXME
#[test]
fn local_shadow_fn() {
check_fixes_unordered(
@ -385,7 +388,7 @@ fn f() {
r#"
fn f() {
let f: i32 = 0;
crate::f()
f()
}"#,
],
);

View file

@ -6,7 +6,7 @@ use crate::{
resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo},
SsrMatches,
};
use hir::Semantics;
use hir::{ImportPathConfig, Semantics};
use ide_db::{base_db::FileRange, FxHashMap};
use std::{cell::Cell, iter::Peekable};
use syntax::{
@ -663,10 +663,10 @@ impl Match {
.module();
for (path, resolved_path) in &template.resolved_paths {
if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
let mod_path =
module.find_use_path(sema.db, module_def, false, true).ok_or_else(|| {
match_error!("Failed to render template path `{}` at match location")
})?;
let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true };
let mod_path = module.find_path(sema.db, module_def, cfg).ok_or_else(|| {
match_error!("Failed to render template path `{}` at match location")
})?;
self.rendered_template_paths.insert(path.clone(), mod_path);
}
}

View file

@ -109,12 +109,12 @@ pub(crate) fn outgoing_calls(
let expr = call.expr()?;
let callable = sema.type_of_expr(&expr)?.original.as_callable(db)?;
match callable.kind() {
hir::CallableKind::Function(it) => {
let range = expr.syntax().text_range();
it.try_to_nav(db).zip(Some(range))
}
hir::CallableKind::Function(it) => it.try_to_nav(db),
hir::CallableKind::TupleEnumVariant(it) => it.try_to_nav(db),
hir::CallableKind::TupleStruct(it) => it.try_to_nav(db),
_ => None,
}
.zip(Some(expr.syntax().text_range()))
}
ast::CallableExpr::MethodCall(expr) => {
let range = expr.name_ref()?.syntax().text_range();
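The `outgoing_calls` hunk above is a small tidy-up: every arm used to pair its navigation target with the same text range, so the `.zip(Some(range))` is hoisted out of the match. A std-only sketch of the pattern, with stand-in types:

```rust
// Stand-in types; the point is the shape: zip once after the match instead of
// repeating the pairing in every arm.
fn nav_target(kind: u8) -> Option<&'static str> {
    match kind {
        0 => Some("function"),
        1 => Some("tuple enum variant"),
        2 => Some("tuple struct"),
        _ => None, // closures, fn pointers, ... have no nav target here
    }
}

fn outgoing_call(kind: u8, range: (u32, u32)) -> Option<(&'static str, (u32, u32))> {
    nav_target(kind).zip(Some(range))
}

fn main() {
    assert_eq!(outgoing_call(0, (10, 20)), Some(("function", (10, 20))));
    assert_eq!(outgoing_call(9, (10, 20)), None);
}
```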

View file

@ -487,19 +487,23 @@ fn get_doc_base_urls(
let system_doc = sysroot
.map(|sysroot| format!("file:///{sysroot}/share/doc/rust/html/"))
.and_then(|it| Url::parse(&it).ok());
let krate = def.krate(db);
let channel = krate
.and_then(|krate| db.toolchain_channel(krate.into()))
.unwrap_or(ReleaseChannel::Nightly)
.as_str();
// special case base url of `BuiltinType` to core
// https://github.com/rust-lang/rust-analyzer/issues/12250
if let Definition::BuiltinType(..) = def {
let web_link = Url::parse("https://doc.rust-lang.org/nightly/core/").ok();
let web_link = Url::parse(&format!("https://doc.rust-lang.org/{channel}/core/")).ok();
let system_link = system_doc.and_then(|it| it.join("core/").ok());
return (web_link, system_link);
};
let Some(krate) = def.krate(db) else { return Default::default() };
let Some(krate) = krate else { return Default::default() };
let Some(display_name) = krate.display_name(db) else { return Default::default() };
let crate_data = &db.crate_graph()[krate.into()];
let channel = db.toolchain_channel(krate.into()).unwrap_or(ReleaseChannel::Nightly).as_str();
let (web_base, local_base) = match &crate_data.origin {
// std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself.
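The change above swaps the hard-coded `nightly` segment in the builtin-type docs URL for the crate's toolchain channel, still falling back to nightly when the channel is unknown. A tiny standalone sketch of that URL construction (the helper name is made up):

```rust
// Hypothetical helper, std-only: build the builtin-type docs base URL from an
// optional release channel, defaulting to "nightly" as the diff above does.
fn builtin_type_doc_base(channel: Option<&str>) -> String {
    let channel = channel.unwrap_or("nightly");
    format!("https://doc.rust-lang.org/{channel}/core/")
}

fn main() {
    assert_eq!(builtin_type_doc_base(Some("stable")), "https://doc.rust-lang.org/stable/core/");
    assert_eq!(builtin_type_doc_base(None), "https://doc.rust-lang.org/nightly/core/");
}
```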

View file

@ -1120,4 +1120,30 @@ fn test() {
"#,
);
}
#[test]
fn type_hints_async_block() {
check_types(
r#"
//- minicore: future
async fn main() {
let _x = async { 8_i32 };
//^^ impl Future<Output = i32>
}"#,
);
}
#[test]
fn type_hints_async_block_with_tail_return_exp() {
check_types(
r#"
//- minicore: future
async fn main() {
let _x = async {
//^^ impl Future<Output = i32>
return 8_i32;
};
}"#,
);
}
}
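The two new inlay-hint tests above pin down the same fact about async blocks: their value has type `impl Future<Output = T>`, where `T` is fixed by the tail expression or an explicit `return`. A runnable illustration outside of rust-analyzer:

```rust
use std::future::Future;

// An async block is just a value of an anonymous future type; the hint shown
// in the tests above is `impl Future<Output = i32>` in both cases.
fn tail_expression() -> impl Future<Output = i32> {
    async { 8_i32 }
}

fn explicit_return() -> impl Future<Output = i32> {
    async {
        return 8_i32;
    }
}

fn main() {
    // constructing the futures is enough to observe their types; nothing here
    // needs an executor because they are never polled
    let _a = tail_expression();
    let _b = explicit_return();
}
```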

View file

@ -24,34 +24,29 @@ pub(super) fn hints(
let (callable, arg_list) = get_callable(sema, &expr)?;
let hints = callable
.params(sema.db)
.params()
.into_iter()
.zip(arg_list.args())
.filter_map(|((param, _ty), arg)| {
.filter_map(|(p, arg)| {
// Only annotate hints for expressions that exist in the original file
let range = sema.original_range_opt(arg.syntax())?;
let (param_name, name_syntax) = match param.as_ref()? {
let source = p.source(sema.db)?;
let (param_name, name_syntax) = match source.value.as_ref() {
Either::Left(pat) => (pat.name()?, pat.name()),
Either::Right(pat) => match pat {
Either::Right(param) => match param.pat()? {
ast::Pat::IdentPat(it) => (it.name()?, it.name()),
_ => return None,
},
};
// make sure the file is cached so we can map out of macros
sema.parse_or_expand(source.file_id);
Some((name_syntax, param_name, arg, range))
})
.filter(|(_, param_name, arg, _)| {
!should_hide_param_name_hint(sema, &callable, &param_name.text(), arg)
})
.map(|(param, param_name, _, FileRange { range, .. })| {
let mut linked_location = None;
if let Some(name) = param {
if let hir::CallableKind::Function(f) = callable.kind() {
// assert the file is cached so we can map out of macros
if sema.source(f).is_some() {
linked_location = sema.original_range_opt(name.syntax());
}
}
}
let linked_location = param.and_then(|name| sema.original_range_opt(name.syntax()));
let colon = if config.render_colons { ":" } else { "" };
let label =

View file

@ -79,7 +79,7 @@ impl RunnableKind {
impl Runnable {
// test package::module::testname
pub fn label(&self, target: Option<String>) -> String {
pub fn label(&self, target: Option<&str>) -> String {
match &self.kind {
RunnableKind::Test { test_id, .. } => format!("test {test_id}"),
RunnableKind::TestMod { path } => format!("test-mod {path}"),
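The `label` signature change above (`Option<String>` → `Option<&str>`) is the usual borrow-over-own tweak: callers that already hold a `String` pass a borrow via `as_deref`, and literal callers stop allocating. A small standalone sketch (this `label` is a stand-in, not the real `Runnable::label`):

```rust
fn label(target: Option<&str>) -> String {
    match target {
        Some(target) => format!("test-mod {target}"),
        None => "test-mod".to_owned(),
    }
}

fn main() {
    let owned: Option<String> = Some("crate::module".to_owned());
    // an owned Option<String> still works, via as_deref()
    println!("{}", label(owned.as_deref()));
    // and a literal no longer forces an allocation at the call site
    println!("{}", label(Some("other::module")));
    println!("{}", label(None));
}
```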

View file

@ -201,7 +201,21 @@ fn signature_help_for_call(
variant.name(db).display(db)
);
}
hir::CallableKind::Closure | hir::CallableKind::FnPtr | hir::CallableKind::Other => (),
hir::CallableKind::Closure(closure) => {
let fn_trait = closure.fn_trait(db);
format_to!(res.signature, "impl {fn_trait}")
}
hir::CallableKind::FnPtr => format_to!(res.signature, "fn"),
hir::CallableKind::FnImpl(fn_trait) => match callable.ty().as_adt() {
// FIXME: Render docs of the concrete trait impl function
Some(adt) => format_to!(
res.signature,
"<{} as {fn_trait}>::{}",
adt.name(db).display(db),
fn_trait.function_name()
),
None => format_to!(res.signature, "impl {fn_trait}"),
},
}
res.signature.push('(');
@ -210,12 +224,15 @@ fn signature_help_for_call(
format_to!(res.signature, "{}", self_param.display(db))
}
let mut buf = String::new();
for (idx, (pat, ty)) in callable.params(db).into_iter().enumerate() {
for (idx, p) in callable.params().into_iter().enumerate() {
buf.clear();
if let Some(pat) = pat {
match pat {
Either::Left(_self) => format_to!(buf, "self: "),
Either::Right(pat) => format_to!(buf, "{}: ", pat),
if let Some(param) = p.source(sema.db) {
match param.value {
Either::Right(param) => match param.pat() {
Some(pat) => format_to!(buf, "{}: ", pat),
None => format_to!(buf, "?: "),
},
Either::Left(_) => format_to!(buf, "self: "),
}
}
// APITs (argument position `impl Trait`s) are inferred as {unknown} as the user is
@ -223,9 +240,9 @@ fn signature_help_for_call(
// In that case, fall back to render definitions of the respective parameters.
// This is overly conservative: we do not substitute known type vars
// (see FIXME in tests::impl_trait) and falling back on any unknowns.
match (ty.contains_unknown(), fn_params.as_deref()) {
match (p.ty().contains_unknown(), fn_params.as_deref()) {
(true, Some(fn_params)) => format_to!(buf, "{}", fn_params[idx].ty().display(db)),
_ => format_to!(buf, "{}", ty.display(db)),
_ => format_to!(buf, "{}", p.ty().display(db)),
}
res.push_call_param(&buf);
}
@ -242,9 +259,9 @@ fn signature_help_for_call(
render(func.ret_type(db))
}
hir::CallableKind::Function(_)
| hir::CallableKind::Closure
| hir::CallableKind::Closure(_)
| hir::CallableKind::FnPtr
| hir::CallableKind::Other => render(callable.return_type()),
| hir::CallableKind::FnImpl(_) => render(callable.return_type()),
hir::CallableKind::TupleStruct(_) | hir::CallableKind::TupleEnumVariant(_) => {}
}
Some(res)
@ -1345,15 +1362,43 @@ fn test() { S.foo($0); }
r#"
struct S;
fn foo(s: S) -> i32 { 92 }
fn main() {
let _move = S;
(|s| {{_move}; foo(s)})($0)
}
"#,
expect![[r#"
impl FnOnce(s: S) -> i32
^^^^
"#]],
);
check(
r#"
struct S;
fn foo(s: S) -> i32 { 92 }
fn main() {
(|s| foo(s))($0)
}
"#,
expect![[r#"
(s: S) -> i32
^^^^
impl Fn(s: S) -> i32
^^^^
"#]],
)
);
check(
r#"
struct S;
fn foo(s: S) -> i32 { 92 }
fn main() {
let mut mutate = 0;
(|s| { mutate = 1; foo(s) })($0)
}
"#,
expect![[r#"
impl FnMut(s: S) -> i32
^^^^
"#]],
);
}
#[test]
@ -1383,12 +1428,81 @@ fn main(f: fn(i32, f64) -> char) {
}
"#,
expect![[r#"
(i32, f64) -> char
--- ^^^
fn(i32, f64) -> char
--- ^^^
"#]],
)
}
#[test]
fn call_info_for_fn_impl() {
check(
r#"
struct S;
impl core::ops::FnOnce<(i32, f64)> for S {
type Output = char;
}
impl core::ops::FnMut<(i32, f64)> for S {}
impl core::ops::Fn<(i32, f64)> for S {}
fn main() {
S($0);
}
"#,
expect![[r#"
<S as Fn>::call(i32, f64) -> char
^^^ ---
"#]],
);
check(
r#"
struct S;
impl core::ops::FnOnce<(i32, f64)> for S {
type Output = char;
}
impl core::ops::FnMut<(i32, f64)> for S {}
impl core::ops::Fn<(i32, f64)> for S {}
fn main() {
S(1, $0);
}
"#,
expect![[r#"
<S as Fn>::call(i32, f64) -> char
--- ^^^
"#]],
);
check(
r#"
struct S;
impl core::ops::FnOnce<(i32, f64)> for S {
type Output = char;
}
impl core::ops::FnOnce<(char, char)> for S {
type Output = f64;
}
fn main() {
S($0);
}
"#,
expect![""],
);
check(
r#"
struct S;
impl core::ops::FnOnce<(i32, f64)> for S {
type Output = char;
}
impl core::ops::FnOnce<(char, char)> for S {
type Output = f64;
}
fn main() {
// FIXME: The ide layer loses the calling info here so we get an ambiguous trait solve result
S(0i32, $0);
}
"#,
expect![""],
);
}
#[test]
fn call_info_for_unclosed_call() {
check(
@ -1794,19 +1908,19 @@ fn f<F: FnOnce(u8, u16) -> i32>(f: F) {
}
"#,
expect![[r#"
(u8, u16) -> i32
^^ ---
impl FnOnce(u8, u16) -> i32
^^ ---
"#]],
);
check(
r#"
fn f<T, F: FnOnce(&T, u16) -> &T>(f: F) {
fn f<T, F: FnMut(&T, u16) -> &T>(f: F) {
f($0)
}
"#,
expect![[r#"
(&T, u16) -> &T
^^ ---
impl FnMut(&T, u16) -> &T
^^ ---
"#]],
);
}
@ -1826,7 +1940,7 @@ fn take<C, Error>(
}
"#,
expect![[r#"
() -> i32
impl Fn() -> i32
"#]],
);
}
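The new expectations above (`impl FnOnce(...)`, `impl FnMut(...)`, `impl Fn(...)`) follow the standard rule for which `Fn*` trait a closure implements, which this small standalone example illustrates:

```rust
// Why the three test closures above render as `impl FnOnce`, `impl FnMut`,
// and `impl Fn`: the trait depends on how the closure uses its captures.
fn call_once<F: FnOnce() -> String>(f: F) -> String { f() }
fn call_mut<F: FnMut() -> i32>(mut f: F) -> i32 { f() }
fn call<F: Fn() -> i32>(f: F) -> i32 { f() }

fn main() {
    let moved = String::from("moved");
    let once = move || moved; // gives away a capture by value: only FnOnce
    let mut counter = 0;
    let mutating = move || { counter += 1; counter }; // mutates a capture: FnMut
    let reading = || 42; // neither mutates nor consumes anything: Fn

    println!("{}", call_once(once));
    println!("{}", call_mut(mutating));
    println!("{}", call(reading));
}
```

The `FnImpl` arm added above covers the remaining case, where the callee is not a closure at all but a type with explicit `Fn*` trait impls, rendered as `<S as Fn>::call(...)` in the new tests.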

View file

@ -9,8 +9,9 @@ pub(super) fn highlight_escape_string<T: IsString>(
string: &T,
start: TextSize,
) {
let text = string.text();
string.escaped_char_ranges(&mut |piece_range, char| {
if string.text()[piece_range.start().into()..].starts_with('\\') {
if text[piece_range.start().into()..].starts_with('\\') {
let highlight = match char {
Ok(_) => HlTag::EscapeSequence,
Err(_) => HlTag::InvalidEscapeSequence,
@ -33,17 +34,15 @@ pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char, start:
}
let text = char.text();
if !text.starts_with('\'') || !text.ends_with('\'') {
let Some(text) = text
.strip_prefix('\'')
.and_then(|it| it.strip_suffix('\''))
.filter(|it| it.starts_with('\\'))
else {
return;
}
};
let text = &text[1..text.len() - 1];
if !text.starts_with('\\') {
return;
}
let range =
TextRange::new(start + TextSize::from(1), start + TextSize::from(text.len() as u32 + 1));
let range = TextRange::at(start + TextSize::from(1), TextSize::from(text.len() as u32));
stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
}
@ -54,16 +53,14 @@ pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte, start:
}
let text = byte.text();
if !text.starts_with("b'") || !text.ends_with('\'') {
let Some(text) = text
.strip_prefix("b'")
.and_then(|it| it.strip_suffix('\''))
.filter(|it| it.starts_with('\\'))
else {
return;
}
};
let text = &text[2..text.len() - 1];
if !text.starts_with('\\') {
return;
}
let range =
TextRange::new(start + TextSize::from(2), start + TextSize::from(text.len() as u32 + 2));
let range = TextRange::at(start + TextSize::from(2), TextSize::from(text.len() as u32));
stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
}
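The rewritten char/byte escape checks above replace manual slicing (`text[1..text.len() - 1]`) with `strip_prefix`/`strip_suffix` plus a start-and-length range. A std-only sketch of the same shape (the real code uses `TextRange` from the `text-size` crate):

```rust
// Peel the quotes, keep only escaped bodies, and describe the highlight range
// as (offset, length), like `TextRange::at(start + 1, len)` in the hunk above.
fn char_escape_range(char_literal: &str) -> Option<(usize, usize)> {
    let body = char_literal
        .strip_prefix('\'')
        .and_then(|it| it.strip_suffix('\''))
        .filter(|it| it.starts_with('\\'))?;
    Some((1, body.len()))
}

fn main() {
    assert_eq!(char_escape_range(r"'\n'"), Some((1, 2)));
    assert_eq!(char_escape_range("'a'"), None); // not an escape sequence
    assert_eq!(char_escape_range("abc"), None); // not quoted at all
}
```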

View file

@ -65,7 +65,7 @@ impl WorkspaceBuildScripts {
allowed_features: &FxHashSet<String>,
manifest_path: &ManifestPath,
toolchain: Option<&Version>,
sysroot: Option<&Sysroot>,
sysroot: &Sysroot,
) -> io::Result<Command> {
const RUST_1_75: Version = Version::new(1, 75, 0);
let mut cmd = match config.run_build_script_command.as_deref() {
@ -75,7 +75,7 @@ impl WorkspaceBuildScripts {
cmd
}
_ => {
let mut cmd = Sysroot::tool(sysroot, Tool::Cargo);
let mut cmd = sysroot.tool(Tool::Cargo);
cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
cmd.args(&config.extra_args);
@ -149,7 +149,7 @@ impl WorkspaceBuildScripts {
workspace: &CargoWorkspace,
progress: &dyn Fn(String),
toolchain: Option<&Version>,
sysroot: Option<&Sysroot>,
sysroot: &Sysroot,
) -> io::Result<WorkspaceBuildScripts> {
let current_dir = match &config.invocation_location {
InvocationLocation::Root(root) if config.run_build_script_command.is_some() => {
@ -195,7 +195,7 @@ impl WorkspaceBuildScripts {
// This is not gonna be used anyways, so just construct a dummy here
&ManifestPath::try_from(workspace_root.clone()).unwrap(),
None,
None,
&Sysroot::empty(),
)?;
// NB: Cargo.toml could have been modified between `cargo metadata` and
// `cargo check`. We shouldn't assume that package ids we see here are
@ -412,7 +412,7 @@ impl WorkspaceBuildScripts {
rustc: &CargoWorkspace,
current_dir: &AbsPath,
extra_env: &FxHashMap<String, String>,
sysroot: Option<&Sysroot>,
sysroot: &Sysroot,
) -> Self {
let mut bs = WorkspaceBuildScripts::default();
for p in rustc.packages() {
@ -420,7 +420,7 @@ impl WorkspaceBuildScripts {
}
let res = (|| {
let target_libdir = (|| {
let mut cargo_config = Sysroot::tool(sysroot, Tool::Cargo);
let mut cargo_config = sysroot.tool(Tool::Cargo);
cargo_config.envs(extra_env);
cargo_config
.current_dir(current_dir)
@ -429,7 +429,7 @@ impl WorkspaceBuildScripts {
if let Ok(it) = utf8_stdout(cargo_config) {
return Ok(it);
}
let mut cmd = Sysroot::tool(sysroot, Tool::Rustc);
let mut cmd = sysroot.tool(Tool::Rustc);
cmd.envs(extra_env);
cmd.args(["--print", "target-libdir"]);
utf8_stdout(cmd)

View file

@ -258,12 +258,12 @@ impl CargoWorkspace {
cargo_toml: &ManifestPath,
current_dir: &AbsPath,
config: &CargoConfig,
sysroot: Option<&Sysroot>,
sysroot: &Sysroot,
progress: &dyn Fn(String),
) -> anyhow::Result<cargo_metadata::Metadata> {
let targets = find_list_of_build_targets(config, cargo_toml, sysroot);
let cargo = Sysroot::tool(sysroot, Tool::Cargo);
let cargo = sysroot.tool(Tool::Cargo);
let mut meta = MetadataCommand::new();
meta.cargo_path(cargo.get_program());
cargo.get_envs().for_each(|(var, val)| _ = meta.env(var, val.unwrap_or_default()));
@ -536,7 +536,7 @@ impl CargoWorkspace {
fn find_list_of_build_targets(
config: &CargoConfig,
cargo_toml: &ManifestPath,
sysroot: Option<&Sysroot>,
sysroot: &Sysroot,
) -> Vec<String> {
if let Some(target) = &config.target {
return [target.into()].to_vec();
@ -553,9 +553,9 @@ fn find_list_of_build_targets(
fn rustc_discover_host_triple(
cargo_toml: &ManifestPath,
extra_env: &FxHashMap<String, String>,
sysroot: Option<&Sysroot>,
sysroot: &Sysroot,
) -> Option<String> {
let mut rustc = Sysroot::tool(sysroot, Tool::Rustc);
let mut rustc = sysroot.tool(Tool::Rustc);
rustc.envs(extra_env);
rustc.current_dir(cargo_toml.parent()).arg("-vV");
tracing::debug!("Discovering host platform by {:?}", rustc);
@ -581,9 +581,9 @@ fn rustc_discover_host_triple(
fn cargo_config_build_target(
cargo_toml: &ManifestPath,
extra_env: &FxHashMap<String, String>,
sysroot: Option<&Sysroot>,
sysroot: &Sysroot,
) -> Vec<String> {
let mut cargo_config = Sysroot::tool(sysroot, Tool::Cargo);
let mut cargo_config = sysroot.tool(Tool::Cargo);
cargo_config.envs(extra_env);
cargo_config
.current_dir(cargo_toml.parent())

View file

@ -62,9 +62,9 @@ pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: Targe
pub(crate) fn cargo_config_env(
manifest: &ManifestPath,
extra_env: &FxHashMap<String, String>,
sysroot: Option<&Sysroot>,
sysroot: &Sysroot,
) -> FxHashMap<String, String> {
let mut cargo_config = Sysroot::tool(sysroot, Tool::Cargo);
let mut cargo_config = sysroot.tool(Tool::Cargo);
cargo_config.envs(extra_env);
cargo_config
.current_dir(manifest.parent())

View file

@ -10,10 +10,10 @@ use crate::{cfg::CfgFlag, utf8_stdout, ManifestPath, Sysroot};
pub(crate) enum RustcCfgConfig<'a> {
/// Use `rustc --print cfg`, either with the binary from the sysroot or by discovering via
/// [`toolchain::rustc`].
Rustc(Option<&'a Sysroot>),
Rustc(&'a Sysroot),
/// Use `cargo --print cfg`, either with the binary from the sysroot or by discovering via
/// [`toolchain::cargo`].
Cargo(Option<&'a Sysroot>, &'a ManifestPath),
Cargo(&'a Sysroot, &'a ManifestPath),
}
pub(crate) fn get(
@ -65,7 +65,7 @@ fn get_rust_cfgs(
) -> anyhow::Result<String> {
let sysroot = match config {
RustcCfgConfig::Cargo(sysroot, cargo_toml) => {
let mut cmd = Sysroot::tool(sysroot, Tool::Cargo);
let mut cmd = sysroot.tool(Tool::Cargo);
cmd.envs(extra_env);
cmd.current_dir(cargo_toml.parent())
@ -86,7 +86,7 @@ fn get_rust_cfgs(
RustcCfgConfig::Rustc(sysroot) => sysroot,
};
let mut cmd = Sysroot::tool(sysroot, Tool::Rustc);
let mut cmd = sysroot.tool(Tool::Rustc);
cmd.envs(extra_env);
cmd.args(["--print", "cfg", "-O"]);
if let Some(target) = target {

View file

@ -4,7 +4,7 @@
//! but we can't process `.rlib` and need source code instead. The source code
//! is typically installed with `rustup component add rust-src` command.
use std::{env, fs, ops, process::Command, sync::Arc};
use std::{env, fs, ops, process::Command};
use anyhow::{format_err, Result};
use base_db::CrateName;
@ -16,30 +16,19 @@ use toolchain::{probe_for_binary, Tool};
use crate::{utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath};
#[derive(Debug, Clone)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Sysroot {
root: AbsPathBuf,
src_root: Option<Result<AbsPathBuf, Arc<anyhow::Error>>>,
root: Option<AbsPathBuf>,
src_root: Option<AbsPathBuf>,
mode: SysrootMode,
}
impl Eq for Sysroot {}
impl PartialEq for Sysroot {
fn eq(&self, other: &Self) -> bool {
self.root == other.root
&& self.mode == other.mode
&& match (&self.src_root, &other.src_root) {
(Some(Ok(this)), Some(Ok(other))) => this == other,
(None, None) | (Some(Err(_)), Some(Err(_))) => true,
_ => false,
}
}
error: Option<String>,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) enum SysrootMode {
Workspace(CargoWorkspace),
Stitched(Stitched),
Empty,
}
#[derive(Debug, Clone, Eq, PartialEq)]
@ -89,70 +78,40 @@ pub(crate) struct SysrootCrateData {
}
impl Sysroot {
pub const fn empty() -> Sysroot {
Sysroot { root: None, src_root: None, mode: SysrootMode::Empty, error: None }
}
/// Returns sysroot "root" directory, where `bin/`, `etc/`, `lib/`, `libexec/`
/// subfolders live, like:
/// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu`
pub fn root(&self) -> &AbsPath {
&self.root
pub fn root(&self) -> Option<&AbsPath> {
self.root.as_deref()
}
/// Returns the sysroot "source" directory, where stdlib sources are located, like:
/// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library`
pub fn src_root(&self) -> Option<&AbsPath> {
self.src_root.as_ref()?.as_deref().ok()
self.src_root.as_deref()
}
pub fn is_empty(&self) -> bool {
match &self.mode {
SysrootMode::Workspace(ws) => ws.packages().next().is_none(),
SysrootMode::Stitched(stitched) => stitched.crates.is_empty(),
SysrootMode::Empty => true,
}
}
pub fn loading_warning(&self) -> Option<String> {
let src_root = match &self.src_root {
None => return Some(format!("sysroot at `{}` has no library sources", self.root)),
Some(Ok(src_root)) => src_root,
Some(Err(e)) => return Some(e.to_string()),
};
let has_core = match &self.mode {
SysrootMode::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
SysrootMode::Stitched(stitched) => stitched.by_name("core").is_some(),
};
if !has_core {
let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
" (`RUST_SRC_PATH` might be incorrect, try unsetting it)"
} else {
" try running `rustup component add rust-src` to possible fix this"
};
Some(format!("could not find libcore in loaded sysroot at `{}`{var_note}", src_root,))
} else {
None
}
}
pub fn check_has_core(&self) -> Result<(), String> {
let Some(Ok(src_root)) = &self.src_root else { return Ok(()) };
let has_core = match &self.mode {
SysrootMode::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
SysrootMode::Stitched(stitched) => stitched.by_name("core").is_some(),
};
if !has_core {
let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
" (`RUST_SRC_PATH` might be incorrect, try unsetting it)"
} else {
" try running `rustup component add rust-src` to possible fix this"
};
Err(format!("could not find libcore in loaded sysroot at `{}`{var_note}", src_root,))
} else {
Ok(())
}
pub fn error(&self) -> Option<&str> {
self.error.as_deref()
}
pub fn num_packages(&self) -> usize {
match &self.mode {
SysrootMode::Workspace(ws) => ws.packages().count(),
SysrootMode::Stitched(c) => c.crates().count(),
SysrootMode::Empty => 0,
}
}
@ -168,63 +127,50 @@ impl Sysroot {
dir: &AbsPath,
extra_env: &FxHashMap<String, String>,
metadata: bool,
) -> Result<Sysroot> {
tracing::debug!("discovering sysroot for {dir}");
let sysroot_dir = discover_sysroot_dir(dir, extra_env)?;
let sysroot_src_dir =
discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env);
Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), metadata))
}
pub fn discover_no_source(
dir: &AbsPath,
extra_env: &FxHashMap<String, String>,
) -> Result<Sysroot> {
tracing::debug!("discovering sysroot for {dir}");
let sysroot_dir = discover_sysroot_dir(dir, extra_env)?;
let sysroot_src_dir =
discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env);
Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), false))
) -> Sysroot {
let sysroot_dir = discover_sysroot_dir(dir, extra_env);
let sysroot_src_dir = sysroot_dir.as_ref().ok().map(|sysroot_dir| {
discover_sysroot_src_dir_or_add_component(sysroot_dir, dir, extra_env)
});
Sysroot::load_core_check(Some(sysroot_dir), sysroot_src_dir, metadata)
}
pub fn discover_with_src_override(
current_dir: &AbsPath,
extra_env: &FxHashMap<String, String>,
src: AbsPathBuf,
sysroot_src_dir: AbsPathBuf,
metadata: bool,
) -> Result<Sysroot> {
tracing::debug!("discovering sysroot for {current_dir}");
let sysroot_dir = discover_sysroot_dir(current_dir, extra_env)?;
Ok(Sysroot::load(sysroot_dir, Some(Ok(src)), metadata))
) -> Sysroot {
let sysroot_dir = discover_sysroot_dir(current_dir, extra_env);
Sysroot::load_core_check(Some(sysroot_dir), Some(Ok(sysroot_src_dir)), metadata)
}
pub fn discover_sysroot_src_dir(sysroot_dir: AbsPathBuf, metadata: bool) -> Sysroot {
let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir)
.ok_or_else(|| format_err!("can't find standard library sources in {sysroot_dir}"));
Sysroot::load_core_check(Some(Ok(sysroot_dir)), Some(sysroot_src_dir), metadata)
}
pub fn discover_rustc_src(&self) -> Option<ManifestPath> {
get_rustc_src(&self.root)
}
pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf, metadata: bool) -> Result<Sysroot> {
let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| {
format_err!("can't load standard library from sysroot path {sysroot_dir}")
});
Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), metadata))
get_rustc_src(self.root()?)
}
/// Returns a command to run a tool preferring the cargo proxies if the sysroot exists.
pub fn tool(sysroot: Option<&Self>, tool: Tool) -> Command {
match sysroot {
Some(sysroot) => {
pub fn tool(&self, tool: Tool) -> Command {
match self.root() {
Some(root) => {
// special case rustc, we can look that up directly in the sysroot's bin folder
// as it should never invoke another cargo binary
if let Tool::Rustc = tool {
if let Some(path) =
probe_for_binary(sysroot.root.join("bin").join(Tool::Rustc.name()).into())
probe_for_binary(root.join("bin").join(Tool::Rustc.name()).into())
{
return Command::new(path);
}
}
let mut cmd = Command::new(tool.prefer_proxy());
cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(&sysroot.root));
cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(root));
cmd
}
_ => Command::new(tool.path()),
@ -232,35 +178,89 @@ impl Sysroot {
}
pub fn discover_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
let Some(root) = self.root() else {
return Err(anyhow::format_err!("no sysroot",));
};
["libexec", "lib"]
.into_iter()
.map(|segment| self.root().join(segment).join("rust-analyzer-proc-macro-srv"))
.map(|segment| root.join(segment).join("rust-analyzer-proc-macro-srv"))
.find_map(|server_path| probe_for_binary(server_path.into()))
.map(AbsPathBuf::assert)
.ok_or_else(|| {
anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", self.root())
anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", root)
})
}
pub fn load(
sysroot_dir: AbsPathBuf,
sysroot_dir: Option<AbsPathBuf>,
sysroot_src_dir: Option<AbsPathBuf>,
metadata: bool,
) -> Sysroot {
Self::load_core_check(sysroot_dir.map(Ok), sysroot_src_dir.map(Ok), metadata)
}
fn load_core_check(
sysroot_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
metadata: bool,
) -> Sysroot {
let mut sysroot = Self::load_(sysroot_dir, sysroot_src_dir, metadata);
if sysroot.error.is_none() {
if let Some(src_root) = &sysroot.src_root {
let has_core = match &sysroot.mode {
SysrootMode::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
SysrootMode::Stitched(stitched) => stitched.by_name("core").is_some(),
SysrootMode::Empty => true,
};
if !has_core {
let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
" (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)"
} else {
", try running `rustup component add rust-src` to possibly fix this"
};
sysroot.error = Some(format!(
"sysroot at `{}` is missing a `core` library{var_note}",
src_root,
));
}
}
}
sysroot
}
fn load_(
sysroot_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
metadata: bool,
) -> Sysroot {
let sysroot_dir = match sysroot_dir {
Some(Ok(sysroot_dir)) => Some(sysroot_dir),
Some(Err(e)) => {
return Sysroot {
root: None,
src_root: None,
mode: SysrootMode::Empty,
error: Some(e.to_string()),
}
}
None => None,
};
let sysroot_src_dir = match sysroot_src_dir {
Some(Ok(sysroot_src_dir)) => sysroot_src_dir,
Some(Err(e)) => {
return Sysroot {
root: sysroot_dir,
src_root: Some(Err(Arc::new(e))),
mode: SysrootMode::Stitched(Stitched { crates: Arena::default() }),
src_root: None,
mode: SysrootMode::Empty,
error: Some(e.to_string()),
}
}
None => {
return Sysroot {
root: sysroot_dir,
src_root: None,
mode: SysrootMode::Stitched(Stitched { crates: Arena::default() }),
mode: SysrootMode::Empty,
error: None,
}
}
};
@ -284,7 +284,7 @@ impl Sysroot {
&sysroot_cargo_toml,
&current_dir,
&cargo_config,
None,
&Sysroot::empty(),
&|_| (),
)
.map_err(|e| {
@ -368,8 +368,9 @@ impl Sysroot {
let cargo_workspace = CargoWorkspace::new(res, sysroot_cargo_toml);
Some(Sysroot {
root: sysroot_dir.clone(),
src_root: Some(Ok(sysroot_src_dir.clone())),
src_root: Some(sysroot_src_dir.clone()),
mode: SysrootMode::Workspace(cargo_workspace),
error: None,
})
})();
if let Some(sysroot) = sysroot {
@ -420,8 +421,9 @@ impl Sysroot {
}
Sysroot {
root: sysroot_dir,
src_root: Some(Ok(sysroot_src_dir)),
src_root: Some(sysroot_src_dir),
mode: SysrootMode::Stitched(stitched),
error: None,
}
}
}
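Taken together, the `Sysroot` hunks above replace `Option<&Sysroot>` parameters with a plain `&Sysroot` plus an explicit `Sysroot::empty()` value, and record discovery failures in an `error: Option<String>` field instead of a `Result`. A compact, self-contained sketch of that "empty value instead of `Option`" shape, with stand-in fields and a string-based `tool` rather than the real API:

```rust
use std::path::PathBuf;

// Stand-in sketch only: "no sysroot" becomes an empty value with an optional
// error message, so callers stop threading `Option<&Sysroot>` around.
#[derive(Debug)]
struct Sysroot {
    root: Option<PathBuf>,
    error: Option<String>,
}

impl Sysroot {
    const fn empty() -> Sysroot {
        Sysroot { root: None, error: None }
    }

    // rough analogue of `Sysroot::tool`: resolve against the root if we have
    // one, otherwise fall back to the bare tool name on $PATH
    fn tool(&self, tool: &str) -> PathBuf {
        match &self.root {
            Some(root) => root.join("bin").join(tool),
            None => PathBuf::from(tool),
        }
    }
}

fn main() {
    let missing = Sysroot { error: Some("rustup not found".to_owned()), ..Sysroot::empty() };
    let found = Sysroot { root: Some(PathBuf::from("/opt/rust")), ..Sysroot::empty() };

    println!("{:?}", missing.tool("cargo")); // "cargo"
    println!("{:?}", found.tool("cargo"));   // "/opt/rust/bin/cargo"
    if let Some(err) = &missing.error {
        eprintln!("sysroot warning: {err}");
    }
}
```

That is the shape behind hunks like `let mut cmd = sysroot.tool(Tool::Cargo);` above and the `(None, _) => Sysroot::empty()` arms in the workspace code below.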

View file

@ -9,10 +9,10 @@ use crate::{utf8_stdout, ManifestPath, Sysroot};
pub enum RustcDataLayoutConfig<'a> {
/// Use `rustc --print target-spec-json`, either with the binary from the sysroot or by discovering via
/// [`toolchain::rustc`].
Rustc(Option<&'a Sysroot>),
Rustc(&'a Sysroot),
/// Use `cargo --print target-spec-json`, either with the binary from the sysroot or by discovering via
/// [`toolchain::cargo`].
Cargo(Option<&'a Sysroot>, &'a ManifestPath),
Cargo(&'a Sysroot, &'a ManifestPath),
}
pub fn get(
@ -28,7 +28,7 @@ pub fn get(
};
let sysroot = match config {
RustcDataLayoutConfig::Cargo(sysroot, cargo_toml) => {
let mut cmd = Sysroot::tool(sysroot, Tool::Cargo);
let mut cmd = sysroot.tool(Tool::Cargo);
cmd.envs(extra_env);
cmd.current_dir(cargo_toml.parent())
.args([

View file

@ -34,7 +34,7 @@ fn load_cargo_with_overrides(
cargo_config_extra_env: Default::default(),
},
cfg_overrides,
sysroot: Err(None),
sysroot: Sysroot::empty(),
rustc_cfg: Vec::new(),
toolchain: None,
target_layout: Err("target_data_layout not loaded".into()),
@ -57,7 +57,7 @@ fn load_cargo_with_fake_sysroot(
rustc: Err(None),
cargo_config_extra_env: Default::default(),
},
sysroot: Ok(get_fake_sysroot()),
sysroot: get_fake_sysroot(),
rustc_cfg: Vec::new(),
cfg_overrides: Default::default(),
toolchain: None,
@ -77,7 +77,7 @@ fn load_cargo_with_fake_sysroot(
fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) {
let data = get_test_json_file(file);
let project = rooted_project_json(data);
let sysroot = Ok(get_fake_sysroot());
let sysroot = get_fake_sysroot();
let project_workspace = ProjectWorkspace {
kind: ProjectWorkspaceKind::Json(project),
sysroot,
@ -144,7 +144,7 @@ fn get_fake_sysroot() -> Sysroot {
// fake sysroot, so we give them both the same path:
let sysroot_dir = AbsPathBuf::assert(sysroot_path);
let sysroot_src_dir = sysroot_dir.clone();
Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false)
Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir), false)
}
fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
@ -281,12 +281,11 @@ fn smoke_test_real_sysroot_cargo() {
let manifest_path =
ManifestPath::try_from(AbsPathBuf::try_from(meta.workspace_root.clone()).unwrap()).unwrap();
let cargo_workspace = CargoWorkspace::new(meta, manifest_path);
let sysroot = Ok(Sysroot::discover(
let sysroot = Sysroot::discover(
AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))),
&Default::default(),
true,
)
.unwrap());
);
let project_workspace = ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo {

View file

@ -48,7 +48,7 @@ pub struct PackageRoot {
pub struct ProjectWorkspace {
pub kind: ProjectWorkspaceKind,
/// The sysroot loaded for this workspace.
pub sysroot: Result<Sysroot, Option<String>>,
pub sysroot: Sysroot,
/// Holds cfg flags for the current target. We get those by running
/// `rustc --print cfg`.
// FIXME: make this a per-crate map, as, eg, build.rs might have a
@ -112,7 +112,7 @@ impl fmt::Debug for ProjectWorkspace {
.debug_struct("Cargo")
.field("root", &cargo.workspace_root().file_name())
.field("n_packages", &cargo.packages().len())
.field("sysroot", &sysroot.is_ok())
.field("n_sysroot_crates", &sysroot.num_packages())
.field(
"n_rustc_compiler_crates",
&rustc.as_ref().map(|a| a.as_ref()).map_or(0, |(rc, _)| rc.packages().len()),
@ -125,11 +125,9 @@ impl fmt::Debug for ProjectWorkspace {
.finish(),
ProjectWorkspaceKind::Json(project) => {
let mut debug_struct = f.debug_struct("Json");
debug_struct.field("n_crates", &project.n_crates());
if let Ok(sysroot) = sysroot {
debug_struct.field("n_sysroot_crates", &sysroot.num_packages());
}
debug_struct
.field("n_crates", &project.n_crates())
.field("n_sysroot_crates", &sysroot.num_packages())
.field("n_rustc_cfg", &rustc_cfg.len())
.field("toolchain", &toolchain)
.field("data_layout", &target_layout)
@ -144,7 +142,7 @@ impl fmt::Debug for ProjectWorkspace {
.debug_struct("DetachedFiles")
.field("file", &file)
.field("cargo_script", &cargo_script.is_some())
.field("sysroot", &sysroot.is_ok())
.field("n_sysroot_crates", &sysroot.num_packages())
.field("cargo_script", &cargo_script.is_some())
.field("n_rustc_cfg", &rustc_cfg.len())
.field("toolchain", &toolchain)
@ -158,7 +156,7 @@ impl fmt::Debug for ProjectWorkspace {
fn get_toolchain_version(
current_dir: &AbsPath,
sysroot: Option<&Sysroot>,
sysroot: &Sysroot,
tool: Tool,
extra_env: &FxHashMap<String, String>,
prefix: &str,
@ -213,41 +211,37 @@ impl ProjectWorkspace {
}
ProjectManifest::CargoToml(cargo_toml) => {
let sysroot = match (&config.sysroot, &config.sysroot_src) {
(Some(RustLibSource::Path(path)), None) => {
Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata).map_err(|e| {
Some(format!("Failed to find sysroot at {path}:{e}"))
})
}
(Some(RustLibSource::Discover), None) => {
Sysroot::discover(cargo_toml.parent(), &config.extra_env, config.sysroot_query_metadata).map_err(|e| {
Some(format!("Failed to find sysroot for Cargo.toml file {cargo_toml}. Is rust-src installed? {e}"))
})
}
(Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => {
Ok(Sysroot::load(sysroot.clone(), Some(Ok(sysroot_src.clone())), config.sysroot_query_metadata))
}
(Some(RustLibSource::Discover), None) => Sysroot::discover(
cargo_toml.parent(),
&config.extra_env,
config.sysroot_query_metadata,
),
(Some(RustLibSource::Discover), Some(sysroot_src)) => {
Sysroot::discover_with_src_override(
cargo_toml.parent(),
&config.extra_env,
sysroot_src.clone(), config.sysroot_query_metadata,
).map_err(|e| {
Some(format!("Failed to find sysroot for Cargo.toml file {cargo_toml}. Is rust-src installed? {e}"))
})
sysroot_src.clone(),
config.sysroot_query_metadata,
)
}
(None, _) => Err(None),
(Some(RustLibSource::Path(path)), None) => Sysroot::discover_sysroot_src_dir(
path.clone(),
config.sysroot_query_metadata,
),
(Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => Sysroot::load(
Some(sysroot.clone()),
Some(sysroot_src.clone()),
config.sysroot_query_metadata,
),
(None, _) => Sysroot::empty(),
};
let sysroot_ref = sysroot.as_ref().ok();
if let Ok(sysroot) = &sysroot {
tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = %sysroot.root(), "Using sysroot");
}
tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = ?sysroot.root(), "Using sysroot");
let rustc_dir = match &config.rustc_source {
Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
.map_err(|p| Some(format!("rustc source path is not absolute: {p}"))),
Some(RustLibSource::Discover) => {
sysroot_ref.and_then(Sysroot::discover_rustc_src).ok_or_else(|| {
sysroot.discover_rustc_src().ok_or_else(|| {
Some("Failed to discover rustc source for sysroot.".to_owned())
})
}
@ -263,7 +257,7 @@ impl ProjectWorkspace {
features: crate::CargoFeatures::default(),
..config.clone()
},
sysroot_ref,
&sysroot,
progress,
) {
Ok(meta) => {
@ -272,7 +266,7 @@ impl ProjectWorkspace {
&workspace,
cargo_toml.parent(),
&config.extra_env,
sysroot_ref
&sysroot
);
Ok(Box::new((workspace, buildscripts)))
}
@ -290,7 +284,7 @@ impl ProjectWorkspace {
let toolchain = get_toolchain_version(
cargo_toml.parent(),
sysroot_ref,
&sysroot,
Tool::Cargo,
&config.extra_env,
"cargo ",
@ -298,12 +292,12 @@ impl ProjectWorkspace {
let rustc_cfg = rustc_cfg::get(
config.target.as_deref(),
&config.extra_env,
RustcCfgConfig::Cargo(sysroot_ref, cargo_toml),
RustcCfgConfig::Cargo(&sysroot, cargo_toml),
);
let cfg_overrides = config.cfg_overrides.clone();
let data_layout = target_data_layout::get(
RustcDataLayoutConfig::Cargo(sysroot_ref, cargo_toml),
RustcDataLayoutConfig::Cargo(&sysroot, cargo_toml),
config.target.as_deref(),
&config.extra_env,
);
@ -315,7 +309,7 @@ impl ProjectWorkspace {
cargo_toml,
cargo_toml.parent(),
config,
sysroot_ref,
&sysroot,
progress,
)
.with_context(|| {
@ -326,7 +320,7 @@ impl ProjectWorkspace {
let cargo = CargoWorkspace::new(meta, cargo_toml.clone());
let cargo_config_extra_env =
cargo_config_env(cargo_toml, &config.extra_env, sysroot_ref);
cargo_config_env(cargo_toml, &config.extra_env, &sysroot);
ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo {
cargo,
@ -354,32 +348,13 @@ impl ProjectWorkspace {
extra_env: &FxHashMap<String, String>,
cfg_overrides: &CfgOverrides,
) -> ProjectWorkspace {
let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
(Some(sysroot), Some(sysroot_src)) => {
Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false))
}
(Some(sysroot), None) => {
// assume sysroot is structured like rustup's and guess `sysroot_src`
let sysroot_src =
sysroot.join("lib").join("rustlib").join("src").join("rust").join("library");
Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false))
}
(None, Some(sysroot_src)) => {
// assume sysroot is structured like rustup's and guess `sysroot`
let mut sysroot = sysroot_src.clone();
for _ in 0..5 {
sysroot.pop();
}
Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false))
}
(None, None) => Err(None),
};
let sysroot_ref = sysroot.as_ref().ok();
let cfg_config = RustcCfgConfig::Rustc(sysroot_ref);
let data_layout_config = RustcDataLayoutConfig::Rustc(sysroot_ref);
let sysroot =
Sysroot::load(project_json.sysroot.clone(), project_json.sysroot_src.clone(), false);
let cfg_config = RustcCfgConfig::Rustc(&sysroot);
let data_layout_config = RustcDataLayoutConfig::Rustc(&sysroot);
let toolchain = match get_toolchain_version(
project_json.path(),
sysroot_ref,
&sysroot,
Tool::Rustc,
extra_env,
"rustc ",
@ -410,24 +385,16 @@ impl ProjectWorkspace {
let dir = detached_file.parent();
let sysroot = match &config.sysroot {
Some(RustLibSource::Path(path)) => {
Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata)
.map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}")))
Sysroot::discover_sysroot_src_dir(path.clone(), config.sysroot_query_metadata)
}
Some(RustLibSource::Discover) => Sysroot::discover(
dir,
&config.extra_env,
config.sysroot_query_metadata,
)
.map_err(|e| {
Some(format!("Failed to find sysroot for {dir}. Is rust-src installed? {e}"))
}),
None => Err(None),
Some(RustLibSource::Discover) => {
Sysroot::discover(dir, &config.extra_env, config.sysroot_query_metadata)
}
None => Sysroot::empty(),
};
let sysroot_ref = sysroot.as_ref().ok();
let toolchain =
match get_toolchain_version(dir, sysroot_ref, Tool::Rustc, &config.extra_env, "rustc ")
{
match get_toolchain_version(dir, &sysroot, Tool::Rustc, &config.extra_env, "rustc ") {
Ok(it) => it,
Err(e) => {
tracing::error!("{e}");
@ -435,25 +402,24 @@ impl ProjectWorkspace {
}
};
let rustc_cfg = rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(sysroot_ref));
let rustc_cfg = rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(&sysroot));
let data_layout = target_data_layout::get(
RustcDataLayoutConfig::Rustc(sysroot_ref),
RustcDataLayoutConfig::Rustc(&sysroot),
None,
&config.extra_env,
);
let cargo_script =
CargoWorkspace::fetch_metadata(detached_file, dir, config, sysroot_ref, &|_| ())
.ok()
.map(|ws| {
CargoWorkspace::fetch_metadata(detached_file, dir, config, &sysroot, &|_| ()).ok().map(
|ws| {
(
CargoWorkspace::new(ws, detached_file.clone()),
WorkspaceBuildScripts::default(),
)
});
},
);
let cargo_config_extra_env =
cargo_config_env(detached_file, &config.extra_env, sysroot_ref);
let cargo_config_extra_env = cargo_config_env(detached_file, &config.extra_env, &sysroot);
Ok(ProjectWorkspace {
kind: ProjectWorkspaceKind::DetachedFile {
file: detached_file.to_owned(),
@ -489,7 +455,7 @@ impl ProjectWorkspace {
cargo,
progress,
self.toolchain.as_ref(),
self.sysroot.as_ref().ok(),
&self.sysroot,
)
.with_context(|| {
format!("Failed to run build scripts for {}", cargo.workspace_root())
@ -562,17 +528,7 @@ impl ProjectWorkspace {
}
pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
match &self.sysroot {
Ok(sysroot) => sysroot.discover_proc_macro_srv(),
Err(None) => Err(anyhow::format_err!(
"cannot find proc-macro server, the workspace `{}` is missing a sysroot",
self.manifest_or_root()
)),
Err(Some(e)) => Err(anyhow::format_err!(
"cannot find proc-macro server, the workspace `{}` is missing a sysroot: {e}",
self.manifest_or_root()
)),
}
self.sysroot.discover_proc_macro_srv()
}
/// Returns the roots for the current `ProjectWorkspace`
@ -580,39 +536,37 @@ impl ProjectWorkspace {
/// the root is a member of the current workspace
pub fn to_roots(&self) -> Vec<PackageRoot> {
let mk_sysroot = || {
self.sysroot.as_ref().into_iter().flat_map(move |sysroot: &Sysroot| {
let mut r = match sysroot.mode() {
SysrootMode::Workspace(ws) => ws
.packages()
.filter_map(|pkg| {
if ws[pkg].is_local {
// the local ones are included in the main `PackageRoot` below
return None;
}
let pkg_root = ws[pkg].manifest.parent().to_path_buf();
let mut r = match self.sysroot.mode() {
SysrootMode::Workspace(ws) => ws
.packages()
.filter_map(|pkg| {
if ws[pkg].is_local {
// the local ones are included in the main `PackageRoot` below
return None;
}
let pkg_root = ws[pkg].manifest.parent().to_path_buf();
let include = vec![pkg_root.clone()];
let include = vec![pkg_root.clone()];
let exclude = vec![
pkg_root.join(".git"),
pkg_root.join("target"),
pkg_root.join("tests"),
pkg_root.join("examples"),
pkg_root.join("benches"),
];
Some(PackageRoot { is_local: false, include, exclude })
})
.collect(),
SysrootMode::Stitched(_) => vec![],
};
let exclude = vec![
pkg_root.join(".git"),
pkg_root.join("target"),
pkg_root.join("tests"),
pkg_root.join("examples"),
pkg_root.join("benches"),
];
Some(PackageRoot { is_local: false, include, exclude })
})
.collect(),
SysrootMode::Stitched(_) | SysrootMode::Empty => vec![],
};
r.push(PackageRoot {
is_local: false,
include: sysroot.src_root().map(|it| it.to_path_buf()).into_iter().collect(),
exclude: Vec::new(),
});
r
})
r.push(PackageRoot {
is_local: false,
include: self.sysroot.src_root().map(|it| it.to_path_buf()).into_iter().collect(),
exclude: Vec::new(),
});
r
};
match &self.kind {
ProjectWorkspaceKind::Json(project) => project
@ -731,19 +685,15 @@ impl ProjectWorkspace {
}
pub fn n_packages(&self) -> usize {
let sysroot_package_len = self.sysroot.num_packages();
match &self.kind {
ProjectWorkspaceKind::Json(project) => {
let sysroot_package_len = self.sysroot.as_ref().map_or(0, |it| it.num_packages());
sysroot_package_len + project.n_crates()
}
ProjectWorkspaceKind::Json(project) => sysroot_package_len + project.n_crates(),
ProjectWorkspaceKind::Cargo { cargo, rustc, .. } => {
let rustc_package_len =
rustc.as_ref().map(|a| a.as_ref()).map_or(0, |(it, _)| it.packages().len());
let sysroot_package_len = self.sysroot.as_ref().map_or(0, |it| it.num_packages());
cargo.packages().len() + sysroot_package_len + rustc_package_len
}
ProjectWorkspaceKind::DetachedFile { cargo: cargo_script, .. } => {
let sysroot_package_len = self.sysroot.as_ref().map_or(0, |it| it.num_packages());
sysroot_package_len
+ cargo_script.as_ref().map_or(1, |(cargo, _)| cargo.packages().len())
}
@ -764,7 +714,7 @@ impl ProjectWorkspace {
rustc_cfg.clone(),
load,
project,
sysroot.as_ref().ok(),
sysroot,
extra_env,
cfg_overrides,
),
@ -780,7 +730,7 @@ impl ProjectWorkspace {
load,
rustc.as_ref().map(|a| a.as_ref()).ok(),
cargo,
sysroot.as_ref().ok(),
sysroot,
rustc_cfg.clone(),
cfg_overrides,
build_scripts,
@ -793,7 +743,7 @@ impl ProjectWorkspace {
&mut |path| load(path),
None,
cargo,
sysroot.as_ref().ok(),
sysroot,
rustc_cfg.clone(),
cfg_overrides,
build_scripts,
@ -803,7 +753,7 @@ impl ProjectWorkspace {
rustc_cfg.clone(),
load,
file,
sysroot.as_ref().ok(),
sysroot,
cfg_overrides,
)
},
@ -811,9 +761,7 @@ impl ProjectWorkspace {
),
};
if matches!(sysroot.as_ref().map(|it| it.mode()), Ok(SysrootMode::Stitched(_)))
&& crate_graph.patch_cfg_if()
{
if matches!(sysroot.mode(), SysrootMode::Stitched(_)) && crate_graph.patch_cfg_if() {
tracing::debug!("Patched std to depend on cfg-if")
} else {
tracing::debug!("Did not patch std to depend on cfg-if")
@ -892,15 +840,14 @@ fn project_json_to_crate_graph(
rustc_cfg: Vec<CfgFlag>,
load: FileLoader<'_>,
project: &ProjectJson,
sysroot: Option<&Sysroot>,
sysroot: &Sysroot,
extra_env: &FxHashMap<String, String>,
override_cfg: &CfgOverrides,
) -> (CrateGraph, ProcMacroPaths) {
let mut res = (CrateGraph::default(), ProcMacroPaths::default());
let (crate_graph, proc_macros) = &mut res;
let sysroot_deps = sysroot
.as_ref()
.map(|sysroot| sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load));
let (public_deps, libproc_macro) =
sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load);
let r_a_cfg_flag = CfgFlag::Atom("rust_analyzer".to_owned());
let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
@ -978,11 +925,9 @@ fn project_json_to_crate_graph(
for (from_idx, krate) in project.crates() {
if let Some(&from) = idx_to_crate_id.get(&from_idx) {
if let Some((public_deps, libproc_macro)) = &sysroot_deps {
public_deps.add_to_crate_graph(crate_graph, from);
if let Some(proc_macro) = libproc_macro {
add_proc_macro_dep(crate_graph, from, *proc_macro, krate.is_proc_macro);
}
public_deps.add_to_crate_graph(crate_graph, from);
if let Some(proc_macro) = libproc_macro {
add_proc_macro_dep(crate_graph, from, proc_macro, krate.is_proc_macro);
}
for dep in &krate.deps {
@ -999,7 +944,7 @@ fn cargo_to_crate_graph(
load: FileLoader<'_>,
rustc: Option<&(CargoWorkspace, WorkspaceBuildScripts)>,
cargo: &CargoWorkspace,
sysroot: Option<&Sysroot>,
sysroot: &Sysroot,
rustc_cfg: Vec<CfgFlag>,
override_cfg: &CfgOverrides,
build_scripts: &WorkspaceBuildScripts,
@ -1008,10 +953,8 @@ fn cargo_to_crate_graph(
let mut res = (CrateGraph::default(), ProcMacroPaths::default());
let crate_graph = &mut res.0;
let proc_macros = &mut res.1;
let (public_deps, libproc_macro) = match sysroot {
Some(sysroot) => sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load),
None => (SysrootPublicDeps::default(), None),
};
let (public_deps, libproc_macro) =
sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load);
let cfg_options = CfgOptions::from_iter(rustc_cfg);
@ -1188,15 +1131,13 @@ fn detached_file_to_crate_graph(
rustc_cfg: Vec<CfgFlag>,
load: FileLoader<'_>,
detached_file: &ManifestPath,
sysroot: Option<&Sysroot>,
sysroot: &Sysroot,
override_cfg: &CfgOverrides,
) -> (CrateGraph, ProcMacroPaths) {
let _p = tracing::span!(tracing::Level::INFO, "detached_file_to_crate_graph").entered();
let mut crate_graph = CrateGraph::default();
let (public_deps, _libproc_macro) = match sysroot {
Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load),
None => (SysrootPublicDeps::default(), None),
};
let (public_deps, _libproc_macro) =
sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load);
let mut cfg_options = CfgOptions::from_iter(rustc_cfg);
cfg_options.insert_atom("test".into());
@ -1431,7 +1372,7 @@ fn sysroot_to_crate_graph(
load,
None,
cargo,
None,
&Sysroot::empty(),
rustc_cfg,
&CfgOverrides {
global: CfgDiff::new(
@ -1554,6 +1495,7 @@ fn sysroot_to_crate_graph(
stitched.proc_macro().and_then(|it| sysroot_crates.get(&it).copied());
(public_deps, libproc_macro)
}
SysrootMode::Empty => (SysrootPublicDeps { deps: vec![] }, None),
}
}

View file

@ -8,7 +8,8 @@ use std::{
use hir::{
db::{DefDatabase, ExpandDatabase, HirDatabase},
Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ModuleDef, Name,
Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ImportPathConfig,
ModuleDef, Name,
};
use hir_def::{
body::{BodySourceMap, SyntheticSyntax},
@ -438,8 +439,13 @@ impl flags::AnalysisStats {
let mut formatter = |_: &hir::Type| todo.clone();
let mut syntax_hit_found = false;
for term in found_terms {
let generated =
term.gen_source_code(&scope, &mut formatter, false, true).unwrap();
let generated = term
.gen_source_code(
&scope,
&mut formatter,
ImportPathConfig { prefer_no_std: false, prefer_prelude: true },
)
.unwrap();
syntax_hit_found |= trim(&original_text) == trim(&generated);
// Validate if type-checks

View file

@ -69,11 +69,9 @@ impl Tester {
let cargo_config =
CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
let sysroot =
Ok(Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env, false)
.unwrap());
let sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env, false);
let data_layout = target_data_layout::get(
RustcDataLayoutConfig::Rustc(sysroot.as_ref().ok()),
RustcDataLayoutConfig::Rustc(&sysroot),
None,
&cargo_config.extra_env,
);

View file

@ -1031,6 +1031,8 @@ impl Config {
&& completion_item_edit_resolve(&self.caps),
enable_self_on_the_fly: self.completion_autoself_enable(source_root).to_owned(),
enable_private_editable: self.completion_privateEditable_enable(source_root).to_owned(),
enable_term_search: self.completion_termSearch_enable(source_root).to_owned(),
term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64,
full_function_signatures: self
.completion_fullFunctionSignatures_enable(source_root)
.to_owned(),
@ -1039,8 +1041,6 @@ impl Config {
CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses),
CallableCompletionDef::None => None,
},
insert_use: self.insert_use_config(source_root),
prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
snippet_cap: SnippetCap::new(try_or_def!(
self.caps
.text_document
@ -1051,11 +1051,11 @@ impl Config {
.as_ref()?
.snippet_support?
)),
insert_use: self.insert_use_config(source_root),
prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
snippets: self.snippets.clone().to_vec(),
limit: self.completion_limit(source_root).to_owned(),
enable_term_search: self.completion_termSearch_enable(source_root).to_owned(),
term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64,
prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
}
}

View file

@ -87,6 +87,7 @@ pub(crate) struct GlobalState {
pub(crate) flycheck_sender: Sender<flycheck::Message>,
pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
pub(crate) last_flycheck_error: Option<String>,
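// Set once diagnostics start arriving for the current flycheck run; used to
// clear stale diagnostics lazily (see `handle_flycheck_msg`).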
pub(crate) diagnostics_received: bool,
// Test explorer
pub(crate) test_run_session: Option<Vec<flycheck::CargoTestHandle>>,
@ -224,6 +225,7 @@ impl GlobalState {
flycheck_sender,
flycheck_receiver,
last_flycheck_error: None,
diagnostics_received: false,
test_run_session: None,
test_run_sender,

View file

@ -860,6 +860,11 @@ pub(crate) fn handle_runnables(
if cmd == "run" && spec.target_kind != TargetKind::Bin {
continue;
}
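// Binaries (and non-test commands) run from the workspace root; package
// tests run from the package's own directory.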
let cwd = if cmd != "test" || spec.target_kind == TargetKind::Bin {
spec.workspace_root.clone()
} else {
spec.cargo_toml.parent().to_path_buf()
};
let mut cargo_args =
vec![cmd.to_owned(), "--package".to_owned(), spec.package.clone()];
let all_targets = cmd != "run" && !is_crate_no_std;
@ -876,6 +881,7 @@ pub(crate) fn handle_runnables(
kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable {
workspace_root: Some(spec.workspace_root.clone().into()),
cwd: Some(cwd.into()),
override_cargo: config.override_cargo.clone(),
cargo_args,
cargo_extra_args: config.cargo_extra_args.clone(),
@ -893,6 +899,7 @@ pub(crate) fn handle_runnables(
kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable {
workspace_root: None,
cwd: None,
override_cargo: config.override_cargo,
cargo_args: vec!["check".to_owned(), "--workspace".to_owned()],
cargo_extra_args: config.cargo_extra_args,
@ -1783,18 +1790,18 @@ pub(crate) fn handle_open_docs(
let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match &ws.kind {
ProjectWorkspaceKind::Cargo { cargo, .. }
| ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. } => {
Some((cargo, ws.sysroot.as_ref().ok()))
Some((cargo, &ws.sysroot))
}
ProjectWorkspaceKind::Json { .. } => None,
ProjectWorkspaceKind::DetachedFile { .. } => None,
});
let (cargo, sysroot) = match ws_and_sysroot {
Some((ws, sysroot)) => (Some(ws), sysroot),
Some((ws, sysroot)) => (Some(ws), Some(sysroot)),
_ => (None, None),
};
let sysroot = sysroot.map(|p| p.root().as_str());
let sysroot = sysroot.and_then(|p| p.root()).map(|it| it.as_str());
let target_dir = cargo.map(|cargo| cargo.target_directory()).map(|p| p.as_str());
let Ok(remote_urls) = snap.analysis.external_docs(position, target_dir, sysroot) else {

View file

@ -139,6 +139,7 @@ fn integrated_completion_benchmark() {
enable_self_on_the_fly: true,
enable_private_editable: true,
enable_term_search: true,
term_search_fuel: 200,
full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
@ -149,11 +150,10 @@ fn integrated_completion_benchmark() {
group: true,
skip_glob_imports: true,
},
snippets: Vec::new(),
prefer_no_std: false,
prefer_prelude: true,
snippets: Vec::new(),
limit: None,
term_search_fuel: 200,
};
let position =
FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@ -184,6 +184,7 @@ fn integrated_completion_benchmark() {
enable_self_on_the_fly: true,
enable_private_editable: true,
enable_term_search: true,
term_search_fuel: 200,
full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
@ -194,11 +195,10 @@ fn integrated_completion_benchmark() {
group: true,
skip_glob_imports: true,
},
snippets: Vec::new(),
prefer_no_std: false,
prefer_prelude: true,
snippets: Vec::new(),
limit: None,
term_search_fuel: 200,
};
let position =
FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@ -227,6 +227,7 @@ fn integrated_completion_benchmark() {
enable_self_on_the_fly: true,
enable_private_editable: true,
enable_term_search: true,
term_search_fuel: 200,
full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
@ -237,11 +238,10 @@ fn integrated_completion_benchmark() {
group: true,
skip_glob_imports: true,
},
snippets: Vec::new(),
prefer_no_std: false,
prefer_prelude: true,
snippets: Vec::new(),
limit: None,
term_search_fuel: 200,
};
let position =
FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };

View file

@ -441,6 +441,8 @@ pub struct CargoRunnable {
pub override_cargo: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub workspace_root: Option<PathBuf>,
#[serde(skip_serializing_if = "Option::is_none")]
pub cwd: Option<PathBuf>,
// command, --package and --lib stuff
pub cargo_args: Vec<String>,
// user-specified additional cargo args, like `--release`.
@ -500,7 +502,6 @@ pub struct ServerStatusParams {
pub health: Health,
pub quiescent: bool,
pub message: Option<String>,
pub workspace_info: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq)]

View file

@ -1360,10 +1360,14 @@ pub(crate) fn runnable(
let config = snap.config.runnables();
let spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id)?;
let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone());
let target = spec.as_ref().map(|s| s.target.clone());
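// Binary runnables execute from the workspace root; everything else runs
// from its package directory.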
let cwd = match runnable.kind {
ide::RunnableKind::Bin { .. } => workspace_root.clone().map(|it| it.into()),
_ => spec.as_ref().map(|it| it.cargo_toml.parent().into()),
};
let target = spec.as_ref().map(|s| s.target.as_str());
let label = runnable.label(target);
let (cargo_args, executable_args) =
CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg);
let label = runnable.label(target);
let location = location_link(snap, None, runnable.nav)?;
Ok(lsp_ext::Runnable {
@ -1372,6 +1376,7 @@ pub(crate) fn runnable(
kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable {
workspace_root: workspace_root.map(|it| it.into()),
cwd,
override_cargo: config.override_cargo,
cargo_args,
cargo_extra_args: config.cargo_extra_args,

View file

@ -804,6 +804,10 @@ impl GlobalState {
fn handle_flycheck_msg(&mut self, message: flycheck::Message) {
match message {
flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => {
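// Clear the previous run's diagnostics only once fresh ones start
// arriving, so they don't vanish while the check is still in progress.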
if !self.diagnostics_received {
self.diagnostics.clear_check(id);
self.diagnostics_received = true;
}
let snap = self.snapshot();
let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
&self.config.diagnostics_map(),
@ -832,7 +836,7 @@ impl GlobalState {
flycheck::Message::Progress { id, progress } => {
let (state, message) = match progress {
flycheck::Progress::DidStart => {
self.diagnostics.clear_check(id);
self.diagnostics_received = false;
(Progress::Begin, None)
}
flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
@ -848,6 +852,9 @@ impl GlobalState {
flycheck::Progress::DidFinish(result) => {
self.last_flycheck_error =
result.err().map(|err| format!("cargo check failed to start: {err}"));
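// This run produced no diagnostics at all, so clear the stale ones now.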
if !self.diagnostics_received {
self.diagnostics.clear_check(id);
}
(Progress::End, None)
}
};

View file

@ -103,7 +103,6 @@ impl GlobalState {
health: lsp_ext::Health::Ok,
quiescent: self.is_quiescent(),
message: None,
workspace_info: None,
};
let mut message = String::new();
@ -164,53 +163,37 @@ impl GlobalState {
let proc_macro_clients =
self.proc_macro_clients.iter().map(Some).chain(iter::repeat_with(|| None));
let mut workspace_info = "Loaded workspaces:\n".to_owned();
for (ws, proc_macro_client) in self.workspaces.iter().zip(proc_macro_clients) {
format_to!(workspace_info, "- `{}`\n", ws.manifest_or_root());
format_to!(workspace_info, " - sysroot:");
match ws.sysroot.as_ref() {
Err(None) => format_to!(workspace_info, " None"),
Err(Some(e)) => {
status.health |= lsp_ext::Health::Warning;
format_to!(workspace_info, " {e}");
}
Ok(s) => {
format_to!(workspace_info, " `{}`", s.root().to_string());
if let Some(err) = s
.check_has_core()
.err()
.inspect(|_| status.health |= lsp_ext::Health::Warning)
{
format_to!(workspace_info, " ({err})");
}
if let Some(src_root) = s.src_root() {
format_to!(
workspace_info,
"\n - sysroot source: `{}`",
src_root
);
}
format_to!(workspace_info, "\n");
}
}
if let ProjectWorkspaceKind::Cargo { rustc: Err(Some(e)), .. } = &ws.kind {
if let Some(err) = ws.sysroot.error() {
status.health |= lsp_ext::Health::Warning;
format_to!(workspace_info, " - rustc workspace: {e}\n");
format_to!(
message,
"Workspace `{}` has sysroot errors: ",
ws.manifest_or_root()
);
message.push_str(err);
message.push_str("\n\n");
}
if let ProjectWorkspaceKind::Cargo { rustc: Err(Some(err)), .. } = &ws.kind {
status.health |= lsp_ext::Health::Warning;
format_to!(
message,
"Failed loading rustc_private crates for workspace `{}`: ",
ws.manifest_or_root()
);
message.push_str(err);
message.push_str("\n\n");
};
if let Some(proc_macro_client) = proc_macro_client {
format_to!(workspace_info, " - proc-macro server: ");
match proc_macro_client {
Ok(it) => format_to!(workspace_info, "`{}`\n", it.path()),
Err(e) => {
status.health |= lsp_ext::Health::Warning;
format_to!(workspace_info, "{e}\n")
}
}
if let Some(Err(err)) = proc_macro_client {
status.health |= lsp_ext::Health::Warning;
format_to!(
message,
"Failed spawning proc-macro server for workspace `{}`: {err}",
ws.manifest_or_root()
);
message.push_str("\n\n");
}
}
status.workspace_info = Some(workspace_info);
}
if !message.is_empty() {
@ -534,8 +517,8 @@ impl GlobalState {
.map(|(a, b)| (a.clone(), b.clone()))
.chain(
ws.sysroot
.as_ref()
.map(|it| ("RUSTUP_TOOLCHAIN".to_owned(), it.root().to_string())),
.root()
.map(|it| ("RUSTUP_TOOLCHAIN".to_owned(), it.to_string())),
)
.collect(),
@ -719,7 +702,7 @@ impl GlobalState {
}
ProjectWorkspaceKind::DetachedFile { .. } => return None,
},
ws.sysroot.as_ref().ok().map(|sysroot| sysroot.root().to_owned()),
ws.sysroot.root().map(ToOwned::to_owned),
))
})
.map(|(id, (root, manifest_path), sysroot_root)| {

View file

@ -21,7 +21,7 @@ fn load_cargo_with_fake_sysroot(file: &str) -> ProjectWorkspace {
rustc: Err(None),
cargo_config_extra_env: Default::default(),
},
sysroot: Ok(get_fake_sysroot()),
sysroot: get_fake_sysroot(),
rustc_cfg: Vec::new(),
cfg_overrides: Default::default(),
toolchain: None,
@ -69,7 +69,7 @@ fn get_fake_sysroot() -> Sysroot {
// fake sysroot, so we give them both the same path:
let sysroot_dir = AbsPathBuf::assert_utf8(sysroot_path);
let sysroot_src_dir = sysroot_dir.clone();
Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false)
Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir), false)
}
#[test]

View file

@ -260,6 +260,7 @@ fn main() {}
"executableArgs": ["test_eggs", "--exact", "--show-output"],
"cargoExtraArgs": [],
"overrideCargo": null,
"cwd": server.path().join("foo"),
"workspaceRoot": server.path().join("foo")
},
"kind": "cargo",
@ -279,6 +280,7 @@ fn main() {}
{
"args": {
"overrideCargo": null,
"cwd": server.path().join("foo"),
"workspaceRoot": server.path().join("foo"),
"cargoArgs": [
"test",
@ -325,6 +327,7 @@ fn main() {}
"executableArgs": [],
"cargoExtraArgs": [],
"overrideCargo": null,
"cwd": server.path().join("foo"),
"workspaceRoot": server.path().join("foo")
},
"kind": "cargo",
@ -336,6 +339,7 @@ fn main() {}
"executableArgs": [],
"cargoExtraArgs": [],
"overrideCargo": null,
"cwd": server.path().join("foo"),
"workspaceRoot": server.path().join("foo")
},
"kind": "cargo",
@ -415,6 +419,7 @@ mod tests {
"args": {
"overrideCargo": null,
"workspaceRoot": server.path().join(runnable),
"cwd": server.path().join(runnable),
"cargoArgs": [
"test",
"--package",
@ -432,6 +437,94 @@ mod tests {
}
}
// The main fn in packages should be run from the workspace root
#[test]
fn test_runnables_cwd() {
if skip_slow_tests() {
return;
}
let server = Project::with_fixture(
r#"
//- /foo/Cargo.toml
[workspace]
members = ["mainpkg", "otherpkg"]
//- /foo/mainpkg/Cargo.toml
[package]
name = "mainpkg"
version = "0.1.0"
//- /foo/mainpkg/src/main.rs
fn main() {}
//- /foo/otherpkg/Cargo.toml
[package]
name = "otherpkg"
version = "0.1.0"
//- /foo/otherpkg/src/lib.rs
#[test]
fn otherpkg() {}
"#,
)
.root("foo")
.server()
.wait_until_workspace_is_loaded();
server.request::<Runnables>(
RunnablesParams { text_document: server.doc_id("foo/mainpkg/src/main.rs"), position: None },
json!([
"{...}",
{
"label": "cargo test -p mainpkg --all-targets",
"kind": "cargo",
"args": {
"overrideCargo": null,
"workspaceRoot": server.path().join("foo"),
"cwd": server.path().join("foo"),
"cargoArgs": [
"test",
"--package",
"mainpkg",
"--all-targets"
],
"cargoExtraArgs": [],
"executableArgs": []
},
},
"{...}",
"{...}"
]),
);
server.request::<Runnables>(
RunnablesParams { text_document: server.doc_id("foo/otherpkg/src/lib.rs"), position: None },
json!([
"{...}",
{
"label": "cargo test -p otherpkg --all-targets",
"kind": "cargo",
"args": {
"overrideCargo": null,
"workspaceRoot": server.path().join("foo"),
"cwd": server.path().join("foo").join("otherpkg"),
"cargoArgs": [
"test",
"--package",
"otherpkg",
"--all-targets"
],
"cargoExtraArgs": [],
"executableArgs": []
},
},
"{...}",
"{...}"
]),
);
}
#[test]
fn test_format_document() {
if skip_slow_tests() {
@ -1059,11 +1152,11 @@ fn resolve_proc_macro() {
return;
}
let sysroot = project_model::Sysroot::discover_no_source(
let sysroot = project_model::Sysroot::discover(
&AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()),
&Default::default(),
)
.unwrap();
false,
);
let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap();

View file

@ -378,9 +378,26 @@ impl ast::UseTreeList {
/// Remove the unnecessary braces in current `UseTreeList`
pub fn remove_unnecessary_braces(mut self) {
// Returns true iff there is a single subtree and it is not the self keyword. The braces in
// `use x::{self};` are necessary and so we should not remove them.
let has_single_subtree_that_is_not_self = |u: &ast::UseTreeList| {
if let Some((single_subtree,)) = u.use_trees().collect_tuple() {
// We have a single subtree, check whether it is self.
let is_self = single_subtree.path().as_ref().map_or(false, |path| {
path.segment().and_then(|seg| seg.self_token()).is_some()
&& path.qualifier().is_none()
});
!is_self
} else {
// Not a single subtree
false
}
};
let remove_brace_in_use_tree_list = |u: &ast::UseTreeList| {
let use_tree_count = u.use_trees().count();
if use_tree_count == 1 {
if has_single_subtree_that_is_not_self(u) {
if let Some(a) = u.l_curly_token() {
ted::remove(a)
}
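A short illustration (not part of this change) of the behavior the doc comment above describes; the `use` paths are invented for the example:

```rust
// Single non-`self` subtree: the braces are unnecessary and get removed.
//   use std::{collections};        =>  use std::collections;
// A lone `self` keeps its braces: dropping them would produce the invalid
// `use std::collections::self;`, so the tree is left untouched.
//   use std::collections::{self};  =>  unchanged
// Multiple subtrees still need their braces.
//   use std::{fmt, io};            =>  unchanged
```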

View file

@ -8,6 +8,7 @@ use std::{
use rustc_lexer::unescape::{
unescape_byte, unescape_char, unescape_mixed, unescape_unicode, EscapeError, MixedUnit, Mode,
};
use stdx::always;
use crate::{
ast::{self, AstToken},
@ -181,25 +182,25 @@ pub trait IsString: AstToken {
self.quote_offsets().map(|it| it.quotes.1)
}
fn escaped_char_ranges(&self, cb: &mut dyn FnMut(TextRange, Result<char, EscapeError>)) {
let text_range_no_quotes = match self.text_range_between_quotes() {
Some(it) => it,
None => return,
};
let Some(text_range_no_quotes) = self.text_range_between_quotes() else { return };
let start = self.syntax().text_range().start();
let text = &self.text()[text_range_no_quotes - start];
let offset = text_range_no_quotes.start() - start;
unescape_unicode(text, Self::MODE, &mut |range, unescaped_char| {
let text_range =
TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
cb(text_range + offset, unescaped_char);
if let Some((s, e)) = range.start.try_into().ok().zip(range.end.try_into().ok()) {
cb(TextRange::new(s, e) + offset, unescaped_char);
}
});
}
fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
let contents_range = self.text_range_between_quotes()?;
assert!(TextRange::up_to(contents_range.len()).contains_range(range));
Some(range + contents_range.start())
if always!(TextRange::up_to(contents_range.len()).contains_range(range)) {
Some(range + contents_range.start())
} else {
None
}
}
}

View file

@ -1,5 +1,5 @@
<!---
lsp/ext.rs hash: a39009c351009d16
lsp/ext.rs hash: 1babf76a3c2cef3b
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:
@ -377,6 +377,7 @@ rust-analyzer supports only one `kind`, `"cargo"`. The `args` for `"cargo"` look
```typescript
{
workspaceRoot?: string;
cwd?: string;
cargoArgs: string[];
cargoExtraArgs: string[];
executableArgs: string[];
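// Example of a concrete `args` payload including the new `cwd` field
// (values are illustrative, not taken from a real project):
// {
//   "workspaceRoot": "/home/user/proj",
//   "cwd": "/home/user/proj/mypkg",
//   "cargoArgs": ["test", "--package", "mypkg", "--all-targets"],
//   "cargoExtraArgs": [],
//   "executableArgs": []
// }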

View file

@ -472,12 +472,6 @@ export class Ctx implements RustAnalyzerExtensionApi {
if (status.message) {
statusBar.tooltip.appendText(status.message);
}
if (status.workspaceInfo) {
if (statusBar.tooltip.value) {
statusBar.tooltip.appendMarkdown("\n\n---\n\n");
}
statusBar.tooltip.appendMarkdown(status.workspaceInfo);
}
if (statusBar.tooltip.value) {
statusBar.tooltip.appendMarkdown("\n\n---\n\n");
}

View file

@ -3,7 +3,7 @@ import * as vscode from "vscode";
import * as path from "path";
import type * as ra from "./lsp_ext";
import { Cargo, type ExecutableInfo, getRustcId, getSysroot } from "./toolchain";
import { Cargo, getRustcId, getSysroot } from "./toolchain";
import type { Ctx } from "./ctx";
import { prepareEnv } from "./run";
import { unwrapUndefinable } from "./undefinable";
@ -12,7 +12,6 @@ const debugOutput = vscode.window.createOutputChannel("Debug");
type DebugConfigProvider = (
config: ra.Runnable,
executable: string,
cargoWorkspace: string,
env: Record<string, string>,
sourceFileMap?: Record<string, string>,
) => vscode.DebugConfiguration;
@ -134,7 +133,7 @@ async function getDebugConfiguration(
}
const env = prepareEnv(runnable, ctx.config.runnablesExtraEnv);
const { executable, workspace: cargoWorkspace } = await getDebugExecutableInfo(runnable, env);
const executable = await getDebugExecutable(runnable, env);
let sourceFileMap = debugOptions.sourceFileMap;
if (sourceFileMap === "auto") {
// let's try to use the default toolchain
@ -148,13 +147,7 @@ async function getDebugConfiguration(
}
const provider = unwrapUndefinable(knownEngines[debugEngine.id]);
const debugConfig = provider(
runnable,
simplifyPath(executable),
cargoWorkspace,
env,
sourceFileMap,
);
const debugConfig = provider(runnable, simplifyPath(executable), env, sourceFileMap);
if (debugConfig.type in debugOptions.engineSettings) {
const settingsMap = (debugOptions.engineSettings as any)[debugConfig.type];
for (var key in settingsMap) {
@ -176,21 +169,20 @@ async function getDebugConfiguration(
return debugConfig;
}
async function getDebugExecutableInfo(
async function getDebugExecutable(
runnable: ra.Runnable,
env: Record<string, string>,
): Promise<ExecutableInfo> {
): Promise<string> {
const cargo = new Cargo(runnable.args.workspaceRoot || ".", debugOutput, env);
const executableInfo = await cargo.executableInfoFromArgs(runnable.args.cargoArgs);
const executable = await cargo.executableFromArgs(runnable.args.cargoArgs);
// if we are here, there were no compilation errors.
return executableInfo;
return executable;
}
function getCCppDebugConfig(
runnable: ra.Runnable,
executable: string,
cargoWorkspace: string,
env: Record<string, string>,
sourceFileMap?: Record<string, string>,
): vscode.DebugConfiguration {
@ -200,9 +192,12 @@ function getCCppDebugConfig(
name: runnable.label,
program: executable,
args: runnable.args.executableArgs,
cwd: cargoWorkspace || runnable.args.workspaceRoot,
cwd: runnable.args.cwd || runnable.args.workspaceRoot || ".",
sourceFileMap,
env,
environment: Object.entries(env).map((entry) => ({
name: entry[0],
value: entry[1],
})),
// See https://github.com/rust-lang/rust-analyzer/issues/16901#issuecomment-2024486941
osx: {
MIMode: "lldb",
@ -213,7 +208,6 @@ function getCCppDebugConfig(
function getCodeLldbDebugConfig(
runnable: ra.Runnable,
executable: string,
cargoWorkspace: string,
env: Record<string, string>,
sourceFileMap?: Record<string, string>,
): vscode.DebugConfiguration {
@ -223,7 +217,7 @@ function getCodeLldbDebugConfig(
name: runnable.label,
program: executable,
args: runnable.args.executableArgs,
cwd: cargoWorkspace || runnable.args.workspaceRoot,
cwd: runnable.args.cwd || runnable.args.workspaceRoot || ".",
sourceMap: sourceFileMap,
sourceLanguages: ["rust"],
env,
@ -233,7 +227,6 @@ function getCodeLldbDebugConfig(
function getNativeDebugConfig(
runnable: ra.Runnable,
executable: string,
cargoWorkspace: string,
env: Record<string, string>,
_sourceFileMap?: Record<string, string>,
): vscode.DebugConfiguration {
@ -244,7 +237,7 @@ function getNativeDebugConfig(
target: executable,
// See https://github.com/WebFreak001/code-debug/issues/359
arguments: quote(runnable.args.executableArgs),
cwd: cargoWorkspace || runnable.args.workspaceRoot,
cwd: runnable.args.cwd || runnable.args.workspaceRoot || ".",
env,
valuesFormatting: "prettyPrinters",
};

View file

@ -226,6 +226,7 @@ export type Runnable = {
kind: "cargo";
args: {
workspaceRoot?: string;
cwd?: string;
cargoArgs: string[];
cargoExtraArgs: string[];
executableArgs: string[];
@ -241,7 +242,6 @@ export type ServerStatusParams = {
health: "ok" | "warning" | "error";
quiescent: boolean;
message?: string;
workspaceInfo?: string;
};
export type SsrParams = {
query: string;

View file

@ -9,17 +9,11 @@ import { unwrapUndefinable } from "./undefinable";
interface CompilationArtifact {
fileName: string;
workspace: string;
name: string;
kind: string;
isTest: boolean;
}
export interface ExecutableInfo {
executable: string;
workspace: string;
}
export interface ArtifactSpec {
cargoArgs: string[];
filter?: (artifacts: CompilationArtifact[]) => CompilationArtifact[];
@ -74,7 +68,6 @@ export class Cargo {
artifacts.push({
fileName: message.executable,
name: message.target.name,
workspace: path.dirname(message.manifest_path),
kind: message.target.kind[0],
isTest: message.profile.test,
});
@ -93,7 +86,7 @@ export class Cargo {
return spec.filter?.(artifacts) ?? artifacts;
}
async executableInfoFromArgs(args: readonly string[]): Promise<ExecutableInfo> {
async executableFromArgs(args: readonly string[]): Promise<string> {
const artifacts = await this.getArtifacts(Cargo.artifactSpec(args));
if (artifacts.length === 0) {
@ -103,10 +96,7 @@ export class Cargo {
}
const artifact = unwrapUndefinable(artifacts[0]);
return {
executable: artifact.fileName,
workspace: artifact.workspace,
};
return artifact.fileName;
}
private async runCargo(